barbican-6.0.1/tox.ini:

[tox]
minversion = 2.0
envlist = py35,py27,pep8,docs
skipsdist = True

[testenv]
usedevelop = True
install_command = pip install -c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt?h=stable/queens} -U {opts} {packages}
deps =
    -r{toxinidir}/requirements.txt
    -r{toxinidir}/test-requirements.txt
commands =
    oslo-config-generator --config-file etc/oslo-config-generator/barbican.conf --output-file etc/barbican/barbican.conf
    /usr/bin/find . -type f -name "*.py[c|o]" -delete
    rm -f .testrepository/times.dbm
    coverage erase
    python setup.py testr --coverage --testr-args='{posargs}'
    coverage report -m
whitelist_externals = rm

[testenv:cover]
deps =
    {[testenv]deps}
    diff_cover
commands =
    coverage erase
    python setup.py testr --coverage --testr-args='{posargs}'
    coverage xml
    diff-cover --fail-under 100 --compare-branch master coverage.xml

[testenv:releasenotes]
commands = sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html

[testenv:pep8]
sitepackages = False
commands =
    flake8 {posargs}
    # Run security linter
    bandit -r barbican -x tests -n5

[testenv:genconfig]
whitelist_externals = bash
envdir = {toxworkdir}/pep8
commands =
    oslo-config-generator --config-file etc/oslo-config-generator/barbican.conf

[testenv:venv]
commands = {posargs}

[testenv:debug]
commands = oslo_debug_helper {posargs}

[testenv:py3pep8]
# This hack is in place to allow us to run py3 based flake8
# without installing barbican.
basepython = python3
install_command = /bin/echo {packages}
commands =
    pip install "hacking>=0.10.0,<0.11"
    flake8 barbican setup.py

[testenv:docs]
commands =
    rm -rf doc/build api-guide/build api-ref/build
    python setup.py build_sphinx
    sphinx-build -W -b html api-guide/source api-guide/build/html
    sphinx-build -W -b html api-ref/source api-ref/build/html
whitelist_externals = rm

[testenv:api-guide]
# This environment is called from CI scripts to test and publish
# the API Guide to developer.openstack.org.
commands =
    sphinx-build -W -b html -d api-guide/build/doctrees api-guide/source api-guide/build/html

[testenv:api-ref]
# This environment is called from CI scripts to test and publish
# the API Ref to developer.openstack.org.
commands =
    sphinx-build -W -b html -d api-ref/build/doctrees api-ref/source api-ref/build/html

[testenv:functional]
# This tox env is purely to make local test development easier
# Note: This requires local running instances of Barbican and Keystone
deps = -r{toxinidir}/test-requirements.txt
setenv = OS_TEST_PATH={toxinidir}/functionaltests
commands =
    /usr/bin/find . -type f -name "*.py[c|o]" -delete
    /bin/bash {toxinidir}/functionaltests/pretty_tox.sh '{posargs}'
passenv = KMIP_PLUGIN_ENABLED

[testenv:py35functional]
basepython = python3
deps = -r{toxinidir}/test-requirements.txt
setenv = OS_TEST_PATH={toxinidir}/functionaltests
commands =
    /usr/bin/find . -type f -name "*.py[c|o]" -delete
    /bin/bash {toxinidir}/functionaltests/pretty_tox.sh '{posargs}'
passenv = KMIP_PLUGIN_ENABLED

[testenv:cmd]
# This tox env is purely to make local test development easier
# Note: This requires local running instances of Barbican and Keystone
deps = -r{toxinidir}/test-requirements.txt
setenv = OS_TEST_PATH={toxinidir}/barbican/cmd/functionaltests
commands =
    /usr/bin/find . -type f -name "*.py[c|o]" -delete
    /bin/bash {toxinidir}/functionaltests/pretty_tox.sh '{posargs}'

[flake8]
exclude = .git,.idea,.tox,bin,dist,debian,rpmbuild,tools,*.egg-info,*.eggs,contrib,
          *docs/target,*.egg,build

[testenv:bandit]
deps = -r{toxinidir}/test-requirements.txt
commands = bandit -r barbican -x tests -n5

[testenv:bindep]
# Do not install any requirements. We want this to be fast and work even if
# system dependencies are missing, since it's used to tell you what system
# dependencies are missing! This also means that bindep must be installed
# separately, outside of the requirements files.
deps = bindep
commands = bindep test

[testenv:genpolicy]
envdir = {toxworkdir}/pep8
commands = oslopolicy-sample-generator --config-file=etc/oslo-config-generator/policy.conf

[hacking]
local-check-factory = barbican.hacking.checks.factory
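Each environment above is invoked by name through tox itself; as a rough
illustration (the functional env additionally assumes locally running
Barbican and Keystone instances, per its comments):

    tox -e pep8
    tox -e functional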
barbican-6.0.1/playbooks/legacy/barbican-devstack-tempest-base/post.yaml:

- hosts: primary
  tasks:

    - name: Copy files from {{ ansible_user_dir }}/workspace/ on node
      synchronize:
        src: '{{ ansible_user_dir }}/workspace/'
        dest: '{{ zuul.executor.log_root }}'
        mode: pull
        copy_links: true
        verify_host: true
        rsync_opts:
          - --include=/logs/**
          - --include=*/
          - --exclude=*
          - --prune-empty-dirs

barbican-6.0.1/playbooks/legacy/barbican-devstack-tempest-base/run.yaml:

- hosts: all
  name: Barbican devstack tempest base
  tasks:

    - name: Ensure legacy workspace directory
      file:
        path: '{{ ansible_user_dir }}/workspace'
        state: directory

    - shell:
        cmd: |
          set -e
          set -x
          cat > clonemap.yaml << EOF
          clonemap:
            - name: openstack-infra/devstack-gate
              dest: devstack-gate
          EOF
          /usr/zuul-env/bin/zuul-cloner -m clonemap.yaml --cache-dir /opt/git \
              git://git.openstack.org \
              openstack-infra/devstack-gate
        executable: /bin/bash
        chdir: '{{ ansible_user_dir }}/workspace'
      environment: '{{ zuul | zuul_legacy_vars }}'

    - shell:
        cmd: |
          set -e
          set -x
          export PYTHONUNBUFFERED=true
          export DEVSTACK_GATE_TEMPEST=1
          export DEVSTACK_GATE_TEMPEST_REGEX=barbican
          export KEEP_LOCALRC=1
          export PROJECTS="openstack/barbican $PROJECTS"
          export PROJECTS="openstack/python-barbicanclient $PROJECTS"
          export PROJECTS="openstack/barbican-tempest-plugin $PROJECTS"
          export DEVSTACK_LOCAL_CONFIG="enable_plugin barbican git://git.openstack.org/openstack/barbican"
          export DEVSTACK_LOCAL_CONFIG+=$'\n'"export TEMPEST_PLUGINS='/opt/stack/new/barbican-tempest-plugin'"
          export BRANCH_OVERRIDE=default
          if [ "$BRANCH_OVERRIDE" != "default" ] ; then
              export OVERRIDE_ZUUL_BRANCH=$BRANCH_OVERRIDE
          fi

          # Add configuration values for enabling security features in local.conf
          function pre_test_hook {
              if [ -f $BASE/new/barbican-tempest-plugin/tools/pre_test_hook.sh ] ; then
                  . $BASE/new/barbican-tempest-plugin/tools/pre_test_hook.sh
              fi
          }
          export -f pre_test_hook

          if [ "{{ database }}" == "postgres" ] ; then
              export DEVSTACK_GATE_POSTGRES=1
          elif [ "{{ castellan_from_git }}" == "1" ] ; then
              export DEVSTACK_PROJECT_FROM_GIT="castellan"
          elif [ "{{ cursive }}" == "1" ] ; then
              export DEVSTACK_PROJECT_FROM_GIT="cursive"
          elif [ "{{ python_version }}" == "py35" ] ; then
              export DEVSTACK_GATE_USE_PYTHON3=True
              export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service s-account"
              export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service s-container"
              export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service s-object"
              export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service s-proxy"
          fi
          cp devstack-gate/devstack-vm-gate-wrap.sh ./safe-devstack-vm-gate-wrap.sh
          ./safe-devstack-vm-gate-wrap.sh
        executable: /bin/bash
        chdir: '{{ ansible_user_dir }}/workspace'
      environment: '{{ zuul | zuul_legacy_vars }}'

barbican-6.0.1/playbooks/legacy/barbican-devstack-base/post.yaml:

- hosts: primary
  tasks:

    - name: Copy files from {{ ansible_user_dir }}/workspace/ on node
      synchronize:
        src: '{{ ansible_user_dir }}/workspace/'
        dest: '{{ zuul.executor.log_root }}'
        mode: pull
        copy_links: true
        verify_host: true
        rsync_opts:
          - --include=/logs/**
          - --include=*/
          - --exclude=*
          - --prune-empty-dirs

barbican-6.0.1/playbooks/legacy/barbican-devstack-base/run.yaml:

- hosts: all
  name: Barbican devstack base
  tasks:

    - name: Ensure legacy workspace directory
      file:
        path: '{{ ansible_user_dir }}/workspace'
        state: directory

    - shell:
        cmd: |
          set -e
          set -x
          cat > clonemap.yaml << EOF
          clonemap:
            - name: openstack-infra/devstack-gate
              dest: devstack-gate
          EOF
          /usr/zuul-env/bin/zuul-cloner -m clonemap.yaml --cache-dir /opt/git \
              git://git.openstack.org \
              openstack-infra/devstack-gate
        executable: /bin/bash
        chdir: '{{ ansible_user_dir }}/workspace'
      environment: '{{ zuul | zuul_legacy_vars }}'

    - shell:
        cmd: |
          set -e
          set -x
          export PYTHONUNBUFFERED=true
          export ENABLED_SERVICES="{{ services }}"
          export PROJECTS="openstack/barbican $PROJECTS"
          export PROJECTS="openstack/python-barbicanclient $PROJECTS"
          export PROJECTS="openstack/barbican-tempest-plugin $PROJECTS"
          export DEVSTACK_LOCAL_CONFIG="enable_plugin barbican git://git.openstack.org/openstack/barbican"
          if [ "{{ python_version }}" == "py35" ] ; then
              export DEVSTACK_GATE_USE_PYTHON3=True
              export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service s-account"
              export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service s-container"
              export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service s-object"
              export DEVSTACK_LOCAL_CONFIG+=$'\n'"disable_service s-proxy"
          else
              export DEVSTACK_GATE_USE_PYTHON3=False
          fi

          function gate_hook {
              $BASE/new/barbican/devstack/gate_hook.sh
          }
          export -f gate_hook

          function post_test_hook {
              cd /opt/stack/new/barbican/functionaltests
              ./post_test_hook.sh "{{ plugin }}"
          }
          export -f post_test_hook

          if [ "{{ database }}" == "postgres" ] ; then
              export DEVSTACK_GATE_POSTGRES=1
          fi
          cp devstack-gate/devstack-vm-gate-wrap.sh ./safe-devstack-vm-gate-wrap.sh
          ./safe-devstack-vm-gate-wrap.sh
        executable: /bin/bash
        chdir: '{{ ansible_user_dir }}/workspace'
      environment: '{{ zuul | zuul_legacy_vars }}'

barbican-6.0.1/playbooks/legacy/grenade-devstack-barbican/post.yaml:

- hosts: primary
  tasks:

    - name: Copy files from {{ ansible_user_dir }}/workspace/ on node
      synchronize:
        src: '{{ ansible_user_dir }}/workspace/'
        dest: '{{ zuul.executor.log_root }}'
        mode: pull
        copy_links: true
        verify_host: true
        rsync_opts:
          - --include=/logs/**
          - --include=*/
          - --exclude=*
          - --prune-empty-dirs

barbican-6.0.1/playbooks/legacy/grenade-devstack-barbican/run.yaml:

- hosts: all
  name: Autoconverted job legacy-grenade-dsvm-barbican from old job
    gate-grenade-dsvm-barbican-ubuntu-xenial
  tasks:

    - name: Ensure legacy workspace directory
      file:
        path: '{{ ansible_user_dir }}/workspace'
        state: directory

    - shell:
        cmd: |
          set -e
          set -x
          cat > clonemap.yaml << EOF
          clonemap:
            - name: openstack-infra/devstack-gate
              dest: devstack-gate
          EOF
          /usr/zuul-env/bin/zuul-cloner -m clonemap.yaml --cache-dir /opt/git \
              git://git.openstack.org \
              openstack-infra/devstack-gate
        executable: /bin/bash
        chdir: '{{ ansible_user_dir }}/workspace'
      environment: '{{ zuul | zuul_legacy_vars }}'

    - shell:
        cmd: |
          set -e
          set -x
          export PYTHONUNBUFFERED=true
          export PROJECTS="openstack/barbican $PROJECTS"
          export PROJECTS="openstack-dev/grenade $PROJECTS"
          export PROJECTS="openstack/python-barbicanclient $PROJECTS"
          export PROJECTS="openstack/barbican-tempest-plugin $PROJECTS"
          export GRENADE_PLUGINRC="enable_grenade_plugin barbican https://git.openstack.org/openstack/barbican"
          export DEVSTACK_LOCAL_CONFIG+=$'\n'"export TEMPEST_PLUGINS='/opt/stack/new/barbican-tempest-plugin'"
          export DEVSTACK_GATE_TEMPEST=1
          export DEVSTACK_GATE_GRENADE=pullup
          export DEVSTACK_GATE_TEMPEST_REGEX=barbican
          export BRANCH_OVERRIDE=default
          if [ "$BRANCH_OVERRIDE" != "default" ] ; then
              export OVERRIDE_ZUUL_BRANCH=$BRANCH_OVERRIDE
          fi

          # Add configuration values for enabling security features in local.conf
          function pre_test_hook {
              if [ -f /opt/stack/old/barbican-tempest-plugin/tools/pre_test_hook.sh ] ; then
                  . /opt/stack/old/barbican-tempest-plugin/tools/pre_test_hook.sh
              fi
          }
          export -f pre_test_hook

          cp devstack-gate/devstack-vm-gate-wrap.sh ./safe-devstack-vm-gate-wrap.sh
          ./safe-devstack-vm-gate-wrap.sh
        executable: /bin/bash
        chdir: '{{ ansible_user_dir }}/workspace'
      environment: '{{ zuul | zuul_legacy_vars }}'

barbican-6.0.1/playbooks/legacy/barbican-devstack-functional-base/post.yaml:

- hosts: primary
  tasks:

    - name: Copy files from {{ ansible_user_dir }}/workspace/ on node
      synchronize:
        src: '{{ ansible_user_dir }}/workspace/'
        dest: '{{ zuul.executor.log_root }}'
        mode: pull
        copy_links: true
        verify_host: true
        rsync_opts:
          - --include=/logs/**
          - --include=*/
          - --exclude=*
          - --prune-empty-dirs

barbican-6.0.1/playbooks/legacy/barbican-devstack-functional-base/run.yaml:

- hosts: all
  name: Barbican devstack functional base
  tasks:

    - name: Ensure legacy workspace directory
      file:
        path: '{{ ansible_user_dir }}/workspace'
        state: directory

    - shell:
        cmd: |
          set -e
          set -x
          cat > clonemap.yaml << EOF
          clonemap:
            - name: openstack-infra/devstack-gate
              dest: devstack-gate
          EOF
          /usr/zuul-env/bin/zuul-cloner -m clonemap.yaml --cache-dir /opt/git \
              git://git.openstack.org \
              openstack-infra/devstack-gate
        executable: /bin/bash
        chdir: '{{ ansible_user_dir }}/workspace'
      environment: '{{ zuul | zuul_legacy_vars }}'

    - shell:
        cmd: |
          set -e
          set -x
          cat << 'EOF' >>"/tmp/dg-local.conf"
          [[local|localrc]]
          enable_plugin barbican git://git.openstack.org/openstack/barbican
          EOF
        executable: /bin/bash
        chdir: '{{ ansible_user_dir }}/workspace'
      environment: '{{ zuul | zuul_legacy_vars }}'

    - shell:
        cmd: |
          set -e
          set -x
          export PYTHONUNBUFFERED=true
          export OVERRIDE_ENABLED_SERVICES="{{ services }}"
          export PROJECTS="openstack/barbican $PROJECTS"
          export PROJECTS="openstack/python-barbicanclient $PROJECTS"
          export PROJECTS="openstack/barbican-tempest-plugin $PROJECTS"
          if [ "{{ python_version }}" == "py35" ] ; then
              export DEVSTACK_GATE_USE_PYTHON3=True
          else
              export DEVSTACK_GATE_USE_PYTHON3=False
          fi

          function gate_hook {
              $BASE/new/barbican/devstack/gate_hook.sh
          }
          export -f gate_hook

          function post_test_hook {
              cd /opt/stack/new/barbican/functionaltests
              ./post_test_hook.sh "{{plugin}}"
          }
          export -f post_test_hook

          if [ "{{ database }}" == "postgres" ] ; then
              export DEVSTACK_GATE_POSTGRES=1
          fi
          cp devstack-gate/devstack-vm-gate-wrap.sh ./safe-devstack-vm-gate-wrap.sh
          ./safe-devstack-vm-gate-wrap.sh
        executable: /bin/bash
        chdir: '{{ ansible_user_dir }}/workspace'
      environment: '{{ zuul | zuul_legacy_vars }}'

barbican-6.0.1/setup.py:

# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools

# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: http://bugs.python.org/issue15881#msg170215
try:
    import multiprocessing  # noqa
except ImportError:
    pass

setuptools.setup(
    setup_requires=['pbr>=2.0.0'],
    pbr=True)
barbican-6.0.1/README.md:

Team and repository tags
========================

[![Team and repository tags](http://governance.openstack.org/badges/barbican.svg)](http://governance.openstack.org/reference/tags/index.html)

# Barbican

Barbican is a REST API designed for the secure storage, provisioning and
management of secrets. It is aimed at being useful for all environments,
including large ephemeral Clouds.

Barbican is an OpenStack project developed by the
[Barbican Project Team](https://wiki.openstack.org/wiki/Barbican) with support
from [Rackspace Hosting](http://www.rackspace.com/), EMC, Ericsson, Johns
Hopkins University, HP, Red Hat, Cisco Systems, and many more.

The full documentation can be found on the
[Barbican Developer Documentation Site](https://docs.openstack.org/barbican/latest/).

If you have a technical question, you can ask it at
[Ask OpenStack](https://ask.openstack.org/en/questions/) with the `barbican`
tag, or you can send an email to the
[OpenStack General mailing list](http://lists.openstack.org/pipermail/openstack/)
at `openstack@lists.openstack.org` with the prefix `[barbican]` in the subject.

To file a bug, use our bug tracker on
[Launchpad](https://bugs.launchpad.net/barbican/).

For development questions or discussion, hop on the
[OpenStack-dev mailing list](http://lists.openstack.org/pipermail/openstack-dev/)
at `openstack-dev@lists.openstack.org` and let us know what you think, just
add `[barbican]` to the subject. You can also join our IRC channel
`#openstack-barbican` on Freenode.

Barbican began as part of a set of applications that make up the CloudKeep
ecosystem. The other systems are:

* [Postern](https://github.com/cloudkeep/postern) - Go based agent that
  provides access to secrets from the Barbican API.
* [Palisade](https://github.com/cloudkeep/palisade) - AngularJS based web ui
  for the Barbican API.
* [Python-barbicanclient](https://github.com/openstack/python-barbicanclient) -
  A convenient Python-based library to interact with the Barbican API.

## Getting Started

Please visit our
[Users, Developers and Operators documentation](https://docs.openstack.org/barbican/latest/)
for details.

## Why Should You Use Barbican?

The current state of key management is atrocious. While Windows does have
some decent options through the use of the Data Protection API (DPAPI) and
Active Directory, Linux lacks a cohesive story around how to manage keys for
application use.

Barbican was designed to solve this problem. The system was motivated by
internal Rackspace needs, requirements from
[OpenStack](http://www.openstack.org/) and a realization that the current
state of the art could use some help.

Barbican will handle many types of secrets, including:

* **Symmetric Keys** - Used to perform reversible encryption of data at rest,
  typically using the AES algorithm set. This type of key is required to
  enable features like
  [encrypted Swift containers and Cinder volumes](http://www.openstack.org/software/openstack-storage/),
  [encrypted Cloud Backups](http://www.rackspace.com/cloud/backup/), etc.
* **Asymmetric Keys** - Asymmetric key pairs (sometimes referred to as
  [public / private keys](http://en.wikipedia.org/wiki/Public-key_cryptography))
  are used in many scenarios where communication between untrusted parties is
  desired. The most common case is with SSL/TLS certificates, but they are
  also used in solutions like SSH keys, S/MIME (mail) encryption and digital
  signatures.
* **Raw Secrets** - Barbican stores secrets as a base64 encoded block of data
  (encrypted, naturally). Clients can use the API to store any secrets in any
  format they desire. The [Postern](https://github.com/cloudkeep/postern)
  agent is capable of presenting these secrets in various formats to ease
  integration.

For the symmetric and asymmetric key types, Barbican supports full life cycle
management including provisioning, expiration, reporting, etc. A plugin
system allows for multiple certificate authority support (including public
and private CAs).
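As a minimal sketch of client-side usage with python-barbicanclient (the
Keystone endpoint, credentials and secret values below are placeholders, and
error handling is omitted):

```python
from keystoneauth1 import session
from keystoneauth1.identity import v3

from barbicanclient import client

# Authenticate against Keystone (placeholder credentials).
auth = v3.Password(auth_url='http://localhost:5000/v3',
                   username='admin', password='secret',
                   user_domain_name='Default',
                   project_name='admin', project_domain_name='Default')
sess = session.Session(auth=auth)

# Create a Barbican client from the Keystone session.
barbican = client.Client(session=sess)

# Store a raw secret, then read it back by its reference URL.
secret = barbican.secrets.create(name='my app key', payload='s3cr3t-payload')
secret_ref = secret.store()
retrieved = barbican.secrets.get(secret_ref)
print(retrieved.payload)
```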
## Design Goals

1. Provide a central secret-store capable of distributing secret / keying
   material to all types of deployments including ephemeral Cloud instances.
2. Support reasonable compliance regimes through reporting and auditability.
3. Application adoption costs should be minimal or non-existent.
4. Build a community and ecosystem by being open-source and extensible.
5. Improve security through sane defaults and centralized management of
   [policies for all secrets](https://github.com/cloudkeep/barbican/wiki/Policies).
6. Provide an out of band communication mechanism to notify and protect
   sensitive assets.

barbican-6.0.1/.coveragerc:

[run]
branch = True
omit = etc/*,setup.py,*egg*,.tox/*,barbican/tests/*,
       functionaltests/*,
       barbican/model/migration/alembic_migrations/versions/*,
       barbican/plugin/dogtag.py,
       barbican/plugin/symantec.py

[report]
ignore_errors = True
exclude_lines =
    pragma: no cover
    @abc.abstractmethod

barbican-6.0.1/api-ref/source/cas.inc: (empty)
barbican-6.0.1/api-ref/source/containers.inc: (empty)
barbican-6.0.1/api-ref/source/transportkeys.inc: (empty)
barbican-6.0.1/api-ref/source/secret_metadata.inc: (empty)
barbican-6.0.1/api-ref/source/secrets.inc: (empty)

barbican-6.0.1/api-ref/source/index.rst:

==================================
OpenStack Key Manager Service APIs
==================================

.. rest_expand_all::

.. include:: acls.inc
.. include:: cas.inc
.. include:: consumers.inc
.. include:: containers.inc
.. include:: orders.inc
.. include:: quotas.inc
.. include:: secret_metadata.inc
.. include:: secrets.inc
.. include:: secretstores.inc
.. include:: transportkeys.inc

barbican-6.0.1/api-ref/source/orders.inc: (empty)
barbican-6.0.1/api-ref/source/quotas.inc: (empty)

barbican-6.0.1/api-ref/source/conf.py:

# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# manila documentation build configuration file, created by
# sphinx-quickstart on Sat May 7 13:35:27 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

import os
import sys

html_theme = 'openstackdocs'
html_theme_options = {
    "sidebar_mode": "toc",
}

extensions = [
    'os_api_ref',
    'openstackdocstheme'
]

# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../'))
sys.path.insert(0, os.path.abspath('../'))
sys.path.insert(0, os.path.abspath('./'))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.

# Add any paths that contain templates here, relative to this directory.
# templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The encoding of source files.
# source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'Key Manager API Reference'
copyright = u'OpenStack Foundation'
repository_name = 'openstack/barbican'
bug_project = 'barbican'
bug_tag = 'api-ref'

# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
from barbican.version import version_info
# The full version, including alpha/beta/rc tags.
release = version_info.release_string()
# The short X.Y version.
version = version_info.version_string()

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
# language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []

# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = False

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built
# documents.
# keep_warnings = False

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# html_theme = 'alabaster'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []

# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
# html_title = u'Shared File Systems API Reference v2'

# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None

# The name of an image file (relative to this directory) to use as a favicon
# of the docs. This file should be a Windows icon file (.ico) being 16x16 or
# 32x32 pixels large.
# html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []

# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
html_last_updated_fmt = '%Y-%m-%d %H:%M'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}

# If false, no module index is generated.
# html_domain_indices = True

# If false, no index is generated.
# html_use_index = True

# If true, the index is split into individual pages for each letter.
# html_split_index = False

# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer.
# Default is True.
# html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None

# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
# html_search_language = 'en'

# A dictionary with options for the search language support, empty by
# default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
# html_search_options = {'type': 'default'}

# The name of a javascript file (relative to the configuration directory)
# that implements a search results scorer. If empty, the default will be
# used.
# html_search_scorer = 'scorer.js'

# Output file base name for HTML help builder.
htmlhelp_basename = 'barbicandoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    # 'preamble': '',

    # Latex figure (float) alignment
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'barbican.tex',
     u'OpenStack Key Manager API Documentation',
     u'OpenStack Foundation', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top
# of the title page.
# latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False

# If true, show page references after internal links.
# latex_show_pagerefs = False

# If true, show URL addresses after external links.
# latex_show_urls = False

# Documents to append as an appendix to all manuals.
# latex_appendices = []

# If false, no module index is generated.
# latex_domain_indices = True

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'barbican',
     u'OpenStack Key Manager API Documentation',
     u'Openstack Foundation', 1)
]

# If true, show URL addresses after external links.
# man_show_urls = False

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'Barbican',
     u'OpenStack Key Manager API Documentation',
     u'OpenStack Foundation', 'Barbican',
     'OpenStack Key Manager', 'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
# texinfo_appendices = []

# If false, no module index is generated.
# texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False

barbican-6.0.1/api-ref/source/acls.inc: (empty)
barbican-6.0.1/api-ref/source/secretstores.inc: (empty)
barbican-6.0.1/api-ref/source/consumers.inc: (empty)

barbican-6.0.1/barbican/model/sync.py:

# Copyright (c) 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from barbican.common import config
from barbican.model import repositories as repo

from oslo_log import log

# Import and configure logging.
CONF = config.CONF
log.setup(CONF, 'barbican')
LOG = log.getLogger(__name__)


def sync_secret_stores(sql_url, verbose, log_file):
    """Command to sync secret stores table with config.

    :param sql_url: sql connection string to connect to a database
    :param verbose: If True, log and print more information
    :param log_file: If set, override the log_file configured
    """
    if verbose:
        # The verbose flag prints out log events to the screen, otherwise
        # the log events will only go to the log file
        CONF.set_override('debug', True)

    if log_file:
        CONF.set_override('log_file', log_file)

    LOG.info("Syncing the secret_stores table with barbican.conf")
    log.setup(CONF, 'barbican')

    try:
        if sql_url:
            CONF.set_override('sql_connection', sql_url)
        repo.setup_database_engine_and_factory(
            initialize_secret_stores=True)
        repo.commit()
    except Exception as ex:
        LOG.exception('Failed to sync secret_stores table.')
        repo.rollback()
        raise ex
    finally:
        if verbose:
            CONF.clear_override('debug')
        if log_file:
            CONF.clear_override('log_file')
        repo.clear()
        if sql_url:
            CONF.clear_override('sql_connection')
        log.setup(CONF, 'barbican')  # reset the overrides
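A minimal sketch of driving this helper directly (the connection URL and log
path below are placeholders, not values from the tree):

    from barbican.model import sync

    # Sync the secret_stores table against barbican.conf, logging
    # verbosely to a local file.
    sync.sync_secret_stores(
        sql_url='mysql+pymysql://barbican:password@localhost/barbican',
        verbose=True,
        log_file='/tmp/barbican-sync.log')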
barbican-6.0.1/barbican/model/migration/__init__.py: (empty)

barbican-6.0.1/barbican/model/migration/commands.py:

# Copyright (c) 2013-2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Interface to the Alembic migration process and environment.

Concepts in this file are based on Quantum's Alembic approach.

Available Alembic commands are detailed here:
https://alembic.readthedocs.org/en/latest/api.html#module-alembic.command
"""

import os

from alembic import command as alembic_command
from alembic import config as alembic_config

from barbican.common import config
from barbican.common import utils

LOG = utils.getLogger(__name__)
CONF = config.CONF


def init_config(sql_url=None):
    """Initialize and return the Alembic configuration."""
    sqlalchemy_url = sql_url or CONF.sql_connection
    if not sqlalchemy_url:
        raise RuntimeError("Please specify a SQLAlchemy-friendly URL to "
                           "connect to the proper database, either through "
                           "the CLI or the configuration file.")

    if sqlalchemy_url and 'sqlite' in sqlalchemy_url:
        LOG.warning('!!! Limited support for migration commands using'
                    ' sqlite databases; This operation may not succeed.')

    config = alembic_config.Config(
        os.path.join(os.path.dirname(__file__), 'alembic.ini')
    )
    config.barbican_sqlalchemy_url = sqlalchemy_url
    config.set_main_option('script_location',
                           'barbican.model.migration:alembic_migrations')

    return config


def upgrade(to_version='head', sql_url=None):
    """Upgrade to the specified version."""
    alembic_cfg = init_config(sql_url)
    alembic_command.upgrade(alembic_cfg, to_version)


def history(verbose, sql_url=None):
    alembic_cfg = init_config(sql_url)
    alembic_command.history(alembic_cfg, verbose=verbose)


def current(verbose, sql_url=None):
    alembic_cfg = init_config(sql_url)
    alembic_command.current(alembic_cfg, verbose=verbose)


def stamp(to_version='head', sql_url=None):
    """Stamp the specified version, with no migration performed."""
    alembic_cfg = init_config(sql_url)
    alembic_command.stamp(alembic_cfg, to_version)


def generate(autogenerate=True, message='generate changes', sql_url=None):
    """Generate a version file."""
    alembic_cfg = init_config(sql_url)
    alembic_command.revision(alembic_cfg, message=message,
                             autogenerate=autogenerate)
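A minimal sketch of how these wrappers might be used; when sql_url is
omitted, the functions fall back to CONF.sql_connection per init_config
above:

    from barbican.model.migration import commands

    # Upgrade the configured database to the newest schema revision,
    # then report the revision it is now stamped at.
    commands.upgrade(to_version='head')
    commands.current(verbose=True)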
Binding tables are created # in the modules for relevant resources from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'kek_data', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('plugin_name', sa.String(length=255), nullable=False), sa.Column('kek_label', sa.String(length=255), nullable=True), sa.Column('tenant_id', sa.String(length=36), nullable=False), sa.Column('active', sa.Boolean(), nullable=False), sa.Column('bind_completed', sa.Boolean(), nullable=False), sa.Column('algorithm', sa.String(length=255), nullable=True), sa.Column('bit_length', sa.Integer(), nullable=True), sa.Column('mode', sa.String(length=255), nullable=True), sa.Column('plugin_meta', sa.Text(), nullable=True), sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'],), sa.PrimaryKeyConstraint('id') ) barbican-6.0.1/barbican/model/migration/alembic_migrations/README0000666000175000017500000000004713311733060024643 0ustar zuulzuul00000000000000Generic single-database configuration. barbican-6.0.1/barbican/model/migration/alembic_migrations/__init__.py0000666000175000017500000000000013311733060026061 0ustar zuulzuul00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/order_ops.py0000666000175000017500000000424713311733060026337 0ustar zuulzuul00000000000000# Copyright 2015 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # Initial operations for agent management extension # This module only manages the 'agents' table. 
Binding tables are created # in the modules for relevant resources from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'orders', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('tenant_id', sa.String(length=36), nullable=False), sa.Column('error_status_code', sa.String(length=16), nullable=True), sa.Column('error_reason', sa.String(length=255), nullable=True), sa.Column('secret_id', sa.String(length=36), nullable=True), sa.Column('secret_mode', sa.String(length=255), nullable=True), sa.Column('secret_algorithm', sa.String(length=255), nullable=True), sa.Column('secret_bit_length', sa.String(length=255), nullable=True), sa.Column('secret_expiration', sa.String(length=255), nullable=True), sa.Column('secret_payload_content_type', sa.String(length=255), nullable=True), sa.Column('secret_name', sa.String(length=255), nullable=True), sa.ForeignKeyConstraint(['secret_id'], ['secrets.id'], ), sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ), sa.PrimaryKeyConstraint('id') ) barbican-6.0.1/barbican/model/migration/alembic_migrations/transport_keys_init_ops.py0000666000175000017500000000265613311733060031340 0ustar zuulzuul00000000000000# Copyright 2015 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # Initial operations for agent management extension # This module only manages the 'agents' table. Binding tables are created # in the modules for relevant resources from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'transport_keys', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('plugin_name', sa.String(length=255), nullable=False), sa.Column('transport_key', sa.Text(), nullable=True), sa.PrimaryKeyConstraint('id') ) barbican-6.0.1/barbican/model/migration/alembic_migrations/script.py.mako0000666000175000017500000000172313311733060026571 0ustar zuulzuul00000000000000# Copyright ${create_date.year} OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
# """${message} Revision ID: ${up_revision} Revises: ${down_revision} Create Date: ${create_date} """ # revision identifiers, used by Alembic. revision = ${repr(up_revision)} down_revision = ${repr(down_revision)} from alembic import op import sqlalchemy as sa ${imports if imports else ""} def upgrade(): ${upgrades if upgrades else "pass"} barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/0000775000175000017500000000000013311733364025637 5ustar zuulzuul00000000000000././@LongLink0000000000000000000000000000016700000000000011221 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/6a4457517a3_rename_acl_creator_only_to_project_.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/6a4457517a3_rename_acl_creator_o0000666000175000017500000000304113311733060033271 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """rename ACL creator_only to project_access Revision ID: 6a4457517a3 Revises: 30dba269cc64 Create Date: 2015-06-03 11:54:55.187875 """ # revision identifiers, used by Alembic. revision = '6a4457517a3' down_revision = '30dba269cc64' from alembic import op import sqlalchemy as sa def upgrade(): op.alter_column('secret_acls', 'creator_only', existing_type=sa.BOOLEAN(), new_column_name='project_access') # reverse existing flag value as project_access is negation of creator_only op.execute('UPDATE secret_acls SET project_access = NOT project_access', execution_options={'autocommit': True}) op.alter_column('container_acls', 'creator_only', existing_type=sa.BOOLEAN(), new_column_name='project_access') # reverse existing flag value as project_access is negation of creator_only op.execute('UPDATE container_acls SET project_access = NOT project_access', execution_options={'autocommit': True}) ././@LongLink0000000000000000000000000000016700000000000011221 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/10220ccbe7fa_remove_transport_keys_column_from_.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/10220ccbe7fa_remove_transport_ke0000666000175000017500000000157713311733060033616 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Remove transport keys column from project quotas table Revision ID: 10220ccbe7fa Revises: 3c3b04040bfe Create Date: 2015-09-09 09:10:23.812681 """ # revision identifiers, used by Alembic. 
revision = '10220ccbe7fa' down_revision = '3c3b04040bfe' from alembic import op def upgrade(): op.drop_column('project_quotas', 'transport_keys') barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/1a0c2cdafb38_initial_version.py0000666000175000017500000000141313311733060033422 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """create test table Revision ID: 1a0c2cdafb38 Revises: juno Create Date: 2013-06-17 16:42:13.634746 """ # revision identifiers, used by Alembic. revision = '1a0c2cdafb38' down_revision = None def upgrade(): pass ././@LongLink0000000000000000000000000000017100000000000011214 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/443d6f4a69ac_added_secret_type_column_to_secrets_.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/443d6f4a69ac_added_secret_type_c0000666000175000017500000000173213311733060033433 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """added secret type column to secrets table Revision ID: 443d6f4a69ac Revises: aa2cf96a1d5 Create Date: 2015-02-16 12:35:12.876413 """ # revision identifiers, used by Alembic. revision = '443d6f4a69ac' down_revision = 'aa2cf96a1d5' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('secrets', sa.Column('secret_type', sa.String(length=255), nullable=False, server_default="opaque")) ././@LongLink0000000000000000000000000000017200000000000011215 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/3c3b04040bfe_add_owning_project_and_creator_to_cas.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/3c3b04040bfe_add_owning_project_0000666000175000017500000000240713311733060033435 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """add owning project and creator to CAs Revision ID: 3c3b04040bfe Revises: 156cd9933643 Create Date: 2015-09-04 12:22:22.745824 """ # revision identifiers, used by Alembic. 
revision = '3c3b04040bfe' down_revision = '156cd9933643' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('certificate_authorities', sa.Column('creator_id', sa.String(length=255), nullable=True)) op.add_column('certificate_authorities', sa.Column('project_id', sa.String(length=36), nullable=True)) op.create_foreign_key('cas_project_fk', 'certificate_authorities', 'projects', ['project_id'], ['id']) ././@LongLink0000000000000000000000000000017200000000000011215 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/4070806f6972_add_orders_plugin_metadata_table_and_.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/4070806f6972_add_orders_plugin_m0000666000175000017500000000342613311733060033170 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Add orders plugin metadata table and relationships Revision ID: 4070806f6972 Revises: 47b69e523451 Create Date: 2014-08-21 14:06:48.237701 """ # revision identifiers, used by Alembic. revision = '4070806f6972' down_revision = '47b69e523451' from alembic import op import sqlalchemy as sa def upgrade(): ctx = op.get_context() con = op.get_bind() table_exists = ctx.dialect.has_table(con.engine, 'order_plugin_metadata') if not table_exists: op.create_table( 'order_plugin_metadata', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('order_id', sa.String(length=36), nullable=False), sa.Column('key', sa.String(length=255), nullable=False), sa.Column('value', sa.String(length=255), nullable=False), sa.ForeignKeyConstraint(['order_id'], ['orders.id'],), sa.PrimaryKeyConstraint('id'), ) ././@LongLink0000000000000000000000000000017300000000000011216 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/256da65e0c5f_change_keystone_id_for_external_id_in_.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/256da65e0c5f_change_keystone_id_0000666000175000017500000000174613311733060033445 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Change keystone_id for external_id in Project model Revision ID: 256da65e0c5f Revises: 795737bb3c3 Create Date: 2014-12-22 03:55:29.072375 """ # revision identifiers, used by Alembic. 
revision = '256da65e0c5f' down_revision = '795737bb3c3' from alembic import op import sqlalchemy as sa def upgrade(): op.alter_column('projects', 'keystone_id', type_=sa.String(36), new_column_name='external_id') barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/juno_initial.py0000666000175000017500000000274513311733060030700 0ustar zuulzuul00000000000000# Copyright 2015 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # """juno_initial Revision ID: juno Revises: None """ # revision identifiers, used by Alembic. revision = 'juno' down_revision = '1a0c2cdafb38' from barbican.model.migration.alembic_migrations import container_init_ops from barbican.model.migration.alembic_migrations import encrypted_init_ops from barbican.model.migration.alembic_migrations import kek_init_ops from barbican.model.migration.alembic_migrations import order_ops from barbican.model.migration.alembic_migrations import projects_init_ops from barbican.model.migration.alembic_migrations import secrets_init_ops from barbican.model.migration.alembic_migrations import transport_keys_init_ops def upgrade(): projects_init_ops.upgrade() secrets_init_ops.upgrade() container_init_ops.upgrade() kek_init_ops.upgrade() encrypted_init_ops.upgrade() order_ops.upgrade() transport_keys_init_ops.upgrade() ././@LongLink0000000000000000000000000000015500000000000011216 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/dce488646127_add_secret_user_metadata.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/dce488646127_add_secret_user_met0000666000175000017500000000366313311733060033336 0ustar zuulzuul00000000000000# Copyright (c) 2015 IBM # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """add-secret-user-metadata Revision ID: dce488646127 Revises: 39a96e67e990 Create Date: 2016-02-09 04:52:03.975486 """ # revision identifiers, used by Alembic. 
revision = 'dce488646127' down_revision = '39a96e67e990' from alembic import op import sqlalchemy as sa def upgrade(): ctx = op.get_context() con = op.get_bind() table_exists = ctx.dialect.has_table(con.engine, 'secret_user_metadata') if not table_exists: op.create_table( 'secret_user_metadata', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('key', sa.String(length=255), nullable=False), sa.Column('value', sa.String(length=255), nullable=False), sa.Column('secret_id', sa.String(length=36), nullable=False), sa.ForeignKeyConstraint(['secret_id'], ['secrets.id'],), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('secret_id', 'key', name='_secret_key_uc') ) ././@LongLink0000000000000000000000000000015700000000000011220 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/1bece815014f_remove_projectsecret_table.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/1bece815014f_remove_projectsecre0000666000175000017500000000152013311733060033510 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """remove ProjectSecret table Revision ID: 1bece815014f Revises: 161f8aceb687 Create Date: 2015-06-23 16:17:50.805295 """ # revision identifiers, used by Alembic. revision = '1bece815014f' down_revision = '161f8aceb687' from alembic import op def upgrade(): op.drop_table('project_secret') ././@LongLink0000000000000000000000000000017300000000000011216 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/4ecde3a3a72a_add_cas_column_to_project_quotas_table.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/4ecde3a3a72a_add_cas_column_to_p0000666000175000017500000000166413311733060033566 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Add cas column to project quotas table Revision ID: 4ecde3a3a72a Revises: 10220ccbe7fa Create Date: 2015-09-09 09:40:08.540064 """ # revision identifiers, used by Alembic. 
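# Migration dce488646127 above enforces one metadatum per (secret_id, key)
# pair with a named composite UniqueConstraint declared inside create_table().
# A sketch of the same idiom with illustrative names:
from alembic import op
import sqlalchemy as sa


def _table_with_composite_unique():
    op.create_table(
        'example_metadata',
        sa.Column('id', sa.String(length=36), nullable=False),
        sa.Column('owner_id', sa.String(length=36), nullable=False),
        sa.Column('key', sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        # Naming the constraint keeps a later drop_constraint() portable.
        sa.UniqueConstraint('owner_id', 'key', name='_example_owner_key_uc'),
    )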
revision = '4ecde3a3a72a' down_revision = '10220ccbe7fa' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column( 'project_quotas', sa.Column('cas', sa.Integer(), nullable=True)) ././@LongLink0000000000000000000000000000017100000000000011214 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/254495565185_removing_redundant_fields_from_order.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/254495565185_removing_redundant_0000666000175000017500000000213713311733060033143 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """removing redundant fields from order Revision ID: 254495565185 Revises: 2843d6469f25 Create Date: 2014-09-16 12:09:23.716390 """ # revision identifiers, used by Alembic. revision = '254495565185' down_revision = '2843d6469f25' from alembic import op def upgrade(): op.drop_column('orders', 'secret_mode') op.drop_column('orders', 'secret_algorithm') op.drop_column('orders', 'secret_bit_length') op.drop_column('orders', 'secret_expiration') op.drop_column('orders', 'secret_payload_content_type') op.drop_column('orders', 'secret_name') ././@LongLink0000000000000000000000000000016700000000000011221 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/1c0f328bfce0_fixing_composite_primary_keys_and_.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/1c0f328bfce0_fixing_composite_pr0000666000175000017500000001100713311733060033571 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Fixing composite primary keys and adding indexes to foreign key Revision ID: 1c0f328bfce0 Revises: 3d36a26b88af Create Date: 2015-03-04 17:09:41.479708 """ # revision identifiers, used by Alembic. 
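# Migration 4ecde3a3a72a above adds the new quota column as nullable=True, so
# existing project_quotas rows upgrade without any backfill. A minimal sketch
# of the same add-nullable-column step (names illustrative; the assumption in
# this sketch is that NULL is given meaning at the application layer):
from alembic import op
import sqlalchemy as sa


def _add_nullable_counter():
    # No server_default and no data migration are needed for a nullable add.
    op.add_column('example_quotas',
                  sa.Column('widgets', sa.Integer(), nullable=True))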
revision = '1c0f328bfce0' down_revision = '2d21598e7e70' from alembic import op import sqlalchemy as sa def _drop_constraint(ctx, name, table): if ctx.dialect.name == 'mysql': # MySQL won't allow some operations with constraints in place op.drop_constraint(name, table, type_='foreignkey') def upgrade(): op.create_index(op.f('ix_certificate_authority_metadata_ca_id'), 'certificate_authority_metadata', ['ca_id'], unique=False) op.create_index(op.f('ix_certificate_authority_metadata_key'), 'certificate_authority_metadata', ['key'], unique=False) op.create_index(op.f('ix_container_consumer_metadata_container_id'), 'container_consumer_metadata', ['container_id'], unique=False) op.create_index(op.f('ix_container_secret_container_id'), 'container_secret', ['container_id'], unique=False) op.create_index(op.f('ix_container_secret_secret_id'), 'container_secret', ['secret_id'], unique=False) op.create_index(op.f('ix_containers_project_id'), 'containers', ['project_id'], unique=False) op.create_index(op.f('ix_encrypted_data_kek_id'), 'encrypted_data', ['kek_id'], unique=False) op.create_index(op.f('ix_encrypted_data_secret_id'), 'encrypted_data', ['secret_id'], unique=False) op.create_index(op.f('ix_kek_data_project_id'), 'kek_data', ['project_id'], unique=False) op.create_index(op.f('ix_order_barbican_metadata_order_id'), 'order_barbican_metadata', ['order_id'], unique=False) op.create_index(op.f('ix_order_plugin_metadata_order_id'), 'order_plugin_metadata', ['order_id'], unique=False) op.create_index(op.f('ix_order_retry_tasks_order_id'), 'order_retry_tasks', ['order_id'], unique=False) op.create_index(op.f('ix_orders_container_id'), 'orders', ['container_id'], unique=False) op.create_index(op.f('ix_orders_project_id'), 'orders', ['project_id'], unique=False) op.create_index(op.f('ix_orders_secret_id'), 'orders', ['secret_id'], unique=False) ctx = op.get_context() _drop_constraint(ctx, 'preferred_certificate_authorities_ibfk_1', 'preferred_certificate_authorities') op.alter_column('preferred_certificate_authorities', 'ca_id', existing_type=sa.VARCHAR(length=36), nullable=False) op.create_foreign_key('preferred_certificate_authorities_fk', 'preferred_certificate_authorities', 'certificate_authorities', ['ca_id'], ['id']) op.create_index(op.f('ix_preferred_certificate_authorities_ca_id'), 'preferred_certificate_authorities', ['ca_id'], unique=False) op.create_index(op.f('ix_preferred_certificate_authorities_project_id'), 'preferred_certificate_authorities', ['project_id'], unique=True) op.create_index(op.f('ix_project_certificate_authorities_ca_id'), 'project_certificate_authorities', ['ca_id'], unique=False) op.create_index(op.f('ix_project_certificate_authorities_project_id'), 'project_certificate_authorities', ['project_id'], unique=False) op.create_index(op.f('ix_project_secret_project_id'), 'project_secret', ['project_id'], unique=False) op.create_index(op.f('ix_project_secret_secret_id'), 'project_secret', ['secret_id'], unique=False) op.create_index(op.f('ix_secret_store_metadata_secret_id'), 'secret_store_metadata', ['secret_id'], unique=False) ././@LongLink0000000000000000000000000000017100000000000011214 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/156cd9933643_add_project_column_to_consumer_table.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/156cd9933643_add_project_column_0000666000175000017500000000244113311733060033237 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you 
may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Add project column to consumer table Revision ID: 156cd9933643 Revises: 46b98cde536 Create Date: 2015-08-28 20:53:23.205128 """ # revision identifiers, used by Alembic. revision = '156cd9933643' down_revision = '46b98cde536' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column( 'container_consumer_metadata', sa.Column('project_id', sa.String(length=36), nullable=True)) op.create_index( op.f('ix_container_consumer_metadata_project_id'), 'container_consumer_metadata', ['project_id'], unique=False) op.create_foreign_key( None, 'container_consumer_metadata', 'projects', ['project_id'], ['id']) ././@LongLink0000000000000000000000000000014600000000000011216 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/d2780d5aa510_change_url_length.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/d2780d5aa510_change_url_length.p0000666000175000017500000000166013311733060033273 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """change_url_length Revision ID: d2780d5aa510 Revises: dce488646127 Create Date: 2016-03-11 09:39:32.593231 """ # revision identifiers, used by Alembic. revision = 'd2780d5aa510' down_revision = 'dce488646127' from alembic import op import sqlalchemy as sa def upgrade(): op.alter_column( 'container_consumer_metadata', 'URL', type_=sa.String(length=255) ) ././@LongLink0000000000000000000000000000016700000000000011221 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/13d127569afa_create_secret_store_metadata_table.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/13d127569afa_create_secret_store0000666000175000017500000000357013311733060033423 0ustar zuulzuul00000000000000# Copyright (c) 2014 The Johns Hopkins University/Applied Physics Laboratory # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """create_secret_store_metadata_table Revision ID: 13d127569afa Revises: juno Create Date: 2014-04-24 13:15:41.858266 """ # revision identifiers, used by Alembic. 
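# Migration 156cd9933643 above passes None as the constraint name to
# create_foreign_key(), letting Alembic (or the configured naming convention)
# generate one; most other barbican migrations name constraints explicitly,
# which keeps later drops deterministic across dialects. A sketch of the
# index-plus-named-FK pairing, with illustrative table names:
from alembic import op


def _link_example_to_projects():
    op.create_index('ix_example_project_id', 'example', ['project_id'],
                    unique=False)
    # Explicitly named FK; dropping it later does not depend on the
    # database's auto-generated name.
    op.create_foreign_key('example_project_fk', 'example', 'projects',
                          ['project_id'], ['id'])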
revision = '13d127569afa' down_revision = 'juno' from alembic import op import sqlalchemy as sa def upgrade(): ctx = op.get_context() con = op.get_bind() table_exists = ctx.dialect.has_table(con.engine, 'secret_store_metadata') if not table_exists: op.create_table( 'secret_store_metadata', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('secret_id', sa.String(length=36), nullable=False), sa.Column('key', sa.String(length=255), nullable=False), sa.Column('value', sa.String(length=255), nullable=False), sa.ForeignKeyConstraint(['secret_id'], ['secrets.id'],), sa.PrimaryKeyConstraint('id'), ) ././@LongLink0000000000000000000000000000017000000000000011213 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/1a7cf79559e3_new_secret_and_container_acl_tables.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/1a7cf79559e3_new_secret_and_cont0000666000175000017500000001321213311733060033413 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """New secret and container ACL tables Revision ID: 1a7cf79559e3 Revises: 1c0f328bfce0 Create Date: 2015-04-01 13:31:04.292754 """ # revision identifiers, used by Alembic. 
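# Nearly every table created in these migrations repeats the same
# audit/soft-delete columns (created_at, updated_at, deleted_at, deleted,
# status), which correspond to the fields on barbican's ModelBase. A sketch
# of how that boilerplate could be factored out in a migration; the helper
# name is illustrative, and the actual migrations inline the columns:
import sqlalchemy as sa


def _model_base_columns():
    # Returns fresh Column objects; Column instances must not be shared
    # between two Table definitions.
    return [
        sa.Column('id', sa.String(length=36), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('deleted', sa.Boolean(), nullable=False),
        sa.Column('status', sa.String(length=20), nullable=False),
    ]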
revision = '1a7cf79559e3' down_revision = '1c0f328bfce0' from alembic import op import sqlalchemy as sa def upgrade(): ctx = op.get_context() con = op.get_bind() table_exists = ctx.dialect.has_table(con.engine, 'secret_acls') if not table_exists: op.create_table( 'secret_acls', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('secret_id', sa.String(length=36), nullable=False), sa.Column('operation', sa.String(length=255), nullable=False), sa.Column('creator_only', sa.Boolean(), nullable=False), sa.ForeignKeyConstraint(['secret_id'], ['secrets.id'],), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('secret_id', 'operation', name='_secret_acl_operation_uc') ) op.create_index(op.f('ix_secret_acls_secret_id'), 'secret_acls', ['secret_id'], unique=False) table_exists = ctx.dialect.has_table(con.engine, 'container_acls') if not table_exists: op.create_table( 'container_acls', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('container_id', sa.String(length=36), nullable=False), sa.Column('operation', sa.String(length=255), nullable=False), sa.Column('creator_only', sa.Boolean(), nullable=False), sa.ForeignKeyConstraint(['container_id'], ['containers.id'],), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('container_id', 'operation', name='_container_acl_operation_uc') ) op.create_index(op.f('ix_container_acls_container_id'), 'container_acls', ['container_id'], unique=False) table_exists = ctx.dialect.has_table(con.engine, 'secret_acl_users') if not table_exists: op.create_table( 'secret_acl_users', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('acl_id', sa.String(length=36), nullable=False), sa.Column('user_id', sa.String(length=255), nullable=False), sa.ForeignKeyConstraint(['acl_id'], ['secret_acls.id'],), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('acl_id', 'user_id', name='_secret_acl_user_uc') ) op.create_index(op.f('ix_secret_acl_users_acl_id'), 'secret_acl_users', ['acl_id'], unique=False) table_exists = ctx.dialect.has_table(con.engine, 'container_acl_users') if not table_exists: op.create_table( 'container_acl_users', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('acl_id', sa.String(length=36), nullable=False), sa.Column('user_id', sa.String(length=255), nullable=False), sa.ForeignKeyConstraint(['acl_id'], ['container_acls.id'],), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('acl_id', 'user_id', 
name='_container_acl_user_uc') ) op.create_index(op.f('ix_container_acl_users_acl_id'), 'container_acl_users', ['acl_id'], unique=False) op.add_column(u'containers', sa.Column('creator_id', sa.String(length=255), nullable=True)) op.add_column(u'orders', sa.Column('creator_id', sa.String(length=255), nullable=True)) op.add_column(u'secrets', sa.Column('creator_id', sa.String(length=255), nullable=True)) ././@LongLink0000000000000000000000000000016600000000000011220 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/3d36a26b88af_add_order_barbican_metadata_table.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/3d36a26b88af_add_order_barbican_0000666000175000017500000000340713311733060033365 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Add OrderBarbicanMetadata table Revision ID: 3d36a26b88af Revises: 443d6f4a69ac Create Date: 2015-02-20 12:27:08.155647 """ # revision identifiers, used by Alembic. revision = '3d36a26b88af' down_revision = '443d6f4a69ac' from alembic import op import sqlalchemy as sa def upgrade(): ctx = op.get_context() con = op.get_bind() table_exists = ctx.dialect.has_table(con.engine, 'order_barbican_metadata') if not table_exists: op.create_table( 'order_barbican_metadata', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('order_id', sa.String(length=36), nullable=False), sa.Column('key', sa.String(length=255), nullable=False), sa.Column('value', sa.String(length=255), nullable=False), sa.ForeignKeyConstraint(['order_id'], ['orders.id'], ), sa.PrimaryKeyConstraint('id') ) ././@LongLink0000000000000000000000000000014600000000000011216 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/aa2cf96a1d5_add_orderretrytask.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/aa2cf96a1d5_add_orderretrytask.p0000666000175000017500000000265513311733060033605 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Add OrderRetryTask Revision ID: aa2cf96a1d5 Revises: 256da65e0c5f Create Date: 2015-01-19 10:27:19.179196 """ # revision identifiers, used by Alembic. 
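# Migration 1a7cf79559e3 above finishes by stamping a nullable creator_id
# onto containers, orders and secrets in one pass. A sketch of the same
# fan-out written as a loop (equivalent effect; the migration itself spells
# the three calls out):
from alembic import op
import sqlalchemy as sa


def _add_creator_id_everywhere():
    for table in ('containers', 'orders', 'secrets'):
        op.add_column(
            table,
            sa.Column('creator_id', sa.String(length=255), nullable=True))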
revision = "aa2cf96a1d5" down_revision = "256da65e0c5f" from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( "order_retry_tasks", sa.Column("id", sa.String(length=36), nullable=False), sa.Column("order_id", sa.String(length=36), nullable=False), sa.Column("retry_task", sa.Text(), nullable=False), sa.Column("retry_at", sa.DateTime(), nullable=False), sa.Column("retry_args", sa.Text(), nullable=False), sa.Column("retry_kwargs", sa.Text(), nullable=False), sa.Column("retry_count", sa.Integer(), nullable=False), sa.ForeignKeyConstraint(["order_id"], ["orders.id"]), sa.PrimaryKeyConstraint("id"), mysql_engine="InnoDB" ) ././@LongLink0000000000000000000000000000016300000000000011215 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/2843d6469f25_add_sub_status_info_for_orders.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/2843d6469f25_add_sub_status_info0000666000175000017500000000214113311733060033264 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """add sub status info for orders Revision ID: 2843d6469f25 Revises: 2ab3f5371bde Create Date: 2014-09-16 12:31:15.181380 """ # revision identifiers, used by Alembic. revision = '2843d6469f25' down_revision = '2ab3f5371bde' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('orders', sa.Column('sub_status', sa.String(length=36), nullable=True)) op.add_column('orders', sa.Column('sub_status_message', sa.String(length=255), nullable=True)) ././@LongLink0000000000000000000000000000015400000000000011215 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/39a96e67e990_add_missing_constraints.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/39a96e67e990_add_missing_constra0000666000175000017500000000317213311733060033356 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Add missing constraints Revision ID: 39a96e67e990 Revises: 4ecde3a3a72a Create Date: 2016-01-26 13:18:06.113621 """ # revision identifiers, used by Alembic. revision = '39a96e67e990' down_revision = '4ecde3a3a72a' from alembic import op import sqlalchemy as sa def upgrade(): # Add missing projects table keystone_id uniqueness constraint. op.create_unique_constraint('uc_projects_external_ids', 'projects', ['external_id']) # Add missing default for secret_acls' project_access. 
op.alter_column('secret_acls', 'project_access', server_default=sa.sql.expression.true(), existing_type=sa.Boolean, existing_server_default=None, existing_nullable=False) # Add missing default for container_acls' project_access. op.alter_column('container_acls', 'project_access', server_default=sa.sql.expression.true(), existing_type=sa.Boolean, existing_server_default=None, existing_nullable=False) ././@LongLink0000000000000000000000000000017500000000000011220 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/161f8aceb687_fill_project_id_to_secrets_where_missing.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/161f8aceb687_fill_project_id_to_0000666000175000017500000000444413311733060033461 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """fill project_id to secrets where missing Revision ID: 161f8aceb687 Revises: 1bc885808c76 Create Date: 2015-06-22 15:58:03.131256 """ # revision identifiers, used by Alembic. revision = '161f8aceb687' down_revision = '1bc885808c76' from alembic import op import sqlalchemy as sa def _get_database_metadata(): con = op.get_bind() metadata = sa.MetaData(bind=con) metadata.reflect() return metadata def _drop_constraint(ctx, name, table): if ctx.dialect.name == 'mysql': # MySQL won't allow some operations with constraints in place op.drop_constraint(name, table, type_='foreignkey') def _create_constraint(ctx, name, tableone, tabletwo, columnone, columntwo): if ctx.dialect.name == 'mysql': # Recreate foreign key constraint op.create_foreign_key(name, tableone, tabletwo, columnone, columntwo) def upgrade(): metadata = _get_database_metadata() # Get relevant tables secrets = metadata.tables['secrets'] project_secret = metadata.tables['project_secret'] # Add project_id to the secrets op.execute(secrets.update(). values({'project_id': project_secret.c.project_id}). where(secrets.c.id == project_secret.c.secret_id). where(secrets.c.project_id == None) # noqa ) # Need to drop foreign key constraint before mysql will allow changes ctx = op.get_context() _drop_constraint(ctx, 'secrets_project_fk', 'secrets') # make project_id no longer nullable op.alter_column('secrets', 'project_id', type_=sa.String(36), nullable=False) # Create foreign key constraint again _create_constraint(ctx, 'secrets_project_fk', 'secrets', 'projects', ['project_id'], ['id']) ././@LongLink0000000000000000000000000000016700000000000011221 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/2ab3f5371bde_dsa_in_container_type_modelbase_to.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/2ab3f5371bde_dsa_in_container_ty0000666000175000017500000000331513311733060033542 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """dsa in container type modelbase_to Revision ID: 2ab3f5371bde Revises: 4070806f6972 Create Date: 2014-09-02 12:11:43.524247 """ # revision identifiers, used by Alembic. revision = '2ab3f5371bde' down_revision = '4070806f6972' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('container_secret', sa.Column('created_at', sa.DateTime(), nullable=False)) op.add_column('container_secret', sa.Column('deleted', sa.Boolean(), nullable=False)) op.add_column('container_secret', sa.Column('deleted_at', sa.DateTime(), nullable=True)) op.add_column('container_secret', sa.Column('id', sa.String(length=36), nullable=False)) op.add_column('container_secret', sa.Column('status', sa.String(length=20), nullable=False)) op.add_column('container_secret', sa.Column('updated_at', sa.DateTime(), nullable=False)) op.create_primary_key('pk_container_secret', 'container_secret', ['id']) op.create_unique_constraint( '_container_secret_name_uc', 'container_secret', ['container_id', 'secret_id', 'name'] ) ././@LongLink0000000000000000000000000000016700000000000011221 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/39cf2e645cba_model_for_multiple_backend_support.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/39cf2e645cba_model_for_multiple_0000666000175000017500000000632613311733060033564 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Model for multiple backend support Revision ID: 39cf2e645cba Revises: d2780d5aa510 Create Date: 2016-07-29 16:45:22.953811 """ # revision identifiers, used by Alembic. 
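# Migration 161f8aceb687 above is the one data migration in this series: it
# reflects the live schema, copies project_id from project_secret into
# secrets with a correlated UPDATE, then tightens the column to NOT NULL
# (dropping and re-creating the MySQL foreign key around the change). A
# condensed sketch of the correlated-update step, mirroring that migration:
from alembic import op
import sqlalchemy as sa


def _backfill_project_id():
    con = op.get_bind()
    metadata = sa.MetaData(bind=con)
    metadata.reflect()
    secrets = metadata.tables['secrets']
    project_secret = metadata.tables['project_secret']
    # Only rows still missing a project_id are touched.
    op.execute(
        secrets.update()
        .values({'project_id': project_secret.c.project_id})
        .where(secrets.c.id == project_secret.c.secret_id)
        .where(secrets.c.project_id == None)  # noqa: E711
    )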
revision = '39cf2e645cba' down_revision = 'd2780d5aa510' from alembic import op import sqlalchemy as sa def upgrade(): ctx = op.get_context() con = op.get_bind() table_exists = ctx.dialect.has_table(con.engine, 'secret_stores') if not table_exists: op.create_table( 'secret_stores', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('store_plugin', sa.String(length=255), nullable=False), sa.Column('crypto_plugin', sa.String(length=255), nullable=True), sa.Column('global_default', sa.Boolean(), nullable=False, default=False), sa.Column('name', sa.String(length=255), nullable=False), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('store_plugin', 'crypto_plugin', name='_secret_stores_plugin_names_uc'), sa.UniqueConstraint('name', name='_secret_stores_name_uc') ) table_exists = ctx.dialect.has_table(con.engine, 'project_secret_store') if not table_exists: op.create_table( 'project_secret_store', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('project_id', sa.String(length=36), nullable=False), sa.Column('secret_store_id', sa.String(length=36), nullable=False), sa.ForeignKeyConstraint(['project_id'], ['projects.id'],), sa.ForeignKeyConstraint( ['secret_store_id'], ['secret_stores.id'],), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('project_id', name='_project_secret_store_project_uc') ) op.create_index(op.f('ix_project_secret_store_project_id'), 'project_secret_store', ['project_id'], unique=True) ././@LongLink0000000000000000000000000000015600000000000011217 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/1bc885808c76_add_project_id_to_secrets.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/1bc885808c76_add_project_id_to_s0000666000175000017500000000222713311733060033310 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Add project id to Secrets Revision ID: 1bc885808c76 Revises: 6a4457517a3 Create Date: 2015-04-24 13:53:29.926426 """ # revision identifiers, used by Alembic. 
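# Migration 39cf2e645cba above pins each project to at most one preferred
# secret store by making project_id unique twice over: a named
# UniqueConstraint in the table definition plus a unique index. A sketch of
# just the one-row-per-project part (illustrative table name):
from alembic import op
import sqlalchemy as sa


def _one_row_per_project():
    op.create_table(
        'example_per_project',
        sa.Column('id', sa.String(length=36), primary_key=True),
        sa.Column('project_id', sa.String(length=36), nullable=False),
        sa.UniqueConstraint('project_id', name='_example_project_uc'),
    )
    op.create_index(op.f('ix_example_per_project_project_id'),
                    'example_per_project', ['project_id'], unique=True)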
revision = '1bc885808c76' down_revision = '6a4457517a3' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('secrets', sa.Column('project_id', sa.String(length=36), nullable=True)) op.create_index(op.f('ix_secrets_project_id'), 'secrets', ['project_id'], unique=False) op.create_foreign_key('secrets_project_fk', 'secrets', 'projects', ['project_id'], ['id']) barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/kilo_release.py0000666000175000017500000000156413311733060030650 0ustar zuulzuul00000000000000# Copyright 2015 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # """kilo Revision ID: kilo Revises: 1bece815014f Create Date: 2015-08-26 00:00:00.000000 """ # revision identifiers, used by Alembic. revision = 'kilo' down_revision = '1bece815014f' def upgrade(): """A no-op migration for marking the Kilo release.""" pass ././@LongLink0000000000000000000000000000015500000000000011216 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/cd4106a1a0_add_cert_to_container_type.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/cd4106a1a0_add_cert_to_container0000666000175000017500000000174413311733060033520 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """add-cert-to-container-type Revision ID: cd4106a1a0 Revises: 1e86c18af2dd Create Date: 2014-06-10 15:07:25.084173 """ # revision identifiers, used by Alembic. revision = 'cd4106a1a0' down_revision = '1e86c18af2dd' from alembic import op import sqlalchemy as sa def upgrade(): enum_type = sa.Enum( 'generic', 'rsa', 'dsa', 'certificate', name='container_types') op.alter_column('containers', 'type', type_=enum_type) ././@LongLink0000000000000000000000000000015400000000000011215 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/46b98cde536_add_project_quotas_table.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/46b98cde536_add_project_quotas_t0000666000175000017500000000427513311733060033530 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. """Add project quotas table Revision ID: 46b98cde536 Revises: 1bece815014f Create Date: 2015-08-28 17:42:35.057103 """ # revision identifiers, used by Alembic. revision = '46b98cde536' down_revision = 'kilo' from alembic import op import sqlalchemy as sa def upgrade(): ctx = op.get_context() con = op.get_bind() table_exists = ctx.dialect.has_table(con.engine, 'project_quotas') if not table_exists: op.create_table( 'project_quotas', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('project_id', sa.String(length=36), nullable=False), sa.Column('secrets', sa.Integer(), nullable=True), sa.Column('orders', sa.Integer(), nullable=True), sa.Column('containers', sa.Integer(), nullable=True), sa.Column('transport_keys', sa.Integer(), nullable=True), sa.Column('consumers', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['project_id'], ['projects.id'], name='project_quotas_fk'), sa.PrimaryKeyConstraint('id'), mysql_engine='InnoDB') op.create_index( op.f('ix_project_quotas_project_id'), 'project_quotas', ['project_id'], unique=False) ././@LongLink0000000000000000000000000000017400000000000011217 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/3041b53b95d7_remove_size_limits_on_meta_table_values.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/3041b53b95d7_remove_size_limits_0000666000175000017500000000204713311733060033363 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Remove size limits on meta table values Revision ID: 3041b53b95d7 Revises: 1a7cf79559e3 Create Date: 2015-04-08 15:43:32.852529 """ # revision identifiers, used by Alembic. revision = '3041b53b95d7' down_revision = '1a7cf79559e3' from alembic import op import sqlalchemy as sa def upgrade(): op.alter_column( 'order_barbican_metadata', 'value', type_=sa.Text() ) op.alter_column( 'certificate_authority_metadata', 'value', type_=sa.Text() ) ././@LongLink0000000000000000000000000000015400000000000011215 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/2d21598e7e70_added_ca_related_tables.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/2d21598e7e70_added_ca_related_ta0000666000175000017500000001177313311733060033224 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Added CA related tables Revision ID: 2d21598e7e70 Revises: 3d36a26b88af Create Date: 2015-03-11 15:47:32.292944 """ # revision identifiers, used by Alembic. revision = '2d21598e7e70' down_revision = '3d36a26b88af' from alembic import op import sqlalchemy as sa def upgrade(): ctx = op.get_context() con = op.get_bind() table_exists = ctx.dialect.has_table(con.engine, 'certificate_authorities') if not table_exists: op.create_table( 'certificate_authorities', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('plugin_name', sa.String(length=255), nullable=False), sa.Column('plugin_ca_id', sa.Text(), nullable=False), sa.Column('expiration', sa.DateTime(), nullable=True), sa.PrimaryKeyConstraint('id') ) table_exists = ctx.dialect.has_table( con.engine, 'project_certificate_authorities') if not table_exists: op.create_table( 'project_certificate_authorities', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('project_id', sa.String(length=36), nullable=False), sa.Column('ca_id', sa.String(length=36), nullable=False), sa.ForeignKeyConstraint(['ca_id'], ['certificate_authorities.id'],), sa.ForeignKeyConstraint(['project_id'], ['projects.id'],), sa.PrimaryKeyConstraint('id', 'project_id', 'ca_id'), sa.UniqueConstraint('project_id', 'ca_id', name='_project_certificate_authority_uc') ) table_exists = ctx.dialect.has_table( con.engine, 'certificate_authority_metadata') if not table_exists: op.create_table( 'certificate_authority_metadata', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('key', sa.String(length=255), nullable=False), sa.Column('value', sa.String(length=255), nullable=False), sa.Column('ca_id', sa.String(length=36), nullable=False), sa.ForeignKeyConstraint(['ca_id'], ['certificate_authorities.id'],), sa.PrimaryKeyConstraint('id', 'key', 'ca_id'), sa.UniqueConstraint('ca_id', 'key', name='_certificate_authority_metadatum_uc') ) table_exists = ctx.dialect.has_table( con.engine, 'preferred_certificate_authorities') if not table_exists: op.create_table( 'preferred_certificate_authorities', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), 
sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('project_id', sa.String(length=36), nullable=False), sa.Column('ca_id', sa.String(length=36), nullable=True), sa.ForeignKeyConstraint(['ca_id'], ['certificate_authorities.id'],), sa.ForeignKeyConstraint(['project_id'], ['projects.id'],), sa.PrimaryKeyConstraint('id', 'project_id'), sa.UniqueConstraint('project_id') ) ././@LongLink0000000000000000000000000000017200000000000011215 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/1e86c18af2dd_add_new_columns_type_meta_containerid.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/1e86c18af2dd_add_new_columns_typ0000666000175000017500000000224313311733060033573 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """add new columns type meta containerId Revision ID: 1e86c18af2dd Revises: 13d127569afa Create Date: 2014-06-04 09:53:27.116054 """ # revision identifiers, used by Alembic. revision = '1e86c18af2dd' down_revision = '13d127569afa' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('orders', sa.Column('container_id', sa.String(length=36), nullable=True)) op.add_column('orders', sa.Column('meta', sa.Text, nullable=True)) op.add_column('orders', sa.Column('type', sa.String(length=255), nullable=True)) ././@LongLink0000000000000000000000000000016300000000000011215 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/30dba269cc64_update_order_retry_tasks_table.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/30dba269cc64_update_order_retry_0000666000175000017500000000341013311733060033506 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Update order_retry_tasks table Revision ID: 30dba269cc64 Revises: 3041b53b95d7 Create Date: 2015-04-01 17:53:25.447919 """ # revision identifiers, used by Alembic. 
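# Migration 1e86c18af2dd above grows the orders table with three nullable
# columns so existing rows stay valid; note that 'meta' passes sa.Text as a
# class rather than an instance, which SQLAlchemy accepts as shorthand. An
# equivalent sketch with explicit instantiation:
from alembic import op
import sqlalchemy as sa


def _grow_orders_table():
    op.add_column('orders', sa.Column('container_id', sa.String(length=36),
                                      nullable=True))
    op.add_column('orders', sa.Column('meta', sa.Text(), nullable=True))
    op.add_column('orders', sa.Column('type', sa.String(length=255),
                                      nullable=True))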
revision = '30dba269cc64' down_revision = '3041b53b95d7' from oslo_utils import timeutils from alembic import op from barbican.model import models as m import sqlalchemy as sa def upgrade(): op.add_column( 'order_retry_tasks', sa.Column( 'created_at', sa.DateTime(), nullable=False, server_default=str(timeutils.utcnow()))) op.add_column( 'order_retry_tasks', sa.Column( 'deleted', sa.Boolean(), nullable=False, server_default='0')) op.add_column( 'order_retry_tasks', sa.Column('deleted_at', sa.DateTime(), nullable=True)) op.add_column( 'order_retry_tasks', sa.Column( 'status', sa.String(length=20), nullable=False, server_default=m.States.PENDING)) op.add_column( 'order_retry_tasks', sa.Column( 'updated_at', sa.DateTime(), nullable=False, server_default=str(timeutils.utcnow()))) ././@LongLink0000000000000000000000000000017000000000000011213 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/47b69e523451_made_plugin_names_in_kek_datum_non_.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/47b69e523451_made_plugin_names_i0000666000175000017500000000167213311733060033226 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Made plugin names in kek datum non nullable Revision ID: 47b69e523451 Revises: cd4106a1a0 Create Date: 2014-06-16 14:05:45.428226 """ # revision identifiers, used by Alembic. revision = '47b69e523451' down_revision = 'cd4106a1a0' from alembic import op import sqlalchemy as sa def upgrade(): op.alter_column('kek_data', 'plugin_name', type_=sa.String(255), nullable=False) ././@LongLink0000000000000000000000000000015600000000000011217 Lustar 00000000000000barbican-6.0.1/barbican/model/migration/alembic_migrations/versions/795737bb3c3_change_tenants_to_projects.pybarbican-6.0.1/barbican/model/migration/alembic_migrations/versions/795737bb3c3_change_tenants_to_pr0000666000175000017500000000612213311733060033424 0ustar zuulzuul00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Change tenants to projects Revision ID: 795737bb3c3 Revises: 254495565185 Create Date: 2014-12-09 15:58:35.535032 """ # revision identifiers, used by Alembic. 
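# Migration 30dba269cc64 above backfills its new NOT NULL audit columns via
# server defaults. Note that server_default=str(timeutils.utcnow()) is
# evaluated once, when the migration runs, so every pre-existing row receives
# that single frozen timestamp rather than a per-row database default. A
# sketch that makes this intent explicit (behaviour matches the migration):
from alembic import op
from oslo_utils import timeutils
import sqlalchemy as sa


def _backfill_created_at():
    migration_time = str(timeutils.utcnow())  # frozen at upgrade time
    op.add_column(
        'order_retry_tasks',
        sa.Column('created_at', sa.DateTime(), nullable=False,
                  server_default=migration_time))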
revision = '795737bb3c3' down_revision = '254495565185' from alembic import op import sqlalchemy as sa def _drop_constraint(ctx, con, table, fk_name_to_try): if ctx.dialect.name == 'mysql': # MySQL creates different default names for foreign key constraints op.drop_constraint(fk_name_to_try, table, type_='foreignkey') def _change_fk_to_project(ctx, con, table, fk_old, fk_new): _drop_constraint(ctx, con, table, fk_old) op.alter_column(table, 'tenant_id', type_=sa.String(36), new_column_name='project_id') op.create_foreign_key(fk_new, table, 'projects', ['project_id'], ['id']) def upgrade(): # project_secret table ctx = op.get_context() con = op.get_bind() # ---- Update tenant_secret table to project_secret: _drop_constraint(ctx, con, 'tenant_secret', 'tenant_secret_ibfk_1') _drop_constraint(ctx, con, 'tenant_secret', 'tenant_secret_ibfk_2') op.drop_constraint('_tenant_secret_uc', 'tenant_secret', type_='unique') op.rename_table('tenant_secret', 'project_secret') op.alter_column('project_secret', 'tenant_id', type_=sa.String(36), new_column_name='project_id') op.create_unique_constraint('_project_secret_uc', 'project_secret', ['project_id', 'secret_id']) # ---- Update tenants table to projects: op.rename_table('tenants', 'projects') # re-create the foreign key constraints with explicit names. op.create_foreign_key('project_secret_project_fk', 'project_secret', 'projects', ['project_id'], ['id']) op.create_foreign_key('project_secret_secret_fk', 'project_secret', 'secrets', ['secret_id'], ['id']) # ---- Update containers table: _change_fk_to_project( ctx, con, 'containers', 'containers_ibfk_1', 'containers_project_fk') # ---- Update kek_data table: _change_fk_to_project( ctx, con, 'kek_data', 'kek_data_ibfk_1', 'kek_data_project_fk') # ---- Update orders table: _change_fk_to_project( ctx, con, 'orders', 'orders_ibfk_2', 'orders_project_fk') op.create_foreign_key('orders_ibfk_2', 'orders', 'containers', ['container_id'], ['id']) barbican-6.0.1/barbican/model/migration/alembic_migrations/env.py0000666000175000017500000000553513311733060025134 0ustar zuulzuul00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import with_statement from alembic import context from oslo_db.sqlalchemy import session from barbican.model import models # this is the Alembic Config object, which provides # access to the values within the .ini file in use. # Note that the 'config' instance is not available in for unit testing. try: config = context.config except Exception: config = None # WARNING! The following was autogenerated by Alembic as part of setting up # the initial environment. Unfortunately it also **clobbers** the logging # for the rest of this application, so please do not use it! # Interpret the config file for Python logging. # This line sets up loggers basically. 
# fileConfig(config.config_file_name) # add your model's MetaData object here # for 'autogenerate' support # from myapp import mymodel # target_metadata = mymodel.Base.metadata target_metadata = models.BASE.metadata # other values from the config, defined by the needs of env.py, # can be acquired: # my_important_option = config.get_main_option("my_important_option") # ... etc. def get_sqlalchemy_url(): return (config.barbican_sqlalchemy_url or config.get_main_option("sqlalchemy.url")) def run_migrations_offline(): """Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the script output. """ context.configure(url=get_sqlalchemy_url()) with context.begin_transaction(): context.run_migrations() def run_migrations_online(): """Run migrations in 'online' mode. In this scenario we need to create an Engine and associate a connection with the context. """ engine = session.create_engine( get_sqlalchemy_url()) connection = engine.connect() context.configure( connection=connection, target_metadata=target_metadata ) try: with context.begin_transaction(): context.run_migrations() finally: connection.close() if config: if context.is_offline_mode(): run_migrations_offline() else: run_migrations_online() barbican-6.0.1/barbican/model/migration/alembic_migrations/projects_init_ops.py0000666000175000017500000000255013311733060030073 0ustar zuulzuul00000000000000# Copyright 2015 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # Initial operations for agent management extension # This module only manages the 'agents' table. Binding tables are created # in the modules for relevant resources from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'tenants', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('keystone_id', sa.String(length=255), nullable=True), sa.PrimaryKeyConstraint('id') ) barbican-6.0.1/barbican/model/migration/alembic_migrations/container_init_ops.py0000666000175000017500000000577013311733060030233 0ustar zuulzuul00000000000000# Copyright 2015 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. # # Initial operations for agent management extension # This module only manages the 'agents' table. Binding tables are created # in the modules for relevant resources from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'containers', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('name', sa.String(length=255), nullable=True), sa.Column('type', sa.Enum('generic', 'rsa', 'dsa', 'certificate', name='container_types'), nullable=True), sa.Column('tenant_id', sa.String(length=36), nullable=False), sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'],), sa.PrimaryKeyConstraint('id') ) op.create_table( 'container_consumer_metadata', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('name', sa.String(length=255), nullable=True), sa.Column('container_id', sa.String(length=36), nullable=False), sa.Column('URL', sa.String(length=255), nullable=True), sa.Column('data_hash', sa.CHAR(64), nullable=True), sa.ForeignKeyConstraint(['container_id'], ['containers.id'],), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('data_hash', name='_consumer_hashed_container_name_url_uc'), sa.Index('values_index', 'container_id', 'name', 'URL') ) op.create_table( 'container_secret', sa.Column('name', sa.String(length=255), nullable=True), sa.Column('container_id', sa.String(length=36), nullable=False), sa.Column('secret_id', sa.String(length=36), nullable=False), sa.ForeignKeyConstraint(['container_id'], ['containers.id'],), sa.ForeignKeyConstraint(['secret_id'], ['secrets.id'],) ) barbican-6.0.1/barbican/model/migration/alembic_migrations/secrets_init_ops.py0000666000175000017500000000402413311733060027710 0ustar zuulzuul00000000000000# Copyright 2015 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # Initial operations for agent management extension # This module only manages the 'agents' table. 
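# (Illustrative sketch, assuming the conventional use of these *_init_ops
# modules: a consolidating initial revision is expected to call each
# module's upgrade() so that foreign-key targets such as 'tenants' and
# 'secrets' exist before dependent tables such as 'containers'.)
#
#     from barbican.model.migration.alembic_migrations import (
#         container_init_ops, projects_init_ops, secrets_init_ops)
#
#     def upgrade():
#         projects_init_ops.upgrade()   # 'tenants' first: FK target
#         secrets_init_ops.upgrade()    # 'secrets' and 'tenant_secret'
#         container_init_ops.upgrade()  # FKs into 'tenants' and 'secrets'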
Binding tables are created # in the modules for relevant resources from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'secrets', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('name', sa.String(length=255), nullable=True), sa.Column('expiration', sa.DateTime(), nullable=True), sa.Column('algorithm', sa.String(length=255), nullable=True), sa.Column('bit_length', sa.Integer(), nullable=True), sa.Column('mode', sa.String(length=255), nullable=True), sa.PrimaryKeyConstraint('id') ) op.create_table( 'tenant_secret', sa.Column('tenant_id', sa.String(length=36), nullable=False), sa.Column('secret_id', sa.String(length=36), nullable=False), sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'],), sa.ForeignKeyConstraint(['secret_id'], ['secrets.id'],), sa.UniqueConstraint('tenant_id', 'secret_id', name='_tenant_secret_uc') ) barbican-6.0.1/barbican/model/migration/alembic_migrations/encrypted_init_ops.py0000666000175000017500000000336713311733060030246 0ustar zuulzuul00000000000000# Copyright 2015 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # Initial operations for agent management extension # This module only manages the 'agents' table. 
Binding tables are created # in the modules for relevant resources from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'encrypted_data', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('content_type', sa.String(length=255), nullable=True), sa.Column('secret_id', sa.String(length=36), nullable=False), sa.Column('kek_id', sa.String(length=36), nullable=False), sa.Column('cypher_text', sa.Text(), nullable=True), sa.Column('kek_meta_extended', sa.Text(), nullable=True), sa.ForeignKeyConstraint(['secret_id'], ['secrets.id'],), sa.ForeignKeyConstraint(['kek_id'], ['kek_data.id'],), sa.PrimaryKeyConstraint('id') ) barbican-6.0.1/barbican/model/migration/alembic.ini0000666000175000017500000000241113311733060022225 0ustar zuulzuul00000000000000# A generic, single database configuration [alembic] # path to migration scripts script_location = %(here)s/alembic_migrations # template used to generate migration files # file_template = %%(rev)s_%%(slug)s # set to 'true' to run the environment during # the 'revision' command, regardless of autogenerate # revision_environment = false # default to an empty string because the Barbican migration process will # extract the correct value and set it programmatically before alembic is fully # invoked. sqlalchemy.url = #sqlalchemy.url = driver://user:pass@localhost/dbname #sqlalchemy.url = sqlite:///barbican.sqlite #sqlalchemy.url = sqlite:////var/lib/barbican/barbican.sqlite #sqlalchemy.url = postgresql+psycopg2://postgres:postgres@localhost:5432/barbican_api # Logging configuration [loggers] keys = alembic #keys = root,sqlalchemy,alembic [handlers] keys = console [formatters] keys = generic [logger_root] level = DEBUG handlers = console qualname = [logger_sqlalchemy] level = DEBUG handlers = qualname = sqlalchemy.engine [logger_alembic] level = INFO handlers = qualname = alembic [handler_console] class = StreamHandler args = (sys.stderr,) level = NOTSET formatter = generic [formatter_generic] format = %(levelname)-5.5s [%(name)s] %(message)s datefmt = %H:%M:%S barbican-6.0.1/barbican/model/clean.py0000666000175000017500000003537013311733060017605 0ustar zuulzuul00000000000000# Copyright (c) 2016 IBM # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from barbican.common import config from barbican.model import models from barbican.model import repositories as repo from oslo_log import log from oslo_utils import timeutils from sqlalchemy import sql as sa_sql import datetime # Import and configure logging. CONF = config.CONF log.setup(CONF, 'barbican') LOG = log.getLogger(__name__) def cleanup_unassociated_projects(): """Clean up unassociated projects. This looks for projects that have no children entries on the dependent tables and removes them. 
""" LOG.debug("Cleaning up unassociated projects") session = repo.get_session() project_children_tables = [models.Order, models.KEKDatum, models.Secret, models.ContainerConsumerMetadatum, models.Container, models.PreferredCertificateAuthority, models.CertificateAuthority, models.ProjectCertificateAuthority, models.ProjectQuotas] children_names = map(lambda child: child.__name__, project_children_tables) LOG.debug("Children tables for Project table being checked: %s", str(children_names)) sub_query = session.query(models.Project.id) for model in project_children_tables: sub_query = sub_query.outerjoin(model, models.Project.id == model.project_id) sub_query = sub_query.filter(model.id == None) # nopep8 sub_query = sub_query.subquery() sub_query = sa_sql.select([sub_query]) query = session.query(models.Project) query = query.filter(models.Project.id.in_(sub_query)) delete_count = query.delete(synchronize_session='fetch') LOG.info("Cleaned up %(delete_count)s entries for " "%(project_name)s", {'delete_count': str(delete_count), 'project_name': models.Project.__name__}) return delete_count def cleanup_parent_with_no_child(parent_model, child_model, threshold_date=None): """Clean up soft deletions in parent that do not have references in child. Before running this function, the child table should be cleaned of soft deletions. This function left outer joins the parent and child tables and finds the parent entries that do not have a foreign key reference in the child table. Then the results are filtered by soft deletions and are cleaned up. :param parent_model: table class for parent :param child_model: table class for child which restricts parent deletion :param threshold_date: soft deletions older than this date will be removed :returns: total number of entries removed from database """ LOG.debug("Cleaning soft deletes for %(parent_name)s without " "a child in %(child_name)s" % {'parent_name': parent_model.__name__, 'child_name': child_model.__name__}) session = repo.get_session() sub_query = session.query(parent_model.id) sub_query = sub_query.outerjoin(child_model) sub_query = sub_query.filter(child_model.id == None) # nopep8 sub_query = sub_query.subquery() sub_query = sa_sql.select([sub_query]) query = session.query(parent_model) query = query.filter(parent_model.id.in_(sub_query)) query = query.filter(parent_model.deleted) if threshold_date: query = query.filter(parent_model.deleted_at <= threshold_date) delete_count = query.delete(synchronize_session='fetch') LOG.info("Cleaned up %(delete_count)s entries for %(parent_name)s " "with no children in %(child_name)s", {'delete_count': delete_count, 'parent_name': parent_model.__name__, 'child_name': child_model.__name__}) return delete_count def cleanup_softdeletes(model, threshold_date=None): """Remove soft deletions from a table. :param model: table class to remove soft deletions :param threshold_date: soft deletions older than this date will be removed :returns: total number of entries removed from the database """ LOG.debug("Cleaning soft deletes: %s", model.__name__) session = repo.get_session() query = session.query(model) query = query.filter_by(deleted=True) if threshold_date: query = query.filter(model.deleted_at <= threshold_date) delete_count = query.delete() LOG.info("Cleaned up %(delete_count)s entries for %(model_name)s", {'delete_count': delete_count, 'model_name': model.__name__}) return delete_count def cleanup_all(threshold_date=None): """Clean up the main soft deletable resources. 
This function contains an order of calls to clean up the soft-deletable resources. :param threshold_date: soft deletions older than this date will be removed :returns: total number of entries removed from the database """ LOG.debug("Cleaning up soft deletions where deletion date" " is older than %s", str(threshold_date)) total = 0 total += cleanup_softdeletes(models.TransportKey, threshold_date=threshold_date) total += cleanup_softdeletes(models.OrderBarbicanMetadatum, threshold_date=threshold_date) total += cleanup_softdeletes(models.OrderRetryTask, threshold_date=threshold_date) total += cleanup_softdeletes(models.OrderPluginMetadatum, threshold_date=threshold_date) total += cleanup_parent_with_no_child(models.Order, models.OrderRetryTask, threshold_date=threshold_date) total += cleanup_softdeletes(models.EncryptedDatum, threshold_date=threshold_date) total += cleanup_softdeletes(models.SecretUserMetadatum, threshold_date=threshold_date) total += cleanup_softdeletes(models.SecretStoreMetadatum, threshold_date=threshold_date) total += cleanup_softdeletes(models.ContainerSecret, threshold_date=threshold_date) total += cleanup_parent_with_no_child(models.Secret, models.Order, threshold_date=threshold_date) total += cleanup_softdeletes(models.ContainerConsumerMetadatum, threshold_date=threshold_date) total += cleanup_parent_with_no_child(models.Container, models.Order, threshold_date=threshold_date) total += cleanup_softdeletes(models.KEKDatum, threshold_date=threshold_date) # TODO(edtubill) Clean up projects that were soft deleted by # the keystone listener LOG.info("Cleaned up %s soft deleted entries", total) return total def _soft_delete_expired_secrets(threshold_date): """Soft delete expired secrets. :param threshold_date: secrets that have expired past this date will be soft deleted :returns: total number of secrets that were soft deleted """ current_time = timeutils.utcnow() session = repo.get_session() query = session.query(models.Secret.id) query = query.filter(~models.Secret.deleted) query = query.filter( models.Secret.expiration <= threshold_date ) update_count = query.update( { models.Secret.deleted: True, models.Secret.deleted_at: current_time }, synchronize_session='fetch') return update_count def _hard_delete_acls_for_soft_deleted_secrets(): """Remove acl entries for secrets that have been soft deleted. Removes entries in SecretACL and SecretACLUser which are for secrets that have been soft deleted. 
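# (Roughly the SQL behind the bulk soft delete above -- illustrative only;
# the ORM emits a single UPDATE rather than row-by-row deletes:)
#
#     UPDATE secrets
#     SET deleted = true, deleted_at = :now
#     WHERE NOT deleted AND expiration <= :threshold_date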
""" session = repo.get_session() acl_user_sub_query = session.query(models.SecretACLUser.id) acl_user_sub_query = acl_user_sub_query.join(models.SecretACL) acl_user_sub_query = acl_user_sub_query.join(models.Secret) acl_user_sub_query = acl_user_sub_query.filter(models.Secret.deleted) acl_user_sub_query = acl_user_sub_query.subquery() acl_user_sub_query = sa_sql.select([acl_user_sub_query]) acl_user_query = session.query(models.SecretACLUser) acl_user_query = acl_user_query.filter( models.SecretACLUser.id.in_(acl_user_sub_query)) acl_total = acl_user_query.delete(synchronize_session='fetch') acl_sub_query = session.query(models.SecretACL.id) acl_sub_query = acl_sub_query.join(models.Secret) acl_sub_query = acl_sub_query.filter(models.Secret.deleted) acl_sub_query = acl_sub_query.subquery() acl_sub_query = sa_sql.select([acl_sub_query]) acl_query = session.query(models.SecretACL) acl_query = acl_query.filter( models.SecretACL.id.in_(acl_sub_query)) acl_total += acl_query.delete(synchronize_session='fetch') return acl_total def _soft_delete_expired_secret_children(threshold_date): """Soft delete the children tables of expired secrets. Soft deletes the children tables and hard deletes the ACL children tables of the expired secrets. :param threshold_date: threshold date for secret expiration :returns: returns a pair for number of soft delete children and deleted ACLs """ current_time = timeutils.utcnow() secret_children = [models.SecretStoreMetadatum, models.SecretUserMetadatum, models.EncryptedDatum, models.ContainerSecret] children_names = map(lambda child: child.__name__, secret_children) LOG.debug("Children tables for Secret table being checked: %s", str(children_names)) session = repo.get_session() update_count = 0 for table in secret_children: # Go through children and soft delete them sub_query = session.query(table.id) sub_query = sub_query.join(models.Secret) sub_query = sub_query.filter( models.Secret.expiration <= threshold_date ) sub_query = sub_query.subquery() sub_query = sa_sql.select([sub_query]) query = session.query(table) query = query.filter(table.id.in_(sub_query)) current_update_count = query.update( { table.deleted: True, table.deleted_at: current_time }, synchronize_session='fetch') update_count += current_update_count session.flush() acl_total = _hard_delete_acls_for_soft_deleted_secrets() return update_count, acl_total def soft_delete_expired_secrets(threshold_date): """Soft deletes secrets that are past expiration date. The expired secrets and its children are marked for deletion. ACLs are soft deleted and then purged from the database. :param threshold_date: secrets that have expired past this date will be soft deleted :returns: the sum of soft deleted entries and hard deleted acl entries """ # Note: sqllite does not support multiple table updates so # several db updates are used instead LOG.debug('Soft deleting expired secrets older than: %s', str(threshold_date)) update_count = _soft_delete_expired_secrets(threshold_date) children_count, acl_total = _soft_delete_expired_secret_children( threshold_date) update_count += children_count LOG.info("Soft deleted %(update_count)s entries due to secret " "expiration and %(acl_total)s secret acl entries " "were removed from the database", {'update_count': update_count, 'acl_total': acl_total}) return update_count + acl_total def clean_command(sql_url, min_num_days, do_clean_unassociated_projects, do_soft_delete_expired_secrets, verbose, log_file): """Clean command to clean up the database. 
:param sql_url: sql connection string to connect to a database :param min_num_days: clean up soft deletions older than this date :param do_clean_unassociated_projects: If True, clean up unassociated projects :param do_soft_delete_expired_secrets: If True, soft delete secrets that have expired :param verbose: If True, log and print more information :param log_file: If set, override the log_file configured """ if verbose: # The verbose flag prints out log events to the screen, otherwise # the log events will only go to the log file CONF.set_override('debug', True) if log_file: CONF.set_override('log_file', log_file) LOG.info("Cleaning up soft deletions in the barbican database") log.setup(CONF, 'barbican') cleanup_total = 0 current_time = timeutils.utcnow() stop_watch = timeutils.StopWatch() stop_watch.start() try: if sql_url: CONF.set_override('sql_connection', sql_url) repo.setup_database_engine_and_factory() if do_clean_unassociated_projects: cleanup_total += cleanup_unassociated_projects() if do_soft_delete_expired_secrets: cleanup_total += soft_delete_expired_secrets( threshold_date=current_time) threshold_date = None if min_num_days >= 0: threshold_date = current_time - datetime.timedelta( days=min_num_days) else: threshold_date = current_time cleanup_total += cleanup_all(threshold_date=threshold_date) repo.commit() except Exception as ex: LOG.exception('Failed to clean up soft deletions in database.') repo.rollback() cleanup_total = 0 # rollback happened, no entries affected raise ex finally: stop_watch.stop() elapsed_time = stop_watch.elapsed() if verbose: CONF.clear_override('debug') if log_file: CONF.clear_override('log_file') repo.clear() if sql_url: CONF.clear_override('sql_connection') log.setup(CONF, 'barbican') # reset the overrides LOG.info("Cleaning of database affected %s entries", cleanup_total) LOG.info('DB clean up finished in %s seconds', elapsed_time) barbican-6.0.1/barbican/model/__init__.py0000666000175000017500000000000013311733060020240 0ustar zuulzuul00000000000000barbican-6.0.1/barbican/model/repositories.py0000666000175000017500000026343213311733072021257 0ustar zuulzuul00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Defines interface for DB access that Resource controllers may reference TODO: The top part of this file was 'borrowed' from Glance, but seems quite intense for sqlalchemy, and maybe could be simplified. 
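# (Illustrative bootstrap, assuming the get_xxxx_repository() accessors
# provided below: a service process builds the engine and thread-scoped
# session factory once, then shares the repository singletons.)
#
#     from barbican.model import repositories
#     repositories.setup_database_engine_and_factory()
#     secret_repo = repositories.get_secret_repository()
#     project_repo = repositories.get_project_repository()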
""" import logging import re import sys import time from oslo_db import exception as db_exc from oslo_db.sqlalchemy import session from oslo_utils import timeutils from oslo_utils import uuidutils import sqlalchemy from sqlalchemy import func as sa_func from sqlalchemy import or_ import sqlalchemy.orm as sa_orm from barbican.common import config from barbican.common import exception from barbican.common import utils from barbican import i18n as u from barbican.model.migration import commands from barbican.model import models LOG = utils.getLogger(__name__) _ENGINE = None _SESSION_FACTORY = None BASE = models.BASE sa_logger = None # Singleton repository references, instantiated via get_xxxx_repository() # functions below. Please keep this list in alphabetical order. _CA_REPOSITORY = None _CONTAINER_ACL_REPOSITORY = None _CONTAINER_CONSUMER_REPOSITORY = None _CONTAINER_REPOSITORY = None _CONTAINER_SECRET_REPOSITORY = None _ENCRYPTED_DATUM_REPOSITORY = None _KEK_DATUM_REPOSITORY = None _ORDER_PLUGIN_META_REPOSITORY = None _ORDER_BARBICAN_META_REPOSITORY = None _ORDER_REPOSITORY = None _ORDER_RETRY_TASK_REPOSITORY = None _PREFERRED_CA_REPOSITORY = None _PROJECT_REPOSITORY = None _PROJECT_CA_REPOSITORY = None _PROJECT_QUOTAS_REPOSITORY = None _SECRET_ACL_REPOSITORY = None _SECRET_META_REPOSITORY = None _SECRET_USER_META_REPOSITORY = None _SECRET_REPOSITORY = None _TRANSPORT_KEY_REPOSITORY = None _SECRET_STORES_REPOSITORY = None _PROJECT_SECRET_STORE_REPOSITORY = None CONF = config.CONF def hard_reset(): """Performs a hard reset of database resources, used for unit testing.""" # TODO(jvrbanac): Remove this as soon as we improve our unit testing # to not require this. global _ENGINE, _SESSION_FACTORY if _ENGINE: _ENGINE.dispose() _ENGINE = None _SESSION_FACTORY = None # Make sure we reinitialize the engine and session factory setup_database_engine_and_factory() def setup_database_engine_and_factory(initialize_secret_stores=False): global sa_logger, _SESSION_FACTORY, _ENGINE LOG.info('Setting up database engine and session factory') if CONF.debug: sa_logger = logging.getLogger('sqlalchemy.engine') sa_logger.setLevel(logging.DEBUG) if CONF.sql_pool_logging: pool_logger = logging.getLogger('sqlalchemy.pool') pool_logger.setLevel(logging.DEBUG) _ENGINE = _get_engine(_ENGINE) # Utilize SQLAlchemy's scoped_session to ensure that we only have one # session instance per thread. session_maker = sa_orm.sessionmaker(bind=_ENGINE) _SESSION_FACTORY = sqlalchemy.orm.scoped_session(session_maker) if initialize_secret_stores: _initialize_secret_stores_data() def start(): """Start for read-write requests placeholder Typically performed at the start of a request cycle, say for POST or PUT requests. """ pass def start_read_only(): """Start for read-only requests placeholder Typically performed at the start of a request cycle, say for GET or HEAD requests. """ pass def commit(): """Commit session state so far to the database. Typically performed at the end of a request cycle. """ get_session().commit() def rollback(): """Rollback session state so far. Typically performed when the request cycle raises an Exception. """ get_session().rollback() def clear(): """Dispose of this session, releases db resources. Typically performed at the end of a request cycle, after a commit() or rollback(). 
""" if _SESSION_FACTORY: # not initialized in some unit test _SESSION_FACTORY.remove() def get_session(): """Helper method to grab session.""" return _SESSION_FACTORY() def _get_engine(engine): if not engine: connection = CONF.sql_connection if not connection: raise exception.BarbicanException( u._('No SQL connection configured')) # TODO(jfwood): # connection_dict = sqlalchemy.engine.url.make_url(_CONNECTION) engine_args = { 'idle_timeout': CONF.sql_idle_timeout} if CONF.sql_pool_size: engine_args['max_pool_size'] = CONF.sql_pool_size if CONF.sql_pool_max_overflow: engine_args['max_overflow'] = CONF.sql_pool_max_overflow db_connection = None try: engine = _create_engine(connection, **engine_args) db_connection = engine.connect() except Exception as err: msg = u._("Error configuring registry database with supplied " "sql_connection. Got error: {error}").format(error=err) LOG.exception(msg) raise exception.BarbicanException(msg) finally: if db_connection: db_connection.close() if CONF.db_auto_create: meta = sqlalchemy.MetaData() meta.reflect(bind=engine) tables = meta.tables _auto_generate_tables(engine, tables) else: LOG.info('Not auto-creating barbican registry DB') return engine def _initialize_secret_stores_data(): """Initializes secret stores data in database. This logic is executed only when database engine and factory is built. Secret store get_manager internally reads secret store plugin configuration from service configuration and saves it in secret_stores table in database. """ if utils.is_multiple_backends_enabled(): from barbican.plugin.interface import secret_store secret_store.get_manager() def is_db_connection_error(args): """Return True if error in connecting to db.""" # NOTE(adam_g): This is currently MySQL specific and needs to be extended # to support Postgres and others. conn_err_codes = ('2002', '2003', '2006') for err_code in conn_err_codes: if args.find(err_code) != -1: return True return False def _create_engine(connection, **engine_args): LOG.debug('Sql connection: please check "sql_connection" property in ' 'barbican configuration file; Args: %s', engine_args) engine = session.create_engine(connection, **engine_args) # TODO(jfwood): if 'mysql' in connection_dict.drivername: # TODO(jfwood): sqlalchemy.event.listen(_ENGINE, 'checkout', # TODO(jfwood): ping_listener) # Wrap the engine's connect method with a retry decorator. engine.connect = wrap_db_error(engine.connect) return engine def _auto_generate_tables(engine, tables): if tables and 'alembic_version' in tables: # Upgrade the database to the latest version. LOG.info('Updating schema to latest version') commands.upgrade() else: # Create database tables from our models. LOG.info('Auto-creating barbican registry DB') models.BASE.metadata.create_all(engine) # Sync the alembic version 'head' with current models. commands.stamp() def wrap_db_error(f): """Retry DB connection. Copied from nova and modified.""" def _wrap(*args, **kwargs): try: return f(*args, **kwargs) except sqlalchemy.exc.OperationalError as e: if not is_db_connection_error(e.args[0]): raise remaining_attempts = CONF.sql_max_retries while True: LOG.warning('SQL connection failed. 
%d attempts left.', remaining_attempts) remaining_attempts -= 1 time.sleep(CONF.sql_retry_interval) try: return f(*args, **kwargs) except sqlalchemy.exc.OperationalError as e: if (remaining_attempts <= 0 or not is_db_connection_error(e.args[0])): raise except sqlalchemy.exc.DBAPIError: raise except sqlalchemy.exc.DBAPIError: raise _wrap.__name__ = f.__name__ return _wrap def clean_paging_values(offset_arg=0, limit_arg=CONF.default_limit_paging): """Cleans and safely limits raw paging offset/limit values.""" offset_arg = offset_arg or 0 limit_arg = limit_arg or CONF.default_limit_paging try: offset = int(offset_arg) if offset < 0: offset = 0 if offset > sys.maxsize: offset = 0 except ValueError: offset = 0 try: limit = int(limit_arg) if limit < 1: limit = 1 if limit > CONF.max_limit_paging: limit = CONF.max_limit_paging except ValueError: limit = CONF.default_limit_paging LOG.debug("Clean paging values limit=%(limit)s, offset=%(offset)s" % {'limit': limit, 'offset': offset}) return offset, limit def delete_all_project_resources(project_id): """Logic to cleanup all project resources. This cleanup uses same alchemy session to perform all db operations as a transaction and will commit only when all db operations are performed without error. """ session = get_session() container_repo = get_container_repository() container_repo.delete_project_entities( project_id, suppress_exception=False, session=session) # secret children SecretStoreMetadatum, EncryptedDatum # and container_secrets are deleted as part of secret delete secret_repo = get_secret_repository() secret_repo.delete_project_entities( project_id, suppress_exception=False, session=session) kek_repo = get_kek_datum_repository() kek_repo.delete_project_entities( project_id, suppress_exception=False, session=session) project_repo = get_project_repository() project_repo.delete_project_entities( project_id, suppress_exception=False, session=session) class BaseRepo(object): """Base repository for the barbican entities. This class provides template methods that allow sub-classes to hook specific functionality as needed. Clients access instances of this class via singletons, therefore implementations should be stateless aside from configuration. """ def get_session(self, session=None): LOG.debug("Getting session...") return session or get_session() def get(self, entity_id, external_project_id=None, force_show_deleted=False, suppress_exception=False, session=None): """Get an entity or raise if it does not exist.""" session = self.get_session(session) try: query = self._do_build_get_query(entity_id, external_project_id, session) # filter out deleted entities if requested if not force_show_deleted: query = query.filter_by(deleted=False) entity = query.one() except sa_orm.exc.NoResultFound: LOG.exception("Not found for %s", entity_id) entity = None if not suppress_exception: _raise_entity_not_found(self._do_entity_name(), entity_id) return entity def create_from(self, entity, session=None): """Sub-class hook: create from entity.""" if not entity: msg = u._( "Must supply non-None {entity_name}." ).format(entity_name=self._do_entity_name()) raise exception.Invalid(msg) if entity.id: msg = u._( "Must supply {entity_name} with id=None (i.e. new entity)." ).format(entity_name=self._do_entity_name()) raise exception.Invalid(msg) LOG.debug("Begin create from...") session = self.get_session(session) start = time.time() # DEBUG # Validate the attributes before we go any further. 
From my # (unknown Glance developer) investigation, the @validates # decorator does not validate # on new records, only on existing records, which is, well, # idiotic. self._do_validate(entity.to_dict()) try: LOG.debug("Saving entity...") entity.save(session=session) except db_exc.DBDuplicateEntry as e: session.rollback() LOG.exception('Problem saving entity for create') error_msg = re.sub('[()]', '', str(e.args)) raise exception.ConstraintCheck(error=error_msg) LOG.debug('Elapsed repo ' 'create secret:%s', (time.time() - start)) # DEBUG return entity def save(self, entity): """Saves the state of the entity.""" entity.updated_at = timeutils.utcnow() # Validate the attributes before we go any further. From my # (unknown Glance developer) investigation, the @validates # decorator does not validate # on new records, only on existing records, which is, well, # idiotic. self._do_validate(entity.to_dict()) entity.save() def delete_entity_by_id(self, entity_id, external_project_id, session=None): """Remove the entity by its ID.""" session = self.get_session(session) entity = self.get(entity_id=entity_id, external_project_id=external_project_id, session=session) entity.delete(session=session) def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "Entity" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" return None def _do_convert_values(self, values): """Sub-class hook: convert text-based values to target types This is specifically for database values. """ pass def _do_validate(self, values): """Sub-class hook: validate values. Validates the incoming data and raises an Invalid exception if anything is out of order. :param values: Mapping of entity metadata to check """ status = values.get('status', None) if not status: # TODO(jfwood): I18n this! msg = u._("{entity_name} status is required.").format( entity_name=self._do_entity_name()) raise exception.Invalid(msg) if not models.States.is_valid(status): msg = u._("Invalid status '{status}' for {entity_name}.").format( status=status, entity_name=self._do_entity_name()) raise exception.Invalid(msg) return values def _update_values(self, entity_ref, values): for k in values: if getattr(entity_ref, k) != values[k]: setattr(entity_ref, k, values[k]) def _build_get_project_entities_query(self, project_id, session): """Sub-class hook: build a query to retrieve entities for a project. :param project_id: id of barbican project entity :param session: existing db session reference. :returns: A query object for getting all project related entities This will filter deleted entities if there. """ msg = u._( "{entity_name} is missing query build method for get " "project entities.").format( entity_name=self._do_entity_name()) raise NotImplementedError(msg) def get_project_entities(self, project_id, session=None): """Gets entities associated with a given project. :param project_id: id of barbican project entity :param session: existing db session reference. If None, gets session. :returns: list of matching entities found otherwise returns empty list if no entity exists for a given project. Sub-class should implement `_build_get_project_entities_query` function to delete related entities otherwise it would raise NotImplementedError on its usage. 
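# (Minimal sketch of a concrete repository, using a hypothetical
# models.Widget entity, showing the hooks BaseRepo expects subclasses to
# override:)
#
#     class WidgetRepo(BaseRepo):
#         def _do_entity_name(self):
#             return "Widget"
#
#         def _do_build_get_query(self, entity_id, external_project_id,
#                                 session):
#             return session.query(models.Widget).filter_by(id=entity_id)
#
#         def _do_validate(self, values):
#             pass
#
#         def _build_get_project_entities_query(self, project_id, session):
#             return session.query(models.Widget).filter_by(
#                 project_id=project_id, deleted=False)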
""" session = self.get_session(session) query = self._build_get_project_entities_query(project_id, session) if query: return query.all() else: return [] def get_count(self, project_id, session=None): """Gets count of entities associated with a given project :param project_id: id of barbican project entity :param session: existing db session reference. If None, gets session. :return: an number 0 or greater Sub-class should implement `_build_get_project_entities_query` function to delete related entities otherwise it would raise NotImplementedError on its usage. """ session = self.get_session(session) query = self._build_get_project_entities_query(project_id, session) if query: return query.count() else: return 0 def delete_project_entities(self, project_id, suppress_exception=False, session=None): """Deletes entities for a given project. :param project_id: id of barbican project entity :param suppress_exception: Pass True if want to suppress exception :param session: existing db session reference. If None, gets session. Sub-class should implement `_build_get_project_entities_query` function to delete related entities otherwise it would raise NotImplementedError on its usage. """ session = self.get_session(session) query = self._build_get_project_entities_query(project_id, session=session) try: # query cannot be None as related repo class is expected to # implement it otherwise error is raised in build query call for entity in query: # Its a soft delete so its more like entity update entity.delete(session=session) except sqlalchemy.exc.SQLAlchemyError: LOG.exception('Problem finding project related entity to delete') if not suppress_exception: raise exception.BarbicanException(u._('Error deleting project ' 'entities for ' 'project_id=%s'), project_id) class ProjectRepo(BaseRepo): """Repository for the Project entity.""" def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "Project" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" return session.query(models.Project).filter_by(id=entity_id) def _do_validate(self, values): """Sub-class hook: validate values.""" pass def find_by_external_project_id(self, external_project_id, suppress_exception=False, session=None): session = self.get_session(session) try: query = session.query(models.Project) query = query.filter_by(external_id=external_project_id) entity = query.one() except sa_orm.exc.NoResultFound: entity = None if not suppress_exception: LOG.exception("Problem getting Project %s", external_project_id) raise exception.NotFound(u._( "No {entity_name} found with keystone-ID {id}").format( entity_name=self._do_entity_name(), id=external_project_id)) return entity def _build_get_project_entities_query(self, project_id, session): """Builds query for retrieving project for given id.""" query = session.query(models.Project) return query.filter_by(id=project_id).filter_by(deleted=False) class SecretRepo(BaseRepo): """Repository for the Secret entity.""" def get_secret_list(self, external_project_id, offset_arg=None, limit_arg=None, name=None, alg=None, mode=None, bits=0, secret_type=None, suppress_exception=False, session=None, acl_only=None, user_id=None, created=None, updated=None, expiration=None, sort=None): """Returns a list of secrets The list is scoped to secrets that are associated with the external_project_id (e.g. Keystone Project ID), and filtered using any provided filters. 
""" offset, limit = clean_paging_values(offset_arg, limit_arg) session = self.get_session(session) utcnow = timeutils.utcnow() query = session.query(models.Secret) query = query.filter_by(deleted=False) query = query.filter(or_(models.Secret.expiration.is_(None), models.Secret.expiration > utcnow)) if name: query = query.filter(models.Secret.name.like(name)) if alg: query = query.filter(models.Secret.algorithm.like(alg)) if mode: query = query.filter(models.Secret.mode.like(mode)) if bits > 0: query = query.filter(models.Secret.bit_length == bits) if secret_type: query = query.filter(models.Secret.secret_type == secret_type) if created: query = self._build_date_filter_query(query, 'created_at', created) if updated: query = self._build_date_filter_query(query, 'updated_at', updated) if expiration: query = self._build_date_filter_query( query, 'expiration', expiration ) else: query = query.filter(or_(models.Secret.expiration.is_(None), models.Secret.expiration > utcnow)) if sort: query = self._build_sort_filter_query(query, sort) if acl_only and acl_only.lower() == 'true' and user_id: query = query.join(models.SecretACL) query = query.join(models.SecretACLUser) query = query.filter(models.SecretACLUser.user_id == user_id) else: query = query.join(models.Project) query = query.filter( models.Project.external_id == external_project_id) total = query.count() end_offset = offset + limit LOG.debug('Retrieving from %s to %s', offset, end_offset) query = query.limit(limit).offset(offset) entities = query.all() LOG.debug('Number entities retrieved: %s out of %s', len(entities), total) if total <= 0 and not suppress_exception: _raise_no_entities_found(self._do_entity_name()) return entities, offset, limit, total def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "Secret" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" utcnow = timeutils.utcnow() expiration_filter = or_(models.Secret.expiration.is_(None), models.Secret.expiration > utcnow) query = session.query(models.Secret) query = query.filter_by(id=entity_id, deleted=False) query = query.filter(expiration_filter) query = query.join(models.Project) query = query.filter(models.Project.external_id == external_project_id) return query def _do_validate(self, values): """Sub-class hook: validate values.""" pass def _build_get_project_entities_query(self, project_id, session): """Builds query for retrieving Secrets associated with a given project :param project_id: id of barbican project entity :param session: existing db session reference. 
""" utcnow = timeutils.utcnow() expiration_filter = or_(models.Secret.expiration.is_(None), models.Secret.expiration > utcnow) query = session.query(models.Secret).filter_by(deleted=False) query = query.filter(models.Secret.project_id == project_id) query = query.filter(expiration_filter) return query def _build_date_filter_query(self, query, attribute, date_filters): """Parses date_filters to apply each filter to the given query :param query: query object to apply filters to :param attribute: name of the model attribute to be filtered :param date_filters: comma separated string of date filters to apply """ parse = timeutils.parse_isotime for filter in date_filters.split(','): if filter.startswith('lte:'): isotime = filter[4:] query = query.filter(or_( getattr(models.Secret, attribute) < parse(isotime), getattr(models.Secret, attribute) == parse(isotime)) ) elif filter.startswith('lt:'): isotime = filter[3:] query = query.filter( getattr(models.Secret, attribute) < parse(isotime) ) elif filter.startswith('gte:'): isotime = filter[4:] query = query.filter(or_( getattr(models.Secret, attribute) > parse(isotime), getattr(models.Secret, attribute) == parse(isotime)) ) elif filter.startswith('gt:'): isotime = filter[3:] query = query.filter( getattr(models.Secret, attribute) > parse(isotime) ) else: query = query.filter( getattr(models.Secret, attribute) == parse(filter) ) return query def _build_sort_filter_query(self, query, sort_filters): """Parses sort_filters to order the query""" key_to_column_map = { 'created': 'created_at', 'updated': 'updated_at' } ordering = list() for sort in sort_filters.split(','): if ':' in sort: key, direction = sort.split(':') else: key, direction = sort, 'asc' ordering.append( getattr( getattr(models.Secret, key_to_column_map.get(key, key)), direction )() ) return query.order_by(*ordering) def get_secret_by_id(self, entity_id, suppress_exception=False, session=None): """Gets secret by its entity id without project id check.""" session = self.get_session(session) try: utcnow = timeutils.utcnow() expiration_filter = or_(models.Secret.expiration.is_(None), models.Secret.expiration > utcnow) query = session.query(models.Secret) query = query.filter_by(id=entity_id, deleted=False) query = query.filter(expiration_filter) entity = query.one() except sa_orm.exc.NoResultFound: entity = None if not suppress_exception: LOG.exception("Problem getting secret %s", entity_id) raise exception.NotFound(u._( "No secret found with secret-ID {id}").format( entity_name=self._do_entity_name(), id=entity_id)) return entity class EncryptedDatumRepo(BaseRepo): """Repository for the EncryptedDatum entity Stores encrypted information on behalf of a Secret. """ def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "EncryptedDatum" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" return session.query(models.EncryptedDatum).filter_by(id=entity_id) def _do_validate(self, values): """Sub-class hook: validate values.""" pass class SecretStoreMetadatumRepo(BaseRepo): """Repository for the SecretStoreMetadatum entity Stores key/value information on behalf of a Secret. """ def save(self, metadata, secret_model): """Saves the specified metadata for the secret. :raises NotFound if entity does not exist. 
""" now = timeutils.utcnow() for k, v in metadata.items(): meta_model = models.SecretStoreMetadatum(k, v) meta_model.updated_at = now meta_model.secret = secret_model meta_model.save() def get_metadata_for_secret(self, secret_id): """Returns a dict of SecretStoreMetadatum instances.""" session = get_session() query = session.query(models.SecretStoreMetadatum) query = query.filter_by(deleted=False) query = query.filter( models.SecretStoreMetadatum.secret_id == secret_id) metadata = query.all() return {m.key: m.value for m in metadata} def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "SecretStoreMetadatum" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" query = session.query(models.SecretStoreMetadatum) return query.filter_by(id=entity_id) def _do_validate(self, values): """Sub-class hook: validate values.""" pass class SecretUserMetadatumRepo(BaseRepo): """Repository for the SecretUserMetadatum entity Stores key/value information on behalf of a Secret. """ def create_replace_user_metadata(self, secret_id, metadata): """Creates or replaces the specified metadata for the secret.""" now = timeutils.utcnow() session = get_session() query = session.query(models.SecretUserMetadatum) query = query.filter_by(secret_id=secret_id) query.delete() for k, v in metadata.items(): meta_model = models.SecretUserMetadatum(k, v) meta_model.secret_id = secret_id meta_model.updated_at = now meta_model.save(session=session) def get_metadata_for_secret(self, secret_id): """Returns a dict of SecretUserMetadatum instances.""" session = get_session() query = session.query(models.SecretUserMetadatum) query = query.filter_by(deleted=False) query = query.filter( models.SecretUserMetadatum.secret_id == secret_id) metadata = query.all() return {m.key: m.value for m in metadata} def create_replace_user_metadatum(self, secret_id, key, value): now = timeutils.utcnow() session = get_session() query = session.query(models.SecretUserMetadatum) query = query.filter_by(secret_id=secret_id) query = query.filter_by(key=key) query.delete() meta_model = models.SecretUserMetadatum(key, value) meta_model.secret_id = secret_id meta_model.updated_at = now meta_model.save(session=session) def delete_metadatum(self, secret_id, key): """Removes a key from a SecretUserMetadatum instances.""" session = get_session() query = session.query(models.SecretUserMetadatum) query = query.filter_by(secret_id=secret_id) query = query.filter_by(key=key) query.delete() def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "SecretUserMetadatum" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" query = session.query(models.SecretUserMetadatum) return query.filter_by(id=entity_id) def _do_validate(self, values): """Sub-class hook: validate values.""" pass class KEKDatumRepo(BaseRepo): """Repository for the KEKDatum entity Stores key encryption key (KEK) metadata used by crypto plugins to encrypt/decrypt secrets. """ def find_or_create_kek_datum(self, project, plugin_name, suppress_exception=False, session=None): """Find or create a KEK datum instance.""" if not plugin_name: raise exception.BarbicanException( u._('Tried to register crypto plugin with null or empty ' 'name.')) kek_datum = None session = self.get_session(session) # TODO(jfwood): Reverse this...attempt insert first, then get on fail. 
try: query = session.query(models.KEKDatum) query = query.filter_by(project_id=project.id, plugin_name=plugin_name, active=True, deleted=False) kek_datum = query.one() except sa_orm.exc.NoResultFound: kek_datum = models.KEKDatum() kek_datum.kek_label = "project-{0}-key-{1}".format( project.external_id, uuidutils.generate_uuid()) kek_datum.project_id = project.id kek_datum.plugin_name = plugin_name kek_datum.status = models.States.ACTIVE self.save(kek_datum) return kek_datum def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "KEKDatum" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" return session.query(models.KEKDatum).filter_by(id=entity_id) def _do_validate(self, values): """Sub-class hook: validate values.""" pass def _build_get_project_entities_query(self, project_id, session): """Builds query for retrieving KEK Datum instance(s). The returned KEK Datum instance(s) are related to a given project. :param project_id: id of barbican project entity :param session: existing db session reference. """ return session.query(models.KEKDatum).filter_by( project_id=project_id).filter_by(deleted=False) class OrderRepo(BaseRepo): """Repository for the Order entity.""" def get_by_create_date(self, external_project_id, offset_arg=None, limit_arg=None, meta_arg=None, suppress_exception=False, session=None): """Returns a list of orders The list is ordered by the date they were created at and paged based on the offset and limit fields. :param external_project_id: The keystone id for the project. :param offset_arg: The entity number where the query result should start. :param limit_arg: The maximum amount of entities in the result set. :param meta_arg: Optional meta field used to filter results. :param suppress_exception: Whether NoResultFound exceptions should be suppressed. :param session: SQLAlchemy session object. :returns: Tuple consisting of (list_of_entities, offset, limit, total). """ offset, limit = clean_paging_values(offset_arg, limit_arg) session = self.get_session(session) query = session.query(models.Order) query = query.order_by(models.Order.created_at) query = query.filter_by(deleted=False) if meta_arg: query = query.filter(models.Order.meta.contains(meta_arg)) query = query.join(models.Project, models.Order.project) query = query.filter(models.Project.external_id == external_project_id) start = offset end = offset + limit LOG.debug('Retrieving from %s to %s', start, end) total = query.count() entities = query.offset(start).limit(limit).all() LOG.debug('Number entities retrieved: %s out of %s', len(entities), total ) if total <= 0 and not suppress_exception: _raise_no_entities_found(self._do_entity_name()) return entities, offset, limit, total def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "Order" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" query = session.query(models.Order) query = query.filter_by(id=entity_id, deleted=False) query = query.join(models.Project, models.Order.project) query = query.filter(models.Project.external_id == external_project_id) return query def _do_validate(self, values): """Sub-class hook: validate values.""" pass def _build_get_project_entities_query(self, project_id, session): """Builds query for retrieving orders related to given project. 
:param project_id: id of barbican project entity :param session: existing db session reference. """ return session.query(models.Order).filter_by( project_id=project_id).filter_by(deleted=False) class OrderPluginMetadatumRepo(BaseRepo): """Repository for the OrderPluginMetadatum entity Stores key/value plugin information on behalf of an Order. """ def save(self, metadata, order_model): """Saves the specified metadata for the order. :raises NotFound if entity does not exist. """ now = timeutils.utcnow() session = get_session() for k, v in metadata.items(): meta_model = models.OrderPluginMetadatum(k, v) meta_model.updated_at = now meta_model.order = order_model meta_model.save(session=session) def get_metadata_for_order(self, order_id): """Returns a dict of OrderPluginMetadatum instances.""" session = get_session() try: query = session.query(models.OrderPluginMetadatum) query = query.filter_by(deleted=False) query = query.filter( models.OrderPluginMetadatum.order_id == order_id) metadata = query.all() except sa_orm.exc.NoResultFound: metadata = {} return {m.key: m.value for m in metadata} def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "OrderPluginMetadatum" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" query = session.query(models.OrderPluginMetadatum) return query.filter_by(id=entity_id) def _do_validate(self, values): """Sub-class hook: validate values.""" pass class OrderBarbicanMetadatumRepo(BaseRepo): """Repository for the OrderBarbicanMetadatum entity Stores key/value plugin information on behalf of a Order. """ def save(self, metadata, order_model): """Saves the specified metadata for the order. :raises NotFound if entity does not exist. """ now = timeutils.utcnow() session = get_session() for k, v in metadata.items(): meta_model = models.OrderBarbicanMetadatum(k, v) meta_model.updated_at = now meta_model.order = order_model meta_model.save(session=session) def get_metadata_for_order(self, order_id): """Returns a dict of OrderBarbicanMetadatum instances.""" session = get_session() try: query = session.query(models.OrderBarbicanMetadatum) query = query.filter_by(deleted=False) query = query.filter( models.OrderBarbicanMetadatum.order_id == order_id) metadata = query.all() except sa_orm.exc.NoResultFound: metadata = {} return {m.key: m.value for m in metadata} def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "OrderBarbicanMetadatum" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" query = session.query(models.OrderBarbicanMetadatum) return query.filter_by(id=entity_id) def _do_validate(self, values): """Sub-class hook: validate values.""" pass class OrderRetryTaskRepo(BaseRepo): """Repository for the OrderRetryTask entity.""" def get_by_create_date( self, only_at_or_before_this_date=None, offset_arg=None, limit_arg=None, suppress_exception=False, session=None): """Returns a list of order retry task entities The list is ordered by the date they were created at and paged based on the offset and limit fields. :param only_at_or_before_this_date: If specified, only entities at or before this date are returned. :param offset_arg: The entity number where the query result should start. :param limit_arg: The maximum amount of entities in the result set. :param suppress_exception: Whether NoResultFound exceptions should be suppressed. 
:param session: SQLAlchemy session object. :returns: Tuple consisting of (list_of_entities, offset, limit, total). """ offset, limit = clean_paging_values(offset_arg, limit_arg) session = self.get_session(session) query = session.query(models.OrderRetryTask) query = query.order_by(models.OrderRetryTask.created_at) query = query.filter_by(deleted=False) if only_at_or_before_this_date: query = query.filter( models.OrderRetryTask.retry_at <= only_at_or_before_this_date) start = offset end = offset + limit LOG.debug('Retrieving from %s to %s', start, end) total = query.count() entities = query.offset(start).limit(limit).all() LOG.debug('Number entities retrieved: %s out of %s', len(entities), total ) if total <= 0 and not suppress_exception: _raise_no_entities_found(self._do_entity_name()) return entities, offset, limit, total def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "OrderRetryTask" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" query = session.query(models.OrderRetryTask) query = query.filter_by(id=entity_id, deleted=False) return query def _do_validate(self, values): """Sub-class hook: validate values.""" pass class ContainerRepo(BaseRepo): """Repository for the Container entity.""" def get_by_create_date(self, external_project_id, offset_arg=None, limit_arg=None, name_arg=None, suppress_exception=False, session=None): """Returns a list of containers The list is ordered by the date they were created at and paged based on the offset and limit fields. The external_project_id is external-to-Barbican value assigned to the project by Keystone. """ offset, limit = clean_paging_values(offset_arg, limit_arg) session = self.get_session(session) query = session.query(models.Container) query = query.order_by(models.Container.created_at) query = query.filter_by(deleted=False) if name_arg: query = query.filter(models.Container.name.like(name_arg)) query = query.join(models.Project, models.Container.project) query = query.filter(models.Project.external_id == external_project_id) start = offset end = offset + limit LOG.debug('Retrieving from %s to %s', start, end) total = query.count() entities = query.offset(start).limit(limit).all() LOG.debug('Number entities retrieved: %s out of %s', len(entities), total ) if total <= 0 and not suppress_exception: _raise_no_entities_found(self._do_entity_name()) return entities, offset, limit, total def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "Container" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" query = session.query(models.Container) query = query.filter_by(id=entity_id, deleted=False) query = query.join(models.Project, models.Container.project) query = query.filter(models.Project.external_id == external_project_id) return query def _do_validate(self, values): """Sub-class hook: validate values.""" pass def _build_get_project_entities_query(self, project_id, session): """Builds query for retrieving container related to given project. :param project_id: id of barbican project entity :param session: existing db session reference. 
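# (Illustrative paged listing with hypothetical arguments; name_arg is a
# SQL LIKE pattern:)
#
#     containers, offset, limit, total = container_repo.get_by_create_date(
#         external_project_id,
#         offset_arg=0,
#         limit_arg=10,
#         name_arg='%web%')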
""" return session.query(models.Container).filter_by( deleted=False).filter_by(project_id=project_id) def get_container_by_id(self, entity_id, suppress_exception=False, session=None): """Gets container by its entity id without project id check.""" session = self.get_session(session) try: query = session.query(models.Container) query = query.filter_by(id=entity_id, deleted=False) entity = query.one() except sa_orm.exc.NoResultFound: entity = None if not suppress_exception: LOG.exception("Problem getting container %s", entity_id) raise exception.NotFound(u._( "No container found with container-ID {id}").format( entity_name=self._do_entity_name(), id=entity_id)) return entity class ContainerSecretRepo(BaseRepo): """Repository for the ContainerSecret entity.""" def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "ContainerSecret" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" return session.query(models.ContainerSecret ).filter_by(id=entity_id) def _do_validate(self, values): """Sub-class hook: validate values.""" pass class ContainerConsumerRepo(BaseRepo): """Repository for the Service entity.""" def get_by_container_id(self, container_id, offset_arg=None, limit_arg=None, suppress_exception=False, session=None): """Returns a list of Consumers The list is ordered by the date they were created at and paged based on the offset and limit fields. """ offset, limit = clean_paging_values(offset_arg, limit_arg) session = self.get_session(session) query = session.query(models.ContainerConsumerMetadatum) query = query.order_by(models.ContainerConsumerMetadatum.name) query = query.filter_by(deleted=False) query = query.filter( models.ContainerConsumerMetadatum.container_id == container_id ) start = offset end = offset + limit LOG.debug('Retrieving from %s to %s', start, end) total = query.count() entities = query.offset(start).limit(limit).all() LOG.debug('Number entities retrieved: %s out of %s', len(entities), total ) if total <= 0 and not suppress_exception: _raise_no_entities_found(self._do_entity_name()) return entities, offset, limit, total def get_by_values(self, container_id, name, URL, suppress_exception=False, show_deleted=False, session=None): session = self.get_session(session) try: query = session.query(models.ContainerConsumerMetadatum) query = query.filter_by( container_id=container_id, name=name, URL=URL) if not show_deleted: query.filter_by(deleted=False) consumer = query.one() except sa_orm.exc.NoResultFound: consumer = None if not suppress_exception: raise exception.NotFound( u._("Could not find {entity_name}").format( entity_name=self._do_entity_name())) return consumer def create_or_update_from(self, new_consumer, container, session=None): session = self.get_session(session) try: container.updated_at = timeutils.utcnow() container.consumers.append(new_consumer) container.save(session=session) except db_exc.DBDuplicateEntry: session.rollback() # We know consumer already exists. 
            # This operation is idempotent, so log this and move on
            LOG.debug("Consumer %s with URL %s already exists for "
                      "container %s, continuing...", new_consumer.name,
                      new_consumer.URL, new_consumer.container_id)
            # Get the existing entry and reuse it by clearing the deleted
            # flags
            existing_consumer = self.get_by_values(
                new_consumer.container_id, new_consumer.name,
                new_consumer.URL, show_deleted=True)
            existing_consumer.deleted = False
            existing_consumer.deleted_at = None
            # We are not concerned about timing here -- set only, no reads
            existing_consumer.save()

    def _do_entity_name(self):
        """Sub-class hook: return entity name, such as for debugging."""
        return "ContainerConsumer"

    def _do_build_get_query(self, entity_id, external_project_id, session):
        """Sub-class hook: build a retrieve query."""
        query = session.query(models.ContainerConsumerMetadatum)
        return query.filter_by(id=entity_id, deleted=False)

    def _do_validate(self, values):
        """Sub-class hook: validate values."""
        pass

    def _build_get_project_entities_query(self, project_id, session):
        """Builds query for retrieving consumers associated with a project.

        :param project_id: id of barbican project entity
        :param session: existing db session reference.
        """
        query = session.query(
            models.ContainerConsumerMetadatum).filter_by(deleted=False)
        query = query.filter(
            models.ContainerConsumerMetadatum.project_id == project_id)
        return query


class TransportKeyRepo(BaseRepo):
    """Repository for the TransportKey entity

    Stores transport keys for wrapping the secret data to/from a
    barbican client.
    """

    def _do_entity_name(self):
        """Sub-class hook: return entity name, such as for debugging."""
        return "TransportKey"

    def get_by_create_date(self, plugin_name=None,
                           offset_arg=None, limit_arg=None,
                           suppress_exception=False, session=None):
        """Returns a list of transport keys

        The list is ordered by the date the keys were created, and the
        search accepts plugin_name as an optional filter.
        """
        offset, limit = clean_paging_values(offset_arg, limit_arg)

        session = self.get_session(session)
        query = session.query(models.TransportKey)
        query = query.order_by(models.TransportKey.created_at)
        if plugin_name is not None:
            # NOTE: filter the existing query rather than starting a new
            # one, so the order_by clause above is preserved.
            query = query.filter_by(deleted=False, plugin_name=plugin_name)
        else:
            query = query.filter_by(deleted=False)

        start = offset
        end = offset + limit
        LOG.debug('Retrieving from %s to %s', start, end)
        total = query.count()
        entities = query.offset(start).limit(limit).all()
        LOG.debug('Number of entities retrieved: %s out of %s',
                  len(entities), total)

        if total <= 0 and not suppress_exception:
            _raise_no_entities_found(self._do_entity_name())

        return entities, offset, limit, total

    def get_latest_transport_key(self, plugin_name, suppress_exception=False,
                                 session=None):
        """Returns the latest transport key for a given plugin.

        Note that a list of at most one key is returned, as produced by
        get_by_create_date with limit_arg=1.
        """
        entities, offset, limit, total = self.get_by_create_date(
            plugin_name, offset_arg=0, limit_arg=1,
            suppress_exception=suppress_exception, session=session)
        return entities

    def _do_build_get_query(self, entity_id, external_project_id, session):
        """Sub-class hook: build a retrieve query."""
        return session.query(models.TransportKey).filter_by(id=entity_id)

    def _do_validate(self, values):
        """Sub-class hook: validate values."""
        pass


class CertificateAuthorityRepo(BaseRepo):
    """Repository for the CertificateAuthority entity.

    CertificateAuthority entries are not soft deleted, so there is no
    need for a deleted=False filter in queries.
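
    An illustrative retrieval sketch (values are placeholders; assumes a
    configured database session factory)::

        ca_repo = CertificateAuthorityRepo()
        cas, offset, limit, total = ca_repo.get_by_create_date(
            plugin_name='snakeoil_ca', suppress_exception=True)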
""" def get_by_create_date(self, offset_arg=None, limit_arg=None, plugin_name=None, plugin_ca_id=None, suppress_exception=False, session=None, show_expired=False, project_id=None, restrict_to_project_cas=False): """Returns a list of certificate authorities The returned certificate authorities are ordered by the date they were created and paged based on the offset and limit fields. """ offset, limit = clean_paging_values(offset_arg, limit_arg) session = self.get_session(session) if restrict_to_project_cas: # get both subCAs which have been defined for your project # (cas for which the ca.project_id == project_id) AND # project_cas which are defined for your project # (pca.project_id = project_id) query1 = session.query(models.CertificateAuthority) query1 = query1.filter( models.CertificateAuthority.project_id == project_id) query2 = session.query(models.CertificateAuthority) query2 = query2.join(models.ProjectCertificateAuthority) query2 = query2.filter( models.ProjectCertificateAuthority.project_id == project_id) query = query1.union(query2) else: # get both subcas that have been defined for your project # (cas for which ca.project_id == project_id) AND # all top-level CAs (ca.project_id == None) query = session.query(models.CertificateAuthority) query = query.filter(or_( models.CertificateAuthority.project_id == project_id, models.CertificateAuthority.project_id.is_(None) )) query = query.order_by(models.CertificateAuthority.created_at) query = query.filter_by(deleted=False) if not show_expired: utcnow = timeutils.utcnow() query = query.filter(or_( models.CertificateAuthority.expiration.is_(None), models.CertificateAuthority.expiration > utcnow)) if plugin_name: query = query.filter( models.CertificateAuthority.plugin_name.like(plugin_name)) if plugin_ca_id: query = query.filter( models.CertificateAuthority.plugin_ca_id.like(plugin_ca_id)) start = offset end = offset + limit LOG.debug('Retrieving from %s to %s', start, end) total = query.count() entities = query.offset(start).limit(limit).all() LOG.debug('Number entities retrieved: %s out of %s', len(entities), total ) if total <= 0 and not suppress_exception: _raise_no_entities_found(self._do_entity_name()) return entities, offset, limit, total def update_entity(self, old_ca, parsed_ca_in, session=None): """Updates CA entry and its sub-entries.""" parsed_ca = dict(parsed_ca_in) # these fields cannot be modified parsed_ca.pop('plugin_name', None) parsed_ca.pop('plugin_ca_id', None) expiration = parsed_ca.pop('expiration', None) expiration_iso = timeutils.parse_isotime(expiration.strip()) new_expiration = timeutils.normalize_time(expiration_iso) session = self.get_session(session) query = session.query(models.CertificateAuthority).filter_by( id=old_ca.id, deleted=False) entity = query.one() entity.expiration = new_expiration for k, v in entity.ca_meta.items(): if k not in parsed_ca.keys(): v.delete(session) for key in parsed_ca: if key not in entity.ca_meta.keys(): meta = models.CertificateAuthorityMetadatum( key, parsed_ca[key]) entity.ca_meta[key] = meta else: entity.ca_meta[key].value = parsed_ca[key] entity.save() return entity def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "CertificateAuthority" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" utcnow = timeutils.utcnow() # TODO(jfwood): Performance? Is the many-to-many join needed? 
        expiration_filter = or_(
            models.CertificateAuthority.expiration.is_(None),
            models.CertificateAuthority.expiration > utcnow)

        query = session.query(models.CertificateAuthority)
        query = query.filter_by(id=entity_id, deleted=False)
        query = query.filter(expiration_filter)

        return query

    def _do_validate(self, values):
        """Sub-class hook: validate values."""
        pass

    def _build_get_project_entities_query(self, project_id, session):
        """Builds query for retrieving CAs related to given project.

        :param project_id: id of barbican project entity
        :param session: existing db session reference.
        """
        return session.query(models.CertificateAuthority).filter_by(
            project_id=project_id).filter_by(deleted=False)


class CertificateAuthorityMetadatumRepo(BaseRepo):
    """Repository for the CertificateAuthorityMetadatum entity

    Stores key/value information on behalf of a CA.
    """

    def save(self, metadata, ca_model):
        """Saves the specified metadata for the CA.

        :raises NotFound: if the entity does not exist.
        """
        now = timeutils.utcnow()
        session = get_session()
        for k, v in metadata.items():
            meta_model = models.CertificateAuthorityMetadatum(k, v)
            meta_model.updated_at = now
            meta_model.ca = ca_model
            meta_model.save(session=session)

    def get_metadata_for_certificate_authority(self, ca_id):
        """Returns a dict of the CA's metadata key/value pairs."""
        session = get_session()
        try:
            query = session.query(models.CertificateAuthorityMetadatum)
            query = query.filter_by(deleted=False)
            query = query.filter(
                models.CertificateAuthorityMetadatum.ca_id == ca_id)
            metadata = query.all()
        except sa_orm.exc.NoResultFound:
            metadata = dict()
        # NOTE: build a key/value dict (not a set of tuples) so callers
        # can look up metadata values by key.
        return {m.key: m.value for m in metadata}

    def _do_entity_name(self):
        """Sub-class hook: return entity name, such as for debugging."""
        return "CertificateAuthorityMetadatum"

    def _do_build_get_query(self, entity_id, external_project_id, session):
        """Sub-class hook: build a retrieve query."""
        query = session.query(models.CertificateAuthorityMetadatum)
        return query.filter_by(id=entity_id)

    def _do_validate(self, values):
        """Sub-class hook: validate values."""
        pass


class ProjectCertificateAuthorityRepo(BaseRepo):
    """Repository for the ProjectCertificateAuthority entity.

    ProjectCertificateAuthority entries are not soft deleted, so there is
    no need for a deleted=False filter in queries.
    """

    def get_by_create_date(self, offset_arg=None, limit_arg=None,
                           project_id=None, ca_id=None,
                           suppress_exception=False, session=None):
        """Returns a list of project CAs

        The returned project CAs are ordered by the date they were created
        and paged based on the offset and limit fields.
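
        An illustrative call (placeholder ids; assumes a configured
        database)::

            pca_repo = ProjectCertificateAuthorityRepo()
            pcas, offset, limit, total = pca_repo.get_by_create_date(
                project_id=project.id, suppress_exception=True)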
""" offset, limit = clean_paging_values(offset_arg, limit_arg) session = self.get_session(session) query = session.query(models.ProjectCertificateAuthority) query = query.order_by(models.ProjectCertificateAuthority.created_at) query = query.filter_by(deleted=False) if project_id: query = query.filter( models.ProjectCertificateAuthority.project_id.like(project_id)) if ca_id: query = query.filter( models.ProjectCertificateAuthority.ca_id.like(ca_id)) start = offset end = offset + limit LOG.debug('Retrieving from %s to %s', start, end) total = query.count() entities = query.offset(start).limit(limit).all() LOG.debug('Number entities retrieved: %s out of %s', len(entities), total ) if total <= 0 and not suppress_exception: _raise_no_entities_found(self._do_entity_name()) return entities, offset, limit, total def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "ProjectCertificateAuthority" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" return session.query(models.ProjectCertificateAuthority).filter_by( id=entity_id) def _do_validate(self, values): """Sub-class hook: validate values.""" pass def _build_get_project_entities_query(self, project_id, session): """Builds query for retrieving CA related to given project. :param project_id: id of barbican project entity :param session: existing db session reference. """ return session.query(models.ProjectCertificateAuthority).filter_by( project_id=project_id) class PreferredCertificateAuthorityRepo(BaseRepo): """Repository for the PreferredCertificateAuthority entity. PreferredCertificateAuthority entries are not soft delete. So there is no need to have deleted=False filter in queries. """ def get_by_create_date(self, offset_arg=None, limit_arg=None, project_id=None, ca_id=None, suppress_exception=False, session=None): """Returns a list of preferred CAs The returned CAs are ordered by the date they were created and paged based on the offset and limit fields. """ offset, limit = clean_paging_values(offset_arg, limit_arg) session = self.get_session(session) query = session.query(models.PreferredCertificateAuthority) query = query.order_by(models.PreferredCertificateAuthority.created_at) if project_id: query = query.filter( models.PreferredCertificateAuthority.project_id.like( project_id)) if ca_id: query = query.filter( models.PreferredCertificateAuthority.ca_id.like(ca_id)) start = offset end = offset + limit LOG.debug('Retrieving from %s to %s', start, end) total = query.count() entities = query.offset(start).limit(limit).all() LOG.debug('Number entities retrieved: %s out of %s', len(entities), total ) if total <= 0 and not suppress_exception: _raise_no_entities_found(self._do_entity_name()) return entities, offset, limit, total def create_or_update_by_project_id(self, project_id, ca_id, session=None): """Create or update preferred CA for a project by project_id. :param project_id: ID of project whose preferred CA will be saved :param ca_id: ID of preferred CA :param session: SQLAlchemy session object. 
        :return: None
        """
        session = self.get_session(session)
        query = session.query(models.PreferredCertificateAuthority)
        query = query.filter_by(project_id=project_id)
        try:
            entity = query.one()
        except sa_orm.exc.NoResultFound:
            self.create_from(
                models.PreferredCertificateAuthority(project_id, ca_id),
                session=session)
        else:
            entity.ca_id = ca_id
            entity.save(session)

    def _do_entity_name(self):
        """Sub-class hook: return entity name, such as for debugging."""
        return "PreferredCertificateAuthority"

    def _do_build_get_query(self, entity_id, external_project_id, session):
        """Sub-class hook: build a retrieve query."""
        return session.query(models.PreferredCertificateAuthority).filter_by(
            id=entity_id)

    def _do_validate(self, values):
        """Sub-class hook: validate values."""
        pass

    def _build_get_project_entities_query(self, project_id, session):
        """Builds query for retrieving the preferred CA of a given project.

        :param project_id: id of barbican project entity
        :param session: existing db session reference.
        """
        return session.query(models.PreferredCertificateAuthority).filter_by(
            project_id=project_id)


class SecretACLRepo(BaseRepo):
    """Repository for the SecretACL entity.

    There is no need for a SecretACLUserRepo, as no logic accesses
    SecretACLUser (ACL user data) directly; it is always derived through
    the SecretACL relationship. SecretACL and SecretACLUser data are not
    soft deleted, so there is no need for a deleted=False filter in
    queries.
    """

    def _do_entity_name(self):
        """Sub-class hook: return entity name, such as for debugging."""
        return "SecretACL"

    def _do_build_get_query(self, entity_id, external_project_id, session):
        """Sub-class hook: build a retrieve query."""
        query = session.query(models.SecretACL)
        query = query.filter_by(id=entity_id)
        return query

    def _do_validate(self, values):
        """Sub-class hook: validate values."""
        pass

    def get_by_secret_id(self, secret_id, session=None):
        """Return the list of secret ACLs for a secret id."""
        session = self.get_session(session)
        query = session.query(models.SecretACL)
        query = query.filter_by(secret_id=secret_id)
        return query.all()

    def create_or_replace_from(self, secret, secret_acl, user_ids=None,
                               session=None):
        session = self.get_session(session)
        secret.updated_at = timeutils.utcnow()
        secret_acl.updated_at = timeutils.utcnow()
        secret.secret_acls.append(secret_acl)
        secret.save(session=session)

        self._create_or_replace_acl_users(secret_acl, user_ids,
                                          session=session)

    def _create_or_replace_acl_users(self, secret_acl, user_ids,
                                     session=None):
        """Creates or updates secret acl users for the input user_ids list.

        user_ids is expected to be a list of ids (enforced by schema
        validation). The input user ids should be the complete list of acl
        users; partial updates of user ids are not applied.

        If user_ids is None, no change is made to the acl user data.

        If user_ids is not None, the following changes are made: for
        existing acl users, the timestamp is updated if the user_id is
        present in the input list, otherwise the existing acl user entry
        is removed. The remaining input user ids are then added as new acl
        user db entries.
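
        For example, if the ACL currently has acl users {'u1', 'u2'} and
        user_ids is passed as ['u2', 'u3'], then 'u1' is removed, 'u2' is
        kept with a refreshed timestamp, and 'u3' is added (illustrative
        ids only).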
""" if user_ids is None: return user_ids = set(user_ids) now = timeutils.utcnow() session = self.get_session(session) secret_acl.updated_at = now for acl_user in secret_acl.acl_users: if acl_user.user_id in user_ids: # input user_id already exists acl_user.updated_at = now user_ids.remove(acl_user.user_id) else: acl_user.delete(session) for user_id in user_ids: acl_user = models.SecretACLUser(secret_acl.id, user_id) secret_acl.acl_users.append(acl_user) secret_acl.save(session=session) def get_count(self, secret_id, session=None): """Gets count of existing secret ACL(s) for a given secret.""" session = self.get_session(session) query = session.query(sa_func.count(models.SecretACL.id)) query = query.filter(models.SecretACL.secret_id == secret_id) return query.scalar() def delete_acls_for_secret(self, secret, session=None): session = self.get_session(session) for entity in secret.secret_acls: entity.delete(session=session) class ContainerACLRepo(BaseRepo): """Repository for the ContainerACL entity. There is no need for ContainerACLUserRepo as none of logic access ContainerACLUser (ACL user data) directly. Its always derived from ContainerACL relationship. ContainerACL and ContainerACLUser data is not soft delete. So there is no need to have deleted=False filter in queries. """ def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "ContainerACL" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" query = session.query(models.ContainerACL) query = query.filter_by(id=entity_id) return query def _do_validate(self, values): """Sub-class hook: validate values.""" pass def get_by_container_id(self, container_id, session=None): """Return list of container ACLs by container id.""" session = self.get_session(session) query = session.query(models.ContainerACL) query = query.filter_by(container_id=container_id) return query.all() def create_or_replace_from(self, container, container_acl, user_ids=None, session=None): session = self.get_session(session) container.updated_at = timeutils.utcnow() container_acl.updated_at = timeutils.utcnow() container.container_acls.append(container_acl) container.save(session=session) self._create_or_replace_acl_users(container_acl, user_ids, session) def _create_or_replace_acl_users(self, container_acl, user_ids, session=None): """Creates or updates container acl user based on input user_ids list. user_ids is expected to be list of ids (enforced by schema validation). Input user ids should have complete list of acl users. It does not apply partial update of user ids. If user_ids is None, no change is made in acl user data. If user_ids list is not None, then following change is made. For existing acl users, just update timestamp if user_id is present in input user ids list. Otherwise, remove existing acl user entries. Then add the remaining input user ids as new acl user db entries. 
""" if user_ids is None: return user_ids = set(user_ids) now = timeutils.utcnow() session = self.get_session(session) container_acl.updated_at = now for acl_user in container_acl.acl_users: if acl_user.user_id in user_ids: # input user_id already exists acl_user.updated_at = now user_ids.remove(acl_user.user_id) else: acl_user.delete(session) for user_id in user_ids: acl_user = models.ContainerACLUser(container_acl.id, user_id) container_acl.acl_users.append(acl_user) container_acl.save(session=session) def get_count(self, container_id, session=None): """Gets count of existing container ACL(s) for a given container.""" session = self.get_session(session) query = session.query(sa_func.count(models.ContainerACL.id)) query = query.filter(models.ContainerACL.container_id == container_id) return query.scalar() def delete_acls_for_container(self, container, session=None): session = self.get_session(session) for entity in container.container_acls: entity.delete(session=session) class ProjectQuotasRepo(BaseRepo): """Repository for the ProjectQuotas entity.""" def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "ProjectQuotas" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" return session.query(models.ProjectQuotas).filter_by(id=entity_id) def _do_validate(self, values): """Sub-class hook: validate values.""" pass def get_by_create_date(self, offset_arg=None, limit_arg=None, suppress_exception=False, session=None): """Returns a list of ProjectQuotas The list is ordered by the date they were created at and paged based on the offset and limit fields. :param offset_arg: The entity number where the query result should start. :param limit_arg: The maximum amount of entities in the result set. :param suppress_exception: Whether NoResultFound exceptions should be suppressed. :param session: SQLAlchemy session object. :raises NotFound: if no quota config is found for the project :returns: Tuple consisting of (list_of_entities, offset, limit, total). """ offset, limit = clean_paging_values(offset_arg, limit_arg) session = self.get_session(session) query = session.query(models.ProjectQuotas) query = query.order_by(models.ProjectQuotas.created_at) query = query.join(models.Project, models.ProjectQuotas.project) start = offset end = offset + limit LOG.debug('Retrieving from %s to %s', start, end) total = query.count() entities = query.offset(start).limit(limit).all() LOG.debug('Number entities retrieved: %s out of %s', len(entities), total) if total <= 0 and not suppress_exception: _raise_no_entities_found(self._do_entity_name()) return entities, offset, limit, total def create_or_update_by_project_id(self, project_id, parsed_project_quotas, session=None): """Create or update Project Quotas config for a project by project_id. :param project_id: ID of project whose quota config will be saved :param parsed_project_quotas: Python dict with quota definition :param session: SQLAlchemy session object. 
        :return: None
        """
        session = self.get_session(session)
        query = session.query(models.ProjectQuotas)
        query = query.filter_by(project_id=project_id)
        try:
            entity = query.one()
        except sa_orm.exc.NoResultFound:
            self.create_from(
                models.ProjectQuotas(project_id, parsed_project_quotas),
                session=session)
        else:
            self._update_values(entity, parsed_project_quotas)
            entity.save(session)

    def get_by_external_project_id(self, external_project_id,
                                   suppress_exception=False, session=None):
        """Return configured Project Quotas for a project by project_id.

        :param external_project_id: external ID of project to get quotas for
        :param suppress_exception: when True, NotFound is not raised
        :param session: SQLAlchemy session object.
        :raises NotFound: if no quota config is found for the project
        :return: None or the ProjectQuotas entity for the project
        """
        session = self.get_session(session)
        query = session.query(models.ProjectQuotas)
        query = query.join(models.Project, models.ProjectQuotas.project)
        query = query.filter(models.Project.external_id ==
                             external_project_id)
        try:
            entity = query.one()
        except sa_orm.exc.NoResultFound:
            if suppress_exception:
                return None
            else:
                _raise_no_entities_found(self._do_entity_name())
        return entity

    def delete_by_external_project_id(self, external_project_id,
                                      suppress_exception=False,
                                      session=None):
        """Remove configured Project Quotas for a project by project_id.

        :param external_project_id: external ID of project to delete quotas
        :param suppress_exception: when True, NotFound is not raised
        :param session: SQLAlchemy session object.
        :raises NotFound: if no quota config is found for the project
        :return: None
        """
        session = self.get_session(session)
        query = session.query(models.ProjectQuotas)
        query = query.join(models.Project, models.ProjectQuotas.project)
        query = query.filter(models.Project.external_id ==
                             external_project_id)
        try:
            entity = query.one()
        except sa_orm.exc.NoResultFound:
            if suppress_exception:
                return
            else:
                _raise_no_entities_found(self._do_entity_name())
        entity.delete(session=session)


class SecretStoresRepo(BaseRepo):
    """Repository for the SecretStores entity.

    SecretStores entries are not soft deleted, so there is no need for a
    deleted=False filter in queries.
    """

    def get_all(self, session=None):
        """Get the list of available secret stores.

        The status value is not used when getting the complete list, as
        only ACTIVE entries are maintained; no other state is used or
        needed here.

        :param session: SQLAlchemy session object.
        :return: list of SecretStores entities, ordered by creation date
        """
        session = self.get_session(session)
        query = session.query(models.SecretStores)
        # NOTE: the ordered query must be re-assigned; order_by returns a
        # new query rather than modifying the existing one in place.
        query = query.order_by(models.SecretStores.created_at.asc())
        return query.all()

    def _do_entity_name(self):
        """Sub-class hook: return entity name, such as for debugging."""
        return "SecretStores"

    def _do_build_get_query(self, entity_id, external_project_id, session):
        """Sub-class hook: build a retrieve query."""
        return session.query(models.SecretStores).filter_by(
            id=entity_id)

    def _do_validate(self, values):
        """Sub-class hook: validate values."""
        pass


class ProjectSecretStoreRepo(BaseRepo):
    """Repository for the ProjectSecretStore entity.

    ProjectSecretStore entries are not soft deleted, so there is no need
    for a deleted=False filter in queries.
    """

    def get_secret_store_for_project(self, project_id, external_project_id,
                                     suppress_exception=False, session=None):
        """Returns the preferred secret store for a project, if set.
        :param project_id: ID of project whose preferred secret store is set
        :param external_project_id: external ID of project whose preferred
            secret store is set
        :param suppress_exception: when True, NotFound is not raised
        :param session: SQLAlchemy session object.

        The lookup uses the external project id when one is provided;
        otherwise the barbican project identifier is used. Raises NotFound
        when no preferred secret store is defined and
        suppress_exception=False; when suppress_exception is True, None is
        returned instead.
        """
        session = self.get_session(session)

        if external_project_id is None:
            query = session.query(models.ProjectSecretStore).filter_by(
                project_id=project_id)
        else:
            query = session.query(models.ProjectSecretStore)
            query = query.join(models.Project,
                               models.ProjectSecretStore.project)
            query = query.filter(models.Project.external_id ==
                                 external_project_id)
        try:
            entity = query.one()
        except sa_orm.exc.NoResultFound:
            LOG.info("No preferred secret store found for project = %s",
                     project_id)
            entity = None
            if not suppress_exception:
                _raise_entity_not_found(self._do_entity_name(), project_id)
        return entity

    def create_or_update_for_project(self, project_id, secret_store_id,
                                     session=None):
        """Create or update the preferred secret store for a project.

        :param project_id: ID of project whose preferred secret store is set
        :param secret_store_id: ID of secret store
        :param session: SQLAlchemy session object.
        :return: the preferred secret store entity that was created or
            updated

        If no preferred secret store is set for the given project, a new
        setting is created for it. If a setting already exists, it is
        updated with the given secret store id.
        """
        session = self.get_session(session)
        try:
            entity = self.get_secret_store_for_project(project_id, None,
                                                       session=session)
        except exception.NotFound:
            entity = self.create_from(
                models.ProjectSecretStore(project_id, secret_store_id),
                session=session)
        else:
            entity.secret_store_id = secret_store_id
            entity.save(session)
        return entity

    def get_count_by_secret_store(self, secret_store_id, session=None):
        """Gets count of projects mapped to a given secret store.

        :param secret_store_id: id of secret stores entity
        :param session: existing db session reference. If None, gets session.
        :return: a number, 0 or greater

        Provides the count of projects which are currently set to use the
        input secret store as their preferred store. This is used when an
        existing secret store configuration is removed, to validate that
        no projects are still using it as their preferred secret store.
        """
        session = self.get_session(session)
        query = session.query(models.ProjectSecretStore).filter_by(
            secret_store_id=secret_store_id)
        return query.count()

    def _do_entity_name(self):
        """Sub-class hook: return entity name, such as for debugging."""
        return "ProjectSecretStore"

    def _do_build_get_query(self, entity_id, external_project_id, session):
        """Sub-class hook: build a retrieve query."""
        return session.query(models.ProjectSecretStore).filter_by(
            id=entity_id)

    def _do_validate(self, values):
        """Sub-class hook: validate values."""
        pass

    def _build_get_project_entities_query(self, project_id, session):
        """Builds query for a project's preferred secret store entries.

        :param project_id: id of barbican project entity
        :param session: existing db session reference.
""" return session.query(models.ProjectSecretStore).filter_by( project_id=project_id) def get_ca_repository(): """Returns a singleton Secret repository instance.""" global _CA_REPOSITORY return _get_repository(_CA_REPOSITORY, CertificateAuthorityRepo) def get_container_acl_repository(): """Returns a singleton Container ACL repository instance.""" global _CONTAINER_ACL_REPOSITORY return _get_repository(_CONTAINER_ACL_REPOSITORY, ContainerACLRepo) def get_container_consumer_repository(): """Returns a singleton Container Consumer repository instance.""" global _CONTAINER_CONSUMER_REPOSITORY return _get_repository(_CONTAINER_CONSUMER_REPOSITORY, ContainerConsumerRepo) def get_container_repository(): """Returns a singleton Container repository instance.""" global _CONTAINER_REPOSITORY return _get_repository(_CONTAINER_REPOSITORY, ContainerRepo) def get_container_secret_repository(): """Returns a singleton Container-Secret repository instance.""" global _CONTAINER_SECRET_REPOSITORY return _get_repository(_CONTAINER_SECRET_REPOSITORY, ContainerSecretRepo) def get_encrypted_datum_repository(): """Returns a singleton Encrypted Datum repository instance.""" global _ENCRYPTED_DATUM_REPOSITORY return _get_repository(_ENCRYPTED_DATUM_REPOSITORY, EncryptedDatumRepo) def get_kek_datum_repository(): """Returns a singleton KEK Datum repository instance.""" global _KEK_DATUM_REPOSITORY return _get_repository(_KEK_DATUM_REPOSITORY, KEKDatumRepo) def get_order_plugin_meta_repository(): """Returns a singleton Order-Plugin meta repository instance.""" global _ORDER_PLUGIN_META_REPOSITORY return _get_repository(_ORDER_PLUGIN_META_REPOSITORY, OrderPluginMetadatumRepo) def get_order_barbican_meta_repository(): """Returns a singleton Order-Barbican meta repository instance.""" global _ORDER_BARBICAN_META_REPOSITORY return _get_repository(_ORDER_BARBICAN_META_REPOSITORY, OrderBarbicanMetadatumRepo) def get_order_repository(): """Returns a singleton Order repository instance.""" global _ORDER_REPOSITORY return _get_repository(_ORDER_REPOSITORY, OrderRepo) def get_order_retry_tasks_repository(): """Returns a singleton OrderRetryTask repository instance.""" global _ORDER_RETRY_TASK_REPOSITORY return _get_repository(_ORDER_RETRY_TASK_REPOSITORY, OrderRetryTaskRepo) def get_preferred_ca_repository(): """Returns a singleton Secret repository instance.""" global _PREFERRED_CA_REPOSITORY return _get_repository(_PREFERRED_CA_REPOSITORY, PreferredCertificateAuthorityRepo) def get_project_repository(): """Returns a singleton Project repository instance.""" global _PROJECT_REPOSITORY return _get_repository(_PROJECT_REPOSITORY, ProjectRepo) def get_project_ca_repository(): """Returns a singleton Secret repository instance.""" global _PROJECT_CA_REPOSITORY return _get_repository(_PROJECT_CA_REPOSITORY, ProjectCertificateAuthorityRepo) def get_project_quotas_repository(): """Returns a singleton Project Quotas repository instance.""" global _PROJECT_QUOTAS_REPOSITORY return _get_repository(_PROJECT_QUOTAS_REPOSITORY, ProjectQuotasRepo) def get_secret_acl_repository(): """Returns a singleton Secret ACL repository instance.""" global _SECRET_ACL_REPOSITORY return _get_repository(_SECRET_ACL_REPOSITORY, SecretACLRepo) def get_secret_meta_repository(): """Returns a singleton Secret meta repository instance.""" global _SECRET_META_REPOSITORY return _get_repository(_SECRET_META_REPOSITORY, SecretStoreMetadatumRepo) def get_secret_user_meta_repository(): """Returns a singleton Secret user meta repository instance.""" global 
_SECRET_USER_META_REPOSITORY return _get_repository(_SECRET_USER_META_REPOSITORY, SecretUserMetadatumRepo) def get_secret_repository(): """Returns a singleton Secret repository instance.""" global _SECRET_REPOSITORY return _get_repository(_SECRET_REPOSITORY, SecretRepo) def get_transport_key_repository(): """Returns a singleton Transport Key repository instance.""" global _TRANSPORT_KEY_REPOSITORY return _get_repository(_TRANSPORT_KEY_REPOSITORY, TransportKeyRepo) def get_secret_stores_repository(): """Returns a singleton Secret Stores repository instance.""" global _SECRET_STORES_REPOSITORY return _get_repository(_SECRET_STORES_REPOSITORY, SecretStoresRepo) def get_project_secret_store_repository(): """Returns a singleton Project Secret Store repository instance.""" global _PROJECT_SECRET_STORE_REPOSITORY return _get_repository(_PROJECT_SECRET_STORE_REPOSITORY, ProjectSecretStoreRepo) def _get_repository(global_ref, repo_class): if not global_ref: global_ref = repo_class() return global_ref def _raise_entity_not_found(entity_name, entity_id): raise exception.NotFound(u._("No {entity} found with ID {id}").format( entity=entity_name, id=entity_id)) def _raise_entity_id_not_found(entity_id): raise exception.NotFound(u._("Entity ID {entity_id} not " "found").format(entity_id=entity_id)) def _raise_no_entities_found(entity_name): raise exception.NotFound( u._("No entities of type {entity_name} found").format( entity_name=entity_name)) barbican-6.0.1/barbican/model/models.py0000666000175000017500000014544713311733060020015 0ustar zuulzuul00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
""" Defines database models for Barbican """ import hashlib from oslo_serialization import jsonutils as json from oslo_utils import timeutils import six import sqlalchemy as sa from sqlalchemy.ext import compiler from sqlalchemy.ext import declarative from sqlalchemy import orm from sqlalchemy.orm import collections as col from sqlalchemy import types as sql_types from barbican.common import exception from barbican.common import utils from barbican import i18n as u BASE = declarative.declarative_base() ERROR_REASON_LENGTH = 255 SUB_STATUS_LENGTH = 36 SUB_STATUS_MESSAGE_LENGTH = 255 # Allowed entity states class States(object): PENDING = 'PENDING' ACTIVE = 'ACTIVE' ERROR = 'ERROR' @classmethod def is_valid(cls, state_to_test): """Tests if a state is a valid one.""" return state_to_test in cls.__dict__ class OrderType(object): KEY = 'key' ASYMMETRIC = 'asymmetric' CERTIFICATE = 'certificate' @classmethod def is_valid(cls, order_type): """Tests if a order type is a valid one.""" return order_type in cls.__dict__ class OrderStatus(object): def __init__(self, id, message): self.id = id self.message = message @compiler.compiles(sa.BigInteger, 'sqlite') def compile_big_int_sqlite(type_, compiler, **kw): return 'INTEGER' class JsonBlob(sql_types.TypeDecorator): """JsonBlob is custom type for fields which need to store JSON text.""" impl = sa.Text def process_bind_param(self, value, dialect): if value is not None: return json.dumps(value) return value def process_result_value(self, value, dialect): if value is not None: return json.loads(value) return value class ModelBase(object): """Base class for Nova and Barbican Models.""" __table_args__ = {'mysql_engine': 'InnoDB'} __table_initialized__ = False __protected_attributes__ = { "created_at", "updated_at", "deleted_at", "deleted"} id = sa.Column(sa.String(36), primary_key=True, default=utils.generate_uuid) created_at = sa.Column(sa.DateTime, default=timeutils.utcnow, nullable=False) updated_at = sa.Column(sa.DateTime, default=timeutils.utcnow, nullable=False, onupdate=timeutils.utcnow) deleted_at = sa.Column(sa.DateTime) deleted = sa.Column(sa.Boolean, nullable=False, default=False) status = sa.Column(sa.String(20), nullable=False, default=States.PENDING) def save(self, session=None): """Save this object.""" # import api here to prevent circular dependency problem import barbican.model.repositories session = session or barbican.model.repositories.get_session() # if model is being created ensure that created/updated are the same if self.id is None: self.created_at = timeutils.utcnow() self.updated_at = self.created_at session.add(self) session.flush() def delete(self, session=None): """Delete this object.""" import barbican.model.repositories session = session or barbican.model.repositories.get_session() self._do_delete_children(session) session.delete(self) def _do_delete_children(self, session): """Sub-class hook: delete children relationships.""" pass def update(self, values): """dict.update() behaviour.""" for k, v in values.items(): self[k] = v def __setitem__(self, key, value): setattr(self, key, value) def __getitem__(self, key): return getattr(self, key) def __iter__(self): self._i = iter(orm.object_mapper(self).sa.Columns) return self def next(self): n = next(self._i).name return n, getattr(self, n) def keys(self): return self.__dict__.keys() def values(self): return self.__dict__.values() def items(self): return self.__dict__.items() def to_dict(self): return self.__dict__.copy() def to_dict_fields(self): """Returns a dictionary of just 
the db fields of this entity.""" if self.created_at: created_at = self.created_at.isoformat() else: created_at = self.created_at if self.updated_at: updated_at = self.updated_at.isoformat() else: updated_at = self.updated_at dict_fields = { 'created': created_at, 'updated': updated_at, 'status': self.status } if self.deleted_at: dict_fields['deleted_at'] = self.deleted_at.isoformat() if self.deleted: dict_fields['deleted'] = True dict_fields.update(self._do_extra_dict_fields()) return dict_fields def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {} def _iso_to_datetime(self, expiration): """Convert ISO formatted string to datetime.""" if isinstance(expiration, six.string_types): expiration_iso = timeutils.parse_isotime(expiration.strip()) expiration = timeutils.normalize_time(expiration_iso) return expiration class SoftDeleteMixIn(object): """Mix-in class that adds soft delete functionality.""" def delete(self, session=None): """Delete this object.""" import barbican.model.repositories session = session or barbican.model.repositories.get_session() self.deleted = True self.deleted_at = timeutils.utcnow() self.save(session=session) self._do_delete_children(session) class ContainerSecret(BASE, SoftDeleteMixIn, ModelBase): """Represents an association between a Container and a Secret.""" __tablename__ = 'container_secret' name = sa.Column(sa.String(255), nullable=True) container_id = sa.Column( sa.String(36), sa.ForeignKey('containers.id'), index=True, nullable=False) secret_id = sa.Column( sa.String(36), sa.ForeignKey('secrets.id'), index=True, nullable=False) # Eager load this relationship via 'lazy=False'. container = orm.relationship( 'Container', backref=orm.backref('container_secrets', lazy=False, primaryjoin="and_(ContainerSecret.container_id == " "Container.id, ContainerSecret.deleted!=True)")) secrets = orm.relationship( 'Secret', backref=orm.backref('container_secrets', primaryjoin="and_(ContainerSecret.secret_id == " "Secret.id, ContainerSecret.deleted!=True)")) __table_args__ = (sa.UniqueConstraint('container_id', 'secret_id', 'name', name='_container_secret_name_uc'),) class Project(BASE, SoftDeleteMixIn, ModelBase): """Represents a Project in the datastore. Projects are users that wish to store secret information within Barbican. """ __tablename__ = 'projects' external_id = sa.Column(sa.String(255), unique=True) orders = orm.relationship("Order", backref="project") secrets = orm.relationship("Secret", backref="project") keks = orm.relationship("KEKDatum", backref="project") containers = orm.relationship("Container", backref="project") cas = orm.relationship("ProjectCertificateAuthority", backref="project") project_quotas = orm.relationship("ProjectQuotas", backref="project") def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'external_id': self.external_id} class Secret(BASE, SoftDeleteMixIn, ModelBase): """Represents a Secret in the datastore. Secrets are any information Projects wish to store within Barbican, though the actual encrypted data is stored in one or more EncryptedData entities on behalf of a Secret. 
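
    An illustrative creation sketch (field values are placeholders)::

        secret = Secret({'name': 'db-password',
                         'algorithm': 'aes',
                         'bit_length': 256,
                         'mode': 'cbc',
                         'expiration': '2018-12-31T00:00:00Z'})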
""" __tablename__ = 'secrets' name = sa.Column(sa.String(255)) secret_type = sa.Column(sa.String(255), server_default=utils.SECRET_TYPE_OPAQUE) expiration = sa.Column(sa.DateTime, default=None) algorithm = sa.Column(sa.String(255)) bit_length = sa.Column(sa.Integer) mode = sa.Column(sa.String(255)) creator_id = sa.Column(sa.String(255)) project_id = sa.Column( sa.String(36), sa.ForeignKey('projects.id', name='secrets_project_fk'), index=True, nullable=False) # TODO(jwood): Performance - Consider avoiding full load of all # datum attributes here. This is only being done to support the # building of the list of supported content types when secret # metadata is retrieved. # See barbican.api.resources.py::SecretsResource.on_get() # Eager load this relationship via 'lazy=False'. encrypted_data = orm.relationship("EncryptedDatum", lazy=False) secret_store_metadata = orm.relationship( "SecretStoreMetadatum", collection_class=col.attribute_mapped_collection('key'), backref="secret", cascade="all, delete-orphan") secret_user_metadata = orm.relationship( "SecretUserMetadatum", collection_class=col.attribute_mapped_collection('key'), backref="secret", cascade="all, delete-orphan") def __init__(self, parsed_request=None): """Creates secret from a dict.""" super(Secret, self).__init__() if parsed_request: self.name = parsed_request.get('name') self.secret_type = parsed_request.get( 'secret_type', utils.SECRET_TYPE_OPAQUE) expiration = self._iso_to_datetime(parsed_request.get ('expiration')) self.expiration = expiration self.algorithm = parsed_request.get('algorithm') self.bit_length = parsed_request.get('bit_length') self.mode = parsed_request.get('mode') self.creator_id = parsed_request.get('creator_id') self.project_id = parsed_request.get('project_id') self.status = States.ACTIVE def _do_delete_children(self, session): """Sub-class hook: delete children relationships.""" for k, v in self.secret_store_metadata.items(): v.delete(session) for k, v in self.secret_user_metadata.items(): v.delete(session) for datum in self.encrypted_data: datum.delete(session) for secret_ref in self.container_secrets: session.delete(secret_ref) for secret_acl in self.secret_acls: session.delete(secret_acl) def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" if self.expiration: expiration = self.expiration.isoformat() else: expiration = self.expiration return { 'secret_id': self.id, 'name': self.name, 'secret_type': self.secret_type, 'expiration': expiration, 'algorithm': self.algorithm, 'bit_length': self.bit_length, 'mode': self.mode, 'creator_id': self.creator_id, } class SecretStoreMetadatum(BASE, SoftDeleteMixIn, ModelBase): """Represents Secret Store metadatum for a single key-value pair.""" __tablename__ = "secret_store_metadata" key = sa.Column(sa.String(255), nullable=False) value = sa.Column(sa.String(255), nullable=False) secret_id = sa.Column( sa.String(36), sa.ForeignKey('secrets.id'), index=True, nullable=False) def __init__(self, key, value): super(SecretStoreMetadatum, self).__init__() msg = u._("Must supply non-None {0} argument " "for SecretStoreMetadatum entry.") if key is None: raise exception.MissingArgumentError(msg.format("key")) self.key = key if value is None: raise exception.MissingArgumentError(msg.format("value")) self.value = value def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return { 'key': self.key, 'value': self.value } class SecretUserMetadatum(BASE, SoftDeleteMixIn, ModelBase): """Represents Secret user metadatum for 
a single key-value pair.""" __tablename__ = "secret_user_metadata" key = sa.Column(sa.String(255), nullable=False) value = sa.Column(sa.String(255), nullable=False) secret_id = sa.Column( sa.String(36), sa.ForeignKey('secrets.id'), index=True, nullable=False) __table_args__ = (sa.UniqueConstraint('secret_id', 'key', name='_secret_key_uc'),) def __init__(self, key, value): super(SecretUserMetadatum, self).__init__() msg = u._("Must supply non-None {0} argument " "for SecretUserMetadatum entry.") if key is None: raise exception.MissingArgumentError(msg.format("key")) self.key = key if value is None: raise exception.MissingArgumentError(msg.format("value")) self.value = value def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return { 'key': self.key, 'value': self.value } class EncryptedDatum(BASE, SoftDeleteMixIn, ModelBase): """Represents the encrypted data for a Secret.""" __tablename__ = 'encrypted_data' content_type = sa.Column(sa.String(255)) secret_id = sa.Column( sa.String(36), sa.ForeignKey('secrets.id'), index=True, nullable=False) kek_id = sa.Column( sa.String(36), sa.ForeignKey('kek_data.id'), index=True, nullable=False) # TODO(jwood) Why LargeBinary on Postgres (BYTEA) not work correctly? cypher_text = sa.Column(sa.Text) kek_meta_extended = sa.Column(sa.Text) # Eager load this relationship via 'lazy=False'. kek_meta_project = orm.relationship("KEKDatum", lazy=False) def __init__(self, secret=None, kek_datum=None): """Creates encrypted datum from a secret and KEK metadata.""" super(EncryptedDatum, self).__init__() if secret: self.secret_id = secret.id if kek_datum: self.kek_id = kek_datum.id self.kek_meta_project = kek_datum self.status = States.ACTIVE def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'content_type': self.content_type} class KEKDatum(BASE, SoftDeleteMixIn, ModelBase): """Key encryption key (KEK) metadata model. Represents the key encryption key (KEK) metadata associated with a process used to encrypt/decrypt secret information. When a secret is encrypted, in addition to the cypher text, the Barbican encryption process produces a KEK metadata object. The cypher text is stored via the EncryptedDatum model above, whereas the metadata is stored within this model. Decryption processes utilize this KEK metadata to decrypt the associated cypher text. Note that this model is intended to be agnostic to the specific means used to encrypt/decrypt the secret information, so please do not place vendor- specific attributes here. Note as well that each Project will have at most one 'active=True' KEKDatum instance at a time, representing the most recent KEK metadata instance to use for encryption processes performed on behalf of the Project. KEKDatum instances that are 'active=False' are associated to previously used encryption processes for the Project, that eventually should be rotated and deleted with the Project's active KEKDatum. 
""" __tablename__ = 'kek_data' plugin_name = sa.Column(sa.String(255), nullable=False) kek_label = sa.Column(sa.String(255)) project_id = sa.Column( sa.String(36), sa.ForeignKey('projects.id', name='kek_data_project_fk'), index=True, nullable=False) active = sa.Column(sa.Boolean, nullable=False, default=True) bind_completed = sa.Column(sa.Boolean, nullable=False, default=False) algorithm = sa.Column(sa.String(255)) bit_length = sa.Column(sa.Integer) mode = sa.Column(sa.String(255)) plugin_meta = sa.Column(sa.Text) def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'algorithm': self.algorithm} class Order(BASE, SoftDeleteMixIn, ModelBase): """Represents an Order in the datastore. Orders are requests for Barbican to generate secrets, ranging from symmetric, asymmetric keys to automated requests to Certificate Authorities to generate SSL certificates. """ __tablename__ = 'orders' type = sa.Column(sa.String(255), nullable=False, default='key') project_id = sa.Column( sa.String(36), sa.ForeignKey('projects.id', name='orders_project_fk'), index=True, nullable=False) error_status_code = sa.Column(sa.String(16)) error_reason = sa.Column(sa.String(ERROR_REASON_LENGTH)) meta = sa.Column(JsonBlob(), nullable=True) secret_id = sa.Column(sa.String(36), sa.ForeignKey('secrets.id'), index=True, nullable=True) container_id = sa.Column(sa.String(36), sa.ForeignKey('containers.id'), index=True, nullable=True) sub_status = sa.Column(sa.String(SUB_STATUS_LENGTH), nullable=True) sub_status_message = sa.Column(sa.String(SUB_STATUS_MESSAGE_LENGTH), nullable=True) creator_id = sa.Column(sa.String(255)) order_plugin_metadata = orm.relationship( "OrderPluginMetadatum", collection_class=col.attribute_mapped_collection('key'), backref="order", cascade="all, delete-orphan") order_barbican_metadata = orm.relationship( "OrderBarbicanMetadatum", collection_class=col.attribute_mapped_collection('key'), backref="order", cascade="all, delete-orphan") def __init__(self, parsed_request=None): """Creates a Order entity from a dict.""" super(Order, self).__init__() if parsed_request: self.type = parsed_request.get('type') self.meta = parsed_request.get('meta') self.status = States.ACTIVE self.sub_status = parsed_request.get('sub_status') self.sub_status_message = parsed_request.get( 'sub_status_message') self.creator_id = parsed_request.get('creator_id') def set_error_reason_safely(self, error_reason_raw): """Ensure error reason does not raise database attribute exceptions.""" self.error_reason = error_reason_raw[:ERROR_REASON_LENGTH] def set_sub_status_safely(self, sub_status_raw): """Ensure sub-status does not raise database attribute exceptions.""" self.sub_status = sub_status_raw[:SUB_STATUS_LENGTH] def set_sub_status_message_safely(self, sub_status_message_raw): """Ensure status message doesn't raise database attrib. 
exceptions.""" self.sub_status_message = sub_status_message_raw[ :SUB_STATUS_MESSAGE_LENGTH ] def _do_delete_children(self, session): """Sub-class hook: delete children relationships.""" for k, v in self.order_plugin_metadata.items(): v.delete(session) for k, v in self.order_barbican_metadata.items(): v.delete(session) def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" ret = { 'type': self.type, 'meta': self.meta, 'order_id': self.id } if self.secret_id: ret['secret_id'] = self.secret_id if self.container_id: ret['container_id'] = self.container_id if self.error_status_code: ret['error_status_code'] = self.error_status_code if self.error_reason: ret['error_reason'] = self.error_reason if self.sub_status: ret['sub_status'] = self.sub_status if self.sub_status_message: ret['sub_status_message'] = self.sub_status_message if self.creator_id: ret['creator_id'] = self.creator_id return ret class OrderPluginMetadatum(BASE, SoftDeleteMixIn, ModelBase): """Represents Order plugin metadatum for a single key-value pair. This entity is used to store plugin-specific metadata on behalf of an Order instance. """ __tablename__ = "order_plugin_metadata" order_id = sa.Column(sa.String(36), sa.ForeignKey('orders.id'), index=True, nullable=False) key = sa.Column(sa.String(255), nullable=False) value = sa.Column(sa.String(255), nullable=False) def __init__(self, key, value): super(OrderPluginMetadatum, self).__init__() msg = u._("Must supply non-None {0} argument " "for OrderPluginMetadatum entry.") if key is None: raise exception.MissingArgumentError(msg.format("key")) self.key = key if value is None: raise exception.MissingArgumentError(msg.format("value")) self.value = value def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'key': self.key, 'value': self.value} class OrderBarbicanMetadatum(BASE, SoftDeleteMixIn, ModelBase): """Represents Order barbican metadatum for a single key-value pair. This entity is used to store barbican-specific metadata on behalf of an Order instance. This is data that is stored by the server to help process the order through its life cycle, but which is not in the original request. 
""" __tablename__ = "order_barbican_metadata" order_id = sa.Column(sa.String(36), sa.ForeignKey('orders.id'), index=True, nullable=False) key = sa.Column(sa.String(255), nullable=False) value = sa.Column(sa.Text, nullable=False) def __init__(self, key, value): super(OrderBarbicanMetadatum, self).__init__() msg = u._("Must supply non-None {0} argument " "for OrderBarbicanMetadatum entry.") if key is None: raise exception.MissingArgumentError(msg.format("key")) self.key = key if value is None: raise exception.MissingArgumentError(msg.format("value")) self.value = value def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'key': self.key, 'value': self.value} class OrderRetryTask(BASE, SoftDeleteMixIn, ModelBase): __tablename__ = "order_retry_tasks" __table_args__ = {"mysql_engine": "InnoDB"} __table_initialized__ = False id = sa.Column( sa.String(36), primary_key=True, default=utils.generate_uuid, ) order_id = sa.Column( sa.String(36), sa.ForeignKey("orders.id"), index=True, nullable=False, ) retry_task = sa.Column(sa.Text, nullable=False) retry_at = sa.Column(sa.DateTime, default=None, nullable=False) retry_args = sa.Column(JsonBlob(), nullable=False) retry_kwargs = sa.Column(JsonBlob(), nullable=False) retry_count = sa.Column(sa.Integer, nullable=False, default=0) class Container(BASE, SoftDeleteMixIn, ModelBase): """Represents a Container for Secrets in the datastore. Containers store secret references. Containers are owned by Projects. Containers can be generic or have a predefined type. Predefined typed containers allow users to store structured key relationship inside Barbican. """ __tablename__ = 'containers' name = sa.Column(sa.String(255)) type = sa.Column(sa.Enum('generic', 'rsa', 'dsa', 'certificate', name='container_types')) project_id = sa.Column( sa.String(36), sa.ForeignKey('projects.id', name='containers_project_fk'), index=True, nullable=False) consumers = sa.orm.relationship("ContainerConsumerMetadatum") creator_id = sa.Column(sa.String(255)) def __init__(self, parsed_request=None): """Creates a Container entity from a dict.""" super(Container, self).__init__() if parsed_request: self.name = parsed_request.get('name') self.type = parsed_request.get('type') self.status = States.ACTIVE self.creator_id = parsed_request.get('creator_id') secret_refs = parsed_request.get('secret_refs') if secret_refs: for secret_ref in parsed_request.get('secret_refs'): container_secret = ContainerSecret() container_secret.name = secret_ref.get('name') # TODO(hgedikli) move this into a common location # TODO(hgedikli) validate provided url # TODO(hgedikli) parse out secret_id with regex secret_id = secret_ref.get('secret_ref') if secret_id.endswith('/'): secret_id = secret_id.rsplit('/', 2)[1] elif '/' in secret_id: secret_id = secret_id.rsplit('/', 1)[1] else: secret_id = secret_id container_secret.secret_id = secret_id self.container_secrets.append(container_secret) def _do_delete_children(self, session): """Sub-class hook: delete children relationships.""" for container_secret in self.container_secrets: session.delete(container_secret) for container_acl in self.container_acls: session.delete(container_acl) def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'container_id': self.id, 'name': self.name, 'type': self.type, 'creator_id': self.creator_id, 'secret_refs': [ { 'secret_id': container_secret.secret_id, 'name': container_secret.name if hasattr(container_secret, 'name') else None } for container_secret in 
self.container_secrets], 'consumers': [ { 'name': consumer.name, 'URL': consumer.URL } for consumer in self.consumers if not consumer.deleted ]} class ContainerConsumerMetadatum(BASE, SoftDeleteMixIn, ModelBase): """Stores Consumer Registrations for Containers in the datastore. Services can register interest in Containers. Services will provide a type and a URL for the object that is using the Container. """ __tablename__ = 'container_consumer_metadata' container_id = sa.Column(sa.String(36), sa.ForeignKey('containers.id'), index=True, nullable=False) project_id = sa.Column(sa.String(36), sa.ForeignKey('projects.id'), index=True, nullable=True) name = sa.Column(sa.String(36)) URL = sa.Column(sa.String(255)) data_hash = sa.Column(sa.CHAR(64)) __table_args__ = ( sa.UniqueConstraint('data_hash', name='_consumer_hashed_container_name_url_uc'), sa.Index('values_index', 'container_id', 'name', 'URL') ) def __init__(self, container_id, project_id, parsed_request): """Registers a Consumer to a Container.""" super(ContainerConsumerMetadatum, self).__init__() # TODO(john-wood-w) This class should really be immutable due to the # data_hash attribute. if container_id and parsed_request: self.container_id = container_id self.project_id = project_id self.name = parsed_request.get('name') self.URL = parsed_request.get('URL') hash_text = ''.join((self.container_id, self.name, self.URL)) self.data_hash = hashlib.sha256(hash_text. encode('utf-8')).hexdigest() self.status = States.ACTIVE def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'name': self.name, 'URL': self.URL} class TransportKey(BASE, SoftDeleteMixIn, ModelBase): """Transport Key model for wrapping secrets in transit Represents the transport key used for wrapping secrets in transit to/from clients when storing/retrieving secrets. """ __tablename__ = 'transport_keys' plugin_name = sa.Column(sa.String(255), nullable=False) transport_key = sa.Column(sa.Text, nullable=False) def __init__(self, plugin_name, transport_key): """Creates transport key entity.""" super(TransportKey, self).__init__() msg = u._("Must supply non-None {0} argument for TransportKey entry.") if plugin_name is None: raise exception.MissingArgumentError(msg.format("plugin_name")) self.plugin_name = plugin_name if transport_key is None: raise exception.MissingArgumentError(msg.format("transport_key")) self.transport_key = transport_key self.status = States.ACTIVE def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'transport_key_id': self.id, 'plugin_name': self.plugin_name} class CertificateAuthority(BASE, ModelBase): """CertificateAuthority model to specify the CAs available to Barbican Represents the CAs available for certificate issuance to Barbican. 
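    Illustrative construction (a sketch; the plugin values are
    hypothetical, and any keys beyond the known ones become
    CertificateAuthorityMetadatum rows in ca_meta, per __init__ below)::

        ca = CertificateAuthority({
            'plugin_name': 'snakeoil_ca',
            'plugin_ca_id': 'snakeoil-root',
            'expiration': '2020-01-01T00:00:00',
            'name': 'Snakeoil Root CA',  # stored as ca_meta['name']
        })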
""" __tablename__ = 'certificate_authorities' plugin_name = sa.Column(sa.String(255), nullable=False) plugin_ca_id = sa.Column(sa.Text, nullable=False) expiration = sa.Column(sa.DateTime, default=None) creator_id = sa.Column(sa.String(255), nullable=True) project_id = sa.Column( sa.String(36), sa.ForeignKey('projects.id', name='cas_project_fk'), nullable=True) ca_meta = orm.relationship( 'CertificateAuthorityMetadatum', collection_class=col.attribute_mapped_collection('key'), backref="ca", cascade="all, delete-orphan" ) def __init__(self, parsed_ca_in): """Creates certificate authority entity.""" super(CertificateAuthority, self).__init__() msg = u._("Must supply Non-None {0} argument " "for CertificateAuthority entry.") parsed_ca = dict(parsed_ca_in) plugin_name = parsed_ca.pop('plugin_name', None) if plugin_name is None: raise exception.MissingArgumentError(msg.format("plugin_name")) self.plugin_name = plugin_name plugin_ca_id = parsed_ca.pop('plugin_ca_id', None) if plugin_ca_id is None: raise exception.MissingArgumentError(msg.format("plugin_ca_id")) self.plugin_ca_id = plugin_ca_id expiration = parsed_ca.pop('expiration', None) self.expiration = self._iso_to_datetime(expiration) creator_id = parsed_ca.pop('creator_id', None) if creator_id is not None: self.creator_id = creator_id project_id = parsed_ca.pop('project_id', None) if project_id is not None: self.project_id = project_id for key in parsed_ca: meta = CertificateAuthorityMetadatum(key, parsed_ca[key]) self.ca_meta[key] = meta self.status = States.ACTIVE def _do_delete_children(self, session): """Sub-class hook: delete children relationships.""" for k, v in self.ca_meta.items(): v.delete(session) def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" if self.expiration: expiration = self.expiration.isoformat() else: expiration = None return { 'ca_id': self.id, 'plugin_name': self.plugin_name, 'plugin_ca_id': self.plugin_ca_id, 'expiration': expiration, 'meta': [ { meta['key']: meta['value'] } for key, meta in self.ca_meta.items() ] } class CertificateAuthorityMetadatum(BASE, ModelBase): """Represents CA metadatum for a single key-value pair.""" __tablename__ = "certificate_authority_metadata" key = sa.Column(sa.String(255), index=True, nullable=False) value = sa.Column(sa.Text, nullable=False) ca_id = sa.Column( sa.String(36), sa.ForeignKey('certificate_authorities.id'), index=True, nullable=False) __table_args__ = (sa.UniqueConstraint( 'ca_id', 'key', name='_certificate_authority_metadatum_uc'),) def __init__(self, key, value): super(CertificateAuthorityMetadatum, self).__init__() msg = u._("Must supply non-None {0} argument " "for CertificateAuthorityMetadatum entry.") if key is None: raise exception.MissingArgumentError(msg.format("key")) self.key = key if value is None: raise exception.MissingArgumentError(msg.format("value")) self.value = value def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return { 'key': self.key, 'value': self.value } class ProjectCertificateAuthority(BASE, ModelBase): """Stores CAs available for a project. Admins can define a set of CAs that are available for use in a particular project. There can be multiple entries for any given project. 
""" __tablename__ = 'project_certificate_authorities' project_id = sa.Column(sa.String(36), sa.ForeignKey('projects.id'), index=True, nullable=False) ca_id = sa.Column(sa.String(36), sa.ForeignKey('certificate_authorities.id'), index=True, nullable=False) ca = orm.relationship("CertificateAuthority", backref="project_cas") __table_args__ = (sa.UniqueConstraint( 'project_id', 'ca_id', name='_project_certificate_authority_uc'),) def __init__(self, project_id, ca_id): """Registers a Consumer to a Container.""" super(ProjectCertificateAuthority, self).__init__() msg = u._("Must supply non-None {0} argument " "for ProjectCertificateAuthority entry.") if project_id is None: raise exception.MissingArgumentError(msg.format("project_id")) self.project_id = project_id if ca_id is None: raise exception.MissingArgumentError(msg.format("ca_id")) self.ca_id = ca_id self.status = States.ACTIVE def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'project_id': self.project_id, 'ca_id': self.ca_id} class PreferredCertificateAuthority(BASE, ModelBase): """Stores preferred CAs for any project. Admins can define a set of CAs available for issuance requests for any project in the ProjectCertificateAuthority table.. """ __tablename__ = 'preferred_certificate_authorities' project_id = sa.Column(sa.String(36), sa.ForeignKey('projects.id'), index=True, unique=True, nullable=False) ca_id = sa.Column(sa.String(36), sa.ForeignKey( 'certificate_authorities.id', name='preferred_certificate_authorities_fk'), index=True, nullable=False) project = orm.relationship('Project', backref=orm.backref('preferred_ca'), uselist=False) ca = orm.relationship('CertificateAuthority', backref=orm.backref('preferred_ca')) def __init__(self, project_id, ca_id): """Registers a Consumer to a Container.""" super(PreferredCertificateAuthority, self).__init__() msg = u._("Must supply non-None {0} argument " "for PreferredCertificateAuthority entry.") if project_id is None: raise exception.MissingArgumentError(msg.format("project_id")) self.project_id = project_id if ca_id is None: raise exception.MissingArgumentError(msg.format("ca_id")) self.ca_id = ca_id self.status = States.ACTIVE def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'project_id': self.project_id, 'ca_id': self.ca_id} class SecretACL(BASE, ModelBase): """Stores Access Control List (ACL) for a secret. Class to define whitelist of user ids who are allowed specific operation on a secret. List of user ids is defined via SecretACLUser via acl_users association. Creator_only flag helps in making a secret private for non-admin project users who may have access otherwise. SecretACL deletes are not soft-deletes. 
""" __tablename__ = 'secret_acls' secret_id = sa.Column(sa.String(36), sa.ForeignKey('secrets.id'), index=True, nullable=False) operation = sa.Column(sa.String(255), nullable=False) project_access = sa.Column(sa.Boolean, nullable=False, default=True) secret = orm.relationship( 'Secret', backref=orm.backref('secret_acls', lazy=False)) acl_users = orm.relationship( 'SecretACLUser', backref=orm.backref('secret_acl', lazy=False), cascade="all, delete-orphan") __table_args__ = (sa.UniqueConstraint( 'secret_id', 'operation', name='_secret_acl_operation_uc'),) def __init__(self, secret_id, operation, project_access=None, user_ids=None): """Creates secret ACL entity.""" super(SecretACL, self).__init__() msg = u._("Must supply non-None {0} argument for SecretACL entry.") if secret_id is None: raise exception.MissingArgumentError(msg.format("secret_id")) self.secret_id = secret_id if operation is None: raise exception.MissingArgumentError(msg.format("operation")) self.operation = operation if project_access is not None: self.project_access = project_access self.status = States.ACTIVE if user_ids is not None and isinstance(user_ids, list): userids = set(user_ids) # remove duplicate if any for user_id in userids: acl_user = SecretACLUser(self.id, user_id) self.acl_users.append(acl_user) def _do_delete_children(self, session): """Sub-class hook: delete children relationships.""" for acl_user in self.acl_users: acl_user.delete(session) def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields. Adds non-deleted acl related users from relationship if there. """ users = [acl_user.user_id for acl_user in self.acl_users if not acl_user.deleted] fields = {'acl_id': self.id, 'secret_id': self.secret_id, 'operation': self.operation, 'project_access': self.project_access} if users: fields['users'] = users return fields class ContainerACL(BASE, ModelBase): """Stores Access Control List (ACL) for a container. Class to define whitelist of user ids who are allowed specific operation on a container. List of user ids is defined in ContainerACLUser via acl_users association. Creator_only flag helps in making a container private for non-admin project users who may have access otherwise. ContainerACL deletes are not soft-deletes. 
""" __tablename__ = 'container_acls' container_id = sa.Column(sa.String(36), sa.ForeignKey('containers.id'), index=True, nullable=False) operation = sa.Column(sa.String(255), nullable=False) project_access = sa.Column(sa.Boolean, nullable=False, default=True) container = orm.relationship( 'Container', backref=orm.backref('container_acls', lazy=False)) acl_users = orm.relationship( 'ContainerACLUser', backref=orm.backref('container_acl', lazy=False), cascade="all, delete-orphan") __table_args__ = (sa.UniqueConstraint( 'container_id', 'operation', name='_container_acl_operation_uc'),) def __init__(self, container_id, operation, project_access=None, user_ids=None): """Creates container ACL entity.""" super(ContainerACL, self).__init__() msg = u._("Must supply non-None {0} argument for ContainerACL entry.") if container_id is None: raise exception.MissingArgumentError(msg.format("container_id")) self.container_id = container_id if operation is None: raise exception.MissingArgumentError(msg.format("operation")) self.operation = operation if project_access is not None: self.project_access = project_access self.status = States.ACTIVE if user_ids is not None and isinstance(user_ids, list): userids = set(user_ids) # remove duplicate if any for user_id in userids: acl_user = ContainerACLUser(self.id, user_id) self.acl_users.append(acl_user) def _do_delete_children(self, session): """Sub-class hook: delete children relationships.""" for acl_user in self.acl_users: acl_user.delete(session) def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields. Adds non-deleted acl related users from relationship if there. """ users = [acl_user.user_id for acl_user in self.acl_users if not acl_user.deleted] fields = {'acl_id': self.id, 'container_id': self.container_id, 'operation': self.operation, 'project_access': self.project_access} if users: fields['users'] = users return fields class SecretACLUser(BASE, ModelBase): """Stores user id for a secret ACL. This class provides way to store list of users associated with a specific ACL operation. SecretACLUser deletes are not soft-deletes. """ __tablename__ = 'secret_acl_users' acl_id = sa.Column(sa.String(36), sa.ForeignKey('secret_acls.id'), index=True, nullable=False) user_id = sa.Column(sa.String(255), nullable=False) __table_args__ = (sa.UniqueConstraint( 'acl_id', 'user_id', name='_secret_acl_user_uc'),) def __init__(self, acl_id, user_id): """Creates secret ACL user entity.""" super(SecretACLUser, self).__init__() msg = u._("Must supply non-None {0} argument for SecretACLUser entry.") self.acl_id = acl_id if user_id is None: raise exception.MissingArgumentError(msg.format("user_id")) self.user_id = user_id self.status = States.ACTIVE def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'acl_id': self.acl_id, 'user_id': self.user_id} class ContainerACLUser(BASE, ModelBase): """Stores user id for a container ACL. This class provides way to store list of users associated with a specific ACL operation. ContainerACLUser deletes are not soft-deletes. 
""" __tablename__ = 'container_acl_users' acl_id = sa.Column(sa.String(36), sa.ForeignKey('container_acls.id'), index=True, nullable=False) user_id = sa.Column(sa.String(255), nullable=False) __table_args__ = (sa.UniqueConstraint( 'acl_id', 'user_id', name='_container_acl_user_uc'),) def __init__(self, acl_id, user_id): """Creates container ACL user entity.""" super(ContainerACLUser, self).__init__() msg = u._("Must supply non-None {0} argument for ContainerACLUser " "entry.") self.acl_id = acl_id if user_id is None: raise exception.MissingArgumentError(msg.format("user_id")) self.user_id = user_id self.status = States.ACTIVE def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'acl_id': self.acl_id, 'user_id': self.user_id} class ProjectQuotas(BASE, ModelBase): """Stores Project Quotas. Class to define project specific resource quotas. Project quota deletes are not soft-deletes. """ __tablename__ = 'project_quotas' project_id = sa.Column( sa.String(36), sa.ForeignKey('projects.id', name='project_quotas_fk'), index=True, nullable=False) secrets = sa.Column(sa.Integer, nullable=True) orders = sa.Column(sa.Integer, nullable=True) containers = sa.Column(sa.Integer, nullable=True) consumers = sa.Column(sa.Integer, nullable=True) cas = sa.Column(sa.Integer, nullable=True) def __init__(self, project_id=None, parsed_project_quotas=None): """Creates Project Quotas entity from a project and a dict. :param project_id: the internal id of the project with quotas :param parsed_project_quotas: a dict with the keys matching the resources for which quotas are to be set, and the values containing the quota value to be set for this project and that resource. :return: None """ super(ProjectQuotas, self).__init__() msg = u._("Must supply non-None {0} argument for ProjectQuotas entry.") if project_id is None: raise exception.MissingArgumentError(msg.format("project_id")) self.project_id = project_id if parsed_project_quotas is None: self.secrets = None self.orders = None self.containers = None self.consumers = None self.cas = None else: self.secrets = parsed_project_quotas.get('secrets') self.orders = parsed_project_quotas.get('orders') self.containers = parsed_project_quotas.get('containers') self.consumers = parsed_project_quotas.get('consumers') self.cas = parsed_project_quotas.get('cas') def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" ret = { 'project_id': self.project_id, } if self.secrets: ret['secrets'] = self.secrets if self.orders: ret['orders'] = self.orders if self.containers: ret['containers'] = self.containers if self.consumers: ret['consumers'] = self.consumers if self.cas: ret['cas'] = self.cas return ret class SecretStores(BASE, ModelBase): """List of secret stores defined via service configuration. This class provides a list of secret stores entities with their respective secret store plugin and crypto plugin names. SecretStores deletes are NOT soft-deletes. 
""" __tablename__ = 'secret_stores' store_plugin = sa.Column(sa.String(255), nullable=False) crypto_plugin = sa.Column(sa.String(255), nullable=True) global_default = sa.Column(sa.Boolean, nullable=False, default=False) name = sa.Column(sa.String(255), nullable=False) __table_args__ = (sa.UniqueConstraint( 'store_plugin', 'crypto_plugin', name='_secret_stores_plugin_names_uc'), sa.UniqueConstraint('name', name='_secret_stores_name_uc'),) def __init__(self, name, store_plugin, crypto_plugin=None, global_default=None): """Creates secret store entity.""" super(SecretStores, self).__init__() msg = u._("Must supply non-Blank {0} argument for SecretStores entry.") if not name: raise exception.MissingArgumentError(msg.format("name")) if not store_plugin: raise exception.MissingArgumentError(msg.format("store_plugin")) self.store_plugin = store_plugin self.name = name self.crypto_plugin = crypto_plugin if global_default is not None: self.global_default = global_default self.status = States.ACTIVE def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'secret_store_id': self.id, 'store_plugin': self.store_plugin, 'crypto_plugin': self.crypto_plugin, 'global_default': self.global_default, 'name': self.name} class ProjectSecretStore(BASE, ModelBase): """Stores secret store to be used for new project secrets. This class maintains secret store and project mapping so that new project secret entries uses it as plugin backend. ProjectSecretStores deletes are NOT soft-deletes. """ __tablename__ = 'project_secret_store' secret_store_id = sa.Column(sa.String(36), sa.ForeignKey('secret_stores.id'), index=True, nullable=False) project_id = sa.Column(sa.String(36), sa.ForeignKey('projects.id'), index=True, nullable=False) secret_store = orm.relationship("SecretStores", backref="project_store") project = orm.relationship('Project', backref=orm.backref('preferred_secret_store')) __table_args__ = (sa.UniqueConstraint( 'project_id', name='_project_secret_store_project_uc'),) def __init__(self, project_id, secret_store_id): """Creates project secret store mapping entity.""" super(ProjectSecretStore, self).__init__() msg = u._("Must supply non-None {0} argument for ProjectSecretStore " " entry.") if not project_id: raise exception.MissingArgumentError(msg.format("project_id")) self.project_id = project_id if not secret_store_id: raise exception.MissingArgumentError(msg.format("secret_store_id")) self.secret_store_id = secret_store_id self.status = States.ACTIVE def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'secret_store_id': self.secret_store_id, 'project_id': self.project_id} barbican-6.0.1/barbican/i18n.py0000666000175000017500000000144013311733060016171 0ustar zuulzuul00000000000000# Copyright 2010-2011 OpenStack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import oslo_i18n as i18n _translators = i18n.TranslatorFactory(domain='barbican') # The translation function using the well-known name "_" _ = _translators.primary barbican-6.0.1/barbican/api/0000775000175000017500000000000013311733364015617 5ustar zuulzuul00000000000000barbican-6.0.1/barbican/api/app.wsgi0000666000175000017500000000164013311733060017266 0ustar zuulzuul00000000000000# -*- mode: python -*- # # Copyright 2016 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Use this file for deploying the API under mod_wsgi. See http://pecan.readthedocs.org/en/latest/deployment.html for details. NOTE(mtreinish): This wsgi script is deprecated since the wsgi app is now exposed as an entrypoint via barbican-wsgi-api """ from barbican.api import app application = app.get_api_wsgi_script() barbican-6.0.1/barbican/api/middleware/0000775000175000017500000000000013311733364017734 5ustar zuulzuul00000000000000barbican-6.0.1/barbican/api/middleware/__init__.py0000666000175000017500000000564213311733060022047 0ustar zuulzuul00000000000000# Copyright (c) 2013-2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Barbican middleware modules. """ import sys import webob.dec from barbican.common import utils LOG = utils.getLogger(__name__) class Middleware(object): """Base WSGI middleware wrapper These classes require an application to be initialized that will be called next. By default the middleware will simply call its wrapped app, or you can override __call__ to customize its behavior. """ def __init__(self, application): self.application = application @classmethod def factory(cls, global_conf, **local_conf): def filter(app): return cls(app) return filter def process_request(self, req): """Called on each request. If this returns None, the next application down the stack will be executed. If it returns a response then that response will be returned and execution will stop here. """ return None def process_response(self, response): """Do whatever you'd like to the response.""" return response @webob.dec.wsgify def __call__(self, req): response = self.process_request(req) if response: return response response = req.get_response(self.application) response.request = req return self.process_response(response) # Brought over from an OpenStack project class Debug(Middleware): """Debug helper class This class can be inserted into any WSGI application chain to get information about the request and response. 
""" @webob.dec.wsgify def __call__(self, req): LOG.debug(("*" * 40) + " REQUEST ENVIRON") for key, value in req.environ.items(): LOG.debug('%s=%s', key, value) LOG.debug(' ') resp = req.get_response(self.application) LOG.debug(("*" * 40) + " RESPONSE HEADERS") for (key, value) in resp.headers.items(): LOG.debug('%s=%s', key, value) LOG.debug(' ') resp.app_iter = self.print_generator(resp.app_iter) return resp @staticmethod def print_generator(app_iter): """Iterator that prints the contents of a wrapper string iterator.""" LOG.debug(("*" * 40) + " BODY") for part in app_iter: sys.stdout.write(part) sys.stdout.flush() yield part LOG.debug(' ') barbican-6.0.1/barbican/api/middleware/simple.py0000666000175000017500000000211213311733060021566 0ustar zuulzuul00000000000000# Copyright 2011 OpenStack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ A filter middleware that just outputs to logs, for instructive/sample purposes only. """ from barbican.api import middleware from barbican.common import utils LOG = utils.getLogger(__name__) class SimpleFilter(middleware.Middleware): def __init__(self, app): super(SimpleFilter, self).__init__(app) def process_request(self, req): """Just announce we have been called.""" LOG.debug("Calling SimpleFilter") return None barbican-6.0.1/barbican/api/middleware/context.py0000666000175000017500000001325613311733072021777 0ustar zuulzuul00000000000000# Copyright 2011-2012 OpenStack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import webob.exc from barbican.api import middleware as mw from barbican.common import config from barbican.common import utils import barbican.context from barbican import i18n as u LOG = utils.getLogger(__name__) CONF = config.CONF class BaseContextMiddleware(mw.Middleware): def process_request(self, req): request_id = req.headers.get('x-openstack-request-id') if not request_id: request_id = 'req-' + utils.generate_uuid() setattr(req, 'request_id', request_id) def process_response(self, resp): resp.headers['x-openstack-request-id'] = resp.request.request_id LOG.info('Processed request: %(status)s - %(method)s %(url)s', {"status": resp.status, "method": resp.request.method, "url": resp.request.url}) return resp class ContextMiddleware(BaseContextMiddleware): def __init__(self, app): super(ContextMiddleware, self).__init__(app) def process_request(self, req): """Convert authentication information into a request context Generate a barbican.context.RequestContext object from the available authentication headers and store on the 'context' attribute of the req object. :param req: wsgi request object that will be given the context object :raises webob.exc.HTTPUnauthorized: when value of the X-Identity-Status header is not 'Confirmed' and anonymous access is disallowed """ super(ContextMiddleware, self).process_request(req) if req.headers.get('X-Identity-Status') == 'Confirmed': req.context = self._get_authenticated_context(req) elif CONF.allow_anonymous_access: req.context = self._get_anonymous_context() LOG.debug("==== Inserted barbican unauth " "request context: %s ====", req.context.to_dict()) else: raise webob.exc.HTTPUnauthorized() # Ensure that down wind mw.Middleware/app can see this context. req.environ['barbican.context'] = req.context def _get_anonymous_context(self): kwargs = { 'user': None, 'tenant': None, 'is_admin': False, 'read_only': True, } return barbican.context.RequestContext(**kwargs) def _get_authenticated_context(self, req): # NOTE(bcwaldon): X-Roles is a csv string, but we need to parse # it into a list to be useful roles_header = req.headers.get('X-Roles', '') roles = [r.strip().lower() for r in roles_header.split(',')] # NOTE(bcwaldon): This header is deprecated in favor of X-Auth-Token # NOTE(mkbhanda): keeping this just-in-case for swift deprecated_token = req.headers.get('X-Storage-Token') kwargs = { 'auth_token': req.headers.get('X-Auth-Token', deprecated_token), 'user': req.headers.get('X-User-Id'), 'project': req.headers.get('X-Project-Id'), 'roles': roles, 'is_admin': CONF.admin_role.strip().lower() in roles, 'request_id': req.request_id } if req.headers.get('X-Domain-Id'): kwargs['domain'] = req.headers['X-Domain-Id'] if req.headers.get('X-User-Domain-Id'): kwargs['user_domain'] = req.headers['X-User-Domain-Id'] if req.headers.get('X-Project-Domain-Id'): kwargs['project_domain'] = req.headers['X-Project-Domain-Id'] return barbican.context.RequestContext(**kwargs) class UnauthenticatedContextMiddleware(BaseContextMiddleware): def _get_project_id_from_header(self, req): project_id = req.headers.get('X-Project-Id') if not project_id: accept_header = req.headers.get('Accept') if not accept_header: req.headers['Accept'] = 'text/plain' raise webob.exc.HTTPBadRequest(detail=u._('Missing X-Project-Id')) return project_id def process_request(self, req): """Create a context without an authorized user.""" super(UnauthenticatedContextMiddleware, self).process_request(req) project_id = self._get_project_id_from_header(req) config_admin_role = CONF.admin_role.strip().lower() 
roles_header = req.headers.get('X-Roles', '') roles = [r.strip().lower() for r in roles_header.split(',') if r] # If a role wasn't specified we default to admin if not roles: roles = [config_admin_role] kwargs = { 'user': req.headers.get('X-User-Id'), 'domain': req.headers.get('X-Domain-Id'), 'user_domain': req.headers.get('X-User-Domain-Id'), 'project_domain': req.headers.get('X-Project-Domain-Id'), 'project': project_id, 'roles': roles, 'is_admin': config_admin_role in roles, 'request_id': req.request_id } context = barbican.context.RequestContext(**kwargs) req.environ['barbican.context'] = context barbican-6.0.1/barbican/api/__init__.py0000666000175000017500000001073513311733060017731 0ustar zuulzuul00000000000000# Copyright (c) 2013-2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ API handler for Barbican """ import pkgutil import six from oslo_policy import policy from oslo_serialization import jsonutils as json import pecan from barbican.common import config from barbican.common import exception from barbican.common import utils from barbican import i18n as u LOG = utils.getLogger(__name__) CONF = config.CONF class ApiResource(object): """Base class for API resources.""" pass def load_body(req, resp=None, validator=None): """Helper function for loading an HTTP request body from JSON. This body is placed into into a Python dictionary. :param req: The HTTP request instance to load the body from. :param resp: The HTTP response instance. :param validator: The JSON validator to enforce. :return: A dict of values from the JSON request. """ try: body = req.body_file.read(CONF.max_allowed_request_size_in_bytes) req.body_file.seek(0) except IOError: LOG.exception("Problem reading request JSON stream.") pecan.abort(500, u._('Read Error')) try: # TODO(jwood): Investigate how to get UTF8 format via openstack # jsonutils: # parsed_body = json.loads(raw_json, 'utf-8') parsed_body = json.loads(body) strip_whitespace(parsed_body) except ValueError: LOG.exception("Problem loading request JSON.") pecan.abort(400, u._('Malformed JSON')) if validator: try: parsed_body = validator.validate(parsed_body) except exception.BarbicanHTTPException as e: LOG.exception(six.text_type(e)) pecan.abort(e.status_code, e.client_message) return parsed_body def generate_safe_exception_message(operation_name, excep): """Generates an exception message that is 'safe' for clients to consume. A 'safe' message is one that doesn't contain sensitive information that could be used for (say) cryptographic attacks on Barbican. That generally means that em.CryptoXxxx should be captured here and with a simple message created on behalf of them. :param operation_name: Name of attempted operation, with a 'Verb noun' format (e.g. 'Create Secret). :param excep: The Exception instance that halted the operation. :return: (status, message) where 'status' is one of the webob.exc.HTTP_xxx codes, and 'message' is the sanitized message associated with the error. 
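    Illustrative call (a sketch; the exception instance is hypothetical)::

        status, message = generate_safe_exception_message(
            'Create Secret', excep)
        # e.g. status == 403 when excep is a PolicyNotAuthorized error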
""" message = None reason = None status = 500 try: raise excep except policy.PolicyNotAuthorized: message = u._( '{operation} attempt not allowed - ' 'please review your ' 'user/project privileges').format(operation=operation_name) status = 403 except exception.BarbicanHTTPException as http_exception: reason = http_exception.client_message status = http_exception.status_code except Exception: message = u._('{operation} failure seen - please contact site ' 'administrator.').format(operation=operation_name) if reason: message = u._('{operation} issue seen - {reason}.').format( operation=operation_name, reason=reason) return status, message @pkgutil.simplegeneric def get_items(obj): """This is used to get items from either a list or a dictionary. While false generator is need to process scalar object """ while False: yield None @get_items.register(dict) def _json_object(obj): return obj.items() @get_items.register(list) def _json_array(obj): return enumerate(obj) def strip_whitespace(json_data): """Recursively trim values from the object passed in using get_items().""" for key, value in get_items(json_data): if hasattr(value, 'strip'): json_data[key] = value.strip() else: strip_whitespace(value) barbican-6.0.1/barbican/api/hooks.py0000666000175000017500000000334713311733060017316 0ustar zuulzuul00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import pecan import webob from oslo_serialization import jsonutils try: import newrelic.agent newrelic_loaded = True except ImportError: newrelic_loaded = False from barbican.model import repositories class JSONErrorHook(pecan.hooks.PecanHook): def on_error(self, state, exc): if isinstance(exc, webob.exc.HTTPError): exc.body = jsonutils.dump_as_bytes({ 'code': exc.status_int, 'title': exc.title, 'description': exc.detail }) state.response.content_type = "application/json" return exc.body class BarbicanTransactionHook(pecan.hooks.TransactionHook): """Custom hook for Barbican transactions.""" def __init__(self): super(BarbicanTransactionHook, self).__init__( start=repositories.start, start_ro=repositories.start_read_only, commit=repositories.commit, rollback=repositories.rollback, clear=repositories.clear ) class NewRelicHook(pecan.hooks.PecanHook): def on_error(self, state, exc): if newrelic_loaded: newrelic.agent.record_exception() barbican-6.0.1/barbican/api/app.py0000666000175000017500000000676213311733060016757 0ustar zuulzuul00000000000000# Copyright (c) 2013-2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
""" API application handler for Barbican """ import os from paste import deploy import pecan try: import newrelic.agent newrelic_loaded = True except ImportError: newrelic_loaded = False from oslo_log import log from barbican.api.controllers import versions from barbican.api import hooks from barbican.common import config from barbican.model import repositories from barbican import queue CONF = config.CONF if newrelic_loaded: newrelic.agent.initialize( os.environ.get('NEW_RELIC_CONFIG_FILE', '/etc/newrelic/newrelic.ini'), os.environ.get('NEW_RELIC_ENVIRONMENT') ) def build_wsgi_app(controller=None, transactional=False): """WSGI application creation helper :param controller: Overrides default application controller :param transactional: Adds transaction hook for all requests """ request_hooks = [hooks.JSONErrorHook()] if transactional: request_hooks.append(hooks.BarbicanTransactionHook()) if newrelic_loaded: request_hooks.insert(0, hooks.NewRelicHook()) # Create WSGI app wsgi_app = pecan.Pecan( controller or versions.AVAILABLE_VERSIONS[versions.DEFAULT_VERSION](), hooks=request_hooks, force_canonical=False ) # clear the session created in controller initialization 60 repositories.clear() return wsgi_app def main_app(func): def _wrapper(global_config, **local_conf): # Queuing initialization queue.init(CONF, is_server_side=False) # Configure oslo logging and configuration services. log.setup(CONF, 'barbican') LOG = log.getLogger(__name__) config.setup_remote_pydev_debug() # Initializing the database engine and session factory before the app # starts ensures we don't lose requests due to lazy initialization of # db connections. try: repositories.setup_database_engine_and_factory( initialize_secret_stores=True ) repositories.commit() except Exception: LOG.exception('Failed to sync secret_stores table.') repositories.rollback() raise wsgi_app = func(global_config, **local_conf) if newrelic_loaded: wsgi_app = newrelic.agent.WSGIApplicationWrapper(wsgi_app) LOG.info('Barbican app created and initialized') return wsgi_app return _wrapper @main_app def create_main_app(global_config, **local_conf): """uWSGI factory method for the Barbican-API application.""" # Setup app with transactional hook enabled return build_wsgi_app(versions.V1Controller(), transactional=True) def create_version_app(global_config, **local_conf): wsgi_app = pecan.make_app(versions.VersionsController()) return wsgi_app def get_api_wsgi_script(): conf = '/etc/barbican/barbican-api-paste.ini' application = deploy.loadapp('config:%s' % conf) return application barbican-6.0.1/barbican/api/controllers/0000775000175000017500000000000013311733364020165 5ustar zuulzuul00000000000000barbican-6.0.1/barbican/api/controllers/quotas.py0000666000175000017500000001173613311733060022056 0ustar zuulzuul00000000000000# Copyright (c) 2015 Cisco Systems # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import pecan from barbican import api from barbican.api import controllers from barbican.common import exception from barbican.common import quota from barbican.common import resources as res from barbican.common import utils from barbican.common import validators from barbican import i18n as u LOG = utils.getLogger(__name__) def _project_quotas_not_found(): """Throw exception indicating project quotas not found.""" pecan.abort(404, u._('Not Found. Sorry but your project quotas are in ' 'another castle.')) class QuotasController(controllers.ACLMixin): """Handles quota retrieval requests.""" def __init__(self): LOG.debug('=== Creating QuotasController ===') self.quota_driver = quota.QuotaDriver() @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Quotas')) @controllers.enforce_rbac('quotas:get') def on_get(self, external_project_id, **kwargs): LOG.debug('=== QuotasController GET ===') # make sure project exists res.get_or_create_project(external_project_id) resp = self.quota_driver.get_quotas(external_project_id) return resp class ProjectQuotasController(controllers.ACLMixin): """Handles project quota requests.""" def __init__(self, project_id): LOG.debug('=== Creating ProjectQuotasController ===') self.passed_project_id = project_id self.validator = validators.ProjectQuotaValidator() self.quota_driver = quota.QuotaDriver() @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Project Quotas')) @controllers.enforce_rbac('project_quotas:get') def on_get(self, external_project_id, **kwargs): LOG.debug('=== ProjectQuotasController GET ===') resp = self.quota_driver.get_project_quotas(self.passed_project_id) if resp: return resp else: _project_quotas_not_found() @index.when(method='PUT', template='json') @controllers.handle_exceptions(u._('Project Quotas')) @controllers.enforce_rbac('project_quotas:put') @controllers.enforce_content_types(['application/json']) def on_put(self, external_project_id, **kwargs): LOG.debug('=== ProjectQuotasController PUT ===') if not pecan.request.body: raise exception.NoDataToProcess() api.load_body(pecan.request, validator=self.validator) self.quota_driver.set_project_quotas(self.passed_project_id, kwargs['project_quotas']) LOG.info('Put Project Quotas') pecan.response.status = 204 @index.when(method='DELETE', template='json') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Project Quotas')) @controllers.enforce_rbac('project_quotas:delete') def on_delete(self, external_project_id, **kwargs): LOG.debug('=== ProjectQuotasController DELETE ===') try: self.quota_driver.delete_project_quotas(self.passed_project_id) except exception.NotFound: LOG.info('Delete Project Quotas - Project not found') _project_quotas_not_found() else: LOG.info('Delete Project Quotas') pecan.response.status = 204 class ProjectsQuotasController(controllers.ACLMixin): """Handles projects quota retrieval requests.""" def __init__(self): LOG.debug('=== Creating ProjectsQuotaController ===') self.quota_driver = quota.QuotaDriver() @pecan.expose() def _lookup(self, project_id, *remainder): return ProjectQuotasController(project_id), remainder @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') 
@controllers.handle_exceptions(u._('Project Quotas')) @controllers.enforce_rbac('project_quotas:get') def on_get(self, external_project_id, **kwargs): resp = self.quota_driver.get_project_quotas_list( offset_arg=kwargs.get('offset', 0), limit_arg=kwargs.get('limit', None) ) return resp barbican-6.0.1/barbican/api/controllers/secretmeta.py0000666000175000017500000001660613311733060022677 0ustar zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import collections import pecan from barbican import api from barbican.api import controllers from barbican.common import hrefs from barbican.common import utils from barbican.common import validators from barbican import i18n as u from barbican.model import repositories as repo LOG = utils.getLogger(__name__) def _secret_metadata_not_found(): """Throw exception indicating secret metadata not found.""" pecan.abort(404, u._('Not Found. Sorry but your secret metadata is in ' 'another castle.')) class SecretMetadataController(controllers.ACLMixin): """Handles SecretMetadata requests by a given secret id.""" def __init__(self, secret): LOG.debug('=== Creating SecretMetadataController ===') self.secret = secret self.secret_project_id = self.secret.project.external_id self.secret_repo = repo.get_secret_repository() self.user_meta_repo = repo.get_secret_user_meta_repository() self.metadata_validator = validators.NewSecretMetadataValidator() self.metadatum_validator = validators.NewSecretMetadatumValidator() @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Secret metadata retrieval')) @controllers.enforce_rbac('secret_meta:get') def on_get(self, external_project_id, **kwargs): """Handles retrieval of existing secret metadata requests.""" LOG.debug('Start secret metadata on_get ' 'for secret-ID %s:', self.secret.id) resp = self.user_meta_repo.get_metadata_for_secret(self.secret.id) pecan.response.status = 200 return {"metadata": resp} @index.when(method='PUT', template='json') @controllers.handle_exceptions(u._('Secret metadata creation')) @controllers.enforce_rbac('secret_meta:put') @controllers.enforce_content_types(['application/json']) def on_put(self, external_project_id, **kwargs): """Handles creation/update of secret metadata.""" data = api.load_body(pecan.request, validator=self.metadata_validator) LOG.debug('Start secret metadata on_put...%s', data) self.user_meta_repo.create_replace_user_metadata(self.secret.id, data) url = hrefs.convert_user_meta_to_href(self.secret.id) LOG.debug('URI to secret metadata is %s', url) pecan.response.status = 201 return {'metadata_ref': url} @index.when(method='POST', template='json') @controllers.handle_exceptions(u._('Secret metadatum creation')) @controllers.enforce_rbac('secret_meta:post') @controllers.enforce_content_types(['application/json']) def on_post(self, external_project_id, **kwargs): """Handles creation of secret metadatum.""" data 
= api.load_body(pecan.request, validator=self.metadatum_validator) key = data.get('key') value = data.get('value') metadata = self.user_meta_repo.get_metadata_for_secret(self.secret.id) if key in metadata: pecan.abort(409, u._('Conflict. Key in request is already in the ' 'secret metadata')) LOG.debug('Start secret metadatum on_post...%s', metadata) self.user_meta_repo.create_replace_user_metadatum(self.secret.id, key, value) url = hrefs.convert_user_meta_to_href(self.secret.id) LOG.debug('URI to secret metadata is %s', url) pecan.response.status = 201 return {'metadata_ref': url + "/%s {key: %s, value:%s}" % (key, key, value)} class SecretMetadatumController(controllers.ACLMixin): def __init__(self, secret): LOG.debug('=== Creating SecretMetadatumController ===') self.user_meta_repo = repo.get_secret_user_meta_repository() self.secret = secret self.metadatum_validator = validators.NewSecretMetadatumValidator() @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Secret metadatum retrieval')) @controllers.enforce_rbac('secret_meta:get') def on_get(self, external_project_id, remainder, **kwargs): """Handles retrieval of existing secret metadatum.""" LOG.debug('Start secret metadatum on_get ' 'for secret-ID %s:', self.secret.id) metadata = self.user_meta_repo.get_metadata_for_secret(self.secret.id) if remainder in metadata: pecan.response.status = 200 pair = {'key': remainder, 'value': metadata[remainder]} return collections.OrderedDict(sorted(pair.items())) else: _secret_metadata_not_found() @index.when(method='PUT', template='json') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Secret metadatum update')) @controllers.enforce_rbac('secret_meta:put') @controllers.enforce_content_types(['application/json']) def on_put(self, external_project_id, remainder, **kwargs): """Handles update of existing secret metadatum.""" metadata = self.user_meta_repo.get_metadata_for_secret(self.secret.id) data = api.load_body(pecan.request, validator=self.metadatum_validator) key = data.get('key') value = data.get('value') if remainder not in metadata: _secret_metadata_not_found() elif remainder != key: msg = 'Key in request data does not match key in the ' 'request url.' pecan.abort(409, msg) else: LOG.debug('Start secret metadatum on_put...%s', metadata) self.user_meta_repo.create_replace_user_metadatum(self.secret.id, key, value) pecan.response.status = 200 pair = {'key': key, 'value': value} return collections.OrderedDict(sorted(pair.items())) @index.when(method='DELETE', template='json') @controllers.handle_exceptions(u._('Secret metadatum removal')) @controllers.enforce_rbac('secret_meta:delete') def on_delete(self, external_project_id, remainder, **kwargs): """Handles removal of existing secret metadatum.""" self.user_meta_repo.delete_metadatum(self.secret.id, remainder) msg = 'Deleted secret metadatum: %s for secret %s' % (remainder, self.secret.id) pecan.response.status = 204 LOG.info(msg) barbican-6.0.1/barbican/api/controllers/acls.py0000666000175000017500000003660213311733060021463 0ustar zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pecan import six from barbican import api from barbican.api import controllers from barbican.common import hrefs from barbican.common import utils from barbican.common import validators from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repo LOG = utils.getLogger(__name__) def _convert_acl_to_response_format(acl, acls_dict): fields = acl.to_dict_fields() operation = fields['operation'] acl_data = {} # dict for each acl operation data acl_data['project-access'] = fields['project_access'] acl_data['users'] = fields.get('users', []) acl_data['created'] = fields['created'] acl_data['updated'] = fields['updated'] acls_dict[operation] = acl_data DEFAULT_ACL = {'read': {'project-access': True}} class SecretACLsController(controllers.ACLMixin): """Handles SecretACL requests by a given secret id.""" def __init__(self, secret): self.secret = secret self.secret_project_id = self.secret.project.external_id self.acl_repo = repo.get_secret_acl_repository() self.validator = validators.ACLValidator() def get_acl_tuple(self, req, **kwargs): d = {'project_id': self.secret_project_id, 'creator_id': self.secret.creator_id} return 'secret', d @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('SecretACL(s) retrieval')) @controllers.enforce_rbac('secret_acls:get') def on_get(self, external_project_id, **kw): LOG.debug('Start secret ACL on_get ' 'for secret-ID %s:', self.secret.id) return self._return_acl_list_response(self.secret.id) @index.when(method='PATCH', template='json') @controllers.handle_exceptions(u._('SecretACL(s) Update')) @controllers.enforce_rbac('secret_acls:put_patch') @controllers.enforce_content_types(['application/json']) def on_patch(self, external_project_id, **kwargs): """Handles update of existing secret acl requests. At least one secret ACL needs to exist for update to proceed. In update, multiple operation ACL payload can be specified as mentioned in sample below. A specific ACL can be updated by its own id via SecretACLController patch request. 
{ "read":{ "users":[ "5ecb18f341894e94baca9e8c7b6a824a", "20b63d71f90848cf827ee48074f213b7", "c7753f8da8dc4fbea75730ab0b6e0ef4" ] }, "write":{ "users":[ "5ecb18f341894e94baca9e8c7b6a824a" ], "project-access":true } } """ data = api.load_body(pecan.request, validator=self.validator) LOG.debug('Start on_patch...%s', data) existing_acls_map = {acl.operation: acl for acl in self.secret.secret_acls} for operation in six.moves.filter(lambda x: data.get(x), validators.ACL_OPERATIONS): project_access = data[operation].get('project-access') user_ids = data[operation].get('users') s_acl = None if operation in existing_acls_map: # update if matching acl exists s_acl = existing_acls_map[operation] if project_access is not None: s_acl.project_access = project_access else: s_acl = models.SecretACL(self.secret.id, operation=operation, project_access=project_access) self.acl_repo.create_or_replace_from(self.secret, secret_acl=s_acl, user_ids=user_ids) acl_ref = '{0}/acl'.format( hrefs.convert_secret_to_href(self.secret.id)) return {'acl_ref': acl_ref} @index.when(method='PUT', template='json') @controllers.handle_exceptions(u._('SecretACL(s) Update')) @controllers.enforce_rbac('secret_acls:put_patch') @controllers.enforce_content_types(['application/json']) def on_put(self, external_project_id, **kwargs): """Handles update of existing secret acl requests. Replaces existing secret ACL(s) with input ACL(s) data. Existing ACL operation not specified in input are removed as part of update. For missing project-access in ACL, true is used as default. In update, multiple operation ACL payload can be specified as mentioned in sample below. A specific ACL can be updated by its own id via SecretACLController patch request. { "read":{ "users":[ "5ecb18f341894e94baca9e8c7b6a824a", "20b63d71f90848cf827ee48074f213b7", "c7753f8da8dc4fbea75730ab0b6e0ef4" ] }, "write":{ "users":[ "5ecb18f341894e94baca9e8c7b6a824a" ], "project-access":false } } Every secret, by default, has an implicit ACL in case client has not defined an explicit ACL. That default ACL definition, DEFAULT_ACL, signifies that a secret by default has project based access i.e. client with necessary roles on secret project can access the secret. That's why when ACL is added to a secret, it always returns 200 (and not 201) indicating existence of implicit ACL on a secret. """ data = api.load_body(pecan.request, validator=self.validator) LOG.debug('Start on_put...%s', data) existing_acls_map = {acl.operation: acl for acl in self.secret.secret_acls} for operation in six.moves.filter(lambda x: data.get(x), validators.ACL_OPERATIONS): project_access = data[operation].get('project-access', True) user_ids = data[operation].get('users', []) s_acl = None if operation in existing_acls_map: # update if matching acl exists s_acl = existing_acls_map.pop(operation) s_acl.project_access = project_access else: s_acl = models.SecretACL(self.secret.id, operation=operation, project_access=project_access) self.acl_repo.create_or_replace_from(self.secret, secret_acl=s_acl, user_ids=user_ids) # delete remaining existing acls as they are not present in input. 
for acl in existing_acls_map.values(): self.acl_repo.delete_entity_by_id(entity_id=acl.id, external_project_id=None) acl_ref = '{0}/acl'.format( hrefs.convert_secret_to_href(self.secret.id)) return {'acl_ref': acl_ref} @index.when(method='DELETE', template='json') @controllers.handle_exceptions(u._('SecretACL(s) deletion')) @controllers.enforce_rbac('secret_acls:delete') def on_delete(self, external_project_id, **kwargs): count = self.acl_repo.get_count(self.secret.id) if count > 0: self.acl_repo.delete_acls_for_secret(self.secret) def _return_acl_list_response(self, secret_id): result = self.acl_repo.get_by_secret_id(secret_id) acls_data = {} if result: for acl in result: _convert_acl_to_response_format(acl, acls_data) if not acls_data: acls_data = DEFAULT_ACL.copy() return acls_data class ContainerACLsController(controllers.ACLMixin): """Handles ContainerACL requests by a given container id.""" def __init__(self, container): self.container = container self.container_id = container.id self.acl_repo = repo.get_container_acl_repository() self.container_repo = repo.get_container_repository() self.validator = validators.ACLValidator() self.container_project_id = container.project.external_id def get_acl_tuple(self, req, **kwargs): d = {'project_id': self.container_project_id, 'creator_id': self.container.creator_id} return 'container', d @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('ContainerACL(s) retrieval')) @controllers.enforce_rbac('container_acls:get') def on_get(self, external_project_id, **kw): LOG.debug('Start container ACL on_get ' 'for container-ID %s:', self.container_id) return self._return_acl_list_response(self.container.id) @index.when(method='PATCH', template='json') @controllers.handle_exceptions(u._('ContainerACL(s) Update')) @controllers.enforce_rbac('container_acls:put_patch') @controllers.enforce_content_types(['application/json']) def on_patch(self, external_project_id, **kwargs): """Handles update of existing container acl requests. At least one container ACL needs to exist for update to proceed. In update, multiple operation ACL payload can be specified as mentioned in sample below. A specific ACL can be updated by its own id via ContainerACLController patch request. 
{ "read":{ "users":[ "5ecb18f341894e94baca9e8c7b6a824a", "20b63d71f90848cf827ee48074f213b7", "c7753f8da8dc4fbea75730ab0b6e0ef4" ] }, "write":{ "users":[ "5ecb18f341894e94baca9e8c7b6a824a" ], "project-access":false } } """ data = api.load_body(pecan.request, validator=self.validator) LOG.debug('Start ContainerACLsController on_patch...%s', data) existing_acls_map = {acl.operation: acl for acl in self.container.container_acls} for operation in six.moves.filter(lambda x: data.get(x), validators.ACL_OPERATIONS): project_access = data[operation].get('project-access') user_ids = data[operation].get('users') if operation in existing_acls_map: # update if matching acl exists c_acl = existing_acls_map[operation] if project_access is not None: c_acl.project_access = project_access else: c_acl = models.ContainerACL(self.container.id, operation=operation, project_access=project_access) self.acl_repo.create_or_replace_from(self.container, container_acl=c_acl, user_ids=user_ids) acl_ref = '{0}/acl'.format( hrefs.convert_container_to_href(self.container.id)) return {'acl_ref': acl_ref} @index.when(method='PUT', template='json') @controllers.handle_exceptions(u._('ContainerACL(s) Update')) @controllers.enforce_rbac('container_acls:put_patch') @controllers.enforce_content_types(['application/json']) def on_put(self, external_project_id, **kwargs): """Handles update of existing container acl requests. Replaces existing container ACL(s) with input ACL(s) data. Existing ACL operation not specified in input are removed as part of update. For missing project-access in ACL, true is used as default. In update, multiple operation ACL payload can be specified as mentioned in sample below. A specific ACL can be updated by its own id via ContainerACLController patch request. { "read":{ "users":[ "5ecb18f341894e94baca9e8c7b6a824a", "20b63d71f90848cf827ee48074f213b7", "c7753f8da8dc4fbea75730ab0b6e0ef4" ] }, "write":{ "users":[ "5ecb18f341894e94baca9e8c7b6a824a" ], "project-access":false } } Every container, by default, has an implicit ACL in case client has not defined an explicit ACL. That default ACL definition, DEFAULT_ACL, signifies that a container by default has project based access i.e. client with necessary roles on container project can access the container. That's why when ACL is added to a container, it always returns 200 (and not 201) indicating existence of implicit ACL on a container. """ data = api.load_body(pecan.request, validator=self.validator) LOG.debug('Start ContainerACLsController on_put...%s', data) existing_acls_map = {acl.operation: acl for acl in self.container.container_acls} for operation in six.moves.filter(lambda x: data.get(x), validators.ACL_OPERATIONS): project_access = data[operation].get('project-access', True) user_ids = data[operation].get('users', []) if operation in existing_acls_map: # update if matching acl exists c_acl = existing_acls_map.pop(operation) c_acl.project_access = project_access else: c_acl = models.ContainerACL(self.container.id, operation=operation, project_access=project_access) self.acl_repo.create_or_replace_from(self.container, container_acl=c_acl, user_ids=user_ids) # delete remaining existing acls as they are not present in input. 
for acl in existing_acls_map.values(): self.acl_repo.delete_entity_by_id(entity_id=acl.id, external_project_id=None) acl_ref = '{0}/acl'.format( hrefs.convert_container_to_href(self.container.id)) return {'acl_ref': acl_ref} @index.when(method='DELETE', template='json') @controllers.handle_exceptions(u._('ContainerACL(s) deletion')) @controllers.enforce_rbac('container_acls:delete') def on_delete(self, external_project_id, **kwargs): count = self.acl_repo.get_count(self.container_id) if count > 0: self.acl_repo.delete_acls_for_container(self.container) def _return_acl_list_response(self, container_id): result = self.acl_repo.get_by_container_id(container_id) acls_data = {} if result: for acl in result: _convert_acl_to_response_format(acl, acls_data) if not acls_data: acls_data = DEFAULT_ACL.copy() return acls_data barbican-6.0.1/barbican/api/controllers/versions.py0000666000175000017500000001317713311733060022413 0ustar zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pecan from six.moves.urllib import parse from barbican.api import controllers from barbican.api.controllers import containers from barbican.api.controllers import orders from barbican.api.controllers import quotas from barbican.api.controllers import secrets from barbican.api.controllers import secretstores from barbican.api.controllers import transportkeys from barbican.common import utils from barbican import i18n as u from barbican import version LOG = utils.getLogger(__name__) MIME_TYPE_JSON = 'application/json' MIME_TYPE_JSON_HOME = 'application/json-home' MEDIA_TYPE_JSON = 'application/vnd.openstack.key-manager-%s+json' def _version_not_found(): """Throw exception indicating version not found.""" pecan.abort(404, u._("The version you requested wasn't found")) def _get_versioned_url(version): if version[-1] != '/': version += '/' # If host_href is not set in barbican conf, then derive it from request url host_part = utils.get_base_url_from_request() if host_part[-1] != '/': host_part += '/' return parse.urljoin(host_part, version) class BaseVersionController(object): """Base class for the version-specific controllers""" @classmethod def get_version_info(cls, request): return { 'id': cls.version_id, 'status': 'stable', 'updated': cls.last_updated, 'links': [ { 'rel': 'self', 'href': _get_versioned_url(cls.version_string), }, { 'rel': 'describedby', 'type': 'text/html', 'href': 'https://docs.openstack.org/' } ], 'media-types': [ { 'base': MIME_TYPE_JSON, 'type': MEDIA_TYPE_JSON % cls.version_string } ] } class V1Controller(BaseVersionController): """Root controller for the v1 API""" version_string = 'v1' # NOTE(jaosorior): We might start using decimals in the future, meanwhile # this is the same as the version string. 
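# An abbreviated sketch of what get_version_info() renders for this
# controller (the href depends on the deployment's host_href setting):
#     {"id": "v1", "status": "stable",
#      "updated": "2015-04-28T00:00:00Z",
#      "links": [{"rel": "self", "href": "https://<host>/v1/"}, ...],
#      "media-types": [{"base": "application/json",
#                       "type": "application/vnd.openstack.key-manager-v1+json"}]}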
version_id = 'v1' last_updated = '2015-04-28T00:00:00Z' def __init__(self): LOG.debug('=== Creating V1Controller ===') self.secrets = secrets.SecretsController() self.orders = orders.OrdersController() self.containers = containers.ContainersController() self.transport_keys = transportkeys.TransportKeysController() self.quotas = quotas.QuotasController() setattr(self, 'project-quotas', quotas.ProjectsQuotasController()) setattr(self, 'secret-stores', secretstores.SecretStoresController()) @pecan.expose(generic=True) def index(self): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @utils.allow_certain_content_types(MIME_TYPE_JSON, MIME_TYPE_JSON_HOME) @controllers.handle_exceptions(u._('Version retrieval')) def on_get(self): pecan.core.override_template('json') return {'version': self.get_version_info(pecan.request)} AVAILABLE_VERSIONS = { V1Controller.version_string: V1Controller, } DEFAULT_VERSION = V1Controller.version_string class VersionsController(object): def __init__(self): LOG.debug('=== Creating VersionsController ===') @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @utils.allow_certain_content_types(MIME_TYPE_JSON, MIME_TYPE_JSON_HOME) def on_get(self, **kwargs): """The list of versions is dependent on the context.""" self._redirect_to_default_json_home_if_needed(pecan.request) if 'build' in kwargs: return {'build': version.__version__} versions_info = [version_class.get_version_info(pecan.request) for version_class in AVAILABLE_VERSIONS.values()] version_output = { 'versions': { 'values': versions_info } } # Since we are returning all the versions available, the proper status # code is Multiple Choices (300) pecan.response.status = 300 return version_output def _redirect_to_default_json_home_if_needed(self, request): if self._mime_best_match(request.accept) == MIME_TYPE_JSON_HOME: url = _get_versioned_url(DEFAULT_VERSION) LOG.debug("Redirecting Request to " + url) # NOTE(jaosorior): This issues an "external" redirect because of # two reasons: # * This module doesn't require authorization, and accessing # specific version info needs that. # * The resource is a separate app_factory and won't be found # internally pecan.redirect(url, request=request) def _mime_best_match(self, accept): if not accept: return MIME_TYPE_JSON SUPPORTED_TYPES = [MIME_TYPE_JSON, MIME_TYPE_JSON_HOME] return accept.best_match(SUPPORTED_TYPES) barbican-6.0.1/barbican/api/controllers/__init__.py0000666000175000017500000001722213311733072022300 0ustar zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import collections from oslo_policy import policy import pecan from webob import exc from barbican import api from barbican.common import utils from barbican import i18n as u LOG = utils.getLogger(__name__) def is_json_request_accept(req): """Test if http request 'accept' header configured for JSON response. 
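A missing Accept header, an explicit 'application/json', and the
wildcard '*/*' are all treated as accepting JSON.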
:param req: HTTP request :return: True if need to return JSON response. """ return (not req.accept or req.accept.header_value == 'application/json' or req.accept.header_value == '*/*') def _get_barbican_context(req): if 'barbican.context' in req.environ: return req.environ['barbican.context'] else: return None def _do_enforce_rbac(inst, req, action_name, ctx, **kwargs): """Enforce RBAC based on 'request' information.""" if action_name and ctx: # Prepare credentials information. credentials = { 'roles': ctx.roles, 'user': ctx.user, 'project': ctx.project } # Enforce special case: secret GET decryption if 'secret:get' == action_name and not is_json_request_accept(req): action_name = 'secret:decrypt' # Override to perform special rules target_name, target_data = inst.get_acl_tuple(req, **kwargs) policy_dict = {} if target_name and target_data: policy_dict['target'] = {target_name: target_data} policy_dict.update(kwargs) # Enforce access controls. if ctx.policy_enforcer: ctx.policy_enforcer.enforce(action_name, flatten(policy_dict), credentials, do_raise=True) def enforce_rbac(action_name='default'): """Decorator handling RBAC enforcement on behalf of REST verb methods.""" def rbac_decorator(fn): def enforcer(inst, *args, **kwargs): # Enforce RBAC rules. # context placed here by context.py # middleware ctx = _get_barbican_context(pecan.request) external_project_id = None if ctx: external_project_id = ctx.project _do_enforce_rbac(inst, pecan.request, action_name, ctx, **kwargs) # insert external_project_id as the first arg to the guarded method args = list(args) args.insert(0, external_project_id) # Execute guarded method now. return fn(inst, *args, **kwargs) return enforcer return rbac_decorator def handle_exceptions(operation_name=u._('System')): """Decorator handling generic exceptions from REST methods.""" def exceptions_decorator(fn): def handler(inst, *args, **kwargs): try: return fn(inst, *args, **kwargs) except exc.HTTPError: LOG.exception('Webob error seen') raise # Already converted to Webob exception, just reraise # In case PolicyNotAuthorized, we do not want to expose payload by # logging exception, so just LOG.error except policy.PolicyNotAuthorized as pna: status, message = api.generate_safe_exception_message( operation_name, pna) LOG.error(message) pecan.abort(status, message) except Exception as e: # In case intervening modules have disabled logging. LOG.logger.disabled = False status, message = api.generate_safe_exception_message( operation_name, e) LOG.exception(message) pecan.abort(status, message) return handler return exceptions_decorator def _do_enforce_content_types(pecan_req, valid_content_types): """Content type enforcement Check to see that content type in the request is one of the valid types passed in by our caller. """ if pecan_req.content_type not in valid_content_types: m = u._( "Unexpected content type. Expected content types " "are: {expected}" ).format( expected=valid_content_types ) pecan.abort(415, m) def enforce_content_types(valid_content_types=[]): """Decorator handling content type enforcement on behalf of REST verbs.""" def content_types_decorator(fn): def content_types_enforcer(inst, *args, **kwargs): _do_enforce_content_types(pecan.request, valid_content_types) return fn(inst, *args, **kwargs) return content_types_enforcer return content_types_decorator def flatten(d, parent_key=''): """Flatten a nested dictionary Converts a dictionary with nested values to a single level flat dictionary, with dotted notation for each key. 
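For example (illustrative values):

    >>> flatten({'target': {'secret': {'creator_id': 'u1'}}})
    {'target.secret.creator_id': 'u1'}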
""" items = [] for k, v in d.items(): new_key = parent_key + '.' + k if parent_key else k if isinstance(v, collections.MutableMapping): items.extend(flatten(v, new_key).items()) else: items.append((new_key, v)) return dict(items) class ACLMixin(object): def get_acl_tuple(self, req, **kwargs): return None, None def get_acl_dict_for_user(self, req, acl_list): """Get acl operation found for token user in acl list. Token user is looked into users list present for each acl operation. If there is a match, it means that ACL data is applicable for policy logic. Policy logic requires data as dictionary so this method capture acl's operation, project_access data in that format. For operation value, matching ACL record's operation is stored in dict as key and value both. project_access flag is intended to make secret/container private for a given operation. It doesn't require user match. So its captured in dict format where key is prefixed with related operation and flag is used as its value. Then for acl related policy logic, this acl dict data is combined with target entity (secret or container) creator_id and project id. The whole dict serves as target in policy enforcement logic i.e. right hand side of policy rule. Following is sample outcome where secret or container has ACL defined and token user is among the ACL users defined for 'read' and 'list' operation. {'read': 'read', 'list': 'list', 'read_project_access': True, 'list_project_access': True } Its possible that ACLs are defined without any user, they just have project_access flag set. This means only creator can read or list ACL entities. In that case, dictionary output can be as follows. {'read_project_access': False, 'list_project_access': False } """ ctxt = _get_barbican_context(req) if not ctxt: return {} acl_dict = {acl.operation: acl.operation for acl in acl_list if ctxt.user in acl.to_dict_fields().get('users', [])} co_dict = {'%s_project_access' % acl.operation: acl.project_access for acl in acl_list if acl.project_access is not None} acl_dict.update(co_dict) return acl_dict barbican-6.0.1/barbican/api/controllers/containers.py0000666000175000017500000003060513311733060022703 0ustar zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pecan from barbican import api from barbican.api import controllers from barbican.api.controllers import acls from barbican.api.controllers import consumers from barbican.common import exception from barbican.common import hrefs from barbican.common import quota from barbican.common import resources as res from barbican.common import utils from barbican.common import validators from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repo LOG = utils.getLogger(__name__) CONTAINER_GET = 'container:get' def container_not_found(): """Throw exception indicating container not found.""" pecan.abort(404, u._('Not Found. 
Sorry but your container is in ' 'another castle.')) def invalid_container_id(): """Throw exception indicating container id is invalid.""" pecan.abort(404, u._('Not Found. Provided container id is invalid.')) class ContainerController(controllers.ACLMixin): """Handles Container entity retrieval and deletion requests.""" def __init__(self, container): self.container = container self.container_id = container.id self.consumer_repo = repo.get_container_consumer_repository() self.container_repo = repo.get_container_repository() self.validator = validators.ContainerValidator() self.consumers = consumers.ContainerConsumersController( self.container_id) self.acl = acls.ContainerACLsController(self.container) def get_acl_tuple(self, req, **kwargs): d = self.get_acl_dict_for_user(req, self.container.container_acls) d['project_id'] = self.container.project.external_id d['creator_id'] = self.container.creator_id return 'container', d @pecan.expose(generic=True, template='json') def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Container retrieval')) @controllers.enforce_rbac(CONTAINER_GET) def on_get(self, external_project_id): dict_fields = self.container.to_dict_fields() for secret_ref in dict_fields['secret_refs']: hrefs.convert_to_hrefs(secret_ref) LOG.info('Retrieved container for project: %s', external_project_id) return hrefs.convert_to_hrefs( hrefs.convert_to_hrefs(dict_fields) ) @index.when(method='DELETE') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Container deletion')) @controllers.enforce_rbac('container:delete') def on_delete(self, external_project_id, **kwargs): container_consumers = self.consumer_repo.get_by_container_id( self.container_id, suppress_exception=True ) try: self.container_repo.delete_entity_by_id( entity_id=self.container_id, external_project_id=external_project_id ) except exception.NotFound: LOG.exception('Problem deleting container') container_not_found() LOG.info('Deleted container for project: %s', external_project_id) for consumer in container_consumers[0]: try: self.consumer_repo.delete_entity_by_id( consumer.id, external_project_id) except exception.NotFound: # nosec pass class ContainersController(controllers.ACLMixin): """Handles Container creation requests.""" def __init__(self): self.consumer_repo = repo.get_container_consumer_repository() self.container_repo = repo.get_container_repository() self.secret_repo = repo.get_secret_repository() self.validator = validators.ContainerValidator() self.quota_enforcer = quota.QuotaEnforcer('containers', self.container_repo) @pecan.expose() def _lookup(self, container_id, *remainder): if not utils.validate_id_is_uuid(container_id): invalid_container_id() container = self.container_repo.get_container_by_id( entity_id=container_id, suppress_exception=True) if not container: container_not_found() if len(remainder) > 0 and remainder[0] == 'secrets': return ContainersSecretsController(container), () return ContainerController(container), remainder @pecan.expose(generic=True, template='json') def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Containers(s) retrieval')) @controllers.enforce_rbac('containers:get') def on_get(self, project_id, **kw): LOG.debug('Start containers on_get for project-ID %s:', project_id) result = self.container_repo.get_by_create_date( project_id, 
offset_arg=kw.get('offset', 0), limit_arg=kw.get('limit', None), name_arg=kw.get('name', None), suppress_exception=True ) containers, offset, limit, total = result if not containers: resp_ctrs_overall = {'containers': [], 'total': total} else: resp_ctrs = [ hrefs.convert_to_hrefs(c.to_dict_fields()) for c in containers ] for ctr in resp_ctrs: for secret_ref in ctr.get('secret_refs', []): hrefs.convert_to_hrefs(secret_ref) resp_ctrs_overall = hrefs.add_nav_hrefs( 'containers', offset, limit, total, {'containers': resp_ctrs} ) resp_ctrs_overall.update({'total': total}) LOG.info('Retrieved container list for project: %s', project_id) return resp_ctrs_overall @index.when(method='POST', template='json') @controllers.handle_exceptions(u._('Container creation')) @controllers.enforce_rbac('containers:post') @controllers.enforce_content_types(['application/json']) def on_post(self, external_project_id, **kwargs): project = res.get_or_create_project(external_project_id) data = api.load_body(pecan.request, validator=self.validator) ctxt = controllers._get_barbican_context(pecan.request) if ctxt: # in authenticated pipleline case, always use auth token user data['creator_id'] = ctxt.user self.quota_enforcer.enforce(project) LOG.debug('Start on_post...%s', data) new_container = models.Container(data) new_container.project_id = project.id # TODO(hgedikli): performance optimizations for secret_ref in new_container.container_secrets: secret = self.secret_repo.get( entity_id=secret_ref.secret_id, external_project_id=external_project_id, suppress_exception=True) if not secret: # This only partially localizes the error message and # doesn't localize secret_ref.name. pecan.abort( 404, u._("Secret provided for '{secret_name}' doesn't " "exist.").format(secret_name=secret_ref.name) ) self.container_repo.create_from(new_container) url = hrefs.convert_container_to_href(new_container.id) pecan.response.status = 201 pecan.response.headers['Location'] = url LOG.info('Created a container for project: %s', external_project_id) return {'container_ref': url} class ContainersSecretsController(controllers.ACLMixin): """Handles ContainerSecret creation and deletion requests.""" def __init__(self, container): LOG.debug('=== Creating ContainerSecretsController ===') self.container = container self.container_secret_repo = repo.get_container_secret_repository() self.secret_repo = repo.get_secret_repository() self.validator = validators.ContainerSecretValidator() @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='POST', template='json') @controllers.handle_exceptions(u._('Container Secret creation')) @controllers.enforce_rbac('container_secret:post') @controllers.enforce_content_types(['application/json']) def on_post(self, external_project_id, **kwargs): """Handles adding an existing secret to an existing container.""" if self.container.type != 'generic': pecan.abort(400, u._("Only 'generic' containers can be modified.")) data = api.load_body(pecan.request, validator=self.validator) name = data.get('name') secret_ref = data.get('secret_ref') secret_id = hrefs.get_secret_id_from_ref(secret_ref) secret = self.secret_repo.get( entity_id=secret_id, external_project_id=external_project_id, suppress_exception=True) if not secret: pecan.abort(404, u._("Secret provided doesn't exist.")) found_container_secrets = list( filter(lambda cs: cs.secret_id == secret_id and cs.name == name, self.container.container_secrets) ) if found_container_secrets: 
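# (secret_id, name) is the uniqueness key checked above; with a
# hypothetical secret id 'abc', re-adding 'abc' under the same name
# triggers the 409 below, while adding 'abc' again under a different
# name is allowed, as the error message explains.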
pecan.abort(409, u._('Conflict. A secret with that name and ID is ' 'already stored in this container. The same ' 'secret can exist in a container as long as ' 'the name is unique.')) LOG.debug('Start container secret on_post...%s', secret_ref) new_container_secret = models.ContainerSecret() new_container_secret.container_id = self.container.id new_container_secret.name = name new_container_secret.secret_id = secret_id self.container_secret_repo.save(new_container_secret) url = hrefs.convert_container_to_href(self.container.id) LOG.debug('URI to container is %s', url) pecan.response.status = 201 pecan.response.headers['Location'] = url LOG.info('Created a container secret for project: %s', external_project_id) return {'container_ref': url} @index.when(method='DELETE') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Container Secret deletion')) @controllers.enforce_rbac('container_secret:delete') def on_delete(self, external_project_id, **kwargs): """Handles removing a secret reference from an existing container.""" data = api.load_body(pecan.request, validator=self.validator) name = data.get('name') secret_ref = data.get('secret_ref') secret_id = hrefs.get_secret_id_from_ref(secret_ref) secret = self.secret_repo.get( entity_id=secret_id, external_project_id=external_project_id, suppress_exception=True) if not secret: pecan.abort(404, u._("Secret '{secret_name}' with reference " "'{secret_ref}' doesn't exist.").format( secret_name=name, secret_ref=secret_ref)) found_container_secrets = list( filter(lambda cs: cs.secret_id == secret_id and cs.name == name, self.container.container_secrets) ) if not found_container_secrets: pecan.abort(404, u._('Secret provided is not in the container')) for container_secret in found_container_secrets: self.container_secret_repo.delete_entity_by_id( container_secret.id, external_project_id) pecan.response.status = 204 LOG.info('Deleted container secret for project: %s', external_project_id) barbican-6.0.1/barbican/api/controllers/transportkeys.py0000666000175000017500000001373113311733060023467 0ustar zuulzuul00000000000000# Copyright (c) 2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pecan from six.moves.urllib import parse from barbican import api from barbican.api import controllers from barbican.common import exception from barbican.common import hrefs from barbican.common import utils from barbican.common import validators from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repo LOG = utils.getLogger(__name__) def _transport_key_not_found(): """Throw exception indicating transport key not found.""" pecan.abort(404, u._('Not Found. Transport Key not found.')) def _invalid_transport_key_id(): """Throw exception indicating transport key id is invalid.""" pecan.abort(404, u._('Not Found. 
Provided transport key id is invalid.')) class TransportKeyController(controllers.ACLMixin): """Handles transport key retrieval requests.""" def __init__(self, transport_key_id, transport_key_repo=None): LOG.debug('=== Creating TransportKeyController ===') self.transport_key_id = transport_key_id self.repo = transport_key_repo or repo.TransportKeyRepo() @pecan.expose(generic=True) def index(self, external_project_id, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET') @controllers.handle_exceptions(u._('Transport Key retrieval')) @controllers.enforce_rbac('transport_key:get') def on_get(self, external_project_id): LOG.debug("== Getting transport key for %s", external_project_id) transport_key = self.repo.get(entity_id=self.transport_key_id) if not transport_key: _transport_key_not_found() pecan.override_template('json', 'application/json') return transport_key @index.when(method='DELETE') @controllers.handle_exceptions(u._('Transport Key deletion')) @controllers.enforce_rbac('transport_key:delete') def on_delete(self, external_project_id, **kwargs): LOG.debug("== Deleting transport key ===") try: self.repo.delete_entity_by_id( entity_id=self.transport_key_id, external_project_id=external_project_id) # TODO(alee) response should be 204 on success # pecan.response.status = 204 except exception.NotFound: LOG.exception('Problem deleting transport_key') _transport_key_not_found() class TransportKeysController(controllers.ACLMixin): """Handles transport key list requests.""" def __init__(self, transport_key_repo=None): LOG.debug('Creating TransportKeyController') self.repo = transport_key_repo or repo.TransportKeyRepo() self.validator = validators.NewTransportKeyValidator() @pecan.expose() def _lookup(self, transport_key_id, *remainder): if not utils.validate_id_is_uuid(transport_key_id): _invalid_transport_key_id() return TransportKeyController(transport_key_id, self.repo), remainder @pecan.expose(generic=True) def index(self, external_project_id, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Transport Key(s) retrieval')) @controllers.enforce_rbac('transport_keys:get') def on_get(self, external_project_id, **kw): LOG.debug('Start transport_keys on_get') plugin_name = kw.get('plugin_name', None) if plugin_name is not None: plugin_name = parse.unquote_plus(plugin_name) result = self.repo.get_by_create_date( plugin_name=plugin_name, offset_arg=kw.get('offset', 0), limit_arg=kw.get('limit', None), suppress_exception=True ) transport_keys, offset, limit, total = result if not transport_keys: transport_keys_resp_overall = {'transport_keys': [], 'total': total} else: transport_keys_resp = [ hrefs.convert_transport_key_to_href(s.id) for s in transport_keys ] transport_keys_resp_overall = hrefs.add_nav_hrefs( 'transport_keys', offset, limit, total, {'transport_keys': transport_keys_resp} ) transport_keys_resp_overall.update({'total': total}) return transport_keys_resp_overall @index.when(method='POST', template='json') @controllers.handle_exceptions(u._('Transport Key Creation')) @controllers.enforce_rbac('transport_keys:post') @controllers.enforce_content_types(['application/json']) def on_post(self, external_project_id, **kwargs): LOG.debug('Start transport_keys on_post') # TODO(alee) POST should determine the plugin name and call the # relevant get_transport_key() call. We will implement this once # we figure out how the plugins will be enumerated. 
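# A hypothetical request body accepted by the validator below; both
# fields feed the models.TransportKey constructor:
#     {"plugin_name": "example_plugin",
#      "transport_key": "-----BEGIN CERTIFICATE-----..."}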
data = api.load_body(pecan.request, validator=self.validator) new_key = models.TransportKey(data.get('plugin_name'), data.get('transport_key')) self.repo.create_from(new_key) url = hrefs.convert_transport_key_to_href(new_key.id) LOG.debug('URI to transport key is %s', url) pecan.response.status = 201 pecan.response.headers['Location'] = url return {'transport_key_ref': url} barbican-6.0.1/barbican/api/controllers/orders.py0000666000175000017500000001704713311733072022044 0ustar zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pecan from barbican import api from barbican.api import controllers from barbican.common import hrefs from barbican.common import quota from barbican.common import resources as res from barbican.common import utils from barbican.common import validators from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repo from barbican.queue import client as async_client LOG = utils.getLogger(__name__) _DEPRECATION_MSG = '%s has been deprecated in the Newton release. ' \ 'It will be removed in the Pike release.' def _order_not_found(): """Throw exception indicating order not found.""" pecan.abort(404, u._('Not Found. Sorry but your order is in ' 'another castle.')) def _secret_not_in_order(): """Throw exception that secret info is not available in the order.""" pecan.abort(400, u._("Secret metadata expected but not received.")) def _order_update_not_supported(): """Throw exception that PUT operation is not supported for orders.""" pecan.abort(405, u._("Order update is not supported.")) def _order_cannot_be_updated_if_not_pending(order_status): """Throw exception that order cannot be updated if not PENDING.""" pecan.abort(400, u._("Only PENDING orders can be updated. 
Order is in the" "{0} state.").format(order_status)) def order_cannot_modify_order_type(): """Throw exception that order type cannot be modified.""" pecan.abort(400, u._("Cannot modify order type.")) class OrderController(controllers.ACLMixin): """Handles Order retrieval and deletion requests.""" def __init__(self, order, queue_resource=None): self.order = order self.order_repo = repo.get_order_repository() self.queue = queue_resource or async_client.TaskClient() self.type_order_validator = validators.TypeOrderValidator() @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Order retrieval')) @controllers.enforce_rbac('order:get') def on_get(self, external_project_id): return hrefs.convert_to_hrefs(self.order.to_dict_fields()) @index.when(method='DELETE') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Order deletion')) @controllers.enforce_rbac('order:delete') def on_delete(self, external_project_id, **kwargs): self.order_repo.delete_entity_by_id( entity_id=self.order.id, external_project_id=external_project_id) class OrdersController(controllers.ACLMixin): """Handles Order requests for Secret creation.""" def __init__(self, queue_resource=None): LOG.debug('Creating OrdersController') self.order_repo = repo.get_order_repository() self.queue = queue_resource or async_client.TaskClient() self.type_order_validator = validators.TypeOrderValidator() self.quota_enforcer = quota.QuotaEnforcer('orders', self.order_repo) @pecan.expose() def _lookup(self, order_id, *remainder): # NOTE(jaosorior): It's worth noting that even though this section # actually does a lookup in the database regardless of the RBAC policy # check, the execution only gets here if authentication of the user was # previously successful. 
ctx = controllers._get_barbican_context(pecan.request) order = self.order_repo.get(entity_id=order_id, external_project_id=ctx.project, suppress_exception=True) if not order: _order_not_found() return OrderController(order, self.order_repo), remainder @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Order(s) retrieval')) @controllers.enforce_rbac('orders:get') def on_get(self, external_project_id, **kw): LOG.debug('Start orders on_get ' 'for project-ID %s:', external_project_id) result = self.order_repo.get_by_create_date( external_project_id, offset_arg=kw.get('offset', 0), limit_arg=kw.get('limit', None), meta_arg=kw.get('meta', None), suppress_exception=True) orders, offset, limit, total = result if not orders: orders_resp_overall = {'orders': [], 'total': total} else: orders_resp = [ hrefs.convert_to_hrefs(o.to_dict_fields()) for o in orders ] orders_resp_overall = hrefs.add_nav_hrefs('orders', offset, limit, total, {'orders': orders_resp}) orders_resp_overall.update({'total': total}) return orders_resp_overall @index.when(method='PUT', template='json') @controllers.handle_exceptions(u._('Order update')) @controllers.enforce_rbac('orders:put') def on_put(self, external_project_id, **kwargs): _order_update_not_supported() @index.when(method='POST', template='json') @controllers.handle_exceptions(u._('Order creation')) @controllers.enforce_rbac('orders:post') @controllers.enforce_content_types(['application/json']) def on_post(self, external_project_id, **kwargs): project = res.get_or_create_project(external_project_id) body = api.load_body(pecan.request, validator=self.type_order_validator) order_type = body.get('type') order_meta = body.get('meta') request_type = order_meta.get('request_type') LOG.debug('Processing order type %(order_type)s,' ' request type %(request_type)s' % {'order_type': order_type, 'request_type': request_type}) self.quota_enforcer.enforce(project) new_order = models.Order() new_order.meta = body.get('meta') new_order.type = order_type new_order.project_id = project.id request_id = None ctxt = controllers._get_barbican_context(pecan.request) if ctxt: new_order.creator_id = ctxt.user request_id = ctxt.request_id self.order_repo.create_from(new_order) # Grab our id before commit due to obj expiration from sqlalchemy order_id = new_order.id # Force commit to avoid async issues with the workers repo.commit() self.queue.process_type_order(order_id=order_id, project_id=external_project_id, request_id=request_id) url = hrefs.convert_order_to_href(order_id) pecan.response.status = 202 pecan.response.headers['Location'] = url return {'order_ref': url} barbican-6.0.1/barbican/api/controllers/consumers.py0000666000175000017500000002040413311733060022550 0ustar zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
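# Consumer records managed below are keyed by a service name plus a
# callback URL; a hypothetical payload, valid for both on_post and
# on_delete:
#     {"name": "heat", "URL": "https://orchestration.example/stacks/1"}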
import pecan from barbican import api from barbican.api import controllers from barbican.common import exception from barbican.common import hrefs from barbican.common import quota from barbican.common import resources as res from barbican.common import utils from barbican.common import validators from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repo LOG = utils.getLogger(__name__) def _consumer_not_found(): """Throw exception indicating consumer not found.""" pecan.abort(404, u._('Not Found. Sorry but your consumer is in ' 'another castle.')) def _consumer_ownership_mismatch(): """Throw exception indicating the user does not own this consumer.""" pecan.abort(403, u._('Not Allowed. Sorry, only the creator of a consumer ' 'can delete it.')) def _invalid_consumer_id(): """Throw exception indicating consumer id is invalid.""" pecan.abort(404, u._('Not Found. Provided consumer id is invalid.')) class ContainerConsumerController(controllers.ACLMixin): """Handles Consumer entity retrieval and deletion requests.""" def __init__(self, consumer_id): self.consumer_id = consumer_id self.consumer_repo = repo.get_container_consumer_repository() self.validator = validators.ContainerConsumerValidator() @pecan.expose(generic=True) def index(self): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('ContainerConsumer retrieval')) @controllers.enforce_rbac('consumer:get') def on_get(self, external_project_id): consumer = self.consumer_repo.get( entity_id=self.consumer_id, suppress_exception=True) if not consumer: _consumer_not_found() dict_fields = consumer.to_dict_fields() LOG.info('Retrieved a consumer for project: %s', external_project_id) return hrefs.convert_to_hrefs( hrefs.convert_to_hrefs(dict_fields) ) class ContainerConsumersController(controllers.ACLMixin): """Handles Consumer creation requests.""" def __init__(self, container_id): self.container_id = container_id self.consumer_repo = repo.get_container_consumer_repository() self.container_repo = repo.get_container_repository() self.project_repo = repo.get_project_repository() self.validator = validators.ContainerConsumerValidator() self.quota_enforcer = quota.QuotaEnforcer('consumers', self.consumer_repo) @pecan.expose() def _lookup(self, consumer_id, *remainder): if not utils.validate_id_is_uuid(consumer_id): _invalid_consumer_id()() return ContainerConsumerController(consumer_id), remainder @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('ContainerConsumers(s) retrieval')) @controllers.enforce_rbac('consumers:get') def on_get(self, external_project_id, **kw): LOG.debug('Start consumers on_get ' 'for container-ID %s:', self.container_id) result = self.consumer_repo.get_by_container_id( self.container_id, offset_arg=kw.get('offset', 0), limit_arg=kw.get('limit'), suppress_exception=True ) consumers, offset, limit, total = result if not consumers: resp_ctrs_overall = {'consumers': [], 'total': total} else: resp_ctrs = [ hrefs.convert_to_hrefs(c.to_dict_fields()) for c in consumers ] consumer_path = "containers/{container_id}/consumers".format( container_id=self.container_id) resp_ctrs_overall = hrefs.add_nav_hrefs( consumer_path, offset, limit, total, {'consumers': resp_ctrs} ) resp_ctrs_overall.update({'total': total}) LOG.info('Retrieved a consumer list for 
project: %s', external_project_id) return resp_ctrs_overall @index.when(method='POST', template='json') @controllers.handle_exceptions(u._('ContainerConsumer creation')) @controllers.enforce_rbac('consumers:post') @controllers.enforce_content_types(['application/json']) def on_post(self, external_project_id, **kwargs): project = res.get_or_create_project(external_project_id) data = api.load_body(pecan.request, validator=self.validator) LOG.debug('Start on_post...%s', data) container = self._get_container(self.container_id) self.quota_enforcer.enforce(project) new_consumer = models.ContainerConsumerMetadatum(self.container_id, project.id, data) self.consumer_repo.create_or_update_from(new_consumer, container) url = hrefs.convert_consumer_to_href(new_consumer.container_id) pecan.response.headers['Location'] = url LOG.info('Created a consumer for project: %s', external_project_id) return self._return_container_data(self.container_id) @index.when(method='DELETE', template='json') @controllers.handle_exceptions(u._('ContainerConsumer deletion')) @controllers.enforce_rbac('consumers:delete') @controllers.enforce_content_types(['application/json']) def on_delete(self, external_project_id, **kwargs): data = api.load_body(pecan.request, validator=self.validator) LOG.debug('Start on_delete...%s', data) project = self.project_repo.find_by_external_project_id( external_project_id, suppress_exception=True) if not project: _consumer_not_found() consumer = self.consumer_repo.get_by_values( self.container_id, data["name"], data["URL"], suppress_exception=True ) if not consumer: _consumer_not_found() LOG.debug("Found consumer: %s", consumer) container = self._get_container(self.container_id) owner_of_consumer = consumer.project_id == project.id owner_of_container = container.project.external_id \ == external_project_id if not owner_of_consumer and not owner_of_container: _consumer_ownership_mismatch() try: self.consumer_repo.delete_entity_by_id(consumer.id, external_project_id) except exception.NotFound: LOG.exception('Problem deleting consumer') _consumer_not_found() ret_data = self._return_container_data(self.container_id) LOG.info('Deleted a consumer for project: %s', external_project_id) return ret_data def _get_container(self, container_id): container = self.container_repo.get_container_by_id( container_id, suppress_exception=True) if not container: controllers.containers.container_not_found() return container def _return_container_data(self, container_id): container = self._get_container(container_id) dict_fields = container.to_dict_fields() for secret_ref in dict_fields['secret_refs']: hrefs.convert_to_hrefs(secret_ref) # TODO(john-wood-w) Why two calls to convert_to_hrefs()? return hrefs.convert_to_hrefs( hrefs.convert_to_hrefs(dict_fields) ) barbican-6.0.1/barbican/api/controllers/secretstores.py0000666000175000017500000001776613311733060023300 0ustar zuulzuul00000000000000# (c) Copyright 2015-2016 Hewlett Packard Enterprise Development LP # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
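# A secret store entry, once passed through
# convert_secret_store_to_response_format() below, looks roughly like
# this (values are hypothetical):
#     {"name": "Software Only Crypto", "global_default": true,
#      "secret_store_plugin": "store_crypto",
#      "secret_store_ref": "https://<host>/v1/secret-stores/<uuid>"}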
import pecan from barbican.api import controllers from barbican.common import hrefs from barbican.common import resources as res from barbican.common import utils from barbican import i18n as u from barbican.model import repositories as repo from barbican.plugin.util import multiple_backends LOG = utils.getLogger(__name__) def _secret_store_not_found(): """Throw exception indicating secret store not found.""" pecan.abort(404, u._('Not Found. Secret store not found.')) def _preferred_secret_store_not_found(): """Throw exception indicating preferred secret store not found.""" pecan.abort(404, u._('Not Found. No preferred secret store defined for ' 'this project.')) def _multiple_backends_not_enabled(): """Throw exception indicating multiple backends support is not enabled.""" pecan.abort(404, u._('Not Found. Multiple backends support is not enabled ' 'in service configuration.')) def convert_secret_store_to_response_format(secret_store): data = secret_store.to_dict_fields() data['secret_store_plugin'] = data.pop('store_plugin') data['secret_store_ref'] = hrefs.convert_secret_stores_to_href( data['secret_store_id']) # no need to pass store id as secret_store_ref is returned data.pop('secret_store_id', None) return data class PreferredSecretStoreController(controllers.ACLMixin): """Handles preferred secret store set/removal requests.""" def __init__(self, secret_store): LOG.debug('=== Creating PreferredSecretStoreController ===') self.secret_store = secret_store self.proj_store_repo = repo.get_project_secret_store_repository() @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='DELETE', template='json') @controllers.handle_exceptions(u._('Removing preferred secret store')) @controllers.enforce_rbac('secretstore_preferred:delete') def on_delete(self, external_project_id, **kw): LOG.debug('Start: Remove project preferred secret-store for store' ' id %s', self.secret_store.id) project = res.get_or_create_project(external_project_id) project_store = self.proj_store_repo.get_secret_store_for_project( project.id, None, suppress_exception=True) if project_store is None: _preferred_secret_store_not_found() self.proj_store_repo.delete_entity_by_id( entity_id=project_store.id, external_project_id=external_project_id) pecan.response.status = 204 @index.when(method='POST', template='json') @controllers.handle_exceptions(u._('Setting preferred secret store')) @controllers.enforce_rbac('secretstore_preferred:post') def on_post(self, external_project_id, **kwargs): LOG.debug('Start: Set project preferred secret-store for store ' 'id %s', self.secret_store.id) project = res.get_or_create_project(external_project_id) self.proj_store_repo.create_or_update_for_project(project.id, self.secret_store.id) pecan.response.status = 204 class SecretStoreController(controllers.ACLMixin): """Handles secret store retrieval requests.""" def __init__(self, secret_store): LOG.debug('=== Creating SecretStoreController ===') self.secret_store = secret_store @pecan.expose() def _lookup(self, action, *remainder): if (action == 'preferred'): return PreferredSecretStoreController(self.secret_store), remainder else: pecan.abort(405) @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Secret store retrieval')) @controllers.enforce_rbac('secretstore:get') def on_get(self, external_project_id): LOG.debug("== Getting 
secret store for %s", self.secret_store.id) return convert_secret_store_to_response_format(self.secret_store) class SecretStoresController(controllers.ACLMixin): """Handles secret-stores list requests.""" def __init__(self): LOG.debug('Creating SecretStoresController') self.secret_stores_repo = repo.get_secret_stores_repository() self.proj_store_repo = repo.get_project_secret_store_repository() def __getattr__(self, name): route_table = { 'global-default': self.get_global_default, 'preferred': self.get_preferred, } if name in route_table: return route_table[name] raise AttributeError @pecan.expose() def _lookup(self, secret_store_id, *remainder): if not utils.is_multiple_backends_enabled(): _multiple_backends_not_enabled() secret_store = self.secret_stores_repo.get(entity_id=secret_store_id, suppress_exception=True) if not secret_store: _secret_store_not_found() return SecretStoreController(secret_store), remainder @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('List available secret stores')) @controllers.enforce_rbac('secretstores:get') def on_get(self, external_project_id, **kw): LOG.debug('Start SecretStoresController on_get: listing secret ' 'stores') if not utils.is_multiple_backends_enabled(): _multiple_backends_not_enabled() res.get_or_create_project(external_project_id) secret_stores = self.secret_stores_repo.get_all() resp_list = [] for store in secret_stores: item = convert_secret_store_to_response_format(store) resp_list.append(item) resp = {'secret_stores': resp_list} return resp @pecan.expose(generic=True, template='json') @controllers.handle_exceptions(u._('Retrieve global default secret store')) @controllers.enforce_rbac('secretstores:get_global_default') def get_global_default(self, external_project_id, **kw): LOG.debug('Start secret-stores get global default secret store') if not utils.is_multiple_backends_enabled(): _multiple_backends_not_enabled() res.get_or_create_project(external_project_id) store = multiple_backends.get_global_default_secret_store() return convert_secret_store_to_response_format(store) @pecan.expose(generic=True, template='json') @controllers.handle_exceptions(u._('Retrieve project preferred store')) @controllers.enforce_rbac('secretstores:get_preferred') def get_preferred(self, external_project_id, **kw): LOG.debug('Start secret-stores get preferred secret store') if not utils.is_multiple_backends_enabled(): _multiple_backends_not_enabled() project = res.get_or_create_project(external_project_id) project_store = self.proj_store_repo.get_secret_store_for_project( project.id, None, suppress_exception=True) if project_store is None: _preferred_secret_store_not_found() return convert_secret_store_to_response_format( project_store.secret_store) barbican-6.0.1/barbican/api/controllers/secrets.py0000666000175000017500000004277613311733072022225 0ustar zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
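# Date filters parsed by _is_valid_date_filter() below combine
# comma-separated bounds, e.g. a hypothetical query:
#     GET /v1/secrets?created=gte:2018-01-01T00:00:00Z,lt:2018-02-01T00:00:00Z
# Repeating a bound (say, two 'gt:' values) fails validation, which
# on_get() turns into a 400 via _bad_query_string_parameters(). Sort
# strings use key:direction pairs, e.g. sort=created:desc,name.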
from oslo_utils import timeutils import pecan from six.moves.urllib import parse from barbican import api from barbican.api import controllers from barbican.api.controllers import acls from barbican.api.controllers import secretmeta from barbican.common import exception from barbican.common import hrefs from barbican.common import quota from barbican.common import resources as res from barbican.common import utils from barbican.common import validators from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repo from barbican.plugin import resources as plugin from barbican.plugin import util as putil LOG = utils.getLogger(__name__) def _secret_not_found(): """Throw exception indicating secret not found.""" pecan.abort(404, u._('Not Found. Sorry but your secret is in ' 'another castle.')) def _invalid_secret_id(): """Throw exception indicating secret id is invalid.""" pecan.abort(404, u._('Not Found. Provided secret id is invalid.')) def _secret_payload_not_found(): """Throw exception indicating secret's payload is not found.""" pecan.abort(404, u._('Not Found. Sorry but your secret has no payload.')) def _secret_already_has_data(): """Throw exception that the secret already has data.""" pecan.abort(409, u._("Secret already has data, cannot modify it.")) def _bad_query_string_parameters(): pecan.abort(400, u._("URI provided invalid query string parameters.")) def _request_has_twsk_but_no_transport_key_id(): """Throw exception for bad wrapping parameters. Throw exception if transport key wrapped session key has been provided, but the transport key id has not. """ pecan.abort(400, u._('Transport key wrapped session key has been ' 'provided to wrap secrets for retrieval, but the ' 'transport key id has not been provided.')) class SecretController(controllers.ACLMixin): """Handles Secret retrieval and deletion requests.""" def __init__(self, secret): LOG.debug('=== Creating SecretController ===') self.secret = secret self.transport_key_repo = repo.get_transport_key_repository() def get_acl_tuple(self, req, **kwargs): d = self.get_acl_dict_for_user(req, self.secret.secret_acls) d['project_id'] = self.secret.project.external_id d['creator_id'] = self.secret.creator_id return 'secret', d @pecan.expose() def _lookup(self, sub_resource, *remainder): if sub_resource == 'acl': return acls.SecretACLsController(self.secret), remainder elif sub_resource == 'metadata': if len(remainder) == 0 or remainder == ('',): return secretmeta.SecretMetadataController(self.secret), \ remainder else: request_method = pecan.request.method allowed_methods = ['GET', 'PUT', 'DELETE'] if request_method in allowed_methods: return secretmeta.SecretMetadatumController(self.secret), \ remainder else: # methods cannot be handled at controller level pecan.abort(405) else: # only 'acl' and 'metadata' as sub-resource is supported pecan.abort(405) @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Secret retrieval')) @controllers.enforce_rbac('secret:get') def on_get(self, external_project_id, **kwargs): if controllers.is_json_request_accept(pecan.request): resp = self._on_get_secret_metadata(self.secret, **kwargs) LOG.info('Retrieved secret metadata for project: %s', external_project_id) return resp else: LOG.warning('Decrypted secret %s requested using deprecated ' 'API call.', self.secret.id) return 
self._on_get_secret_payload(self.secret, external_project_id, **kwargs) def _on_get_secret_metadata(self, secret, **kwargs): """GET Metadata-only for a secret.""" pecan.override_template('json', 'application/json') secret_fields = putil.mime_types.augment_fields_with_content_types( secret) transport_key_id = self._get_transport_key_id_if_needed( kwargs.get('transport_key_needed'), secret) if transport_key_id: secret_fields['transport_key_id'] = transport_key_id return hrefs.convert_to_hrefs(secret_fields) def _get_transport_key_id_if_needed(self, transport_key_needed, secret): if transport_key_needed and transport_key_needed.lower() == 'true': return plugin.get_transport_key_id_for_retrieval(secret) return None def _on_get_secret_payload(self, secret, external_project_id, **kwargs): """GET actual payload containing the secret.""" # With ACL support, the user token project does not have to be same as # project associated with secret. The lookup project_id needs to be # derived from the secret's data considering authorization is already # done. external_project_id = secret.project.external_id project = res.get_or_create_project(external_project_id) # default to application/octet-stream if there is no Accept header accept_header = getattr(pecan.request.accept, 'header_value', 'application/octet-stream') pecan.override_template('', accept_header) # check if payload exists before proceeding if not secret.encrypted_data and not secret.secret_store_metadata: _secret_payload_not_found() twsk = kwargs.get('trans_wrapped_session_key', None) transport_key = None if twsk: transport_key = self._get_transport_key( kwargs.get('transport_key_id', None)) return plugin.get_secret(accept_header, secret, project, twsk, transport_key) def _get_transport_key(self, transport_key_id): if transport_key_id is None: _request_has_twsk_but_no_transport_key_id() transport_key_model = self.transport_key_repo.get( entity_id=transport_key_id, suppress_exception=True) return transport_key_model.transport_key @pecan.expose() @utils.allow_all_content_types @controllers.handle_exceptions(u._('Secret payload retrieval')) @controllers.enforce_rbac('secret:decrypt') def payload(self, external_project_id, **kwargs): if pecan.request.method != 'GET': pecan.abort(405) resp = self._on_get_secret_payload(self.secret, external_project_id, **kwargs) LOG.info('Retrieved secret payload for project: %s', external_project_id) return resp @index.when(method='PUT') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Secret update')) @controllers.enforce_rbac('secret:put') @controllers.enforce_content_types(['application/octet-stream', 'text/plain']) def on_put(self, external_project_id, **kwargs): if (not pecan.request.content_type or pecan.request.content_type == 'application/json'): pecan.abort( 415, u._("Content-Type of '{content_type}' is not supported for " "PUT.").format(content_type=pecan.request.content_type) ) transport_key_id = kwargs.get('transport_key_id') payload = pecan.request.body if not payload: raise exception.NoDataToProcess() if validators.secret_too_big(payload): raise exception.LimitExceeded() if self.secret.encrypted_data or self.secret.secret_store_metadata: _secret_already_has_data() project_model = res.get_or_create_project(external_project_id) content_type = pecan.request.content_type content_encoding = pecan.request.headers.get('Content-Encoding') plugin.store_secret( unencrypted_raw=payload, content_type_raw=content_type, content_encoding=content_encoding, secret_model=self.secret, 
    @index.when(method='PUT')
    @utils.allow_all_content_types
    @controllers.handle_exceptions(u._('Secret update'))
    @controllers.enforce_rbac('secret:put')
    @controllers.enforce_content_types(['application/octet-stream',
                                        'text/plain'])
    def on_put(self, external_project_id, **kwargs):
        if (not pecan.request.content_type or
                pecan.request.content_type == 'application/json'):
            pecan.abort(
                415,
                u._("Content-Type of '{content_type}' is not supported for "
                    "PUT.").format(content_type=pecan.request.content_type)
            )

        transport_key_id = kwargs.get('transport_key_id')

        payload = pecan.request.body
        if not payload:
            raise exception.NoDataToProcess()
        if validators.secret_too_big(payload):
            raise exception.LimitExceeded()

        if self.secret.encrypted_data or self.secret.secret_store_metadata:
            _secret_already_has_data()

        project_model = res.get_or_create_project(external_project_id)
        content_type = pecan.request.content_type
        content_encoding = pecan.request.headers.get('Content-Encoding')

        plugin.store_secret(
            unencrypted_raw=payload,
            content_type_raw=content_type,
            content_encoding=content_encoding,
            secret_model=self.secret,
            project_model=project_model,
            transport_key_id=transport_key_id)
        LOG.info('Updated secret for project: %s', external_project_id)

    @index.when(method='DELETE')
    @utils.allow_all_content_types
    @controllers.handle_exceptions(u._('Secret deletion'))
    @controllers.enforce_rbac('secret:delete')
    def on_delete(self, external_project_id, **kwargs):
        plugin.delete_secret(self.secret, external_project_id)
        LOG.info('Deleted secret for project: %s', external_project_id)


class SecretsController(controllers.ACLMixin):
    """Handles Secret creation requests."""

    def __init__(self):
        LOG.debug('Creating SecretsController')
        self.validator = validators.NewSecretValidator()
        self.secret_repo = repo.get_secret_repository()
        self.quota_enforcer = quota.QuotaEnforcer('secrets', self.secret_repo)

    def _is_valid_date_filter(self, date_filter):
        filters = date_filter.split(',')
        sorted_filters = dict()
        try:
            for filter in filters:
                if filter.startswith('gt:'):
                    if sorted_filters.get('gt') or sorted_filters.get('gte'):
                        return False
                    sorted_filters['gt'] = timeutils.parse_isotime(filter[3:])
                elif filter.startswith('gte:'):
                    if sorted_filters.get('gt') or sorted_filters.get(
                            'gte') or sorted_filters.get('eq'):
                        return False
                    sorted_filters['gte'] = timeutils.parse_isotime(filter[4:])
                elif filter.startswith('lt:'):
                    if sorted_filters.get('lt') or sorted_filters.get('lte'):
                        return False
                    sorted_filters['lt'] = timeutils.parse_isotime(filter[3:])
                elif filter.startswith('lte:'):
                    if sorted_filters.get('lt') or sorted_filters.get(
                            'lte') or sorted_filters.get('eq'):
                        return False
                    sorted_filters['lte'] = timeutils.parse_isotime(filter[4:])
                elif sorted_filters.get('eq') or sorted_filters.get(
                        'gte') or sorted_filters.get('lte'):
                    return False
                else:
                    sorted_filters['eq'] = timeutils.parse_isotime(filter)
        except ValueError:
            return False
        return True

    def _is_valid_sorting(self, sorting):
        allowed_keys = ['algorithm', 'bit_length', 'created', 'expiration',
                        'mode', 'name', 'secret_type', 'status', 'updated']
        allowed_directions = ['asc', 'desc']
        sorted_keys = dict()
        for sort in sorting.split(','):
            if ':' in sort:
                try:
                    key, direction = sort.split(':')
                except ValueError:
                    return False
            else:
                key, direction = sort, 'asc'
            if key not in allowed_keys or direction not in allowed_directions:
                return False
            if sorted_keys.get(key):
                return False
            else:
                sorted_keys[key] = direction
        return True
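    # Illustrative query strings accepted by the two validators above:
    #
    #     ?created=gt:2018-01-01T00:00:00            single bound
    #     ?created=gte:2018-01-01T00:00:00,lte:2018-06-30T00:00:00
    #                                                inclusive range
    #     ?created=2018-01-01T00:00:00               exact match (no operator)
    #     ?sort=name:asc,created:desc                'asc' is the default
    #
    # Repeating an operator, mixing 'gt:' with 'gte:' or 'lt:' with 'lte:',
    # combining an exact match with 'gte:'/'lte:', or repeating a sort key
    # fails validation, and on_get below answers with a 400 via
    # _bad_query_string_parameters().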
    @pecan.expose()
    def _lookup(self, secret_id, *remainder):
        # NOTE(jaosorior): It's worth noting that even though this section
        # actually does a lookup in the database regardless of the RBAC
        # policy check, the execution only gets here if authentication of
        # the user was previously successful.

        if not utils.validate_id_is_uuid(secret_id):
            _invalid_secret_id()
        secret = self.secret_repo.get_secret_by_id(
            entity_id=secret_id, suppress_exception=True)
        if not secret:
            _secret_not_found()

        return SecretController(secret), remainder

    @pecan.expose(generic=True)
    def index(self, **kwargs):
        pecan.abort(405)  # HTTP 405 Method Not Allowed as default

    @index.when(method='GET', template='json')
    @controllers.handle_exceptions(u._('Secret(s) retrieval'))
    @controllers.enforce_rbac('secrets:get')
    def on_get(self, external_project_id, **kw):
        def secret_fields(field):
            return putil.mime_types.augment_fields_with_content_types(field)

        LOG.debug('Start secrets on_get '
                  'for project-ID %s:', external_project_id)

        name = kw.get('name', '')
        if name:
            name = parse.unquote_plus(name)

        bits = kw.get('bits', 0)
        try:
            bits = int(bits)
        except ValueError:
            # as per GitHub issue 171, if bits is invalid then
            # the default should be used.
            bits = 0

        for date_filter in 'created', 'updated', 'expiration':
            if kw.get(date_filter) and not self._is_valid_date_filter(
                    kw.get(date_filter)):
                _bad_query_string_parameters()
        if kw.get('sort') and not self._is_valid_sorting(kw.get('sort')):
            _bad_query_string_parameters()

        ctxt = controllers._get_barbican_context(pecan.request)
        user_id = None
        if ctxt:
            user_id = ctxt.user

        result = self.secret_repo.get_secret_list(
            external_project_id,
            offset_arg=kw.get('offset', 0),
            limit_arg=kw.get('limit'),
            name=name,
            alg=kw.get('alg'),
            mode=kw.get('mode'),
            bits=bits,
            secret_type=kw.get('secret_type'),
            suppress_exception=True,
            acl_only=kw.get('acl_only'),
            user_id=user_id,
            created=kw.get('created'),
            updated=kw.get('updated'),
            expiration=kw.get('expiration'),
            sort=kw.get('sort')
        )

        secrets, offset, limit, total = result

        if not secrets:
            secrets_resp_overall = {'secrets': [],
                                    'total': total}
        else:
            secrets_resp = [
                hrefs.convert_to_hrefs(secret_fields(s))
                for s in secrets
            ]
            secrets_resp_overall = hrefs.add_nav_hrefs(
                'secrets', offset, limit, total,
                {'secrets': secrets_resp}
            )
            secrets_resp_overall.update({'total': total})

        LOG.info('Retrieved secret list for project: %s',
                 external_project_id)
        return secrets_resp_overall

    @index.when(method='POST', template='json')
    @controllers.handle_exceptions(u._('Secret creation'))
    @controllers.enforce_rbac('secrets:post')
    @controllers.enforce_content_types(['application/json'])
    def on_post(self, external_project_id, **kwargs):
        LOG.debug('Start on_post for project-ID %s:...',
                  external_project_id)

        data = api.load_body(pecan.request, validator=self.validator)
        project = res.get_or_create_project(external_project_id)

        self.quota_enforcer.enforce(project)

        transport_key_needed = data.get('transport_key_needed',
                                        'false').lower() == 'true'
        ctxt = controllers._get_barbican_context(pecan.request)
        if ctxt:  # in authenticated pipeline case, always use auth token user
            data['creator_id'] = ctxt.user

        secret_model = models.Secret(data)

        new_secret, transport_key_model = plugin.store_secret(
            unencrypted_raw=data.get('payload'),
            content_type_raw=data.get('payload_content_type',
                                      'application/octet-stream'),
            content_encoding=data.get('payload_content_encoding'),
            secret_model=secret_model,
            project_model=project,
            transport_key_needed=transport_key_needed,
            transport_key_id=data.get('transport_key_id'))

        url = hrefs.convert_secret_to_href(new_secret.id)
        LOG.debug('URI to secret is %s', url)

        pecan.response.status = 201
        pecan.response.headers['Location'] = url

        LOG.info('Created a secret for project: %s',
                 external_project_id)
        if transport_key_model is not None:
            tkey_url = hrefs.convert_transport_key_to_href(
                transport_key_model.id)
            return {'secret_ref': url, 'transport_key_ref': tkey_url}
        else:
            return {'secret_ref': url}
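# ---------------------------------------------------------------------------
# Editor's usage sketch (illustrative only, not part of the service code):
# exercising the controllers above over HTTP. This assumes a Barbican
# endpoint at http://localhost:9311, a valid Keystone token (placeholder
# below), and the third-party 'requests' library -- all assumptions, not
# shipped defaults.
if __name__ == '__main__':
    import json

    import requests

    token = '...'  # placeholder Keystone token
    base = 'http://localhost:9311/v1'
    headers = {'X-Auth-Token': token, 'Content-Type': 'application/json'}

    # SecretsController.on_post: create a secret; the response body carries
    # {'secret_ref': <URL>} and the Location header points at the new secret.
    resp = requests.post('%s/secrets' % base, headers=headers,
                         data=json.dumps({'name': 'example',
                                          'payload': 'secret data',
                                          'payload_content_type':
                                              'text/plain'}))
    secret_ref = resp.json()['secret_ref']

    # SecretController.on_get with a JSON Accept header returns metadata
    # only; the 'payload' sub-resource returns the decrypted bytes.
    meta = requests.get(secret_ref, headers={'X-Auth-Token': token,
                                             'Accept': 'application/json'})
    data = requests.get(secret_ref + '/payload',
                        headers={'X-Auth-Token': token,
                                 'Accept': 'text/plain'})
    print(meta.json(), data.text)
# ---------------------------------------------------------------------------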
barbican-6.0.1/barbican/cmd/0000775000175000017500000000000013311733364015611 5ustar zuulzuul00000000000000
barbican-6.0.1/barbican/cmd/db_manage.py0000666000175000017500000001623213311733060020057 0ustar zuulzuul00000000000000
#!/usr/bin/env python
# Copyright 2010-2015 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import os
import sys

sys.path.insert(0, os.getcwd())

from barbican.common import config
from barbican.model import clean
from barbican.model.migration import commands
from oslo_log import log

# Import and configure logging.
CONF = config.CONF
log.setup(CONF, 'barbican')
LOG = log.getLogger(__name__)


class DatabaseManager(object):
    """Database Manager class.

    Builds and executes a CLI parser to manage the Barbican database.
    This extends the Alembic commands.
    """

    def __init__(self, conf):
        self.conf = conf
        self.parser = self.get_main_parser()
        self.subparsers = self.parser.add_subparsers(
            title='subcommands',
            description='Action to perform')
        self.add_revision_args()
        self.add_upgrade_args()
        self.add_history_args()
        self.add_current_args()
        self.add_clean_args()

    def get_main_parser(self):
        """Create top-level parser and arguments."""
        parser = argparse.ArgumentParser(description='Barbican DB manager.')
        parser.add_argument('--dburl', '-d', default=self.conf.sql_connection,
                            help='URL to the database.')

        return parser

    def add_revision_args(self):
        """Create 'revision' command parser and arguments."""
        create_parser = self.subparsers.add_parser(
            'revision', help='Create a new DB version file.')
        create_parser.add_argument('--message', '-m', default='DB change',
                                   help='the message for the DB change')
        create_parser.add_argument('--autogenerate',
                                   help='autogenerate from models',
                                   action='store_true')
        create_parser.set_defaults(func=self.revision)

    def add_upgrade_args(self):
        """Create 'upgrade' command parser and arguments."""
        create_parser = self.subparsers.add_parser(
            'upgrade', help='Upgrade to a future DB version file')
        create_parser.add_argument('--version', '-v', default='head',
                                   help='the version to upgrade to, or else '
                                        'the latest/head if not specified.')
        create_parser.set_defaults(func=self.upgrade)

    def add_history_args(self):
        """Create 'history' command parser and arguments."""
        create_parser = self.subparsers.add_parser(
            'history',
            help='List changeset scripts in chronological order.')
        create_parser.add_argument('--verbose', '-V', action="store_true",
                                   help='Show full information about the '
                                        'revisions.')
        create_parser.set_defaults(func=self.history)

    def add_current_args(self):
        """Create 'current' command parser and arguments."""
        create_parser = self.subparsers.add_parser(
            'current',
            help='Display the current revision for a database.')
        create_parser.add_argument('--verbose', '-V', action="store_true",
                                   help='Show full information about the '
                                        'revision.')
        create_parser.set_defaults(func=self.current)
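    # Typical invocations (illustrative; assumes the 'barbican-db-manage'
    # console script points at main() below):
    #
    #     barbican-db-manage upgrade -v head
    #     barbican-db-manage revision -m "my change" --autogenerate
    #     barbican-db-manage history -V
    #     barbican-db-manage clean --min-days 30 -p -e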
    def add_clean_args(self):
        """Create 'clean' command parser and arguments."""
        create_parser = self.subparsers.add_parser(
            'clean',
            help='Clean up soft deletions in the database')
        create_parser.add_argument(
            '--min-days', '-m', type=int, default=90,
            help='minimum number of days to keep soft deletions. default is'
                 ' %(default)s days.')
        create_parser.add_argument('--clean-unassociated-projects', '-p',
                                   action="store_true",
                                   help='Remove projects that have no '
                                        'associated resources.')
        create_parser.add_argument('--soft-delete-expired-secrets', '-e',
                                   action="store_true",
                                   help='Soft delete expired secrets.')
        create_parser.add_argument('--verbose', '-V', action='store_true',
                                   help='Show full information about the'
                                        ' cleanup')
        create_parser.add_argument('--log-file', '-L',
                                   default=CONF.log_file,
                                   type=str,
                                   help='Set log file location. '
                                        'Default value for log_file can be '
                                        'found in barbican.conf')
        create_parser.set_defaults(func=self.clean)

    def revision(self, args):
        """Process the 'revision' Alembic command."""
        commands.generate(autogenerate=args.autogenerate,
                          message=args.message,
                          sql_url=args.dburl)

    def upgrade(self, args):
        """Process the 'upgrade' Alembic command."""
        LOG.debug("Performing database schema migration...")
        commands.upgrade(to_version=args.version, sql_url=args.dburl)

    def history(self, args):
        commands.history(args.verbose, sql_url=args.dburl)

    def current(self, args):
        commands.current(args.verbose, sql_url=args.dburl)

    def clean(self, args):
        clean.clean_command(
            sql_url=args.dburl,
            min_num_days=args.min_days,
            do_clean_unassociated_projects=args.clean_unassociated_projects,
            do_soft_delete_expired_secrets=args.soft_delete_expired_secrets,
            verbose=args.verbose,
            log_file=args.log_file)

    def execute(self):
        """Parse the command line arguments."""
        args = self.parser.parse_args()

        # Perform other setup here...

        args.func(args)


def _exception_is_successful_exit(thrown_exception):
    return (isinstance(thrown_exception, SystemExit) and
            (thrown_exception.code is None or thrown_exception.code == 0))


def main():
    try:
        dm = DatabaseManager(CONF)
        dm.execute()
    except Exception as ex:
        if not _exception_is_successful_exit(ex):
            LOG.exception('Problem seen trying to run barbican db manage')
            sys.stderr.write("ERROR: {0}\n".format(ex))
            sys.exit(1)


if __name__ == '__main__':
    main()
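# ---------------------------------------------------------------------------
# Editor's sketch (standalone, illustrative): the sub-command dispatch
# pattern used by DatabaseManager above. Each sub-parser registers its
# handler with set_defaults(func=...), so execute() only has to call
# args.func(args). All names below are hypothetical.
import argparse


def _greet(args):
    print('hello, %s' % args.name)


def _build_parser():
    parser = argparse.ArgumentParser(description='dispatch demo')
    subparsers = parser.add_subparsers(title='subcommands')
    greet = subparsers.add_parser('greet', help='print a greeting')
    greet.add_argument('--name', default='barbican')
    greet.set_defaults(func=_greet)
    return parser


if __name__ == '__main__':
    args = _build_parser().parse_args(['greet', '--name', 'zuul'])
    args.func(args)  # prints: hello, zuul
# ---------------------------------------------------------------------------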
barbican-6.0.1/barbican/cmd/retry_scheduler.py0000666000175000017500000000425213311733072021367 0ustar zuulzuul00000000000000
#!/usr/bin/env python
# Copyright (c) 2015 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Barbican worker server, running a periodic retry/scheduler process.
"""

import eventlet
import os
import sys

# Oslo messaging RPC server uses eventlet.
eventlet.monkey_patch()

# 'Borrowed' from the Glance project:
# If ../barbican/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
                                                os.pardir,
                                                os.pardir))
if os.path.exists(os.path.join(possible_topdir, 'barbican', '__init__.py')):
    sys.path.insert(0, possible_topdir)

from barbican.common import config
from barbican import queue
from barbican.queue import retry_scheduler
from barbican import version

from oslo_log import log
from oslo_service import service


def fail(returncode, e):
    sys.stderr.write("ERROR: {0}\n".format(e))
    sys.exit(returncode)


def main():
    try:
        CONF = config.CONF
        CONF(sys.argv[1:], project='barbican',
             version=version.version_info.version_string)

        # Import and configure logging.
        log.setup(CONF, 'barbican-retry-scheduler')
        LOG = log.getLogger(__name__)
        LOG.debug("Booting up Barbican worker retry/scheduler node...")

        # Queuing initialization (as a client only).
        queue.init(CONF, is_server_side=False)

        service.launch(
            CONF,
            retry_scheduler.PeriodicServer()
        ).wait()
    except RuntimeError as e:
        fail(1, e)


if __name__ == '__main__':
    main()
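# ---------------------------------------------------------------------------
# Editor's sketch (standalone, illustrative): why retry_scheduler.py calls
# eventlet.monkey_patch() before importing anything that might block.
# Patching swaps blocking stdlib primitives for cooperative ones, so modules
# imported afterwards (such as oslo.messaging's RPC server) yield instead of
# blocking. Assumes eventlet is installed.
import eventlet

eventlet.monkey_patch()

import time  # resolves to the green, cooperative version after patching


def _worker(label, delay):
    time.sleep(delay)  # yields to other green threads instead of blocking
    print('%s done' % label)


if __name__ == '__main__':
    threads = [eventlet.spawn(_worker, 'fast', 0.1),
               eventlet.spawn(_worker, 'slow', 0.2)]
    for gt in threads:
        gt.wait()
# ---------------------------------------------------------------------------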
barbican-6.0.1/barbican/cmd/barbican_manage.py0000666000175000017500000003312513311733060021233 0ustar zuulzuul00000000000000
#!/usr/bin/env python
# Copyright 2010-2015 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
CLI interface for barbican management
"""

from __future__ import print_function

import argparse
import sys

from oslo_config import cfg
from oslo_log import log as logging

from barbican.cmd import pkcs11_kek_rewrap as pkcs11_rewrap
from barbican.common import config
from barbican.model import clean
from barbican.model.migration import commands
from barbican.model import sync
from barbican.plugin.crypto import pkcs11
import barbican.version

CONF = cfg.CONF
LOG = logging.getLogger(__name__)


# Decorators for actions
def args(*args, **kwargs):
    def _decorator(func):
        func.__dict__.setdefault('args', []).insert(0, (args, kwargs))
        return func
    return _decorator


class DbCommands(object):
    """Class for managing barbican database"""

    description = "Subcommands for managing barbican database"

    clean_description = "Clean up soft deletions in the database"

    @args('--db-url', '-d', metavar='', dest='dburl',
          help='barbican database URL')
    @args('--min-days', '-m', metavar='', dest='min_days', type=int,
          default=90,
          help='minimum number of days to keep soft deletions. '
               'default is %(default)s days.')
    @args('--verbose', '-V', action='store_true', dest='verbose',
          default=False,
          help='Show verbose information about the clean up.')
    @args('--log-file', '-L', metavar='', type=str, default=None,
          dest='log_file',
          help='Set log file location. '
               'Default value for log_file can be found in barbican.conf')
    @args('--clean-unassociated-projects', '-p', action='store_true',
          dest='do_clean_unassociated_projects', default=False,
          help='Remove projects that have no associated resources.')
    @args('--soft-delete-expired-secrets', '-e', action='store_true',
          dest='do_soft_delete_expired_secrets', default=False,
          help='Soft delete secrets that are expired.')
    def clean(self, dburl=None, min_days=None, verbose=None, log_file=None,
              do_clean_unassociated_projects=None,
              do_soft_delete_expired_secrets=None):
        """Clean soft deletions in the database"""
        if dburl is None:
            dburl = CONF.sql_connection
        if log_file is None:
            log_file = CONF.log_file

        clean.clean_command(
            sql_url=dburl,
            min_num_days=min_days,
            do_clean_unassociated_projects=do_clean_unassociated_projects,
            do_soft_delete_expired_secrets=do_soft_delete_expired_secrets,
            verbose=verbose,
            log_file=log_file)

    revision_description = "Create a new database version file"

    @args('--db-url', '-d', metavar='', dest='dburl',
          help='barbican database URL')
    @args('--message', '-m', metavar='', default='DB change',
          help='the message for the DB change')
    @args('--autogenerate', action="store_true", dest='autogen',
          default=False, help='autogenerate from models')
    def revision(self, dburl=None, message=None, autogen=None):
        """Process the 'revision' Alembic command."""
        if dburl is None:
            commands.generate(autogenerate=autogen, message=str(message),
                              sql_url=CONF.sql_connection)
        else:
            commands.generate(autogenerate=autogen, message=str(message),
                              sql_url=str(dburl))

    upgrade_description = "Upgrade to a future database version"

    @args('--db-url', '-d', metavar='', dest='dburl',
          help='barbican database URL')
    @args('--version', '-v', metavar='', default='head',
          help='the version to upgrade to, or else '
               'the latest/head if not specified.')
    def upgrade(self, dburl=None, version=None):
        """Process the 'upgrade' Alembic command."""
        if dburl is None:
            commands.upgrade(to_version=str(version),
                             sql_url=CONF.sql_connection)
        else:
            commands.upgrade(to_version=str(version), sql_url=str(dburl))

    history_description = "Show database changeset history"

    @args('--db-url', '-d', metavar='', dest='dburl',
          help='barbican database URL')
    @args('--verbose', '-V', action='store_true', dest='verbose',
          default=False, help='Show full information about the revisions.')
    def history(self, dburl=None, verbose=None):
        if dburl is None:
            commands.history(verbose, sql_url=CONF.sql_connection)
        else:
            commands.history(verbose, sql_url=str(dburl))

    current_description = "Show current revision of database"

    @args('--db-url', '-d', metavar='', dest='dburl',
          help='barbican database URL')
    @args('--verbose', '-V', action='store_true', dest='verbose',
          default=False, help='Show full information about the revisions.')
    def current(self, dburl=None, verbose=None):
        if dburl is None:
            commands.current(verbose, sql_url=CONF.sql_connection)
        else:
            commands.current(verbose, sql_url=str(dburl))

    sync_secret_stores_description = "Sync secret_stores with barbican.conf"

    @args('--db-url', '-d', metavar='', dest='dburl',
          help='barbican database URL')
    @args('--verbose', '-V', action='store_true', dest='verbose',
          default=False, help='Show verbose information about the sync.')
    @args('--log-file', '-L', metavar='', type=str, default=None,
          dest='log_file',
          help='Set log file location. '
               'Default value for log_file can be found in barbican.conf')
    def sync_secret_stores(self, dburl=None, verbose=None, log_file=None):
        """Sync secret_stores table with barbican.conf"""
        if dburl is None:
            dburl = CONF.sql_connection
        if log_file is None:
            log_file = CONF.log_file

        sync.sync_secret_stores(
            sql_url=dburl,
            verbose=verbose,
            log_file=log_file)
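# Editor's sketch (illustrative, not wired into the CLI): how the @args
# decorator above accumulates argparse specs on a handler. Decorators apply
# bottom-up, and insert(0) keeps the recorded list in top-to-bottom reading
# order. The handler name and its options are hypothetical.
@args('--verbose', '-V', action='store_true')
@args('--db-url', '-d', metavar='<db-url>')
def _example_handler(dburl=None, verbose=None):
    """No-op handler used only to illustrate the decorator contract."""


assert _example_handler.args == [
    (('--verbose', '-V'), {'action': 'store_true'}),
    (('--db-url', '-d'), {'metavar': '<db-url>'}),
]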
class HSMCommands(object):
    """Class for managing HSM/pkcs11 plugin"""

    description = "Subcommands for managing HSM/PKCS11"

    gen_mkek_description = "Generates a new MKEK"

    @args('--library-path', metavar='', dest='libpath',
          default='/usr/lib/libCryptoki2_64.so',
          help='Path to vendor PKCS11 library')
    @args('--slot-id', metavar='', dest='slotid', default=1,
          help='HSM Slot id (Should correspond to a configured PKCS11 slot, \
default is 1)')
    @args('--passphrase', metavar='', default=None, required=True,
          help='Password to login to PKCS11 session')
    @args('--label', '-L', metavar='