barbican-20.0.0/.coveragerc:

[run]
branch = True
source = barbican
omit = barbican/tests/*,
       barbican/model/migration/alembic_migrations/versions/,
       barbican/plugin/dogtag.py,
       barbican/plugin/symantec.py

[report]
ignore_errors = True
exclude_lines =
    pragma: no cover
    @abc.abstractmethod

barbican-20.0.0/.mailmap:

# Format is:
#
#
John Wood Malini K. Bhandaru Malini K. Bhandaru Malini Bhandaru

barbican-20.0.0/.stestr.conf:

[DEFAULT]
test_path=${OS_TEST_PATH:-./barbican/tests/}
top_dir=./

barbican-20.0.0/.zuul.yaml:

---
- job:
    name: barbican-tox-functional
    parent: devstack-tox-functional
    description: Functional testing
    required-projects:
      - name: openstack/barbican
      - name: openstack/castellan
      - name: openstack/openstacksdk
    vars:
      devstack_plugins:
        barbican: https://opendev.org/openstack/barbican
      tox_install_siblings: true

- job:
    name: barbican-dogtag-tox-functional
    nodeset: devstack-single-node-fedora-latest
    parent: barbican-tox-functional
    description: |
      Functional testing for DogTag backend
    vars:
      devstack_services:
        barbican-dogtag: true

- job:
    name: barbican-tox-functional-fips
    nodeset: devstack-single-node-centos-9-stream
    parent: barbican-tox-functional
    description: |
      Functional testing for a FIPS enabled Centos 8 system
    pre-run: playbooks/enable-fips.yaml
    vars:
      nslookup_target: 'opendev.org'

- job:
    name: barbican-vault-tox-functional
    parent: barbican-tox-functional
    description: |
      Functional testing for Hashicorp Vault backend
    vars:
      devstack_services:
        barbican-vault: true
      tox_environment:
        VAULT_PLUGIN_ENABLED: 1

- job:
    name: barbican-kmip-tox-functional
    parent: devstack-tox-functional
    description: |
      Functional testing for KMIP backend using PyKMIP
    vars:
      devstack_services:
        kmip: true

- job:
    name: barbican-simple-crypto-devstack-tempest
    parent: barbican-tempest-plugin-simple-crypto
    description: |
      Compatibility alias for barbican-tempest-plugin-simple-crypto

- job:
    name: barbican-simple-crypto-devstack-tempest-ipv6-only
    parent: barbican-tempest-plugin-simple-crypto-ipv6-only
    description: |
      Compatibility alias for barbican-tempest-plugin-simple-crypto-ipv6-only

- job:
    name: barbican-simple-crypto-devstack-tempest-castellan-from-git
    parent: barbican-tempest-plugin-simple-crypto-castellan-src
    description: |
      Compatibility alias for barbican-tempest-plugin-simple-crypto-castellan-src

- job:
    name: barbican-simple-crypto-devstack-tempest-cursive
    parent: barbican-tempest-plugin-simple-crypto-cursive
    description: |
      Compatibility alias for barbican-tempest-plugin-simple-crypto-cursive

- job:
    name: barbican-grenade
    parent: grenade
    required-projects:
      - opendev.org/openstack/grenade
      - opendev.org/openstack/barbican
      - opendev.org/openstack/barbican-tempest-plugin
      - opendev.org/openstack/python-barbicanclient
    vars:
      devstack_plugins:
        barbican: https://opendev.org/openstack/barbican
      devstack_services:
        barbican-svc: true
        barbican-retry: true
      tempest_plugins:
        - barbican-tempest-plugin
      tempest_test_regex: '\[.*\bsmoke\b.*\]|^(barbican_tempest_plugin.tests)'
      tox_envlist: all

- job:
    name: octavia-v2-dsvm-tls-barbican-secure-rbac
    parent: octavia-v2-dsvm-tls-barbican
    vars:
      devstack_localrc:
        ENFORCE_SCOPE: True

- project:
    queue: barbican
    templates:
      - check-requirements
      - openstack-cover-jobs
      - openstack-python3-jobs
      - publish-openstack-docs-pti
      - release-notes-jobs-python3
    check:
      jobs:
        - barbican-tox-functional
        - barbican-vault-tox-functional:
            voting: false
        - barbican-grenade:
            voting: false
        - barbican-tempest-plugin-simple-crypto
        - barbican-tempest-plugin-simple-crypto-jammy
        - barbican-tempest-plugin-simple-crypto-secure-rbac
        - barbican-tempest-plugin-simple-crypto-ipv6-only
        - barbican-tox-functional-fips:
            voting: false
        - octavia-v2-dsvm-tls-barbican
        - octavia-v2-dsvm-tls-barbican-secure-rbac
    gate:
      jobs:
        - barbican-tox-functional
    experimental:
      jobs:
        - barbican-dogtag-tox-functional
        - barbican-kmip-tox-functional

barbican-20.0.0/AUTHORS:

Abhishek Koneru Adam Harwell Ade Lee Ajay Kalambur Akihiro Motoki Alan Bishop Alex Gaynor Alex Kavanagh Alex Schultz Alex Schultz Alexander Gräb Alexandra Settle Amy Marrich Andre Aranha Andreas Jaeger Andreas Jaeger Andreas Scheuring Andrew Hartnett Anh Tran Anusha Unnam Aradhana Singh Arash Ghoreyshi Arun Kant Arun Kant Arun Kant Arvind Tiwari Atsushi SAKAI Bertrand Lallau Bertrand Lallau Bhagyashri Shewale Bin Zhou Brian Haley Bryan D. Payne Béla Vancsics CHARDON Gerome Cao Xuan Hoang Carlos D. Garza Carlos Goncalves Carlos Marin Cassandra Burnias Chad Lung Chandan Kumar Chaozhe.Chen Charles Neill Chellygel Chelsea Winfree Chelsea Winfree Chris Solis Christian Berendt Christopher Solis Colleen Murphy Constanze Kratel Corey Bryant Craig Tracey Cyril Roelandt Daniel Gonzalez Dao Cong Tien Davanum Srinivas Dave McCowan Dave Walker (Daviey) Deepak Dirk Mueller Dmitriy Rabotyagov Dmitry Ratushnyy Dolph Mathews Donald Stufft Doug Chivers Doug Hellmann Dougal Matthews Douglas Mendizabal Douglas Mendizabal Douglas Mendizábal Douglas Mendizábal Douglas Mendizábal Duan Jiong Elvin Tubillara Emilien Macchi Eric Brown Eric Fried Eric-Xie Everardo Padilla Saca Fernando Diaz Flavio Percoco Freddy Pedraza Gage Hugo Ghanshyam Mann Gorka Eguileor Gregory Haynes Grzegorz Grasza Gábor Antal Harry Rybacki He Qing Hervé Beraud Hieu LE Hironori Shiina Hu Jie Huseyin Gedikli Ian Cordasco Ian Wienand Igor Gueths Jackie Truong James E.
Blair James Page Jamie Lennox Jarret Raim Jason Fritcher Jeff Feng Jens Harbott Jeremy Liu Jeremy Stanley Jesse Pretorius Jiong Liu Joe Gordon Johannes Grassler John McKenzie John Vrbanac John Vrbanac John Wood Jorge Munoz Josephine Seifert Juan Antonio Osorio Juan Antonio Osorio Juan Antonio Osorio Robles Juan Antonio Osorio Robles Julien Danjou Kafilat Adeleke Kai Qiang Wu Kaitlin Farr Kaitlin Farr Karen Siles Karthik Prabhu Vinod Kenji Yasui Kevin Bishop Kevin_Zheng Kien Nguyen Kiran_totad Le Hou Lingxian Kong Lisa Clark Longgeek Louis Taylor Lucky samadhiya Luigi Toscano Luong Anh Tuan Luz M V P Nitesh Malini K. Bhandaru Marc Koderer Marc Methot Mark Goddard Martin Kletzander Matthew Treinish Mauricio Harley Max Abidi Max Abidi Meera Belur Michael Johnson Michael Krotscheck Michael McCune Michael Perng Mike Carden Miriam Yumi Mohit Motiani Moises Guimaraes de Medeiros Moisés Guimarães de Medeiros Monty Taylor Nam Nguyen Hoai Nathan Reller Nathan Reller Nathan Reller Nguyen Hai Truong Nguyen Hoai Nam Nguyen Hung Phuong Nguyen Van Duc Nguyen Van Trung Nicholas Jones Ning Sun Nithya Renganathan OTSUKA, Yuanying Ollie Leahy OpenStack Release Bot Ori Pugatzky Pablo Iranzo Gómez Pan Paul Belanger Paul Glass Paul Kehrer Paulo Ewerton Pavlo Shchelokovskyy Peter Hamilton Peter Kazmir Pierre Riteau Pradeep Kumar Singh Priti Desai Q.hongtao <15662728521@163.com> Rafael Folco Rafael Rivero Raildo Mascena de Sousa Filho Ravi Sankar Penta Ricardo Rocha Robert Clark Rohan Arora Russell Bryant Ryan Petrello Samantha Blanco Sascha Peilicke Sean McGinnis Seanna Vien Sebastian Jeuk Sheena Gregson Shuquan Huang Sig Sigler Stanislaw Pitucha Stefan Nica Stephen Finucane Steve Heyman Steve Martinelli Steven Gonzales Swapnil Kulkarni (coolsvap) Takashi Kajinami Takashi Kajinami Takashi Natsume Thierry Carrez Thomas Bechtold Thomas Dinkjian Thomas Goirand Thomas Herve Tim Burke Tim Kelsey Tin Lam Tin Lam Tobias Urdin Tobias Urdin Tony Breeds Tuan Do Anh Victor Stinner Vladyslav Drok Vu Cong Tuan Wanlong Gao Wyllys Ingersoll Xuezhong Kang Yandong Xuan YuehuiLei Yushiro FURUKAWA Yuuichi Fujioka Zhao Lei ZhaoBo Zhenguo Niu ZhiQiang Fan ZhiQiang Fan Zhongyue Luo baiwenteng binean caoyuan chadlung chao liu chen-li chenjiao chenxing chenxing chioleong dane-fichter deepakmourya dommgifer edtubill gecong1973 gengchc2 ghanshyam ghanshyam howardlee huangshan jacky06 jfwood ji-xuepeng jqxin2006 kangyufei liangjingtao likui lingyongxu liujiong liushuobj liyanhang maaoyu melanie witt melissaml pawnesh.kumar pengyuesheng priti_desai rajat29 renliang ricolin shangxiaobj shashi.kant shubhendu sonu.kumar ting.wang tsv venkatamahesh wangzihao werner mendizabal whoami-rajat wu.chunyang wu.chunyang wu.shiming xpress yushangbin zhang.lei zhangdebo zhangyanxian zhengwei6082 zhu.boxiang zhulingjie zhuzeyu “Fernando

barbican-20.0.0/ChangeLog:

CHANGES ======= 20.0.0 ------ * Enable pKEK rewrap with SimpleCrypto * Use MultiFernet to enable more than one KEK * Imported Translations from Zanata * Updating tox runtimes for 2025.1 * Enable new default * reno: Update master for unmaintained/2023.1 * Increase unit testing coverage for PKCS#11 * Update gate jobs as per the 2025.1 cycle testing runtime * Fix typo in wrap\_key function * Configure mechanism for wrapping pKEKs * Replace deprecated datetime.utcnow() * Remove default override for config options
policy\_file * Add note about requirements lower bounds * Remove Python 3.8 support * Imported Translations from Zanata * Drop SQLALCHEMY\_WARN\_20 * Update master for stable/2024.2 * Imported Translations from Zanata * Remove unused versionbuild script 19.0.0 ------ * Keep new RBAC disable by default * Imported Translations from Zanata * Bump SQLAlchemy requirement * Imported Translations from Zanata * Restore disabled tests * reno: Update master for unmaintained/zed * Fix wrong plugin name * kmip: Fix missing extra requirement * Remove SQLAlchemy tips jobs * Use oslo.db to generate db engine * Use explicit default instead of implicit fallback * reno: Update master for unmaintained/xena * reno: Update master for unmaintained/wallaby * reno: Update master for unmaintained/victoria * vault: Hide values used for authentication * Replace pyOpenSSL by cryptography * func tests: Use cryptography to manage certificates and keys * Remove unused test utils * Update master for stable/2024.1 18.0.0 ------ * Update devstack plugin for Secure RBAC * Drop all remaining logics for certificate resources * Prohibit certificate order resource * tox: Drop envdir * Fix releasenotes build of yoga moved to unmaintained * Bump hacking * Simplify .coveragerc * Update python classifier in setup.cfg * Get rid of unused periodic\_task * Remove unused wsgi/ssl options from oslo.service * pkcs11: Remove deprecated token\_label option * Replace deprecated pyOpenSSL API * Use consistent [database] options * Fix zuul config warning * Add python 3.10 to setup.cfg metadata * Deprecate Symantec certificate plugin * Remove unnecessary comment lines from setup.cfg * Revert "Temporarily make sqlalchemy master job no-voting" * Temporarily make sqlalchemy master job no-voting * Fix python shebang * Update master for stable/2023.2 * Fix expired links * Enable Secure RBAC by default 17.0.0 ------ * Imported Translations from Zanata * Fix missing oslo.versionedobjects library option * Imported Translations from Zanata * Imported Translations from Zanata * db: Replace use of backref * Add job to test with SQLAlchemy master (2.x) * db: Update 'select()' calls * db: Replace use of reverse cascades * tests: Enable SQLAlchemy 2.0 deprecation warnings * Resolve misc deprecation warnings * tests: Enable warnings * tests: Disable policy deprecation warnings * tox: Remove basepython * Enable SRBAC test * Update secret:delete policy to allow admin to delete secret * Remove System scope from policy * Bump Hashicorp Vault version to 1.13.2 * Add tempest to devstack how-to * Make FIPS job non-voting * Migrate back to Launchpad * Imported Translations from Zanata * Logrotate all log files * Fix functional tests * Remove TripleO job * Release notes for secret consumers, microversions and CVE fix * Vault: enable RSA from ordered container functional test * Update master for stable/2023.1 * Imported Translations from Zanata 16.0.0 ------ * Fix typo * Use new get\_rpc\_client API from oslo.messaging * Add content\_types to the response dict * Fix tox4 error * Secret consumers documentation * Add support for Vault Namespaces * Microversions documentation * Fix typo in check\_str for system\_admin policy rule * Remove six * Fix unit test error after cryptography update * Imported Translations from Zanata * Fix Story 2010258 (CVE-2022-3100) * Change the unique properties of secret consumers * Add audit middleware options to barbican.conf * Switch to 2023.1 Python3 unit tests and generic template name * Remove unnecessary hacking checks * Update master 
for stable/zed * tox: Minor tweaks * Update devstack plugin installation doc * Fix Barbican gate * Imported Translations from Zanata * Imported Translations from Zanata 15.0.0.0rc1 ----------- * Fix wrong assert statements * Fix remaining Secure RBAC policies * Fix Secure RBAC policies for Containers API * Fix Secure RBAC policies for Consumers * Fix Secure RBAC policies for secret\_metadata * Fix Secure RBAC policies for Orders * Fix Secure RBAC policies for Secret ACLs * Fix deprecation cycle for Secret policies * Fix Secure RBAC policies for Secrets * zuul: fix the grenade job to actually test barbican * devstack: make create\_barbican\_accounts idempotent * Mock logging during unit tests * Fix versions for new microversion * Imported Translations from Zanata * zuul: Declare queue at top level * Revert "Temporarily disable voting for FIPS job" * Drop python3.6/3.7 support in testing runtime * Allow users with "creator" role to edit ACLs * Remove undercloud job * Include options for Vault secret store * Remove unnecessary unicode prefixes * Remove the invalid quotes * Temporarily disable voting for FIPS job * Imported Translations from Zanata * Implement microversions, bring back secret consumers API * Replace TripleO CentOS 8 jobs with CentOS 9 jobs * Add missing oslo.service options to barbican.conf * Include healthcheck middleware options * Add Python3 zed unit tests * Update master for stable/yoga 14.0.0 ------ * Fix container consumers rbac policy * Allow secret delete by users with "creator" role * Updating python testing classifier as per Yoga testing runtime * Imported Translations from Zanata * Fix policy for Orders * Move DogTag functional tests to experimental * [doc] Fix typos * Fix consumer name length validator * Fix policy for adding a secret to a container * Fix secret metadata access rules (pt 2) * Fix secret metadata access rules * Fix POST /v1/secret/{secret-id}/metadata response * Fix naming in secret meta tests * Imported Translations from Zanata * Ignore network errors during C\_Finalize * Add Python3 yoga unit tests * Update master for stable/xena 13.0.0 ------ * Fix Castellan Secret Store inconsistent encoding * Run TripleO jobs on CentOS8 instead of CentOS7 * Return 403 instead of 500 when policy check fails * Replace oslo\_utils.fnmatch with fnmatch * Fix alembic migrations * Fix unit tests and migration to unblock gate * Add FIPS gate job * Changed minversion in tox to 3.18.0 * docs: Update Freenode to OFTC * Raise maximum allowed secret size * Imported Translations from Zanata * setup.cfg: Replace dashes with underscores * Fix Vault functional test * Add secure-rbac gate * Fix RBAC and ACL access for managing secret containers * Fix transport key policies * Add Python3 xena unit tests * Update master for stable/wallaby * Add ACL default to allow project read * Monkey patch original current\_thread \_active 12.0.0.0rc1 ----------- * Fix RBAC for transportkeys resource * Implement secure RBAC for quota API * Implement secure RBAC for secretstore API * Optimize conditional statements * Implement secure RBAC for ACLs API * Implement secure RBAC for transport key API * Implement secure RBAC for secretmeta API * Implement secure RBAC for orders API * Implement secure RBAC for consumers API * Implement secure RBAC for containers API * Implement secure RBAC for secrets API * Allow multiple token labels for PKCS#11 driver * Fix PKCS#11 reinitialization after failure * Use system locks in pkcs11 library * [goal] Deprecate the JSON formatted policy file * Switch to 
collections.abc.MutableMapping * Imported Translations from Zanata * Imported Translations from Zanata * [doc] Fix hmac/mkek generation commands * remove unicode from code * Imported Translations from Zanata * Update doc8 version * Update requirements for secure RBAC work * Imported Translations from Zanata * [doc] Adjust documentation for Thales Luna * Imported Translations from Zanata * Fix hacking min version to 3.0.1 * Use barbican.conf in barbican-manage * Imported Translations from Zanata * Use serial number or label for PKCS#11 tokens * Python 3.9: use base64.{decode,encode}bytes * corrects typo in cp command * Update hacking for Python3 * Bump py37 to py38 in tox.ini * [doc] Add documentation for Vault plugin * Delete deprecated url of readme.rst ask.openstack.org is read-only and cannot raise a new question * Fix admin can not delete other user's secrets * Imported Translations from Zanata * Remove six.add\_metaclass * Add Python3 wallaby unit tests * Update master for stable/victoria 11.0.0 ------ * Fix debug log string * Set db\_auto\_create default to False * Rebase alembic migrations * Remove six.PY3 * Keep barbican functional jobs on Bionic * Log beginning of request processing to INFO * migrate tox based testing to ubuntu focal * Fix dogtag functional job * Fix kmip&dogtag level problem in the document hierarchy * zuul: switch to the new tempest native jobs * devstack: Honor SERVICE\_PROTOCOL for endpoints * Stop setting USE\_PYTHON3 for jobs * Drop configure\_keystone\_authtoken\_middleware function * Switch from unittest2 compat methods to Python 3.x methods * Imported Translations from Zanata * Ussuri contrib docs community goal * Fix PDF build * Fixes index.rst titles and add mascot * Imported Translations from Zanata * Stop to use the \_\_future\_\_ module * docs: Minor tweaks to DevStack guide * Cap jsonschema 3.2.0 as the minimal version * Switch to newer openstackdocstheme and reno versions * delete invalid url in README.rst * Add a /healthcheck URL * Add Python3 victoria unit tests * Remove install\_cmd,lower-constraints * Fix py38 failures * Imported Translations from Zanata * Fix config option type * Cleanup py27 support * Add undercloud-containers to gate * Update README * Use unittest.mock instead of third party mock * Add barbican-spec link to readme.rst * Update master for stable/ussuri * Use Zuulv3 devstack jobs 10.0.0 ------ * Revert Secret Consumers API change * Set correct names for Thales and NetHSM * [ussuri][goal] Drop python 2.7 support and testing * Imported Translations from Zanata * Improve devstack script for vault plugin * Gate on octavia-v2-dsvm-tls-barbican * Gate on py3 * Dejokerizes 404 logs * Support list containers by type * Fix the barbicanclient installation not from source * Switch to Ussuri jobs * Debug-log tracebacks in \_import\_kra\_transport\_cert\_to\_nss\_db * docs: Fix typo: barican * Update master for stable/train * Don't use branch matching * Update the constraints url * [train][goal] Define new barbican-simple-crypto-devstack-tempest-ipv6-only job * Modify the url of upper\_constraints\_file * Support listener pooling in keystone listener * Adding the unit-tests of OVO for Barbican [2] * Add Secret Consumer Controllers and their tests * PDF Documentation Build tox target * Add SecretConsumerValidator and its tests * Add SecretConsumerMetadatum object * Add SecretConsumerRepo repository and its tests * Update dev environment instructions for F30 * Add SecretConsumerMetadatum model and its tests 9.0.0 ----- * Start using the f29 
nodeset * Ensure doc/source/\_static to fix docs gate * Fix py3 compatibility issue in PKCS#11 plugin * Imported Translations from Zanata * Update Castellan minimum version * Make broken job barbican-kmip-devstack-functional experimental * Add Python 3 Train unit tests * Rename README.md to README.rst * Update api-ref location * Fix barbican B105 issues * Fix the bug of pep8 and building api-guide * Replace git.openstack.org URLs with opendev.org URLs * Update Back End Documentation * Gate jobs are voting * Block bandit 1.6.0 * Delete api-ref * Update to opendev * OpenDev Migration Patch * Dropping the py35 testing * Imported Translations from Zanata * Code updated with recent PKCS11 API new args added accordingly to generically support all mechanisms and types * Replace openstack.org git:// URLs with https:// * Update master for stable/stein * Added section for Utimaco HSM in documentation 8.0.0 ----- * Made HMAC Key Wrap mechanism configurable * Update json module to jsonutils * Remove unused code * Fixes for rewrap * Set Tempest's service\_availability setting for Barbican * Add venv support to the devstack plugin * Enable KV mountpoint configuration for Vault * Enable AppRole authentication support for Vault * Use the Octavia Barbican integration check gate * add python 3.7 unit test job * Run functional tests serially * Fix multiple backend test * Fix secret-stores functional tests * Remove hardcoded 'localhost' references * Documented ATOS and Thales config for PKCS#11 plugin * Address race condition in KEKDatum * Remove tripleo newton and ocata jobs * functionaltests: Add response headers to logging info * PY3: Ensure normalize\_before\_encryption encodes b64payload * Add barbican-status upgrade check command framework * Fix Safenet HSM regression in PKCS#11 * Workaround for failing gates * Imported Translations from Zanata * Replace tripleo-scenario002-multinode with scenario002-standalone * Change openstack-dev to openstack-discuss * Update Octavia co-gate for python3 first * Imported Translations from Zanata * Fix Chinese quotes * Remove unused validator related to CA IDs * Clean up some config docs formatting * Imported Translations from Zanata * Add 'barbican-manage hsm check\_[mkek|hmac] * Imported Translations from Zanata * Add python 3.6 unit test job * Use authorize instead of enforce for policy * Imported Translations from Zanata * Fix tox -e releasenotes * Fix duplicate paths in secret hrefs * Use context instead of manually setting the credentials for policy * Port RuleDefaults to DocumentedRuleDefaults * tox: Stop building \*all\* docs in 'docs' * Increment versioning with pbr instruction * Imported Translations from Zanata * Update .zuul.yaml: Make barbican-vault-devstack-functional voting * Don't quote {posargs} in tox.ini * Imported Translations from Zanata * Fix tempest\_roles for devstack plugin * Imported Translations from Zanata * Imported Translations from Zanata * Fixed incorrect release note * Import octavia-v2-dsvm-py35-scenario * add python 3.6 unit test job * switch documentation job to new PTI * import zuul job settings from project-config * Fix CKM\_AES\_GCM encryption * Remove non-voting job from gate queue * Imported Translations from Zanata * Revise diretory server install commands * Add missing mechanism for GENERIC\_SECRETS * Remove -u root as mysql is executed with root user * Use absolute path for vault root token file in devstack * Update reno for stable/rocky 7.0.0 ----- * Imported Translations from Zanata * Refactor PKCS#11 to allow configurable 
mechanisms * Imported Translations from Zanata * Switch to fedora-latest for testing * Adding support for 512-Bit-Secret-Creation when using xts-mode * Add doc8 to pep8 check for project * Skip some tests for vault plugin * Fix getting secret for vault plugin 7.0.0.0b3 --------- * Fix response status for invalid routes * Update http to https and fix link in doc reference * Enable vault devstack functional test by default * Remove unused policy enforcer attributes * Make barbican-kmip-devstack-functional job non-voting * Switch to stestr * Initial the unit-tests of OVO for Barbican * Implement the "check\_exc" parameter for OVO * Implement OVO for Barbican [5] * Ensure orders policy-in-code matches controller * Update pypi url to new url * Implement OVO for Barbican [4] * Implement OVO for Barbican [3] * As of April 28th 2018, Barbican has been migrated to storyboard. The documentation should be adjusted * Update the periodic\_task link for retry\_scheduler * Update two Barbican services to Docs * Activate html\_static\_path config option * add release notes to README.rst * fix tox python3 overrides * Follow the new PTI for document build 7.0.0.0b2 --------- * Implement OVO for Barbican [2] * Remove unused policy rules * Implement OVO for Barbican [1] * Initial OVO for Barbican * Remove CA API policy file * Imported Translations from Zanata * Fix broken gate due to breaking dependency changes * Commit DB changes on API startup * TrivialFix: Update block code to be more beautiful * Update the version of Ubuntu * Add devstack gate for vault * Remove pycrypto dependency * Fix docs build in tox.ini * Castellan based secret store * update some documents about the keystone "API v2.0" * Update http links for doc migration * Imported Translations from Zanata * Stop using legacy-fedora-27 * Remove pycrypto from dogtag plugin * Enforce usage of oslo.context's project\_id 7.0.0.0b1 --------- * Imported Translations from Zanata * fix lower constraints * Configure control\_exchange to match keystone * Initialize db for Barbican Keystone listener * Update auth\_uri option to www\_authenticate\_uri * Add os-testr as test dependency * Apply pep8 check to app.wsgi * Remove unused broker configuration in devstack * Take context from environment instead of parsing headers * Imported Translations from Zanata * Enable mutable config in Barbican * Do not copy policy.json when installing barbican * Remove use of unsupported TEMPEST\_SERVICES variable * Imported Translations from Zanata * Updated from global requirements * Add retries for Dogtag plugin * add lower-constraints job * Update to fedora-27 for testing * Indicating the location tests directory in oslo\_debug\_helper * Change restart the services command * Fix incomplete barbican configuration path in doc * Updated from global requirements * Update reno for stable/queens * Updating time for functional tests * Update to use new stevedore backend option * Set debug mode according to ENABLE\_DEBUG\_LOG\_LEVEL * Make grenade-devstack-barbican job voting * Fix the grenade-devstack-barbican gate * Imported Translations from Zanata 6.0.0 ----- * Handle URL reconstruction in PEP333 compatible fashion * Imported Translations from Zanata * Use default policy in code * Update doc to match our in-repo configuration file * Add sample config and policy to documentation * Correct link address in doc * Fix coverage job * [DOC] Install client package before verification * Make grenade-devstack-barbican job nonvoting * Adding #nosec for pycrypto use * Modify 
simple\_crypto init message * Remove Simple Crypto plugin production warning * Zuul: Remove project name * Add secret store sync functionality to barbican-manage * Update virtual\_environment for genconfig and genpolicy * Updated from global requirements * Authorites spelling error 6.0.0.0b3 --------- * Update link address * Updated from global requirements * Update the link in database\_migrations.rst * Ensure only api app initializes secret store * Remove Certificate Orders and CAs from API * Updated from global requirements 6.0.0.0b2 --------- * Remove redundancy code * Fix Dogtag mode on key generation * Add defaults for Dogtag backend plugin * zuul: run tripleo-scenario002 job * Remove setting of version/release from releasenotes * Updated from global requirements * Imported Translations from Zanata * Use assertRegex instead of assertRegexpMatches * Updated from global requirements * Use assertRegex instead of assertRegexpMatches * Zuul: add file extension to playbook path * Migrate to Zuul v3 * writing convention: do not use “-y” for package install * Imported Translations from Zanata * Database session need to rollback after duplication * Updated from global requirements * Imported Translations from Zanata * Fix Race Condition in get\_or\_create\_project() 6.0.0.0b1 --------- * Imported Translations from Zanata * Update the documentation link for doc migration * Cleanup test-requirements * Updated from global requirements * Dynamically determine SSL version in unit tests * Delete python bytecode including pyo * Use Castellan's backend option instead of api\_class * Add default configuration files to data\_files * Updated from global requirements * Updated from global requirements * Add flag to allow devstack to run on f26 in gate * Updated from global requirements * Use PortOpt for KMIP port * Add extra time in functional test that fails intermittently * Updated from global requirements * Use PortOpt for port options * Pick up general URI when constructing barbican endpoint * Put base policy rules at first * Fix some reST field lists in docstrings * Revert "Revert "Use devstack functions for deploying barbican-svc"" * [TrivialFix] Change container\_id to right value * Remove unused policy check * Fix to use "." 
to source script files * Replace http with https for doc links * [Trivialfix]Fix typos * writing convention: do not use “-y” for package install * Updated from global requirements * Ensure module is initialized before being used * Put "rm" command to whitelist\_externals to avoid warning * Imported Translations from Zanata * Update reno for stable/pike 5.0.0 ----- * Revert "Use devstack functions for deploying barbican-svc" * allow redirects in .htaccess files on the static web servers * Removed unnecessary setUp() calls in tests * Use devstack functions for deploying barbican-svc * Remove install-guide env which is no longer effective * Updated from global requirements * Stop using deprecated 'message' attribute in Exception 5.0.0.0b3 --------- * Updated from global requirements * Update internal doc reference according to doc migration result * Add PBR wsgi\_scripts entrypoint for barbican api * Set entry point for dogtag config correctly * Stop excluding functionaltests directory in pep8 check * Use openstackdocstheme 1.11 everywhere * move documentation into the new standard layout * Using openstack command * Update installation guide * Updated from global requirements * Replaces uuid.uuid4 with uuidutils.generate\_uuid() * Fix html\_last\_updated\_fmt for Python3 * Updated from global requirements 5.0.0.0b2 --------- * Fix grenade test related to encrypt volume/image * Remove translation logging prefixes from i18n * Fix pep8 gate failure because of missing dependency * Fix html\_last\_updated\_fmt for Python3 * Use get\_rpc\_transport instead of get\_transport * Updated from global requirements * Fix some syntaxes in the upgrade guide * Remove translation of log messages * Unification content of the index.rst file * Update installation guide * Add guideline to upgrade for Operators * Trivial fix typos * Remove usage of parameter enforce\_type * Creating redundancy files when running tox * Updated from global requirements * Updated from global requirements * DevStack plugin set tempest options in test-config section * Updated from global requirements * Remove setting DEVSTACK\_LOCAL\_CONFIG in test\_hook.sh * Updated from global requirements * Maintain policy in code * Updated from global requirements * Updated from global requirements * Remove python-requests and let it be installed via rpm * Add date filter functional tests * Clean up a stray secret in the functional tests * Remove Certificate Orders and CAs from Documentation * Making iv generation configurable for pkcs11 plugin * Fix barbican devstack plugin for devstack systemd * Sub pycrypto with cryptography in simple\_crypto * Updated from global requirements * Replace pycrypto with cryptography in translations * Add 'rm -f .testrepository/times.dbm' command in testenv * Set access\_policy for messaging's dispatcher * Use oslo.db create\_engine instead of SQLAlchemy * Fixes for Running Functional Tests with Python 3.5 * Add missing dogtag configurations from devstack plugin script * Change parent class for ListingSecretsTestCase * Replace pycrypto with cryptography in test\_rsa * Sub pycrypto with cryptography test\_cert\_resources * Replace pycrypto with cryptography in snakeoil * Update SUSE distro information in install guide * Fix gate devstack * Fix test cases expect Ubuntu specific assertion messages * Updated from global requirements * Update .gitignore to ignore barbican.conf 5.0.0.0b1 --------- * Updated from global requirements * Updated from global requirements * Add sem-ver flag so pbr generates correct version * 
Use oslo-config-generator to generate barbican.conf.sample * Updates to Devstack Variables * Fix install guide docs for SUSE * Fix bug in barbican-plugin-grenade * Updated from global requirements * Remove log translation related check * Updated from global requirements * Pass cli options to commands * Update Barbican Conf for Retry in Devstack Plugin * Fix the creation of endpoints in keystone\_data.sh * Remove unused exceptions * Trivial link fix in Secret Metadata API User Guide * Compare functional test results as dict, not string * Barbican grenade plugin * Fix TypeError with Python 3.5 * Fix some reST field lists in docstrings * Add missing validation to consumer controller * Updated from global requirements * Fix KMIP gate * Move dogtag plugin config options to extra file * Updated from global requirements * Update link reference * Updated from global requirements * Correct the doc link * Debug with barbican-api * Changed the python3.4 to python3.5 * Handle SQL Integrity Error More Generically * Updated from global requirements * Fix the typo * [Fix gate]Update test requirement * Trivial Fix: Replace http with https * Removed application/pkix media type * Failed to set up a dev env on ubuntu * Remove barbican-tempest-plugin install from devstack plugin * Update local.conf.example * Fix kmip plugin * Initial commit for Barbican api-ref * Updated from global requirements * Fix coverage gate * Read version info from package * Updated readme * Change tempest find\_test\_caller import * Remove unused logging import * Updated from global requirements * Updated from global requirements * Remove support for py34 * Use https instead of http for git.openstack.org * Update reno for stable/ocata 4.0.0 ----- * Add CryptoPluginUnsupportedOperation * Deprecate Dogtag subca tests * Fix error message formatting for Dogtag plugin exceptions * Update previous and next Consumer URLS in filtering * Fix Consumer API Reference Docs * Move dogtag plugin dependencies to one yum call * Add Dogtag plugin dependencies to bindep.txt * Split serial and parallel functional test runs * Make error message clear when no supported secret store found * Revert "Add ID property to all entities" * Add Unit Tests for Consumers API Controller * Add .ropeproject to .gitignore * Add ID property to all entities * Clean imports in code 4.0.0.0b3 --------- * Updated from global requirements * Fix typos * using utcnow instead of now in barbican unit tests * Fix error in api-guide * Correct configuration of db connection * Replace str(uuid.uuid4()) with utils.generate\_uuid() * Remove pycadf useless requirement * Removes unnecessary utf-8 encoding * DOC Remove a couple of repeated words * [devstack] enable logging to stderr * Updated from global requirements * Configure authtoken middleware in barbican.conf * Add build dir to flake8 exclude list * Correct the file path for deploying Barbican API under mod\_wsgi * Update the KMIPSecretStore tests to not test PyKMIP internals 4.0.0.0b2 --------- * Updated from global requirements * Updated from global requirements * Introduce hacking check to Barbican * Enable installation of barbican\_tempest\_plugin * Show team and repo badges on README * Make rabbitmq configuration much simpler * [Devstack] Fix devstack plugin compatibility * Add Apache 2.0 license header to the alembic\_migrations/script.py.mako * Pass secret\_type to repository query * Fix hacking check error * Files with no code must be left completely empty * Fix crypto plugin documentation * Using assertIsNotNone() instead 
of assertNotEqual(None) * Fix typo in file name * Add bindep environment to tox * Remove unused pylintrc * Updated from global requirements * Updated from global requirements * cors: update default configuration * Deprecate Cetificate Resources * Remove translations for debug level log * Updated from global requirements * Fix some minor error * Add "keystone\_authtoken" section in barbican.conf * Fix warning when running tox -e docs * Fix bindep so that translated jobs work * Updated from global requirements * Replaced assertion with more specific * Enable translations * Remove redundant 'the' in doc * Updated from global requirements * Fix incorrect endpoint in install-guide * Enforce application/json content type on quota set * Imported Translations from Zanata * Use http\_proxy\_to\_wsgi middleware * Updated from global requirements * Fix coverage test failure * Add dogtag-pki and python-nss as extra requirement * Update .coveragerc after the removal of respective directory * Updated from global requirements * Fix error in installation guide of Ubuntu * Rename crypto.py to base.py * Improve devstack configuration * Don't include openstack/common in flake8 exclude list * Fix postgres error during container list * Add summary to metadata in setup.cfg file * Enable release notes translation * Updated from global requirements * Updated from global requirements * Fix order of arguments in assertEqual * Fix typo * Updated from global requirements * Checking barbican resource id in URI is a valid uuid * Use Domains with Keystone v3 in functional tests * Fix routing for adding a secret to a container * Updated from global requirements * TrivialFix: Remove default=None when set value in Config * Fix typos in alembic.ini & kmip\_secret\_store.py * Fix some typos in simple\_crypto.py * Trivial fix in secretstore module * Imported Translations from Zanata * Fixes error when deleting consumers * Update reno for stable/newton * delete python bytecode including pyo before every test run 3.0.0 ----- * Adding reno release notes for multiple backend feature * Adding functional tests for multiple backend changes (Part 5) * Adding rest API for secret-stores resource (Part 4) * Central logic to sync secret store data with conf data (Part 3) * Changes for multiple backend conf and friendly plugin names (Part 2) * Don't inspect oslo.context * typo fix * Active a unit test in comon/test\_validators * standardize release note page ordering * Adding multiple backend db model and repository support (Part 1) * Adding API docs for multiple backend support changes * Imported Translations from Zanata * Remove consumer check for project\_id to match containers * Assigning unwrapped kek handle to new variable to avoid overwrite 3.0.0.0b3 --------- * Add Barbican Verification to Install Guide * Fix typo in barbican/tests/keys.py * Updated from global requirements * TrivialFix: Remove cfg import unused * TrivialFix: Remove logging import unused * Use more specific asserts in tests * Some minor code optimization in post\_test\_hook.sh * Fix some typos in database\_migrations.rst * Remove white space between print and () * Support upper-constratints.txt in tox environments * Add install guide * Fix test suite cleanup * Clean imports in code * Make 'url' references uppercase for consistency * Fix some typos * Change LOG.warn to LOG.warning * Use international logging message * Remove "KEYSTONE\_" URI settings for devsatck * changed typo from similiar to similar * Adds true functional tests for db\_manage script * Updated 
from global requirements * Imported Translations from Zanata * Add Python 3.5 classifier and venv * Generate IV on HSM device for encrypt operations * Add documentation for date-filters * Add seed random feature to seed HSM RNG * Fix the typo * Fix jenkins failing on coverage report * modify the home-page info with the developer documentation * modify the home-page info with the developer documentation * User with creator role can delete his/her own secret and container * Fixed typo in ACL section of API Guide * Remove white space between print and () * Default to Keystone authentication * Marking database connection config property as secret * Fix the typo in the files * Implement Date Filters for Secrets * dogtag: Only call initialize() if crypto is not None * delete unused LOG * Updated from global requirements * Imported Translations from Zanata 3.0.0.0b2 --------- * Move rabbit configurations to oslo\_messaging\_rabbit section * Insecure default PROTOCOL\_TLSv1 version in KMIP plugin * Correct reraising of exception * Barbican tests fail because of incomplete test dependencies * pkcs11-key-generation: convert mkek length to int * Add support for modifying Generic Containers * Updated from global requirements * Remove unnecessary executable permissions * Updated from global requirements * Port last test (test\_secrets) to Python 3 * Port test\_quotas to Python 3 * Port 3 more unit tests to Python 3 * Setup memory DB in test\_cmd * Change SecretAcceptNotSupportedException from exception.BarbicanException to exception.BarbicanHTTPException * Correct a typo in apiary.apib * Don't supply auth\_token information by default in paste * Fixed typo in crypto plugin docs * Remove unused oslo.concurrency requirement * Updated from global requirements * Do not count expired secrets toward quota * Updated from global requirements * Add retry for recoverable PKCS11 errors * Port API test\_resources to Python 3 * Port test\_validators to Python 3 * Port snakeoil\_ca to Python 3 * Updated from global requirements * Python 3: replace the whitelist with a blacklist * Port translations to Python 3 * Fix doc warnings * Use keystone auth plugin * Fixed test suite cleanup * Updated from global requirements * Updated from global requirements * Return 4xx error for invalid KMIP key spec * Fix keystone\_listener.py * Fix creation of notification server * Added KMIP Secret Store to Devstack * Checking for input secret\_ref to start with input request hostname * Updated from global requirements * Cleanup py34 tox tests * [Trivial] Remove executable privilege of doc/source/conf.py * Replace tempest-lib with tempest.lib * Code cleanup * Issue warning for deployers trying to use simple\_crypto * Python 3: fix barbican.tests.plugin.test\_store\_crypto * Python3: fix barbican.tests.plugin.test\_kmip * Python3: add tests that are already working * Python3: fix barbican.tests.plugin.crypto.test\_crypto * Python 3: use a string rather than bytes for "kek" * Python3: fix barbican.tests.api.controllers.test\_cas * Python3: HTTPServerError no longer has a "message" argument in its constructo * Python3: base64.b64encode expects bytes * Python3: exceptions no longer have a 'message' attribute * Updated from global requirements * Imported Translations from Zanata * use thread safe fnmatch * migrate keystone\_data to openstackclient * Use set Literals for better performance * Updated from global requirements * Fix circular dependency of certificate\_manager module * Adding support for barbican host href to be derived from wsgi 
request * Barbican server logs Secret Payload contents * Fix skip message for dogtag plugins * Handling json-home header for /v1 call * Add skips for KMIP functional tests * Updated from global requirements * Allow plugins to retrieve secrets * Barbican server discloses password and X-auth * Updated from global requirements * Add code coverage results for functional tests * Fix URL length for alembic migrations * Updated from global requirements * Return 404 when a secret does not have a payload 2.0.0 ----- * Change Table name to correct name * Update project quota paging tests to run with existing project quotas * Uses alembic migration when deploying devstack * Fix typos in Barbican files * Remove outdated line in KMIP docstring * Change Table name to correct name * Removes redundants * Add a configurable setting in barbican-functional.conf for SSL * Update reno for stable/mitaka * Update .gitreview for stable/mitaka 2.0.0.0rc1 ---------- * Add release notes for metadata api * Fix publishing of api-guide * Add a configurable setting in barbican-functional.conf for timeouts * Ensure that smoke tests cleanup their containers * Add cleanup capability for secrets and containers * Add PKCS#11 upgrade release note * Cleanup containers after functional tests run * Add barbican-manage release notes * Updated from global requirements * Return 404 Not Found when UUID is invalid * Publishing API Guide to OpenStack site * Fix correct foreign key constraints * Moved CORS middleware configuration into set\_defaults * Changes max string length for URL to 255 * Correct cert event plugin name in config * Fix 500 server error invalid transport key during secret creation * Throw 405 when specified method not allowed in Secret Metadatum * Improve error code for invalid secret creation with KMIP * Add missing unit test for clean\_command and fix error handling * Remove use of old bandit.yaml * Update Python classifier for 3.4 * Nit: occurrences of barbican in small letter * Fix index for API secrets user-guide * Make clean up of soft deletions configurable 2.0.0.0b3 --------- * Change the type of 'bit\_length' from string to integer * User Metadata API and tests * Introducing barbican-manage utility command * Introduce User-Meta table, model, and repo * Fixing project title * User Metadata Documentation * Simple soft deletion clean up for barbican-db-manage * Use assertEqual/Greater/LessEqual/IsNone * Fix typo in word "initialization" * Simplify the development environment setup * Updated from global requirements * Removing orphan and deprecated doc page: api.rst * Make bandit voting as part of pep8 * Fix roles attribute for barbican request context * Avoid using \`len(x)\` to check if x is empty * Fix typos in repositories.py * Fix gate that broke due to recent devstack renames * Fix http 500 when getting secret payload with no Acccept header * Updated from global requirements * Delete deprecated barbican-all script * Update and reorganize the doc landing page * Updated from global requirements * Fixing pkcs11\_kek\_rewrap script * Document public secret type * Cleanup barbican-api-paste pipeline * Document Symmetric Secret Type * Updated from global requirements * Remove deprecated option 'DEFAULT/verbose' * Update .gitignore for pyenv * Remove erroneous installing of python-nss * Use local images instead of references * Add Name restrictions in ContainerValidator * 's' to uppercase in "Openstack" * Updating the project name to barbican * Typo change Barbican to barbican Closes-Bug: 1542508 * Updated from 
global requirements * Fixed invalid conf file name * Fix spell typo * Add missing database constraints and defaults * Create Orders Documentation * Use host href for version discovery * Remove padding from legacy stored secrets * Added CORS support to Barbican * Updated from global requirements * Updates python3.4 dependencies in docs * Trivial Fix: Replace exit() with sys.exit() * Casts algorithm to lower case in KMIP plugin * Add missing parameter to the PKCS11 object * Add secret=True so passwords do not get logged * Updated from global requirements * Adding auditing support for API request using audit middleware * Using dict.items() is better than six.iteritems(dict) 2.0.0.0b2 --------- * Adding check on Offset * Fix buffer size for decrypt operations * Correct a typo * Imported Translations from Zanata * Updated from global requirements * Adding support for configuring number of async worker processes * Reworded sentence fragment in the README * Replace None comparisons in SQLAlchemy code * Remove pep8 error in symantec plugin * Fix containers api doc typo * Replaced outdated Barbican devstack link * Add wsgi script to deploy Barbican behind Apache * Fix argument reversal error in pkek cache * Reimplement p11\_crypto and pkcs11 modules * Remove openstack-common.conf * Updated from global requirements * Add lock for crypto plugin manager instantiation * Warning about tox not working in Vagrant setup * Updated from global requirements * Update ContainerValidator to Check for Name Max Length * test: make enforce\_type=True in CONF.set\_override * Use assertTrue/False instead of assertEqual(T/F) * Addressing error by clearing sqlalchemy session leak * Updated from global requirements * Trival: Remove unused logging import * Python 3 deprecated the logger.warn method in favor of warning * Change assertTrue(isinstance()) by optimal assert * Updated from global requirements * Adding cffi to requirements * Updated from global requirements * Removes MANIFEST.in as it is not needed explicitely by PBR * Fix parameter's type error in test case * Updated from global requirements * Updated from global requirements * Added ACL tests * Fix db error when running python34 Unit tests * Remove low-level PyKMIP test asserts * Correct docs for barbican project structure * Remove obsolete shell command files * Remove version from setup.cfg 2.0.0.0b1 --------- * Authorized API Requests * Assigning oslo config CONF once to parsed barbican CONF instance * Added container consumer end to end flow testcase * Add reno for release notes management * Make API Document Code-Blocks Consistent * Updated from global requirements * Allow length to be passed in MKEK Creation * Updated from global requirements * Add information on running individual Tests * Updated from global requirements * Fix troubleshooting.rst broken link * Remove useless requirements * Remove kombu useless requirement * Updated from global requirements * Enable pkcs11-key-generation command * Updated from global requirements * Remove unused scrub variables in barbican.conf * remove default=None for config options * Move Key gen script to cmd folder * Fix Database Migrations Documentation * fix typo * Make variable defined before refer * Removing no longer used class TestCryptoPlugin implementation from test code, to avoid confusing * Change unit tests in test\_utils.py and test\_contaiers.py to use CONF.set\_override * Correct the returned message when bit\_length is absent * Allow null content type for orders * Allow null values for secret names * 
Added secret flag to oslo config params * Update Devstack deployment and docs * Add more unit tests in test\_validators.py for schema validation * Add some unit tests regarding validation of secrets * Updated from global requirements * Imported Translations from Zanata * Replace assertFalse(a in b) with assertNotIn(a, b) * Remove old gate code * Updated from global requirements * Adds documentation for consumer resource * Fix spelling of HATEOAS * Updated from global requirements * Fix Intermittant Unit Test Failure in Repositories * Replace assertEqual(False, result) with assertFalse(result) * Change assertEqual(None, result) to assertIsNone(result) * Updated from global requirements * Replace assertTrue(a in b) with assertIn(a, b) * Updated from global requirements * Updated from global requirements * Add more information to debug log message for consumer delete * Updated from global requirements * Fix Intermittant Unit Test Failure * Remove duplicate words in documentation * Add RBAC docs for Cloud Administrator Guide * Updated from global requirements 1.0.0 ----- * Fix db\_manage to initialize mysql from base * Enforce project ownership of subCAs * Check a CA's status as project and preferred CA before deleting * Add subca functionality to the dogtag plugin * Update .gitreview to match stable/liberty * Exit with error code when db\_manage.py fails * Updated from global requirements * py3: Enable more tests to Python 3.4 * Enforce project ownership of subCAs * Check a CA's status as project and preferred CA before deleting * Python 3 Refactoring: Replace six.iteritems() with the preferred items() * Make tests.api.controllers.test\_containers py3 compatible * Add subca functionality to the dogtag plugin * Making barbican endpoint selection values to be configurable attributes * Fix order of arguments in assertEqual * Fix order of arguments in assertEqual * Fix order of arguments in assertEqual * Exit with error code when db\_manage.py fails * Fix order of arguments in assertEqual * Fix order of arguments in assertEqual * Fix db\_manage to initialize mysql from base * Use environmental variables for NewRelic * Fix comment and remove unneeded code 1.0.0.0rc1 ---------- * Use subCA when specified to sign CSRs * Catch exceptions raised by bad certificate plugin info * Add missing changes for Alembic time-zero * Add reference guide documenting Certificate Authorities API * Fix Snakeoil to return expiration timestamp in string format * Ensure Alembic version modules bootstrap new db * Cleanup of Translations * Open Mitaka development * Add database commit for database writes when async operations are possible * Change definition of Certificate Authority Model for no soft deletes * Add check to validators that SubCA's project id matches order's project id * CAs should return the external project ID * Remove ERROR\_ON\_CLONE references * Remove quotes from subshell call in bash script * Document dependencies installation for Fedora * Remove oslo-incubator documentation that's no longer valid * Add support in snakeoil plugin for intermediates * Change behavior of GET cas/preferred * Changes to Preferred CA Features * Add default quota limit config to functional tests * Combine exit codes of the two functional test runs * Adding Functional Tests and Supporting Fixes for Global Preferred CAs * Add missing X-xxxx HTTP headers to the unauth context * Change roles to rules in policy.json file * Initialize Database Before Running Quota Enforcer Unit Tests * Fix ca related controllers * Rename Quick 
Start Guide * Imported Translations from Zanata * Clean up CAs Policy Rules * Updated from global requirements * Remove .pyc files before performing functional tests * Cleanup Secrets created after Order functional tests * Updates quota values to be read from conf file * Finish Initialization of CA Table when Barbican Starts * Add function to catch unknown attributes in URI * Handle case of no logging environment variables * Add barbicanclient clone back (was overzealous in pruning) * Add functional test for project CA * Remove content related to transport keys and quotas * Add filter to secret list for acl secrets * Use testr for running functional tests and documentation * Add DELETE functionality for subCAs * Remove bad clones (new devstack method doesn't need this) * Set host\_href parameter in devstack * Update Bandit Tox Environment * Fix Tempest Installation * Add Project Quota Support for Sub CAs * Remove Vestigial Transport Key Quota Code * Added functional tests for creating CAs * Add subca functionality to snakeoil plugin * Use barbican.conf instead of barbican-api.conf * API documentation for CAs interface * Add validators for new CA creations * Make tests in barbican.tests.model py3 compatible * Make alembic used in barbican similar to other openstack projects * Support for creation of subordinate CAs * Force Identity API v3 for endpoint create in devstack * PyKMIP 0.4.0 test requirement * Fix devstack gate (and new gate\_hook.sh) * Completes localization of the api directory structure 1.0.0.0b3 --------- * Add Project Quota Information to the Barbican Quick Start Guide * Stanardized Functional Tests * Removes pyenv from barbican.sh * Update API Reference Docs with Project Quota Information * Add Functional Tests for Quota Enforcement * Implement Enforce Method for QuotaEnforcer * Change test\_containers unit test to work around webtest issue * Add Request ID to worker-side logs in barbican * Replace dict.iteritems() with dict.items() * Updated from global requirements * Use the new Devstack external plugin method * Add get\_count() method to project resource repositories * Ensure Project and ProjectQuotas Tables are in sync * Allow debug flag in Barbican tox and debugging documentation * Removing unused dependency: discover * Fixed Testing Attributes Bug * Updated from global requirements * Migrate to using gate\_hook.sh pre-devstack-update * Cleanup database after functional test runs * Updated from global requirements * Updated from global requirements * Add Quota Enforcement API * Integrated with PyKMIP Pie API * Use config rather than hardcoded admin id from Quotas test * Make tests in test\_acls.py and test\_cas.py py3 compatible * Implement Models and Repositories for Resource Quotas * Updated from global requirements * Replace itertools.ifilter with six.moves.filter for py3 compatibility * Introduce the key-manager:service-admin role * Updated from global requirements * Make tests in barbican.tests.api.middleware py3 compatible * Use "key-manager" instead of "keymanagement" * Ensure a http 405 is returned on container(s) PUT * Updated from global requirements * Drop downgrade field in alembic script.py.mako and version * Made Functional Test Key 256 Bits * Make tests in barbican.tests.tasks py3 compatible * Make tests in test\_hrefs, test\_quota py3 compatible * Remove obsolete term "incubated" from docs * Remove rpmbuild directory * Introduce the concept of plugin status * Replace python-ldap with ldap3 library * Catch any exception from base64.b64decode during 
validation * Updated from global requirements * Skip Bandit Checks on Functional Test Code * Removing Cloudkeep from comments * Updated from global requirements * Imported Translations from Transifex * Replace dict.iteritems() with six.iteritems(dict) for py3 support * Replace urllib with six.moves.urllib for py3 support * Add py34 support in tox * Add invalid property info to validation error message * Imported Translations from Transifex * Updated from global requirements * Revert backwards incompatible paste change * Flatten exceptions used in policy tests * Fix colorized logging in Devstack 1.0.0.0b2 --------- * Imported Translations from Transifex * Updated from global requirements * Configure dogtag installation's domain name * Unauthed tests for Secret & Container ACLs * Remove docbook documentation * Updated from global requirements * Add RBAC Functional Test for ACL Opeations * Updated from global requirements * Add Private Key to the resulting container if stored-key order * Added opaque data support to KMIP secret store * Updated from global requirements * Implement Configuration, Controllers, and Validators for Resource Quotas * Fix unit test errors caused by new mock version * Add retry server and functional tests to DevStack * Fix policy.json certificate\_authority inconsistency * Imported Translations from Transifex * Adding script for rewrapping p11 KEKs * Remove invalid skipping of tests * Completed localization tagging for plugin directory * Fix JSON structure example for stored-key orders * Fix config file name to barbican.conf * Drop file extensions for /usr/bin/\* * Add Functional Tests for ACLs Using Multiple Users * Updated from global requirements * Fix gate-barbican-docs after change to infra * Update unwrap key to accept specific variables * Add troubleshooting for \_bsdbb import error * Fix for admin and creator user access for secret/container read calls * Update queries to use proper offset and limit 1.0.0.0b1 --------- * Remove left over reference to admin endpoint * Remove ProjectSecret table-related code * Make db-manage script use same config file as barbican * Replace oslo incubator code with oslo\_service * Updated from global requirements * Fill project\_id in secrets where needed * Added unit test around bug related to who can modify ACL * Updated from global requirements * Refactor Barbican model registration * Added Certificate API Docs and Quick Start Guides * Display all versions info in versions controller * Changed Test Key Size to 2048 * Update version for Liberty 1.0.0a0 ------- * Replace oslo incubator code with i18n * Replace oslo incubator code with oslo\_utils * Added passphrase support to KMIP secret store * Added certificate support to KMIP secret store * Add project\_id to Secret model * Updated from global requirements * Replace oslo incubator jsonutils with oslo\_serialization * Updated from global requirements * Fixed Inconsistent Request Id in Log Messages * Sync with latest oslo-incubator * Changes to fix dogtag nss db handling * service\_enabled instead of environment variable for dogtag * Updated from global requirements * Change naming convention for Barbican config files * Remove duplicate 'have' in doc/source/api/reference/acls.rst * Fix NewRelic error reporting * Updated from global requirements * Add RBAC tests for orders * Fix http 500 when no accept header passed to secret GET /payload * Added pkcs1\_only Configuration to KMIP * Documentation for PKCS11 Key Generation script * Updated from global requirements * Complete RBAC 
tests for containers * Renaming ACL creator-only to project-access flag * Adding a new script to generate mkek and hmac * Updated from global requirements * Centralize Barbican API Config * Complete RBAC tests for secrets * Removed per ACL operations and added support for PUT method * Adding documentation for ACLs operations * Fix cert tasks not being scheduled for retry * Imported Translations from Transifex * Drop incubating theme from docs * Add more users/roles to secret/container RBAC tests * Fix Dogtag setup script * Remove unused incubated cryptoutils * Split out generate mkek and hmac from get command * Updating setup docs to use Python 2.7.9 * Switch from MySQL-python to PyMySQL * Move policy options to the oslo\_policy group in the config * Splitting out PKCS11 plugin * Fix the sql\_pool\_logging config attribute * Fixing unauthenticated middleware role bug * Updated from global requirements * Adding config option for specifying HSM slot * Base64 encode the cert returned from the Dogtag plugin * Imported Translations from Transifex * Port the Architecture, Dataflow, and Project Strucure docs * Add Multi-user support for Functional Tests * Fix snakeoil\_ca plugin * Updated from global requirements * Removed extraneous config.py * Remove deprecated references to admin endpoint * Updated from global requirements * Add Barbican configs for SQLAlchemy pool settings * Imported Translations from Transifex * Updated from global requirements * Adding a info log for each processed request * Migrate to oslo\_context * Adding more logging around containers & consumers * Updated from global requirements 2015.1.0 -------- * Fixed Bug for KMIP Secret Storage * Fixed Bug for KMIP Secret Storage * Drop use of 'oslo' namespace package 2015.1.0rc2 ----------- * Fix for missing id check in ACL count query * Improved error code handling for pkcs11 errors * Remove Future Parameters (write, list, delete) from ACL Validation Schema * Adding ACL check when new stored key order is submitted * Fix for missing id check in ACL count query * Removing signing\_dir directive from config * Fix failure with get on dict that was None * Fix call to load\_privatekey() when passphrase is None * Fix call to load\_privatekey() when passphrase is None * Updated from global requirements * Removing signing\_dir directive from config * Updated from global requirements * Fix failure with get on dict that was None * Security tests for Secret resources * Updated from global requirements * Update .gitreview to match stable/kilo * Refactor RSA Functional Smoke Tests 2015.1.0rc1 ----------- * Refactor and Fix Translation Code for PER and DER Formats * Add order\_retry\_tasks migration per latest model * Readability-related changes to secret store functions * Adding MySQL fixes to migrations * Refactor dogtag gate scripts * Fix Dogtag plugin to make latest functional tests pass * Fix KMIP Secret Store input/output * Potential resource exhaustion when registering consumers to containers * Fix handling of payload\_content\_encoding for orders * Add utility functions to convert between and PEM and DER * Fix base64 decoding of payloads in one-step POST * Fix errors in functional tests * Fix generating a CSR with an encrypted private key * Enable alternate error message for OpenSSL 1.0.2 * Imported Translations from Transifex * Fix expectations of order certificate test cases * Sign CSRs issued in SnakeOilCA tests * Imported Translations from Transifex * Add new smoke tests for RSA type containers and secrets * Expose root cause 
plugin exceptions * Create Barbican python scripts for development * Open Liberty development * Delete openstack.common.context * Changes to get remaining cert functional tests working * Switch to oslo\_policy * Add Bandit security static analysis checking via tox * Security tests for Order resources * Return container not found before ACL checks * Remove str() casting for the client\_message variable * Imported Translations from Transifex * Imported Translations from Transifex * Fixing python 3 imcompatiblity in common.utils * Updating hacking version for py3pep8 tox job * Adding simple log.info's to the SecretController * Security tests for Consumer resources * Imported Translations from Transifex * Implement validators and tests for stored key certificate orders * Adding policy layer changes for ACL support (Part 5) * Adding Container ACL controller layer changes (Part 4) * Adding Secret ACL controller layer changes (Part 3) * Adding ACL db repository changes (Part 2) * Adding ACL db model changes (Part 1) * Updating Orders functional tests to new naming convention * Adding docs to index and minor fixes * Imported Translations from Transifex * Restore worker tasks processing catching exceptions * Adding GET and DELETE for containers quickstart guide * Fix error in setting and updating ca and preferred ca tables * Create snakeoil certificate plugin * Imported Translations from Transifex * Introducing container types and examples to quickstart guide * Initial connect up retry task submit and re-enqueue * Write task retry info to database from server.py * Creating initial commit for containers quickstart guide * Removing a forgotten TODO * Adding reference doc page for containers * Allow business logic and plugins to retry tasks * Turning on info logging level by default * Fix string formatting for a secret store exception message * Add ability to run secrets tests in parallel * Security tests for Container resources * Completing secret reference documentation * Add utf-8 decoding for Content-Type * Imported Translations from Transifex * Adding more detail to the secrets quickstart guide * Switching how we handle sessions in p11\_crypto * Changes to get Dogtag related functional tests working * Fix some ca\_id related bugs, add more functional test code * Fix CA related exceptions, and unskip relevant tests * Removing unused TimeKeeper class * Splitting out remaining order tests from test\_resources * Fix pep8 gate errors * Adding more content to the api reference for secrets * Fix flake8 issue * Updated from global requirements * Removing the debian folder * Add functional tests for certificate orders * Fix common misspellings * Imported Translations from Transifex * Updated from global requirements * Add retry periodic task and worker-client logic * Add sub-status logic to worker/task processing * BarbicanHTTPException can take arguments for client\_message * Starting to rework docs around the secret resource * Fix string substitution in exception messages missing the s 2015.1.0b3 ---------- * Fix string substitution in exception messages missing the s * Change certificate unit tests to use strong algorithms * Make the default devstack config use the right password * Add asymmetric key support to KMIP plugin * Fixing errors and warnings on the sphinx docs * Removed get\_secret\_type * Moving containers tests to separate module * Add code to populate CA tables and select plugin based on ca\_id * Remove unused etc/dev\_tempest.conf file * Refactor secrets functional tests for readability * 
Test functionality of generated asymmetric keys * Update README file * Imported Translations from Transifex * Implement validate\_pkcs10\_data * Deprecate old secret decrypt path both in code and docs * Use unique refs for RSA container example * Added new repository classes and controller classes for CAs * Standardized Secret Encoding * Updating secret store plugin to support PyKMIP 0.3.0 * Making sure we allow all content-types for delete calls * Third round of refactoring secrets tests * Porting more tests to test\_secrets * add another missing status code check in functional tests * Fix functionaltest keystone URL fetch bug for v2 * Add missing alembic migration script for CA tables * Upping process-timeout and fixing posargs in tox.ini * Starting to split out orders from test\_resources.py * check some responses in functional tests before using the resp * Fix create orders functional tests for asynchronous order processing * Making the tox -e functional tests run faster * Cleanup and renaming within secrets smoke tests * Remove extra v1 from override uri * Split out and refactored more secret tests * hide the eggs * Replacing functional test authentication hookup * Fix for order obj expiration issue with SQLAlchemy * Starting refactor of test\_resources * Fixing test dependence on execution order * Enforce X-Project-Id coming from the request headers * Ensure that external secret refs cannot be added to containers * Ported API documentation to the repo * Updated from global requirements * Creating indexes for foreign keys * Fixing race-condition for order processing in workers * Enable secret decrypt through 'payload' resource * Imported Translations from Transifex * Let functional tests run with older tempest * Fixed Binary Encoding to Secret Stores * Updated from global requirements * Use oslo\_log in db-manage script * Get rid of Repositories class * Use urljoin instead of os.path.join * Fix functional tests to use new auth provider interface * Validate character set used for payload when encoding is base64 * Adding NewRelic monitoring to worker tasks * Fix bug in tests assuming order is active * Update devstack to run tests both sequentially and in parallel * Remove version from endpoints in catalog * get\_or\_create\_project now calls repo factory * Clean up test inheritance * Containers and Consumers controllers use repo factories * Refactor Secrets resource to use repository factories * Refactor Orders resource to use repository factories * Modified plugin contract to include barbican-meta-dto * Making RootController load child controller at runtime * Split override-url in functional test config file * Imported Translations from Transifex * Cleaning up application initialization * Using a central secret store manager to remove lock * Attempting to clean up some of the db session code * Fixing logging import for barbican-worker.py * Fixing unable to retrieve req id bug * Use dictionary comprehensions and literals * Add missing localisation markers * Imported Translations from Transifex * Remove pysqlite requirement * Add Barbican order metadata table and repository * Add support for dogtag in devstack testing * Updated from global requirements * Port to graduated oslo.log package * Sync with latest oslo-incubator * Fix security bug with host HTTP header field * Add container consumer repository factory * Removing uuid verification mock * Add clarification regarding \_lookup in secrets and orders * Enforce secret and order IDs are valid UUIDs * Port the Database Migrations doc * 
Add validation for profile for Orders API * Adding ldap dependencies to documentation * Added mixin class to allow soft deletes * Added secret\_type to Secret model * Imported Translations from Transifex * Add missing repository factory functions * Updating copyright on barbican/api files * Updated copyright dates for functional tests/models * Add subject\_dn validator * Changing basic copyright for a section of functional tests * Refactor \_lookup for orders controller * Imported Translations from Transifex * Configure colored logging in devstack * Refactor \_lookup for secrets * Remove useless requirements wsgiref * Add code to generate a CSR in the stored key case * fix a use after free bug in the pkcs11 plugin * Fix "invalid credentials" error running functional tests * Fix symmetric/asymmetric key order meta validation * Replaced calls to iteritems with calls to six.iteritems * Imported Translations from Transifex * Cleaning up code duplication in hrefs.py * Cleaning up method identation in transportkeys.py * Cleaning up literal dict in validators.py * Refactor exception handling in the app side * Run functional tests against any barbican server 2015.1.0b2 ---------- * Imported Translations from Transifex * Add the ability to use either identity v2 or v3 API * Drop old namespace for some oslo libraries * Updated from global requirements * Change exception when store plugin is misconfigured * Trivial refactors to secret controller * Imported Translations from Transifex * Completely refactor PKCS11 plugin * Delete comments that are no longer valid * Renamed outputted keys from base model * Fix downgrade for revision 254495565185 * Fix error in "tenants to projects" migration script * Remove unnecessary checks from migration commands * Added new model classes for CAs * Add 'current' option to the migration script * Add 'history' option to the migration script * Handle SystemExit properly in migration script * Add support for simple cmc requests to Dogtag plugin * Updated from global requirements * Switch Python's json to the OpenStack's json wrapper * Updated from global requirements * Remove commented test cases * Inherit tests instead of explictly calling them * Updated from global requirements * Resolve intermittent HTTP 404 in devstack gate * Include logging for barbican functional tests * Fix content type validation if missing payload * Drop Python 2.6 support * Refactor order validation * Updated from global requirements * Add data model entities for OrderRetryTask * Fix 500 error when PUTing an order * Adding exceptions on bad key file permissions * Checking the certificate file has reasonable permissions * Return the actual name value for entities * Fix UnicodeDecodeError's in the functional tests * Updated from global requirements * Adding client certificates to connection credentials * Fix usage of keystone v3 * Only de-serialize objects when possible * Enable functional tests to take a regex from tox * Configure keystomemiddleware using identity\_uri * Make default action return 405 in the controllers * Updated from global requirements * Moving exception logging in the base behaviors * Adding error handling to help debug devstack issue * Replace and remove native asserts * Add I18n-related unit tests (Part 3) * Plugin contract changes for the certificate-order-api * Add validation for certificate-order-api * Enable passing test regex to testr from tox * Remove invalid TODOs related to bug 1331815 * Replace instances of keystone\_id from the code * Change keystone\_id for 
external\_id in model * Fixes crypto enabled plugins configuration 2015.1.0b1 ---------- * Fix content\_type loading to be consistent * Updated from global requirements * Delete secret from plugin only if there's metadata * Setting the max secret bit\_length size to be 32767 * Add I18n-related unit tests (Part 2) * Use keystone v3 credentials for functional tests * Support containers without a name * Removing conditional logic around KMIP tests * Actually run type order creation test * Notify user if no database is specified * Add I18n-related unit tests (Part 1) * Changing ModelBase.save to correct updated time * Replace 'tenants' for 'projects' in documentation * Replace model related instances of tenant * Make flake8 ignore \*.egg * Imported Translations from Transifex * Dont set debug and verbose as our example * Updated from global requirements * Update log messages to oslo.i18n * Workflow documentation is now in infra-manual * Updated from global requirements * Remove py26 from tox.ini * Container deletion will now clean up Consumers * Add functional tests for order * Added smoke tests for consumers * Updated from global requirements * Add PyKMIP to requirements * Updated from global requirements * Moved secret functional tests to data driven tests * Updated from global requirements * Use canonical name for coverage job * Updated from global requirements * Updated from global requirements * Added test to check that an expired secret cannot be retrieved * Fix communication of secret\_type info * Use "key-manager" for service type * Fixing remaining hacking violations * Added smoke tests for containers * Excluding alembic migrations from coverage * Cleaning up a couple nits in the Symantec plugin * The last round of secrets functional tests * Remove redundant else, none returned by default * First set of functional tests for orders * Adding basic functional tests for containers * Added smoke tests for orders * Replace trivial instances of tenant for project * Updated from global requirements * Port the Dependency Adding/Updating docs to Sphinx * Added second round of secrets functional tests * Port the troubleshooting documentation into Sphinx * Added first round of functional tests for secrets * Added support classes for secret functional tests * Adding simple getting involved doc * Adding docs around running tests and devstack * Taking a first stab at putting together setup docs * Attempting to fix the devstack gate job * add new diffcover target for a future non-voting gate job * Smoke tests for secrets in Barbican Functional Tests * sync global requirements now that pecan 0.8 is out * Add certificate plugin page * Add secret store plugin page * Updated version tests to include auth and unauth flavors * Add plugin sub-folder and augment crypto plugin doc * Move functionaltests into smoke or functional subdirectories * Bumping default ssl\_version to TLSv1, in light of POODLE * Removing new\_name argument from test\_wrapper * Add missing \n at the end of file * Remove extraneous vim editor configuration comments * Delete usage-indications from the model docstrings * Adding keystone notification listener support 2014.2 ------ * Update to the latest global requirements versions * Imported Translations from Transifex * Adding tox job for local functional test dev * Adding a sample tempest config * Open Kilo development * Sync SQLAlchemy lifecycle to request cycle * Cleaning up secret functional tests * Code clean-up due to type order enhancement * Use canonical cover name for coverage * 
Adding parameterized decorators for unit tests * Fix error in two-step secret PUT with base64 * Use OpenStack Incubated branding for docs * Allowing all content-types for secret delete * Adding size limits for create secret json fields * Refactor secret functional tests using models and behaviors * Add asymmtric order validator * Changes to get certificate issuance flow working * Deduplicate HACKING.rst with docs.openstack.org/developer/hacking/ * Add asymmetric key generation in dogtag plugin * Pin Pecan to 0.6.0 to fix binary secret decrypts * Test the secret model using an in memory database * Adds to KMIP secret store test coverage * Switch barbican.sh to use testr * Typos 'asychronous' and 'formated' * removed tenant id from code samples * Bringing translation.py coverage up to 100% * Remove restrictive hard-coded orders validation * removed whitespace from pom.xml * Update Getting Started Guide to include tech review feedback * Adding sub-status and message to orders * Imported Translations from Transifex * Remove config parameter from secret\_store.py interface * Modifying testr conf to suppress output * Fixing the PYTHONHASHSEED bug with our unittests * Updated dev guide to include feedback from previous tech review * removed image files as they referred to internal architecure * Switch to running tests in parallel with testr * Install tempest instead of just adding it to PYTHONPATH * PKCS11 refactor to use a master KEK and per project KEK * Adding support for allowing binary data as-is as part of PUT * Adding missing unit test on queue server tasks * Imported Translations from Transifex * Making a few MORE modules hacking 0.9.2 compliant * Adding initial update logic for orders * Move to oslotest 2014.2.b3 --------- * Add initial files for certificate event handling * Reorganize code to use store crypto plug-in * Remove some inline if/else statements * Replace explicit assertion for function * Make a whole host of modules hacking 0.9.2 compliant * Updating API unit and functional tests to new hacking standards * Imported Translations from Transifex * Making a few modules hacking 0.9.2 compliant * Additional work on certificate processing * Updated Create Secret request and response * Updated Get Secrets request and response * Initial connect orders resource to certificate processing * Updated Get Orders request and response * Add a py3pep8 tox job. 
This will verify py3 compliant syntax * Imported Translations from Transifex * Fix Container list to properly format secret\_refs * fix for bug #1359197 * fix all the log statments to use %s fomatting * Add order plugin metadata entity and logic * Add certificate\_manager functionality to dogtag plugin * Allow devstack to do git clone of barbican * Remove second setUp() call in tests * change CryptoPluginManager to be instantiated in the module scope * Imported Translations from Transifex * Edited docs to improve context * Minor cleanup and moving around code for clarity * Add more type in order post * Update versionbuild script to handle setup.cfg version * Replacing backslashes * Adds KMIPSecretStore and unit tests * Install of a Barbican RPM fails due to missing keystonemiddleware * Imported Translations from Transifex * Removing symantec as a default plugin * Updating symantec plugin docstrings * First attempt at adding the symantecssl library * Revert remove version from setup.cfg * Install sqlite-devel package on fedora * Refactor secret\_store for consistency * remove project-id from resource URIs * Replace hard-coded setup version setting * Fixed misspelling in error message * Make transport\_key an optional arg in SecretDTO * Add Certificate Interface & Symantec Plugin * Clean old comments (already implemented) * Add support to Barbican for consumer registration * Force uWSGI to set "Connection: close" header * Adds store\_secret\_supports to secret\_store * Remove remaining skipTest * Add code to retrieve secrets metadata and data with transport key * autodoc import error for plugin.rst * Replace skipTest in favor of decorator * Eager load KEKDatum record when EncryptedDatum is retrieved * Code to pass through transport\_key\_id when storing secret * Correct container create response code to be 201 * Adding doc sections to tox.ini * code to retrieve transport key on metadata request * Set python hash seed to 0 in tox.ini * Imported Translations from Transifex * Add tempest tests to run in devstack gate * Use auth\_token from keystonemiddleware * Cleaning up index.rst file * Don't log message for keystone ID query when supressing exceptions 2014.2.b2 --------- * Added duplicate secret\_id validation in a container request * Fix accepting longer PKI keystone auth requests to Barbican * Imported Translations from Transifex * Bug fix 1336995 DateTime type only accepts Python * removing dead code from common.resources * Remove dead code from config * Replace should\_fail for should\_raise in tests * Fix 500 error for secret PUT * Fix Dogtag plugin and tests to match current secret\_store API * Implement content-type HTTP header enforcement * Change function and import names for readability * Correctly ordering requirements.txt * Move crypto package contents to the new plugin structure * Refactor json validator * Correct default bit\_length to match schema constraint * Replaced some UTF-8 characters with ASCII characters * Update from global requirements * Update devstack endpoint creation to include v1 * Increase test coverage to 100% in validators.py * Increased test coverage for app and updated .gitignore * Removing an unnecessary catch * Excluding the docs target folder from flake8 * Pin barbican above the most recent version of pecan * Disable empty or null names for crypto plugins * editing for gramatical and english errors * Ensure that secrets within orders have expiration date isoformatted * Add dogtag plugin to new plugin structure * Increased common/utils.py test coverage to 
100% * Refactor secret validation for redability * Adding test cases for more coverage * Refactor allow\_all\_content\_types for readability * Don't package dependencies available in RDO * Use oslo-config and oslo-messaging from RDO * Restructure project to accommodate plugin types * Pass tests in newer versions of pecan * Addes intermediates to certificate container * Add \*\*kwargs to on\_ controllers * fix for - JSONErrorHook is not setting content type to JSON * Fix for Unicode-objects must be encoded before decryption exception * remove default=None for config options * Ensure that datetimes are isoformatted * Add certificate to the container type option * Fixed several typos * replace iterator.next() with next(iterator) * fix to include data migration script * Adding 3 new columns Type, Meta and container\_id to Orders * Add SecretStore interface * Fix data migration script error * Adding SecretStoreMetadatum to the list of MODELS * Crypto dev plugin now uses per-tenant KEKs * Prepare barbican for localization * Replace nonexistent function in test * Usage of \_\_metaclass\_\_ not necesary with six.add\_metaclass * Enable tox to upgrade packages if needed * Add a local.conf for easier DevStack spinning up * Adding docbook-based docs * enable all hacking checks * Update the links to python-barbicanclient * Prefer os.urandom to PyCrypto's PRNG * Beginning transition from pycrypto to cryptography * More Hacking Violation Fixes * Update to oslo-incubator (caed79d) * Additional Hacking violation corrections * enable hacking check H234 * unpin iso8601 dependency * Fixed some Hacking violations * major hacking fixes * Fixing A Few Hacking Violations * Add TransportKey as a resource * Update Plugin Development documentation * Changing from stackforge to openstack project * Change name of SecretMetadatum table to SecretStoreMetadatum * Update .gitreview file for new repo name * Fix flake8 errors in migration version file * Lock Pecan version to 0.5.0 for now, https://review.openstack.org/89849 * Fix dogtag unit tests * Remove extra parameter for generate\_symmetric & generate\_asymmetric in test plugin * Fixed http 500 due to mismatch between ResponseDTO and tuple from plugin encrypt * Fixed a typo in a comment * Pass secret metadata to crypto extension manager for plugin search * Remove redundant code on offset, limit parameters * Implement the REST API with pecan * Increase DevStack uWSGI buffer for Tempest support (Keystone Headers) * change assertEquals to assertEqual * Adds SecretMetadatum table * Extend crypto plugin to support more key types * Metaclass Python 3.x Compatibility * Ensure uWSGI process in DevStack is actually killed * Fix a bug with handling of binary data * Add configuration items to limit secret and request sizes * Add Sphinx developer documentation skeleton * Add Dogtag crypto plugin * correcting spelling error in README file 2014.1 ------ * Support for debug mode start in barbican * Update crypto plugin interface to support Dogtag * Clean up Verifications resource * Revive Alembic-based database migration logic * Remove v1 from the falcon app route * Housekeeping, re-alphabetize requirements * Make max api request size configurable * Barbican should not do rbac on version api call * Include discover in test-requirements * Switch to testtools and make barbican compatible with Python2.6 * Fix string format used to report content validation errors * Correct RBAC response codes * Create secret - case insensitive Content-Type check 2014.1.b3 --------- * adding placeholder 
for endpoints * Barbican uWSGI stats server listen on localhost * Minor changes to make the bash8 check happy * Adding more msgs, checking admin/non-admin endpoints * Modify python-dateutil RPM build process * Fixing bug 1287993 - logging issue * Correcting bash script pre-test-hook * Remove oslo.uuidutils * Locking the version of iso8601 * Adjust contrib location, minor adjustments * Adjust location of the functionaltests folder to root of project * use of more readable policy language in policy file * Adding DevStack Support * Adds container endpoints to Barbican API * Tell install to create files as 644 instead of 755 * Removes dead and unneeded code * Add script to ensure semantic versions work with continuous build * Specified content type is dropped on single step create secret * add fpm packaging to satisfy oslo.messaging dependency for pyYaml * Add queue configuration changes to support HA Rabbit queues * Initial checkin of doc source * fix for - create a secret - doesn't trim content type * Support building wheels (PEP-427) * exclude abstractmethod from coverage * fix for testr * Switch to testr * Set rpm %\_topdir macro during build * Removed dead code and updated tests to cover 100% for crypto * Remove custom install\_command for argparse * Enable tag-based full version reporting via pbr 2014.1.b2 --------- * Add RPM package dependencies needed for Barbican nodes post oslo.msg/pbr * Alphabetizing requirements as per OpenStack best practices * Switching to use VIRTUAL\_ENV env variable * Fixing issue around error messages not being populated * Update crypto unit test coverage * Add RPM package dependencies needed for Barbican API/Worker nodes * Removed scripts that are not being used * Remove PostgreSQL dependencies from Barbican * removing run\_tests.sh * Updating copyright year * Adding invalid\_property attr to support test code * Sync the kombu and amqp versions * Remove lingering celery dependencies out of RPM dependencies * Enabled branch coverage for barbican. 
Took out inclusive coverage * Fix file copy bug with the barbican installer * Removing bin/versionrelease * Updating RPM build version * Attempting to fix the tox environments * Fix RBAC unit test failure due to uninitialized mock * Sync with global requirements * Fixed to un-quote the name parameter that gets used when querying for secrets * Removes unneeded details from the error message * Migrate to pbr * Replace Celery with Oslo messaging for asynchronous queuing * Expand secrets list API to allow for filters on name, mode, algorithm, and bit length * host\_href should include scheme * Fixed response object in the Verifications GET list resource method * Add .mailmap file * Add verification REST service to Barbican * Line wrap README, revise IRC channel to #openstack-barbican * Fix automatic signing * Sign RPM builds * Update Launchpad URL * add link to bug tracker * Removed README.rst in favor of README.md * Remove Celery and Kombu version pinning * Gracefully handle request for binary data as plain * Update the keystone configuration script for Barbican per Johns Hopkins team * Add a better installer for local Barbican application instances * Fix spelling error, and add uWSGI stats server support * Pin billiard version to 2.7.3.34 * Fix for bug/1238895: normalize plain text mime types * Change the queue 'broker' config to allow a list of brokers * Add back a explicit barbican-worker.py install line * Remove unneeded Barbican worker script from generated RPM * Add files to create worker RPM and get api/db/queue/worker nodes deployed * Modify version template for the final OpenStack Havana release * Fix KEK generation in the P11 Plugin + fix unit tests * Change from using 'cypher\_type' to 'mode' in API and code base * Remove duplicate TenantSecret on secret PUT call * Remove base64 encoding feature from decrypted secrets GET API * Refine the Secret metadata response by \*not\* returning the 'content-encodings' * Add the error reason to the Order entity if async processing fails * Refactor plugin API methods * Update 404 message * Freeze amqp version for rpm packaging * Update dependency packaging * moar dependencies thanks to oslo * Add missing content type and encoding business logic after MIME-type revamp * Remove uwsgi requirement * Added total number of secrets/orders available to response * Add new dependencies to rpm * adds lazy app loading to the uwsgi inis * Add role based access control (RBAC) to Barbican * Handle missing cyper\_type and algorithm fields * Remove padding code and tests in the p11 plugin * Fix order creation bug involving content types * Add mock testing for the p11\_crypto plugin * Fix rpm build * P11CryptoPlugin is now functional * Update P11 plugin to function & match new plugin contract * First attempt to integrate Paul's HSM plugin into Barbican * p11 plugin work * Added hacking target to tox * Change the location of the New Relic config file * Mime Type Revamp * Add New Relic application monitoring * Fix PyCrypto rpm package name built by fpm * Package PyCrypto 2.6 into an rpm using fpm * Stop barbican-api during rpm removal * Fixing NoneType issue with limit and offset * Restrict use of 'plain\_text' to 'text/plain' MIME * Package dependencies in RPMs using fpm * Automate rpm build * Clean list paging offset and limit values properly * Fix rpm dependencies * Validate MIME type and cypher type during order creation * Added logic to skip migrations for sqlite databases, as sqlite does not support altering schema data * Added barbican-api rpm packaging * 
Adding files to support database migration via alembic * Adjust debian upstart script to allow for updating running app when updating package version * Added missing -1 in debian changelog file * Revamped version release script to use time stamps; * Clean up debian packaging. Install dependencies from distro repos * Add .gitreviw file; Fixed pyflakes violations; Fixed pep7 violations; Fixed tox.ini config; * Prepare for next release 0.1.65 ------ * Release for v0.1.65 * Fixed script rename in setup.py * Restore comments about policy in barbican run script * Renamed barbican-api script to barbican-all * Use upstart instead of init scripts for uwsgi emperor * Added admin api as a separate falcon app * Removed unused uwsgi.ini file * Prepare for next release 0.1.64 ------ * Release for v0.1.64 * Updated the perf URI; * Prepare for next release 0.1.63 ------ * Release for v0.1.63 * Changing the performance URI; * Prepare for next release 0.1.62 ------ * Release for v0.1.62 * Adding blitz.io API contract; * Modified readme to refer to the wiki home page; * Clean up unit test suite; Remove print statements; * Prepare for next release 0.1.61 ------ * Release for v0.1.61 * Require latest SQLAlchemy (8.1) * Prepare for next release 0.1.60 ------ * Release for v0.1.60 * Prepare for next release 0.1.59 ------ * Release for v0.1.59 * support faux key creation in crypto plugins + tests * PEP8 clean up; * #131 Fix defect involving expiration dates with time zones; Add graceful handling of 500 errors in REST resources; * Prepare for next release 0.1.58 ------ * Release for v0.1.58 * Prepare for next release * Release for v0.1.57 0.1.57 ------ * Updating API Doc and Technology links * Prepare for next release * Release for v0.1.56 0.1.56 ------ * Address Douglass review items; Officially deleting api validators.py; * Put text/plain in front of octet-stream in secret mime-type enum, as is more common type probably; * Added enum validation of mime-types * Finished order validation; Fixed secret from order bug; Added logic to display secret UUID for name if not specified; * Added orders validation; Fixed bug in the max size of plain text secrets; * Prepare for next release * Release for v0.1.55 0.1.55 ------ * Added more testing around nulls/empties in JSON input; * Prepare for next release * Fixed parser failure output back to REST client * Release for v0.1.54 0.1.54 ------ * FIxed PEP8 violations; Added jsonschema to installer dependencies; * Added validation logic for new secrets; * put back the copyright range to that of the original copied file .. instead of the incremented 2013 * do not need wsgi, using Middleware class from barbican.api.middleware.\_\_init\_\_.py * using Middleware from api.middleware.\_\_init\_\_, just what we need. Added debug middleware to the \_\_init\_\_ file. Also extended copyright range from 2011-2012 * Prepare for next release * Release for v0.1.53 0.1.53 ------ * Adding err.log to git ignore list; Remove spurious log; * Fixed mismatch between tenantsecret table's FKs and the ids of the models; * Add tox to test requires; * Add tox to test requires; Better handle mal-formed JSON errors; * creating a request context after token authorization that contains user particulars such as tenant-id, user-id etc * support for keystone context in request * fixes the problem of 'Command tools/with\_venv.sh pip install --upgrade pip failed.' In the process adopted install\_venv\_common.py. 
Note setup.sh is currently commented out * Prepare for next release * Release for v0.1.52 0.1.52 ------ * Added more debug logging in the paging repo methods, to troubleshoot defect; * Oops, put back the 'see glance setup.py' for later version flow incorporation ala OpenStack lines * Grab kek from config file for simple plugin * need to make executable to not get permissions error when no virtual environment exists * Prepare for next release * Release for v0.1.51 0.1.51 ------ * removed references to glance and substituted with barbican * Prepare for next release * Release for v0.1.50 0.1.50 ------ * Adjusted limits logic; Added logging around limits to troubleshoot bug; * Prepare for next release * Release for v0.1.49 0.1.49 ------ * Added missing is-deleted filter on order query; Changed secret POST response code to 201; * Prepare for next release * Release for v0.1.48 0.1.48 ------ * Restore docstring param accidently removed; * Addressed Douglas issues with imports; Fixed PEP8 violations * Bug fix for order/tenant creation; * Merged in Douglas' changes; * Added better error feedback to clients; Added missing unit tests; * Remove EncryptedDatum from plugin * Added missing tenant-filtering logic/SQL; Added improved error reporting to clients; * Only use byte type in plugin * Added tenant-id filter to db queries of secrets; * Secret can have more than one encrypted datum * Save encryption details to kek\_metadata, not datum.mime\_type * Prepare for next release * Release for v0.1.47 * Added aes-128-cbc encryption in plugin using pycrypto 0.1.47 ------ * Added size limits to the secret data; Fixed the next nav link to hide if no more data to display; * Added methods to the crypto plugin manager to handle key gen and is-supports API flows; Added more code to deal with corner cases in API flow; * Added soft-delete orders/secrets support; Updated unit tests; * Fixed PEP8 violations; Tweaked fake encrypt/decrypt to show original passed in data; * Prepare for next release * Release for v0.1.46 0.1.46 ------ * Don't inject conf into extension manager * Added support for retrieving secret info stored in datum records, more work to follow * Prepare for next release * Release for v0.1.45 0.1.45 ------ * Prepare for next release * Release for v0.1.44 0.1.44 ------ * Prepare for next release * Release for v0.1.43 * Use oslo.config to load crypto plugins 0.1.43 ------ * Prepare for next release * Fixed the one-step POST call flow, verified with text/plain mime type; Modified exception handling to provide better feedback to client on errors; * Release for v0.1.42 0.1.42 ------ * Comment out identity policy setup in bin that is clobbering auto deployment process * Added warning about is-null test * Fix SQL bug in model code; * Prepare for next release * Release for v0.1.41 * Fixed some PEP8 violations; * Merged Douglas' changes; Updated debian installer to install stevedore package; * Added support for restricting GETs of expired secrets; Set default page size to 10 records; * Prepare for next release * Release for v0.1.40 0.1.40 ------ * Prepare for next release * Release for v0.1.39 0.1.39 ------ * pep8 fixes * Fixed tox tests broken by upstream commits * Add support/tests for paged orders list via GETs; * Fixed unit tests to mock policy enforcer; * Fixed order test * Finished secrets GET unit tests; * Removed barbican.crypto.fields module. 
Use plugin system instead * Added decrypt via plugin * Added paging logic, preliminary unit tests * manual test aid * merged with latest secrets resources, and load-balance related changes * keystone middleware integration * integrating keystone policy infrastructure * Add paged list of secrets via secrets GET; * Added crypto plugin encryption to Secrets post * Prepare for next release * Release for v0.1.38 0.1.38 ------ * Converted ids to HATEOS-style refs per API docs; Added true host name for this href via conf file; * Transferring blueprint from apiary.io * Added missing secret fields; Changed order request to have an embedded secret dict that mirrors the secret request type; * Prepare for next release * Release for v0.1.37 * Prepare for next release * Release for v0.1.37 0.1.37 ------ * Fix secret post error * Prepare for next release * Release for v0.1.36 0.1.36 ------ * Prepare for next release * Add unit testing; Add text/plain mime handling; * Release for v0.1.35 * Add PUT secret logic; add generate key (simple only); Add more unit tests; Still need to add missing secret paramters * Added content-types output for secret GET * changed port to 9311 instead of 8080 * Prepare for next release * Release for v0.1.34 * Added creation of secret from order processing * changed port to 9311, avoiding 9292 (taken by glance) and 8080 (taken by S3) * Prepare for next release * Release for v0.1.33 0.1.33 ------ * Prepare for next release * Release for v0.1.32 * Resolve diff in versions * Prepare for next release * Release for v0.1.30 * Revamped Secret and Order stuff per discussion, just baseline functionality, not complete * Modified files per revamped secrets/orders API * require python-keystoneclient to support keystone authentication * Prepare for next release * Release for v0.1.31 0.1.31 ------ * Remove duplicate debian source folder * Ignore IntelliJ module files * Prepare for next release * Release for v0.1.30 0.1.30 ------ * Prepare for next release * Release for v0.1.29 * Added v1 prefix to URI routes * Prepare for next release * Release for v0.1.28 * Changed working and links for new info * Changed the mailing list in the README to the openstack-dev one; Added info about the Freenode IRC for barbican as well * Prepare for next release * Release for v0.1.27 * Add links from main git page to wiki pages * Prepare for next release * Release for v0.1.26 * Added unit test for the sample middleware component; Tweaks to the Barbican worker node boot script; * Prepare for next release * Release for v0.1.25 * Added unit test for tasks; Replaced TBDs with more IDE-friendly TODOs * Prepare for next release * Release for v0.1.24 * Add final config file changes for queuing/Celery * Prepare for next release * Release for v0.1.23 * Prepare for next release * Release for v0.1.22 * Added files needed to run Celery- and RabbitMQ-based workers * Revamped to look more like the Celery tasking approach * Prepare for next release * Release for v0.1.21 * Prepare for next release * Release for v0.1.20 * Prepare for next release * Release for v0.1.19 * Prepare for next release * Release for v0.1.18 * Modified the test/default/standalone db connection to use a sqlite file in a folder writable by the barbican user * Prepare for next release * Release for v0.1.17 * Added missing pip dependencies for debian package * Prepare for next release * Release for v0.1.16 * Prepare for next release * Release for v0.1.15 * Modify boot script to put copy of conf file into home dir for local-only deployments * Address issue 
with oslo config rejecting cli options for logging * Fixed mismatch between wiki and api script file venv setting * Moved config files back to etc/barbican * Added missing requires * Added middleware sample; Modified boot script accordingly; * Utilizing openstack jsonutils, per Chad feedback * Fixed mismatch in bin script with wiki docs * Fixed bug in local launch of barbican via bin script * PEP8 cleanup * Added secret entity type; Updated the banner of source files to match Rackspace open source * Fixed/added unit tests...more to come * Added initial worker and queue modules, more work to go.. * Added CSR and SSL Certificate resource and models...still need to plumb components * Added tenent resource test; * Cleaned up repo/model/config files to be more like Glance; * Add initial revamped data related files * Add paste config file * Prepare for next release * Release for v0.1.14 * Add oslo/openstack files; Modified barbican-api script and config to support local spinup of uwsgi API server * Prepare for next release * Release for v0.1.13 * Fixed PEP8 violations * Prepare for next release * Release for v0.1.12 * adding images for the wiki page * Prepare for next release * Release for v0.1.11 * Prepare for next release * Release for v0.1.10 * Add change to test Jenkins for sprint review * Prepare for next release * Release for v0.1.9 * Test config to trigger Jenkins * Prepare for next release * Release for v0.1.8 * Prepare for next release * Release for v0.1.7 * Test change to repo * Prepare for next release * Release for v0.1.6 * Prepare for next release * Release for v0.1.5 * Prepare for next release * Release for v0.1.4 * Prepare for next release * Release for v0.1.3 * Prepare for next release * Release for v0.1.2 * Added pythonpath to where config.py is located * Added missing version utility modules * Bump to the next version * Fixed syntax error in script * Added bin folder, to contain start scripts for the Barbican services * Adjust the inital version to 0.1.1 * Moving POC code * Update README.md * Update README.md * Update LICENSE * Basic Readme * Adding PANIC log type * removing tag code * workaround for bad header parsing * Wrap policies in plural container * Update README.md * Added missing dependency * PEP8 Fixes * \Merge branch 'tenantAPI' of github.com:cloudkeep/barbican * fix a bug for mutiple page agent pairing * a bug fix for updating pairings * Added Agent API and Agent WebUI * Use Markup for escape * Update Web UI for event display * Updated tenant API and API testing * add tenant api * add test script for API * added sample files to give coverage tool something to chew on * added missing args from tox config * Removed conflicting argument for nosetests * added missing nosetests parameters in setup.cfg * added missing package py files * added pip and test requirements * Add test and config files to test Jenkins * adding tox.ini file * API Support For Policies * Added Pairing Call * API Example * WIP: First API Call * Added basic policies * Added Key model * Added Tenant model * Authenciation & Admin * Improved style for Users * Added Twitter Bootstrap for style * SQLAlchemy Part 2 * Adding SQLAlchemy persistence * Simple formatting fixes * API Blueprint * Flask Structure * Update README.md * Initial commit ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/HACKING.rst0000664000175000017500000000534200000000000015157 0ustar00zuulzuul00000000000000Barbican Style Commandments =========================== 
- Step 1: Read the OpenStack Style Commandments
  https://docs.openstack.org/hacking/latest/
- Step 2: Read on

Barbican Specific Commandments
------------------------------

- [B310] Check for improper use of logging format arguments.
- [B311] Use assertIsNone(...) instead of assertEqual(None, ...).
- [B312] Use assertTrue(...) rather than assertEqual(True, ...).
- [B317] `oslo_` should be used instead of `oslo`.
- [B318] Must use a dict comprehension instead of a dict constructor with
  a sequence of key-value pairs.
- [B319] Ensure to not use xrange().
- [B320] Do not use LOG.warn as it's deprecated.
- [B321] Use assertIsNotNone(...) rather than assertNotEqual(None, ...) or
  assertIsNot(None, ...).

Creating Unit Tests
-------------------

For every new feature, unit tests should be created that both test and
(implicitly) document the usage of said feature. If submitting a patch for a
bug that had no unit test, a new passing unit test should be added. If a
submitted bug fix does have a unit test, be sure to add a new one that fails
without the patch and passes with the patch.

Running Tests
-------------

The testing system is based on a combination of tox and testr. If you just
want to run the whole suite, run `tox` and all will be fine. However, if
you'd like to dig in a bit more, you might want to learn some things about
testr itself. A basic walkthrough for OpenStack can be found at
http://wiki.openstack.org/testr

OpenStack Trademark
-------------------

OpenStack is a registered trademark of OpenStack, LLC, and uses the following
capitalization: OpenStack

Commit Messages
---------------

Using a common format for commit messages will help keep our git history
readable. Follow these guidelines:

First, provide a brief summary (it is recommended to keep the commit title
under 50 chars). The first line of the commit message should provide an
accurate description of the change, not just a reference to a bug or
blueprint. It must be followed by a single blank line.

Following your brief summary, provide a more detailed description of the
patch, manually wrapping the text at 72 characters. This description should
provide enough detail that one does not have to refer to external resources
to determine its high-level functionality.

Once you use 'git review', two lines will be appended to the commit message:
a blank line followed by a 'Change-Id'. This is important to correlate this
commit with a specific review in Gerrit, and it should not be modified.

For further information on constructing high quality commit messages, and how
to split up commits into a series of changes, consult the project wiki:
http://wiki.openstack.org/GitCommitMessages

././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/LICENSE0000664000175000017500000002614700000000000014374 0ustar00zuulzuul00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity.
For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. 
You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright 2013, Rackspace (http://www.rackspace.com) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1743590729.1970294 barbican-20.0.0/PKG-INFO0000644000175000017500000000563300000000000014457 0ustar00zuulzuul00000000000000Metadata-Version: 2.1 Name: barbican Version: 20.0.0 Summary: OpenStack Secure Key Management Home-page: https://docs.openstack.org/barbican/latest/ Author: OpenStack Author-email: openstack-discuss@lists.openstack.org Classifier: Environment :: OpenStack Classifier: Intended Audience :: Information Technology Classifier: Intended Audience :: System Administrators Classifier: License :: OSI Approved :: Apache Software License Classifier: Operating System :: POSIX :: Linux Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: 3 :: Only Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.9 Classifier: Programming Language :: Python :: 3.10 Classifier: Programming Language :: Python :: 3.11 Classifier: Programming Language :: Python :: 3.12 Requires-Python: >=3.9 License-File: LICENSE Requires-Dist: alembic>=0.8.10 Requires-Dist: cffi>=1.7.0 Requires-Dist: cryptography>=2.1 Requires-Dist: eventlet!=0.18.3,!=0.20.1,>=0.18.2 Requires-Dist: jsonschema>=3.2.0 Requires-Dist: oslo.config>=6.4.0 Requires-Dist: oslo.context>=2.22.0 Requires-Dist: oslo.db>=4.27.0 Requires-Dist: oslo.i18n>=3.15.3 Requires-Dist: oslo.messaging>=14.1.0 Requires-Dist: oslo.middleware>=3.31.0 Requires-Dist: oslo.log>=4.3.0 Requires-Dist: oslo.policy>=4.5.0 Requires-Dist: oslo.serialization!=2.19.1,>=2.18.0 Requires-Dist: oslo.service!=1.28.1,>=1.24.0 Requires-Dist: oslo.upgradecheck>=1.3.0 Requires-Dist: oslo.utils>=7.0.0 Requires-Dist: oslo.versionedobjects>=1.31.2 Requires-Dist: Paste>=2.0.2 Requires-Dist: PasteDeploy>=1.5.0 Requires-Dist: pbr!=2.1.0,>=2.0.0 Requires-Dist: pecan!=1.0.2,!=1.0.3,!=1.0.4,!=1.2,>=1.0.0 Requires-Dist: ldap3>=1.0.2 Requires-Dist: keystonemiddleware>=9.5.0 Requires-Dist: SQLAlchemy>=1.4.0 Requires-Dist: stevedore>=1.20.0 Requires-Dist: WebOb>=1.7.1 Requires-Dist: castellan>=1.2.1 Requires-Dist: microversion-parse>=0.2.1 Provides-Extra: dogtag Requires-Dist: dogtag-pki>=10.3.5.1; extra == "dogtag" Provides-Extra: kmip Requires-Dist: pykmip>=0.7.0; extra == "kmip" Provides-Extra: test Requires-Dist: hacking<6.2.0,>=6.1.0; extra == "test" Requires-Dist: pyflakes>=2.1.1; extra == "test" Requires-Dist: coverage!=4.4,>=4.0; extra == "test" Requires-Dist: oslotest>=3.2.0; extra == "test" Requires-Dist: pykmip>=0.7.0; extra == "test" Requires-Dist: stestr>=2.0.0; extra == "test" Requires-Dist: testtools>=2.2.0; extra == "test" Requires-Dist: fixtures>=3.0.0; extra == "test" Requires-Dist: requests>=2.18.0; extra == "test" Requires-Dist: WebTest>=2.0.27; extra == "test" Requires-Dist: python-keystoneclient>=3.8.0; extra == "test" Requires-Dist: tempest>=17.1.0; extra == "test" Requires-Dist: bandit!=1.6.0,>=1.1.0; extra == "test" Requires-Dist: doc8>=0.8.1; extra == "test" Requires-Dist: Pygments>=2.2.0; extra == "test" Service for storing sensitive client information for OpenStack ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/README.rst0000664000175000017500000000775200000000000015057 0ustar00zuulzuul00000000000000======================== Team and repository tags ======================== .. 
image:: https://governance.openstack.org/tc/badges/barbican.svg :target: https://governance.openstack.org/tc/reference/tags/index.html .. Change things from this point on Barbican ======== Barbican is a REST API designed for the secure storage, provisioning and management of secrets. It is aimed at being useful for all environments, including large ephemeral Clouds. Barbican is an OpenStack project developed by the `Barbican Project Team `_ with support from `Rackspace Hosting _, EMC, Ericsson, Johns Hopkins University, HP, Red Hat, Cisco Systems, and many more. The full documentation can be found on the `Barbican Developer Documentation Site `_. To file a bug, use our bug tracker on `Launchpad `_. Release notes for the project can be found at https://docs.openstack.org/releasenotes/barbican. Future design work is tracked at https://specs.openstack.org/openstack/barbican-specs. For development questions or discussion, use the `OpenStack-discuss mailing list `_ at `openstack-discuss@lists.openstack.org` and let us know what you think, just add `[barbican]` to the subject. You can also join our IRC channel `#openstack-barbican` on `OFTC `_. Client Libraries ---------------- * `python-barbicanclient `_ - A convenient Python-based library to interact with the Barbican API. Getting Started --------------- Please visit our `Users, Developers and Operators documentation `_ for details. Why Should You Use Barbican? ---------------------------- The current state of key management is atrocious. While Windows does have some decent options through the use of the Data Protection API (DPAPI) and Active Directory, Linux lacks a cohesive story around how to manage keys for application use. Barbican was designed to solve this problem. The system was motivated by internal Rackspace needs, requirements from `OpenStack `_ and a realization that the current state of the art could use some help. Barbican will handle many types of secrets, including: * **Symmetric Keys** - Used to perform reversible encryption of data at rest, typically using the AES algorithm set. This type of key is required to enable features like `encrypted Swift containers and Cinder volumes `_, `encrypted Cloud Backups `_, etc. * **Asymmetric Keys** - Asymmetric key pairs (sometimes referred to as `public / private keys `_) are used in many scenarios where communication between untrusted parties is desired. The most common case is with SSL/TLS certificates, but also is used in solutions like SSH keys, S/MIME (mail) encryption and digital signatures. * **Raw Secrets** - Barbican stores secrets as a base64 encoded block of data (encrypted, naturally). Clients can use the API to store any secrets in any format they desire. For the symmetric and asymmetric key types, Barbican supports full life cycle management including provisioning, expiration, reporting, etc. Design Goals ------------ 1. Provide a central secret-store capable of distributing secret / keying material to all types of deployments including ephemeral Cloud instances. 2. Support reasonable compliance regimes through reporting and auditability. 3. Application adoption costs should be minimal or non-existent. 4. Build a community and ecosystem by being open-source and extensible. 5. Improve security through sane defaults and centralized management of `policies for all secrets`. 6. Provide an out of band communication mechanism to notify and protect sensitive assets. 
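All of the secret types described above are created and retrieved through the same v1 REST API that the API guides below document in detail. As a quick, minimal sketch of the raw-secret use case, the following stores a small payload and reads it back. It assumes a locally running barbican at http://localhost:9311 and a valid Keystone token in the TOKEN environment variable (the same conventions used throughout the API guides); the name, payload, and reference values are purely illustrative.

.. code-block:: bash

    # Store a raw secret; barbican encrypts the payload at rest
    curl -X POST -H "X-Auth-Token: $TOKEN" -H "Content-Type: application/json" \
        -d '{"name": "example secret", "payload": "s3cr3t value", "payload_content_type": "text/plain"}' \
        http://localhost:9311/v1/secrets

    # The response includes a secret reference, for example:
    # {"secret_ref": "http://localhost:9311/v1/secrets/{uuid}"}

    # Retrieve the decrypted payload using that reference
    curl -H "X-Auth-Token: $TOKEN" -H "Accept: text/plain" \
        http://localhost:9311/v1/secrets/{uuid}/payload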
././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1743590729.105029 barbican-20.0.0/api-guide/0000775000175000017500000000000000000000000015221 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1743590729.125029 barbican-20.0.0/api-guide/source/0000775000175000017500000000000000000000000016521 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/api-guide/source/acls.rst0000664000175000017500000002642000000000000020201 0ustar00zuulzuul00000000000000****************** ACL API User Guide ****************** By default barbican manages access to its resources (secrets, containers) on a per project level, whereby a user is allowed access to project resources based on the roles a user has in that project. Some barbican use cases prefer a more fine-grained access control for secrets and containers, such as at the user level. The Access Control List (ACL) feature supports this more restrictive access. This guide will assume you will be using a local running development environment of barbican. If you need assistance with getting set up, please reference the `development guide `__ .. warning:: This ACL documentation is work in progress and may change in near future. ACL Definition ############## ACL defines a set of attributes which are used in policy-based authorization to determine access to a target resource. ACL definition is operation specific and is defined per secret or per container. Currently only the 'read' operation is defined. This supports allowing users on the ACL for a secret to retrieve its metadata or to decrypt its payload. This also allows users on the ACL for a container to retrieve its list of secret references. ACL allow a secret or a container to be marked private. Private secret/container means that only the user who created the secret/container can extract secret. Users with necessary roles on a secret/container project will not have access. To allow access to other users, their user ids need to be added in related ACL users list. An operation specific ACL definition has following attribute: * `users`: Whitelist of users who are allowed access to target resource. In this case a user means a Keystone user id. * `project-access`: Flag to mark a secret or a container private for an operation. Pass `false` to mark private. To accomplish above mentioned behavior for a secret/container resource, having ACL data populated alone is not sufficient. Following ACL rules are defined and used as `OR` in resource access policy: * ACL based access is allowed when token user is present in secret/container operation specific ACL user list e.g. token user present in `read` users list. * When secret/container resource is marked private, then project-level RBAC users access is not allowed. e.g. When a secret is marked private, only the user who created it or a user with the "admin" role on the project will be able to remove it. .. note:: Currently barbican default policy just makes use of `read` ACL data only. So only **GET** calls for a secret and a container resource will make use of ACL data. Other request methods on secret and container resource still uses project level RBAC checks in policy. As per default policy rules, a user with `admin` role in a secret/container project or a user who has created the secret/container can manage ACL for that secret/container. .. 
_default_implicit_acl: Default ACL ----------- By default when no ACL is explicitly set on a secret or a container, then clients with necessary roles on secret's project or container's project can access it. This default access pattern translates to `project-access` as true and no `users` in ACL settings. That's why every secret and container by default has following implicit ACL. .. code-block:: json { "read":{ "project-access": true } } Above default ACL is also returned on **GET** on secret/container **acl** resource when no explicit ACL is set on it. .. _set_acl: How to Set/Replace ACL ###################### The ACL for an existing secret or container can be modified via a **PUT** to the **acl** resource. This update completely replaces existing ACL settings for this secret or container. To set/replace an ACL for a secret: .. code-block:: bash Request: curl -X PUT -H 'content-type:application/json' \ -H 'X-Auth-Token:b06183778aa64b17beb6215e60686a60' \ -d ' { "read":{ "users":[ "2d0ee7c681cc4549b6d76769c320d91f", "721e27b8505b499e8ab3b38154705b9e", "c1d20e4b7e7d4917aee6f0832152269b" ], "project-access":false } }' \ http://localhost:9311/v1/secrets/15621a1b-efdf-41d8-92dc-356cec8e9da9/acl Response (includes secret ACL reference): HTTP/1.1 201 Created {"acl_ref": "http://localhost:9311/v1/secrets/15621a1b-efdf-41d8-92dc-356cec8e9da9/acl"} To set/replace an ACL for a container: .. code-block:: bash Request: curl -X PUT -H 'content-type:application/json' \ -H 'X-Auth-Token:b06183778aa64b17beb6215e60686a60' \ -d ' { "read":{ "users":[ "2d0ee7c681cc4549b6d76769c320d91f", "721e27b8505b499e8ab3b38154705b9e", "c1d20e4b7e7d4917aee6f0832152269b" ], "project-access":false } }' \ http://localhost:9311/v1/containers/8c077991-d524-4e15-8eaf-bc0c3bb225f2/acl Response (includes container ACL reference): HTTP/1.1 201 Created {"acl_ref": "http://localhost:9311/v1/containers/8c077991-d524-4e15-8eaf-bc0c3bb225f2/acl"} To get more details on the create API you can reference the `Set Secret ACL `__ or `Set Container ACL `__ documentation. .. _update_acl: How to Update ACL ################# Existing ACL can be updated via **PUT** or **PATCH** methods on a given secret/container. **PUT** interaction replaces existing ACL with provided ACL data whereas **PATCH** interaction applies the provided changes on existing ACL of a secret or a container. To replace an existing ACL for a container: .. code-block:: bash Request: curl -X PUT -H 'content-type:application/json' \ -H 'X-Auth-Token:e1f540bc6def456dbb0f8c11f21a74ae' \ -d ' { "read":{ "users":[ "2d0ee7c681cc4549b6d76769c320d91f", "721e27b8505b499e8ab3b38154705b9e" ], "project-access":true } }' \ http://localhost:9311/v1/containers/8c077991-d524-4e15-8eaf-bc0c3bb225f2/acl Response (includes container ACL reference): HTTP/1.1 200 OK {"acl_ref": "http://localhost:9311/v1/containers/8c077991-d524-4e15-8eaf-bc0c3bb225f2/acl"} To remove all users from an existing ACL for a container (pass empty list in `users`): .. code-block:: bash Request: curl -X PUT -H 'content-type:application/json' \ -H 'X-Auth-Token:e1f540bc6def456dbb0f8c11f21a74ae' \ -d ' { "read":{ "users":[], "project-access":true } }' \ http://localhost:9311/v1/containers/8c077991-d524-4e15-8eaf-bc0c3bb225f2/acl Response (includes container ACL reference): HTTP/1.1 200 OK {"acl_ref": "http://localhost:9311/v1/containers/8c077991-d524-4e15-8eaf-bc0c3bb225f2/acl"} To update only the `project-access` flag for container ACL (use PATCH): .. 
code-block:: bash Request: curl -X PATCH -H 'content-type:application/json' \ -H 'X-Auth-Token:e1f540bc6def456dbb0f8c11f21a74ae' \ -d ' { "read":{ "project-access":false } }' \ http://localhost:9311/v1/containers/8c077991-d524-4e15-8eaf-bc0c3bb225f2/acl Response: HTTP/1.1 200 OK {"acl_ref": "http://localhost:9311/v1/containers/8c077991-d524-4e15-8eaf-bc0c3bb225f2/acl"} To update only the users list for secret ACL (use PATCH): .. code-block:: bash Request: curl -X PATCH -H 'content-type:application/json' \ -H 'X-Auth-Token:e1f540bc6def456dbb0f8c11f21a74ae' \ -d ' { "read":{ "users":[ "2d0ee7c681cc4549b6d76769c320d91f", "c1d20e4b7e7d4917aee6f0832152269b" ], } }' \ http://localhost:9311/v1/secrets/15621a1b-efdf-41d8-92dc-356cec8e9da9/acl Response: HTTP/1.1 200 OK {"acl_ref": "http://localhost:9311/v1/secrets/15621a1b-efdf-41d8-92dc-356cec8e9da9/acl"} Container and Secret ACL(s) update operation are similar except `containers` resource is used instead of the `secrets` resource in URI. To get more details on ACL update APIs, you can reference the `Update Secret ACL `__ , `Update Container ACL `__ , `Partial Update Secret ACL `__ or `Partial Update Container ACL `__ documentation. .. _retrieve_acl: How to Retrieve ACL ################### The ACL defined for a secret or container can be retrieved by using a **GET** operation on respective **acl** resource. The returned response contains ACL data. To get secret ACL data: .. code-block:: bash Request: curl -X GET -H 'X-Auth-Token:b44636bff48c41bbb80f459df69c11aa' \ http://localhost:9311/v1/secrets/15621a1b-efdf-41d8-92dc-356cec8e9da9/acl Response: HTTP/1.1 200 OK { "read":{ "updated":"2015-05-12T20:08:47.644264", "created":"2015-05-12T19:23:44.019168", "users":[ "c1d20e4b7e7d4917aee6f0832152269b", "2d0ee7c681cc4549b6d76769c320d91f" ], "project-access":false } } To get container ACL data: .. code-block:: bash Request: curl -X GET -H 'X-Auth-Token:b44636bff48c41bbb80f459df69c11aa' \ http://localhost:9311/v1/containers/8c077991-d524-4e15-8eaf-bc0c3bb225f2/acl Response: HTTP/1.1 200 OK { "read":{ "updated":"2015-05-12T20:05:17.214948", "created":"2015-05-12T19:47:20.018657", "users":[ "721e27b8505b499e8ab3b38154705b9e", "c1d20e4b7e7d4917aee6f0832152269b", "2d0ee7c681cc4549b6d76769c320d91f" ], "project-access":false } } To get more details on ACL lookup APIs you can reference the `Get Secret ACL `__ , `Get Container ACL `__ documentation. .. _delete_acl: How to Delete ACL ################# ACL defined for a secret or a container can be deleted by using the **DELETE** operation on their respective `acl` resource. There is no response content returned on successful deletion. Delete operation removes existing ACL on a secret or a container if there. It can be treated as resetting a secret or a container to `Default ACL `__ setting. That's why invoking delete multiple times on this resource will not result in error. .. code-block:: bash Request: curl -X DELETE -H 'X-Auth-Token:b06183778aa64b17beb6215e60686a60' \ http://localhost:9311/v1/secrets/50f5ed8e-004e-433a-939c-fa73c7fc81fd/acl Response: 200 OK To get more details on ACL delete APIs, you can reference the `Delete Secret ACL `__ , `Delete Container ACL `__ documentation. 
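Because deleting an ACL simply resets the secret or container to the default setting, one way to verify the reset is to read the **acl** resource again: with no explicit ACL present, the response is the implicit default ACL described at the beginning of this guide. A short sketch, reusing the token and secret reference from the delete example above:

.. code-block:: bash

    Request:

      curl -X GET -H 'X-Auth-Token:b06183778aa64b17beb6215e60686a60' \
      http://localhost:9311/v1/secrets/50f5ed8e-004e-433a-939c-fa73c7fc81fd/acl

    Response (default implicit ACL):

      HTTP/1.1 200 OK

      {
        "read":{
          "project-access": true
        }
      }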
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/api-guide/source/conf.py0000664000175000017500000002103300000000000020017 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # # Key Manager API documentation build configuration file # # All configuration values have a default; values that are commented out # serve to show the default. # import sys # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = ['openstackdocstheme'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = 'Key Manager API Guide' openstackdocs_bug_tag = 'api-guide' openstackdocs_repo_name = 'openstack/barbican' openstackdocs_auto_name = False openstackdocs_bug_project = 'barbican' copyright = '2016, OpenStack contributors' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all # documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'native' # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. # keep_warnings = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. 
See the documentation for # a list of builtin themes. html_theme = 'openstackdocs' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = [] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. # html_extra_path = [] # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'keymanager-api-guide' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # 'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ('index', 'KeyManagerAPI.tex', 'Key Manager API Documentation', 'OpenStack contributors', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. 
# latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'keymanagerapi', 'Key Manager API Documentation', ['OpenStack contributors'], 1) ] # If true, show URL addresses after external links. # man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'KeyManagerAPIGuide', 'Key Manager API Guide', 'OpenStack contributors', 'APIGuide', 'This guide teaches OpenStack Key Manager service users concepts about ' 'managing keys in an OpenStack cloud with the Key Manager API.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. # texinfo_appendices = [] # If false, no module index is generated. # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. # texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. # texinfo_no_detailmenu = False # -- Options for Internationalization output ------------------------------ locale_dirs = ['locale/'] # -- Options for PDF output -------------------------------------------------- pdf_documents = [ ('index', 'KeyManagerAPIGuide', 'Key Manager API Guide', 'OpenStack ' 'contributors') ] ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/api-guide/source/consumers.rst0000664000175000017500000001143500000000000021275 0ustar00zuulzuul00000000000000************************** Consumers API - User Guide ************************** This guide assumes you will be using a local development environment of barbican. If you need assistance with getting set up, please reference the `development guide `__. What is a Consumer? ################### A consumer is a way to register as an interested party for a container. All of the registered consumers can be viewed by performing a GET on the {container_ref}/consumers. The idea being that before a container is deleted all consumers should be notified of the delete. .. _create_consumer: How to Create a Consumer ######################## .. code-block:: bash curl -X POST -H "X-Auth-Token: $TOKEN" -H "Content-Type: application/json" \ -d '{"name": "consumername", "URL": "consumerURL"}' \ http://localhost:9311/v1/containers/74bbd3fd-9ba8-42ee-b87e-2eecf10e47b9/consumers This will return the following response: .. code-block:: json { "status": "ACTIVE", "updated": "2015-10-15T21:06:33.121113", "name": "container name", "consumers": [ { "URL": "consumerurl", "name": "consumername" } ], "created": "2015-10-15T17:55:44.380002", "container_ref": "http://localhost:9311/v1/containers/74bbd3fd-9ba8-42ee-b87e-2eecf10e47b9", "creator_id": "b17c815d80f946ea8505c34347a2aeba", "secret_refs": [ { "secret_ref": "http://localhost:9311/v1/secrets/b61613fc-be53-4696-ac01-c3a789e87973", "name": "private_key" } ], "type": "generic" } .. 
_retrieve_consumer: How to Retrieve a Consumer ########################## To retrieve a consumer perform a GET on the {container_ref}/consumers This will return all consumers for this container. You can optionally add a limit and offset query parameter. .. code-block:: bash curl -H "X-Auth-Token: $TOKEN" \ http://192.168.99.100:9311/v1/containers/74bbd3fd-9ba8-42ee-b87e-2eecf10e47b9/consumers This will return the following response: .. code-block:: json { "total": 1, "consumers": [ { "status": "ACTIVE", "URL": "consumerurl", "updated": "2015-10-15T21:06:33.123878", "name": "consumername", "created": "2015-10-15T21:06:33.123872" } ] } The returned value is a list of all consumers for the specified container. Each consumer will be listed with its metadata.. If the offset and limit parameters are specified then you will see a previous and next reference which allow you to cycle through all of the consumers for this container. .. code-block:: bash curl -H "X-Auth-Token: $TOKEN" \ http://192.168.99.100:9311/v1/containers/74bbd3fd-9ba8-42ee-b87e-2eecf10e47b9/consumers?limit=1\&offset=1 This will return the following response: .. code-block:: json { "total": 3, "next": "http://localhost:9311/v1/containers/74bbd3fd-9ba8-42ee-b87e-2eecf10e47b9/consumers?limit=1&offset=2", "consumers": [ { "status": "ACTIVE", "URL": "consumerURL2", "updated": "2015-10-15T21:17:08.092416", "name": "consumername2", "created": "2015-10-15T21:17:08.092408" } ], "previous": "http://localhost:9311/v1/containers/74bbd3fd-9ba8-42ee-b87e-2eecf10e47b9/consumers?limit=1&offset=0" } .. _delete_consumer: How to Delete a Consumer ######################## To delete a consumer for a container you must provide the consumer name and URL which were used when the consumer was created. .. code-block:: bash curl -X DELETE -H "X-Auth-Token: $TOKEN" -H "Content-Type: application/json" \ -d '{"name": "consumername", "URL": "consumerURL"}' \ http://localhost:9311/v1/containers/74bbd3fd-9ba8-42ee-b87e-2eecf10e47b9/consumers This will return the following response: .. code-block:: json { "status": "ACTIVE", "updated": "2015-10-15T17:56:18.626724", "name": "container name", "consumers": [], "created": "2015-10-15T17:55:44.380002", "container_ref": "http://localhost:9311/v1/containers/74bbd3fd-9ba8-42ee-b87e-2eecf10e47b9", "creator_id": "b17c815d80f946ea8505c34347a2aeba", "secret_refs": [ { "secret_ref": "http://localhost:9311/v1/secrets/b61613fc-be53-4696-ac01-c3a789e87973", "name": "private_key" } ], "type": "generic" } A successful delete will return an HTTP 200 OK. The response content will be the container plus the consumer list, minus the consumer which was just deleted. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/api-guide/source/containers.rst0000664000175000017500000002454700000000000021434 0ustar00zuulzuul00000000000000**************************** Containers API - User Guide **************************** The containers resource is the organizational center piece of barbican. It creates a logical object that can be used to hold secret references. This is helpful when having to deal with tracking and having access to hundreds of secrets. Barbican supports 3 types of containers: * :ref:`Generic ` * :ref:`Certificate ` * :ref:`RSA ` Each of these types have explicit restrictions as to what type of secrets should be held within. These will be broken down in their respective sections. 
This guide will assume you will be using a local running development environment of barbican. If you need assistance with getting set up, please reference the `development guide `__. .. _generic_containers: Generic Containers ################## A generic container is used for any type of container that a user may wish to create. There are no restrictions on the type or amount of secrets that can be held within a container. An example of a use case for a generic container would be having multiple passwords stored in the same container reference: .. code-block:: json { "type": "generic", "status": "ACTIVE", "name": "Test Environment User Passwords", "consumers": [], "container_ref": "https://{barbican_host}/v1/containers/{uuid}", "secret_refs": [ { "name": "test_admin_user", "secret_ref": "https://{barbican_host}/v1/secrets/{uuid}" }, { "name": "test_audit_user", "secret_ref": "https://{barbican_host}/v1/secrets/{uuid}" } ], "created": "2015-03-30T21:10:45.417835", "updated": "2015-03-30T21:10:45.417835" } For more information on creating a generic container, reference the :ref:`Creating a Generic Container ` section. .. _certificate_containers: Certificate Containers ###################### A certificate container is used for storing the following secrets that are relevant to certificates: * certificate * private_key (optional) * private_key_passphrase (optional) * intermediates (optional) .. code-block:: json { "type": "certificate", "status": "ACTIVE", "name": "Example.com Certificates", "consumers": [], "container_ref": "https://{barbican_host}/v1/containers/{uuid}", "secret_refs": [ { "name": "certificate", "secret_ref": "https://{barbican_host}/v1/secrets/{uuid}" }, { "name": "private_key", "secret_ref": "https://{barbican_host}/v1/secrets/{uuid}" }, { "name": "private_key_passphrase", "secret_ref": "https://{barbican_host}/v1/secrets/{uuid}" }, { "name": "intermediates", "secret_ref": "https://{barbican_host}/v1/secrets/{uuid}" } ], "created": "2015-03-30T21:10:45.417835", "updated": "2015-03-30T21:10:45.417835" } The payload for the secret referenced as the "certificate" is expected to be a PEM formatted x509 certificate. The payload for the secret referenced as the "intermediates" is expected to be a PEM formatted PKCS7 certificate chain. For more information on creating a certificate container, reference the :ref:`Creating a Certificate Container ` section. .. _rsa_containers: RSA Containers ############## An RSA container is used for storing RSA public keys, private keys, and private key pass phrases. .. code-block:: json { "type": "rsa", "status": "ACTIVE", "name": "John Smith RSA", "consumers": [], "container_ref": "https://{barbican_host}/v1/containers/{uuid}", "secret_refs": [ { "name": "private_key", "secret_ref": "https://{barbican_host}/v1/secrets/{uuid}" }, { "name": "private_key_passphrase", "secret_ref": "https://{barbican_host}/v1/secrets/{uuid}" }, { "name": "public_key", "secret_ref": "https://{barbican_host}/v1/secrets/{uuid}" } ], "created": "2015-03-30T21:10:45.417835", "updated": "2015-03-30T21:10:45.417835" } For more information on creating a certificate container, reference the :ref:`Creating a RSA Container ` section. .. _create_container: How to Create a Container ######################### In order to create a container, we must first have secrets. If you are unfamiliar with creating secrets, please take some time to refer to the :doc:`Secret User Guide ` before moving forward. .. 
_create_generic_container: Creating a Generic Container **************************** To create a generic container we must have a secret to store as well. .. code-block:: bash curl -X POST -H "X-Auth-Token: $TOKEN" -H "Content-Type:application/json" -d '{ "type": "generic", "name": "generic name", "secret_refs": [ { "name": "a secret", "secret_ref": "http://localhost:9311/v1/secrets/feac9896-49e9-49e0-9484-1a6153c9498b" } ] }' http://localhost:9311/v1/containers This should provide a response as follows: .. code-block:: bash {"container_ref": "http://localhost:9311/v1/containers/0fecaec4-7cd7-4e70-a760-cc7eaf5c3afb"} This is our container reference. We will need this in order to retrieve the container. Jump ahead to :ref:`How To Retrieve a Container ` to make sure our container stored as expected. .. _create_certificate_container: Creating a Certificate Container ******************************** To create a certificate container we must have a secret to store as well. As we mentioned in :ref:`Certificate Containers section ` you are required to provide a secret named certificate but may also include the optional secrets named private_key, private_key_passphrase, and intermediates. .. code-block:: bash curl -X POST -H "X-Auth-Token: $TOKEN" -H "Content-Type:application/json" -d '{ "type": "certificate", "name": "certificate container", "secret_refs": [ { "name": "certificate", "secret_ref": "http://localhost:9311/v1/secrets/f91b84ac-fb19-416b-87dc-e7e41b7f6039" }, { "name": "private_key", "secret_ref": "http://localhost:9311/v1/secrets/feac9896-49e9-49e0-9484-1a6153c9498b" }, { "name": "private_key_passphrase", "secret_ref": "http://localhost:9311/v1/secrets/f1106c5b-0347-4197-8947-d9e392bf74a3" }, { "name": "intermediates", "secret_ref": "http://localhost:9311/v1/secrets/2e86c661-28e8-46f1-8e91-f1d95062695d" } ] }' http://localhost:9311/v1/containers This should provide a response as follows: .. code-block:: bash {"container_ref": "http://localhost:9311/v1/containers/0fecaec4-7cd7-4e70-a760-cc7eaf5c3afb"} This is our container reference. We will need this in order to retrieve the container. Jump ahead to :ref:`How To Retrieve a Container ` to make sure our container stored as expected. .. _create_rsa_container: Creating an RSA Container ************************* To create a certificate container we must have a secret to store as well. As we mentioned in :ref:`RSA Containers section ` you are required to provide a secret named public_key, private_key, and private_key_passphrase. .. code-block:: bash curl -X POST -H "X-Auth-Token: $TOKEN" -H "Content-Type:application/json" -d '{ "type": "rsa", "name": "rsa container", "secret_refs": [ { "name": "public_key", "secret_ref": "http://localhost:9311/v1/secrets/f91b84ac-fb19-416b-87dc-e7e41b7f6039" }, { "name": "private_key", "secret_ref": "http://localhost:9311/v1/secrets/feac9896-49e9-49e0-9484-1a6153c9498b" }, { "name": "private_key_passphrase", "secret_ref": "http://localhost:9311/v1/secrets/f1106c5b-0347-4197-8947-d9e392bf74a3" } ] }' http://localhost:9311/v1/containers This should provide a response as follows: .. code-block:: bash {"container_ref": "http://localhost:9311/v1/containers/0fecaec4-7cd7-4e70-a760-cc7eaf5c3afb"} This is our container reference. We will need this in order to retrieve the container. Jump ahead to :ref:`How To Retrieve a Container ` to make sure our container stored as expected. .. _retrieve_container: How to Retrieve a Container ########################### To retrieve a container we must have a container reference. 
.. code-block:: bash curl -X GET -H "X-Auth-Token: $TOKEN" http://localhost:9311/v1/containers/49d3c5e9-80bb-47ec-8787-968bb500d76e This should provide a response as follows: .. code-block:: bash { "status": "ACTIVE", "updated": "2015-03-31T21:21:34.126042", "name": "container name", "consumers": [], "created": "2015-03-31T21:21:34.126042", "container_ref": "http://localhost:9311/v1/containers/49d3c5e9-80bb-47ec-8787-968bb500d76e", "secret_refs": [ { "secret_ref": "http://localhost:9311/v1/secrets/feac9896-49e9-49e0-9484-1a6153c9498b", "name": "a secret" } ], "type": "generic" } This is the metadata as well as the list of secret references that are stored within the container. .. _delete_container: How to Delete a Container ######################### To delete a container we must have a container reference. .. code-block:: bash curl -X DELETE -H "X-Auth-Token: $TOKEN" http://localhost:9311/v1/containers/d1c23e06-476b-4684-be9f-8afbef42768d No response will be provided. This is expected behavior! If you do receive a response, something went wrong and you will have to address that before moving forward. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/api-guide/source/dogtag_setup.rst0000664000175000017500000001473000000000000021745 0ustar00zuulzuul00000000000000************************** Dogtag Setup - User Guide ************************** Dogtag is the Open Source upstream community version of the Red Hat Certificate System, an enterprise certificate management system that has been deployed in some of the largest PKI deployments worldwide. RHCS is FIPS 140-2 and Common Criteria certified. The Dogtag Certificate Authority (CA) subsystem issues, renews and revokes many different kinds of certificates. It can be used as a private CA back-end to barbican, and interacts with barbican through the Dogtag CA plugin. The Dogtag KRA subsystem is used to securely store secrets after being encrypted by storage keys that are stored either in a software NSS database or in an HSM. It can serve as a secret store for barbican, and interacts with barbican core through the Dogtag KRA plugin. In this guide, we will provide instructions on how to set up a basic Dogtag instance containing a CA and a KRA, and how to configure barbican to use this instance for a secret store. Much more detail about Dogtag, its deployment options and its administration are available in the `RHCS documentation `_. **Note:** The code below is taken from the devstack Barbican-Dogtag gate job. You can extract this code by looking at the Dogtag functions in contrib/devstack/lib/barbican. Installing the Dogtag Packages ****************************** Dogtag packages are available in Fedora/RHEL/Centos and on Ubuntu/Debian distributions. This guide will include instructions applicable to Fedora/RHEL/Centos. If installing on a Fedora platform, use at least Fedora 21. To install the required packages: .. code-block:: bash yum install pki-ca pki-kra 389-ds-base Creating the Directory Server Instance for the Dogtag Internal DB ***************************************************************** The Dogtag CA and KRA subsystems use a 389 directory server as an internal database. Configure one as follows: .. code-block:: bash mkdir -p /etc/389-ds cat > /etc/389-ds/setup.inf <`_. .. code-block:: bash mkdir -p /etc/dogtag cat > /etc/dogtag/ca.cfg < /etc/dogtag/kra.cfg <`__. .. 
_create_order: Creating an Order ################# When you want barbican to generate a secret you need to create an order. For an order to be processed correctly the parameters mode, bit_length, and algorithm must be valid. Otherwise the order will fail and the secret will not be generated. The example below shows a valid order for generating a symmetric key. You can find a more detailed explanation about the parameters in the `Orders API `__ documentation. .. code-block:: bash curl -X POST -H "X-Auth-Token: $TOKEN" -H "content-type:application/json" -d '{ "type":"key", "meta": { "name": "secretname", "algorithm": "aes", "bit_length": 256, "mode": "cbc", "payload_content_type": "application/octet-stream"} }' http://localhost:9311/v1/orders You should receive an order reference after placing your order with barbican. .. code-block:: bash {"order_ref": "http://localhost:9311/v1/orders/3a5c6748-44de-4c1c-9e54-085c3f79e942"} The order reference is used to retrieve the metadata for the order you placed which can then be used to retrieve your secret. .. _retrieve_order: Retrieving an Order ################### In order to retrieve the order we will use the reference returned during the initial creation. (See :ref:`Creating an Order `.) .. code-block:: bash curl -H "X-Auth-Token: $TOKEN" -H 'Accept:application/json' \ http://localhost:9311/v1/orders/3a5c6748-44de-4c1c-9e54-085c3f79e942 The typical response is below: .. code-block:: json { "created": "2015-10-15T18:15:10", "creator_id": "40540f978fbd45c1af18910e3e02b63f", "meta": { "algorithm": "AES", "bit_length": 256, "expiration": null, "mode": "cbc", "name": "secretname", "payload_content_type": "application/octet-stream" }, "order_ref": "http://localhost:9311/v1/orders/3a5c6748-44de-4c1c-9e54-085c3f79e942", "secret_ref": "http://localhost:9311/v1/secrets/bcd1b853-edeb-4509-9f12-019b8c8dfb5f", "status": "ACTIVE", "sub_status": "Unknown", "sub_status_message": "Unknown", "type": "key", "updated": "2015-10-15T18:15:10" } This is the metadata associated with the order. To retrieve the secret generated by the order, refer to the :doc:`Secrets User Guide `. The order metadata is very useful for determining if your order was processed correctly. Since orders are processed asynchronously, you can use the metadata returned for the order to verify a successful secret creation. The parameters of the response are explained in more detail `here `__. .. _retrieve_order_list: Retrieving All Orders ##################### It is also possible to retrieve all orders for a project. .. code-block:: bash curl -H "X-Auth-Token: $TOKEN" -H 'Accept:application/json' http://localhost:9311/v1/orders .. 
code-block:: json { "orders": [ { "created": "2015-10-15T18:15:10", "creator_id": "40540f978fbd45c1af18910e3e02b63f", "meta": { "algorithm": "AES", "bit_length": 256, "expiration": null, "mode": "cbc", "name": "secretname", "payload_content_type": "application/octet-stream" }, "order_ref": "http://localhost:9311/v1/orders/3a5c6748-44de-4c1c-9e54-085c3f79e942", "secret_ref": "http://localhost:9311/v1/secrets/bcd1b853-edeb-4509-9f12-019b8c8dfb5f", "status": "ACTIVE", "sub_status": "Unknown", "sub_status_message": "Unknown", "type": "key", "updated": "2015-10-15T18:15:10" }, { "created": "2015-10-15T18:51:35", "creator_id": "40540f978fbd45c1af18910e3e02b63f", "meta": { "algorithm": "AES", "bit_length": 256, "mode": "cbc", "expiration": null, "name": null }, "order_ref": "http://localhost:9311/v1/orders/d99ced51-ea7a-4c14-8e11-0dda0f49c5be", "secret_ref": "http://localhost:9311/v1/secrets/abadd306-8235-4f6b-984a-cc48ad039def", "status": "ACTIVE", "sub_status": "Unknown", "sub_status_message": "Unknown", "type": "key", "updated": "2015-10-15T18:51:35" } ], "total": 2 } You can refer to the `orders parameters `__ section of the `Orders API `__ documentation in order to refine your search among orders. .. _delete_order: Deleting an Order ################# It is also possible to delete an order from barbican. .. code-block:: bash curl -X DELETE -H "X-Auth-Token: $TOKEN" -H 'Accept:application/json' http://localhost:9311/v1/orders/fbdd845f-4a5e-43e3-8f68-64e8f106c486 Nothing will be returned when you delete an order. If something was returned there was most likely an error while deleting the order. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/api-guide/source/pkcs11keygeneration.rst0000664000175000017500000000513000000000000023141 0ustar00zuulzuul00000000000000*********************************** PKCS11 Key Generation - User Guide *********************************** The Key Generation script was written with the Deployer in mind. It allows the deployer to create an MKEK and HMAC signing key for their HSM setup. This script is intended to be used initially or for key rotation scenarios. Setup ##### Initially, the deployer will need to examine the settings in their `barbican.conf` file under the "Crypto plugin" settings section. Set these values to whichever defaults you need. This will be used for both the script and your usage of barbican. The following items are required to use the PKCS11 plugin: * Library Path * Login Passphrase (Password to HSM) * Slot ID (on HSM) The following will need to be provided to generate the HMAC and MKEK: * MKEK Label * MKEK Length * HMAC Label Usage ##### Viewing the help page can give some awareness to the structure of the script as well as inform you of any changes. .. code-block:: bash $ pkcs11-key-generation --help usage: pkcs11-key-generation [-h] [--library-path LIBRARY_PATH] [--passphrase PASSPHRASE] [--slot-id SLOT_ID] {mkek,hmac} ... Barbican MKEK & HMAC Generator optional arguments: -h, --help show this help message and exit --library-path LIBRARY_PATH Path to vendor PKCS11 library --passphrase PASSPHRASE Password to login to PKCS11 session --slot-id SLOT_ID HSM Slot id (Should correspond to a configured PKCS11 slot) subcommands: Action to perform {mkek,hmac} mkek Generates a new MKEK. hmac Generates a new HMAC. **Note:** The user is able to pass the password in as an option or they can leave the flag out and will be prompted for the password upon submission of the command. 
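Before generating keys, the same PKCS11 settings referenced in the Setup section also need to be present in barbican's configuration so that barbican can use the generated MKEK and HMAC key afterwards. Below is a sketch of what that configuration might look like, written in the same heredoc style used in the Dogtag setup guide above; the section name, option names, and values shown are illustrative placeholders and may differ between barbican releases and HSM vendors.

.. code-block:: bash

    # Append an example PKCS11 crypto plugin section to barbican.conf
    # (all values below are placeholders -- adjust for your HSM)
    cat >> /etc/barbican/barbican.conf <<EOF

    [p11_crypto_plugin]
    library_path = /usr/lib/libCryptoki2_64.so
    login = CHANGE_ME_HSM_PASSWORD
    slot_id = 1
    mkek_label = an_mkek
    mkek_length = 32
    hmac_label = my_hmac_label
    EOF

Once these values are in place, the commands below can be used to generate the keys themselves.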
Generating an MKEK ****************** To generate an MKEK, the user must provide a length and a label for the MKEK. .. code-block:: bash $ pkcs11-key-generation --library-path {library_path here} --passphrase {HSM password here} --slot-id {HSM slot here} mkek --length 32 --label 'HMACLabelHere' MKEK successfully generated! Generating an HMAC ****************** To generate an HMAC, the user must provide a label for the HMAC. .. code-block:: bash $ pkcs11-key-generation --library-path {library_path here} --passphrase {HSM password here} --slot-id {HSM slot here} hmac --label 'HMACLabelHere' HMAC successfully generated! ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/api-guide/source/quotas.rst0000664000175000017500000001644600000000000020602 0ustar00zuulzuul00000000000000************************ Quotas API - User Guide ************************ Running with default settings, the barbican REST API doesn't impose an upper limit on the number of resources that are allowed to be created. barbican's backend depends on limited resources. These limited resources include database, plugin, and Hardware Security Module (HSM) storage space. This can be an issue in a multi-project or multi-user environment when one project can exhaust available resources, impacting other projects. The answer to this, on a per-project basis, is project quotas. This user guide will show you how a user can lookup his current effective quotas and how a service admin can create, update, read, and delete project quota configuration for all projects in his cloud. This guide will assume you will be using a local running development environment of barbican. If you need assistance with getting set up, please reference the `development guide `__. .. _user_project_quotas_overview: Project Quotas Overview ####################### All users authenticated with barbican are able to read the effective quota values that apply to their project. Barbican can derive the project that a user belongs to by reading the project scope from the authentication token. Service administrators can read, set, and delete quota configurations for each project known to barbican. The service administrator is recognized by his authenticated role. The service administrator's role is defined in barbican's policy.json file. The default role for a service admin is "key-manager:service-admin". Quotas can be enforced for the following barbican resources: secrets, containers, orders, consumers, and CAs. The configured quota value can be None (use the default), -1 (unlimited), 0 (disabled), or a positive integer defining the maximum number allowed for a project. .. _default_project_quotas: Default Quotas -------------- When no project quotas have been set for a project, the default project quotas are enforced for that project. Default quotas are specified in the barbican configuration file (barbican.conf). The defaults provided in the standard configuration file are as follows. .. code-block:: ini # default number of secrets allowed per project quota_secrets = -1 # default number of orders allowed per project quota_orders = -1 # default number of containers allowed per project quota_containers = -1 # default number of consumers allowed per project quota_consumers = -1 # default number of CAs allowed per project quota_cas = -1 The default quotas are returned via a **GET** on the **quotas** resource when no explicit project quotas have been set for the current project. .. 
_user_get_quotas: How to Read Effective Quotas ############################ The current effective quotas for a project can be read via a **GET** to the **quotas** resource. Barbican determines the current project ID from the scope of the authentication token sent with the request. .. code-block:: bash Request: curl -i -X GET -H "X-Auth-Token:$TOKEN" \ -H "Accept:application/json" \ http://localhost:9311/v1/quotas Response: HTTP/1.1 200 OK Content-Type: application/json; charset=UTF-8 {"quotas": {"secrets": -1, "orders": -1, "containers": -1, "consumers": -1, "cas": -1 } } To get more details on the quota lookup API, you can reference the `Get Quotas `__ documentation. .. _user_put_project_quotas: How to Set or Replace Project Quotas #################################### The quotas for a project can be modified via a **PUT** to the **project-quotas** resource. This request completely replaces existing quota settings for a project. The project ID is passed in the URI of the request. To set or replace the quotas for the project with the ID 1234: .. code-block:: bash Request: curl -i -X PUT -H "content-type:application/json" \ -H "X-Auth-Token:$TOKEN" \ -d '{"project_quotas": {"secrets": 500, "orders": 100, "containers": -1, "consumers": 100, "cas": 50}}' \ http://localhost:9311/v1/project-quotas/1234 Response: HTTP/1.1 204 No Content To get more details on the project quota setting API you can reference the `Set Project Quotas `__ documentation. .. _user_get_project_quotas: How to Retrieve Configured Project Quotas ######################################### The project quota information defined for a project can be retrieved by using a **GET** operation on the respective **project-quota** resource. The project ID is passed in the URI of the request. The returned response contains project quota data. To get project quota information for a single project: .. code-block:: bash Request: curl -i -X GET -H "X-Auth-Token:$TOKEN" \ -H "Accept:application/json" \ http://localhost:9311/v1/project-quotas/1234 Response: HTTP/1.1 200 OK Content-Type: application/json; charset=UTF-8 {"project_quotas": {"secrets": 500, "orders": 100, "containers": -1, "consumers": 100, "cas": 50}} The project quota information defined for all projects can be retrieved by using a **GET** operation on the **project-quota** resource. The returned response contains a list with all project quota data. .. code-block:: bash Request: curl -i -X GET -H "X-Auth-Token:$TOKEN" \ -H "Accept:application/json" \ http://localhost:9311/v1/project-quotas Response: HTTP/1.1 200 OK Content-Type: application/json; charset=UTF-8 {"project_quotas": [{"project_id": "1234", "project_quotas": {"secrets": 500, "orders": 100, "containers": -1, "consumers": 100, "cas": 50}}, {"project_id": "5678", "project_quotas": {"secrets": 500, "orders": 100, "containers": -1, "consumers": 100, "cas": 50}}]} To get more details on project quota lookup APIs you can reference the `Get Project Quota `__ and `Get Project Quota List `__ documentation. .. _user_delete_project_quotas: How to Delete Configured Project Quotas ####################################### Quotas defined for a project can be deleted by using the **DELETE** operation on the respective **project-quotas** resource. The quota configuration information is deleted for a project, the default quotas will then apply to that project. There is no response content returned on successful deletion. .. 
code-block:: bash Request: curl -i -X DELETE -H "X-Auth-Token:$TOKEN" \ http://localhost:9311/v1/project-quotas/1234 Response: HTTP/1.1 204 No Content To get more details on project quota delete APIs, you can reference the `Delete Project Quotas `__ documentation. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/api-guide/source/secret_metadata.rst0000664000175000017500000001114600000000000022403 0ustar00zuulzuul00000000000000******************************** Secret Metadata API - User Guide ******************************** The Secret Metadata resource is an additional resource associated with Secrets. It allows a user to be able to associate various key/value pairs with a Secret. .. _create_secret_metadata: How to Create/Update Secret Metadata #################################### To create/update the secret metadata for a specific secret, we will need to know the secret reference of the secret we wish to add user metadata to. Any metadata that was previously set will be deleted and replaced with this metadata. For more information on creating/updating secret metadata, you can view the `PUT /v1/secrets/{uuid}/metadata `__ section. .. code-block:: bash curl -X PUT -H "content-type:application/json" -H "X-Auth-Token: $TOKEN" \ -d '{ "metadata": { "description": "contains the AES key", "geolocation": "12.3456, -98.7654" } }' \ http://localhost:9311/v1/secrets/2a549393-0710-444b-8aa5-84cf0f85ea79/metadata This should provide a response as follows: .. code-block:: bash {"metadata_ref": "http://localhost:9311/v1/secrets/2a549393-0710-444b-8aa5-84cf0f85ea79/metadata"} .. _retrieve_secret_metadata: How to Retrieve Secret Metadata ############################### To retrieve the secret metadata for a single key/value pair, we will need to know the secret reference of the secret we wish to see the user metadata of. If there is no metadata for a particular secret, then an empty metadata object will be returned. .. code-block:: bash curl -H "X-Auth-Token: $TOKEN" \ http://localhost:9311/v1/secrets/2a549393-0710-444b-8aa5-84cf0f85ea79/metadata/ This should provide a response as follows: .. code-block:: bash { "metadata": { "description": "contains the AES key", "geolocation": "12.3456, -98.7654" } } .. _create_secret_metadatum: How to Create Individual Secret Metadata ######################################## To create the secret metadata for a single key/value pair, we will need to know the secret reference. This will create a new key/value pair. In order to update an already existing key, please see the update section below. .. code-block:: bash curl -X POST -H "content-type:application/json" -H "X-Auth-Token: $TOKEN" \ -d '{ "key": "access-limit", "value": "11" }' \ http://localhost:9311/v1/secrets/2a549393-0710-444b-8aa5-84cf0f85ea79/metadata This should provide a response as follows: .. code-block:: bash Secret Metadata Location: http://example.com:9311/v1/secrets/{uuid}/metadata/access-limit { "key": "access-limit", "value": 11 } .. _update_secret_metadatum: How to Update an Individual Secret Metadata ########################################### To update the secret metadata for a single key/value pair, we will need to know the secret reference as well as the name of the key. .. 
code-block:: bash curl -X PUT -H "content-type:application/json" -H "X-Auth-Token: $TOKEN" \ -d '{ "key": "access-limit", "value": "0" }' \ http://localhost:9311/v1/secrets/2a549393-0710-444b-8aa5-84cf0f85ea79/metadata/access-limit This should provide a response as follows: .. code-block:: bash { "key": "access-limit", "value": 0 } .. _retrieve_secret_metadatum: How to Retrieve an Individual Secret Metadata ############################################# To retrieve the secret metadata for a specific key/value pair, we will need to know the secret reference as well as the name of the metadata key. .. code-block:: bash curl -H "X-Auth-Token: $TOKEN" \ http://localhost:9311/v1/secrets/2a549393-0710-444b-8aa5-84cf0f85ea79/metadata/access-limit This should provide a response as follows: .. code-block:: bash { "key": "access-limit", "value": 0 } .. _remove_secret_metadatum: How to Delete an Individual Secret Metadata ########################################### To delete a single secret metadata key/value, we will need to know the secret reference as well as the name of the metadata key to delete. In order to delete all metadata for a secret, please see the create/update section at the top of this page. .. code-block:: bash curl -X DELETE -H "X-Auth-Token: $TOKEN" \ http://localhost:9311/v1/secrets/2a549393-0710-444b-8aa5-84cf0f85ea79/metadata/access-limit No response will be provided. This is expected behavior! If you do receive a response, something went wrong and you will have to address that before moving forward. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/api-guide/source/secrets.rst0000664000175000017500000001254600000000000020733 0ustar00zuulzuul00000000000000************************* Secrets API - User Guide ************************* The secrets resource is the heart of the barbican service. It provides access to the secret / keying material stored in the system. Barbican supports the storage of data for various content-types securely. This guide will assume you will be using a local running development environment of barbican. If you need assistance with getting set up, please reference the `development guide `__. What is a Secret? ################# A secret is a singular item that is stored within barbican. A secret is anything you want it to be; however, the formal use case is a key that you wish to store away from prying eyes. Some examples of a secret may include: * Private Key * Certificate * Password * SSH Keys For the purpose of this user guide, we will use a simple plaintext secret. If you would like to learn more in detail about `secret parameters `__, `responses `__, and `status codes `__ you can reference the `secret reference `__ documentation. .. _create_secret: How to Create a Secret ###################### Single Step Secret Creation *************************** The first secret we will create is a single step secret. Using a single step, barbican expects the user to provide the payload to be stored within the secret itself. Once the secret has been created with a payload it cannot be updated. In this example we will provide a plain text secret. For more information on creating secrets you can view the `POST /v1/secrets `__ section. .. code-block:: bash curl -X POST -H "content-type:application/json" -H "X-Auth-Token: $TOKEN" \ -d '{"payload": "my-secret-here", "payload_content_type": "text/plain"}' \ http://localhost:9311/v1/secrets This should provide a response as follows: .. 
code-block:: bash {"secret_ref": "http://localhost:9311/v1/secrets/2a549393-0710-444b-8aa5-84cf0f85ea79"} This is our secret reference. We will need this in order to retrieve the secret in the following steps. Jump ahead to :ref:`How to Retrieve a Secret ` to make sure our secret is stored as expected. .. _two_step_secret_create: Two Step Secret Creation ************************ The second secret we will create is a two-step secret. A two-step secret will allow the user to create a secret reference initially, but upload the secret data after the fact. In this example we will not provide a payload. .. code-block:: bash curl -X POST -H "content-type:application/json" -H "X-Auth-Token: $TOKEN" \ -d '{}' http://localhost:9311/v1/secrets This should provide a response as follows: .. code-block:: bash {"secret_ref": "http://localhost:9311/v1/secrets/2a549393-0710-444b-8aa5-84cf0f85ea79"} Now that we have a secret reference available, we can update the secret data. .. _update_secret: How to Update a Secret ###################### To update the secret data we will need to know the secret reference provided via the initial creation. (See :ref:`Two Step Secret Creation ` for more information.) In the example below, the secret ref is used from the previous example. You will have to substitute the uuid after /secrets/ with your own in order to update the secret. .. code-block:: bash curl -X PUT -H "content-type:text/plain" -H "X-Auth-Token: $TOKEN" \ -d 'my-secret-here' \ http://localhost:9311/v1/secrets/2a549393-0710-444b-8aa5-84cf0f85ea79 No response will be provided. This is expected behavior! If you do receive a response, something went wrong and you will have to address that before moving forward. (For more information visit `PUT /v1/secrets/{uuid} `__ .) .. _retrieve_secret: How to Retrieve a Secret ######################## To retrieve the secret we have created we will need to know the secret reference provided via the initial creation (See :ref:`How to Create a Secret `.) .. code-block:: bash curl -H "Accept: text/plain" -H "X-Auth-Token: $TOKEN" \ http://localhost:9311/v1/secrets/2a549393-0710-444b-8aa5-84cf0f85ea79/payload This should provide a response as follows: .. code-block:: bash my-secret-here This is the plain text data we provided upon initial creation of the secret. How to Delete a Secret ###################### To delete a secret we will need to know the secret reference provided via the initial creation (See :ref:`How to Create a Secret `.) .. code-block:: bash curl -X DELETE -H "X-Auth-Token: $TOKEN" \ http://localhost:9311/v1/secrets/2a549393-0710-444b-8aa5-84cf0f85ea79 No response will be provided. This is expected behavior! If you do receive a response, something went wrong and you will have to address that before moving forward. (For more information visit `DELETE /v1/secrets/{uuid} `__ .) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/apiary.apib0000664000175000017500000000306200000000000015500 0ustar00zuulzuul00000000000000HOST: https://dfw.barbican.api.rackspacecloud.com/v1/ --- Barbican API v1 --- --- Barbican is a ReST based key management service. It is built with [OpenStack](https://www.openstack.org/) in mind, but can be used outside an OpenStack implementation. More information can be found on [OpenDev](https://opendev.org/openstack/barbican). --- -- Secrets Resource The following is a description of the resources dealing with generic secrets. 
These can be encryption keys or anything else a user wants to store in a secure, auditable manner -- Allows a user to list all secrets in a tenant. Note: the actual secret should not be listed here, a user must make a separate call to get the secret details to view the secret. GET /secrets < 200 < Content-Type: application/json { "name": "AES key" "algorithm": "AES" "cypher_type": "CDC" "bit_length": 256 "content_types": { "default": "text/plain" } "expiration": "2013-05-08T16:21:38.134160" "id": "2eb5a8d8-2202-4f46-b64d-89e26eb25487" "mime_type": "text/plain" } Allows a user to create a new secret. This call expects the user to provide a secret. To have the API generate a secret, see the provisioning API. POST /secrets > Content-Type: application/json { "product":"1AB23ORM", "quantity": 2 } < 201 < Content-Type: application/json { "status": "created", "url": "/shopping-cart/2" } -- Payment Resources -- This resource allows you to submit payment information to process your *shopping cart* items POST /payment { "cc": "12345678900", "cvc": "123", "expiry": "0112" } < 200 { "receipt": "/payment/receipt/1" } ././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1743590729.125029 barbican-20.0.0/barbican/0000775000175000017500000000000000000000000015116 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/barbican/__init__.py0000664000175000017500000000000000000000000017215 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1743590729.1290293 barbican-20.0.0/barbican/api/0000775000175000017500000000000000000000000015667 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/barbican/api/__init__.py0000664000175000017500000001073700000000000020010 0ustar00zuulzuul00000000000000# Copyright (c) 2013-2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ API handler for Barbican """ import pkgutil from oslo_policy import policy from oslo_serialization import jsonutils as json import pecan from barbican.common import config from barbican.common import exception from barbican.common import utils from barbican import i18n as u LOG = utils.getLogger(__name__) CONF = config.CONF class ApiResource(object): """Base class for API resources.""" pass def load_body(req, resp=None, validator=None): """Helper function for loading an HTTP request body from JSON. This body is placed into into a Python dictionary. :param req: The HTTP request instance to load the body from. :param resp: The HTTP response instance. :param validator: The JSON validator to enforce. :return: A dict of values from the JSON request. 
""" try: body = req.body_file.read(CONF.max_allowed_request_size_in_bytes) req.body_file.seek(0) except IOError: LOG.exception("Problem reading request JSON stream.") pecan.abort(500, u._('Read Error')) try: # TODO(jwood): Investigate how to get UTF8 format via openstack # jsonutils: # parsed_body = json.loads(raw_json, 'utf-8') parsed_body = json.loads(body) strip_whitespace(parsed_body) except ValueError: LOG.exception("Problem loading request JSON.") pecan.abort(400, u._('Malformed JSON')) if validator: try: parsed_body = validator.validate(parsed_body) except exception.BarbicanHTTPException as e: LOG.exception(str(e)) pecan.abort(e.status_code, e.client_message) return parsed_body def generate_safe_exception_message(operation_name, excep): """Generates an exception message that is 'safe' for clients to consume. A 'safe' message is one that doesn't contain sensitive information that could be used for (say) cryptographic attacks on Barbican. That generally means that em.CryptoXxxx should be captured here and with a simple message created on behalf of them. :param operation_name: Name of attempted operation, with a 'Verb noun' format (e.g. 'Create Secret). :param excep: The Exception instance that halted the operation. :return: (status, message) where 'status' is one of the webob.exc.HTTP_xxx codes, and 'message' is the sanitized message associated with the error. """ message = None reason = None status = 500 try: raise excep except (policy.PolicyNotAuthorized, policy.InvalidScope): message = u._( '{operation} attempt not allowed - ' 'please review your ' 'user/project privileges').format(operation=operation_name) status = 403 except exception.BarbicanHTTPException as http_exception: reason = http_exception.client_message status = http_exception.status_code except Exception: message = u._('{operation} failure seen - please contact site ' 'administrator.').format(operation=operation_name) if reason: message = u._('{operation} issue seen - {reason}.').format( operation=operation_name, reason=reason) return status, message @pkgutil.simplegeneric def get_items(obj): """This is used to get items from either a list or a dictionary. While false generator is need to process scalar object """ while False: yield None @get_items.register(dict) def _json_object(obj): return obj.items() @get_items.register(list) def _json_array(obj): return enumerate(obj) def strip_whitespace(json_data): """Recursively trim values from the object passed in using get_items().""" for key, value in get_items(json_data): if hasattr(value, 'strip'): json_data[key] = value.strip() else: strip_whitespace(value) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/barbican/api/app.py0000664000175000017500000000676200000000000017034 0ustar00zuulzuul00000000000000# Copyright (c) 2013-2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
""" API application handler for Barbican """ import os from paste import deploy import pecan try: import newrelic.agent newrelic_loaded = True except ImportError: newrelic_loaded = False from oslo_log import log from barbican.api.controllers import versions from barbican.api import hooks from barbican.common import config from barbican.model import repositories from barbican import queue CONF = config.CONF if newrelic_loaded: newrelic.agent.initialize( os.environ.get('NEW_RELIC_CONFIG_FILE', '/etc/newrelic/newrelic.ini'), os.environ.get('NEW_RELIC_ENVIRONMENT') ) def build_wsgi_app(controller=None, transactional=False): """WSGI application creation helper :param controller: Overrides default application controller :param transactional: Adds transaction hook for all requests """ request_hooks = [hooks.JSONErrorHook()] if transactional: request_hooks.append(hooks.BarbicanTransactionHook()) if newrelic_loaded: request_hooks.insert(0, hooks.NewRelicHook()) # Create WSGI app wsgi_app = pecan.Pecan( controller or versions.AVAILABLE_VERSIONS[versions.DEFAULT_VERSION](), hooks=request_hooks, force_canonical=False ) # clear the session created in controller initialization 60 repositories.clear() return wsgi_app def main_app(func): def _wrapper(global_config, **local_conf): # Queuing initialization queue.init(CONF, is_server_side=False) # Configure oslo logging and configuration services. log.setup(CONF, 'barbican') LOG = log.getLogger(__name__) config.setup_remote_pydev_debug() # Initializing the database engine and session factory before the app # starts ensures we don't lose requests due to lazy initialization of # db connections. try: repositories.setup_database_engine_and_factory( initialize_secret_stores=True ) repositories.commit() except Exception: LOG.exception('Failed to sync secret_stores table.') repositories.rollback() raise wsgi_app = func(global_config, **local_conf) if newrelic_loaded: wsgi_app = newrelic.agent.WSGIApplicationWrapper(wsgi_app) LOG.info('Barbican app created and initialized') return wsgi_app return _wrapper @main_app def create_main_app(global_config, **local_conf): """uWSGI factory method for the Barbican-API application.""" # Setup app with transactional hook enabled return build_wsgi_app(versions.V1Controller(), transactional=True) def create_version_app(global_config, **local_conf): wsgi_app = pecan.make_app(versions.VersionsController()) return wsgi_app def get_api_wsgi_script(): conf = '/etc/barbican/barbican-api-paste.ini' application = deploy.loadapp('config:%s' % conf) return application ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/barbican/api/app.wsgi0000664000175000017500000000164000000000000017343 0ustar00zuulzuul00000000000000# -*- mode: python -*- # # Copyright 2016 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Use this file for deploying the API under mod_wsgi. See http://pecan.readthedocs.org/en/latest/deployment.html for details. 
NOTE(mtreinish): This wsgi script is deprecated since the wsgi app is now exposed as an entrypoint via barbican-wsgi-api """ from barbican.api import app application = app.get_api_wsgi_script() ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1743590729.1290293 barbican-20.0.0/barbican/api/controllers/0000775000175000017500000000000000000000000020235 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/barbican/api/controllers/__init__.py0000664000175000017500000002175500000000000022360 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import collections.abc from oslo_policy import policy import pecan from webob import exc from barbican import api from barbican.common import accept from barbican.common import config from barbican.common import utils from barbican import i18n as u CONF = config.CONF LOG = utils.getLogger(__name__) def is_json_request_accept(req): """Test if http request 'accept' header configured for JSON response. :param req: HTTP request :return: True if need to return JSON response. """ return ( type(req.accept) is accept.NoHeaderType or type(req.accept) is accept.ValidHeaderType and ( req.accept.header_value == 'application/json' or req.accept.header_value == '*/*' ) ) def _get_barbican_context(req): if 'barbican.context' in req.environ: return req.environ['barbican.context'] else: return None def _do_enforce_rbac(inst, req, action_name, ctx, **kwargs): """Enforce RBAC based on 'request' information.""" if action_name and ctx: # Enforce special case: secret GET decryption if 'secret:get' == action_name and not is_json_request_accept(req): action_name = 'secret:decrypt' # Override to perform special rules target_name, target_data = inst.get_acl_tuple(req, **kwargs) policy_dict = { "enforce_new_defaults": CONF.oslo_policy.enforce_new_defaults } if target_name and target_data: policy_dict['target'] = {target_name: target_data} # Enforce access controls. if ctx.policy_enforcer: target = flatten(policy_dict) ctx.policy_enforcer.authorize(action_name, target, ctx, do_raise=True) def enforce_rbac(action_name='default'): """Decorator handling RBAC enforcement on behalf of REST verb methods.""" def rbac_decorator(fn): def enforcer(inst, *args, **kwargs): # Enforce RBAC rules. # context placed here by context.py # middleware ctx = _get_barbican_context(pecan.request) external_project_id = None if ctx: external_project_id = ctx.project_id _do_enforce_rbac(inst, pecan.request, action_name, ctx, **kwargs) # insert external_project_id as the first arg to the guarded method args = list(args) args.insert(0, external_project_id) # Execute guarded method now. 
return fn(inst, *args, **kwargs) return enforcer return rbac_decorator def handle_exceptions(operation_name=u._('System')): """Decorator handling generic exceptions from REST methods.""" def exceptions_decorator(fn): def handler(inst, *args, **kwargs): try: return fn(inst, *args, **kwargs) except exc.HTTPError: LOG.exception('Webob error seen') raise # Already converted to Webob exception, just reraise # In case PolicyNotAuthorized, we do not want to expose payload by # logging exception, so just LOG.error except policy.PolicyNotAuthorized as pna: status, message = api.generate_safe_exception_message( operation_name, pna) LOG.error(message) pecan.abort(status, message) except Exception as e: # In case intervening modules have disabled logging. LOG.logger.disabled = False status, message = api.generate_safe_exception_message( operation_name, e) LOG.exception(message) pecan.abort(status, message) return handler return exceptions_decorator def _do_enforce_content_types(pecan_req, valid_content_types): """Content type enforcement Check to see that content type in the request is one of the valid types passed in by our caller. """ if pecan_req.content_type not in valid_content_types: m = u._( "Unexpected content type. Expected content types " "are: {expected}" ).format( expected=valid_content_types ) pecan.abort(415, m) def enforce_content_types(valid_content_types=[]): """Decorator handling content type enforcement on behalf of REST verbs.""" def content_types_decorator(fn): def content_types_enforcer(inst, *args, **kwargs): _do_enforce_content_types(pecan.request, valid_content_types) return fn(inst, *args, **kwargs) return content_types_enforcer return content_types_decorator def flatten(d, parent_key=''): """Flatten a nested dictionary Converts a dictionary with nested values to a single level flat dictionary, with dotted notation for each key. """ items = [] for k, v in d.items(): new_key = parent_key + '.' + k if parent_key else k if isinstance(v, collections.abc.MutableMapping): items.extend(flatten(v, new_key).items()) else: items.append((new_key, v)) return dict(items) class ACLMixin(object): def __init__(self): self.secret = None self.container = None def get_acl_tuple(self, req, **kwargs): if self.secret is not None: entity = 'secret' elif self.container is not None: entity = 'container' else: return None, None entity_acls = getattr(getattr(self, entity), '{}_acls'.format(entity)) acl = self.get_acl_dict_for_user(req, entity_acls) acl['project_id'] = getattr(self, entity).project.external_id acl['creator_id'] = getattr(self, entity).creator_id return entity, acl def get_acl_dict_for_user(self, req, acl_list): """Get acl operation found for token user in acl list. Token user is looked into users list present for each acl operation. If there is a match, it means that ACL data is applicable for policy logic. Policy logic requires data as dictionary so this method capture acl's operation, project_access data in that format. For operation value, matching ACL record's operation is stored in dict as key and value both. project_access flag is intended to make secret/container private for a given operation. It doesn't require user match. So its captured in dict format where key is prefixed with related operation and flag is used as its value. Then for acl related policy logic, this acl dict data is combined with target entity (secret or container) creator_id and project id. The whole dict serves as target in policy enforcement logic i.e. right hand side of policy rule. 
Following is sample outcome where secret or container has ACL defined and token user is among the ACL users defined for 'read' and 'list' operation. {'read': 'read', 'list': 'list', 'read_project_access': True, 'list_project_access': True } Its possible that ACLs are defined without any user, they just have project_access flag set. This means only creator can read or list ACL entities. In that case, dictionary output can be as follows. {'read_project_access': False, 'list_project_access': False } """ ctxt = _get_barbican_context(req) if not ctxt: return {} acl_dict = {acl.operation: acl.operation for acl in acl_list if ctxt.user_id in acl.to_dict_fields().get('users', [])} co_dict = {'%s_project_access' % acl.operation: acl.project_access for acl in acl_list if acl.project_access is not None} if not co_dict: """ The co_dict is empty when the entity (secret or container) has no acls in its acl_list. This causes any policy with "%(target.secret.read_project_access)s" or "%(target.container.read_project_access)s" to always evaluate to False. This is probelmatic because we want to allow project access by default (with additional role checks). To work around this we allow read here. When the entity has an acl, co_dict will use the value from the database, and this if statement will be skipped. """ co_dict = {'read_project_access': True} acl_dict.update(co_dict) return acl_dict ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/barbican/api/controllers/acls.py0000664000175000017500000003653500000000000021545 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
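# ---------------------------------------------------------------------------
# Editor's illustrative note (not part of the upstream sources): the ACL
# dictionaries produced by the controllers in this module feed the policy
# "target" built in barbican/api/controllers/__init__.py, where
# _do_enforce_rbac() wraps them as {'target': {<entity>: <acl dict>}} and
# runs the result through flatten() before calling authorize().  With the
# hypothetical values 'p1'/'u1' below, flatten() would turn
#
#     {'target': {'secret': {'project_id': 'p1',
#                            'creator_id': 'u1',
#                            'read': 'read',
#                            'read_project_access': True}}}
#
# into the dotted keys that policy rules reference on their right-hand side:
#
#     {'target.secret.project_id': 'p1',
#      'target.secret.creator_id': 'u1',
#      'target.secret.read': 'read',
#      'target.secret.read_project_access': True}
# ---------------------------------------------------------------------------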
import pecan from barbican import api from barbican.api import controllers from barbican.common import hrefs from barbican.common import utils from barbican.common import validators from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repo LOG = utils.getLogger(__name__) def _convert_acl_to_response_format(acl, acls_dict): fields = acl.to_dict_fields() operation = fields['operation'] acl_data = {} # dict for each acl operation data acl_data['project-access'] = fields['project_access'] acl_data['users'] = fields.get('users', []) acl_data['created'] = fields['created'] acl_data['updated'] = fields['updated'] acls_dict[operation] = acl_data DEFAULT_ACL = {'read': {'project-access': True}} class SecretACLsController(controllers.ACLMixin): """Handles SecretACL requests by a given secret id.""" def __init__(self, secret): super().__init__() self.secret = secret self.secret_project_id = self.secret.project.external_id self.acl_repo = repo.get_secret_acl_repository() self.validator = validators.ACLValidator() def get_acl_tuple(self, req, **kwargs): d = {'project_id': self.secret_project_id, 'creator_id': self.secret.creator_id} return 'secret', d @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('SecretACL(s) retrieval')) @controllers.enforce_rbac('secret_acls:get') def on_get(self, external_project_id, **kw): LOG.debug('Start secret ACL on_get ' 'for secret-ID %s:', self.secret.id) return self._return_acl_list_response(self.secret.id) @index.when(method='PATCH', template='json') @controllers.handle_exceptions(u._('SecretACL(s) Update')) @controllers.enforce_rbac('secret_acls:put_patch') @controllers.enforce_content_types(['application/json']) def on_patch(self, external_project_id, **kwargs): """Handles update of existing secret acl requests. At least one secret ACL needs to exist for update to proceed. In update, multiple operation ACL payload can be specified as mentioned in sample below. A specific ACL can be updated by its own id via SecretACLController patch request. { "read":{ "users":[ "5ecb18f341894e94baca9e8c7b6a824a", "20b63d71f90848cf827ee48074f213b7", "c7753f8da8dc4fbea75730ab0b6e0ef4" ] }, "write":{ "users":[ "5ecb18f341894e94baca9e8c7b6a824a" ], "project-access":true } } """ data = api.load_body(pecan.request, validator=self.validator) LOG.debug('Start on_patch...%s', data) existing_acls_map = {acl.operation: acl for acl in self.secret.secret_acls} for operation in filter(lambda x: data.get(x), validators.ACL_OPERATIONS): project_access = data[operation].get('project-access') user_ids = data[operation].get('users') s_acl = None if operation in existing_acls_map: # update if matching acl exists s_acl = existing_acls_map[operation] if project_access is not None: s_acl.project_access = project_access else: s_acl = models.SecretACL(self.secret.id, operation=operation, project_access=project_access) self.acl_repo.create_or_replace_from(self.secret, secret_acl=s_acl, user_ids=user_ids) acl_ref = '{0}/acl'.format( hrefs.convert_secret_to_href(self.secret.id)) return {'acl_ref': acl_ref} @index.when(method='PUT', template='json') @controllers.handle_exceptions(u._('SecretACL(s) Update')) @controllers.enforce_rbac('secret_acls:put_patch') @controllers.enforce_content_types(['application/json']) def on_put(self, external_project_id, **kwargs): """Handles update of existing secret acl requests. 
Replaces existing secret ACL(s) with input ACL(s) data. Existing ACL operation not specified in input are removed as part of update. For missing project-access in ACL, true is used as default. In update, multiple operation ACL payload can be specified as mentioned in sample below. A specific ACL can be updated by its own id via SecretACLController patch request. { "read":{ "users":[ "5ecb18f341894e94baca9e8c7b6a824a", "20b63d71f90848cf827ee48074f213b7", "c7753f8da8dc4fbea75730ab0b6e0ef4" ] }, "write":{ "users":[ "5ecb18f341894e94baca9e8c7b6a824a" ], "project-access":false } } Every secret, by default, has an implicit ACL in case client has not defined an explicit ACL. That default ACL definition, DEFAULT_ACL, signifies that a secret by default has project based access i.e. client with necessary roles on secret project can access the secret. That's why when ACL is added to a secret, it always returns 200 (and not 201) indicating existence of implicit ACL on a secret. """ data = api.load_body(pecan.request, validator=self.validator) LOG.debug('Start on_put...%s', data) existing_acls_map = {acl.operation: acl for acl in self.secret.secret_acls} for operation in filter(lambda x: data.get(x), validators.ACL_OPERATIONS): project_access = data[operation].get('project-access', True) user_ids = data[operation].get('users', []) s_acl = None if operation in existing_acls_map: # update if matching acl exists s_acl = existing_acls_map.pop(operation) s_acl.project_access = project_access else: s_acl = models.SecretACL(self.secret.id, operation=operation, project_access=project_access) self.acl_repo.create_or_replace_from(self.secret, secret_acl=s_acl, user_ids=user_ids) # delete remaining existing acls as they are not present in input. for acl in existing_acls_map.values(): self.acl_repo.delete_entity_by_id(entity_id=acl.id, external_project_id=None) acl_ref = '{0}/acl'.format( hrefs.convert_secret_to_href(self.secret.id)) return {'acl_ref': acl_ref} @index.when(method='DELETE', template='json') @controllers.handle_exceptions(u._('SecretACL(s) deletion')) @controllers.enforce_rbac('secret_acls:delete') def on_delete(self, external_project_id, **kwargs): count = self.acl_repo.get_count(self.secret.id) if count > 0: self.acl_repo.delete_acls_for_secret(self.secret) def _return_acl_list_response(self, secret_id): result = self.acl_repo.get_by_secret_id(secret_id) acls_data = {} if result: for acl in result: _convert_acl_to_response_format(acl, acls_data) if not acls_data: acls_data = DEFAULT_ACL.copy() return acls_data class ContainerACLsController(controllers.ACLMixin): """Handles ContainerACL requests by a given container id.""" def __init__(self, container): super().__init__() self.container = container self.container_id = container.id self.acl_repo = repo.get_container_acl_repository() self.container_repo = repo.get_container_repository() self.validator = validators.ACLValidator() self.container_project_id = container.project.external_id def get_acl_tuple(self, req, **kwargs): d = {'project_id': self.container_project_id, 'creator_id': self.container.creator_id} return 'container', d @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('ContainerACL(s) retrieval')) @controllers.enforce_rbac('container_acls:get') def on_get(self, external_project_id, **kw): LOG.debug('Start container ACL on_get ' 'for container-ID %s:', self.container_id) return 
self._return_acl_list_response(self.container.id) @index.when(method='PATCH', template='json') @controllers.handle_exceptions(u._('ContainerACL(s) Update')) @controllers.enforce_rbac('container_acls:put_patch') @controllers.enforce_content_types(['application/json']) def on_patch(self, external_project_id, **kwargs): """Handles update of existing container acl requests. At least one container ACL needs to exist for update to proceed. In update, multiple operation ACL payload can be specified as mentioned in sample below. A specific ACL can be updated by its own id via ContainerACLController patch request. { "read":{ "users":[ "5ecb18f341894e94baca9e8c7b6a824a", "20b63d71f90848cf827ee48074f213b7", "c7753f8da8dc4fbea75730ab0b6e0ef4" ] }, "write":{ "users":[ "5ecb18f341894e94baca9e8c7b6a824a" ], "project-access":false } } """ data = api.load_body(pecan.request, validator=self.validator) LOG.debug('Start ContainerACLsController on_patch...%s', data) existing_acls_map = {acl.operation: acl for acl in self.container.container_acls} for operation in filter(lambda x: data.get(x), validators.ACL_OPERATIONS): project_access = data[operation].get('project-access') user_ids = data[operation].get('users') if operation in existing_acls_map: # update if matching acl exists c_acl = existing_acls_map[operation] if project_access is not None: c_acl.project_access = project_access else: c_acl = models.ContainerACL(self.container.id, operation=operation, project_access=project_access) self.acl_repo.create_or_replace_from(self.container, container_acl=c_acl, user_ids=user_ids) acl_ref = '{0}/acl'.format( hrefs.convert_container_to_href(self.container.id)) return {'acl_ref': acl_ref} @index.when(method='PUT', template='json') @controllers.handle_exceptions(u._('ContainerACL(s) Update')) @controllers.enforce_rbac('container_acls:put_patch') @controllers.enforce_content_types(['application/json']) def on_put(self, external_project_id, **kwargs): """Handles update of existing container acl requests. Replaces existing container ACL(s) with input ACL(s) data. Existing ACL operation not specified in input are removed as part of update. For missing project-access in ACL, true is used as default. In update, multiple operation ACL payload can be specified as mentioned in sample below. A specific ACL can be updated by its own id via ContainerACLController patch request. { "read":{ "users":[ "5ecb18f341894e94baca9e8c7b6a824a", "20b63d71f90848cf827ee48074f213b7", "c7753f8da8dc4fbea75730ab0b6e0ef4" ] }, "write":{ "users":[ "5ecb18f341894e94baca9e8c7b6a824a" ], "project-access":false } } Every container, by default, has an implicit ACL in case client has not defined an explicit ACL. That default ACL definition, DEFAULT_ACL, signifies that a container by default has project based access i.e. client with necessary roles on container project can access the container. That's why when ACL is added to a container, it always returns 200 (and not 201) indicating existence of implicit ACL on a container. 
""" data = api.load_body(pecan.request, validator=self.validator) LOG.debug('Start ContainerACLsController on_put...%s', data) existing_acls_map = {acl.operation: acl for acl in self.container.container_acls} for operation in filter(lambda x: data.get(x), validators.ACL_OPERATIONS): project_access = data[operation].get('project-access', True) user_ids = data[operation].get('users', []) if operation in existing_acls_map: # update if matching acl exists c_acl = existing_acls_map.pop(operation) c_acl.project_access = project_access else: c_acl = models.ContainerACL(self.container.id, operation=operation, project_access=project_access) self.acl_repo.create_or_replace_from(self.container, container_acl=c_acl, user_ids=user_ids) # delete remaining existing acls as they are not present in input. for acl in existing_acls_map.values(): self.acl_repo.delete_entity_by_id(entity_id=acl.id, external_project_id=None) acl_ref = '{0}/acl'.format( hrefs.convert_container_to_href(self.container.id)) return {'acl_ref': acl_ref} @index.when(method='DELETE', template='json') @controllers.handle_exceptions(u._('ContainerACL(s) deletion')) @controllers.enforce_rbac('container_acls:delete') def on_delete(self, external_project_id, **kwargs): count = self.acl_repo.get_count(self.container_id) if count > 0: self.acl_repo.delete_acls_for_container(self.container) def _return_acl_list_response(self, container_id): result = self.acl_repo.get_by_container_id(container_id) acls_data = {} if result: for acl in result: _convert_acl_to_response_format(acl, acls_data) if not acls_data: acls_data = DEFAULT_ACL.copy() return acls_data ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/barbican/api/controllers/consumers.py0000664000175000017500000003465700000000000022644 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pecan from barbican import api from barbican.api import controllers from barbican.common import exception from barbican.common import hrefs from barbican.common import quota from barbican.common import resources as res from barbican.common import utils from barbican.common import validators from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repo from barbican.plugin import util as putil LOG = utils.getLogger(__name__) def _consumer_not_found(): """Throw exception indicating consumer not found.""" pecan.abort(404, u._('Consumer not found.')) def _consumer_ownership_mismatch(): """Throw exception indicating the user does not own this consumer.""" pecan.abort(403, u._('Not Allowed. Sorry, only the creator of a consumer ' 'can delete it.')) def _invalid_consumer_id(): """Throw exception indicating consumer id is invalid.""" pecan.abort(404, u._('Not Found. 
Provided consumer id is invalid.')) class ContainerConsumerController(controllers.ACLMixin): """Handles Container Consumer entity retrieval and deletion requests""" def __init__(self, container, consumer_id): super().__init__() self.container = container self.consumer_id = consumer_id self.consumer_repo = repo.get_container_consumer_repository() self.validator = validators.ContainerConsumerValidator() @pecan.expose(generic=True) def index(self): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('ContainerConsumer retrieval')) @controllers.enforce_rbac('consumer:get') def on_get(self, external_project_id): consumer = self.consumer_repo.get( entity_id=self.consumer_id, suppress_exception=True) if not consumer: _consumer_not_found() dict_fields = consumer.to_dict_fields() LOG.info('Retrieved a consumer for project: %s', external_project_id) return hrefs.convert_to_hrefs( hrefs.convert_to_hrefs(dict_fields) ) class ContainerConsumersController(controllers.ACLMixin): """Handles Container Consumer creation requests""" def __init__(self, container): super().__init__() self.container = container self.container_id = self.container.id self.consumer_repo = repo.get_container_consumer_repository() self.container_repo = repo.get_container_repository() self.project_repo = repo.get_project_repository() self.validator = validators.ContainerConsumerValidator() self.quota_enforcer = quota.QuotaEnforcer('consumers', self.consumer_repo) @pecan.expose() def _lookup(self, consumer_id, *remainder): if not utils.validate_id_is_uuid(consumer_id): _invalid_consumer_id()() return ContainerConsumerController(self.container, consumer_id), \ remainder @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('ContainerConsumers(s) retrieval')) @controllers.enforce_rbac('container_consumers:get') def on_get(self, external_project_id, **kw): LOG.debug('Start consumers on_get ' 'for container-ID %s:', self.container_id) result = self.consumer_repo.get_by_container_id( self.container_id, offset_arg=kw.get('offset', 0), limit_arg=kw.get('limit'), suppress_exception=True ) consumers, offset, limit, total = result if not consumers: resp_ctrs_overall = {'consumers': [], 'total': total} else: resp_ctrs = [ hrefs.convert_to_hrefs(c.to_dict_fields()) for c in consumers ] consumer_path = "containers/{container_id}/consumers".format( container_id=self.container_id) resp_ctrs_overall = hrefs.add_nav_hrefs( consumer_path, offset, limit, total, {'consumers': resp_ctrs} ) resp_ctrs_overall.update({'total': total}) LOG.info('Retrieved a container consumer list for project: %s', external_project_id) return resp_ctrs_overall @index.when(method='POST', template='json') @controllers.handle_exceptions(u._('ContainerConsumer creation')) @controllers.enforce_rbac('container_consumers:post') @controllers.enforce_content_types(['application/json']) def on_post(self, external_project_id, **kwargs): project = res.get_or_create_project(external_project_id) data = api.load_body(pecan.request, validator=self.validator) LOG.debug('Start on_post...%s', data) self.quota_enforcer.enforce(project) new_consumer = models.ContainerConsumerMetadatum(self.container_id, project.id, data) self.consumer_repo.create_or_update_from(new_consumer, self.container) url = hrefs.convert_consumer_to_href(new_consumer.container_id) 
pecan.response.headers['Location'] = url LOG.info('Created a container consumer for project: %s', external_project_id) return self._return_container_data() @index.when(method='DELETE', template='json') @controllers.handle_exceptions(u._('ContainerConsumer deletion')) @controllers.enforce_rbac('container_consumers:delete') @controllers.enforce_content_types(['application/json']) def on_delete(self, external_project_id, **kwargs): data = api.load_body(pecan.request, validator=self.validator) LOG.debug('Start on_delete...%s', data) project = res.get_or_create_project(external_project_id) consumer = self.consumer_repo.get_by_values( self.container_id, data["name"], data["URL"], suppress_exception=True ) if not consumer: _consumer_not_found() LOG.debug("Found container consumer: %s", consumer) owner_of_consumer = consumer.project_id == project.id owner_of_container = self.container.project.external_id \ == external_project_id if not owner_of_consumer and not owner_of_container: _consumer_ownership_mismatch() try: self.consumer_repo.delete_entity_by_id(consumer.id, external_project_id) except exception.NotFound: LOG.exception('Problem deleting container consumer') _consumer_not_found() ret_data = self._return_container_data() LOG.info('Deleted a container consumer for project: %s', external_project_id) return ret_data def _return_container_data(self): dict_fields = self.container.to_dict_fields() for secret_ref in dict_fields['secret_refs']: hrefs.convert_to_hrefs(secret_ref) # TODO(john-wood-w) Why two calls to convert_to_hrefs()? return hrefs.convert_to_hrefs( hrefs.convert_to_hrefs(dict_fields) ) class SecretConsumerController(controllers.ACLMixin): """Handles Secret Consumer entity retrieval and deletion requests""" def __init__(self, secret, consumer_id): super().__init__() self.secret = secret self.consumer_id = consumer_id self.consumer_repo = repo.get_secret_consumer_repository() self.validator = validators.SecretConsumerValidator() @pecan.expose(generic=True) def index(self): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('SecretConsumer retrieval')) @controllers.enforce_rbac('consumer:get') def on_get(self, external_project_id): consumer = self.consumer_repo.get( entity_id=self.consumer_id, suppress_exception=True) if not consumer: _consumer_not_found() dict_fields = consumer.to_dict_fields() LOG.info('Retrieved a secret consumer for project: %s', external_project_id) return hrefs.convert_to_hrefs( hrefs.convert_to_hrefs(dict_fields) ) class SecretConsumersController(controllers.ACLMixin): """Handles Secret Consumer creation requests""" def __init__(self, secret): super().__init__() self.secret = secret self.secret_id = secret.id self.consumer_repo = repo.get_secret_consumer_repository() self.secret_repo = repo.get_secret_repository() self.project_repo = repo.get_project_repository() self.validator = validators.SecretConsumerValidator() self.quota_enforcer = quota.QuotaEnforcer('consumers', self.consumer_repo) @pecan.expose() def _lookup(self, consumer_id, *remainder): if not utils.validate_id_is_uuid(consumer_id): _invalid_consumer_id()() return SecretConsumerController(self.secret, consumer_id), remainder @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('SecretConsumers(s) retrieval')) @controllers.enforce_rbac('secret_consumers:get') def on_get(self, 
external_project_id, **kw): LOG.debug('Start consumers on_get ' 'for secret-ID %s:', self.secret_id) result = self.consumer_repo.get_by_secret_id( self.secret_id, offset_arg=kw.get('offset', 0), limit_arg=kw.get('limit'), suppress_exception=True ) consumers, offset, limit, total = result if not consumers: resp_ctrs_overall = {'consumers': [], 'total': total} else: resp_ctrs = [ hrefs.convert_to_hrefs(c.to_dict_fields()) for c in consumers ] consumer_path = "secrets/{secret_id}/consumers".format( secret_id=self.secret_id) resp_ctrs_overall = hrefs.add_nav_hrefs( consumer_path, offset, limit, total, {'consumers': resp_ctrs} ) resp_ctrs_overall.update({'total': total}) LOG.info('Retrieved a consumer list for project: %s', external_project_id) return resp_ctrs_overall @index.when(method='POST', template='json') @controllers.handle_exceptions(u._('SecretConsumer creation')) @controllers.enforce_rbac('secret_consumers:post') @controllers.enforce_content_types(['application/json']) def on_post(self, external_project_id, **kwargs): project = res.get_or_create_project(external_project_id) data = api.load_body(pecan.request, validator=self.validator) LOG.debug('Start on_post...%s', data) self.quota_enforcer.enforce(project) new_consumer = models.SecretConsumerMetadatum( self.secret_id, project.id, data["service"], data["resource_type"], data["resource_id"], ) self.consumer_repo.create_or_update_from(new_consumer, self.secret) url = hrefs.convert_consumer_to_href(new_consumer.secret_id) pecan.response.headers['Location'] = url LOG.info('Created a consumer for project: %s', external_project_id) return self._return_secret_data(self.secret_id) @index.when(method='DELETE', template='json') @controllers.handle_exceptions(u._('SecretConsumer deletion')) @controllers.enforce_rbac('secret_consumers:delete') @controllers.enforce_content_types(['application/json']) def on_delete(self, external_project_id, **kwargs): data = api.load_body(pecan.request, validator=self.validator) LOG.debug('Start on_delete...%s', data) project = self.project_repo.find_by_external_project_id( external_project_id, suppress_exception=True) if not project: _consumer_not_found() consumer = self.consumer_repo.get_by_values( self.secret_id, data["service"], data["resource_type"], data["resource_id"], suppress_exception=True ) if not consumer: _consumer_not_found() LOG.debug("Found consumer: %s", consumer) owner_of_consumer = consumer.project_id == project.id owner_of_secret = self.secret.project.external_id \ == external_project_id if not owner_of_consumer and not owner_of_secret: _consumer_ownership_mismatch() try: self.consumer_repo.delete_entity_by_id(consumer.id, external_project_id) except exception.NotFound: LOG.exception('Problem deleting consumer') _consumer_not_found() ret_data = self._return_secret_data(self.secret_id) LOG.info('Deleted a secret consumer for project: %s', external_project_id) return ret_data def _get_secret(self, secret_id): secret = self.secret_repo.get_secret_by_id( secret_id, suppress_exception=True) if not secret: controllers.secrets.secret_not_found() return secret def _return_secret_data(self, secret_id): secret = self._get_secret(secret_id) secret_fields = putil.mime_types.augment_fields_with_content_types( secret) return hrefs.convert_to_hrefs(secret_fields) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/barbican/api/controllers/containers.py0000664000175000017500000003030100000000000022751 0ustar00zuulzuul00000000000000# 
Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pecan from barbican import api from barbican.api import controllers from barbican.api.controllers import acls from barbican.api.controllers import consumers from barbican.common import exception from barbican.common import hrefs from barbican.common import quota from barbican.common import resources as res from barbican.common import utils from barbican.common import validators from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repo LOG = utils.getLogger(__name__) CONTAINER_GET = 'container:get' def container_not_found(): """Throw exception indicating container not found.""" pecan.abort(404, u._('Secrets container not found.')) def invalid_container_id(): """Throw exception indicating container id is invalid.""" pecan.abort(404, u._('Not Found. Provided container id is invalid.')) class ContainerController(controllers.ACLMixin): """Handles Container entity retrieval and deletion requests.""" def __init__(self, container): super().__init__() self.container = container self.container_id = container.id self.consumer_repo = repo.get_container_consumer_repository() self.container_repo = repo.get_container_repository() self.validator = validators.ContainerValidator() self.consumers = consumers.ContainerConsumersController( self.container) self.acl = acls.ContainerACLsController(self.container) @pecan.expose(generic=True, template='json') def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Container retrieval')) @controllers.enforce_rbac(CONTAINER_GET) def on_get(self, external_project_id): dict_fields = self.container.to_dict_fields() for secret_ref in dict_fields['secret_refs']: hrefs.convert_to_hrefs(secret_ref) LOG.info('Retrieved container for project: %s', external_project_id) return hrefs.convert_to_hrefs( hrefs.convert_to_hrefs(dict_fields) ) @index.when(method='DELETE') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Container deletion')) @controllers.enforce_rbac('container:delete') def on_delete(self, external_project_id, **kwargs): container_consumers = self.consumer_repo.get_by_container_id( self.container_id, suppress_exception=True ) try: self.container_repo.delete_entity_by_id( entity_id=self.container_id, external_project_id=external_project_id ) except exception.NotFound: LOG.exception('Problem deleting container') container_not_found() LOG.info('Deleted container for project: %s', external_project_id) for consumer in container_consumers[0]: try: self.consumer_repo.delete_entity_by_id( consumer.id, external_project_id) except exception.NotFound: # nosec pass class ContainersController(controllers.ACLMixin): """Handles Container creation requests.""" def __init__(self): super().__init__() self.consumer_repo = repo.get_container_consumer_repository() self.container_repo = repo.get_container_repository() self.secret_repo = repo.get_secret_repository() 
self.validator = validators.ContainerValidator() self.quota_enforcer = quota.QuotaEnforcer('containers', self.container_repo) @pecan.expose() def _lookup(self, container_id, *remainder): if not utils.validate_id_is_uuid(container_id): invalid_container_id() container = self.container_repo.get_container_by_id( entity_id=container_id, suppress_exception=True) if not container: container_not_found() if len(remainder) > 0 and remainder[0] == 'secrets': return ContainersSecretsController(container), () return ContainerController(container), remainder @pecan.expose(generic=True, template='json') def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Containers(s) retrieval')) @controllers.enforce_rbac('containers:get') def on_get(self, project_id, **kw): LOG.debug('Start containers on_get for project-ID %s:', project_id) result = self.container_repo.get_by_create_date( project_id, offset_arg=kw.get('offset', 0), limit_arg=kw.get('limit', None), name_arg=kw.get('name', None), type_arg=kw.get('type', None), suppress_exception=True ) containers, offset, limit, total = result if not containers: resp_ctrs_overall = {'containers': [], 'total': total} else: resp_ctrs = [ hrefs.convert_to_hrefs(c.to_dict_fields()) for c in containers ] for ctr in resp_ctrs: for secret_ref in ctr.get('secret_refs', []): hrefs.convert_to_hrefs(secret_ref) resp_ctrs_overall = hrefs.add_nav_hrefs( 'containers', offset, limit, total, {'containers': resp_ctrs} ) resp_ctrs_overall.update({'total': total}) LOG.info('Retrieved container list for project: %s', project_id) return resp_ctrs_overall @index.when(method='POST', template='json') @controllers.handle_exceptions(u._('Container creation')) @controllers.enforce_rbac('containers:post') @controllers.enforce_content_types(['application/json']) def on_post(self, external_project_id, **kwargs): project = res.get_or_create_project(external_project_id) data = api.load_body(pecan.request, validator=self.validator) ctxt = controllers._get_barbican_context(pecan.request) if ctxt: # in authenticated pipleline case, always use auth token user data['creator_id'] = ctxt.user_id self.quota_enforcer.enforce(project) LOG.debug('Start on_post...%s', data) new_container = models.Container(data) new_container.project_id = project.id # TODO(hgedikli): performance optimizations for secret_ref in new_container.container_secrets: secret = self.secret_repo.get( entity_id=secret_ref.secret_id, external_project_id=external_project_id, suppress_exception=True) if not secret: # This only partially localizes the error message and # doesn't localize secret_ref.name. 
pecan.abort( 404, u._("Secret provided for '{secret_name}' doesn't " "exist.").format(secret_name=secret_ref.name) ) self.container_repo.create_from(new_container) url = hrefs.convert_container_to_href(new_container.id) pecan.response.status = 201 pecan.response.headers['Location'] = url LOG.info('Created a container for project: %s', external_project_id) return {'container_ref': url} class ContainersSecretsController(controllers.ACLMixin): """Handles ContainerSecret creation and deletion requests.""" def __init__(self, container): LOG.debug('=== Creating ContainerSecretsController ===') super().__init__() self.container = container self.container_secret_repo = repo.get_container_secret_repository() self.secret_repo = repo.get_secret_repository() self.validator = validators.ContainerSecretValidator() @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='POST', template='json') @controllers.handle_exceptions(u._('Container Secret creation')) @controllers.enforce_rbac('container_secret:post') @controllers.enforce_content_types(['application/json']) def on_post(self, external_project_id, **kwargs): """Handles adding an existing secret to an existing container.""" if self.container.type != 'generic': pecan.abort(400, u._("Only 'generic' containers can be modified.")) data = api.load_body(pecan.request, validator=self.validator) name = data.get('name') secret_ref = data.get('secret_ref') secret_id = hrefs.get_secret_id_from_ref(secret_ref) secret = self.secret_repo.get( entity_id=secret_id, external_project_id=external_project_id, suppress_exception=True) if not secret: pecan.abort(404, u._("Secret provided doesn't exist.")) found_container_secrets = list( filter(lambda cs: cs.secret_id == secret_id and cs.name == name, self.container.container_secrets) ) if found_container_secrets: pecan.abort(409, u._('Conflict. A secret with that name and ID is ' 'already stored in this container. 
The same ' 'secret can exist in a container as long as ' 'the name is unique.')) LOG.debug('Start container secret on_post...%s', secret_ref) new_container_secret = models.ContainerSecret() new_container_secret.container_id = self.container.id new_container_secret.name = name new_container_secret.secret_id = secret_id self.container_secret_repo.save(new_container_secret) url = hrefs.convert_container_to_href(self.container.id) LOG.debug('URI to container is %s', url) pecan.response.status = 201 pecan.response.headers['Location'] = url LOG.info('Created a container secret for project: %s', external_project_id) return {'container_ref': url} @index.when(method='DELETE') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Container Secret deletion')) @controllers.enforce_rbac('container_secret:delete') def on_delete(self, external_project_id, **kwargs): """Handles removing a secret reference from an existing container.""" data = api.load_body(pecan.request, validator=self.validator) name = data.get('name') secret_ref = data.get('secret_ref') secret_id = hrefs.get_secret_id_from_ref(secret_ref) secret = self.secret_repo.get( entity_id=secret_id, external_project_id=external_project_id, suppress_exception=True) if not secret: pecan.abort(404, u._("Secret '{secret_name}' with reference " "'{secret_ref}' doesn't exist.").format( secret_name=name, secret_ref=secret_ref)) found_container_secrets = list( filter(lambda cs: cs.secret_id == secret_id and cs.name == name, self.container.container_secrets) ) if not found_container_secrets: pecan.abort(404, u._('Secret provided is not in the container')) for container_secret in found_container_secrets: self.container_secret_repo.delete_entity_by_id( container_secret.id, external_project_id) pecan.response.status = 204 LOG.info('Deleted container secret for project: %s', external_project_id) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/barbican/api/controllers/orders.py0000664000175000017500000001715000000000000022111 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
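# ---------------------------------------------------------------------------
# Illustrative sketch of driving the container controllers defined in
# containers.py above (ContainersController and ContainersSecretsController).
# Not part of the Barbican source; the endpoint, token, and secret_ref values
# are placeholders standing in for a real deployment.
import requests

BARBICAN = 'http://localhost:9311/v1'            # assumed service endpoint
HEADERS = {'X-Auth-Token': '<keystone-token>'}   # assumed credential
secret_ref = f'{BARBICAN}/secrets/<secret-uuid>' # assumed existing secret

# Create a generic container that references the secret by name.
container = {
    'name': 'tls-bundle',
    'type': 'generic',
    'secret_refs': [{'name': 'certificate', 'secret_ref': secret_ref}],
}
resp = requests.post(f'{BARBICAN}/containers', json=container, headers=HEADERS)
resp.raise_for_status()
container_ref = resp.json()['container_ref']

# Add another secret reference; only 'generic' containers accept this,
# exactly as ContainersSecretsController.on_post enforces above.
requests.post(f'{container_ref}/secrets',
              json={'name': 'private_key', 'secret_ref': secret_ref},
              headers=HEADERS)

# Remove the reference again (the secret itself is left untouched).
requests.delete(f'{container_ref}/secrets',
                json={'name': 'private_key', 'secret_ref': secret_ref},
                headers=HEADERS)
# ---------------------------------------------------------------------------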
import pecan from barbican import api from barbican.api import controllers from barbican.common import hrefs from barbican.common import quota from barbican.common import resources as res from barbican.common import utils from barbican.common import validators from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repo from barbican.queue import client as async_client LOG = utils.getLogger(__name__) def _order_not_found(): """Throw exception indicating order not found.""" pecan.abort(404, u._('Order not found.')) def _secret_not_in_order(): """Throw exception that secret info is not available in the order.""" pecan.abort(400, u._("Secret metadata expected but not received.")) def _order_update_not_supported(): """Throw exception that PUT operation is not supported for orders.""" pecan.abort(405, u._("Order update is not supported.")) def _order_cannot_be_updated_if_not_pending(order_status): """Throw exception that order cannot be updated if not PENDING.""" pecan.abort(400, u._("Only PENDING orders can be updated. Order is in the" "{0} state.").format(order_status)) def order_cannot_modify_order_type(): """Throw exception that order type cannot be modified.""" pecan.abort(400, u._("Cannot modify order type.")) class OrderController(controllers.ACLMixin): """Handles Order retrieval and deletion requests.""" def __init__(self, order, queue_resource=None): super().__init__() self.order = order self.order_repo = repo.get_order_repository() self.queue = queue_resource or async_client.TaskClient() self.type_order_validator = validators.TypeOrderValidator() def get_acl_tuple(self, req, **kwargs): acl = dict() acl['project_id'] = self.order.project.external_id acl['creator_id'] = self.order.creator_id return 'order', acl @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Order retrieval')) @controllers.enforce_rbac('order:get') def on_get(self, external_project_id): return hrefs.convert_to_hrefs(self.order.to_dict_fields()) @index.when(method='DELETE') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Order deletion')) @controllers.enforce_rbac('order:delete') def on_delete(self, external_project_id, **kwargs): self.order_repo.delete_entity_by_id( entity_id=self.order.id, external_project_id=external_project_id) class OrdersController(controllers.ACLMixin): """Handles Order requests for Secret creation.""" def __init__(self, queue_resource=None): LOG.debug('Creating OrdersController') super().__init__() self.order_repo = repo.get_order_repository() self.queue = queue_resource or async_client.TaskClient() self.type_order_validator = validators.TypeOrderValidator() self.quota_enforcer = quota.QuotaEnforcer('orders', self.order_repo) @pecan.expose() def _lookup(self, order_id, *remainder): # NOTE(jaosorior): It's worth noting that even though this section # actually does a lookup in the database regardless of the RBAC policy # check, the execution only gets here if authentication of the user was # previously successful. 
ctx = controllers._get_barbican_context(pecan.request) order = self.order_repo.get(entity_id=order_id, external_project_id=ctx.project_id, suppress_exception=True) if not order: _order_not_found() return OrderController(order, self.order_repo), remainder @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Order(s) retrieval')) @controllers.enforce_rbac('orders:get') def on_get(self, external_project_id, **kw): LOG.debug('Start orders on_get ' 'for project-ID %s:', external_project_id) result = self.order_repo.get_by_create_date( external_project_id, offset_arg=kw.get('offset', 0), limit_arg=kw.get('limit', None), meta_arg=kw.get('meta', None), suppress_exception=True) orders, offset, limit, total = result if not orders: orders_resp_overall = {'orders': [], 'total': total} else: orders_resp = [ hrefs.convert_to_hrefs(o.to_dict_fields()) for o in orders ] orders_resp_overall = hrefs.add_nav_hrefs('orders', offset, limit, total, {'orders': orders_resp}) orders_resp_overall.update({'total': total}) return orders_resp_overall @index.when(method='PUT', template='json') @controllers.handle_exceptions(u._('Order update')) @controllers.enforce_rbac('orders:put') def on_put(self, external_project_id, **kwargs): _order_update_not_supported() @index.when(method='POST', template='json') @controllers.handle_exceptions(u._('Order creation')) @controllers.enforce_rbac('orders:post') @controllers.enforce_content_types(['application/json']) def on_post(self, external_project_id, **kwargs): project = res.get_or_create_project(external_project_id) body = api.load_body(pecan.request, validator=self.type_order_validator) order_type = body.get('type') order_meta = body.get('meta') request_type = order_meta.get('request_type') LOG.debug('Processing order type %(order_type)s,' ' request type %(request_type)s' % {'order_type': order_type, 'request_type': request_type}) self.quota_enforcer.enforce(project) new_order = models.Order() new_order.meta = body.get('meta') new_order.type = order_type new_order.project_id = project.id request_id = None ctxt = controllers._get_barbican_context(pecan.request) if ctxt: new_order.creator_id = ctxt.user_id request_id = ctxt.request_id self.order_repo.create_from(new_order) # Grab our id before commit due to obj expiration from sqlalchemy order_id = new_order.id # Force commit to avoid async issues with the workers repo.commit() self.queue.process_type_order(order_id=order_id, project_id=external_project_id, request_id=request_id) url = hrefs.convert_order_to_href(order_id) pecan.response.status = 202 pecan.response.headers['Location'] = url return {'order_ref': url} ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/barbican/api/controllers/quotas.py0000664000175000017500000001175500000000000022134 0ustar00zuulzuul00000000000000# Copyright (c) 2015 Cisco Systems # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
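# ---------------------------------------------------------------------------
# Illustrative sketch of the asynchronous order flow implemented by
# OrdersController above: POST an order, then poll the order until the worker
# has generated the secret. Not part of the Barbican source; the endpoint,
# token, and the polling interval are assumptions.
import time

import requests

BARBICAN = 'http://localhost:9311/v1'            # assumed service endpoint
HEADERS = {'X-Auth-Token': '<keystone-token>'}   # assumed credential

# Ask Barbican to generate a 256-bit AES key; on_post above validates the
# body, enqueues the work, and answers 202 with an order_ref.
order = {
    'type': 'key',
    'meta': {
        'name': 'my-aes-key',
        'algorithm': 'aes',
        'bit_length': 256,
        'mode': 'cbc',
        'payload_content_type': 'application/octet-stream',
    },
}
resp = requests.post(f'{BARBICAN}/orders', json=order, headers=HEADERS)
resp.raise_for_status()
order_ref = resp.json()['order_ref']

# Poll until the order leaves the PENDING state (simplistic, no timeout).
while True:
    status = requests.get(order_ref, headers=HEADERS).json()
    if status['status'] != 'PENDING':
        break
    time.sleep(1)

print(status['status'], status.get('secret_ref'))
# ---------------------------------------------------------------------------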
See the # License for the specific language governing permissions and limitations # under the License. import pecan from barbican import api from barbican.api import controllers from barbican.common import exception from barbican.common import quota from barbican.common import resources as res from barbican.common import utils from barbican.common import validators from barbican import i18n as u LOG = utils.getLogger(__name__) def _project_quotas_not_found(): """Throw exception indicating project quotas not found.""" pecan.abort(404, u._('Project quotas not found.')) class QuotasController(controllers.ACLMixin): """Handles quota retrieval requests.""" def __init__(self): LOG.debug('=== Creating QuotasController ===') super().__init__() self.quota_driver = quota.QuotaDriver() @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Quotas')) @controllers.enforce_rbac('quotas:get') def on_get(self, external_project_id, **kwargs): LOG.debug('=== QuotasController GET ===') # make sure project exists res.get_or_create_project(external_project_id) resp = self.quota_driver.get_quotas(external_project_id) return resp class ProjectQuotasController(controllers.ACLMixin): """Handles project quota requests.""" def __init__(self, project_id): LOG.debug('=== Creating ProjectQuotasController ===') super().__init__() self.passed_project_id = project_id self.validator = validators.ProjectQuotaValidator() self.quota_driver = quota.QuotaDriver() @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Project Quotas')) @controllers.enforce_rbac('project_quotas:get') def on_get(self, external_project_id, **kwargs): LOG.debug('=== ProjectQuotasController GET ===') resp = self.quota_driver.get_project_quotas(self.passed_project_id) if resp: return resp else: _project_quotas_not_found() @index.when(method='PUT', template='json') @controllers.handle_exceptions(u._('Project Quotas')) @controllers.enforce_rbac('project_quotas:put') @controllers.enforce_content_types(['application/json']) def on_put(self, external_project_id, **kwargs): LOG.debug('=== ProjectQuotasController PUT ===') if not pecan.request.body: raise exception.NoDataToProcess() api.load_body(pecan.request, validator=self.validator) self.quota_driver.set_project_quotas(self.passed_project_id, kwargs['project_quotas']) LOG.info('Put Project Quotas') pecan.response.status = 204 @index.when(method='DELETE', template='json') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Project Quotas')) @controllers.enforce_rbac('project_quotas:delete') def on_delete(self, external_project_id, **kwargs): LOG.debug('=== ProjectQuotasController DELETE ===') try: self.quota_driver.delete_project_quotas(self.passed_project_id) except exception.NotFound: LOG.info('Delete Project Quotas - Project not found') _project_quotas_not_found() else: LOG.info('Delete Project Quotas') pecan.response.status = 204 class ProjectsQuotasController(controllers.ACLMixin): """Handles projects quota retrieval requests.""" def __init__(self): LOG.debug('=== Creating ProjectsQuotaController ===') super().__init__() self.quota_driver = quota.QuotaDriver() @pecan.expose() def _lookup(self, project_id, *remainder): return ProjectQuotasController(project_id), remainder @pecan.expose(generic=True) def index(self, 
**kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Project Quotas')) @controllers.enforce_rbac('project_quotas:get') def on_get(self, external_project_id, **kwargs): resp = self.quota_driver.get_project_quotas_list( offset_arg=kwargs.get('offset', 0), limit_arg=kwargs.get('limit', None) ) return resp ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/barbican/api/controllers/secretmeta.py0000664000175000017500000001641100000000000022746 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import collections import pecan from barbican import api from barbican.api import controllers from barbican.common import hrefs from barbican.common import utils from barbican.common import validators from barbican import i18n as u from barbican.model import repositories as repo LOG = utils.getLogger(__name__) def _secret_metadata_not_found(): """Throw exception indicating secret metadata not found.""" pecan.abort(404, u._('Secret metadata not found.')) class SecretMetadataController(controllers.ACLMixin): """Handles SecretMetadata requests by a given secret id.""" def __init__(self, secret): LOG.debug('=== Creating SecretMetadataController ===') super().__init__() self.secret = secret self.secret_project_id = self.secret.project.external_id self.secret_repo = repo.get_secret_repository() self.user_meta_repo = repo.get_secret_user_meta_repository() self.metadata_validator = validators.NewSecretMetadataValidator() self.metadatum_validator = validators.NewSecretMetadatumValidator() @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Secret metadata retrieval')) @controllers.enforce_rbac('secret_meta:get') def on_get(self, external_project_id, **kwargs): """Handles retrieval of existing secret metadata requests.""" LOG.debug('Start secret metadata on_get ' 'for secret-ID %s:', self.secret.id) resp = self.user_meta_repo.get_metadata_for_secret(self.secret.id) pecan.response.status = 200 return {"metadata": resp} @index.when(method='PUT', template='json') @controllers.handle_exceptions(u._('Secret metadata creation')) @controllers.enforce_rbac('secret_meta:put') @controllers.enforce_content_types(['application/json']) def on_put(self, external_project_id, **kwargs): """Handles creation/update of secret metadata.""" data = api.load_body(pecan.request, validator=self.metadata_validator) LOG.debug('Start secret metadata on_put...%s', data) self.user_meta_repo.create_replace_user_metadata(self.secret.id, data) url = hrefs.convert_user_meta_to_href(self.secret.id) LOG.debug('URI to secret metadata is %s', url) pecan.response.status = 201 return {'metadata_ref': url} @index.when(method='POST', template='json') @controllers.handle_exceptions(u._('Secret metadatum creation')) 
@controllers.enforce_rbac('secret_meta:post') @controllers.enforce_content_types(['application/json']) def on_post(self, external_project_id, **kwargs): """Handles creation of secret metadatum.""" data = api.load_body(pecan.request, validator=self.metadatum_validator) key = data.get('key') value = data.get('value') metadata = self.user_meta_repo.get_metadata_for_secret(self.secret.id) if key in metadata: pecan.abort(409, u._('Conflict. Key in request is already in the ' 'secret metadata')) LOG.debug('Start secret metadatum on_post...%s', metadata) self.user_meta_repo.create_replace_user_metadatum(self.secret.id, key, value) url = hrefs.convert_user_meta_to_href(self.secret.id) LOG.debug('URI to secret metadata is %s', url) pecan.response.status = 201 pecan.response.headers['Location'] = url + '/' + key return {'key': key, 'value': value} class SecretMetadatumController(controllers.ACLMixin): def __init__(self, secret): LOG.debug('=== Creating SecretMetadatumController ===') super().__init__() self.user_meta_repo = repo.get_secret_user_meta_repository() self.secret = secret self.metadatum_validator = validators.NewSecretMetadatumValidator() @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Secret metadatum retrieval')) @controllers.enforce_rbac('secret_meta:get') def on_get(self, external_project_id, remainder, **kwargs): """Handles retrieval of existing secret metadatum.""" LOG.debug('Start secret metadatum on_get ' 'for secret-ID %s:', self.secret.id) metadata = self.user_meta_repo.get_metadata_for_secret(self.secret.id) if remainder in metadata: pecan.response.status = 200 pair = {'key': remainder, 'value': metadata[remainder]} return collections.OrderedDict(sorted(pair.items())) else: _secret_metadata_not_found() @index.when(method='PUT', template='json') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Secret metadatum update')) @controllers.enforce_rbac('secret_meta:put') @controllers.enforce_content_types(['application/json']) def on_put(self, external_project_id, remainder, **kwargs): """Handles update of existing secret metadatum.""" metadata = self.user_meta_repo.get_metadata_for_secret(self.secret.id) data = api.load_body(pecan.request, validator=self.metadatum_validator) key = data.get('key') value = data.get('value') if remainder not in metadata: _secret_metadata_not_found() elif remainder != key: msg = 'Key in request data does not match key in the ' 'request url.' 
pecan.abort(409, msg) else: LOG.debug('Start secret metadatum on_put...%s', metadata) self.user_meta_repo.create_replace_user_metadatum(self.secret.id, key, value) pecan.response.status = 200 pair = {'key': key, 'value': value} return collections.OrderedDict(sorted(pair.items())) @index.when(method='DELETE', template='json') @controllers.handle_exceptions(u._('Secret metadatum removal')) @controllers.enforce_rbac('secret_meta:delete') def on_delete(self, external_project_id, remainder, **kwargs): """Handles removal of existing secret metadatum.""" self.user_meta_repo.delete_metadatum(self.secret.id, remainder) msg = 'Deleted secret metadatum: %s for secret %s' % (remainder, self.secret.id) pecan.response.status = 204 LOG.info(msg) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/barbican/api/controllers/secrets.py0000664000175000017500000004530400000000000022265 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_utils import timeutils import pecan from urllib import parse from barbican import api from barbican.api import controllers from barbican.api.controllers import acls from barbican.api.controllers import consumers from barbican.api.controllers import secretmeta from barbican.api.controllers import versions from barbican.common import accept from barbican.common import exception from barbican.common import hrefs from barbican.common import quota from barbican.common import resources as res from barbican.common import utils from barbican.common import validators from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repo from barbican.plugin import resources as plugin from barbican.plugin import util as putil LOG = utils.getLogger(__name__) def _secret_not_found(): """Throw exception indicating secret not found.""" pecan.abort(404, u._('Secret not found.')) def _invalid_secret_id(): """Throw exception indicating secret id is invalid.""" pecan.abort(404, u._('Not Found. Provided secret id is invalid.')) def _secret_payload_not_found(): """Throw exception indicating secret's payload is not found.""" pecan.abort(404, u._('Not Found. Sorry but your secret has no payload.')) def _secret_already_has_data(): """Throw exception that the secret already has data.""" pecan.abort(409, u._("Secret already has data, cannot modify it.")) def _bad_query_string_parameters(): pecan.abort(400, u._("URI provided invalid query string parameters.")) def _request_has_twsk_but_no_transport_key_id(): """Throw exception for bad wrapping parameters. Throw exception if transport key wrapped session key has been provided, but the transport key id has not. 
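# ---------------------------------------------------------------------------
# Illustrative sketch of the user-metadata endpoints implemented by
# SecretMetadataController / SecretMetadatumController in secretmeta.py above.
# Not part of the Barbican source; the endpoint, token, and the secret UUID
# are placeholders.
import requests

BARBICAN = 'http://localhost:9311/v1'            # assumed service endpoint
HEADERS = {'X-Auth-Token': '<keystone-token>'}   # assumed credential
meta_url = f'{BARBICAN}/secrets/<secret-uuid>/metadata'

# Replace the whole metadata dictionary (PUT handled by on_put above).
requests.put(meta_url, json={'metadata': {'env': 'prod', 'team': 'infra'}},
             headers=HEADERS)

# Add a single metadatum; on_post above rejects keys that already exist (409).
requests.post(meta_url, json={'key': 'rotation', 'value': 'quarterly'},
              headers=HEADERS)

# Read everything back; on_get above wraps the result in a 'metadata' key.
print(requests.get(meta_url, headers=HEADERS).json()['metadata'])

# Update and delete one key via the metadatum sub-resource.
requests.put(f'{meta_url}/rotation',
             json={'key': 'rotation', 'value': 'monthly'}, headers=HEADERS)
requests.delete(f'{meta_url}/rotation', headers=HEADERS)
# ---------------------------------------------------------------------------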
""" pecan.abort(400, u._('Transport key wrapped session key has been ' 'provided to wrap secrets for retrieval, but the ' 'transport key id has not been provided.')) class SecretController(controllers.ACLMixin): """Handles Secret retrieval and deletion requests.""" def __init__(self, secret): LOG.debug('=== Creating SecretController ===') super().__init__() self.secret = secret self.consumers = consumers.SecretConsumersController(secret) self.consumer_repo = repo.get_secret_consumer_repository() self.transport_key_repo = repo.get_transport_key_repository() @pecan.expose() def _lookup(self, sub_resource, *remainder): if sub_resource == 'acl': return acls.SecretACLsController(self.secret), remainder elif sub_resource == 'metadata': if len(remainder) == 0 or remainder == ('',): return secretmeta.SecretMetadataController(self.secret), \ remainder else: request_method = pecan.request.method allowed_methods = ['GET', 'PUT', 'DELETE'] if request_method in allowed_methods: return secretmeta.SecretMetadatumController(self.secret), \ remainder else: # methods cannot be handled at controller level pecan.abort(405) else: # only 'acl' and 'metadata' as sub-resource is supported pecan.abort(404) @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Secret retrieval')) @controllers.enforce_rbac('secret:get') def on_get(self, external_project_id, **kwargs): if controllers.is_json_request_accept(pecan.request): resp = self._on_get_secret_metadata(self.secret, **kwargs) LOG.info('Retrieved secret metadata for project: %s', external_project_id) if versions.is_supported(pecan.request, max_version='1.0'): # NOTE(xek): consumers are being introduced in 1.1 del resp['consumers'] return resp else: LOG.warning('Decrypted secret %s requested using deprecated ' 'API call.', self.secret.id) return self._on_get_secret_payload(self.secret, external_project_id, **kwargs) def _on_get_secret_metadata(self, secret, **kwargs): """GET Metadata-only for a secret.""" pecan.override_template('json', 'application/json') secret_fields = putil.mime_types.augment_fields_with_content_types( secret) transport_key_id = self._get_transport_key_id_if_needed( kwargs.get('transport_key_needed'), secret) if transport_key_id: secret_fields['transport_key_id'] = transport_key_id return hrefs.convert_to_hrefs(secret_fields) def _get_transport_key_id_if_needed(self, transport_key_needed, secret): if transport_key_needed and transport_key_needed.lower() == 'true': return plugin.get_transport_key_id_for_retrieval(secret) return None def _on_get_secret_payload(self, secret, external_project_id, **kwargs): """GET actual payload containing the secret.""" # With ACL support, the user token project does not have to be same as # project associated with secret. The lookup project_id needs to be # derived from the secret's data considering authorization is already # done. 
external_project_id = secret.project.external_id project = res.get_or_create_project(external_project_id) # default to application/octet-stream if there is no Accept header if (type(pecan.request.accept) is accept.NoHeaderType or not pecan.request.accept.header_value): accept_header = 'application/octet-stream' else: accept_header = pecan.request.accept.header_value pecan.override_template('', accept_header) # check if payload exists before proceeding if not secret.encrypted_data and not secret.secret_store_metadata: _secret_payload_not_found() twsk = kwargs.get('trans_wrapped_session_key', None) transport_key = None if twsk: transport_key = self._get_transport_key( kwargs.get('transport_key_id', None)) return plugin.get_secret(accept_header, secret, project, twsk, transport_key) def _get_transport_key(self, transport_key_id): if transport_key_id is None: _request_has_twsk_but_no_transport_key_id() transport_key_model = self.transport_key_repo.get( entity_id=transport_key_id, suppress_exception=True) return transport_key_model.transport_key @pecan.expose() @utils.allow_all_content_types @controllers.handle_exceptions(u._('Secret payload retrieval')) @controllers.enforce_rbac('secret:decrypt') def payload(self, external_project_id, **kwargs): if pecan.request.method != 'GET': pecan.abort(405) resp = self._on_get_secret_payload(self.secret, external_project_id, **kwargs) LOG.info('Retrieved secret payload for project: %s', external_project_id) return resp @index.when(method='PUT') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Secret update')) @controllers.enforce_rbac('secret:put') @controllers.enforce_content_types(['application/octet-stream', 'text/plain']) def on_put(self, external_project_id, **kwargs): if (not pecan.request.content_type or pecan.request.content_type == 'application/json'): pecan.abort( 415, u._("Content-Type of '{content_type}' is not supported for " "PUT.").format(content_type=pecan.request.content_type) ) transport_key_id = kwargs.get('transport_key_id') payload = pecan.request.body if not payload: raise exception.NoDataToProcess() if validators.secret_too_big(payload): raise exception.LimitExceeded() if self.secret.encrypted_data or self.secret.secret_store_metadata: _secret_already_has_data() project_model = res.get_or_create_project(external_project_id) content_type = pecan.request.content_type content_encoding = pecan.request.headers.get('Content-Encoding') plugin.store_secret( unencrypted_raw=payload, content_type_raw=content_type, content_encoding=content_encoding, secret_model=self.secret, project_model=project_model, transport_key_id=transport_key_id) LOG.info('Updated secret for project: %s', external_project_id) @index.when(method='DELETE') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Secret deletion')) @controllers.enforce_rbac('secret:delete') def on_delete(self, external_project_id, **kwargs): secret_consumers = self.consumer_repo.get_by_secret_id( self.secret.id, suppress_exception=True ) # With ACL support, the user token project does not have to be same as # project associated with secret. The lookup project_id needs to be # derived from the secret's data considering authorization is already # done. 
external_project_id = self.secret.project.external_id plugin.delete_secret(self.secret, external_project_id) LOG.info('Deleted secret for project: %s', external_project_id) for consumer in secret_consumers[0]: try: self.consumer_repo.delete_entity_by_id( consumer.id, external_project_id) except exception.NotFound: # nosec pass class SecretsController(controllers.ACLMixin): """Handles Secret creation requests.""" def __init__(self): LOG.debug('Creating SecretsController') super().__init__() self.validator = validators.NewSecretValidator() self.secret_repo = repo.get_secret_repository() self.quota_enforcer = quota.QuotaEnforcer('secrets', self.secret_repo) def _is_valid_date_filter(self, date_filter): filters = date_filter.split(',') sorted_filters = dict() try: for filter in filters: if filter.startswith('gt:'): if sorted_filters.get('gt') or sorted_filters.get('gte'): return False sorted_filters['gt'] = timeutils.parse_isotime(filter[3:]) elif filter.startswith('gte:'): if sorted_filters.get('gt') or sorted_filters.get( 'gte') or sorted_filters.get('eq'): return False sorted_filters['gte'] = timeutils.parse_isotime(filter[4:]) elif filter.startswith('lt:'): if sorted_filters.get('lt') or sorted_filters.get('lte'): return False sorted_filters['lt'] = timeutils.parse_isotime(filter[3:]) elif filter.startswith('lte:'): if sorted_filters.get('lt') or sorted_filters.get( 'lte') or sorted_filters.get('eq'): return False sorted_filters['lte'] = timeutils.parse_isotime(filter[4:]) elif sorted_filters.get('eq') or sorted_filters.get( 'gte') or sorted_filters.get('lte'): return False else: sorted_filters['eq'] = timeutils.parse_isotime(filter) except ValueError: return False return True def _is_valid_sorting(self, sorting): allowed_keys = ['algorithm', 'bit_length', 'created', 'expiration', 'mode', 'name', 'secret_type', 'status', 'updated'] allowed_directions = ['asc', 'desc'] sorted_keys = dict() for sort in sorting.split(','): if ':' in sort: try: key, direction = sort.split(':') except ValueError: return False else: key, direction = sort, 'asc' if key not in allowed_keys or direction not in allowed_directions: return False if sorted_keys.get(key): return False else: sorted_keys[key] = direction return True @pecan.expose() def _lookup(self, secret_id, *remainder): # NOTE(jaosorior): It's worth noting that even though this section # actually does a lookup in the database regardless of the RBAC policy # check, the execution only gets here if authentication of the user was # previously successful. 
if not utils.validate_id_is_uuid(secret_id): _invalid_secret_id()() secret = self.secret_repo.get_secret_by_id( entity_id=secret_id, suppress_exception=True) if not secret: _secret_not_found() return SecretController(secret), remainder @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Secret(s) retrieval')) @controllers.enforce_rbac('secrets:get') def on_get(self, external_project_id, **kw): no_consumers = versions.is_supported(pecan.request, max_version='1.0') # NOTE(xek): consumers are being introduced in 1.1 def secret_fields(field): resp = putil.mime_types.augment_fields_with_content_types(field) if no_consumers: del resp['consumers'] return resp LOG.debug('Start secrets on_get ' 'for project-ID %s:', external_project_id) name = kw.get('name', '') if name: name = parse.unquote_plus(name) bits = kw.get('bits', 0) try: bits = int(bits) except ValueError: # as per Github issue 171, if bits is invalid then # the default should be used. bits = 0 for date_filter in 'created', 'updated', 'expiration': if kw.get(date_filter) and not self._is_valid_date_filter( kw.get(date_filter)): _bad_query_string_parameters() if kw.get('sort') and not self._is_valid_sorting(kw.get('sort')): _bad_query_string_parameters() ctxt = controllers._get_barbican_context(pecan.request) user_id = None if ctxt: user_id = ctxt.user_id result = self.secret_repo.get_secret_list( external_project_id, offset_arg=kw.get('offset', 0), limit_arg=kw.get('limit'), name=name, alg=kw.get('alg'), mode=kw.get('mode'), bits=bits, secret_type=kw.get('secret_type'), suppress_exception=True, acl_only=kw.get('acl_only'), user_id=user_id, created=kw.get('created'), updated=kw.get('updated'), expiration=kw.get('expiration'), sort=kw.get('sort') ) secrets, offset, limit, total = result if not secrets: secrets_resp_overall = {'secrets': [], 'total': total} else: secrets_resp = [ hrefs.convert_to_hrefs(secret_fields(s)) for s in secrets ] secrets_resp_overall = hrefs.add_nav_hrefs( 'secrets', offset, limit, total, {'secrets': secrets_resp} ) secrets_resp_overall.update({'total': total}) LOG.info('Retrieved secret list for project: %s', external_project_id) return secrets_resp_overall @index.when(method='POST', template='json') @controllers.handle_exceptions(u._('Secret creation')) @controllers.enforce_rbac('secrets:post') @controllers.enforce_content_types(['application/json']) def on_post(self, external_project_id, **kwargs): LOG.debug('Start on_post for project-ID %s:...', external_project_id) data = api.load_body(pecan.request, validator=self.validator) project = res.get_or_create_project(external_project_id) self.quota_enforcer.enforce(project) transport_key_needed = data.get('transport_key_needed', 'false').lower() == 'true' ctxt = controllers._get_barbican_context(pecan.request) if ctxt: # in authenticated pipleline case, always use auth token user data['creator_id'] = ctxt.user_id secret_model = models.Secret(data) new_secret, transport_key_model = plugin.store_secret( unencrypted_raw=data.get('payload'), content_type_raw=data.get('payload_content_type', 'application/octet-stream'), content_encoding=data.get('payload_content_encoding'), secret_model=secret_model, project_model=project, transport_key_needed=transport_key_needed, transport_key_id=data.get('transport_key_id')) url = hrefs.convert_secret_to_href(new_secret.id) LOG.debug('URI to secret is %s', url) pecan.response.status = 201 
pecan.response.headers['Location'] = url LOG.info('Created a secret for project: %s', external_project_id) if transport_key_model is not None: tkey_url = hrefs.convert_transport_key_to_href( transport_key_model.id) return {'secret_ref': url, 'transport_key_ref': tkey_url} else: return {'secret_ref': url} ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/barbican/api/controllers/secretstores.py0000664000175000017500000002010700000000000023334 0ustar00zuulzuul00000000000000# (c) Copyright 2015-2016 Hewlett Packard Enterprise Development LP # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pecan from barbican.api import controllers from barbican.common import hrefs from barbican.common import resources as res from barbican.common import utils from barbican import i18n as u from barbican.model import repositories as repo from barbican.plugin.util import multiple_backends LOG = utils.getLogger(__name__) def _secret_store_not_found(): """Throw exception indicating secret store not found.""" pecan.abort(404, u._('Not Found. Secret store not found.')) def _preferred_secret_store_not_found(): """Throw exception indicating preferred secret store not found.""" pecan.abort(404, u._('Not Found. No preferred secret store defined for ' 'this project.')) def _multiple_backends_not_enabled(): """Throw exception indicating multiple backends support is not enabled.""" pecan.abort(404, u._('Not Found. 
Multiple backends support is not enabled ' 'in service configuration.')) def convert_secret_store_to_response_format(secret_store): data = secret_store.to_dict_fields() data['secret_store_plugin'] = data.pop('store_plugin') data['secret_store_ref'] = hrefs.convert_secret_stores_to_href( data['secret_store_id']) # no need to pass store id as secret_store_ref is returned data.pop('secret_store_id', None) return data class PreferredSecretStoreController(controllers.ACLMixin): """Handles preferred secret store set/removal requests.""" def __init__(self, secret_store): LOG.debug('=== Creating PreferredSecretStoreController ===') super().__init__() self.secret_store = secret_store self.proj_store_repo = repo.get_project_secret_store_repository() @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='DELETE', template='json') @controllers.handle_exceptions(u._('Removing preferred secret store')) @controllers.enforce_rbac('secretstore_preferred:delete') def on_delete(self, external_project_id, **kw): LOG.debug('Start: Remove project preferred secret-store for store' ' id %s', self.secret_store.id) project = res.get_or_create_project(external_project_id) project_store = self.proj_store_repo.get_secret_store_for_project( project.id, None, suppress_exception=True) if project_store is None: _preferred_secret_store_not_found() self.proj_store_repo.delete_entity_by_id( entity_id=project_store.id, external_project_id=external_project_id) pecan.response.status = 204 @index.when(method='POST', template='json') @controllers.handle_exceptions(u._('Setting preferred secret store')) @controllers.enforce_rbac('secretstore_preferred:post') def on_post(self, external_project_id, **kwargs): LOG.debug('Start: Set project preferred secret-store for store ' 'id %s', self.secret_store.id) project = res.get_or_create_project(external_project_id) self.proj_store_repo.create_or_update_for_project(project.id, self.secret_store.id) pecan.response.status = 204 class SecretStoreController(controllers.ACLMixin): """Handles secret store retrieval requests.""" def __init__(self, secret_store): LOG.debug('=== Creating SecretStoreController ===') super().__init__() self.secret_store = secret_store @pecan.expose() def _lookup(self, action, *remainder): if (action == 'preferred'): return PreferredSecretStoreController(self.secret_store), remainder else: pecan.abort(405) @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Secret store retrieval')) @controllers.enforce_rbac('secretstore:get') def on_get(self, external_project_id): LOG.debug("== Getting secret store for %s", self.secret_store.id) return convert_secret_store_to_response_format(self.secret_store) class SecretStoresController(controllers.ACLMixin): """Handles secret-stores list requests.""" def __init__(self): LOG.debug('Creating SecretStoresController') super().__init__() self.secret_stores_repo = repo.get_secret_stores_repository() self.proj_store_repo = repo.get_project_secret_store_repository() def __getattr__(self, name): route_table = { 'global-default': self.get_global_default, 'preferred': self.get_preferred, } if name in route_table: return route_table[name] raise AttributeError @pecan.expose() def _lookup(self, secret_store_id, *remainder): if not utils.is_multiple_backends_enabled(): _multiple_backends_not_enabled() secret_store = 
self.secret_stores_repo.get(entity_id=secret_store_id, suppress_exception=True) if not secret_store: _secret_store_not_found() return SecretStoreController(secret_store), remainder @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('List available secret stores')) @controllers.enforce_rbac('secretstores:get') def on_get(self, external_project_id, **kw): LOG.debug('Start SecretStoresController on_get: listing secret ' 'stores') if not utils.is_multiple_backends_enabled(): _multiple_backends_not_enabled() res.get_or_create_project(external_project_id) secret_stores = self.secret_stores_repo.get_all() resp_list = [] for store in secret_stores: item = convert_secret_store_to_response_format(store) resp_list.append(item) resp = {'secret_stores': resp_list} return resp @pecan.expose(generic=True, template='json') @controllers.handle_exceptions(u._('Retrieve global default secret store')) @controllers.enforce_rbac('secretstores:get_global_default') def get_global_default(self, external_project_id, **kw): LOG.debug('Start secret-stores get global default secret store') if not utils.is_multiple_backends_enabled(): _multiple_backends_not_enabled() res.get_or_create_project(external_project_id) store = multiple_backends.get_global_default_secret_store() return convert_secret_store_to_response_format(store) @pecan.expose(generic=True, template='json') @controllers.handle_exceptions(u._('Retrieve project preferred store')) @controllers.enforce_rbac('secretstores:get_preferred') def get_preferred(self, external_project_id, **kw): LOG.debug('Start secret-stores get preferred secret store') if not utils.is_multiple_backends_enabled(): _multiple_backends_not_enabled() project = res.get_or_create_project(external_project_id) project_store = self.proj_store_repo.get_secret_store_for_project( project.id, None, suppress_exception=True) if project_store is None: _preferred_secret_store_not_found() return convert_secret_store_to_response_format( project_store.secret_store) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/barbican/api/controllers/transportkeys.py0000664000175000017500000001400500000000000023537 0ustar00zuulzuul00000000000000# Copyright (c) 2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pecan from urllib import parse from barbican import api from barbican.api import controllers from barbican.common import exception from barbican.common import hrefs from barbican.common import utils from barbican.common import validators from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repo LOG = utils.getLogger(__name__) def _transport_key_not_found(): """Throw exception indicating transport key not found.""" pecan.abort(404, u._('Not Found. 
Transport Key not found.')) def _invalid_transport_key_id(): """Throw exception indicating transport key id is invalid.""" pecan.abort(404, u._('Not Found. Provided transport key id is invalid.')) class TransportKeyController(controllers.ACLMixin): """Handles transport key retrieval requests.""" def __init__(self, transport_key_id, transport_key_repo=None): LOG.debug('=== Creating TransportKeyController ===') super().__init__() self.transport_key_id = transport_key_id self.repo = transport_key_repo or repo.TransportKeyRepo() @pecan.expose(generic=True) def index(self, external_project_id, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET') @controllers.handle_exceptions(u._('Transport Key retrieval')) @controllers.enforce_rbac('transport_key:get') def on_get(self, external_project_id): LOG.debug("== Getting transport key for %s", external_project_id) transport_key = self.repo.get(entity_id=self.transport_key_id) if not transport_key: _transport_key_not_found() pecan.override_template('json', 'application/json') return transport_key @index.when(method='DELETE') @controllers.handle_exceptions(u._('Transport Key deletion')) @controllers.enforce_rbac('transport_key:delete') def on_delete(self, external_project_id, **kwargs): LOG.debug("== Deleting transport key ===") try: self.repo.delete_entity_by_id( entity_id=self.transport_key_id, external_project_id=external_project_id) # TODO(alee) response should be 204 on success # pecan.response.status = 204 except exception.NotFound: LOG.exception('Problem deleting transport_key') _transport_key_not_found() class TransportKeysController(controllers.ACLMixin): """Handles transport key list requests.""" def __init__(self, transport_key_repo=None): LOG.debug('Creating TransportKeyController') super().__init__() self.repo = transport_key_repo or repo.TransportKeyRepo() self.validator = validators.NewTransportKeyValidator() @pecan.expose() def _lookup(self, transport_key_id, *remainder): if not utils.validate_id_is_uuid(transport_key_id): _invalid_transport_key_id() return TransportKeyController(transport_key_id, self.repo), remainder @pecan.expose(generic=True) def index(self, external_project_id, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Transport Key(s) retrieval')) @controllers.enforce_rbac('transport_keys:get') def on_get(self, external_project_id, **kw): LOG.debug('Start transport_keys on_get') plugin_name = kw.get('plugin_name', None) if plugin_name is not None: plugin_name = parse.unquote_plus(plugin_name) result = self.repo.get_by_create_date( plugin_name=plugin_name, offset_arg=kw.get('offset', 0), limit_arg=kw.get('limit', None), suppress_exception=True ) transport_keys, offset, limit, total = result if not transport_keys: transport_keys_resp_overall = {'transport_keys': [], 'total': total} else: transport_keys_resp = [ hrefs.convert_transport_key_to_href(s.id) for s in transport_keys ] transport_keys_resp_overall = hrefs.add_nav_hrefs( 'transport_keys', offset, limit, total, {'transport_keys': transport_keys_resp} ) transport_keys_resp_overall.update({'total': total}) return transport_keys_resp_overall @index.when(method='POST', template='json') @controllers.handle_exceptions(u._('Transport Key Creation')) @controllers.enforce_rbac('transport_keys:post') @controllers.enforce_content_types(['application/json']) def on_post(self, external_project_id, **kwargs): LOG.debug('Start transport_keys 
on_post') # TODO(alee) POST should determine the plugin name and call the # relevant get_transport_key() call. We will implement this once # we figure out how the plugins will be enumerated. data = api.load_body(pecan.request, validator=self.validator) new_key = models.TransportKey(data.get('plugin_name'), data.get('transport_key')) self.repo.create_from(new_key) url = hrefs.convert_transport_key_to_href(new_key.id) LOG.debug('URI to transport key is %s', url) pecan.response.status = 201 pecan.response.headers['Location'] = url return {'transport_key_ref': url} ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/barbican/api/controllers/versions.py0000664000175000017500000001752000000000000022464 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pecan from urllib import parse from barbican.api import controllers from barbican.api.controllers import containers from barbican.api.controllers import orders from barbican.api.controllers import quotas from barbican.api.controllers import secrets from barbican.api.controllers import secretstores from barbican.api.controllers import transportkeys from barbican.common import utils from barbican import i18n as u from barbican import version LOG = utils.getLogger(__name__) _MIN_MICROVERSION = 0 _MAX_MICROVERSION = 1 _LAST_UPDATED = '2021-02-10T00:00:00Z' # NOTE(xek): The above defines the minimum and maximum version of the API # across all of the v1 REST API. # When introducing a new microversion, the _MAX_MICROVERSION # needs to be incremented by 1 and the _LAST_UPDATED string updated. # Additionally, the new microversion has to be documented in # doc/source/api/microversion_history.rst # # The following is the complete (ordered) list of supported versions # used by the microversion middleware to parse what is allowed and # supported. VERSIONS = ['1.{}'.format(v) for v in range(_MIN_MICROVERSION, _MAX_MICROVERSION + 1)] MIN_API_VERSION = VERSIONS[0] MAX_API_VERSION = VERSIONS[-1] MIME_TYPE_JSON = 'application/json' MIME_TYPE_JSON_HOME = 'application/json-home' MEDIA_TYPE_JSON = 'application/vnd.openstack.key-manager-%s+json' def is_supported(req, min_version=MIN_API_VERSION, max_version=MAX_API_VERSION): """Check if API request version satisfies version restrictions. :param req: request object :param min_version: minimal version of API needed for correct request processing :param max_version: maximum version of API needed for correct request processing :returns: True if request satisfies minimal and maximum API version requirements. False in other case. 
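# ---------------------------------------------------------------------------
# Stand-alone sketch of the version-window check that is_supported() performs:
# a requested microversion is accepted only if it falls between min_version
# and max_version in the ordered VERSIONS list. Everything below is local to
# the example (the real function reads the version from req.environ).
_VERSIONS = ['1.0', '1.1']

def _in_window(requested, min_version='1.0', max_version='1.1'):
    return (_VERSIONS.index(max_version)
            >= _VERSIONS.index(requested)
            >= _VERSIONS.index(min_version))

# 1.1 requests see the 'consumers' field on secrets; 1.0 requests do not,
# which is why several handlers above delete it when max_version='1.0' holds.
assert _in_window('1.1')
assert not _in_window('1.1', max_version='1.0')
assert _in_window('1.0', max_version='1.0')
# ---------------------------------------------------------------------------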
""" requested_version = str(req.environ.get('key-manager.microversion', MIN_API_VERSION)) return (VERSIONS.index(max_version) >= VERSIONS.index(requested_version) >= VERSIONS.index(min_version)) def _version_not_found(): """Throw exception indicating version not found.""" pecan.abort(404, u._("The version you requested wasn't found")) def _get_versioned_url(version): if version[-1] != '/': version += '/' # If host_href is not set in barbican conf, then derive it from request url host_part = utils.get_base_url_from_request() if host_part[-1] != '/': host_part += '/' return parse.urljoin(host_part, version) class BaseVersionController(object): """Base class for the version-specific controllers""" @classmethod def get_version_info(cls, microversion_spec=True): version = { 'id': cls.version_id, 'status': 'CURRENT', 'min_version': cls.min_version, 'max_version': cls.version, 'links': [ { 'rel': 'self', 'href': _get_versioned_url(cls.version_string), }, { 'rel': 'describedby', 'type': 'text/html', 'href': 'https://docs.openstack.org/' } ], } if not microversion_spec: version.pop('min_version') version.pop('max_version') version['status'] = 'stable' version['updated']: cls.last_updated version['media-types'] = [ { 'base': MIME_TYPE_JSON, 'type': MEDIA_TYPE_JSON % cls.version_string } ] return version class V1Controller(BaseVersionController): """Root controller for the v1 API""" version_string = 'v1' # NOTE(jaosorior): We might start using decimals in the future, meanwhile # this is the same as the version string. version_id = 'v1' version = MAX_API_VERSION min_version = MIN_API_VERSION last_updated = _LAST_UPDATED def __init__(self): LOG.debug('=== Creating V1Controller ===') self.secrets = secrets.SecretsController() self.orders = orders.OrdersController() self.containers = containers.ContainersController() self.transport_keys = transportkeys.TransportKeysController() self.quotas = quotas.QuotasController() setattr(self, 'project-quotas', quotas.ProjectsQuotasController()) setattr(self, 'secret-stores', secretstores.SecretStoresController()) @pecan.expose(generic=True) def index(self): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @utils.allow_certain_content_types(MIME_TYPE_JSON, MIME_TYPE_JSON_HOME) @controllers.handle_exceptions(u._('Version retrieval')) def on_get(self): pecan.core.override_template('json') if is_supported(pecan.request, max_version='1.0'): return {'version': self.get_version_info(microversion_spec=False)} else: return {'version': self.get_version_info()} AVAILABLE_VERSIONS = { V1Controller.version_string: V1Controller, } DEFAULT_VERSION = V1Controller.version_string class VersionsController(object): def __init__(self): LOG.debug('=== Creating VersionsController ===') @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @utils.allow_certain_content_types(MIME_TYPE_JSON, MIME_TYPE_JSON_HOME) def on_get(self, **kwargs): """The list of versions is dependent on the context.""" self._redirect_to_default_json_home_if_needed(pecan.request) if 'build' in kwargs: return {'build': version.__version__} if is_supported(pecan.request, max_version='1.0'): resp = { 'versions': { 'values': [ V1Controller.get_version_info(microversion_spec=False) ] } } else: resp = { 'versions': [ version_cls.get_version_info() for version_cls in AVAILABLE_VERSIONS.values()] } # Since we are returning all the versions available, the proper status # 
code is Multiple Choices (300) pecan.response.status = 300 return resp def _redirect_to_default_json_home_if_needed(self, request): if self._mime_best_match(request.accept) == MIME_TYPE_JSON_HOME: url = _get_versioned_url(DEFAULT_VERSION) LOG.debug("Redirecting Request to " + url) # NOTE(jaosorior): This issues an "external" redirect because of # two reasons: # * This module doesn't require authorization, and accessing # specific version info needs that. # * The resource is a separate app_factory and won't be found # internally pecan.redirect(url, request=request) def _mime_best_match(self, accept): if not accept: return MIME_TYPE_JSON SUPPORTED_TYPES = [MIME_TYPE_JSON, MIME_TYPE_JSON_HOME] return accept.best_match(SUPPORTED_TYPES) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/barbican/api/hooks.py0000664000175000017500000000335200000000000017367 0ustar00zuulzuul00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import pecan import webob from oslo_serialization import jsonutils as json try: import newrelic.agent newrelic_loaded = True except ImportError: newrelic_loaded = False from barbican.model import repositories class JSONErrorHook(pecan.hooks.PecanHook): def on_error(self, state, exc): if isinstance(exc, webob.exc.HTTPError): exc.body = json.dump_as_bytes({ 'code': exc.status_int, 'title': exc.title, 'description': exc.detail }) state.response.content_type = "application/json" return exc.body class BarbicanTransactionHook(pecan.hooks.TransactionHook): """Custom hook for Barbican transactions.""" def __init__(self): super(BarbicanTransactionHook, self).__init__( start=repositories.start, start_ro=repositories.start_read_only, commit=repositories.commit, rollback=repositories.rollback, clear=repositories.clear ) class NewRelicHook(pecan.hooks.PecanHook): def on_error(self, state, exc): if newrelic_loaded: newrelic.agent.record_exception() ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1743590729.1290293 barbican-20.0.0/barbican/api/middleware/0000775000175000017500000000000000000000000020004 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/barbican/api/middleware/__init__.py0000664000175000017500000000564200000000000022124 0ustar00zuulzuul00000000000000# Copyright (c) 2013-2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. 
# See the License for the specific language governing permissions and # limitations under the License. """ Barbican middleware modules. """ import sys import webob.dec from barbican.common import utils LOG = utils.getLogger(__name__) class Middleware(object): """Base WSGI middleware wrapper These classes require an application to be initialized that will be called next. By default the middleware will simply call its wrapped app, or you can override __call__ to customize its behavior. """ def __init__(self, application): self.application = application @classmethod def factory(cls, global_conf, **local_conf): def filter(app): return cls(app) return filter def process_request(self, req): """Called on each request. If this returns None, the next application down the stack will be executed. If it returns a response then that response will be returned and execution will stop here. """ return None def process_response(self, response): """Do whatever you'd like to the response.""" return response @webob.dec.wsgify def __call__(self, req): response = self.process_request(req) if response: return response response = req.get_response(self.application) response.request = req return self.process_response(response) # Brought over from an OpenStack project class Debug(Middleware): """Debug helper class This class can be inserted into any WSGI application chain to get information about the request and response. """ @webob.dec.wsgify def __call__(self, req): LOG.debug(("*" * 40) + " REQUEST ENVIRON") for key, value in req.environ.items(): LOG.debug('%s=%s', key, value) LOG.debug(' ') resp = req.get_response(self.application) LOG.debug(("*" * 40) + " RESPONSE HEADERS") for (key, value) in resp.headers.items(): LOG.debug('%s=%s', key, value) LOG.debug(' ') resp.app_iter = self.print_generator(resp.app_iter) return resp @staticmethod def print_generator(app_iter): """Iterator that prints the contents of a wrapper string iterator.""" LOG.debug(("*" * 40) + " BODY") for part in app_iter: sys.stdout.write(part) sys.stdout.flush() yield part LOG.debug(' ') ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/barbican/api/middleware/context.py0000664000175000017500000001166000000000000022046 0ustar00zuulzuul00000000000000# Copyright 2011-2012 OpenStack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
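# NOTE: The middleware classes in this module subclass the generic Middleware wrapper from barbican.api.middleware (imported below as mw). A minimal sketch of such a subclass, for illustration only (ExampleFilter and the 'X-Example-Seen' header are assumed names, not part of Barbican): # class ExampleFilter(mw.Middleware): # def process_request(self, req): # return None  # None hands the request on to the wrapped application # def process_response(self, response): # response.headers['X-Example-Seen'] = 'true' # return response 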
import webob.exc from barbican.api import middleware as mw from barbican.common import config from barbican.common import utils import barbican.context from barbican import i18n as u LOG = utils.getLogger(__name__) CONF = config.CONF class BaseContextMiddleware(mw.Middleware): def process_request(self, req): request_id = req.headers.get('x-openstack-request-id') if not request_id: request_id = 'req-' + utils.generate_uuid() setattr(req, 'request_id', request_id) LOG.info('Begin processing request %(request_id)s', {'request_id': request_id}) def process_response(self, resp): resp.headers['x-openstack-request-id'] = resp.request.request_id LOG.info('Processed request: %(status)s - %(method)s %(url)s', {"status": resp.status, "method": resp.request.method, "url": resp.request.url}) return resp class ContextMiddleware(BaseContextMiddleware): def __init__(self, app): super(ContextMiddleware, self).__init__(app) def process_request(self, req): """Convert authentication information into a request context Generate a barbican.context.RequestContext object from the available authentication headers and store on the 'context' attribute of the req object. :param req: wsgi request object that will be given the context object :raises webob.exc.HTTPUnauthorized: when value of the X-Identity-Status header is not 'Confirmed' and anonymous access is disallowed """ super(ContextMiddleware, self).process_request(req) if req.headers.get('X-Identity-Status') == 'Confirmed': req.context = self._get_authenticated_context(req) elif CONF.allow_anonymous_access: req.context = self._get_anonymous_context() LOG.debug("==== Inserted barbican unauth " "request context: %s ====", req.context.to_dict()) else: raise webob.exc.HTTPUnauthorized() # Ensure that down wind mw.Middleware/app can see this context. 
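# As an illustrative sketch only (the names below are assumed, not part of this module), a downstream hook or controller could read the context back out of the WSGI environ: # ctx = req.environ.get('barbican.context') # if ctx is not None: # LOG.debug('Request scoped to project %s', ctx.project_id) 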
req.environ['barbican.context'] = req.context def _get_anonymous_context(self): kwargs = { 'user_id': None, 'tenant': None, 'is_admin': False, 'read_only': True, } return barbican.context.RequestContext(**kwargs) def _get_authenticated_context(self, req): ctx = barbican.context.RequestContext.from_environ(req.environ) if ctx.project_id is None: LOG.debug("X_PROJECT_ID not found in request") raise webob.exc.HTTPUnauthorized() ctx.is_admin = CONF.admin_role.strip().lower() in ctx.roles return ctx class UnauthenticatedContextMiddleware(BaseContextMiddleware): def _get_project_id_from_header(self, req): project_id = req.headers.get('X-Project-Id') if not project_id: accept_header = req.headers.get('Accept') if not accept_header: req.headers['Accept'] = 'text/plain' raise webob.exc.HTTPBadRequest(detail=u._('Missing X-Project-Id')) return project_id def process_request(self, req): """Create a context without an authorized user.""" super(UnauthenticatedContextMiddleware, self).process_request(req) project_id = self._get_project_id_from_header(req) config_admin_role = CONF.admin_role.strip().lower() roles_header = req.headers.get('X-Roles', '') roles = [r.strip().lower() for r in roles_header.split(',') if r] # If a role wasn't specified we default to admin if not roles: roles = [config_admin_role] kwargs = { 'user_id': req.headers.get('X-User-Id'), 'domain': req.headers.get('X-Domain-Id'), 'user_domain': req.headers.get('X-User-Domain-Id'), 'project_domain': req.headers.get('X-Project-Domain-Id'), 'project_id': project_id, 'roles': roles, 'is_admin': config_admin_role in roles, 'request_id': req.request_id } context = barbican.context.RequestContext(**kwargs) req.environ['barbican.context'] = context ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/barbican/api/middleware/microversion.py0000664000175000017500000000225500000000000023101 0ustar00zuulzuul00000000000000# Copyright 2011 OpenStack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Middleware that parses the requested API microversion from the request headers and stores it in the WSGI environ (as 'key-manager.microversion') for use by later version checks. """ from microversion_parse import middleware as microversion_middleware from barbican.api.controllers import versions from barbican.api import middleware from barbican.common import utils LOG = utils.getLogger(__name__) class MicroversionMiddleware( microversion_middleware.MicroversionMiddleware, middleware.Middleware): def __init__(self, app): super(MicroversionMiddleware, self).__init__( app, 'key-manager', versions.VERSIONS) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/barbican/api/middleware/simple.py0000664000175000017500000000211200000000000021643 0ustar00zuulzuul00000000000000# Copyright 2011 OpenStack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ A filter middleware that just outputs to logs, for instructive/sample purposes only. """ from barbican.api import middleware from barbican.common import utils LOG = utils.getLogger(__name__) class SimpleFilter(middleware.Middleware): def __init__(self, app): super(SimpleFilter, self).__init__(app) def process_request(self, req): """Just announce we have been called.""" LOG.debug("Calling SimpleFilter") return None ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1743590729.1330292 barbican-20.0.0/barbican/cmd/0000775000175000017500000000000000000000000015661 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/barbican/cmd/__init__.py0000664000175000017500000000000000000000000017760 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1743590688.0 barbican-20.0.0/barbican/cmd/barbican_manage.py0000664000175000017500000004626500000000000021321 0ustar00zuulzuul00000000000000#!/usr/bin/env python3 # Copyright 2010-2015 OpenStack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ CLI interface for barbican management """ import argparse import sys from oslo_config import cfg from oslo_log import log as logging from barbican.cmd import pkcs11_kek_rewrap as pkcs11_rewrap from barbican.cmd import simple_crypto_kek_rewrap from barbican.common import config from barbican.model import clean from barbican.model.migration import commands from barbican.model import sync from barbican.plugin.crypto import p11_crypto from barbican.plugin.crypto import pkcs11 import barbican.version CONF = cfg.CONF LOG = logging.getLogger(__name__) # Decorators for actions def args(*args, **kwargs): def _decorator(func): func.__dict__.setdefault('args', []).insert(0, (args, kwargs)) return func return _decorator class DbCommands(object): """Class for managing barbican database""" description = "Subcommands for managing barbican database" clean_description = "Clean up soft deletions in the database" @args('--db-url', '-d', metavar='', dest='dburl', help='barbican database URL') @args('--min-days', '-m', metavar='', dest='min_days', type=int, default=90, help='minimum number of days to keep soft deletions. ' 'default is %(default)s days.') @args('--verbose', '-V', action='store_true', dest='verbose', default=False, help='Show verbose information about the clean up.') @args('--log-file', '-L', metavar='', type=str, default=None, dest='log_file', help='Set log file location. 
' 'Default value for log_file can be found in barbican.conf') @args('--clean-unassociated-projects', '-p', action='store_true', dest='do_clean_unassociated_projects', default=False, help='Remove projects that have no ' 'associated resources.') @args('--soft-delete-expired-secrets', '-e', action='store_true', dest='do_soft_delete_expired_secrets', default=False, help='Soft delete secrets that are expired.') def clean(self, conf, dburl=None, min_days=None, verbose=None, log_file=None, do_clean_unassociated_projects=None, do_soft_delete_expired_secrets=None): """Clean soft deletions in the database""" if dburl is None: dburl = CONF.database.connection if log_file is None: log_file = CONF.log_file clean.clean_command( sql_url=dburl, min_num_days=min_days, do_clean_unassociated_projects=do_clean_unassociated_projects, do_soft_delete_expired_secrets=do_soft_delete_expired_secrets, verbose=verbose, log_file=log_file) revision_description = "Create a new database version file" @args('--db-url', '-d', metavar='', dest='dburl', help='barbican database URL') @args('--message', '-m', metavar='', default='DB change', help='the message for the DB change') @args('--autogenerate', action="store_true", dest='autogen', default=False, help='autogenerate from models') def revision(self, conf, dburl=None, message=None, autogen=None): """Process the 'revision' Alembic command.""" if dburl is None: commands.generate(autogenerate=autogen, message=str(message), sql_url=CONF.database.connection) else: commands.generate(autogenerate=autogen, message=str(message), sql_url=str(dburl)) upgrade_description = "Upgrade to a future database version" @args('--db-url', '-d', metavar='', dest='dburl', help='barbican database URL') @args('--version', '-v', metavar='', default='head', help='the version to upgrade to, or else ' 'the latest/head if not specified.') def upgrade(self, conf, dburl=None, version=None): """Process the 'upgrade' Alembic command.""" if dburl is None: commands.upgrade(to_version=str(version), sql_url=CONF.database.connection) else: commands.upgrade(to_version=str(version), sql_url=str(dburl)) history_description = "Show database changeset history" @args('--db-url', '-d', metavar='', dest='dburl', help='barbican database URL') @args('--verbose', '-V', action='store_true', dest='verbose', default=False, help='Show full information about the revisions.') def history(self, conf, dburl=None, verbose=None): if dburl is None: commands.history(verbose, sql_url=CONF.database.connection) else: commands.history(verbose, sql_url=str(dburl)) current_description = "Show current revision of database" @args('--db-url', '-d', metavar='', dest='dburl', help='barbican database URL') @args('--verbose', '-V', action='store_true', dest='verbose', default=False, help='Show full information about the revisions.') def current(self, conf, dburl=None, verbose=None): if dburl is None: commands.current(verbose, sql_url=CONF.database.connection) else: commands.current(verbose, sql_url=str(dburl)) sync_secret_stores_description = ("Sync secret_stores with " # nosec "barbican.conf") @args('--db-url', '-d', metavar='', dest='dburl', help='barbican database URL') @args('--verbose', '-V', action='store_true', dest='verbose', default=False, help='Show verbose information about the clean up.') @args('--log-file', '-L', metavar='', type=str, default=None, dest='log_file', help='Set log file location. 
' 'Default value for log_file can be found in barbican.conf') def sync_secret_stores(self, conf, dburl=None, verbose=None, log_file=None): """Sync secret_stores table with barbican.conf""" if dburl is None: dburl = CONF.database.connection if log_file is None: log_file = CONF.log_file sync.sync_secret_stores( sql_url=dburl, verbose=verbose, log_file=log_file) class HSMCommands(object): """Class for managing HSM/pkcs11 plugin""" _CKK_AES = 'CKK_AES' description = "Subcommands for managing HSM/PKCS11" check_mkek_description = "Checks if a MKEK label is available" @args('--library-path', metavar='', dest='libpath', help='Path to vendor PKCS#11 library') @args('--slot-id', metavar='', dest='slotid', help='HSM Slot ID containing Token to be used.') @args('--passphrase', metavar='', help='Password (PIN) to login to PKCS#11 Token') @args('--label', '-L', metavar='