././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1645118009.859518 oslo.policy-3.11.0/0000775000175000017500000000000000000000000014071 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/.coveragerc0000664000175000017500000000021100000000000016204 0ustar00zuulzuul00000000000000[run] branch = True source = oslo_policy omit = oslo_policy/tests/*,oslo_policy/openstack/* [report] ignore_errors = True precision = 2 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/.mailmap0000664000175000017500000000013100000000000015505 0ustar00zuulzuul00000000000000# Format is: # # ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/.pre-commit-config.yaml0000664000175000017500000000252300000000000020354 0ustar00zuulzuul00000000000000# We from the Oslo project decided to pin repos based on the # commit hash instead of the version tag to prevend arbitrary # code from running in developer's machines. To update to a # newer version, run `pre-commit autoupdate` and then replace # the newer versions with their commit hash. default_language_version: python: python3 repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: 9136088a246768144165fcc3ecc3d31bb686920a # v3.3.0 hooks: - id: trailing-whitespace # Replaces or checks mixed line ending - id: mixed-line-ending args: ['--fix', 'lf'] exclude: '.*\.(svg)$' # Forbid files which have a UTF-8 byte-order marker - id: check-byte-order-marker # Checks that non-binary executables have a proper shebang - id: check-executables-have-shebangs # Check for files that contain merge conflict strings. - id: check-merge-conflict # Check for debugger imports and py37+ breakpoint() # calls in python source - id: debug-statements - id: check-yaml files: .*\.(yaml|yml)$ - repo: local hooks: - id: flake8 name: flake8 additional_dependencies: - hacking>=3.2.0,<3.3.0 language: python entry: flake8 files: '^.*\.py$' exclude: '^(doc|releasenotes|tools)/.*$' ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/.stestr.conf0000664000175000017500000000006400000000000016342 0ustar00zuulzuul00000000000000[DEFAULT] test_path=./oslo_policy/tests top_path=./ ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/.zuul.yaml0000664000175000017500000000254600000000000016041 0ustar00zuulzuul00000000000000- job: name: cross-nova-tox-functional parent: openstack-tox description: | Run cross-project functional tests on nova. vars: zuul_work_dir: src/opendev.org/openstack/nova tox_envlist: functional required-projects: - openstack/nova - openstack/oslo.policy - job: name: cross-nova-tox-py38 parent: openstack-tox description: | Run cross-project unit tests on nova. vars: zuul_work_dir: src/opendev.org/openstack/nova tox_envlist: py38 required-projects: - openstack/nova - openstack/oslo.policy - job: name: cross-neutron-tox-py38 parent: openstack-tox description: | Run cross-project unit tests on neutron. 
timeout: 3600 vars: zuul_work_dir: src/opendev.org/openstack/neutron tox_envlist: py38 required-projects: - openstack/neutron - openstack/oslo.policy - project: templates: - check-requirements - lib-forward-testing-python3 - openstack-python3-yoga-jobs - periodic-stable-jobs - publish-openstack-docs-pti - release-notes-jobs-python3 check: jobs: - cross-nova-tox-py38 - cross-nova-tox-functional - cross-neutron-tox-py38 gate: jobs: - cross-nova-tox-py38 - cross-nova-tox-functional - cross-neutron-tox-py38 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645118009.0 oslo.policy-3.11.0/AUTHORS0000664000175000017500000001225600000000000015147 0ustar00zuulzuul00000000000000Adam Young Adrian Vladu Akihiro Motoki Akira Yoshiyama Alexander Gordeev Andreas Jaeger Andreas Jaeger Andrew Bogott Andrew Laski Ann Kamyshnikova Anthony Washington Anusha Unnam Arata Notsu Arthur Dayne Arvind Tiwari Ben Nemec Brant Knudson Brian Rosmaita BubaVV Chang Bo Guo ChangBo Guo(gcb) Chuck Short Colleen Murphy Corey Bryant Corey Wright Cyril Roelandt Daisuke Fujita Daniel Bengtsson Davanum Srinivas David Stanek Dina Belova Dmitrii Shcherbakov Doug Hellmann Doug Hellmann Douglas Mendizábal Edan David Eric Brown Eric Windisch Flaper Fesp Flavio Percoco Florent Flament Gary Kotton Ghanshyam Mann Haiwei Xu He Jie Xu He Jie Xu Hervé Beraud Ian Cordasco Ian Cordasco Ian Wienand Ihar Hrachyshka Jamie Lennox Jason Kölker Javeme Jay Pipes Joe Gordon John Dennis Joshua Harlow Juan Antonio Osorio Robles Julien Danjou Kamil Rykowski Kenneth Giusti Kevin L. Mitchell Kirill Bespalov Kseniya Tychkova Lance Bragstad Luong Anh Tuan Mark McClain Mark McLoughlin Maruti Mateusz Kowalski Michael Beaver Michael Johnson Michael McCune Mitya_Eremeev Moisés Guimarães de Medeiros Monty Taylor Nathan Kinder OpenStack Release Bot Pierre Riteau Qi Zhang Raildo Mascena Rodrigo Duarte Sousa Ronald Bradford Sami Makki Sean Dague Sean McGinnis Sean McGinnis Sergey Kraynev Sergey Lukjanov Sergey Nikitin ShaoHe Feng Shuangtai Tian Slawek Kaplonski Stephen Finucane Steve Martinelli Sujitha Takashi NATSUME Thiago Paiva Thomas Duval Tim Goddard Timothy Symanczyk TommyLike Tony Breeds Tony Xu Victor Sergeyev Vishvananda Ishaya Wei Li Xu Ao Yasufumi Ogawa Zane Bitter Zhi Yan Liu ZhijunWei ZhongShengping Zhongyue Luo caoyuan dengzhaosen fujioka yuuichi gecong1973 gengchc2 guohliu haixin howardlee jacky06 jessegler likui loooosy melissaml mitya-eremeev-2 pengyuesheng ricolin sonu.kumar vponomaryov wangqi wangxiyuan xuanyandong yangyawei yatinkarel zhangbailin zhangyanxian zhaoleilc <15247232416@163.com> zhoulinhui ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/CONTRIBUTING.rst0000664000175000017500000000134600000000000016536 0ustar00zuulzuul00000000000000If you would like to contribute to the development of oslo's libraries, first you must take a look at this page: https://specs.openstack.org/openstack/oslo-specs/specs/policy/contributing.html If you would like to contribute to the development of OpenStack, you must follow the steps on this page: https://docs.openstack.org/infra/manual/developers.html Once those steps have been completed, changes to OpenStack should be submitted for review via the Gerrit tool, following the workflow documented at: https://docs.openstack.org/infra/manual/developers.html#development-workflow Pull requests submitted through GitHub will be ignored. 
Bugs should be filed on Launchpad, not GitHub: https://bugs.launchpad.net/oslo.policy ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645118009.0 oslo.policy-3.11.0/ChangeLog0000664000175000017500000005450400000000000015653 0ustar00zuulzuul00000000000000CHANGES ======= 3.11.0 ------ * Expand set\_defaults() to set other config default value * Fix formatting of release list * Update python testing classifier 3.10.1 ------ * Enforce scope check always when rule has scope\_types set * Increase timeout of the cross-neutron-tox-py38 job 3.10.0 ------ * Don't reset rules without overwriting * Rules in policy directory files can be deleted * Refactor scope enforcement in the Enforcer class * Add scope\_types attribute to the BaseCheck class 3.9.0 ----- * Add Python3 yoga unit tests * Update master for stable/xena * Clarify enforce\_new\_defaults help text * Map system\_scope in creds dictionary 3.8.2 ----- * Fix a typo in the document * Changed minversion in tox to 3.18.0 3.8.1 ----- * setup.cfg: Replace dashes with underscores * Replace getargspec with getfullargspec * Ussuri+ is python3 only and update python to python3 3.8.0 ----- * Dropping lower constraints testing * Add Python3 xena unit tests * Update master for stable/wallaby * Move flake8 as a pre-commit local target * Reinstate double deprecation test logic 3.7.0 ----- * trivial: Tweak docstrings * Make 'Rule' attributes read-only * Don't modify 'Rule.check' * Improving documentation about target resources * Adding tests on cache handler * Correctly handle IO errors at policy file load * Add unit tests on cache handler * Add nova/neutron project unit/functional tests job in gate 3.6.2 ----- * Work on copy of registered rule instead of original object 3.6.1 ----- * Handle deprecated rule only once * Switch to collections.abc.MutableMapping * Add debug log in pick\_default\_policy\_file * Add documentation parameters for DeprecatedRule * tests: Unset requests-related environment variables * pre-commit: Resolve dependency conflicts * remove unicode from code 3.6.0 ----- * Fix oslopolicy-j2y-convertor tool for RuleDefault * Add policy file selection logic when default changing to yaml * Use py3 as the default runtime for tox * Fix grammar issues * Use TOX\_CONSTRAINTS\_FILE * Remove format option examples in policy generator * Fix hacking min version to 3.0.1 * Remove all usage of six library * Adding pre-commit * Add Python3 wallaby unit tests * Update master for stable/victoria 3.5.0 ----- * [goal] Migrate testing to ubuntu focal * sample-generator: Improve YAML output 3.4.0 ----- * Log warning for redundant file rules * Deprecate the JSON support for policy\_file * Add oslopolicy-convert-json-to-yaml tool * Bump bandit version 3.3.2 ----- * Fix unit tests to work with stevedore > 2.0.1 * Clarify what exactly an "access file" is 3.3.1 ----- * Don't deepcopy objects before mask\_dict\_password * Include example of literal comparison policy rule 3.3.0 ----- * docs: Add separate man page for each CLI tool * Add oslopolicy-validator tool 3.2.1 ----- * Reload files in policy\_dirs on primary file change * Fix pygments style 3.2.0 ----- * Switch to newer openstackdocstheme and reno versions * Remove the unused coding style modules * Remove translation sections from setup.cfg * Align contributing doc with oslo's policy * docs: Add description of 'oslopolicy-policy-generator' * Bump default tox env from py37 to py38 * Add py38 package metadata * Add release notes links to doc index * Add Python3 
victoria unit tests * Update master for stable/ussuri * Mark sphinx extensions thread safe 3.1.0 ----- * Fix doc comments for new enforce default flag * Allow disabling the default check\_str change warnings * Add new config to enforce the new defaults * Cleanup warnings * Remove the conversion according to the comment of jdennis * Bump oslo.utils to 3.40.0 3.0.3 ----- * Use unittest.mock instead of third party mock * Update hacking for Python3 3.0.2 ----- * Don't parse cli args on the global object in sphinxpolicygen 3.0.1 ----- * Temporarily make namespace arg optional 3.0.0 ----- * remove outdated header * [ussuri][goal] Drop python 2.7 support and testing * Link to the Keystone role documentation * Make HTTP check doc heading more specific * Initialize global config object in cli tools * Move away from python setup.py test who is deprecated in pbr * tox: Trivial cleanup * Follow the new PTI for document build 2.4.1 ----- * Don't use string processing to combine deprecated rules * Bump the openstackdocstheme extension to 1.20 2.4.0 ----- * tox: Keeping going with docs * Switch to Ussuri jobs * Modernize policy checker * Update the constraints url * Update master for stable/train * Suppress deprecation warnings in oslopolicy-list-redundant * Fix reference cycle caused by deprecated sample override 2.3.2 ----- 2.3.1 ----- 2.3.0 ----- * Add attribute to suppress deprecation warnings * Only alias when policy names change * Add unit tests on the sphinxext indent function * Move doc related modules to doc/requirements.txt * Add Python 3 Train unit tests * Updated from global requirements * Replace git.openstack.org URLs with opendev.org URLs * Cap Bandit below 1.6.0 and update Sphinx requirement 2.2.0 ----- * OpenDev Migration Patch * Dropping the py35 testing * Clarify policy\_file configuration option help text * Update master for stable/stein * Corrects tox.ini snippet to point to config file * Provide more specific error when namespace is missing * Add py36 and py37 tox envs 2.1.1 ----- * add python 3.7 unit test job * Update hacking version 2.1.0 ----- * Add ability for policy-checker to read configuration 2.0.0 ----- 1.44.1 ------ * Fix sample config value when set\_defaults is used * Fixes is\_admin type from StrOpt to BoolOpt * Fixes file access using with statements 1.44.0 ------ * Use template for lower-constraints * Use oslo.config instead of argparse * Add policy-upgrade tool 1.43.1 ------ * Prevent sensitive target data from being logged 1.43.0 ------ * Change openstack-dev to openstack-discuss 1.42.0 ------ * Fully log RBAC enforcement data * Add domain scope support for scope types * Make upgrades more robust with policy overrides * oslopolicy-checker: iterate through rules in sorted order * Enhance test to prevent JSON parsing regression * Correct typo in docs 1.41.1 ------ * Fix usage of token fixture in shell tests 1.41.0 ------ * Add ability to pass in target data for the oslopolicy-checker * Pass in policy name as part of the oslopolicy-check check call * Unit test for CLI 1.40.1 ------ * Update sphinx extension logging * Add minor nits in testing documentation * Clean up .gitignore references to personal tools * Add guidelines for naming policies 1.40.0 ------ * Add docs for developers testing APIs 1.39.1 ------ * sphinxext: Start parsing 'DocumentedRuleDefault.description' as rST * Docs: Remove references to JSON format * add lib-forward-testing-python3 test job * Imported Translations from Zanata * add python 3.6 unit test job * Move \_capture\_stdout to a common place 
* Remove PyPI downloads * import zuul job settings from project-config * Update reno for stable/rocky 1.38.1 ------ * Avoid redundant policy syntax checks 1.38.0 ------ * Teach Enforcer.enforce to deal with context objects * Pass dictionary as creds in policy tests * Fix requirements and convert to stestr * Add blueprints and releasenotes link to README * generator: Reimplement wrapping of 'description' * fix tox python3 overrides 1.37.0 ------ * Add CLI usage documentation * Clarify CLI documentation * Remove erroneous newline in sample generation * Update sphinxext to include scope\_types in docs 1.36.0 ------ * Fix document formatting * Add examples and clarification around scope\_types * Include deprecated\_reason when deprecated\_rule is set * Include both new and deprecated rules in generated sample * trivial: Fix file permissions 1.35.0 ------ * Remove stale pip-missing-reqs tox test * make the sphinxpolicygen extension handle multiple input/output files * Update documentation to include usage for new projects * Trivial: Update pypi url to new url * set default python to python3 * add lower-constraints job * Updated from global requirements * Update links in README 1.34.0 ------ * Imported Translations from Zanata * Imported Translations from Zanata * Update reno for stable/queens * Updated from global requirements * Imported Translations from Zanata * Updated from global requirements * Render deprecated policy names when generating files * Updated from global requirements * Updated from global requirements 1.33.1 ------ * Only log deprecation warnings when they are overridden 1.33.0 ------ * Add a release note for enforce\_scope * Add configuration option for enforcing scope 1.32.2 ------ * Fix string injection for InvalidScope 1.32.1 ------ * Imported Translations from Zanata 1.32.0 ------ * Add scope\_types to RuleDefault objects 1.31.0 ------ * Remove -U from pip install * Avoid tox\_install.sh for constraints support * add bandit to pep8 job * Updated from global requirements * Handle deprecation of inspect.getargspec * Remove setting of version/release from releasenotes * Updated from global requirements 1.30.0 ------ * Imported Translations from Zanata * Add functionality to deprecate policies * Pass creds as a dict in tests 1.29.0 ------ * Documentation and release notes for plugins * expand type documentation for Enforcer arguments * Imported Translations from Zanata * http/https check rules as stevedore extensions * External Policy hook should support SSL 1.28.1 ------ * Modification to add additional information in the HTTPCheck request 1.28.0 ------ * Updated from global requirements * rewrite HttpCheckFixture to not mock out entire HttpCheck class 1.27.0 ------ * Updated from global requirements * Add JSON output option to sample generator 1.26.0 ------ * Imported Translations from Zanata * Updated from global requirements * throw an exception when sphinxext cannot find the config file * Update reno for stable/pike * fix formatting for empty defaults * Updated from global requirements 1.25.0 ------ * Updated from global requirements * Update URLs in documents according to document migration * Fix parsing bug when config file is empty 1.24.1 ------ * import configuration guide content from openstack-manuals repo * sphinxext: Use field lists in output * sphinxext: Format definition lists correctly * switch from oslosphinx to openstackdocstheme * move existing documentation into new standard layout 1.24.0 ------ * Updated from global requirements * Updated from global 
requirements * Updated from global requirements * Updated from global requirements * Updated from global requirements 1.23.0 ------ * Updated from global requirements * Simplify message of exception PolicyNotAuthorized * Updated from global requirements 1.22.1 ------ * Updated from global requirements * Add Sphinx extension to pretty-print modules * Optimize the link address * Check reStructuredText documents for common style issues 1.22.0 ------ * Update usage documentation * Add release note for DocumentedRuleDefault * Remove log translations * oslopolicy-sample-generator description support * Use Sphinx 1.5 warning-is-error 1.21.0 ------ * Comment out the rule from generated sample-policy file * Modify tests in test\_generator * Add additional param to policy.RuleDefault * Updated from global requirements * Seperate each policy rule with new line 1.20.0 ------ * Allow multiline descriptions for RuleDefaults 1.19.0 ------ * Updated from global requirements * [Fix gate]Update test requirement * Updated from global requirements * Updated from global requirements * Remove support for py34 * pbr.version.VersionInfo needs package name (oslo.xyz and not oslo\_xyz) * Delete the unnecessary word in policy.py * Update reno for stable/ocata * Add optional exception for check\_rules 1.18.0 ------ * Remove references to Python 3.4 * Remove dead code and use default value of argparse * Add Constraints support * Updated from global requirements 1.17.0 ------ * Improved performance of parse\_file\_contents() method * Show team and repo badges on README * Remove wrong parameter type for class NotCheck from docstring * Fix a code logic while doing cyclical reference check to the policy * Updated from global requirements * Add missing parameter description in module \_cache\_handler * Fix typo in oslo.policy * Updated from global requirements * Add stevedore to requirements * Imported Translations from Zanata * Updated from global requirements * Make exception PolicyNotAuthorized more readable 1.16.0 ------ * Change assertTrue(isinstance()) by optimal assert * Perform basic checks on policy definitions * Enable release notes translation * Changed the home-page link * Change assertTrue(isinstance()) by optimal assert 1.15.0 ------ * Updated from global requirements * Update docs on policy sample generator * Updated from global requirements * doc: Add introduction to index page * Add sphinx extension to build sample policy * Updated from global requirements * Updated from global requirements * Doc: declare YAML/JSON support * Remove oslo.utils from requirements * Update reno for stable/newton 1.14.0 ------ * Revert "Adds debug logging for policy file validation" * Updated from global requirements * Delete H803 in flake8 ignore 1.13.0 ------ * Updated from global requirements * Add note about not all APIs support policy enforcement by user\_id * Allow policy file to not exist * Adds debug logging for policy file validation * Fixed unit tests running on Windows * Add Python 3.5 classifier and venv 1.12.0 ------ * Updated from global requirements * Updated from global requirements * Fix mispelled method name in setup.cfg * Updated from global requirements * Updated from global requirements * Imported Translations from Zanata 1.11.0 ------ * Updated from global requirements 1.10.0 ------ * Imported Translations from Zanata * Improve policy sample generation testing * Add helper scripts for generating policy info 1.9.0 ----- * Add sample file generation script and helper methods * Add equality operator to 
policy.RuleDefault * Imported Translations from Zanata * Updated from global requirements * Fix typo: 'olso' to 'oslo' * Updated from global requirements * Updated from global requirements * Add reno for release notes management * Add policy registration and authorize method * Updated from global requirements * doc: Fix wrong import statement in usage 1.8.0 ----- * Trivial: ignore openstack/common in flake8 exclude list 1.7.0 ----- * Updated from global requirements * Imported Translations from Zanata * Imported Translations from Zanata * Updated from global requirements * Updated from global requirements * Deprecate load\_json() in favor of load() * Support policy file in YAML 1.5.0 ----- * Updated from global requirements 1.4.0 ----- * Update translation setup * Updated from global requirements * Updated from global requirements * Updated from global requirements * Updated from global requirements * Revert "Pass environment variables of proxy to tox" * Run docs testenv by default with tox * Add oslopolicy-checker command-line tool 1.3.0 ----- * Updated from global requirements * Don't crash on RoleCheck when roles not present * assertIsNone(val) instead of assertEqual(None,val) 1.2.0 ----- * Updated from global requirements * Add string format rendering to RoleCheck.\_\_call\_\_() * Pass environment variables of proxy to tox * Fixes combined "and" and "or" rule handling * Make sure item of policy\_dirs is directory * Updated from global requirements * Use dict comprehension * Don't generate doc from test 1.1.0 ----- * Trival: Remove 'MANIFEST.in' * Updated from global requirements * Updated from global requirements * Updated from global requirements * Use requests-mock instead of httpretty in tests * Clarify usage docs * Correct invalid doc references 1.0.0 ----- * Updated from global requirements * Remove Python 2.6 classifier * Remove python 2.6 and cleanup tox.ini 0.13.0 ------ * Updated from global requirements * Updated from global requirements * Updated from global requirements * Use JSON generator * Add test for enforce with rule doesn't exist * Add test for raising default exception * Add test for invalid JSON * Add cover test requirement * Fix a typo in policy.py 0.12.0 ------ * Updated from global requirements * Custom fixture to avoid external call in HttpCheck * Fix coverage configuration and execution * add auto-generated docs for config options * Add shields.io version/downloads links/badges into README.rst * Updated from global requirements * Use requests in http check instead of urllib * Change ignore-errors to ignore\_errors * Updated from global requirements * remove deprecation text for policy\_dirs option 0.11.0 ------ * Updated from global requirements 0.10.0 ------ * Pass reference then actual to assertEqual * Overwrite option should not cause policy file reloading * Updated from global requirements * Setup translations * Have the enforcer have its own file cache * Updated from global requirements 0.9.0 ----- * Updated from global requirements 0.8.0 ----- * Updated from global requirements * Fix typo of 'available' in token\_fixture.py * Fixes up the API docs and module index 0.7.0 ----- * Remove oslo-incubator specific code * Move fileutils functions to oslo.policy * Add six and oslo.utils to requirements * Add tox target to find missing requirements * Updated from global requirements * Updated from global requirements 0.6.0 ----- * Fix Enforcer docstring 0.5.0 ----- * Expose base check classes as part of public API * Cleanup logging to conform to guidelines * 
Cleanup logging to conform to guidelines * Remove support for Python 3.3 * Updated from global requirements 0.4.0 ----- * Uncap library requirements for liberty * Fix invalid indentation in \_load\_policy\_file method * Cleanup README.rst and setup.cfg * Avoid reloading policy files in policy.d for every call * Lists for Generic Checks * Updated from global requirements 0.3.1 ----- * Switch to non-namespaced module imports 0.3.0 ----- * deprecate policy\_dirs option * Updated from global requirements * Expose register and Check as part of public API * provide more descriptive exception 0.2.0 ----- * Add missing space to help message * Add Rules.from\_dict classmethod * Use assertTrue or assertFalse where appropriate * Fix the order of args to assertEqual in tests 0.1.0 ----- * Clean up configuration option management * Update comments about tox configuration * Fix i18n imports * Change default set of tox environments * Create the temporary files needed for tests * Fix minor spelling issues in oslo.policy * Use single quotes consistently * Do not log on missing or empty policy\_dirs * Remove symlinked file from tests * document the migration process and update the docs a bit * Use standard logging in oslo.policy * Updated from global requirements * Remove globals that were introduced for compatibility * Upgrade hacking to >=0.10.0 * Remove oslo.concurrency from requirements * Stop shouting test attribute names * Do not use global enforcer for tests * Make use of private modules * Privatize parsing classes * Add entry points for option discovery * Add pep8 import exception for oslo\_policy.\_i18n * Use oslo\_i18n * Perform an oslo-sync * General docstring cleanup * Drop use of oslo namespace for oslo libraries * Update .gitignore * Drop usage of namespaced packages * Remove use of graduated modules * Add docstrings for check classes * Correct docstring references * Improve policy documentation * Explicit configuration object * Fix project metadata * Add API documentation * Move project imports after 3rd party imports * Fix tests * Add openstack.common and requirements fixes * exported from oslo-incubator by graduate.sh * Improving docstrings for policy API * Don't log missing policy.d as a warning * Add rule overwrite flag to Enforcer class * Fixed a problem with neutron http policy check * Expanding the help text for policy\_dirs * policy: add a missing staticmethod declaration * Fixes nits in module policy * add list\_opts to all modules with configuration options * Correct default rule name for policy.Enforcer * Minor fixes in policy module * Delete graduated serialization files * Remove code that moved to oslo.i18n * Allow dictionary lookup in credentials with dot notation * Remove graduated test and fixtures libraries * Fix typo to show correct log message * Use MultiStrOpt for policy\_dirs * Add support for policy configration directories * Fix deletion of cached file for policy enforcer * Make policy debug logging less verbose * Improve help strings * Use oslotest instead of common test module * policy: rename Exception to avoid nose regression * Adds a flag to determine whether to reload the rules in policy * Documenting policy.json syntax * Update oslo log messages with translation domains * Fix policy tests for parallel testing * Allow policy.json resource vs constant check * Replaces use of urlutils with six in policy module * Utilizes assertIsNone and assertIsNotNone * Use hacking import\_exceptions for gettextutils.\_ * Use urlutils functions instead of urllib/urllib2 * Remove 
vim header * Use six.string\_type instead of basestring * Apply six for metaclass * ConfigFileNotFoundError with proper argument * Keystone user can't perform revoke\_token * Remove useless unit test codes in test\_policy * Replace using tests.utils part2 * Bump hacking to 0.7.0 * Fix wrong argument in openstack common policy * Fix missing argument bug in oslo common policy * Fix policy default\_rule issue * Allow use of hacking 0.6.0 and enable new checks * Fix missing argument bug in oslo common policy * Enable H302 hacking check * Enable hacking H404 test * Enable H306 hacking check * python3: python3 binary/text data compatbility * Reduce duplicated code related to policies * Removes len() on empty sequence evaluation * Convert unicode for python3 portability * Replaces standard logging with common logging * Update to use flake8 * Removes unused imports in the tests module * update OpenStack, LLC to OpenStack Foundation * Replace direct use of testtools BaseTestCase * Use testtools as test base class * Fix pep8 E125 errors * Revert "Add support for finer-grained policy decisions" * Remove an unneeded 'global' * Add support for finer-grained policy decisions * Add a 'not' operator to the policy langage * Add a new policy language * Remove deprecated policy engine APIs * Rewrite the policy engine from scratch * Use pep8 v1.3.3 * Allow non-string items in the creds dict * Use function registration for policy checks * Fix missing gettextutils in several modules * Switch common files to using jsonutils * Update common code to support pep 1.3. bug 1014216 * Common-ize policies * initial commit * Initial skeleton project ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/HACKING.rst0000664000175000017500000000044700000000000015674 0ustar00zuulzuul00000000000000Style Commandments ================== - Step 1: Read the OpenStack Style Commandments https://docs.openstack.org/hacking/latest/ - Step 2: Read on oslo.policy Specific Commandments --------------------------------- - Avoid using "double quotes" where you can reasonably use 'single quotes' ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/LICENSE0000664000175000017500000002363700000000000015111 0ustar00zuulzuul00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. 
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. ././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1645118009.859518 oslo.policy-3.11.0/PKG-INFO0000664000175000017500000000376600000000000015202 0ustar00zuulzuul00000000000000Metadata-Version: 1.2 Name: oslo.policy Version: 3.11.0 Summary: Oslo Policy library Home-page: https://docs.openstack.org/oslo.policy/latest/ Author: OpenStack Author-email: openstack-discuss@lists.openstack.org License: UNKNOWN Description: ======================== Team and repository tags ======================== .. image:: https://governance.openstack.org/tc/badges/oslo.policy.svg :target: https://governance.openstack.org/tc/reference/tags/index.html .. Change things from this point on ============= oslo.policy ============= .. image:: https://img.shields.io/pypi/v/oslo.policy.svg :target: https://pypi.org/project/oslo.policy/ :alt: Latest Version The Oslo Policy library provides support for RBAC policy enforcement across all OpenStack services. 
* Free software: Apache license * Documentation: https://docs.openstack.org/oslo.policy/latest/ * Source: https://opendev.org/openstack/oslo.policy * Bugs: https://bugs.launchpad.net/oslo.policy * Blueprints: https://blueprints.launchpad.net/oslo.policy * Release Notes: https://docs.openstack.org/releasenotes/oslo.policy Platform: UNKNOWN Classifier: Environment :: OpenStack Classifier: Intended Audience :: Information Technology Classifier: Intended Audience :: System Administrators Classifier: License :: OSI Approved :: Apache Software License Classifier: Operating System :: POSIX :: Linux Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: 3.8 Classifier: Programming Language :: Python :: 3.9 Classifier: Programming Language :: Python :: 3 :: Only Classifier: Programming Language :: Python :: Implementation :: CPython Requires-Python: >=3.6 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/README.rst0000664000175000017500000000153300000000000015562 0ustar00zuulzuul00000000000000======================== Team and repository tags ======================== .. image:: https://governance.openstack.org/tc/badges/oslo.policy.svg :target: https://governance.openstack.org/tc/reference/tags/index.html .. Change things from this point on ============= oslo.policy ============= .. image:: https://img.shields.io/pypi/v/oslo.policy.svg :target: https://pypi.org/project/oslo.policy/ :alt: Latest Version The Oslo Policy library provides support for RBAC policy enforcement across all OpenStack services. * Free software: Apache license * Documentation: https://docs.openstack.org/oslo.policy/latest/ * Source: https://opendev.org/openstack/oslo.policy * Bugs: https://bugs.launchpad.net/oslo.policy * Blueprints: https://blueprints.launchpad.net/oslo.policy * Release Notes: https://docs.openstack.org/releasenotes/oslo.policy ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1645118009.8435183 oslo.policy-3.11.0/doc/0000775000175000017500000000000000000000000014636 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/requirements.txt0000664000175000017500000000052300000000000020122 0ustar00zuulzuul00000000000000# The order of packages is significant, because pip processes them in the order # of appearance. Changing the order has an impact on the overall integration # process, which may cause wedges in the gate later. 
openstackdocstheme>=2.2.0 # Apache-2.0 sphinx>=2.0.0,!=2.1.0 # BSD sphinxcontrib-apidoc>=0.2.0 # BSD reno>=3.1.0 # Apache-2.0 ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1645118009.8435183 oslo.policy-3.11.0/doc/source/0000775000175000017500000000000000000000000016136 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1645118009.8435183 oslo.policy-3.11.0/doc/source/admin/0000775000175000017500000000000000000000000017226 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/admin/index.rst0000664000175000017500000000033400000000000021067 0ustar00zuulzuul00000000000000================================================= Administering Applications that use oslo.policy ================================================= .. toctree:: :maxdepth: 2 policy-yaml-file policy-json-file ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/admin/policy-json-file.rst0000664000175000017500000002113100000000000023141 0ustar00zuulzuul00000000000000==================== The policy.json file ==================== .. warning:: While the old json format policy file is still supported, we recommend using the :doc:`newer YAML format `. Each OpenStack service, Identity, Compute, Networking, and so on, has its own role-based access policies. They determine which user can access which objects in which way, and are defined in the service's ``policy.json`` file. Whenever an API call to an OpenStack service is made, the service's policy engine uses the appropriate policy definitions to determine if the call can be accepted. Any changes to ``policy.json`` are effective immediately, which allows new policies to be implemented while the service is running. A ``policy.json`` file is a text file in JSON (Javascript Object Notation) format. Each policy is defined by a one-line statement in the form ``"" : ""``. The policy target, also named "action", represents an API call like "start an instance" or "attach a volume". Action names are usually qualified. For example, the Compute service features API calls to list instances, volumes, and networks. In ``/etc/nova/policy.json``, these APIs are represented by ``compute:get_all``, ``volume:get_all``, and ``network:get_all``, respectively. The mapping between API calls and actions is not generally documented. The policy rule determines under which circumstances the API call is permitted. Usually this involves the user who makes the call (hereafter named the "API user") and often the object on which the API call operates. A typical rule checks if the API user is the object's owner. .. warning:: **Modifying the policy** While recipes for editing ``policy.json`` files are found on blogs, modifying the policy can have unexpected side effects and is not encouraged. Examples ~~~~~~~~ A simple rule might look like this: .. code-block:: none "compute:get_all" : "" The target is ``"compute:get_all"``, the "list all instances" API of the Compute service. The rule is an empty string meaning "always". This policy allows anybody to list instances. You can also decline permission to use an API: .. code-block:: none "compute:shelve": "!" The exclamation mark stands for "never" or "nobody", which effectively disables the Compute API "shelve an instance". 
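To see how such rules behave from code, the sketch below evaluates the two rules shown above through oslo.policy's public ``Enforcer`` API. It is only an illustration: the rule names come from the examples on this page, while the credential values and the use of ``use_conf=False`` (which keeps the enforcer from reading a policy file off disk) are assumptions made so the snippet is self-contained.

.. code-block:: python

    from oslo_config import cfg
    from oslo_policy import policy

    conf = cfg.ConfigOpts()
    conf([])  # parse no CLI arguments; a real service passes its own CONF

    # use_conf=False stops the enforcer from looking for a policy file on
    # disk, so only the rules registered below are in effect.
    enforcer = policy.Enforcer(conf, use_conf=False)
    enforcer.set_rules(policy.Rules.from_dict({
        'compute:get_all': '',   # empty string: always permitted
        'compute:shelve': '!',   # "!": never permitted
    }))

    # Credentials of the API user making the call (illustrative values).
    creds = {'user_id': 'u1', 'project_id': 'p1', 'roles': ['member']}

    print(enforcer.enforce('compute:get_all', {}, creds))  # True
    print(enforcer.enforce('compute:shelve', {}, creds))   # False

This is essentially what a service does internally when it checks a policy target for an incoming request.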
Many APIs can only be called by administrators. This can be expressed by the rule ``"role:admin"``. The following policy ensures that only administrators can create new users in the Identity database: .. code-block:: none "identity:create_user" : "role:admin" .. note:: ``admin`` is a built-in default role in Keystone. For more details and other roles that may be available, see the `Keystone documentation on default roles. `_ You can limit APIs to any role. For example, the Orchestration service defines a role named ``heat_stack_user``. Whoever has this role is not allowed to create stacks: .. code-block:: none "stacks:create": "not role:heat_stack_user" This rule makes use of the boolean operator ``not``. More complex rules can be built using operators ``and``, ``or``, and parentheses. You can define aliases for rules: .. code-block:: none "deny_stack_user": "not role:heat_stack_user" The policy engine understands that ``"deny_stack_user"`` is not an API and consequently interprets it as an alias. The stack creation policy above can then be written as: .. code-block:: none "stacks:create": "rule:deny_stack_user" This is taken verbatim from ``/etc/heat/policy.json``. Rules can compare API attributes to object attributes. For example: .. code-block:: none "os_compute_api:servers:start" : "project_id:%(project_id)s" states that only the owner of an instance can start it up. The ``project_id`` string before the colon is an API attribute, namely the project ID of the API user. It is compared with the project ID of the object (in this case, an instance). More precisely, it is compared with the ``project_id`` field of that object in the database. If the two values are equal, permission is granted. An administrator always has permission to call APIs. This is how ``/etc/keystone/policy.json`` makes this policy explicit: .. code-block:: none "admin_required": "role:admin or is_admin:1", "owner" : "user_id:%(user_id)s", "admin_or_owner": "rule:admin_required or rule:owner", "identity:change_password": "rule:admin_or_owner" The first line defines an alias for "user is an admin user". The ``is_admin`` flag is only used when setting up the Identity service for the first time. It indicates that the user has admin privileges granted by the service token (``--os-token`` parameter of the ``keystone`` command line client). The second line creates an alias for "user owns the object" by comparing the API's user ID with the object's user ID. Line 3 defines a third alias ``admin_or_owner``, combining the two first aliases with the Boolean operator ``or``. Line 4 sets up the policy that a password can only be modified by its owner or an admin user. As a final example, let's examine a more complex rule: .. code-block:: none "identity:ec2_delete_credential": "rule:admin_required or (rule:owner and user_id:%(target.credential.user_id)s)" This rule determines who can use the Identity API "delete EC2 credential". Here, boolean operators and parentheses combine three simpler rules. ``admin_required`` and ``owner`` are the same aliases as in the previous example. ``user_id:%(target.credential.user_id)s`` compares the API user with the user ID of the credential object associated with the target. Syntax ~~~~~~ A ``policy.json`` file consists of policies and aliases of the form ``target:rule`` or ``alias:definition``, separated by commas and enclosed in curly braces: .. code-block:: none { "alias 1" : "definition 1", "alias 2" : "definition 2", ... "target 1" : "rule 1", "target 2" : "rule 2", .... 
} Targets are APIs and are written ``"service:API"`` or simply ``"API"``. For example, ``"compute:create"`` or ``"add_image"``. Rules determine whether the API call is allowed. Rules can be: - always true. The action is always permitted. This can be written as ``""`` (empty string), ``[]``, or ``"@"``. - always false. The action is never permitted. Written as ``"!"``. - a special check - a comparison of two values - boolean expressions based on simpler rules Special checks are: - ``role:<role name>``, a test whether the API credentials contain this role. - ``rule:<rule name>``, the definition of an alias. - ``http:<target URL>``, which delegates the check to a remote server. The API is authorized when the server returns True. Developers can define additional special checks. Two values are compared in the following way: .. code-block:: none "value1 : value2" Possible values are: - constants: Strings, numbers, ``true``, ``false`` - API attributes - target object attributes - the flag ``is_admin`` API attributes can be ``project_id``, ``user_id`` or ``domain_id``. Target object attributes are fields from the object description in the database. For example in the case of the ``"compute:start"`` API, the object is the instance to be started. The policy for starting instances could use the ``%(project_id)s`` attribute, that is the project that owns the instance. The trailing ``s`` indicates this is a string. ``is_admin`` indicates that administrative privileges are granted via the admin token mechanism (the ``--os-token`` option of the ``keystone`` command). The admin token allows initialisation of the Identity database before the admin role exists. The alias construct exists for convenience. An alias is a short name for a complex or hard to understand rule. It is defined in the same way as a policy: .. code-block:: none alias name : alias definition Once an alias is defined, use the ``rule`` keyword to use it in a policy rule. Older syntax ~~~~~~~~~~~~ You may encounter older ``policy.json`` files that feature a different syntax, where JavaScript arrays are used instead of boolean operators. For example, the EC2 credentials rule above would have been written as follows: .. code-block:: none "identity:ec2_delete_credential": [ [ "rule:admin_required" ], [ "rule:owner", "user_id:%(target.credential.user_id)s" ] ] The rule is an array of arrays. The innermost arrays are or'ed together, whereas elements inside the innermost arrays are and'ed. While the old syntax is still supported, we recommend using the newer, more intuitive syntax. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/admin/policy-yaml-file.rst0000664000175000017500000002214300000000000023136 0ustar00zuulzuul00000000000000==================== The policy.yaml file ==================== Each OpenStack service, Identity, Compute, Networking, and so on, has its own role-based access policies. They determine which user can access which objects in which way, and are defined in the service's ``policy.yaml`` file. Whenever an API call to an OpenStack service is made, the service's policy engine uses the appropriate policy definitions to determine if the call can be accepted. Any changes to ``policy.yaml`` are effective immediately, which allows new policies to be implemented while the service is running. A ``policy.yaml`` file is a text file in YAML (YAML Ain't Markup Language) format. Each policy is defined by a one-line statement in the form ``"<target>" : "<rule>"``. 
The policy target, also named "action", represents an API call like "start an instance" or "attach a volume". Action names are usually qualified. For example, the Compute service features API calls to list instances, volumes, and networks. In ``/etc/nova/policy.yaml``, these APIs are represented by ``compute:get_all``, ``volume:get_all``, and ``network:get_all``, respectively. The mapping between API calls and actions is not generally documented. The policy rule determines under which circumstances the API call is permitted. Usually this involves the user who makes the call (hereafter named the "API user") and often the object on which the API call operates. A typical rule checks if the API user is the object's owner. .. warning:: **Modifying the policy** While recipes for editing ``policy.yaml`` files are found on blogs, modifying the policy can have unexpected side effects and is not encouraged. Examples ~~~~~~~~ A simple rule might look like this: .. code-block:: yaml "compute:get_all" : "" The target is ``"compute:get_all"``, the "list all instances" API of the Compute service. The rule is an empty string meaning "always". This policy allows anybody to list instances. You can also decline permission to use an API: .. code-block:: yaml "compute:shelve": "!" The exclamation mark stands for "never" or "nobody", which effectively disables the Compute API "shelve an instance". A simple comparison can be done using a literal value: .. code-block:: yaml "copy_image": "'shared':%(visibility)s" This check compares the literal ``shared`` with the value of the key ``visibility`` from the object. It will pass if and only if ``object['visibility'] == 'shared'``. It is necessary to include the single quotes around the literal value when writing the rule so oslo.policy knows not to interpret it as an API attribute. To determine the fields available on the object passed to the policy check, it is necessary to enable debug logging for oslo.policy. This can be done by enabling debug logging for the service in question, and also removing ``oslo_policy`` from the default_log_levels option. Many APIs can only be called by administrators. This can be expressed by the rule ``"role:admin"``. The following policy ensures that only administrators can create new users in the Identity database: .. code-block:: yaml "identity:create_user" : "role:admin" .. note:: ``admin`` is a built-in default role in Keystone. For more details and other roles that may be available, see the `Keystone documentation on default roles. `_ You can limit APIs to any role. For example, the Orchestration service defines a role named ``heat_stack_user``. Whoever has this role is not allowed to create stacks: .. code-block:: yaml "stacks:create": "not role:heat_stack_user" This rule makes use of the boolean operator ``not``. More complex rules can be built using operators ``and``, ``or``, and parentheses. You can define aliases for rules: .. code-block:: yaml "deny_stack_user": "not role:heat_stack_user" The policy engine understands that ``"deny_stack_user"`` is not an API and consequently interprets it as an alias. The stack creation policy above can then be written as: .. code-block:: yaml "stacks:create": "rule:deny_stack_user" This is taken verbatim from ``/etc/heat/policy.yaml``. Rules can compare API attributes to object attributes. For example: .. code-block:: yaml "os_compute_api:servers:start" : "project_id:%(project_id)s" states that only the owner of an instance can start it up. 
The ``project_id`` string before the colon is an API attribute, namely the project ID of the API user. It is compared with the project ID of the object (in this case, an instance). More precisely, it is compared with the ``project_id`` field of that object in the database. If the two values are equal, permission is granted. An administrator always has permission to call APIs. This is how ``/etc/keystone/policy.yaml`` makes this policy explicit: .. code-block:: yaml "admin_required": "role:admin or is_admin:1" "owner" : "user_id:%(user_id)s" "admin_or_owner": "rule:admin_required or rule:owner" "identity:change_password": "rule:admin_or_owner" The first line defines an alias for "user is an admin user". The ``is_admin`` flag is only used when setting up the Identity service for the first time. It indicates that the user has admin privileges granted by the service token (``--os-token`` parameter of the ``keystone`` command line client). The second line creates an alias for "user owns the object" by comparing the API's user ID with the object's user ID. Line 3 defines a third alias ``admin_or_owner``, combining the two first aliases with the Boolean operator ``or``. Line 4 sets up the policy that a password can only be modified by its owner or an admin user. As a final example, let's examine a more complex rule: .. code-block:: yaml "identity:ec2_delete_credential": "rule:admin_required or (rule:owner and user_id:%(target.credential.user_id)s)" This rule determines who can use the Identity API "delete EC2 credential". Here, boolean operators and parentheses combine three simpler rules. ``admin_required`` and ``owner`` are the same aliases as in the previous example. ``user_id:%(target.credential.user_id)s`` compares the API user with the user ID of the credential object associated with the target. Syntax ~~~~~~ A ``policy.yaml`` file consists of policies and aliases of the form ``target:rule`` or ``alias:definition``: .. code-block:: yaml "alias 1" : "definition 1" "alias 2" : "definition 2" .... "target 1" : "rule 1" "target 2" : "rule 2" .... Targets are APIs and are written ``"service:API"`` or simply ``"API"``. For example, ``"compute:create"`` or ``"add_image"``. Rules determine whether the API call is allowed. Rules can be: - Always true. The action is always permitted. This can be written as ``""`` (empty string), ``[]``, or ``"@"``. - Always false. The action is never permitted. Written as ``"!"``. - A special check - A comparison of two values - Boolean expressions based on simpler rules Special checks are: - ``role:``, a test whether the API credentials contain this role. - ``rule:``, the definition of an alias. - ``http:``, which delegates the check to a remote server. The API is authorized when the server returns True. Developers can define additional special checks. Two values are compared in the following way: .. code-block:: yaml "value1 : value2" Possible values are: - Constants: Strings, numbers, ``true``, ``false`` - API attributes - Target object attributes - The flag ``is_admin`` API attributes can be ``project_id``, ``user_id`` or ``domain_id``. Target object attributes are fields from the object description in the database. For example in the case of the ``"compute:start"`` API, the object is the instance to be started. The policy for starting instances could use the ``%(project_id)s`` attribute, that is the project that owns the instance. The trailing ``s`` indicates this is a string. 
The same case would be valid for API attributes like ``%(user_id)s`` and ``%(domain_id)s``. With debug logging enabled, the target object attributes are usually visible in the logged API calls. By comparing the API call in the logs with the policy enforced for the corresponding API, you can check which attribute is being used as the target object. For example, in the ``policy.yaml`` for the Nova project you can find the ``"compute:start"`` API; the policy shows as ``"rule:admin_or_owner"``, which points to ``"admin_or_owner": "is_admin:True or project_id:%(project_id)s"``, and from this you can tell that the target object in the debug logging needs to provide a ``project_id`` attribute. ``is_admin`` indicates that administrative privileges are granted via the admin token mechanism (the ``--os-token`` option of the ``keystone`` command). The admin token allows initialisation of the Identity database before the admin role exists. The alias construct exists for convenience. An alias is a short name for a complex or hard to understand rule. It is defined in the same way as a policy: .. code-block:: yaml alias name : alias definition Once an alias is defined, use the ``rule`` keyword to use it in a policy rule. ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1645118009.8475182 oslo.policy-3.11.0/doc/source/cli/0000775000175000017500000000000000000000000016705 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1645118009.8475182 oslo.policy-3.11.0/doc/source/cli/common/0000775000175000017500000000000000000000000020175 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/cli/common/convert-opts.rst0000664000175000017500000000035600000000000023376 0ustar00zuulzuul00000000000000.. option:: --namespace NAMESPACE Option namespace(s) under "oslo.policy.policies" in which to query for options. .. option:: --policy-file POLICY_FILE Path to the policy file which needs to be converted to ``yaml`` format. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/cli/common/default-opts.rst0000664000175000017500000000127000000000000023336 0ustar00zuulzuul00000000000000.. option:: -h, --help Show help message and exit. .. option:: --config-dir DIR Path to a config directory to pull ``*.conf`` files from. This file set is sorted, so as to provide a predictable parse order if individual options are overridden. The set is parsed after the file(s) specified via previous ``--config-file`` arguments, hence overridden options in the directory take precedence. This option must be set from the command-line. .. option:: --config-file PATH Path to a config file to use. Multiple config files can be specified, with values in later files taking precedence. Defaults to None. This option must be set from the command-line. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/cli/common/enforcer-opts.rst0000664000175000017500000000020500000000000023512 0ustar00zuulzuul00000000000000.. option:: --namespace NAMESPACE Option namespace under "oslo.policy.enforcer" in which to look for a ``policy.Enforcer``.
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/cli/common/generator-opts.rst0000664000175000017500000000013500000000000023677 0ustar00zuulzuul00000000000000.. option:: --output-file OUTPUT_FILE Path of the file to write to. Defaults to stdout. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/cli/common/rule-opts.rst0000664000175000017500000000033700000000000022664 0ustar00zuulzuul00000000000000.. option:: --format FORMAT Desired format for the output. Allowed values: ``json``, ``yaml`` .. option:: --namespace NAMESPACE Option namespace(s) under "oslo.policy.policies" in which to query for options. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/cli/index.rst0000664000175000017500000000061000000000000020543 0ustar00zuulzuul00000000000000====================== Command Line Interface ====================== This document describes the various command line tools exposed by ``oslo.policy`` to manage policies and policy files. .. toctree:: :maxdepth: 1 oslopolicy-checker oslopolicy-validator oslopolicy-list-redundant oslopolicy-policy-generator oslopolicy-sample-generator oslopolicy-convert-json-to-yaml ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/cli/oslopolicy-checker.rst0000664000175000017500000000466700000000000023252 0ustar00zuulzuul00000000000000================== oslopolicy-checker ================== .. program:: oslopolicy-checker Synopsis -------- :: oslopolicy-checker [-h] [--access ACCESS] [--config-dir DIR] [--config-file PATH] [--enforcer_config ENFORCER_CONFIG] [--is_admin] [--nois_admin] [--policy POLICY] [--rule RULE] [--target TARGET] Description ----------- The ``oslopolicy-checker`` command can be used to check policy against the OpenStack Identity API access information. The access information is a keystone token response from keystone's `authentication API `_. Options ------- .. include:: common/default-opts.rst .. option:: --access ACCESS Path to a file containing an OpenStack Identity API token response body in JSON format. .. option:: --enforcer_config ENFORCER_CONFIG Configuration file for the oslopolicy-checker enforcer .. option:: --is_admin Set ``is_admin=True`` on the credentials used for the evaluation. .. option:: --nois_admin The inverse of ``--is_admin`` .. option:: --policy POLICY Path to a policy file. .. option:: --rule RULE Rule to test. .. option:: --target TARGET Path to a file containing custom target info in JSON format. This will be used to evaluate the policy with. Examples -------- Test all of Nova's policy with an admin token: .. code-block:: bash oslopolicy-checker \ --policy /opt/stack/nova/etc/nova/policy.json --access sample_data/auth_v3_token_admin.json Test the ``compute_extension:flavorextraspecs:index`` rule in Nova's policy with the admin member token and ``is_admin`` set to ``True``: .. code-block:: bash oslopolicy-checker \ --policy /opt/stack/nova/etc/nova/policy.json \ --access sample_data/auth_v3_token_admin.json \ --is_admin=true --rule compute_extension:flavorextraspecs:index Test the ``compute_extension:flavorextraspecs:index`` rule in Nova's policy with the plain member token: .. 
code-block:: bash oslopolicy-checker \ --policy /opt/stack/nova/etc/nova/policy.json \ --access sample_data/auth_v3_token_member.json \ --rule compute_extension:flavorextraspecs:index See Also -------- :program:`oslopolicy-sample-generator`, :program:`oslopolicy-policy-generator`, :program:`oslopolicy-list-redundant`, :program:`oslopolicy-validator` ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/cli/oslopolicy-convert-json-to-yaml.rst0000664000175000017500000000515300000000000025644 0ustar00zuulzuul00000000000000=============================== oslopolicy-convert-json-to-yaml =============================== .. program:: oslopolicy-convert-json-to-yaml Synopsis -------- :: oslopolicy-convert-json-to-yaml [-h] [--config-dir DIR] [--config-file PATH] [--namespace NAMESPACE] [--policy-file POLICY_FILE] [--output-file OUTPUT_FILE] Description ----------- The ``oslopolicy-convert-json-to-yaml`` tool can be used to convert a JSON-format policy file to YAML format. It takes a JSON-formatted policy file as input and converts it to a YAML-formatted policy file, similar to the ``oslopolicy-sample-generator`` tool, except that overridden rules are kept uncommented. It does the following: * Comments out any rules that match the defaults from policy-in-code. * Keeps rules uncommented if the rule is overridden. * Does not automatically add deprecated rules unless they are already present in the file. * Keeps any extra rules or already existing deprecated rules uncommented, but at the end of the file with a warning text. When to use: ~~~~~~~~~~~~ Oslo policy still supports policy files in JSON format, but that leads to `multiple issues `_. One of the key issues came up when nova switched to the new policy defaults and the scope feature from keystone. Refer to `this bug `_ for details. In a future release, oslo policy will remove support for JSON-formatted policy files; to have a smooth migration to a YAML-formatted policy file, you can use this tool to convert your existing JSON-formatted file to YAML. Options ------- .. include:: common/default-opts.rst .. include:: common/generator-opts.rst .. include:: common/convert-opts.rst Examples -------- To convert a JSON policy file for a namespace called ``keystone``: .. code-block:: bash oslopolicy-convert-json-to-yaml --namespace keystone \ --policy-file keystone-policy.json To convert a JSON policy file to YAML format directly to a file: .. code-block:: bash oslopolicy-convert-json-to-yaml --namespace keystone \ --policy-file keystone-policy.json \ --output-file keystone-policy.yaml Use the following to generate help text for additional options and arguments supported by ``oslopolicy-convert-json-to-yaml``: .. code-block:: bash oslopolicy-convert-json-to-yaml --help See Also -------- :program:`oslopolicy-sample-generator`, :program:`oslopolicy-policy-generator`, :program:`oslopolicy-upgrade` ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/cli/oslopolicy-list-redundant.rst0000664000175000017500000000224100000000000024565 0ustar00zuulzuul00000000000000========================= oslopolicy-list-redundant ========================= ..
program:: oslopolicy-list-redundant Synopsis -------- :: oslopolicy-list-redundant [-h] [--config-dir DIR] [--config-file PATH] [--namespace NAMESPACE] Description ----------- The ``oslopolicy-list-redundant`` tool is useful for detecting policies that are specified in policy files that are the same as the defaults provided by the service. Operators can use this tool to find policies that they can remove from their policy files, making maintenance easier. This tool assumes a policy file containing overrides exists and is specified through configuration. Options ------- .. include:: common/default-opts.rst .. include:: common/enforcer-opts.rst Examples -------- To list redundant default policies: .. code-block:: bash oslopolicy-list-redundant --namespace keystone --config-dir /etc/keystone For more information regarding the options supported by this tool: .. code-block:: bash oslopolicy-list-redundant --help See Also -------- :program:`oslopolicy-sample-generator`, :program:`oslopolicy-policy-generator`, :program:`oslopolicy-checker` ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/cli/oslopolicy-policy-generator.rst0000664000175000017500000000253000000000000025114 0ustar00zuulzuul00000000000000=========================== oslopolicy-policy-generator =========================== .. program:: oslopolicy-policy-generator Synopsis -------- :: oslopolicy-policy-generator [-h] [--config-dir DIR] [--config-file PATH] [--namespace NAMESPACE] [--output-file OUTPUT_FILE] Description ----------- The ``oslopolicy-policy-generator`` command can be used to generate a policy file that shows the effective policy in use. This is generated by merging the registered defaults and policies loaded from a configuration file. Options ------- .. include:: common/default-opts.rst .. include:: common/enforcer-opts.rst .. include:: common/generator-opts.rst Examples -------- The generate the effective policy file for a namespace called ``keystone``: .. code-block:: bash oslopolicy-policy-generator --namespace keystone To generate the effective policy file and output directly to a file: .. code-block:: bash oslopolicy-policy-generator \ --namespace keystone \ --output-file keystone-policy.yaml To show the additional options and arguments supported by ``oslopolicy-policy-generator``: .. code-block:: bash oslopolicy-policy-generator --help See Also -------- :program:`oslopolicy-sample-generator`, :program:`oslopolicy-list-redundant`, :program:`oslopolicy-checker` ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/cli/oslopolicy-sample-generator.rst0000664000175000017500000000306000000000000025075 0ustar00zuulzuul00000000000000=========================== oslopolicy-sample-generator =========================== .. program:: oslopolicy-sample-generator Synopsis -------- :: oslopolicy-sample-generator [-h] [--config-dir DIR] [--config-file PATH] [--format FORMAT] [--namespace NAMESPACE] [--output-file OUTPUT_FILE] Description ----------- The ``oslopolicy-sample-generator`` command can be used to generate a sample policy file based on the default policies in a given namespace. This tool requires a namespace to query for policies and supports output in JSON or YAML. Options ------- .. include:: common/default-opts.rst .. include:: common/rule-opts.rst .. 
include:: common/generator-opts.rst Examples -------- To generate sample policies for a namespace called ``keystone``: .. code-block:: bash oslopolicy-sample-generator --namespace keystone To generate sample policies in JSON use: .. code-block:: bash oslopolicy-sample-generator --namespace keystone --format json To generate a sample policy file and output directly to a file: .. code-block:: bash oslopolicy-sample-generator --namespace keystone \ --format yaml \ --output-file keystone-policy.yaml Use the following to generate help text for additional options and arguments supported by ``oslopolicy-sample-generator``: .. code-block:: bash oslopolicy-sample-generator --help See Also -------- :program:`oslopolicy-policy-generator`, :program:`oslopolicy-list-redundant`, :program:`oslopolicy-checker` ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/cli/oslopolicy-validator.rst0000664000175000017500000000266700000000000023631 0ustar00zuulzuul00000000000000==================== oslopolicy-validator ==================== .. program:: oslopolicy-policy-validator Synopsis -------- :: oslopolicy-policy-validator Description ----------- The ``oslopolicy-validator`` tool can be used to perform basic sanity checks against a policy file. It will detect the following problems: * A missing policy file * Rules which have invalid syntax * Rules which reference non-existent other rules * Rules which form a cyclical reference with another rule * Rules which do not exist in the specified namespace This tool does very little validation of the content of the rules. Other tools, such as ``oslopolicy-checker``, should be used to check that rules do what is intended. Options ------- .. include:: common/default-opts.rst .. include:: common/enforcer-opts.rst Examples -------- Validate the policy file used for Keystone: .. code-block:: bash oslopolicy-validator --config-file /etc/keystone/keystone.conf --namespace keystone Sample output from a failed validation:: $ oslopolicy-validator --config-file keystone.conf --namespace keystone WARNING:oslo_policy.policy:Policies ['foo', 'bar'] are part of a cyclical reference. Invalid rules found Failed to parse rule: (role:admin and system_scope:all) or (role:foo and oken.domain.id:%(target.user.domain_id)s)) Unknown rule found in policy file: foo Unknown rule found in policy file: bar See Also -------- :program:`oslopolicy-checker` ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/conf.py0000664000175000017500000000665600000000000017452 0ustar00zuulzuul00000000000000# -*- coding: utf-8 -*- # Copyright (C) 2020 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # -- General configuration --------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 
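# Note: openstackdocstheme supplies the 'openstackdocs' HTML theme selected
# further down, oslo_config.sphinxext provides the show-options directive used
# by the configuration guide, and sphinxcontrib.apidoc builds the API
# reference from the oslo_policy package (see the apidoc_* settings below).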
extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.extlinks', 'sphinxcontrib.apidoc', 'openstackdocstheme', 'oslo_config.sphinxext', ] # openstackdocstheme options openstackdocs_repo_name = 'openstack/oslo.policy' openstackdocs_bug_project = 'oslo.policy' openstackdocs_bug_tag = '' # autodoc generation is a bit aggressive and a nuisance when doing heavy # text edit cycles. # execute "export SPHINX_DEBUG=1" in your terminal to disable # The suffix of source filenames. source_suffix = '.rst' # The master toctree document. master_doc = 'index' # General information about the project. copyright = '2014-2020, OpenStack Foundation' source_tree = 'https://opendev.org/openstack/oslo.policy/src/branch/master' # If true, '()' will be appended to :func: etc. cross-reference text. add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). add_module_names = True # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'native' # A list of ignored prefixes for module index sorting. modindex_common_prefix = ['oslo_policy.'] # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. html_theme = 'openstackdocs' # -- Options for man page output --------------------------------------------- # Grouping the document tree for man pages. # List of tuples 'sourcefile', 'target', 'title', 'Authors name', 'manual' _man_pages = [ ( 'oslopolicy-checker', 'Check policy against the OpenStack Identity API access information.', ), ( 'oslopolicy-list-redundant', 'Detect policies that are specified in policy files that are the same ' 'as the defaults provided by the service', ), ( 'oslopolicy-policy-generator', 'Generate a policy file that shows the effective policy in use', ), ( 'oslopolicy-sample-generator', 'Generate a sample policy file based on the default policies in a ' 'given namespace', ), ] man_pages = [ (f'cli/{name}', name, description, 'OpenStack Community', 1) for name, description in _man_pages ] # -- sphinx.ext.extlinks configuration --------------------------------------- extlinks = { 'example': (source_tree + '/oslo_policy/%s', ''), } # -- sphinxcontrib.apidoc configuration -------------------------------------- apidoc_module_dir = '../../oslo_policy' apidoc_output_dir = 'reference/api' apidoc_excluded_paths = [ 'tests', ] ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1645118009.8475182 oslo.policy-3.11.0/doc/source/configuration/0000775000175000017500000000000000000000000021005 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/configuration/index.rst0000664000175000017500000000042600000000000022650 0ustar00zuulzuul00000000000000======================= Configuration Options ======================= oslo.policy uses oslo.config to define and manage configuration options that allow the deployer to control where the policy files are located, the default rule to apply, etc. .. 
show-options:: oslo.policy ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1645118009.8475182 oslo.policy-3.11.0/doc/source/contributor/0000775000175000017500000000000000000000000020510 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/contributor/index.rst0000664000175000017500000000012400000000000022346 0ustar00zuulzuul00000000000000============== Contributing ============== .. include:: ../../../CONTRIBUTING.rst ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/index.rst0000664000175000017500000000102600000000000017776 0ustar00zuulzuul00000000000000============= oslo.policy ============= An OpenStack library providing support for RBAC policy enforcement across all OpenStack services. Contents ======== .. toctree:: :maxdepth: 2 install/index admin/index configuration/index cli/index user/index reference/index contributor/index Release Notes ============= Read also the `oslo.policy Release Notes `_. Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1645118009.8475182 oslo.policy-3.11.0/doc/source/install/0000775000175000017500000000000000000000000017604 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/install/index.rst0000664000175000017500000000032400000000000021444 0ustar00zuulzuul00000000000000============== Installation ============== At the command line:: $ pip install oslo.policy Or, if you want to use it in a virtualenvwrapper:: $ mkvirtualenv oslo.policy $ pip install oslo.policy ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1645118009.8475182 oslo.policy-3.11.0/doc/source/reference/0000775000175000017500000000000000000000000020074 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/reference/index.rst0000664000175000017500000000016100000000000021733 0ustar00zuulzuul00000000000000=========================== oslo.policy API Reference =========================== .. toctree:: api/modules ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1645118009.8475182 oslo.policy-3.11.0/doc/source/user/0000775000175000017500000000000000000000000017114 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/user/history.rst0000664000175000017500000000010400000000000021342 0ustar00zuulzuul00000000000000=========== ChangeLog =========== .. include:: ../../../ChangeLog ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/user/index.rst0000664000175000017500000000017500000000000020760 0ustar00zuulzuul00000000000000=================== Using oslo.policy =================== .. 
toctree:: usage plugins sphinxpolicygen history ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/user/plugins.rst0000664000175000017500000000263600000000000021336 0ustar00zuulzuul00000000000000======================== Writing HTTP check rules ======================== oslo.policy has supported the following syntax for a while:: http:, which delegates the check to a remote server Starting with 1.29, oslo.policy will also support https url(s) as well:: https:, which delegates the check to a remote server Both ``http`` and ``https`` support are implemented as custom check rules. If you see the setup.cfg for oslo.policy, you can see the following entry points:: oslo.policy.rule_checks = http = oslo_policy._external:HttpCheck https = oslo_policy._external:HttpsCheck When a policy is evaluated, when the engine encounters ``https`` like in a snippet below:: { ... "target 1" : "https://foo.bar/baz", ... } The engine will look for a plugin named ``https`` in the ``rule_checks`` entry point and will try to invoke that stevedore plugin. This mechanism allows anyone to write their own code, in their own library with their own custom stevedore based rule check plugins and can enhance their policies with custom checks. This would be useful for example to integrate with an in-house policy server. Example code - HttpCheck ======================== .. note:: Full source located at :example:`_external.py` .. literalinclude:: ../../../oslo_policy/_external.py :language: python :linenos: :lines: 28-64 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/user/sphinxpolicygen.rst0000664000175000017500000000417300000000000023076 0ustar00zuulzuul00000000000000==================================== Sphinx Oslo Sample Policy Generation ==================================== .. note:: This extension relies on ``oslopolicy-sample-generator``, which requires configuration of policies in code to function. Refer to the :doc:`usage` guide for more information. oslo.policy includes a sphinx extension to generate a sample policy file at the beginning of each sphinx build. This sample policy file can then be included in your documents as a raw file, for example, via the ``literalinclude`` directive. To activate the extension add ``oslo_policy.sphinxpolicygen`` to the list of extensions in your sphinx ``conf.py``. Once enabled, you need to define two options: ``policy_generator_config_file`` and ``sample_policy_basename``. For example:: policy_generator_config_file = '../../etc/nova/nova-policy-generator.conf' sample_policy_basename = '_static/nova' where: ``policy_generator_config_file`` Path to an configuration file used with the ``oslopolicy-sample-generator`` utility. This can be a full path or a value relative to the documentation source directory (``app.srcdir``). If this option is not specified or is invalid then the sample policy file generation will be skipped. To handle cases where multiple files need to be generated, this value can be a list of two-part tuples containing the path to the configuration file and the base name for the output file (in this case, ``sample_policy_basename`` should not be set). ``sample_policy_basename`` Base name of the output file. This name will be appended with a ``.policy.yaml.sample`` extension to generate the final output file, and the path is relative to the documentation source directory (``app.srcdir``). 
As such, using the above example, the policy file will be output to ``_static/nova.policy.yaml.sample``. If this option is not specified, the file will be output to ``sample.policy.yaml``. Once configured, you can include this configuration file in your source: .. code:: reST ============= Sample Policy ============= Here is a sample policy file. .. literalinclude:: _static/nova.policy.yaml.sample ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/doc/source/user/usage.rst0000664000175000017500000005530400000000000020761 0ustar00zuulzuul00000000000000======= Usage ======= To use oslo.policy in a project, import the relevant module. For example:: from oslo_policy import policy Migrating to oslo.policy ======================== Applications using the incubated version of the policy code from Oslo aside from changing the way the library is imported, may need to make some extra changes. Incorporating oslo.policy tooling --------------------------------- The ``oslo.policy`` library offers a generator that projects can use to render sample policy files, check for redundant rules or policies, among other things. This is a useful tool not only for operators managing policies but also for developers looking to automate documentation describing projects' default policies. This part of the document describes how you can incorporate these features into your project. Let's assume we're working on an OpenStack-like project called ``foo``. Policies for this service are registered in code in a common module of the project. First, you'll need to expose a couple of entry points in the project's ``setup.cfg``:: [entry_points] oslo.policy.policies = foo = foo.common.policies:list_rules oslo.policy.enforcer = foo = foo.common.policy:get_enforcer The ``oslo.policy`` library uses the project namespace to call ``list_rules``, which should return a list of ``oslo.policy`` objects, instances of either ``RuleDefault`` or ``DocumentedRuleDefault``. The second entry point allows ``oslo.policy`` to generate complete policy from overrides supplied by an existing policy file on disk. This is useful for operators looking to supply a policy file to Horizon or for security compliance complete with overrides important to that deployment. The ``get_enforcer`` method should return an instance of ``oslo.policy.policy:Enforcer``. The information passed into the constructor of ``Enforcer`` should resolve any overrides on disk. An example for project ``foo`` might look like the following:: from oslo_config import cfg from oslo_policy import policy from foo.common import policies CONF = cfg.CONF _ENFORCER = None def get_enforcer(): CONF([], project='foo') global _ENFORCER if not _ENFORCER: _ENFORCER = policy.Enforcer(CONF) _ENFORCER.register_defaults(policies.list_rules()) return _ENFORCER Please note that if you're incorporating this into a project that already uses ``oslo.policy`` in some form or fashion, this might need to be changed to fit that project's structure accordingly. Next, you can create a configuration file for generating policies specifically for project ``foo``. 
This file could be called ``foo-policy-generator.conf`` and it can be kept under version control within the project:: [DEFAULT] output_file = etc/foo/policy.yaml.sample namespace = foo If project ``foo`` uses tox, this makes it easier to create a specific tox environment for generating sample configuration files in ``tox.ini``:: [testenv:genpolicy] commands = oslopolicy-sample-generator --config-file etc/foo/foo-policy-generator.conf Changes to Enforcer Initialization ---------------------------------- The ``oslo.policy`` library no longer assumes a global configuration object is available. Instead the :py:class:`oslo_policy.policy.Enforcer` class has been changed to expect the consuming application to pass in an ``oslo.config`` configuration object. When using policy from oslo-incubator ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :: enforcer = policy.Enforcer(policy_file=_POLICY_PATH) When using oslo.policy ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :: from oslo_config import cfg CONF = cfg.CONF enforcer = policy.Enforcer(CONF, policy_file=_POLICY_PATH) Registering policy defaults in code =================================== A project can register policy defaults in their code which brings with it some benefits. * A deployer only needs to add a policy file if they wish to override the project defaults. * Projects can use Enforcer.authorize to ensure that a policy check is being done against a registered policy. This can be used to ensure that all policies used are registered. The signature of Enforcer.authorize matches Enforcer.enforce. * Projects can register policies as `DocumentedRuleDefault` objects, which require a method and path of the corresponding policy. This helps policy readers understand which path maps to a particular policy ultimately providing better documentation. * A sample policy file can be generated based on the registered policies rather than needing to be manually maintained. * A policy file can be generated which is a merge of registered defaults and policies loaded from a file. This shows the effective policy in use. * A list can be generated which contains policies defined in a file which match defaults registered in code. These are candidates for removal from the file in order to keep it small and understandable. How to register --------------- :: from oslo_config import cfg CONF = cfg.CONF enforcer = policy.Enforcer(CONF, policy_file=_POLICY_PATH) base_rules = [ policy.RuleDefault('admin_required', 'role:admin or is_admin:1', description='Who is considered an admin'), policy.RuleDefault('service_role', 'role:service', description='service role'), ] enforcer.register_defaults(base_rules) enforcer.register_default(policy.RuleDefault('identity:create_region', 'rule:admin_required', description='helpful text')) To provide more information about the policy, use the `DocumentedRuleDefault` class:: enforcer.register_default( policy.DocumentedRuleDefault( 'identity:create_region', 'rule:admin_required', 'helpful text', [{'path': '/regions/{region_id}', 'method': 'POST'}] ) ) The `DocumentedRuleDefault` class inherits from the `RuleDefault` implementation, but it must be supplied with the `description` attribute in order to be used. In addition, the `DocumentedRuleDefault` class requires a new `operations` attribute that is a list of dictionaries. Each dictionary must have a `path` and a `method` key. The `path` should map to the path used to interact with the resource the policy protects. The `method` should be the HTTP verb corresponding to the `path`. 
The list of `operations` can be supplied with multiple dictionaries if the policy is used to protect multiple paths. Naming policies --------------- Policy names are an integral piece of information in understanding how OpenStack's policy engine works. Developers protect APIs using policy names. Operators use policy names to override policies in their deployment. Having consistent policy names across OpenStack services is essential to providing a pleasant user experience. The following rules are guidelines to help you, as a developer, build unique and descriptive policy names. Service types ~~~~~~~~~~~~~ Policy names should be specific about the service that uses them. The service type should also follow a known standard, which is the `service-types authority `_. Using an existing standard avoids confusing users by reusing an established reference. For example, instead of using `keystone` as the service in a policy name, you should use `identity`, since it is not specific to one implementation. It's also more specific about the functionality provided by the service instead of having readers maintain a mental mapping between service code name and functionality it provides. Resources and subresources ~~~~~~~~~~~~~~~~~~~~~~~~~~ Users may interact with resources exposed by a service's API. You should include the name of a resource in the policy name, and it should be singular. For example, policies that protect the user API should use `identity:user`, instead of `identity:users`. Some services might have subresources. For example, a fixed IP address could be considered a subresource of an IP address. You should separate open-form compound words with a hyphen and not an underscore. This spacing convention maintains consistency with spacing used in the service types authority. For example, use `ip-address` instead of `ip_address`. Having more than one way to separate compound words within a single convention is confusing and prone to accidentally introducing inconsistencies. Resource names should be minimalist and contain only characters needed to describe the resource. Extra information should be omitted from the resource altogether. Use `agent` instead of `os-agents`, even if the URL path of the resource uses `/os-agents`. Actions and subactions ~~~~~~~~~~~~~~~~~~~~~~ Actions are specific things that users can do to resources. Typical actions are `create`, `get`, `list`, `update`, and `delete`. These action definitions are independent of the HTTP method used to implement their underlying API, which is intentional. This independence is important because two different services may implement the same action using two different HTTP methods. For example, use `compute:server:list` as a policy name for listing servers instead of `compute:server:get_all` or `compute:server:get-all`. Using `all` in the policy name itself implies returning every possible entity when the actual response may be filtered based on the user's authority. In other words, list servers for a domain administrator managing many different projects within that domain could be very different from a member of a project listing servers owned by a single project. Some services have the ability to list resources with greater detail. Depending on the context, those additional details might be sensitive in nature and require more strict RBAC permissions than `list`. In this case, use `compute:server:list-detail` as opposed to `compute:server:detail`. 
By using a compound word, we're being more descriptive about what the `detail` actually means. Subactions are optionally available for you to add clarity about resource actions. For example, `compute:server:resize:confirm` is an example of how you can compound an action (resize) with a subaction (confirm) to explicitly name a policy. Actions that are open form compound words should use hyphens instead of underscores for spacing. This spacing is consistent with the service types authority and resource names for open form compound words. For example, use `compute:server:resize-state` instead of `compute:server:resize_state`. Resource Attributes ~~~~~~~~~~~~~~~~~~~ Resource attributes may be used in policy names, and are entirely optional. If you need to include the attribute of a resource in the name, you should place it after the resource or subresource portion. For example, use `compute:flavor:private:list` to name a policy for listing all private flavors. Putting it all together ~~~~~~~~~~~~~~~~~~~~~~~ Now that you know what services types, resources, attributes, and actions are within the context of policy names, it is possible to establish the order you should use them. Policy names should increase in detail as you read it. This results in the following syntax:: :[:][:]:[:] You should delimit each segment of the name with a colon (:). The following are examples for existing OpenStack APIs:: identity:user:list block-storage:volume:extend compute:server:resize:confirm compute:flavor:private:list network:ip-address:fixed-ip-address:create Setting scope ------------- The `RuleDefault` and `DocumentedRuleDefault` objects have an attribute dedicated to the intended scope of the operation called `scope_types`. This attribute can only be set at rule definition and never overridden via a policy file. This variable is designed to save the scope at which a policy should operate. During enforcement, the information in `scope_types` is compared to the scope of the token used in the request. It is designed to match the available token scopes available from keystone, which are `system`, `domain`, and `project`. The examples highlighted here will show the usage with system and project APIs. Setting `scope_types` to anything but these three values is unsupported. For example, a policy that is used to protect a resource tracked in a project should require a project-scoped token. This can be expressed with `scope_types` as follows:: policy.DocumentedRuleDefault( name='service:create_foo', check_str='role:admin', scope_types=['project'], description='Creates a foo resource', operations=[ { 'path': '/v1/foos/', 'method': 'POST' } ] ) A policy that is used to protect system-level resources can follow the same pattern:: policy.DocumentedRuleDefault( name='service:update_bar', check_str='role:admin', scope_types=['system'], description='Updates a bar resource', operations=[ { 'path': '/v1/bars/{bar_id}', 'method': 'PATCH' } ] ) The `scope_types` attribute makes sure the token used to make the request is scoped properly and passes the `check_str`. This is powerful because it allows roles to be reused across different authorization levels without compromising APIs. For example, the `admin` role in the above example is used at the project-level and the system-level to protect two different resources. If we only checked that the token contained the `admin` role, it would be possible for a user with a project-scoped token to access a system-level API. 
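The following sketch illustrates that interaction. It is illustrative only and assumes the deployment opts into strict scope checking through oslo.policy's ``enforce_scope`` configuration option; the rule name reuses the ``service:update_bar`` example above, while the service name, context values, and IDs are made up::

    from oslo_config import cfg
    from oslo_context import context
    from oslo_policy import policy

    CONF = cfg.CONF
    CONF([], project='example')  # hypothetical service name
    CONF.set_override('enforce_scope', True, group='oslo_policy')

    enforcer = policy.Enforcer(CONF)
    enforcer.register_default(
        policy.RuleDefault('service:update_bar', 'role:admin',
                           scope_types=['system']))

    # A project-scoped context, even with the admin role, should not be able
    # to call this system-level API while scope enforcement is enabled.
    ctx = context.RequestContext(roles=['admin'], project_id='abc123')

    try:
        enforcer.authorize('service:update_bar', {}, ctx)
    except policy.InvalidScope:
        pass  # the scope check rejected the request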
Developers incorporating `scope_types` into OpenStack services should be mindful of the relationship between the API they are protecting with a policy and the resource level the API operates at, whether it's system-level or project-level. Sample file generation ---------------------- In setup.cfg of a project using oslo.policy:: [entry_points] oslo.policy.policies = nova = nova.policy:list_policies where list_policies is a method that returns a list of policy.RuleDefault objects. Run the oslopolicy-sample-generator script with some configuration options:: oslopolicy-sample-generator --namespace nova --output-file policy-sample.yaml or:: oslopolicy-sample-generator --config-file policy-generator.conf where policy-generator.conf looks like:: [DEFAULT] output_file = policy-sample.yaml namespace = nova If output_file is omitted the sample file will be sent to stdout. Merged file generation ---------------------- This will output a policy file which includes all registered policy defaults and all policies configured with a policy file. This file shows the effective policy in use by the project. In setup.cfg of a project using oslo.policy:: [entry_points] oslo.policy.enforcer = nova = nova.policy:get_enforcer where get_enforcer is a method that returns a configured oslo_policy.policy.Enforcer object. This object should be setup exactly as it is used for actual policy enforcement, if it differs the generated policy file may not match reality. Run the oslopolicy-policy-generator script with some configuration options:: oslopolicy-policy-generator --namespace nova --output-file policy-merged.yaml or:: oslopolicy-policy-generator --config-file policy-merged-generator.conf where policy-merged-generator.conf looks like:: [DEFAULT] output_file = policy-merged.yaml namespace = nova If output_file is omitted the file will be sent to stdout. List of redundant configuration ------------------------------- This will output a list of matches for policy rules that are defined in a configuration file where the rule does not differ from a registered default rule. These are rules that can be removed from the policy file with no change in effective policy. In setup.cfg of a project using oslo.policy:: [entry_points] oslo.policy.enforcer = nova = nova.policy:get_enforcer where get_enforcer is a method that returns a configured oslo_policy.policy.Enforcer object. This object should be setup exactly as it is used for actual policy enforcement, if it differs the generated policy file may not match reality. Run the oslopolicy-list-redundant script:: oslopolicy-list-redundant --namespace nova or:: oslopolicy-list-redundant --config-file policy-redundant.conf where policy-redundant.conf looks like:: [DEFAULT] namespace = nova Output will go to stdout. Testing default policies ======================== Developers need to reliably unit test policies used to protect APIs. Having robust unit test coverage increases confidence that changes won't negatively affect user experience. This document is intended to help you understand historical context behind testing practices you may find in your service. More importantly, it's going to describe testing patterns you can use to increase confidence in policy testing and coverage. History ------- Before the ability to register policies in code, developers maintained policies in a policy file, which included all policies used by the service. Developers maintained policy files within the project source code, which contained the default policies for the service. 
Once it became possible to register policies in code, policy files became irrelevant because you could generate them. Generating policy files from code made maintaining documentation for policies easier and allowed for a single source of truth. Registering policies in code also meant testing no longer required a policy file, since the default policies were in the service itself. At this point, it is important to note that policy enforcement requires an authorization context based on the user making the request (e.g., is the user allowed to do the operation they're asking to do). Within OpenStack, this authorization context is relayed to services by the token used to call an API, which comes from an OpenStack identity service. In its purest form, you can think of authorization context as the roles a user has on a project, domain, or system. Services can feed the authorization context into policy enforcement, which determines if a user is allowed to do something. The coupling between the authorization context, ultimately the token, and the policy enforcement mechanism raises the bar for effectively testing policies and APIs. Service developers want to ensure the functionality specific to their service works and not dwell on the implementation details of an authorization system. Additionally, they want to keep unit tests lightweight, as opposed to requiring a separate system to issue tokens for authorization, crossing the boundary of unit testing to integration testing. Because of this, you typically see one of two approaches taken concerning policies and test code across OpenStack services. One approach is to supply a policy file specifically for testing that overrides the sample policy file or default policies in code. This file contains mostly policies without proper check strings, which relaxes the authorization enforced by the service using oslo.policy. Without proper check strings, it's possible to access APIs without building context objects or using tokens from an identity service. The other approach is to mock policy enforcement to succeed unconditionally. Since developers are bypassing the code within the policy engine, supplying a proper authorization context doesn't have an impact on the APIs used in the test case. Both methods let developers focus on validating the domain-specific functionality of their service without needing to understand the intricacies of policy enforcement. Unfortunately, bypassing API authorization testing comes at the expense of introducing gaps where the default policies may break unexpectedly with new changes. If the tests don't assert the default behavior, it's likely that seemingly simple changes negatively impact users or operators, regardless of that being the intent of the developer. Testing policies ---------------- Fortunately, you can test policies without needing to deal with tokens by using context objects directly, specifically a RequestContext object. Chances are your service is already using these to represent information from middleware that sits in front of the API. Using context for authorization strikes a perfect balance between integration testing and exercising just enough authorization to ensure policies sufficiently protect APIs. The oslo.policy library also accepts context objects and automatically translates properties to values used when evaluating policy, which makes using them even more natural. To use RequestContext objects effectively, you need to understand the policy under test. 
Then, you can model a context object appropriately for the test case. The idea is to build a context object to use in the request that either fails or passes policy enforcement. For example, assume you're testing a default policy like the following: :: from oslo_config import cfg CONF = cfg.CONF enforcer = policy.Enforcer(CONF, policy_file=_POLICY_PATH) enforcer.register_default( policy.RuleDefault('identity:create_region', 'role:admin') ) Enforcement here is straightforward in that a user with a role called ``admin`` may access this API. You can model this in a request context by setting these attributes explicitly: :: from oslo_context import context context = context.RequestContext() context.roles = ['admin'] Depending on how your service deploys the API in unit tests, you can either provide a fake context as you supply the request, or mock the return value of the context to return the one you've built. You can also supply scope information for policies with complex check strings or the use of scope types. For example, consider the following default policy: :: from oslo_config import cfg CONF = cfg.CONF enforcer = policy.Enforcer(CONF, policy_file=_POLICY_PATH) enforcer.register_default( policy.RuleDefault('identity:create_region', 'role:admin', scope_types=['system']) ) We can model it using the following request context object, which includes scope: :: from oslo_context import context context = context.RequestContext() context.roles = ['admin'] context.system_scope = 'all' Note that ``all`` is a unique system scope target that signifies the user is authorized to operate on the deployment system. Conversely, the following is an example of a context modeling a project-scoped token: :: import uuid from oslo_context import context context = context.RequestContext() context.roles = ['admin'] context.project_id = uuid.uuid4().hex The significance here is the difference between administrator authorization on the deployment system and administrator authorization on a project. ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1645118009.8475182 oslo.policy-3.11.0/oslo.policy.egg-info/0000775000175000017500000000000000000000000020035 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645118009.0 oslo.policy-3.11.0/oslo.policy.egg-info/PKG-INFO0000664000175000017500000000376600000000000021146 0ustar00zuulzuul00000000000000Metadata-Version: 1.2 Name: oslo.policy Version: 3.11.0 Summary: Oslo Policy library Home-page: https://docs.openstack.org/oslo.policy/latest/ Author: OpenStack Author-email: openstack-discuss@lists.openstack.org License: UNKNOWN Description: ======================== Team and repository tags ======================== .. image:: https://governance.openstack.org/tc/badges/oslo.policy.svg :target: https://governance.openstack.org/tc/reference/tags/index.html .. Change things from this point on ============= oslo.policy ============= .. image:: https://img.shields.io/pypi/v/oslo.policy.svg :target: https://pypi.org/project/oslo.policy/ :alt: Latest Version The Oslo Policy library provides support for RBAC policy enforcement across all OpenStack services. 
* Free software: Apache license * Documentation: https://docs.openstack.org/oslo.policy/latest/ * Source: https://opendev.org/openstack/oslo.policy * Bugs: https://bugs.launchpad.net/oslo.policy * Blueprints: https://blueprints.launchpad.net/oslo.policy * Release Notes: https://docs.openstack.org/releasenotes/oslo.policy Platform: UNKNOWN Classifier: Environment :: OpenStack Classifier: Intended Audience :: Information Technology Classifier: Intended Audience :: System Administrators Classifier: License :: OSI Approved :: Apache Software License Classifier: Operating System :: POSIX :: Linux Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: 3.8 Classifier: Programming Language :: Python :: 3.9 Classifier: Programming Language :: Python :: 3 :: Only Classifier: Programming Language :: Python :: Implementation :: CPython Requires-Python: >=3.6 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645118009.0 oslo.policy-3.11.0/oslo.policy.egg-info/SOURCES.txt0000664000175000017500000001112000000000000021714 0ustar00zuulzuul00000000000000.coveragerc .mailmap .pre-commit-config.yaml .stestr.conf .zuul.yaml AUTHORS CONTRIBUTING.rst ChangeLog HACKING.rst LICENSE README.rst requirements.txt setup.cfg setup.py test-requirements.txt tox.ini doc/requirements.txt doc/source/conf.py doc/source/index.rst doc/source/admin/index.rst doc/source/admin/policy-json-file.rst doc/source/admin/policy-yaml-file.rst doc/source/cli/index.rst doc/source/cli/oslopolicy-checker.rst doc/source/cli/oslopolicy-convert-json-to-yaml.rst doc/source/cli/oslopolicy-list-redundant.rst doc/source/cli/oslopolicy-policy-generator.rst doc/source/cli/oslopolicy-sample-generator.rst doc/source/cli/oslopolicy-validator.rst doc/source/cli/common/convert-opts.rst doc/source/cli/common/default-opts.rst doc/source/cli/common/enforcer-opts.rst doc/source/cli/common/generator-opts.rst doc/source/cli/common/rule-opts.rst doc/source/configuration/index.rst doc/source/contributor/index.rst doc/source/install/index.rst doc/source/reference/index.rst doc/source/user/history.rst doc/source/user/index.rst doc/source/user/plugins.rst doc/source/user/sphinxpolicygen.rst doc/source/user/usage.rst oslo.policy.egg-info/PKG-INFO oslo.policy.egg-info/SOURCES.txt oslo.policy.egg-info/dependency_links.txt oslo.policy.egg-info/entry_points.txt oslo.policy.egg-info/not-zip-safe oslo.policy.egg-info/pbr.json oslo.policy.egg-info/requires.txt oslo.policy.egg-info/top_level.txt oslo_policy/__init__.py oslo_policy/_cache_handler.py oslo_policy/_checks.py oslo_policy/_external.py oslo_policy/_i18n.py oslo_policy/_parser.py oslo_policy/fixture.py oslo_policy/generator.py oslo_policy/opts.py oslo_policy/policy.py oslo_policy/shell.py oslo_policy/sphinxext.py oslo_policy/sphinxpolicygen.py oslo_policy/version.py oslo_policy/locale/en_GB/LC_MESSAGES/oslo_policy.po oslo_policy/tests/__init__.py oslo_policy/tests/base.py oslo_policy/tests/test_cache_handler.py oslo_policy/tests/test_checks.py oslo_policy/tests/test_external.py oslo_policy/tests/test_fixtures.py oslo_policy/tests/test_generator.py oslo_policy/tests/test_opts.py oslo_policy/tests/test_parser.py oslo_policy/tests/test_policy.py oslo_policy/tests/test_shell.py oslo_policy/tests/test_sphinxext.py oslo_policy/tests/test_sphinxpolicygen.py oslo_policy/tests/token_fixture.py 
releasenotes/notes/Fix-map-system-scope-for-creds-dict-e4cbec2f7495f22e.yaml releasenotes/notes/Pass-target-dict-to-oslopolicy-checker-87185d40aec413ee.yaml releasenotes/notes/add-deprecated-metadata-to-DeprecatedRule-79d2e8a3f5d11743.yaml releasenotes/notes/add-policy-convert-json-to-yaml-tool-3c93604aee79f58a.yaml releasenotes/notes/add-policy-upgrade-command-a65bc4f760e5d8b1.yaml releasenotes/notes/add-scope-types-to-sphinxext-cacd845c4575e965.yaml releasenotes/notes/add-sphinxpolicygen-39e2f8fa24930b0c.yaml releasenotes/notes/add_custom_rule_check_plugins-3c15c2c7ca5e.yaml releasenotes/notes/add_reno-3b4ae0789e9c45b4.yaml releasenotes/notes/bug-1779172-c1323c0f647bc44c.yaml releasenotes/notes/bug-1880959-8f1370a59759d40d.yaml releasenotes/notes/bug-1913718-f1b46bbff3231d98.yaml releasenotes/notes/bug-1943584-fc74f9205039883c.yaml releasenotes/notes/deprecate-policy-file-json-format-e1921f15b5d00287.yaml releasenotes/notes/drop-python27-support-9aa06224812cc352.yaml releasenotes/notes/enforce-scope-checks-always-when-rule-has-scope_types-8f983cdf70766e4f.yaml releasenotes/notes/enforce_new_defaults-6ae17d8b8d166a2c.yaml releasenotes/notes/enforce_scope_types-1e92f6a34e4173ef.yaml releasenotes/notes/expand-cli-docs-02c2f13adbe251c0.yaml releasenotes/notes/fix-bug-1914095-fa71d81c9639ba94.yaml releasenotes/notes/fix-rendering-for-deprecated-rules-d465292e4155f483.yaml releasenotes/notes/list-redundant-deprecation-warnings-f84a06133efdaedd.yaml releasenotes/notes/oslo-policy-descriptive-support-3ee688c5fa48d751.yaml releasenotes/notes/policy-check-performance-fbad83c7a4afd7d7.yaml releasenotes/notes/policy-file-validator-906d5cff864a2d51.yaml releasenotes/source/conf.py releasenotes/source/index.rst releasenotes/source/newton.rst releasenotes/source/ocata.rst releasenotes/source/pike.rst releasenotes/source/queens.rst releasenotes/source/rocky.rst releasenotes/source/stein.rst releasenotes/source/train.rst releasenotes/source/unreleased.rst releasenotes/source/ussuri.rst releasenotes/source/victoria.rst releasenotes/source/wallaby.rst releasenotes/source/xena.rst releasenotes/source/_static/.placeholder releasenotes/source/_templates/.placeholder releasenotes/source/locale/en_GB/LC_MESSAGES/releasenotes.po releasenotes/source/locale/fr/LC_MESSAGES/releasenotes.po sample_data/auth_v3_token_admin.json sample_data/auth_v3_token_member.json sample_data/auth_v3_token_system_admin.json././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645118009.0 oslo.policy-3.11.0/oslo.policy.egg-info/dependency_links.txt0000664000175000017500000000000100000000000024103 0ustar00zuulzuul00000000000000 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645118009.0 oslo.policy-3.11.0/oslo.policy.egg-info/entry_points.txt0000664000175000017500000000120200000000000023326 0ustar00zuulzuul00000000000000[console_scripts] oslopolicy-checker = oslo_policy.shell:main oslopolicy-convert-json-to-yaml = oslo_policy.generator:convert_policy_json_to_yaml oslopolicy-list-redundant = oslo_policy.generator:list_redundant oslopolicy-policy-generator = oslo_policy.generator:generate_policy oslopolicy-policy-upgrade = oslo_policy.generator:upgrade_policy oslopolicy-sample-generator = oslo_policy.generator:generate_sample oslopolicy-validator = oslo_policy.generator:validate_policy [oslo.config.opts] oslo.policy = oslo_policy.opts:list_opts [oslo.policy.rule_checks] http = oslo_policy._external:HttpCheck https = oslo_policy._external:HttpsCheck 
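The ``oslo.policy.rule_checks`` entry points above are how the ``http:`` and ``https:`` check kinds are discovered at runtime; additional kinds can be plugged in through the same namespace. A sketch of such a plugin, mirroring how ``HttpCheck`` is built in ``oslo_policy/_external.py`` (the ``color`` kind and ``mypackage`` are hypothetical):

::

    from oslo_policy import _checks


    class ColorCheck(_checks.Check):
        """Hypothetical check: ``color:<value>`` passes when the target
        carries a matching ``color`` entry.
        """

        def __call__(self, target, creds, enforcer, current_rule=None):
            return target.get('color') == self.match

Advertised from the plugin's own ``setup.cfg`` under ``oslo.policy.rule_checks`` (for example ``color = mypackage.checks:ColorCheck``), the new kind can then be used in check strings such as ``color:blue``.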
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645118009.0 oslo.policy-3.11.0/oslo.policy.egg-info/not-zip-safe0000664000175000017500000000000100000000000022263 0ustar00zuulzuul00000000000000 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645118009.0 oslo.policy-3.11.0/oslo.policy.egg-info/pbr.json0000664000175000017500000000005600000000000021514 0ustar00zuulzuul00000000000000{"git_version": "b48b711", "is_release": true}././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645118009.0 oslo.policy-3.11.0/oslo.policy.egg-info/requires.txt0000664000175000017500000000024000000000000022431 0ustar00zuulzuul00000000000000PyYAML>=5.1 oslo.config>=6.0.0 oslo.context>=2.22.0 oslo.i18n>=3.15.3 oslo.serialization!=2.19.1,>=2.18.0 oslo.utils>=3.40.0 requests>=2.14.2 stevedore>=1.20.0 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645118009.0 oslo.policy-3.11.0/oslo.policy.egg-info/top_level.txt0000664000175000017500000000001400000000000022562 0ustar00zuulzuul00000000000000oslo_policy ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1645118009.8515182 oslo.policy-3.11.0/oslo_policy/0000775000175000017500000000000000000000000016424 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/__init__.py0000664000175000017500000000000000000000000020523 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/_cache_handler.py0000664000175000017500000000511400000000000021676 0ustar00zuulzuul00000000000000# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import errno import logging import os from oslo_config import cfg LOG = logging.getLogger(__name__) def read_cached_file(cache, filename, force_reload=False): """Read from a file if it has been modified. :param cache: dictionary to hold opaque cache. :param filename: the file path to read. :param force_reload: Whether to reload the file. :returns: A tuple with a boolean specifying if the data is fresh or not. 
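    Example (the path is illustrative)::

        cache = {}
        reloaded, data = read_cached_file(cache, '/etc/myservice/policy.yaml')
        # Until the file's mtime changes, later calls return (False, data)
        # without re-reading the file.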
""" if force_reload: delete_cached_file(cache, filename) reloaded = False try: mtime = os.path.getmtime(filename) except OSError as err: msg = err.strerror LOG.error('Config file not found %(filename)s: %(msg)s', {'filename': filename, 'msg': msg}) return True, {} cache_info = cache.setdefault(filename, {}) if not cache_info or mtime > cache_info.get('mtime', 0): LOG.debug("Reloading cached file %s", filename) try: with open(filename) as fap: cache_info['data'] = fap.read() except IOError as err: msg = err.strerror err_code = err.errno LOG.error('IO error loading %(filename)s: %(msg)s', {'filename': filename, 'msg': msg}) if err_code == errno.EACCES: raise cfg.ConfigFilesPermissionDeniedError((filename,)) except OSError as err: msg = err.strerror LOG.error('Config file not found %(filename)s: %(msg)s', {'filename': filename, 'msg': msg}) raise cfg.ConfigFilesNotFoundError((filename,)) cache_info['mtime'] = mtime reloaded = True return (reloaded, cache_info['data']) def delete_cached_file(cache, filename): """Delete cached file if present. :param cache: dictionary to hold opaque cache. :param filename: filename to delete """ try: del cache[filename] except KeyError: pass ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/_checks.py0000664000175000017500000002327400000000000020405 0ustar00zuulzuul00000000000000# -*- coding: utf-8 -*- # # Copyright (c) 2015 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import abc import ast import inspect import stevedore registered_checks = {} extension_checks = None def get_extensions(): global extension_checks if extension_checks is None: em = stevedore.ExtensionManager('oslo.policy.rule_checks', invoke_on_load=False) extension_checks = { extension.name: extension.plugin for extension in em } return extension_checks def _check(rule, target, creds, enforcer, current_rule): """Evaluate the rule. This private method is meant to be used by the enforcer to call the rule. It can also be used by built-in checks that have nested rules. We use a private function because it makes it easier to change the API without having an impact on subclasses not defined within the oslo.policy library. We don't put this logic in Enforcer.enforce() and invoke that method recursively because that changes the BaseCheck API to require that the enforcer argument to __call__() be a valid Enforcer instance (as evidenced by all of the breaking unit tests). We don't put this in a private method of BaseCheck because that propagates the problem of extending the list of arguments to __call__() if subclasses change the implementation of the function. :param rule: A check object. :type rule: BaseCheck :param target: Attributes of the object of the operation. :type target: dict :param creds: Attributes of the user performing the operation. :type creds: dict :param enforcer: The Enforcer being used. 
:type enforcer: Enforcer :param current_rule: The name of the policy being checked. :type current_rule: str """ # Evaluate the rule argspec = inspect.getfullargspec(rule.__call__) rule_args = [target, creds, enforcer] # Check if the rule argument must be included or not if len(argspec.args) > 4: rule_args.append(current_rule) return rule(*rule_args) class BaseCheck(metaclass=abc.ABCMeta): """Abstract base class for Check classes.""" scope_types = None @abc.abstractmethod def __str__(self): """String representation of the Check tree rooted at this node.""" pass @abc.abstractmethod def __call__(self, target, cred, enforcer, current_rule=None): """Triggers if instance of the class is called. Performs the check. Returns False to reject the access or a true value (not necessary True) to accept the access. """ pass class FalseCheck(BaseCheck): """A policy check that always returns ``False`` (disallow).""" def __str__(self): """Return a string representation of this check.""" return '!' def __call__(self, target, cred, enforcer, current_rule=None): """Check the policy.""" return False class TrueCheck(BaseCheck): """A policy check that always returns ``True`` (allow).""" def __str__(self): """Return a string representation of this check.""" return '@' def __call__(self, target, cred, enforcer, current_rule=None): """Check the policy.""" return True class Check(BaseCheck): def __init__(self, kind, match): self.kind = kind self.match = match def __str__(self): """Return a string representation of this check.""" return '%s:%s' % (self.kind, self.match) class NotCheck(BaseCheck): def __init__(self, rule): self.rule = rule def __str__(self): """Return a string representation of this check.""" return 'not %s' % self.rule def __call__(self, target, cred, enforcer, current_rule=None): """Check the policy. Returns the logical inverse of the wrapped check. """ return not _check(self.rule, target, cred, enforcer, current_rule) class AndCheck(BaseCheck): def __init__(self, rules): self.rules = rules def __str__(self): """Return a string representation of this check.""" return '(%s)' % ' and '.join(str(r) for r in self.rules) def __call__(self, target, cred, enforcer, current_rule=None): """Check the policy. Requires that all rules accept in order to return True. """ for rule in self.rules: if not _check(rule, target, cred, enforcer, current_rule): return False return True def add_check(self, rule): """Adds rule to be tested. Allows addition of another rule to the list of rules that will be tested. :returns: self :rtype: :class:`.AndCheck` """ self.rules.append(rule) return self class OrCheck(BaseCheck): def __init__(self, rules): self.rules = rules def __str__(self): """Return a string representation of this check.""" return '(%s)' % ' or '.join(str(r) for r in self.rules) def __call__(self, target, cred, enforcer, current_rule=None): """Check the policy. Requires that at least one rule accept in order to return True. """ for rule in self.rules: if _check(rule, target, cred, enforcer, current_rule): return True return False def add_check(self, rule): """Adds rule to be tested. Allows addition of another rule to the list of rules that will be tested. Returns the OrCheck object for convenience. 
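        For example::

            check = OrCheck([RoleCheck('role', 'admin')])
            check.add_check(RoleCheck('role', 'service'))
            # str(check) is now '(role:admin or role:service)'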
""" self.rules.append(rule) return self def pop_check(self): """Pops the last check from the list and returns them :returns: self, the popped check :rtype: :class:`.OrCheck`, class:`.Check` """ check = self.rules.pop() return self, check def register(name, func=None): # Perform the actual decoration by registering the function or # class. Returns the function or class for compliance with the # decorator interface. def decorator(func): registered_checks[name] = func return func # If the function or class is given, do the registration if func: return decorator(func) return decorator @register('rule') class RuleCheck(Check): def __call__(self, target, creds, enforcer, current_rule=None): try: return _check( rule=enforcer.rules[self.match], target=target, creds=creds, enforcer=enforcer, current_rule=current_rule, ) except KeyError: # We don't have any matching rule; fail closed return False @register('role') class RoleCheck(Check): """Check that there is a matching role in the ``creds`` dict.""" def __call__(self, target, creds, enforcer, current_rule=None): try: match = self.match % target except KeyError: # While doing RoleCheck if key not # present in Target return false return False if 'roles' in creds: return match.lower() in [x.lower() for x in creds['roles']] return False @register(None) class GenericCheck(Check): """Check an individual match. Matches look like: - tenant:%(tenant_id)s - role:compute:admin - True:%(user.enabled)s - 'Member':%(role.name)s """ def _find_in_dict(self, test_value, path_segments, match): '''Searches for a match in the dictionary. test_value is a reference inside the dictionary. Since the process is recursive, each call to _find_in_dict will be one level deeper. path_segments is the segments of the path to search. The recursion ends when there are no more segments of path. When specifying a value inside a list, each element of the list is checked for a match. If the value is found within any of the sub lists the check succeeds; The check only fails if the entry is not in any of the sublists. ''' if len(path_segments) == 0: return match == str(test_value) key, path_segments = path_segments[0], path_segments[1:] try: test_value = test_value[key] except KeyError: return False if isinstance(test_value, list): for val in test_value: if self._find_in_dict(val, path_segments, match): return True return False else: return self._find_in_dict(test_value, path_segments, match) def __call__(self, target, creds, enforcer, current_rule=None): try: match = self.match % target except KeyError: # While doing GenericCheck if key not # present in Target return false return False try: # Try to interpret self.kind as a literal test_value = ast.literal_eval(self.kind) return match == str(test_value) except ValueError: pass path_segments = self.kind.split('.') return self._find_in_dict(creds, path_segments, match) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/_external.py0000664000175000017500000001035200000000000020760 0ustar00zuulzuul00000000000000# -*- coding: utf-8 -*- # # Copyright (c) 2015 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import contextlib import copy import os from oslo_policy import _checks from oslo_policy._i18n import _ from oslo_serialization import jsonutils import requests class HttpCheck(_checks.Check): """Check ``http:`` rules by calling to a remote server. This example implementation simply verifies that the response is exactly ``True``. """ def __call__(self, target, creds, enforcer, current_rule=None): url = ('http:' + self.match) % target data, json = self._construct_payload(creds, current_rule, enforcer, target) with contextlib.closing( requests.post(url, json=json, data=data) ) as r: return r.text.lstrip('"').rstrip('"') == 'True' @staticmethod def _construct_payload(creds, current_rule, enforcer, target): # Convert instances of object() in target temporarily to # empty dict to avoid circular reference detection # errors in jsonutils.dumps(). temp_target = copy.deepcopy(target) for key in target.keys(): element = target.get(key) if type(element) is object: temp_target[key] = {} data = json = None if (enforcer.conf.oslo_policy.remote_content_type == 'application/x-www-form-urlencoded'): data = {'rule': jsonutils.dumps(current_rule), 'target': jsonutils.dumps(temp_target), 'credentials': jsonutils.dumps(creds)} else: json = {'rule': current_rule, 'target': temp_target, 'credentials': creds} return data, json class HttpsCheck(HttpCheck): """Check ``https:`` rules by calling to a remote server. This example implementation simply verifies that the response is exactly ``True``. """ def __call__(self, target, creds, enforcer, current_rule=None): url = ('https:' + self.match) % target cert_file = enforcer.conf.oslo_policy.remote_ssl_client_crt_file key_file = enforcer.conf.oslo_policy.remote_ssl_client_key_file ca_crt_file = enforcer.conf.oslo_policy.remote_ssl_ca_crt_file verify_server = enforcer.conf.oslo_policy.remote_ssl_verify_server_crt if cert_file: if not os.path.exists(cert_file): raise RuntimeError( _("Unable to find ssl cert_file : %s") % cert_file) if not os.access(cert_file, os.R_OK): raise RuntimeError( _("Unable to access ssl cert_file : %s") % cert_file) if key_file: if not os.path.exists(key_file): raise RuntimeError( _("Unable to find ssl key_file : %s") % key_file) if not os.access(key_file, os.R_OK): raise RuntimeError( _("Unable to access ssl key_file : %s") % key_file) cert = (cert_file, key_file) if verify_server: if ca_crt_file: if not os.path.exists(ca_crt_file): raise RuntimeError( _("Unable to find ca cert_file : %s") % ca_crt_file) verify_server = ca_crt_file data, json = self._construct_payload(creds, current_rule, enforcer, target) with contextlib.closing( requests.post(url, json=json, data=data, cert=cert, verify=verify_server) ) as r: return r.text.lstrip('"').rstrip('"') == 'True' ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/_i18n.py0000664000175000017500000000147300000000000017721 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """oslo.i18n integration module. See https://docs.openstack.org/oslo.i18n/latest/user/index.html . """ import oslo_i18n _translators = oslo_i18n.TranslatorFactory(domain='oslo_policy') # The primary translation function using the well-known name "_" _ = _translators.primary ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/_parser.py0000664000175000017500000002421100000000000020431 0ustar00zuulzuul00000000000000# -*- coding: utf-8 -*- # # Copyright (c) 2015 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import logging import re from oslo_policy import _checks LOG = logging.getLogger(__name__) def reducer(*tokens): """Decorator for reduction methods. Arguments are a sequence of tokens, in order, which should trigger running this reduction method. """ def decorator(func): # Make sure we have a list of reducer sequences if not hasattr(func, 'reducers'): func.reducers = [] # Add the tokens to the list of reducer sequences func.reducers.append(list(tokens)) return func return decorator class ParseStateMeta(type): """Metaclass for the :class:`.ParseState` class. Facilitates identifying reduction methods. """ def __new__(mcs, name, bases, cls_dict): """Create the class. Injects the 'reducers' list, a list of tuples matching token sequences to the names of the corresponding reduction methods. """ reducers = [] for key, value in cls_dict.items(): if not hasattr(value, 'reducers'): continue for reduction in value.reducers: reducers.append((reduction, key)) cls_dict['reducers'] = reducers return super(ParseStateMeta, mcs).__new__(mcs, name, bases, cls_dict) class ParseState(metaclass=ParseStateMeta): """Implement the core of parsing the policy language. Uses a greedy reduction algorithm to reduce a sequence of tokens into a single terminal, the value of which will be the root of the :class:`Check` tree. .. note:: Error reporting is rather lacking. The best we can get with this parser formulation is an overall "parse failed" error. Fortunately, the policy language is simple enough that this shouldn't be that big a problem. """ def __init__(self): """Initialize the ParseState.""" self.tokens = [] self.values = [] def reduce(self): """Perform a greedy reduction of the token stream. If a reducer method matches, it will be executed, then the :meth:`reduce` method will be called recursively to search for any more possible reductions. 
""" for reduction, methname in self.reducers: if (len(self.tokens) >= len(reduction) and self.tokens[-len(reduction):] == reduction): # Get the reduction method meth = getattr(self, methname) # Reduce the token stream results = meth(*self.values[-len(reduction):]) # Update the tokens and values self.tokens[-len(reduction):] = [r[0] for r in results] self.values[-len(reduction):] = [r[1] for r in results] # Check for any more reductions return self.reduce() def shift(self, tok, value): """Adds one more token to the state. Calls :meth:`reduce`. """ self.tokens.append(tok) self.values.append(value) # Do a greedy reduce... self.reduce() @property def result(self): """Obtain the final result of the parse. :raises ValueError: If the parse failed to reduce to a single result. """ if len(self.values) != 1: raise ValueError('Could not parse rule') return self.values[0] @reducer('(', 'check', ')') @reducer('(', 'and_expr', ')') @reducer('(', 'or_expr', ')') def _wrap_check(self, _p1, check, _p2): """Turn parenthesized expressions into a 'check' token.""" return [('check', check)] @reducer('check', 'and', 'check') def _make_and_expr(self, check1, _and, check2): """Create an 'and_expr'. Join two checks by the 'and' operator. """ return [('and_expr', _checks.AndCheck([check1, check2]))] @reducer('or_expr', 'and', 'check') def _mix_or_and_expr(self, or_expr, _and, check): """Modify the case 'A or B and C'""" or_expr, check1 = or_expr.pop_check() if isinstance(check1, _checks.AndCheck): and_expr = check1 and_expr.add_check(check) else: and_expr = _checks.AndCheck([check1, check]) return [('or_expr', or_expr.add_check(and_expr))] @reducer('and_expr', 'and', 'check') def _extend_and_expr(self, and_expr, _and, check): """Extend an 'and_expr' by adding one more check.""" return [('and_expr', and_expr.add_check(check))] @reducer('check', 'or', 'check') @reducer('and_expr', 'or', 'check') def _make_or_expr(self, check1, _or, check2): """Create an 'or_expr'. Join two checks by the 'or' operator. """ return [('or_expr', _checks.OrCheck([check1, check2]))] @reducer('or_expr', 'or', 'check') def _extend_or_expr(self, or_expr, _or, check): """Extend an 'or_expr' by adding one more check.""" return [('or_expr', or_expr.add_check(check))] @reducer('not', 'check') def _make_not_expr(self, _not, check): """Invert the result of another check.""" return [('check', _checks.NotCheck(check))] def _parse_check(rule): """Parse a single base check rule into an appropriate Check object.""" # Handle the special checks if rule == '!': return _checks.FalseCheck() elif rule == '@': return _checks.TrueCheck() try: kind, match = rule.split(':', 1) except Exception: LOG.exception('Failed to understand rule %s', rule) # If the rule is invalid, we'll fail closed return _checks.FalseCheck() # Find what implements the check extension_checks = _checks.get_extensions() if kind in extension_checks: return extension_checks[kind](kind, match) elif kind in _checks.registered_checks: return _checks.registered_checks[kind](kind, match) elif None in _checks.registered_checks: return _checks.registered_checks[None](kind, match) else: LOG.error('No handler for matches of kind %s', kind) return _checks.FalseCheck() def _parse_list_rule(rule): """Translates the old list-of-lists syntax into a tree of Check objects. Provided for backwards compatibility. 
""" # Empty rule defaults to True if not rule: return _checks.TrueCheck() # Outer list is joined by "or"; inner list by "and" or_list = [] for inner_rule in rule: # Skip empty inner lists if not inner_rule: continue # Handle bare strings if isinstance(inner_rule, str): inner_rule = [inner_rule] # Parse the inner rules into Check objects and_list = [_parse_check(r) for r in inner_rule] # Append the appropriate check to the or_list if len(and_list) == 1: or_list.append(and_list[0]) else: or_list.append(_checks.AndCheck(and_list)) # If we have only one check, omit the "or" if not or_list: return _checks.FalseCheck() elif len(or_list) == 1: return or_list[0] return _checks.OrCheck(or_list) # Used for tokenizing the policy language _tokenize_re = re.compile(r'\s+') def _parse_tokenize(rule): """Tokenizer for the policy language. Most of the single-character tokens are specified in the _tokenize_re; however, parentheses need to be handled specially, because they can appear inside a check string. Thankfully, those parentheses that appear inside a check string can never occur at the very beginning or end ("%(variable)s" is the correct syntax). """ for tok in _tokenize_re.split(rule): # Skip empty tokens if not tok or tok.isspace(): continue # Handle leading parens on the token clean = tok.lstrip('(') for i in range(len(tok) - len(clean)): yield '(', '(' # If it was only parentheses, continue if not clean: continue else: tok = clean # Handle trailing parens on the token clean = tok.rstrip(')') trail = len(tok) - len(clean) # Yield the cleaned token lowered = clean.lower() if lowered in ('and', 'or', 'not'): # Special tokens yield lowered, clean elif clean: # Not a special token, but not composed solely of ')' if len(tok) >= 2 and ((tok[0], tok[-1]) in [('"', '"'), ("'", "'")]): # It's a quoted string yield 'string', tok[1:-1] else: yield 'check', _parse_check(clean) # Yield the trailing parens for i in range(trail): yield ')', ')' def _parse_text_rule(rule): """Parses policy to the tree. Translates a policy written in the policy language into a tree of Check objects. """ # Empty rule means always accept if not rule: return _checks.TrueCheck() # Parse the token stream state = ParseState() for tok, value in _parse_tokenize(rule): state.shift(tok, value) try: return state.result except ValueError: # Couldn't parse the rule LOG.exception('Failed to understand rule %s', rule) # Fail closed return _checks.FalseCheck() def parse_rule(rule): """Parses a policy rule into a tree of :class:`.Check` objects.""" # If the rule is a string, it's in the policy language if isinstance(rule, str): return _parse_text_rule(rule) return _parse_list_rule(rule) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/fixture.py0000664000175000017500000000423000000000000020463 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import fixtures __all__ = ['HttpCheckFixture', 'HttpsCheckFixture'] class HttpCheckFixture(fixtures.Fixture): """Helps short circuit the external http call""" def __init__(self, return_value=True): """Initialize the fixture. :param return_value: True implies the policy check passed and False implies that the policy check failed :type return_value: boolean """ super(HttpCheckFixture, self).__init__() self.return_value = return_value def setUp(self): super(HttpCheckFixture, self).setUp() def mocked_call(target, cred, enforcer, rule): return self.return_value self.useFixture( fixtures.MonkeyPatch( 'oslo_policy._external.HttpCheck.__call__', mocked_call, ) ) class HttpsCheckFixture(fixtures.Fixture): """Helps short circuit the external http call""" def __init__(self, return_value=True): """Initialize the fixture. :param return_value: True implies the policy check passed and False implies that the policy check failed :type return_value: boolean """ super(HttpsCheckFixture, self).__init__() self.return_value = return_value def setUp(self): super(HttpsCheckFixture, self).setUp() def mocked_call(target, cred, enforcer, rule): return self.return_value self.useFixture( fixtures.MonkeyPatch( 'oslo_policy._external.HttpsCheck.__call__', mocked_call, ) ) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/generator.py0000664000175000017500000005702100000000000020771 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import logging import sys import textwrap import warnings import yaml from oslo_config import cfg from oslo_serialization import jsonutils import stevedore from oslo_policy import policy LOG = logging.getLogger(__name__) GENERATOR_OPTS = [ cfg.StrOpt('output-file', help='Path of the file to write to. Defaults to stdout.'), ] RULE_OPTS = [ cfg.MultiStrOpt('namespace', help='Option namespace(s) under "oslo.policy.policies" in ' 'which to query for options.'), cfg.StrOpt('format', deprecated_for_removal=True, deprecated_since='Victoria', deprecated_reason=""" ``policy_file`` support for JSON formatted file is deprecated. So these tools also deprecate the support of generating or upgrading policy file in JSON format. """, help='Desired format for the output.', default='yaml', choices=['json', 'yaml']), ] ENFORCER_OPTS = [ cfg.StrOpt('namespace', help='Option namespace under "oslo.policy.enforcer" in ' 'which to look for a policy.Enforcer.'), ] UPGRADE_OPTS = [ cfg.StrOpt('policy', required=True, help='Path to the policy file which need to be updated.') ] CONVERT_OPTS = [ cfg.MultiStrOpt('namespace', required=True, help='Option namespace(s) under "oslo.policy.policies" in ' 'which to query for options.'), cfg.StrOpt('policy-file', required=True, help='Path to the policy file which need to be converted to ' 'yaml format.') ] def get_policies_dict(namespaces): """Find the options available via the given namespaces. 
:param namespaces: a list of namespaces registered under 'oslo.policy.policies' :returns: a dict of {namespace1: [rule_default_1, rule_default_2], namespace2: [rule_default_3]...} """ mgr = stevedore.named.NamedExtensionManager( 'oslo.policy.policies', names=namespaces, on_load_failure_callback=on_load_failure_callback, invoke_on_load=True) opts = {ep.name: ep.obj for ep in mgr} return opts def _get_enforcer(namespace): """Find a policy.Enforcer via an entry point with the given namespace. :param namespace: a namespace under oslo.policy.enforcer where the desired enforcer object can be found. :returns: a policy.Enforcer object """ mgr = stevedore.named.NamedExtensionManager( 'oslo.policy.enforcer', names=[namespace], on_load_failure_callback=on_load_failure_callback, invoke_on_load=True) if namespace not in mgr: raise KeyError('Namespace "%s" not found.' % namespace) enforcer = mgr[namespace].obj return enforcer def _format_help_text(description): """Format a comment for a policy based on the description provided. :param description: A string with helpful text. :returns: A line wrapped comment, or blank comment if description is None """ if not description: return '#' formatted_lines = [] paragraph = [] def _wrap_paragraph(lines): return textwrap.wrap(' '.join(lines), 70, initial_indent='# ', subsequent_indent='# ') for line in description.strip().splitlines(): if not line.strip(): # empty line -> line break, so dump anything we have formatted_lines.extend(_wrap_paragraph(paragraph)) formatted_lines.append('#') paragraph = [] elif len(line) == len(line.lstrip()): # no leading whitespace = paragraph, which should be wrapped paragraph.append(line.rstrip()) else: # leading whitespace - literal block, which should not be wrapping if paragraph: # ...however, literal blocks need a new line before them to # delineate things # TODO(stephenfin): Raise an exception here and stop doing # anything else in oslo.policy 2.0 warnings.warn( 'Invalid policy description: literal blocks must be ' 'preceded by a new line. This will raise an exception in ' 'a future version of oslo.policy:\n%s' % description, FutureWarning) formatted_lines.extend(_wrap_paragraph(paragraph)) formatted_lines.append('#') paragraph = [] formatted_lines.append('# %s' % line.rstrip()) if paragraph: # dump anything we might still have in the buffer formatted_lines.extend(_wrap_paragraph(paragraph)) return '\n'.join(formatted_lines) def _format_rule_default_yaml(default, include_help=True, comment_rule=True, add_deprecated_rules=True): """Create a yaml node from policy.RuleDefault or policy.DocumentedRuleDefault. :param default: A policy.RuleDefault or policy.DocumentedRuleDefault object :param comment_rule: By default rules will be commented out in generated yaml format text. If you want to keep few or all rules uncommented then pass this arg as False. :param add_deprecated_rules: Whether to add the deprecated rules in format text. 
:returns: A string containing a yaml representation of the RuleDefault """ text = ('"%(name)s": "%(check_str)s"\n' % {'name': default.name, 'check_str': default.check_str}) if include_help: op = "" if hasattr(default, 'operations'): for operation in default.operations: if operation['method'] and operation['path']: op += ('# %(method)s %(path)s\n' % {'method': operation['method'], 'path': operation['path']}) intended_scope = "" if getattr(default, 'scope_types', None) is not None: intended_scope = ( '# Intended scope(s): ' + ', '.join(default.scope_types) + '\n' ) comment = '#' if comment_rule else '' text = ('%(op)s%(scope)s%(comment)s%(text)s\n' % {'op': op, 'scope': intended_scope, 'comment': comment, 'text': text}) if default.description: text = _format_help_text(default.description) + '\n' + text if add_deprecated_rules and default.deprecated_for_removal: text = ( '# DEPRECATED\n# "%(name)s" has been deprecated since ' '%(since)s.\n%(reason)s\n%(text)s' ) % {'name': default.name, 'since': default.deprecated_since, 'reason': _format_help_text(default.deprecated_reason), 'text': text} elif add_deprecated_rules and default.deprecated_rule: deprecated_reason = ( default.deprecated_rule.deprecated_reason or default.deprecated_reason ) deprecated_since = ( default.deprecated_rule.deprecated_since or default.deprecated_since ) # This issues a deprecation warning but aliases the old policy name # with the new policy name for compatibility. deprecated_text = ( '"%(old_name)s":"%(old_check_str)s" has been deprecated ' 'since %(since)s in favor of "%(name)s":"%(check_str)s".' ) % { 'old_name': default.deprecated_rule.name, 'old_check_str': default.deprecated_rule.check_str, 'since': deprecated_since, 'name': default.name, 'check_str': default.check_str, } text = '%(text)s# DEPRECATED\n%(deprecated_text)s\n%(reason)s\n' % { 'text': text, 'reason': _format_help_text(deprecated_reason), 'deprecated_text': _format_help_text(deprecated_text) } if default.name != default.deprecated_rule.name: text += ('"%(old_name)s": "rule:%(name)s"\n' % {'old_name': default.deprecated_rule.name, 'name': default.name}) text += '\n' return text def _format_rule_default_json(default): """Create a json node from policy.RuleDefault or policy.DocumentedRuleDefault. :param default: A policy.RuleDefault or policy.DocumentedRuleDefault object :returns: A string containing a json representation of the RuleDefault """ return ('"%(name)s": "%(check_str)s"' % {'name': default.name, 'check_str': default.check_str}) def _sort_and_format_by_section(policies, output_format='yaml', include_help=True): """Generate a list of policy section texts The text for a section will be created and returned one at a time. The sections are sorted first to provide for consistent output. Text is created in yaml format. This is done manually because PyYaml does not facilitate outputing comments. :param policies: A dict of {section1: [rule_default_1, rule_default_2], section2: [rule_default_3]} :param output_format: The format of the file to output to. """ for section in sorted(policies.keys()): rule_defaults = policies[section] for rule_default in rule_defaults: if output_format == 'yaml': yield _format_rule_default_yaml(rule_default, include_help=include_help) elif output_format == 'json': LOG.warning(policy.WARN_JSON) yield _format_rule_default_json(rule_default) def _generate_sample(namespaces, output_file=None, output_format='yaml', include_help=True): """Generate a sample policy file. 
List all of the policies available via the namespace specified in the given configuration and write them to the specified output file. :param namespaces: a list of namespaces registered under 'oslo.policy.policies'. Stevedore will look here for policy options. :param output_file: The path of a file to output to. stdout used if None. :param output_format: The format of the file to output to. :param include_help: True, generates a sample-policy file with help text along with rules in which everything is commented out. False, generates a sample-policy file with only rules. """ policies = get_policies_dict(namespaces) output_file = (open(output_file, 'w') if output_file else sys.stdout) sections_text = [] for section in _sort_and_format_by_section(policies, output_format, include_help=include_help): sections_text.append(section) if output_format == 'yaml': output_file.writelines(sections_text) elif output_format == 'json': LOG.warning(policy.WARN_JSON) output_file.writelines(( '{\n ', ',\n '.join(sections_text), '\n}\n')) if output_file != sys.stdout: output_file.close() def _generate_policy(namespace, output_file=None): """Generate a policy file showing what will be used. This takes all registered policies and merges them with what's defined in a policy file and outputs the result. That result is the effective policy that will be honored by policy checks. :param output_file: The path of a file to output to. stdout used if None. """ enforcer = _get_enforcer(namespace) # Ensure that files have been parsed enforcer.load_rules() file_rules = [policy.RuleDefault(name, default.check_str) for name, default in enforcer.file_rules.items()] registered_rules = [policy.RuleDefault(name, default.check_str) for name, default in enforcer.registered_rules.items() if name not in enforcer.file_rules] policies = {'rules': file_rules + registered_rules} output_file = (open(output_file, 'w') if output_file else sys.stdout) for section in _sort_and_format_by_section(policies, include_help=False): output_file.write(section) if output_file != sys.stdout: output_file.close() def _list_redundant(namespace): """Generate a list of configured policies which match defaults. This checks all policies loaded from policy files and checks to see if they match registered policies. If so then it is redundant to have them defined in a policy file and operators should consider removing them. """ enforcer = _get_enforcer(namespace) # NOTE(bnemec): We don't want to see policy deprecation warnings in the # output of this tool. They tend to overwhelm the output that the user # actually cares about, and checking for deprecations isn't the purpose of # this tool. enforcer.suppress_deprecation_warnings = True # Ensure that files have been parsed enforcer.load_rules() for name, file_rule in enforcer.file_rules.items(): reg_rule = enforcer.registered_rules.get(name) if reg_rule: if file_rule == reg_rule: print(reg_rule) def _validate_policy(namespace): """Perform basic sanity checks on a policy file Checks for the following errors in the configured policy file: * A missing policy file * Rules which have invalid syntax * Rules which reference non-existent other rules * Rules which form a cyclical reference with another rule * Rules which do not exist in the specified namespace :param namespace: The name under which the oslo.policy enforcer is registered. :returns: 0 if all policies validated correctly, 1 if not. 
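    Typically reached through the ``oslopolicy-validator`` console script,
    for example (the config file path and namespace are illustrative)::

        oslopolicy-validator --config-file /etc/nova/nova.conf --namespace nova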
""" return_code = 0 enforcer = _get_enforcer(namespace) # NOTE(bnemec): We don't want to see policy deprecation warnings in the # output of this tool. They tend to overwhelm the output that the user # actually cares about. If we check for deprecated rules in this tool, # we need to do it another way. enforcer.suppress_deprecation_warnings = True # Disable logging from the parser code. We'll be printing any errors we # find below. logging.disable(logging.ERROR) # Ensure that files have been parsed enforcer.load_rules() if enforcer._informed_no_policy_file: print('Configured policy file "%s" not found' % enforcer.policy_file) # If the policy file is completely missing then the rest of our checks # don't make sense. return 1 # Re-enable logging so we get messages for things like cyclical references logging.disable(logging.NOTSET) result = enforcer.check_rules() if not result: print('Invalid rules found') return_code = 1 # TODO(bnemec): Allow this to handle policy_dir with open(cfg.CONF.oslo_policy.policy_file) as f: unparsed_policies = yaml.safe_load(f.read()) for name, file_rule in enforcer.file_rules.items(): reg_rule = enforcer.registered_rules.get(name) if reg_rule is None: print('Unknown rule found in policy file:', name) return_code = 1 # If a rule has invalid syntax it will be forced to '!'. If the literal # rule from the policy file isn't '!' then this means there was an # error parsing it. if str(enforcer.rules[name]) == '!' and unparsed_policies[name] != '!': print('Failed to parse rule:', unparsed_policies[name]) return_code = 1 return return_code def _convert_policy_json_to_yaml(namespace, policy_file, output_file=None): with open(policy_file, 'r') as rule_data: file_policies = jsonutils.loads(rule_data.read()) yaml_format_rules = [] default_policies = get_policies_dict(namespace) for section in sorted(default_policies): default_rules = default_policies[section] for default_rule in default_rules: if default_rule.name not in file_policies: continue file_rule_check_str = file_policies.pop(default_rule.name) # Some rules might be still RuleDefault object so let's prepare # empty 'operations' list and rule name as description for # those. operations = [{ 'method': '', 'path': '' }] if hasattr(default_rule, 'operations'): operations = default_rule.operations # Converting JSON file rules to DocumentedRuleDefault rules so # that we can convert the JSON file to YAML including # descriptions which is what 'oslopolicy-sample-generator' # tool does. file_rule = policy.DocumentedRuleDefault( default_rule.name, file_rule_check_str, default_rule.description or default_rule.name, operations, default_rule.deprecated_rule, default_rule.deprecated_for_removal, default_rule.deprecated_reason, default_rule.deprecated_since, scope_types=default_rule.scope_types) if file_rule == default_rule: rule_text = _format_rule_default_yaml( file_rule, add_deprecated_rules=False) else: # NOTE(gmann): If json file rule is not same as default # means rule is overridden then do not comment out it in # yaml file. rule_text = _format_rule_default_yaml( file_rule, comment_rule=False, add_deprecated_rules=False) yaml_format_rules.append(rule_text) extra_rules_text = ("# WARNING: Below rules are either deprecated rules\n" "# or extra rules in policy file, it is strongly\n" "# recommended to switch to new rules.\n") # NOTE(gmann): If policy json file still using the deprecated rules which # will not be present in default rules list. 
Or it can be case of any # extra rule (old rule which is now removed) present in json file. # so let's keep these as it is (not commented out) to avoid breaking # existing deployment. if file_policies: yaml_format_rules.append(extra_rules_text) for file_rule, check_str in file_policies.items(): rule_text = ('"%(name)s": "%(check_str)s"\n' % {'name': file_rule, 'check_str': check_str}) yaml_format_rules.append(rule_text) if output_file: with open(output_file, 'w') as fh: fh.writelines(yaml_format_rules) else: sys.stdout.writelines(yaml_format_rules) def on_load_failure_callback(*args, **kwargs): raise def _check_for_namespace_opt(conf): # NOTE(bnemec): This opt is required, but due to lp#1849518 we need to # make it optional while our consumers migrate to the new method of # parsing cli args. Making the arg itself optional and explicitly checking # for it in the tools will allow us to migrate projects without breaking # anything. Once everyone has migrated, we can make the arg required again # and remove this check. if conf.namespace is None: raise cfg.RequiredOptError('namespace', 'DEFAULT') def generate_sample(args=None, conf=None): logging.basicConfig(level=logging.WARN) # Allow the caller to pass in a local conf object for unit testing if conf is None: conf = cfg.CONF conf.register_cli_opts(GENERATOR_OPTS + RULE_OPTS) conf.register_opts(GENERATOR_OPTS + RULE_OPTS) conf(args) _check_for_namespace_opt(conf) _generate_sample(conf.namespace, conf.output_file, conf.format) def generate_policy(args=None): logging.basicConfig(level=logging.WARN) conf = cfg.CONF conf.register_cli_opts(GENERATOR_OPTS + ENFORCER_OPTS) conf.register_opts(GENERATOR_OPTS + ENFORCER_OPTS) conf(args) _check_for_namespace_opt(conf) _generate_policy(conf.namespace, conf.output_file) def _upgrade_policies(policies, default_policies): old_policies_keys = list(policies.keys()) for section in sorted(default_policies.keys()): rule_defaults = default_policies[section] for rule_default in rule_defaults: if (rule_default.deprecated_rule and rule_default.deprecated_rule.name in old_policies_keys): policies[rule_default.name] = policies.pop( rule_default.deprecated_rule.name) LOG.info('The name of policy %(old_name)s has been upgraded to' '%(new_name)', {'old_name': rule_default.deprecated_rule.name, 'new_name': rule_default.name}) def upgrade_policy(args=None, conf=None): logging.basicConfig(level=logging.WARN) # Allow the caller to pass in a local conf object for unit testing if conf is None: conf = cfg.CONF conf.register_cli_opts(GENERATOR_OPTS + RULE_OPTS + UPGRADE_OPTS) conf.register_opts(GENERATOR_OPTS + RULE_OPTS + UPGRADE_OPTS) conf(args) _check_for_namespace_opt(conf) with open(conf.policy, 'r') as input_data: policies = policy.parse_file_contents(input_data.read()) default_policies = get_policies_dict(conf.namespace) _upgrade_policies(policies, default_policies) if conf.output_file: with open(conf.output_file, 'w') as fh: if conf.format == 'yaml': yaml.safe_dump(policies, fh, default_flow_style=False) elif conf.format == 'json': LOG.warning(policy.WARN_JSON) jsonutils.dump(policies, fh, indent=4) else: if conf.format == 'yaml': sys.stdout.write(yaml.safe_dump(policies, default_flow_style=False)) elif conf.format == 'json': LOG.warning(policy.WARN_JSON) sys.stdout.write(jsonutils.dumps(policies, indent=4)) def list_redundant(args=None): logging.basicConfig(level=logging.WARN) conf = cfg.CONF conf.register_cli_opts(ENFORCER_OPTS) conf.register_opts(ENFORCER_OPTS) conf(args) _check_for_namespace_opt(conf) 
_list_redundant(conf.namespace) def validate_policy(args=None): logging.basicConfig(level=logging.WARN) conf = cfg.CONF conf.register_cli_opts(ENFORCER_OPTS) conf.register_opts(ENFORCER_OPTS) conf(args) sys.exit(_validate_policy(conf.namespace)) def convert_policy_json_to_yaml(args=None, conf=None): logging.basicConfig(level=logging.WARN) # Allow the caller to pass in a local conf object for unit testing if conf is None: conf = cfg.CONF conf.register_cli_opts(GENERATOR_OPTS + CONVERT_OPTS) conf.register_opts(GENERATOR_OPTS + CONVERT_OPTS) conf(args) _check_for_namespace_opt(conf) _convert_policy_json_to_yaml(conf.namespace, conf.policy_file, conf.output_file) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1645118009.8395183 oslo.policy-3.11.0/oslo_policy/locale/0000775000175000017500000000000000000000000017663 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1645118009.8395183 oslo.policy-3.11.0/oslo_policy/locale/en_GB/0000775000175000017500000000000000000000000020635 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1645118009.8515182 oslo.policy-3.11.0/oslo_policy/locale/en_GB/LC_MESSAGES/0000775000175000017500000000000000000000000022422 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/locale/en_GB/LC_MESSAGES/oslo_policy.po0000664000175000017500000001056700000000000025326 0ustar00zuulzuul00000000000000# OpenStack Infra , 2015. #zanata # Andi Chandler , 2016. #zanata # Andi Chandler , 2017. #zanata # Andi Chandler , 2018. #zanata msgid "" msgstr "" "Project-Id-Version: oslo.policy VERSION\n" "Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" "POT-Creation-Date: 2018-08-07 10:53+0000\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "PO-Revision-Date: 2018-08-08 09:50+0000\n" "Last-Translator: Andi Chandler \n" "Language-Team: English (United Kingdom)\n" "Language: en_GB\n" "X-Generator: Zanata 4.3.3\n" "Plural-Forms: nplurals=2; plural=(n != 1)\n" #, python-format msgid "%(rule)s is disallowed by policy" msgstr "%(rule)s is disallowed by policy" msgid "Absolute path client key file REST based policy check" msgstr "Absolute path client key file REST based policy check" msgid "Absolute path to ca cert file for REST based policy check" msgstr "Absolute path to ca cert file for REST based policy check" msgid "Absolute path to client cert for REST based policy check" msgstr "Absolute path to client cert for REST based policy check" msgid "Content Type to send and receive data for REST based policy check" msgstr "Content Type to send and receive data for REST based policy check" msgid "Default rule. Enforced when a requested rule is not found." msgstr "Default rule. Enforced when a requested rule is not found." msgid "" "Directories where policy configuration files are stored. They can be " "relative to any directory in the search path defined by the config_dir " "option, or absolute paths. The file defined by policy_file must exist for " "these directories to be searched. Missing or empty directories are ignored." msgstr "" "Directories where policy configuration files are stored. They can be " "relative to any directory in the search path defined by the config_dir " "option, or absolute paths. 
The file defined by policy_file must exist for " "these directories to be searched. Missing or empty directories are ignored." #, python-format msgid "Invalid context object: %(error)s." msgstr "Invalid context object: %(error)s." #, python-format msgid "Invalid policy rule default: %(error)s." msgstr "Invalid policy rule default: %(error)s." #, python-format msgid "Policies %(names)s are not well defined. Check logs for more details." msgstr "Policies %(names)s are not well defined. Check logs for more details." #, python-format msgid "Policy %(name)s has not been registered" msgstr "Policy %(name)s has not been registered" #, python-format msgid "Policy %(name)s is already registered" msgstr "Policy %(name)s is already registered" #, python-format msgid "Rules must be an instance of dict or Rules, got %s instead" msgstr "Rules must be an instance of dict or Rules, got %s instead" msgid "The file that defines policies." msgstr "The file that defines policies." msgid "" "This option controls whether or not to enforce scope when evaluating " "policies. If ``True``, the scope of the token used in the request is " "compared to the ``scope_types`` of the policy being enforced. If the scopes " "do not match, an ``InvalidScope`` exception will be raised. If ``False``, a " "message will be logged informing operators that policies are being invoked " "with mismatching scope." msgstr "" "This option controls whether or not to enforce scope when evaluating " "policies. If ``True``, the scope of the token used in the request is " "compared to the ``scope_types`` of the policy being enforced. If the scopes " "do not match, an ``InvalidScope`` exception will be raised. If ``False``, a " "message will be logged informing operators that policies are being invoked " "with mismatching scope." #, python-format msgid "Unable to access ssl cert_file : %s" msgstr "Unable to access SSL cert_file : %s" #, python-format msgid "Unable to access ssl key_file : %s" msgstr "Unable to access SSL key_file : %s" #, python-format msgid "Unable to find ca cert_file : %s" msgstr "Unable to find ca cert_file : %s" #, python-format msgid "Unable to find ssl cert_file : %s" msgstr "Unable to find SSL cert_file : %s" #, python-format msgid "Unable to find ssl key_file : %s" msgstr "Unable to find SSL key_file : %s" msgid "server identity verification for REST based policy check" msgstr "server identity verification for REST based policy check" ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/opts.py0000664000175000017500000001436500000000000017774 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy from oslo_config import cfg from oslo_policy._i18n import _ __all__ = [ 'list_opts', 'set_defaults', ] _option_group = 'oslo_policy' _options = [ cfg.BoolOpt('enforce_scope', default=False, help=_('This option controls whether or not to enforce scope ' 'when evaluating policies. 
If ``True``, the scope of ' 'the token used in the request is compared to the ' '``scope_types`` of the policy being enforced. If the ' 'scopes do not match, an ``InvalidScope`` exception ' 'will be raised. If ``False``, a message will be ' 'logged informing operators that policies are being ' 'invoked with mismatching scope.')), cfg.BoolOpt('enforce_new_defaults', default=False, help=_('This option controls whether or not to use old ' 'deprecated defaults when evaluating policies. If ' '``True``, the old deprecated defaults are not going ' 'to be evaluated. This means if any existing token is ' 'allowed for old defaults but is disallowed for new ' 'defaults, it will be disallowed. It is encouraged to ' 'enable this flag along with the ``enforce_scope`` ' 'flag so that you can get the benefits of new defaults ' 'and ``scope_type`` together. If ``False``, the ' 'deprecated policy check string is logically OR\'d ' 'with the new policy check string, allowing for a ' 'graceful upgrade experience between releases with ' 'new policies, which is the default behavior.')), cfg.StrOpt('policy_file', default='policy.json', help=_('The relative or absolute path of a file that maps ' 'roles to permissions for a given service. Relative ' 'paths must be specified in relation to the ' 'configuration file setting this option.'), deprecated_group='DEFAULT'), cfg.StrOpt('policy_default_rule', default='default', help=_('Default rule. Enforced when a requested rule is not ' 'found.'), deprecated_group='DEFAULT'), cfg.MultiStrOpt('policy_dirs', default=['policy.d'], help=_('Directories where policy configuration files are ' 'stored. They can be relative to any directory ' 'in the search path defined by the config_dir ' 'option, or absolute paths. The file defined by ' 'policy_file must exist for these directories to ' 'be searched. Missing or empty directories are ' 'ignored.'), deprecated_group='DEFAULT'), cfg.StrOpt('remote_content_type', choices=('application/x-www-form-urlencoded', 'application/json'), default='application/x-www-form-urlencoded', help=_("Content Type to send and receive data for " "REST based policy check")), cfg.BoolOpt('remote_ssl_verify_server_crt', help=_("server identity verification for REST based " "policy check"), default=False), cfg.StrOpt('remote_ssl_ca_crt_file', help=_("Absolute path to ca cert file for REST based " "policy check")), cfg.StrOpt('remote_ssl_client_crt_file', help=_("Absolute path to client cert for REST based " "policy check")), cfg.StrOpt('remote_ssl_client_key_file', help=_("Absolute path client key file REST based " "policy check")), ] def list_opts(): """Return a list of oslo.config options available in the library. The returned list includes all oslo.config options which may be registered at runtime by the library. Each element of the list is a tuple. The first element is the name of the group under which the list of elements in the second element will be registered. A group name of None corresponds to the [DEFAULT] group in config files. This function is also discoverable via the 'oslo_messaging' entry point under the 'oslo.config.opts' namespace. The purpose of this is to allow tools like the Oslo sample config file generator to discover the options exposed to users by this library. :returns: a list of (group_name, opts) tuples """ return [(_option_group, copy.deepcopy(_options))] def _register(conf): """Register the policy options. We do this in a few places, so use a function to ensure it is done consistently. 
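    Consumers normally do not call this directly; it happens as a side
    effect of :func:`set_defaults` below or of constructing an
    :class:`oslo_policy.policy.Enforcer`. A minimal sketch of the usual
    entry point (the ``policy.yaml`` file name here is only an example)::

        from oslo_config import cfg
        from oslo_policy import opts

        CONF = cfg.CONF
        # Registers the [oslo_policy] options on CONF and overrides the
        # default policy file name for this service.
        opts.set_defaults(CONF, policy_file='policy.yaml')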
""" conf.register_opts(_options, group=_option_group) def set_defaults(conf, policy_file=None, **kwargs): """Set defaults for configuration variables. Overrides default options values. :param conf: Configuration object, managed by the caller. :type conf: oslo.config.cfg.ConfigOpts :param policy_file: The base filename for the file that defines policies. :type policy_file: unicode :param kwargs: Any other configuration variable and their new default value. """ _register(conf) if policy_file is not None: cfg.set_defaults(_options, policy_file=policy_file) if kwargs: cfg.set_defaults(_options, **kwargs) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/policy.py0000664000175000017500000017141700000000000020310 0ustar00zuulzuul00000000000000# -*- coding: utf-8 -*- # # Copyright (c) 2012 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Common Policy Engine Implementation Policies are expressed as a target and an associated rule:: "": The `target` is specific to the service that is conducting policy enforcement. Typically, the target refers to an API call. For the `` part, see `Policy Rule Expressions`. Policy Rule Expressions ~~~~~~~~~~~~~~~~~~~~~~~ Policy rules can be expressed in one of two forms: a string written in the new policy language or a list of lists. The string format is preferred since it's easier for most people to understand. In the policy language, each check is specified as a simple "a:b" pair that is matched to the correct class to perform that check: +--------------------------------+------------------------------------------+ | TYPE | SYNTAX | +================================+==========================================+ |User's Role | role:admin | +--------------------------------+------------------------------------------+ |Rules already defined on policy | rule:admin_required | +--------------------------------+------------------------------------------+ |Against URLs¹ | http://my-url.org/check | +--------------------------------+------------------------------------------+ |User attributes² | project_id:%(target.project.id)s | +--------------------------------+------------------------------------------+ |Strings | - :'xpto2035abc' | | | - 'myproject': | +--------------------------------+------------------------------------------+ | | - project_id:xpto2035abc | |Literals | - domain_id:20 | | | - True:%(user.enabled)s | +--------------------------------+------------------------------------------+ ¹URL checking must return ``True`` to be valid ²User attributes (obtained through the token): user_id, domain_id or project_id Conjunction operators ``and`` and ``or`` are available, allowing for more expressiveness in crafting policies. 
For example:: "role:admin or (project_id:%(project_id)s and role:projectadmin)" The policy language also has the ``not`` operator, allowing a richer policy rule:: "project_id:%(project_id)s and not role:dunce" Operator precedence is below: +------------+-------------+-------------+ | PRECEDENCE | TYPE | EXPRESSION | +============+=============+=============+ | 4 | Grouping | (...) | +------------+-------------+-------------+ | 3 | Logical NOT | not ... | +------------+-------------+-------------+ | 2 | Logical AND | ... and ... | +------------+-------------+-------------+ | 1 | Logical OR | ... or ... | +------------+-------------+-------------+ Operator with larger precedence number precedes others with smaller numbers. In the list-of-lists representation, each check inside the innermost list is combined as with an "and" conjunction -- for that check to pass, all the specified checks must pass. These innermost lists are then combined as with an "or" conjunction. As an example, take the following rule, expressed in the list-of-lists representation:: [["role:admin"], ["project_id:%(project_id)s", "role:projectadmin"]] Finally, two special policy checks should be mentioned; the policy check "@" will always accept an access, and the policy check "!" will always reject an access. (Note that if a rule is either the empty list (``[]``) or the empty string (``""``), this is equivalent to the "@" policy check.) Of these, the "!" policy check is probably the most useful, as it allows particular rules to be explicitly disabled. Generic Checks ~~~~~~~~~~~~~~ A `generic` check is used to perform matching against attributes that are sent along with the API calls. These attributes can be used by the policy engine (on the right side of the expression), by using the following syntax:: :%(user.id)s The value on the right-hand side is either a string or resolves to a string using regular Python string substitution. The available attributes and values are dependent on the program that is using the common policy engine. All of these attributes (related to users, API calls, and context) can be checked against each other or against constants. It is important to note that these attributes are specific to the service that is conducting policy enforcement. Generic checks can be used to perform policy checks on the following user attributes obtained through a token: - user_id - domain_id or project_id (depending on the token scope) - list of roles held for the given token scope .. note:: Some resources which are exposed by the API do not support policy enforcement by user_id, and only support policy enforcement by project_id. Some global resources do not support policy enforcement by combination of user_id and project_id. For example, a check on the user_id would be defined as:: user_id: Together with the previously shown example, a complete generic check would be:: user_id:%(user.id)s It is also possible to perform checks against other attributes that represent the credentials. This is done by adding additional values to the ``creds`` dict that is passed to the :meth:`~oslo_policy.policy.Enforcer.enforce` method. Special Checks ~~~~~~~~~~~~~~ Special checks allow for more flexibility than is possible using generic checks. The built-in special check types are ``role``, ``rule``, and ``http`` checks. Role Check ^^^^^^^^^^ A ``role`` check is used to check if a specific role is present in the supplied credentials. 
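Before looking at the exact syntax, a hedged end-to-end illustration may
help; the policy name, role, and credential values below are invented for
this example and are not defined by the library::

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault(name='example:get_foo', check_str='role:admin'))

    creds = {'user_id': 'u-1', 'project_id': 'p-1', 'roles': ['admin']}
    # Evaluates to True because the supplied credentials carry the
    # admin role.
    allowed = enforcer.authorize('example:get_foo', {'project_id': 'p-1'},
                                 creds)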
A role check is expressed as:: "role:" Rule Check ^^^^^^^^^^ A :class:`rule check ` is used to reference another defined rule by its name. This allows for common checks to be defined once as a reusable rule, which is then referenced within other rules. It also allows one to define a set of checks as a more descriptive name to aid in readability of policy. A rule check is expressed as:: "rule:" The following example shows a role check that is defined as a rule, which is then used via a rule check:: "admin_required": "role:admin" "": "rule:admin_required" HTTP Check ^^^^^^^^^^ An ``http`` check is used to make an HTTP request to a remote server to determine the results of the check. The target and credentials are passed to the remote server for evaluation. The action is authorized if the remote server returns a response of ``True``. An http check is expressed as:: "http:" It is expected that the target URI contains a string formatting keyword, where the keyword is a key from the target dictionary. An example of an http check where the `name` key from the target is used to construct the URL is would be defined as:: "http://server.test/%(name)s" Registering New Special Checks ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ It is also possible for additional special check types to be registered using the :func:`~oslo_policy.policy.register` function. The following classes can be used as parents for custom special check types: * :class:`~oslo_policy.policy.AndCheck` * :class:`~oslo_policy.policy.NotCheck` * :class:`~oslo_policy.policy.OrCheck` * :class:`~oslo_policy.policy.RuleCheck` Default Rule ~~~~~~~~~~~~ A default rule can be defined, which will be enforced when a rule does not exist for the target that is being checked. By default, the rule associated with the rule name of ``default`` will be used as the default rule. It is possible to use a different rule name as the default rule by setting the ``policy_default_rule`` configuration setting to the desired rule name. """ import collections.abc import copy import logging import os import typing as ty import warnings from oslo_config import cfg from oslo_context import context from oslo_serialization import jsonutils from oslo_utils import strutils import yaml from oslo_policy import _cache_handler from oslo_policy import _checks from oslo_policy._i18n import _ from oslo_policy import _parser from oslo_policy import opts LOG = logging.getLogger(__name__) register = _checks.register """Register a function or :class:`.Check` class as a policy check. :param name: Gives the name of the check type, e.g., "rule", "role", etc. If name is ``None``, a default check type will be registered. :param func: If given, provides the function or class to register. If not given, returns a function taking one argument to specify the function or class to register, allowing use as a decorator. """ Check = _checks.Check """A base class to allow for user-defined policy checks. :param kind: The kind of the check, i.e., the field before the ``:``. :param match: The match of the check, i.e., the field after the ``:``. """ AndCheck = _checks.AndCheck """Implements the "and" logical operator. A policy check that requires that a list of other checks all return True. :param list rules: rules that will be tested. """ NotCheck = _checks.NotCheck """Implements the "not" logical operator. A policy check that inverts the result of another policy check. :param rule: The rule to negate. """ OrCheck = _checks.OrCheck """Implements the "or" operator. 
A policy check that requires that at least one of a list of other checks returns ``True``. :param rules: A list of rules that will be tested. """ RuleCheck = _checks.RuleCheck """Recursively checks credentials based on the defined rules.""" WARN_JSON = ("JSON formatted policy_file support is deprecated since " "Victoria release. You need to use YAML format which " "will be default in future. You can use " "``oslopolicy-convert-json-to-yaml`` tool to convert existing " "JSON-formatted policy file to YAML-formatted in backward " "compatible way: https://docs.openstack.org/oslo.policy/" "latest/cli/oslopolicy-convert-json-to-yaml.html.") class PolicyNotAuthorized(Exception): """Default exception raised for policy enforcement failure.""" def __init__(self, rule, target, creds): msg = _("%(rule)s is disallowed by policy") % {'rule': rule} super(PolicyNotAuthorized, self).__init__(msg) class InvalidScope(Exception): """Raised when the scope of the request mismatches the policy scope.""" def __init__(self, rule, operation_scopes, token_scope): msg = ( "%(rule)s requires a scope of %(operation_scopes)s, request " "was made with %(token_scope)s scope." % { 'rule': rule, 'operation_scopes': operation_scopes, 'token_scope': token_scope } ) super(InvalidScope, self).__init__(msg) class DuplicatePolicyError(Exception): def __init__(self, name): msg = _('Policy %(name)s is already registered') % {'name': name} super(DuplicatePolicyError, self).__init__(msg) class PolicyNotRegistered(Exception): def __init__(self, name): msg = _('Policy %(name)s has not been registered') % {'name': name} super(PolicyNotRegistered, self).__init__(msg) class InvalidDefinitionError(Exception): def __init__(self, names): msg = _('Policies %(names)s are not well defined. Check logs for ' 'more details.') % {'names': names} super(InvalidDefinitionError, self).__init__(msg) class InvalidRuleDefault(Exception): def __init__(self, error): msg = (_('Invalid policy rule default: ' '%(error)s.') % {'error': error}) super(InvalidRuleDefault, self).__init__(msg) class InvalidContextObject(Exception): def __init__(self, error): msg = (_('Invalid context object: ' '%(error)s.') % {'error': error}) super(InvalidContextObject, self).__init__(msg) def pick_default_policy_file(conf, fallback_to_json_file=True): # TODO(gmann): If service changed the default value of # CONF.oslo_policy.policy_file option to 'policy.yaml' then to avoid # breaking any deployment relying on default value, we need to add # this is fallback logic to pick the old default policy file # (policy.json) if exist. We can to remove this fallback logic once # oslo_policy stop supporting the JSON formatted policy file. new_default_policy_file = 'policy.yaml' old_default_policy_file = 'policy.json' policy_file = None if ((conf.oslo_policy.policy_file == new_default_policy_file) and fallback_to_json_file): location = conf.get_location('policy_file', 'oslo_policy').location if conf.find_file(conf.oslo_policy.policy_file): policy_file = conf.oslo_policy.policy_file elif location in [cfg.Locations.opt_default, cfg.Locations.set_default]: LOG.debug('Searching old policy.json file.') if conf.find_file(old_default_policy_file): policy_file = old_default_policy_file if policy_file: LOG.debug( 'Picking default policy file: %s. 
Config location: %s', policy_file, location) return policy_file LOG.debug( 'No default policy file present, picking the configured ' 'one: %s.', conf.oslo_policy.policy_file) # Return overridden policy file return conf.oslo_policy.policy_file def parse_file_contents(data): """Parse the raw contents of a policy file. Parses the contents of a policy file which currently can be in either yaml or json format. Both can be parsed as yaml. :param data: A string containing the contents of a policy file. :returns: A dict of the form ``{'policy_name1': 'policy1', 'policy_name2': 'policy2,...}`` """ try: # NOTE(snikitin): jsonutils.loads() is much faster than # yaml.safe_load(). However jsonutils.loads() parses only JSON while # yaml.safe_load() parses JSON and YAML. So here we try to parse data # by jsonutils.loads() first. In case of failure yaml.safe_load() # will be used instead. parsed = jsonutils.loads(data) # NOTE(gmann): If policy file is loaded in JSON format means # policy_file is JSON formatted so log warning. LOG.warning(WARN_JSON) except ValueError: try: parsed = yaml.safe_load(data) except yaml.YAMLError as e: # For backwards-compatibility, convert yaml error to ValueError, # which is what JSON loader raised. raise ValueError(str(e)) return parsed or {} class Rules(dict): """A store for rules. Handles the default_rule setting directly.""" @classmethod def load(cls, data, default_rule=None): """Allow loading of YAML/JSON rule data. .. versionadded:: 1.5.0 """ parsed_file = parse_file_contents(data) # Parse the rules rules = {k: _parser.parse_rule(v) for k, v in parsed_file.items()} return cls(rules, default_rule) @classmethod def load_json(cls, data, default_rule=None): """Allow loading of YAML/JSON rule data. .. warning:: This method is deprecated as of the 1.5.0 release in favor of :meth:`load` and may be removed in the 2.0 release. """ warnings.warn( 'The load_json() method is deprecated as of the 1.5.0 release in ' 'favor of load() and may be removed in the 2.0 release.', DeprecationWarning) return cls.load(data, default_rule) @classmethod def from_dict(cls, rules_dict, default_rule=None): """Allow loading of rule data from a dictionary.""" # Parse the rules stored in the dictionary rules = {k: _parser.parse_rule(v) for k, v in rules_dict.items()} return cls(rules, default_rule) def __init__(self, rules=None, default_rule=None): """Initialize the Rules store.""" super(Rules, self).__init__(rules or {}) self.default_rule = default_rule def __missing__(self, key): """Implements the default rule handling.""" if isinstance(self.default_rule, dict): raise KeyError(key) # If the default rule isn't actually defined, do something # reasonably intelligent if not self.default_rule: raise KeyError(key) if isinstance(self.default_rule, _checks.BaseCheck): return self.default_rule # We need to check this or we can get infinite recursion if self.default_rule not in self: raise KeyError(key) elif isinstance(self.default_rule, str): return self[self.default_rule] def __str__(self): """Dumps a string representation of the rules.""" # Start by building the canonical strings for the rules out_rules = {} for key, value in self.items(): # Use empty string for singleton TrueCheck instances if isinstance(value, _checks.TrueCheck): out_rules[key] = '' else: out_rules[key] = str(value) # Dump a pretty-printed JSON representation return jsonutils.dumps(out_rules, indent=4) class Enforcer(object): """Responsible for loading and enforcing rules. :param conf: A configuration object. 
:param policy_file: Custom policy file to use, if none is specified, ``conf.oslo_policy.policy_file`` will be used. :param rules: Default dictionary / Rules to use. It will be considered just in the first instantiation. If :meth:`load_rules` with ``force_reload=True``, :meth:`clear` or :meth:`set_rules` with ``overwrite=True`` is called this will be overwritten. :param default_rule: Default rule to use, conf.default_rule will be used if none is specified. :param use_conf: Whether to load rules from cache or config file. :param overwrite: Whether to overwrite existing rules when reload rules from config file. """ def __init__( self, conf, policy_file=None, rules=None, default_rule=None, use_conf=True, overwrite=True, fallback_to_json_file=True, ): self.conf = conf opts._register(conf) self.default_rule = (default_rule or self.conf.oslo_policy.policy_default_rule) self.rules = Rules(rules, self.default_rule) self.registered_rules = {} self.file_rules = {} self.policy_path = None self.policy_file = policy_file or pick_default_policy_file( self.conf, fallback_to_json_file=fallback_to_json_file) self.use_conf = use_conf self._need_check_rule = True self.overwrite = overwrite self._policy_dir_mtimes = {} self._file_cache = {} self._informed_no_policy_file = False # NOTE(gmann): This flag will suppress the warning for # policies changing their default check_str that have # not been overridden by operators. This does not affect the # warning for policy changed their name or deprecated # for removal. self.suppress_default_change_warnings = False # FOR TESTING ONLY self.suppress_deprecation_warnings = False def set_rules(self, rules, overwrite=True, use_conf=False): """Create a new :class:`Rules` based on the provided dict of rules. :param rules: New rules to use. :param overwrite: Whether to overwrite current rules or update them with the new rules. :param use_conf: Whether to reload rules from cache or config file. """ if not isinstance(rules, dict): raise TypeError(_('Rules must be an instance of dict or Rules, ' 'got %s instead') % type(rules)) self.use_conf = use_conf self._need_check_rule = True if overwrite: self.rules = Rules(rules, self.default_rule) else: self.rules.update(rules) def clear(self): """Clears :class:`Enforcer` contents. This will clear this instances rules, policy's cache, file cache and policy's path. """ self.set_rules({}) self.default_rule = None self.policy_path = None self._policy_dir_mtimes = {} self._file_cache.clear() self.registered_rules = {} self.file_rules = {} self._informed_no_policy_file = False self.suppress_default_change_warnings = False self.suppress_deprecation_warnings = False def load_rules(self, force_reload=False): """Loads policy_path's rules. Policy file is cached and will be reloaded if modified. :param force_reload: Whether to reload rules from config file. """ if force_reload: self.use_conf = force_reload policy_file_rules_changed = False if self.use_conf: if not self.policy_path: try: self.policy_path = self._get_policy_path(self.policy_file) except cfg.ConfigFilesNotFoundError: if not self._informed_no_policy_file: LOG.debug('The policy file %s could not be found.', self.policy_file) self._informed_no_policy_file = True if self.policy_path: # If the policy file rules have changed any policy.d rules # also need to be reapplied on top of that change. 
policy_file_rules_changed = self._load_policy_file( self.policy_path, force_reload, overwrite=self.overwrite ) force_reload_policy_dirs = force_reload if policy_file_rules_changed: force_reload_policy_dirs = True existing_policy_dirs = [] for path in self.conf.oslo_policy.policy_dirs: try: absolute_path = self._get_policy_path(path) existing_policy_dirs.append(absolute_path) except cfg.ConfigFilesNotFoundError: continue # If change was made in any policy directory or main policy # file then all policy directories and main file are # re-calculated from scratch. We don't have separate rule sets # for every policy folder, we only have the only rule set in # RAM for all rule configs (self.rules). So it's the only way # to be consistent. if self._is_directory_updated(self._policy_dir_mtimes, absolute_path): force_reload_policy_dirs = True if force_reload_policy_dirs and existing_policy_dirs: # Here we realize that some policy folders or main policy file # were changed and we need to recalculate all rules from # scratch. # If policy_file_rules_changed is True then we know: # 1. all rules were already reset. # 2. rules from main policy file were already applied. # Otherwise main policy file was not changed and rules were not # reset and. So we reset rules and force to re-calculate # rules in main policy file. And after that we apply rules # from every policy directory. if self.policy_path: if not policy_file_rules_changed and self.overwrite: self._load_policy_file(path=self.policy_path, force_reload=True, overwrite=self.overwrite ) elif self.overwrite: self.rules = Rules(default_rule=self.default_rule) for path in existing_policy_dirs: self._walk_through_policy_directory( path, self._load_policy_file, True, False) for default in self.registered_rules.values(): if default.deprecated_for_removal: self._emit_deprecated_for_removal_warning(default) if default.name in self.rules: continue check = default.check if default.deprecated_rule: check = self._handle_deprecated_rule(default) self.rules[default.name] = check # Detect and log obvious incorrect rule definitions if self._need_check_rule: self.check_rules() self._need_check_rule = False def check_rules(self, raise_on_violation=False): """Look for rule definitions that are obviously incorrect.""" undefined_checks = [] cyclic_checks = [] violation = False for name, check in self.rules.items(): if self._undefined_check(check): undefined_checks.append(name) violation = True if self._cycle_check(check): cyclic_checks.append(name) violation = True if undefined_checks: LOG.warning('Policies %(names)s reference a rule that is not ' 'defined.', {'names': undefined_checks}) if cyclic_checks: LOG.warning('Policies %(names)s are part of a cyclical ' 'reference.', {'names': cyclic_checks}) if raise_on_violation and violation: raise InvalidDefinitionError(undefined_checks + cyclic_checks) return not violation def _emit_deprecated_for_removal_warning(self, default): # If the policy is being removed completely, we need to let operators # know that the policy is going to be silently ignored in the future # and they can remove it from their overrides since it isn't being # replaced by another policy. if not self.suppress_deprecation_warnings and \ default.name in self.file_rules: warnings.warn( 'Policy "%(policy)s":"%(check_str)s" was deprecated for ' 'removal in %(release)s. Reason: %(reason)s. Its value may be ' 'silently ignored in the future.' 
% { 'policy': default.name, 'check_str': default.check_str, 'release': default.deprecated_since, 'reason': default.deprecated_reason } ) def _handle_deprecated_rule(self, default): """Handle cases where a policy rule has been deprecated. :param default: an instance of RuleDefault that contains an instance of DeprecatedRule """ deprecated_rule = default.deprecated_rule deprecated_reason = ( deprecated_rule.deprecated_reason or default.deprecated_reason) deprecated_since = ( deprecated_rule.deprecated_since or default.deprecated_since) deprecated_msg = ( 'Policy "%(old_name)s":"%(old_check_str)s" was deprecated in ' '%(release)s in favor of "%(name)s":"%(check_str)s". Reason: ' '%(reason)s. Either ensure your deployment is ready for the new ' 'default or copy/paste the deprecated policy into your policy ' 'file and maintain it manually.' % { 'old_name': deprecated_rule.name, 'old_check_str': deprecated_rule.check_str, 'release': deprecated_since, 'name': default.name, 'check_str': default.check_str, 'reason': deprecated_reason, } ) # Print a warning because the actual policy name is changing. If # operators are relying on an override for foo:bar and it's getting # renamed to foo:create_bar then they need to be able to see that # before they roll out the next release. If the policy name is in # self.file_rules, we know that it's being overridden. if ( deprecated_rule.name != default.name and deprecated_rule.name in self.file_rules ): if not self.suppress_deprecation_warnings: warnings.warn(deprecated_msg) # If the deprecated policy is being overridden and doesn't match # the default deprecated policy, override the new policy's default # with the old check string. This should prevents unwanted exposure # to APIs on upgrade. # There's one exception to this: When we generate a sample policy, # we set the deprecated rule name to reference the new rule. If we # see that the deprecated override rule is just the new rule, then # we shouldn't mess with it. file_rule = self.file_rules[deprecated_rule.name] if ( file_rule.check != deprecated_rule.check and str(file_rule.check) != 'rule:%s' % default.name and default.name not in self.file_rules.keys() ): return self.file_rules[deprecated_rule.name].check # In this case, the default check string is changing. We need to let # operators know that this is going to change. If they don't want to # override it, they are going to have to make sure the right # infrastructure exists before they upgrade. This overrides the new # check with an OrCheck that combines the new and old check_str # attributes from the new and deprecated policies. This will make it so # that deployments don't break on upgrade, but they receive log # messages telling them stuff is going to change if they don't maintain # the policy manually or add infrastructure to their deployment to # support the new policy. # If the enforce_new_defaults flag is True, do not add OrCheck to the # old check_str and enforce only the new defaults. 
if ( not self.conf.oslo_policy.enforce_new_defaults and deprecated_rule.check_str != default.check_str and default.name not in self.file_rules ): if not ( self.suppress_deprecation_warnings or self.suppress_default_change_warnings ): warnings.warn(deprecated_msg) return OrCheck([default.check, deprecated_rule.check]) return default.check def _undefined_check(self, check): """Check if a RuleCheck references an undefined rule.""" if isinstance(check, RuleCheck): if check.match not in self.rules: # Undefined rule return True # An AndCheck or OrCheck is composed of multiple rules so check # each of those. rules = getattr(check, 'rules', None) if rules: for rule in rules: if self._undefined_check(rule): return True return False def _cycle_check(self, check, seen=None): """Check if RuleChecks cycle. Looking for something like:: "foo": "rule:bar" "bar": "rule:foo" :param check: The check to search for. :param seen: A set of previously seen rules, else None. """ if seen is None: seen = set() if isinstance(check, RuleCheck): if check.match in seen: # Cycle found return True seen.add(check.match) if check.match in self.rules: # There can only be a cycle if the referenced rule is defined. if self._cycle_check(self.rules[check.match], seen): return True # An AndCheck or OrCheck is composed of multiple rules so check # each of those. rules = getattr(check, 'rules', None) if rules: for rule in rules: # As there being an OrCheck or AndCheck, a copy of the father's # seen should be called here. In order that the checks in # different branchs are seperated. if self._cycle_check(rule, seen.copy()): return True return False @staticmethod def _is_directory_updated(cache, path): # Get the current modified time and compare it to what is in # the cache and check if the new mtime is greater than what # is in the cache mtime = 0 if os.path.exists(path): if not os.path.isdir(path): raise ValueError('{} is not a directory'.format(path)) # Make a list of all the files files = [path] + [os.path.join(path, file) for file in os.listdir(path)] # Pick the newest one, let's use its time. mtime = os.path.getmtime(max(files, key=os.path.getmtime)) cache_info = cache.setdefault(path, {}) if mtime > cache_info.get('mtime', 0): cache_info['mtime'] = mtime return True return False @staticmethod def _walk_through_policy_directory(path, func, *args): if not os.path.isdir(path): raise ValueError('%s is not a directory' % path) # We do not iterate over sub-directories. policy_files = next(os.walk(path))[2] policy_files.sort() for policy_file in [p for p in policy_files if not p.startswith('.')]: func(os.path.join(path, policy_file), *args) def _record_file_rules(self, data, overwrite=False): """Store a copy of rules loaded from a file. It is useful to be able to distinguish between rules loaded from a file and those registered by a consuming service. In order to do so we keep a record of rules loaded from a file. :param data: The raw contents of a policy file. :param overwrite: If True clear out previously loaded rules. """ if overwrite: self.file_rules = {} parsed_file = parse_file_contents(data) redundant_file_rules = [] for name, check_str in parsed_file.items(): file_rule = RuleDefault(name, check_str) self.file_rules[name] = file_rule reg_rule = self.registered_rules.get(name) if (reg_rule and (file_rule == reg_rule)): redundant_file_rules.append(name) if redundant_file_rules: # NOTE(gmann): Log warning for redundant file rules which # can be detected via 'oslopolicy-list-redundant' tool too. 
LOG.warning("Policy Rules %(names)s specified in policy files " "are the same as the defaults provided by the " "service. You can remove these rules from policy " "files which will make maintenance easier. You can " "detect these redundant rules by " "``oslopolicy-list-redundant`` tool also.", {'names': redundant_file_rules}) def _load_policy_file(self, path, force_reload, overwrite=True): """Load policy rules from the specified policy file. :param path: A path of the policy file to load rules from. :param force_reload: Forcefully reload the policy file content. :param overwrite: Replace policy rules instead of updating them. :return: A bool indicating whether rules have been changed or not. """ rules_changed = False reloaded, data = _cache_handler.read_cached_file( self._file_cache, path, force_reload=force_reload) if reloaded or not self.rules: rules = Rules.load(data, self.default_rule) self.set_rules(rules, overwrite=overwrite, use_conf=True) rules_changed = True self._record_file_rules(data, overwrite) LOG.debug('Reloaded policy file: %(path)s', {'path': path}) return rules_changed def _get_policy_path(self, path): """Locate the policy YAML/JSON data file/path. :param path: It's value can be a full path or related path. When full path specified, this function just returns the full path. When related path specified, this function will search configuration directories to find one that exists. :returns: The policy path :raises: ConfigFilesNotFoundError if the file/path couldn't be located. """ policy_path = self.conf.find_file(path) if policy_path: return policy_path raise cfg.ConfigFilesNotFoundError((path,)) def enforce( self, rule, target, creds, do_raise=False, exc=None, *args, **kwargs, ): """Checks authorization of a rule against the target and credentials. :param rule: The rule to evaluate as a string or :class:`BaseCheck`. :param target: As much information about the object being operated on as possible. The target argument should be a dict instance or an instance of a class that fully supports the Mapping abstract base class. :param creds: As much information about the user performing the action as possible. This parameter can also be an instance of ``oslo_context.context.RequestContext``. :param do_raise: Whether to raise an exception or not if check fails. :param exc: Class of the exception to raise if the check fails. Any remaining arguments passed to :meth:`enforce` (both positional and keyword arguments) will be passed to the exception class. If not specified, :class:`PolicyNotAuthorized` will be used. :return: ``False`` if the policy does not allow the action and ``exc`` is not provided; otherwise, returns a value that evaluates to ``True``. Note: for rules using the "case" expression, this ``True`` value will be the specified string from the expression. """ self.load_rules() if isinstance(creds, context.RequestContext): creds = self._map_context_attributes_into_creds(creds) # NOTE(lbragstad): The oslo.context library exposes the ability to call # a method on RequestContext objects that converts attributes of the # context object to policy values. However, ``to_policy_values()`` # doesn't actually return a dictionary, it's a subclass of # collections.abc.MutableMapping, which behaves like a dictionary but # doesn't pass the type check. 
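        # In practice callers may therefore pass a RequestContext, a plain
        # dict, or the mapping returned by to_policy_values(); any other
        # type is rejected with InvalidContextObject below.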
elif not isinstance(creds, collections.abc.MutableMapping): msg = ( 'Expected type oslo_context.context.RequestContext, dict, or ' 'the output of ' 'oslo_context.context.RequestContext.to_policy_values but ' 'got %(creds_type)s instead' % {'creds_type': type(creds)} ) raise InvalidContextObject(msg) # NOTE(lbragstad): We unfortunately have to special case this # attribute. Originally when the system scope when into oslo.policy, we # checked for a key called 'system' in creds. The oslo.context library # uses `system_scope` instead, and the compatibility between # oslo.policy and oslo.context was an afterthought. We'll have to # support services who've been setting creds['system'], but we can do # that by making sure we populate it with what's in the context object # if it has a system_scope attribute. if creds.get('system_scope'): creds['system'] = creds.get('system_scope') if LOG.isEnabledFor(logging.DEBUG): try: creds_dict = strutils.mask_dict_password(creds) creds_msg = jsonutils.dumps(creds_dict, skipkeys=True, sort_keys=True) except Exception as e: creds_msg = ('cannot format data, exception: %(exp)s' % {'exp': e}) try: target_dict = strutils.mask_dict_password(target) target_msg = jsonutils.dumps(target_dict, skipkeys=True, sort_keys=True) except Exception as e: target_msg = ('cannot format data, exception: %(exp)s' % {'exp': e}) LOG.debug('enforce: rule=%s creds=%s target=%s', rule.__class__ if isinstance(rule, _checks.BaseCheck) else '"%s"' % rule, creds_msg, target_msg) # Allow the rule to be a Check tree if isinstance(rule, _checks.BaseCheck): # If the thing we're given is a Check, we don't know the # name of the rule, so pass None for current_rule. if rule.scope_types: self._enforce_scope(creds, rule) result = _checks._check( rule=rule, target=target, creds=creds, enforcer=self, current_rule=None, ) elif not self.rules: # No rules to reference means we're going to fail closed result = False else: try: to_check = self.rules[rule] except KeyError: LOG.debug('Rule [%s] does not exist', rule) # If the rule doesn't exist, fail closed result = False else: # NOTE(moguimar): suppressing [B105:hardcoded_password_string] # as token_scope is not actually a hardcoded # token. registered_rule = self.registered_rules.get(rule) if registered_rule and registered_rule.scope_types: self._enforce_scope(creds, registered_rule) result = _checks._check( rule=to_check, target=target, creds=creds, enforcer=self, current_rule=rule, ) # If it is False, raise the exception if requested if do_raise and not result: if exc: raise exc(*args, **kwargs) raise PolicyNotAuthorized(rule, target, creds) return result def _enforce_scope(self, creds, rule): # Check the scope of the operation against the possible scope # attributes provided in `creds`. if creds.get('system'): token_scope = 'system' # nosec elif creds.get('domain_id'): token_scope = 'domain' # nosec else: # If the token isn't system-scoped or domain-scoped then # we're dealing with a project-scoped token. token_scope = 'project' # nosec if token_scope not in rule.scope_types: if self.conf.oslo_policy.enforce_scope: raise InvalidScope( rule, rule.scope_types, token_scope ) # If we don't raise an exception we should at least # inform operators about policies that are being used # with improper scopes. msg = ( 'Policy %(rule)s failed scope check. The token ' 'used to make the request was %(token_scope)s ' 'scoped but the policy requires %(policy_scope)s ' 'scope. 
This behavior may change in the future ' 'where using the intended scope is required' % { 'rule': rule, 'token_scope': token_scope, 'policy_scope': rule.scope_types } ) warnings.warn(msg) def _map_context_attributes_into_creds(self, context): creds = {} # port public context attributes into the creds dictionary so long as # the attribute isn't callable context_values = context.to_policy_values() for k, v in context_values.items(): creds[k] = v return creds def register_default(self, default): """Registers a RuleDefault. Adds a RuleDefault to the list of registered rules. Rules must be registered before using the Enforcer.authorize method. :param default: A RuleDefault object to register. """ if default.name in self.registered_rules: raise DuplicatePolicyError(default.name) # NOTE Always make copy of registered rule because policy engine # update these rules in many places (one example is # self._handle_deprecated_rule() ). This will avoid any conflict # in rule object values when running tests in parallel. self.registered_rules[default.name] = copy.deepcopy(default) def register_defaults(self, defaults): """Registers a list of RuleDefaults. Adds each RuleDefault to the list of registered rules. Rules must be registered before using the Enforcer.authorize method. :param default: A list of RuleDefault objects to register. """ for default in defaults: self.register_default(default) def authorize(self, rule, target, creds, do_raise=False, exc=None, *args, **kwargs): """A wrapper around 'enforce' that checks for policy registration. To ensure that a policy being checked has been registered this method should be used rather than enforce. By doing so a project can be sure that all of it's used policies are registered and therefore available for sample file generation. The parameters match the enforce method and a description of them can be found there. """ if rule not in self.registered_rules: raise PolicyNotRegistered(rule) return self.enforce( rule, target, creds, do_raise, exc, *args, **kwargs) class _BaseRule: def __init__(self, name, check_str): self._name = name self._check_str = check_str self._check = _parser.parse_rule(self.check_str) @property def name(self): return self._name @property def check_str(self): return self._check_str @property def check(self): return self._check def __str__(self): return f'"{self.name}": "{self.check_str}"' class RuleDefault(_BaseRule): """A class for holding policy definitions. It is required to supply a name and value at creation time. It is encouraged to also supply a description to assist operators. :param name: The name of the policy. This is used when referencing it from another rule or during policy enforcement. :param check_str: The policy. This is a string defining a policy that conforms to the policy language outlined at the top of the file. :param description: A plain text description of the policy. This will be used to comment sample policy files for use by deployers. :param deprecated_rule: :class:`.DeprecatedRule` :param deprecated_for_removal: indicates whether the policy is planned for removal in a future release. :param deprecated_reason: indicates why this policy is planned for removal in a future release. Silently ignored if deprecated_for_removal is False. :param deprecated_since: indicates which release this policy was deprecated in. Accepts any string, though valid version strings are encouraged. Silently ignored if deprecated_for_removal is False. :param scope_types: A list containing the intended scopes of the operation being done. 
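    A minimal, illustrative instantiation (the name, check string, and scope
    below are placeholders, assuming ``from oslo_policy import policy``)::

        policy.RuleDefault(
            name='service:get_widget',
            check_str='role:admin or project_id:%(project_id)s',
            description='Fetch a widget.',
            scope_types=['project'],
        )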
.. versionchanged:: 1.29 Added *deprecated_rule* parameter. .. versionchanged:: 1.29 Added *deprecated_for_removal* parameter. .. versionchanged:: 1.29 Added *deprecated_reason* parameter. .. versionchanged:: 1.29 Added *deprecated_since* parameter. .. versionchanged:: 1.31 Added *scope_types* parameter. """ def __init__( self, name, check_str, description=None, deprecated_rule=None, deprecated_for_removal=False, deprecated_reason=None, deprecated_since=None, scope_types=None, ): super().__init__(name, check_str) self._description = description self._deprecated_rule = copy.deepcopy(deprecated_rule) or [] self._deprecated_for_removal = deprecated_for_removal self._deprecated_reason = deprecated_reason self._deprecated_since = deprecated_since if self.deprecated_rule: if not isinstance(self.deprecated_rule, DeprecatedRule): raise ValueError( 'deprecated_rule must be a DeprecatedRule object.' ) # if this rule is being deprecated, we need to provide a deprecation # reason here, but if this rule is replacing another rule, then the # deprecation reason belongs on that other rule if deprecated_for_removal: if deprecated_reason is None or deprecated_since is None: raise ValueError( '%(name)s deprecated without deprecated_reason or ' 'deprecated_since. Both must be supplied if deprecating a ' 'policy' % {'name': self.name} ) elif deprecated_rule and (deprecated_reason or deprecated_since): warnings.warn( f'{name} should not configure deprecated_reason or ' f'deprecated_since as these should be configured on the ' f'DeprecatedRule indicated by deprecated_rule. ' f'This will be an error in a future release', DeprecationWarning) if scope_types: msg = 'scope_types must be a list of strings.' if not isinstance(scope_types, list): raise ValueError(msg) for scope_type in scope_types: if not isinstance(scope_type, str): raise ValueError(msg) if scope_types.count(scope_type) > 1: raise ValueError( 'scope_types must be a list of unique strings.' ) self.scope_types = scope_types @property def description(self): return self._description @property def deprecated_rule(self): return self._deprecated_rule @property def deprecated_for_removal(self): return self._deprecated_for_removal @property def deprecated_reason(self): return self._deprecated_reason @property def deprecated_since(self): return self._deprecated_since def __eq__(self, other): """Equality operator. All check objects have a stable string representation. It is used for comparison rather than check_str because multiple check_str's may parse to the same check object. For instance '' and '@' are equivalent and the parsed rule string representation for both is '@'. The description does not play a role in the meaning of the check so it is not considered for equality. """ # Name should match, check should match, and class should be equivalent # or one should be a subclass of the other. if (self.name == other.name and str(self.check) == str(other.check) and (isinstance(self, other.__class__) or isinstance(other, self.__class__))): return True return False class DocumentedRuleDefault(RuleDefault): """A class for holding policy-in-code policy objects definitions This class provides the same functionality as the RuleDefault class, but it also requires additional data about the policy rule being registered. This is necessary so that proper documentation can be rendered based on the attributes of this class. Eventually, all usage of RuleDefault should be converted to use DocumentedRuleDefault. 
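    As a hedged illustration (the name, check string, and path below are
    placeholders), a typical registration with an existing
    :class:`Enforcer` instance looks like::

        from oslo_policy import policy

        enforcer.register_default(
            policy.DocumentedRuleDefault(
                name='service:list_widgets',
                check_str='role:reader',
                description='List widgets.',
                operations=[{'path': '/v1/widgets', 'method': 'GET'}],
            )
        )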
:param operations: List of dicts containing each API URL and corresponding http request method. Example:: operations=[{'path': '/foo', 'method': 'GET'}, {'path': '/some', 'method': 'POST'}] """ def __init__( self, name, check_str, description, operations, deprecated_rule=None, deprecated_for_removal=False, deprecated_reason=None, deprecated_since=None, scope_types=None, ): super().__init__( name, check_str, description, deprecated_rule=deprecated_rule, deprecated_for_removal=deprecated_for_removal, deprecated_reason=deprecated_reason, deprecated_since=deprecated_since, scope_types=scope_types ) self._operations = operations if not self._description: raise InvalidRuleDefault('Description is required') if not isinstance(self._operations, list): raise InvalidRuleDefault('Operations must be a list') if not self._operations: raise InvalidRuleDefault('Operations list must not be empty') for op in self._operations: if 'path' not in op: raise InvalidRuleDefault('Operation must contain a path') if 'method' not in op: raise InvalidRuleDefault('Operation must contain a method') if len(op.keys()) > 2: raise InvalidRuleDefault('Operation contains > 2 keys') @property def description(self): return self._description @property def operations(self): return self._operations class DeprecatedRule(_BaseRule): """Represents a Deprecated policy or rule. Here's how you can use it to change a policy's default role or rule. Assume the following policy exists in code:: from oslo_policy import policy policy.DocumentedRuleDefault( name='foo:create_bar', check_str='role:fizz', description='Create a bar.', operations=[{'path': '/v1/bars', 'method': 'POST'}] ) The next snippet will maintain the deprecated option, but allow ``foo:create_bar`` to default to ``role:bang`` instead of ``role:fizz``:: deprecated_rule = policy.DeprecatedRule( name='foo:create_bar', check_str='role:fizz' deprecated_reason='role:bang is a better default', deprecated_since='N', ) policy.DocumentedRuleDefault( name='foo:create_bar', check_str='role:bang', description='Create a bar.', operations=[{'path': '/v1/bars', 'method': 'POST'}], deprecated_rule=deprecated_rule, ) DeprecatedRule can be used to change the policy name itself. Assume the following policy exists in code:: from oslo_policy import policy policy.DocumentedRuleDefault( name='foo:post_bar', check_str='role:fizz', description='Create a bar.', operations=[{'path': '/v1/bars', 'method': 'POST'}] ) For the sake of consistency, let's say we want to replace ``foo:post_bar`` with ``foo:create_bar``, but keep the same ``check_str`` as the default. We can accomplish this by doing:: deprecated_rule = policy.DeprecatedRule( name='foo:post_bar', check_str='role:fizz' deprecated_reason='foo:create_bar is more consistent', deprecated_since='N', ) policy.DocumentedRuleDefault( name='foo:create_bar', check_str='role:fizz', description='Create a bar.', operations=[{'path': '/v1/bars', 'method': 'POST'}], deprecated_rule=deprecated_rule, ) Finally, let's use DeprecatedRule to break a policy into more granular policies. 
Let's assume the following policy exists in code:: policy.DocumentedRuleDefault( name='foo:bar', check_str='role:bazz', description='Create, read, update, or delete a bar.', operations=[ { 'path': '/v1/bars', 'method': 'POST' }, { 'path': '/v1/bars', 'method': 'GET' }, { 'path': '/v1/bars/{bar_id}', 'method': 'GET' }, { 'path': '/v1/bars/{bar_id}', 'method': 'PATCH' }, { 'path': '/v1/bars/{bar_id}', 'method': 'DELETE' } ] ) Here we can see the same policy is used to protect multiple operations on bars. This prevents operators from being able to assign different roles to different actions that can be taken on bar. For example, what if an operator wanted to require a less restrictive role or rule to list bars but a more restrictive rule to delete them? The following will introduce a policy that helps achieve that and deprecate the original, overly-broad policy:: deprecated_rule = policy.DeprecatedRule( name='foo:bar', check_str='role:bazz' deprecated_reason=( 'foo:bar has been replaced by more granular policies' ), deprecated_since='N', ) policy.DocumentedRuleDefault( name='foo:create_bar', check_str='role:bang', description='Create a bar.', operations=[{'path': '/v1/bars', 'method': 'POST'}], deprecated_rule=deprecated_rule, ) policy.DocumentedRuleDefault( name='foo:list_bars', check_str='role:bazz', description='List bars.', operations=[{'path': '/v1/bars', 'method': 'GET'}], deprecated_rule=deprecated_rule, ) policy.DocumentedRuleDefault( name='foo:get_bar', check_str='role:bazz', description='Get a bar.', operations=[{'path': '/v1/bars/{bar_id}', 'method': 'GET'}], deprecated_rule=deprecated_rule, ) policy.DocumentedRuleDefault( name='foo:update_bar', check_str='role:bang', description='Update a bar.', operations=[{'path': '/v1/bars/{bar_id}', 'method': 'PATCH'}], deprecated_rule=deprecated_rule, ) policy.DocumentedRuleDefault( name='foo:delete_bar', check_str='role:bang', description='Delete a bar.', operations=[{'path': '/v1/bars/{bar_id}', 'method': 'DELETE'}], deprecated_rule=deprecated_rule, ) :param name: The name of the policy. This is used when referencing it from another rule or during policy enforcement. :param check_str: The policy. This is a string defining a policy that conforms to the policy language outlined at the top of the file. :param deprecated_reason: indicates why this policy is planned for removal in a future release. :param deprecated_since: indicates which release this policy was deprecated in. Accepts any string, though valid version strings are encouraged. .. versionchanged:: 1.29 Added *DeprecatedRule* object. .. versionchanged:: 3.4 Added *deprecated_reason* parameter. .. versionchanged:: 3.4 Added *deprecated_since* parameter. """ def __init__( self, name: str, check_str: str, *, deprecated_reason: ty.Optional[str] = None, deprecated_since: ty.Optional[str] = None, ): super().__init__(name, check_str) self._deprecated_reason = deprecated_reason self._deprecated_since = deprecated_since if not deprecated_reason or not deprecated_since: warnings.warn( f'{name} deprecated without deprecated_reason or ' f'deprecated_since. 
This will be an error in a future release', DeprecationWarning) @property def deprecated_reason(self): return self._deprecated_reason @property def deprecated_since(self): return self._deprecated_since ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/shell.py0000664000175000017500000001044100000000000020105 0ustar00zuulzuul00000000000000#!/usr/bin/env python3 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import collections.abc import sys from oslo_serialization import jsonutils from oslo_config import cfg from oslo_policy import opts from oslo_policy import policy class FakeEnforcer(object): def __init__(self, rules, config): self.rules = rules self.conf = None if config: self.conf = cfg.ConfigOpts() for group, options in opts.list_opts(): self.conf.register_opts(options, group) self.conf(["--config-file={}".format(config)]) def _try_rule(key, rule, target, access_data, o): try: result = rule(target, access_data, o, current_rule=key) if result: print("passed: %s" % key) else: print("failed: %s" % key) except Exception as e: print(e) print("exception: %s" % rule) def flatten(d, parent_key=''): """Flatten a nested dictionary Converts a dictionary with nested values to a single level flat dictionary, with dotted notation for each key. """ items = [] for k, v in d.items(): new_key = parent_key + '.' 
+ k if parent_key else k if isinstance(v, collections.abc.MutableMapping): items.extend(flatten(v, new_key).items()) else: items.append((new_key, v)) return dict(items) def tool(policy_file, access_file, apply_rule, is_admin=False, target_file=None, enforcer_config=None): with open(access_file, "rb", 0) as a: access = a.read() access_data = jsonutils.loads(access)['token'] access_data['roles'] = [role['name'] for role in access_data['roles']] access_data['user_id'] = access_data['user']['id'] if access_data.get('project'): access_data['project_id'] = access_data['project']['id'] if access_data.get('system'): access_data['system_scope'] = 'all' access_data['is_admin'] = is_admin with open(policy_file, "rb", 0) as p: policy_data = p.read() rules = policy.Rules.load(policy_data, "default") enforcer = FakeEnforcer(rules, enforcer_config) if target_file: with open(target_file, "rb", 0) as t: target = t.read() target_data = flatten(jsonutils.loads(target)) else: target_data = {'user_id': access_data['user']['id']} if access_data.get('project_id'): target_data['project_id'] = access_data['project_id'] if apply_rule: key = apply_rule rule = rules[apply_rule] _try_rule(key, rule, target_data, access_data, enforcer) return for key, rule in sorted(rules.items()): if ":" in key: _try_rule(key, rule, target_data, access_data, enforcer) def main(): conf = cfg.ConfigOpts() conf.register_cli_opt(cfg.StrOpt( 'policy', required=True, help='path to a policy file.')) conf.register_cli_opt(cfg.StrOpt( 'access', required=True, help='path to a file containing OpenStack Identity API ' 'access info in JSON format.')) conf.register_cli_opt(cfg.StrOpt( 'target', help='path to a file containing custom target info in ' 'JSON format. This will be used to evaluate the policy with.')) conf.register_cli_opt(cfg.StrOpt( 'rule', help='rule to test.')) conf.register_cli_opt(cfg.BoolOpt( 'is_admin', help='set is_admin=True on the credentials used for the evaluation.', default=False)) conf.register_cli_opt(cfg.StrOpt( 'enforcer_config', help='configuration file for the oslopolicy-checker enforcer')) conf() tool(conf.policy, conf.access, conf.rule, conf.is_admin, conf.target, conf.enforcer_config) if __name__ == "__main__": sys.exit(main()) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/sphinxext.py0000664000175000017500000001215700000000000021036 0ustar00zuulzuul00000000000000# Copyright 2017 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Sphinx extension for pretty-formatting policy docs.""" import os from docutils import nodes from docutils.parsers import rst from docutils.parsers.rst import directives from docutils import statemachine from oslo_config import cfg from sphinx.util import logging from sphinx.util.nodes import nested_parse_with_titles from oslo_policy import generator def _indent(text): """Indent by four spaces.""" prefix = ' ' * 4 def prefixed_lines(): for line in text.splitlines(True): yield (prefix + line if line.strip() else line) return ''.join(prefixed_lines()) def _format_policy_rule(rule): """Output a definition list-style rule. For example:: ``os_compute_api:servers:create`` :Default: ``rule:admin_or_owner`` :Operations: - **POST** ``/servers`` Create a server """ yield '``{}``'.format(rule.name) if rule.check_str: yield _indent(':Default: ``{}``'.format(rule.check_str)) else: yield _indent(':Default: ') if hasattr(rule, 'operations'): yield _indent(':Operations:') for operation in rule.operations: yield _indent(_indent('- **{}** ``{}``'.format( operation['method'], operation['path']))) if hasattr(rule, 'scope_types') and rule.scope_types is not None: yield _indent(':Scope Types:') for scope_type in rule.scope_types: yield _indent(_indent('- **{}**'.format(scope_type))) yield '' if rule.description: for line in rule.description.strip().splitlines(): yield _indent(line.rstrip()) else: yield _indent('(no description provided)') yield '' def _format_policy_section(section, rules): # The nested_parse_with_titles will ensure the correct header leve is used. yield section yield '=' * len(section) yield '' for rule in rules: for line in _format_policy_rule(rule): yield line def _format_policy(namespaces): policies = generator.get_policies_dict(namespaces) for section in sorted(policies.keys()): for line in _format_policy_section(section, policies[section]): yield line class ShowPolicyDirective(rst.Directive): has_content = False option_spec = { 'config-file': directives.unchanged, } def run(self): env = self.state.document.settings.env app = env.app config_file = self.options.get('config-file') # if the config_file option was not defined, attempt to reuse the # 'oslo_policy.sphinxpolicygen' extension's setting if not config_file and hasattr(env.config, 'policy_generator_config_file'): config_file = env.config.policy_generator_config_file # If we are given a file that isn't an absolute path, look for it # in the source directory if it doesn't exist. candidates = [ config_file, os.path.join(app.srcdir, config_file,), ] for c in candidates: if os.path.isfile(c): config_path = c break else: raise ValueError( 'could not find config file in: %s' % str(candidates) ) self.info('loading config file %s' % config_path) conf = cfg.ConfigOpts() opts = generator.GENERATOR_OPTS + generator.RULE_OPTS conf.register_cli_opts(opts) conf.register_opts(opts) conf( args=['--config-file', config_path], ) namespaces = conf.namespace[:] result = statemachine.ViewList() source_name = '<' + __name__ + '>' for line in _format_policy(namespaces): result.append(line, source_name) node = nodes.section() node.document = self.state.document # With the resolution for bug #1788183, we now parse the # 'DocumentedRuleDefault.description' attribute as rST. Unfortunately, # there are a lot of broken option descriptions out there and we don't # want to break peoples' builds suddenly. As a result, we disable # 'warning-is-error' temporarily. Users will still see the warnings but # the build will continue. 
with logging.skip_warningiserror(): nested_parse_with_titles(self.state, result, node) return node.children def setup(app): app.add_directive('show-policy', ShowPolicyDirective) return { 'parallel_read_safe': True, 'parallel_write_safe': True, } ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/sphinxpolicygen.py0000664000175000017500000000661700000000000022233 0ustar00zuulzuul00000000000000# Copyright 2015 Hewlett-Packard Development Company, L.P. # Copyright 2016 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Generate a sample policy file.""" import os from oslo_config import cfg from sphinx.util import logging from oslo_policy import generator LOG = logging.getLogger(__name__) def generate_sample(app): """Generate a sample policy file.""" if not app.config.policy_generator_config_file: LOG.warning("No policy_generator_config_file is specified, " "skipping sample policy generation") return if isinstance(app.config.policy_generator_config_file, list): for config_file, base_name in app.config.policy_generator_config_file: if base_name is None: base_name = _get_default_basename(config_file) _generate_sample(app, config_file, base_name) else: _generate_sample(app, app.config.policy_generator_config_file, app.config.sample_policy_basename) def _get_default_basename(config_file): return os.path.splitext(os.path.basename(config_file))[0] def _generate_sample(app, policy_file, base_name): def info(msg): LOG.info('[%s] %s' % (__name__, msg)) # If we are given a file that isn't an absolute path, look for it # in the source directory if it doesn't exist. candidates = [ policy_file, os.path.join(app.srcdir, policy_file,), ] for c in candidates: if os.path.isfile(c): info('reading config generator instructions from %s' % c) config_path = c break else: raise ValueError( "Could not find policy_generator_config_file %r" % app.config.policy_generator_config_file) if base_name: out_file = os.path.join(app.srcdir, base_name) + '.policy.yaml.sample' if not os.path.isdir(os.path.dirname(os.path.abspath(out_file))): os.mkdir(os.path.dirname(os.path.abspath(out_file))) else: file_name = 'sample.policy.yaml' out_file = os.path.join(app.srcdir, file_name) info('writing sample policy to %s' % out_file) # NOTE(bnemec): We don't want to do cli parsing on the global object here # because that can break consumers who do cli arg registration on import # in their documented modules. It's not allowed to register a cli arg after # the args have been parsed once. 
conf = cfg.ConfigOpts() generator.generate_sample(args=['--config-file', config_path, '--output-file', out_file], conf=conf) def setup(app): app.add_config_value('policy_generator_config_file', None, 'env') app.add_config_value('sample_policy_basename', None, 'env') app.connect('builder-inited', generate_sample) return { 'parallel_read_safe': True, 'parallel_write_safe': True, } ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1645118009.8515182 oslo.policy-3.11.0/oslo_policy/tests/0000775000175000017500000000000000000000000017566 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/tests/__init__.py0000664000175000017500000000000000000000000021665 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/tests/base.py0000664000175000017500000000441500000000000021056 0ustar00zuulzuul00000000000000# Copyright (c) 2015 OpenStack Foundation. # All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import codecs import io import os import os.path import sys import fixtures from oslo_config import fixture as config from oslotest import base as test_base from oslo_policy import _checks from oslo_policy import policy class PolicyBaseTestCase(test_base.BaseTestCase): def setUp(self): super(PolicyBaseTestCase, self).setUp() self.conf = self.useFixture(config.Config()).conf self.config_dir = self.useFixture(fixtures.TempDir()).path self.conf(args=['--config-dir', self.config_dir]) self.enforcer = policy.Enforcer(self.conf) self.addCleanup(self.enforcer.clear) def get_config_file_fullname(self, filename): return os.path.join(self.config_dir, filename.lstrip(os.sep)) def create_config_file(self, filename, contents): """Create a configuration file under the config dir. Also creates any intermediate paths needed so the file can be in a subdirectory. """ path = self.get_config_file_fullname(filename) pardir = os.path.dirname(path) if not os.path.exists(pardir): os.makedirs(pardir) with codecs.open(path, 'w', encoding='utf-8') as f: f.write(contents) def _capture_stdout(self): self.useFixture(fixtures.MonkeyPatch('sys.stdout', io.StringIO())) return sys.stdout class FakeCheck(_checks.BaseCheck): def __init__(self, result=None): self.result = result def __str__(self): return str(self.result) def __call__(self, target, creds, enforcer): if self.result is not None: return self.result return (target, creds, enforcer) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/tests/test_cache_handler.py0000664000175000017500000000564500000000000023751 0ustar00zuulzuul00000000000000# Copyright (c) 2020 OpenStack Foundation. # All Rights Reserved. 
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Test the cache handler module""" import os from unittest import mock import fixtures import oslo_config from oslotest import base as test_base from oslo_policy import _cache_handler as _ch class CacheHandlerTest(test_base.BaseTestCase): def setUp(self): super().setUp() self.tmpdir = self.useFixture(fixtures.TempDir()) def test_read_cached_file(self): file_cache = {} path = os.path.join(self.tmpdir.path, 'tmpfile') with open(path, 'w+') as fp: fp.write('test') reloaded, data = _ch.read_cached_file(file_cache, path) self.assertEqual('test', data) self.assertTrue(reloaded) reloaded, data = _ch.read_cached_file(file_cache, path) self.assertEqual('test', data) self.assertFalse(reloaded) reloaded, data = _ch.read_cached_file( file_cache, path, force_reload=True) self.assertEqual('test', data) self.assertTrue(reloaded) def test_read_cached_file_with_updates(self): file_cache = {} path = os.path.join(self.tmpdir.path, 'tmpfile') with open(path, 'w+') as fp: fp.write('test') reloaded, data = _ch.read_cached_file(file_cache, path) # update the timestamps times = (os.stat(path).st_atime + 1, os.stat(path).st_mtime + 1) os.utime(path, times) reloaded, data = _ch.read_cached_file(file_cache, path) self.assertTrue(reloaded) @mock.patch.object(_ch, 'LOG') def test_reloading_cache_with_permission_denied(self, mock_log): file_cache = {} path = os.path.join(self.tmpdir.path, 'tmpfile') with open(path, 'w+') as fp: fp.write('test') os.chmod(path, 000) self.assertRaises( oslo_config.cfg.ConfigFilesPermissionDeniedError, _ch.read_cached_file, file_cache, path) mock_log.error.assert_called_once() @mock.patch.object(_ch, 'LOG') def test_reloading_on_removed_file(self, mock_log): file_cache = {} # don't actually create the file path = os.path.join(self.tmpdir.path, 'tmpfile') reloaded, data = _ch.read_cached_file(file_cache, path) self.assertEqual({}, data) self.assertTrue(reloaded) mock_log.error.assert_called_once() ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/tests/test_checks.py0000664000175000017500000003720400000000000022445 0ustar00zuulzuul00000000000000# Copyright (c) 2015 OpenStack Foundation. # All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
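# A simplified, self-contained sketch -- not the oslo.policy implementation --
# of the contract the cache-handler tests above exercise: read_cached_file()
# returns a (reloaded, data) tuple and re-reads the file only when it is new
# to the cache, its mtime changed, or force_reload is passed. The error
# handling the real helper provides (logging and an empty result for a missing
# file, ConfigFilesPermissionDeniedError for an unreadable one) is omitted.
import os


def read_cached_file(cache, path, force_reload=False):
    mtime = os.path.getmtime(path)
    entry = cache.setdefault(path, {})
    reloaded = force_reload or entry.get('mtime') != mtime
    if reloaded:
        with open(path) as fp:
            entry['data'] = fp.read()
        entry['mtime'] = mtime
    return reloaded, entry['data']


# Example use: the second call returns reloaded=False until the file changes.
# cache = {}
# read_cached_file(cache, '/etc/myservice/policy.yaml')  # hypothetical path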
from unittest import mock from oslotest import base as test_base from oslo_policy import _checks from oslo_policy.tests import base from oslo_policy.tests import token_fixture class CheckRegisterTestCase(test_base.BaseTestCase): @mock.patch.object(_checks, 'registered_checks', {}) def test_register_check(self): class TestCheck(_checks.Check): pass _checks.register('spam', TestCheck) self.assertEqual(dict(spam=TestCheck), _checks.registered_checks) @mock.patch.object(_checks, 'registered_checks', {}) def test_register_check_decorator(self): @_checks.register('spam') class TestCheck(_checks.Check): pass self.assertEqual(dict(spam=TestCheck), _checks.registered_checks) class RuleCheckTestCase(base.PolicyBaseTestCase): def test_rule_missing(self): self.enforcer.rules = {} check = _checks.RuleCheck('rule', 'spam') self.assertFalse(check('target', 'creds', self.enforcer)) def test_rule_false(self): self.enforcer.rules = dict(spam=_BoolCheck(False)) check = _checks.RuleCheck('rule', 'spam') self.assertFalse(check('target', 'creds', self.enforcer)) def test_rule_true(self): self.enforcer.rules = dict(spam=_BoolCheck(True)) check = _checks.RuleCheck('rule', 'spam') self.assertTrue(check('target', 'creds', self.enforcer)) class RoleCheckTestCase(base.PolicyBaseTestCase): def test_accept(self): check = _checks.RoleCheck('role', 'sPaM') self.assertTrue(check({}, dict(roles=['SpAm']), self.enforcer)) def test_reject(self): check = _checks.RoleCheck('role', 'spam') self.assertFalse(check({}, dict(roles=[]), self.enforcer)) def test_format_value(self): check = _checks.RoleCheck('role', '%(target.role.name)s') target_dict = {'target.role.name': 'a'} cred_dict = dict(user='user', roles=['a', 'b', 'c']) self.assertTrue(check(target_dict, cred_dict, self.enforcer)) target_dict = {'target.role.name': 'd'} self.assertFalse(check(target_dict, cred_dict, self.enforcer)) target_dict = dict(target=dict(role=dict())) self.assertFalse(check(target_dict, cred_dict, self.enforcer)) def test_no_roles_case(self): check = _checks.RoleCheck('role', 'spam') self.assertFalse(check({}, {}, self.enforcer)) class GenericCheckTestCase(base.PolicyBaseTestCase): def test_no_cred(self): check = _checks.GenericCheck('name', '%(name)s') self.assertFalse(check(dict(name='spam'), {}, self.enforcer)) def test_cred_mismatch(self): check = _checks.GenericCheck('name', '%(name)s') self.assertFalse(check(dict(name='spam'), dict(name='ham'), self.enforcer)) def test_accept(self): check = _checks.GenericCheck('name', '%(name)s') self.assertTrue(check(dict(name='spam'), dict(name='spam'), self.enforcer)) def test_no_key_match_in_target(self): check = _checks.GenericCheck('name', '%(name)s') self.assertFalse(check(dict(name1='spam'), dict(name='spam'), self.enforcer)) def test_constant_string_mismatch(self): check = _checks.GenericCheck("'spam'", '%(name)s') self.assertFalse(check(dict(name='ham'), {}, self.enforcer)) def test_constant_string_accept(self): check = _checks.GenericCheck("'spam'", '%(name)s') self.assertTrue(check(dict(name='spam'), {}, self.enforcer)) def test_constant_literal_mismatch(self): check = _checks.GenericCheck('True', '%(enabled)s') self.assertFalse(check(dict(enabled=False), {}, self.enforcer)) def test_constant_literal_accept(self): check = _checks.GenericCheck('True', '%(enabled)s') self.assertTrue(check(dict(enabled=True), {}, self.enforcer)) def test_deep_credentials_dictionary_lookup(self): check = _checks.GenericCheck('a.b.c.d', 'APPLES') credentials = {'a': {'b': {'c': {'d': 'APPLES'}}}} 
self.assertTrue(check({}, credentials, self.enforcer)) def test_missing_credentials_dictionary_lookup(self): credentials = {'a': 'APPLES', 'o': {'t': 'ORANGES'}} # First a valid check - rest of case is expecting failures # Should prove the basic credentials structure before we test # for failure cases. check = _checks.GenericCheck('o.t', 'ORANGES') self.assertTrue(check({}, credentials, self.enforcer)) # Case where final key is missing check = _checks.GenericCheck('o.v', 'ORANGES') self.assertFalse(check({}, credentials, self.enforcer)) # Attempt to access key under a missing dictionary check = _checks.GenericCheck('q.v', 'APPLES') self.assertFalse(check({}, credentials, self.enforcer)) def test_single_entry_in_list_accepted(self): check = _checks.GenericCheck('a.b.c.d', 'APPLES') credentials = {'a': {'b': {'c': {'d': ['APPLES']}}}} self.assertTrue(check({}, credentials, self.enforcer)) def test_multiple_entry_in_list_accepted(self): check = _checks.GenericCheck('a.b.c.d', 'APPLES') credentials = {'a': {'b': {'c': {'d': ['Bananas', 'APPLES', 'Grapes']}}}} self.assertTrue(check({}, credentials, self.enforcer)) def test_multiple_entry_in_nested_list_accepted(self): check = _checks.GenericCheck('a.b.c.d', 'APPLES') credentials = {'a': {'b': [{'c': {'d': ['BANANAS', 'APPLES', 'GRAPES']}}, {}]}} self.assertTrue(check({}, credentials, self.enforcer)) def test_multiple_entries_one_matches(self): check = _checks.GenericCheck( 'token.catalog.endpoints.id', token_fixture.REGION_ONE_PUBLIC_KEYSTONE_ENDPOINT_ID) credentials = token_fixture.PROJECT_SCOPED_TOKEN_FIXTURE self.assertTrue(check({}, credentials, self.enforcer)) def test_generic_role_check_matches(self): check = _checks.GenericCheck( 'token.roles.name', 'role1') credentials = token_fixture.PROJECT_SCOPED_TOKEN_FIXTURE self.assertTrue(check({}, credentials, self.enforcer)) def test_generic_missing_role_does_not_matches(self): check = _checks.GenericCheck( 'token.roles.name', 'missing') credentials = token_fixture.PROJECT_SCOPED_TOKEN_FIXTURE self.assertFalse(check({}, credentials, self.enforcer)) def test_multiple_nested_lists_accepted(self): check = _checks.GenericCheck('a.b.c.d', 'APPLES') credentials = {'a': {'b': [{'a': ''}, {'c': {'d': ['BANANAS', 'APPLES', 'GRAPES']}}, {}]}} self.assertTrue(check({}, credentials, self.enforcer)) def test_entry_not_in_list_rejected(self): check = _checks.GenericCheck('a.b.c.d', 'APPLES') credentials = {'a': {'b': {'c': {'d': ['PEACHES', 'PEARS']}}}} self.assertFalse(check({}, credentials, self.enforcer)) class FalseCheckTestCase(test_base.BaseTestCase): def test_str(self): check = _checks.FalseCheck() self.assertEqual('!', str(check)) def test_call(self): check = _checks.FalseCheck() self.assertFalse(check('target', 'creds', None)) class TrueCheckTestCase(test_base.BaseTestCase): def test_str(self): check = _checks.TrueCheck() self.assertEqual('@', str(check)) def test_call(self): check = _checks.TrueCheck() self.assertTrue(check('target', 'creds', None)) class CheckForTest(_checks.Check): def __call__(self, target, creds, enforcer): pass class CheckTestCase(test_base.BaseTestCase): def test_init(self): check = CheckForTest('kind', 'match') self.assertEqual('kind', check.kind) self.assertEqual('match', check.match) def test_str(self): check = CheckForTest('kind', 'match') self.assertEqual('kind:match', str(check)) class NotCheckTestCase(test_base.BaseTestCase): def test_init(self): check = _checks.NotCheck('rule') self.assertEqual('rule', check.rule) def test_str(self): check = _checks.NotCheck('rule') 
self.assertEqual('not rule', str(check)) def test_call_true(self): rule = _checks.TrueCheck() check = _checks.NotCheck(rule) self.assertFalse(check('target', 'cred', None)) def test_call_false(self): rule = _checks.FalseCheck() check = _checks.NotCheck(rule) self.assertTrue(check('target', 'cred', None)) def test_rule_takes_current_rule(self): results = [] class TestCheck(object): def __call__(self, target, cred, enforcer, current_rule=None): results.append((target, cred, enforcer, current_rule)) return True check = _checks.NotCheck(TestCheck()) self.assertFalse(check('target', 'cred', None, current_rule="a_rule")) self.assertEqual( [('target', 'cred', None, 'a_rule')], results, ) def test_rule_does_not_take_current_rule(self): results = [] class TestCheck(object): def __call__(self, target, cred, enforcer): results.append((target, cred, enforcer)) return True check = _checks.NotCheck(TestCheck()) self.assertFalse(check('target', 'cred', None, current_rule="a_rule")) self.assertEqual( [('target', 'cred', None)], results, ) class _BoolCheck(_checks.BaseCheck): def __init__(self, result): self.called = False self.result = result def __str__(self): return str(self.result) def __call__(self, target, creds, enforcer, current_rule=None): self.called = True return self.result class AndCheckTestCase(test_base.BaseTestCase): def test_init(self): check = _checks.AndCheck(['rule1', 'rule2']) self.assertEqual(['rule1', 'rule2'], check.rules) def test_add_check(self): check = _checks.AndCheck(['rule1', 'rule2']) check.add_check('rule3') self.assertEqual(['rule1', 'rule2', 'rule3'], check.rules) def test_str(self): check = _checks.AndCheck(['rule1', 'rule2']) self.assertEqual('(rule1 and rule2)', str(check)) def test_call_all_false(self): rules = [ _BoolCheck(False), _BoolCheck(False), ] check = _checks.AndCheck(rules) self.assertFalse(check('target', 'cred', None)) self.assertTrue(rules[0].called) self.assertFalse(rules[1].called) def test_call_first_true(self): rules = [ _BoolCheck(True), _BoolCheck(False), ] check = _checks.AndCheck(rules) self.assertFalse(check('target', 'cred', None)) self.assertTrue(rules[0].called) self.assertTrue(rules[1].called) def test_call_second_true(self): rules = [ _BoolCheck(False), _BoolCheck(True), ] check = _checks.AndCheck(rules) self.assertFalse(check('target', 'cred', None)) self.assertTrue(rules[0].called) self.assertFalse(rules[1].called) def test_rule_takes_current_rule(self): results = [] class TestCheck(object): def __call__(self, target, cred, enforcer, current_rule=None): results.append((target, cred, enforcer, current_rule)) return False check = _checks.AndCheck([TestCheck()]) self.assertFalse(check('target', 'cred', None, current_rule="a_rule")) self.assertEqual( [('target', 'cred', None, 'a_rule')], results, ) def test_rule_does_not_take_current_rule(self): results = [] class TestCheck(object): def __call__(self, target, cred, enforcer): results.append((target, cred, enforcer)) return False check = _checks.AndCheck([TestCheck()]) self.assertFalse(check('target', 'cred', None, current_rule="a_rule")) self.assertEqual( [('target', 'cred', None)], results, ) class OrCheckTestCase(test_base.BaseTestCase): def test_init(self): check = _checks.OrCheck(['rule1', 'rule2']) self.assertEqual(['rule1', 'rule2'], check.rules) def test_add_check(self): check = _checks.OrCheck(['rule1', 'rule2']) check.add_check('rule3') self.assertEqual(['rule1', 'rule2', 'rule3'], check.rules) def test_pop_check(self): check = _checks.OrCheck(['rule1', 'rule2', 'rule3']) rules, 
check1 = check.pop_check() self.assertEqual(['rule1', 'rule2'], check.rules) self.assertEqual('rule3', check1) def test_str(self): check = _checks.OrCheck(['rule1', 'rule2']) self.assertEqual('(rule1 or rule2)', str(check)) def test_call_all_false(self): rules = [ _BoolCheck(False), _BoolCheck(False), ] check = _checks.OrCheck(rules) self.assertFalse(check('target', 'cred', None)) self.assertTrue(rules[0].called) self.assertTrue(rules[1].called) def test_call_first_true(self): rules = [ _BoolCheck(True), _BoolCheck(False), ] check = _checks.OrCheck(rules) self.assertTrue(check('target', 'cred', None)) self.assertTrue(rules[0].called) self.assertFalse(rules[1].called) def test_call_second_true(self): rules = [ _BoolCheck(False), _BoolCheck(True), ] check = _checks.OrCheck(rules) self.assertTrue(check('target', 'cred', None)) self.assertTrue(rules[0].called) self.assertTrue(rules[1].called) def test_rule_takes_current_rule(self): results = [] class TestCheck(object): def __call__(self, target, cred, enforcer, current_rule=None): results.append((target, cred, enforcer, current_rule)) return False check = _checks.OrCheck([TestCheck()]) self.assertFalse(check('target', 'cred', None, current_rule="a_rule")) self.assertEqual( [('target', 'cred', None, 'a_rule')], results, ) def test_rule_does_not_take_current_rule(self): results = [] class TestCheck(object): def __call__(self, target, cred, enforcer): results.append((target, cred, enforcer)) return False check = _checks.OrCheck([TestCheck()]) self.assertFalse(check('target', 'cred', None, current_rule="a_rule")) self.assertEqual( [('target', 'cred', None)], results, ) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/tests/test_external.py0000664000175000017500000003332500000000000023027 0ustar00zuulzuul00000000000000# Copyright (c) 2015 OpenStack Foundation. # All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
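# A minimal, self-contained sketch of how the rule/role/generic check types
# exercised above combine through the public Enforcer API. The rule names,
# roles and IDs below are invented for the example; with no policy file on
# disk, the registered defaults are what gets enforced.
from oslo_config import cfg

from oslo_policy import policy

conf = cfg.ConfigOpts()
conf(args=[])
enforcer = policy.Enforcer(conf)
enforcer.register_defaults([
    policy.RuleDefault('admin', 'role:admin'),
    policy.RuleDefault('owner', 'project_id:%(project_id)s'),
    policy.RuleDefault('admin_or_owner', 'rule:admin or rule:owner'),
])

creds = {'roles': ['member'], 'project_id': 'abc123'}
target = {'project_id': 'abc123'}

# The RoleCheck behind "admin" fails for these credentials, but the
# GenericCheck behind "owner" matches the target, so the OrCheck passes.
print(enforcer.enforce('admin_or_owner', target, creds))  # True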
import json from unittest import mock import fixtures from oslo_serialization import jsonutils from requests_mock.contrib import fixture as rm_fixture from urllib import parse as urlparse from oslo_policy import _external from oslo_policy import opts from oslo_policy.tests import base class HttpCheckTestCase(base.PolicyBaseTestCase): def setUp(self): super(HttpCheckTestCase, self).setUp() opts._register(self.conf) self.requests_mock = self.useFixture(rm_fixture.Fixture()) def decode_post_data(self, post_data): result = {} for item in post_data.split('&'): key, _sep, value = item.partition('=') result[key] = jsonutils.loads(urlparse.unquote_plus(value)) return result def test_accept(self): self.requests_mock.post('http://example.com/target', text='True') check = _external.HttpCheck('http', '//example.com/%(name)s') target_dict = dict(name='target', spam='spammer') cred_dict = dict(user='user', roles=['a', 'b', 'c']) self.assertTrue(check(target_dict, cred_dict, self.enforcer)) last_request = self.requests_mock.last_request self.assertEqual('application/x-www-form-urlencoded', last_request.headers['Content-Type']) self.assertEqual('POST', last_request.method) self.assertEqual(dict(target=target_dict, credentials=cred_dict, rule=None), self.decode_post_data(last_request.body)) def test_accept_json(self): self.conf.set_override('remote_content_type', 'application/json', group='oslo_policy') self.requests_mock.post('http://example.com/target', text='True') check = _external.HttpCheck('http', '//example.com/%(name)s') target_dict = dict(name='target', spam='spammer') cred_dict = dict(user='user', roles=['a', 'b', 'c']) self.assertTrue(check(target_dict, cred_dict, self.enforcer)) last_request = self.requests_mock.last_request self.assertEqual('application/json', last_request.headers['Content-Type']) self.assertEqual('POST', last_request.method) self.assertEqual(dict(rule=None, credentials=cred_dict, target=target_dict), json.loads(last_request.body.decode('utf-8'))) def test_reject(self): self.requests_mock.post("http://example.com/target", text='other') check = _external.HttpCheck('http', '//example.com/%(name)s') target_dict = dict(name='target', spam='spammer') cred_dict = dict(user='user', roles=['a', 'b', 'c']) self.assertFalse(check(target_dict, cred_dict, self.enforcer)) last_request = self.requests_mock.last_request self.assertEqual('POST', last_request.method) self.assertEqual(dict(target=target_dict, credentials=cred_dict, rule=None), self.decode_post_data(last_request.body)) def test_http_with_objects_in_target(self): self.requests_mock.post("http://example.com/target", text='True') check = _external.HttpCheck('http', '//example.com/%(name)s') target = {'a': object(), 'name': 'target', 'b': 'test data'} self.assertTrue(check(target, dict(user='user', roles=['a', 'b', 'c']), self.enforcer)) def test_http_with_strings_in_target(self): self.requests_mock.post("http://example.com/target", text='True') check = _external.HttpCheck('http', '//example.com/%(name)s') target = {'a': 'some_string', 'name': 'target', 'b': 'test data'} self.assertTrue(check(target, dict(user='user', roles=['a', 'b', 'c']), self.enforcer)) def test_accept_with_rule_in_argument(self): self.requests_mock.post('http://example.com/target', text='True') check = _external.HttpCheck('http', '//example.com/%(name)s') target_dict = dict(name='target', spam='spammer') cred_dict = dict(user='user', roles=['a', 'b', 'c']) current_rule = "a_rule" self.assertTrue(check(target_dict, cred_dict, self.enforcer, current_rule)) 
last_request = self.requests_mock.last_request self.assertEqual('POST', last_request.method) self.assertEqual(dict(target=target_dict, credentials=cred_dict, rule=current_rule), self.decode_post_data(last_request.body)) def test_reject_with_rule_in_argument(self): self.requests_mock.post("http://example.com/target", text='other') check = _external.HttpCheck('http', '//example.com/%(name)s') target_dict = dict(name='target', spam='spammer') cred_dict = dict(user='user', roles=['a', 'b', 'c']) current_rule = "a_rule" self.assertFalse(check(target_dict, cred_dict, self.enforcer, current_rule)) last_request = self.requests_mock.last_request self.assertEqual('POST', last_request.method) self.assertEqual(dict(target=target_dict, credentials=cred_dict, rule=current_rule), self.decode_post_data(last_request.body)) class HttpsCheckTestCase(base.PolicyBaseTestCase): def setUp(self): super(HttpsCheckTestCase, self).setUp() opts._register(self.conf) self.requests_mock = self.useFixture(rm_fixture.Fixture()) # ensure environment variables don't mess with our test results # https://requests.readthedocs.io/en/master/user/advanced/#ssl-cert-verification self.useFixture(fixtures.EnvironmentVariable('REQUESTS_CA_BUNDLE')) self.useFixture(fixtures.EnvironmentVariable('CURL_CA_BUNDLE')) def decode_post_data(self, post_data): result = {} for item in post_data.split('&'): key, _sep, value = item.partition('=') result[key] = jsonutils.loads(urlparse.unquote_plus(value)) return result def test_https_accept(self): self.requests_mock.post('https://example.com/target', text='True') check = _external.HttpsCheck('https', '//example.com/%(name)s') target_dict = dict(name='target', spam='spammer') cred_dict = dict(user='user', roles=['a', 'b', 'c']) self.assertTrue(check(target_dict, cred_dict, self.enforcer)) last_request = self.requests_mock.last_request self.assertEqual('application/x-www-form-urlencoded', last_request.headers['Content-Type']) self.assertEqual('POST', last_request.method) self.assertEqual(dict(rule=None, target=target_dict, credentials=cred_dict), self.decode_post_data(last_request.body)) def test_https_accept_json(self): self.conf.set_override('remote_content_type', 'application/json', group='oslo_policy') self.requests_mock.post('https://example.com/target', text='True') check = _external.HttpsCheck('https', '//example.com/%(name)s') target_dict = dict(name='target', spam='spammer') cred_dict = dict(user='user', roles=['a', 'b', 'c']) self.assertTrue(check(target_dict, cred_dict, self.enforcer)) last_request = self.requests_mock.last_request self.assertEqual('application/json', last_request.headers['Content-Type']) self.assertEqual('POST', last_request.method) self.assertEqual(dict(rule=None, target=target_dict, credentials=cred_dict), json.loads(last_request.body.decode('utf-8'))) def test_https_accept_with_verify(self): self.conf.set_override('remote_ssl_verify_server_crt', True, group='oslo_policy') self.conf.set_override('remote_ssl_ca_crt_file', None, group='oslo_policy') self.requests_mock.post('https://example.com/target', text='True') check = _external.HttpsCheck('https', '//example.com/%(name)s') target_dict = dict(name='target', spam='spammer') cred_dict = dict(user='user', roles=['a', 'b', 'c']) self.assertTrue(check(target_dict, cred_dict, self.enforcer)) last_request = self.requests_mock.last_request self.assertEqual(True, last_request.verify) self.assertEqual('POST', last_request.method) self.assertEqual(dict(rule=None, target=target_dict, credentials=cred_dict), 
self.decode_post_data(last_request.body)) def test_https_accept_with_verify_cert(self): self.conf.set_override('remote_ssl_verify_server_crt', True, group='oslo_policy') self.conf.set_override('remote_ssl_ca_crt_file', "ca.crt", group='oslo_policy') self.requests_mock.post('https://example.com/target', text='True') check = _external.HttpsCheck('https', '//example.com/%(name)s') target_dict = dict(name='target', spam='spammer') cred_dict = dict(user='user', roles=['a', 'b', 'c']) with mock.patch('os.path.exists') as path_exists: path_exists.return_value = True self.assertTrue(check(target_dict, cred_dict, self.enforcer)) last_request = self.requests_mock.last_request self.assertEqual('ca.crt', last_request.verify) self.assertEqual('POST', last_request.method) self.assertEqual(dict(rule=None, target=target_dict, credentials=cred_dict), self.decode_post_data(last_request.body)) def test_https_accept_with_verify_and_client_certs(self): self.conf.set_override('remote_ssl_verify_server_crt', True, group='oslo_policy') self.conf.set_override('remote_ssl_ca_crt_file', "ca.crt", group='oslo_policy') self.conf.set_override('remote_ssl_client_key_file', "client.key", group='oslo_policy') self.conf.set_override('remote_ssl_client_crt_file', "client.crt", group='oslo_policy') self.requests_mock.post('https://example.com/target', text='True') check = _external.HttpsCheck('https', '//example.com/%(name)s') target_dict = dict(name='target', spam='spammer') cred_dict = dict(user='user', roles=['a', 'b', 'c']) with mock.patch('os.path.exists') as path_exists: with mock.patch('os.access') as os_access: path_exists.return_value = True os_access.return_value = True self.assertTrue(check(target_dict, cred_dict, self.enforcer)) last_request = self.requests_mock.last_request self.assertEqual('ca.crt', last_request.verify) self.assertEqual(('client.crt', 'client.key'), last_request.cert) self.assertEqual('POST', last_request.method) self.assertEqual(dict(rule=None, target=target_dict, credentials=cred_dict), self.decode_post_data(last_request.body)) def test_https_reject(self): self.requests_mock.post("https://example.com/target", text='other') check = _external.HttpsCheck('https', '//example.com/%(name)s') target_dict = dict(name='target', spam='spammer') cred_dict = dict(user='user', roles=['a', 'b', 'c']) self.assertFalse(check(target_dict, cred_dict, self.enforcer)) last_request = self.requests_mock.last_request self.assertEqual('POST', last_request.method) self.assertEqual(dict(rule=None, target=target_dict, credentials=cred_dict), self.decode_post_data(last_request.body)) def test_https_with_objects_in_target(self): self.requests_mock.post("https://example.com/target", text='True') check = _external.HttpsCheck('https', '//example.com/%(name)s') target = {'a': object(), 'name': 'target', 'b': 'test data'} self.assertTrue(check(target, dict(user='user', roles=['a', 'b', 'c']), self.enforcer)) def test_https_with_strings_in_target(self): self.requests_mock.post("https://example.com/target", text='True') check = _external.HttpsCheck('https', '//example.com/%(name)s') target = {'a': 'some_string', 'name': 'target', 'b': 'test data'} self.assertTrue(check(target, dict(user='user', roles=['a', 'b', 'c']), self.enforcer)) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/tests/test_fixtures.py0000664000175000017500000000414700000000000023056 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the 
"License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import json from oslo_policy import fixture from oslo_policy import policy as oslo_policy from oslo_policy.tests import base as test_base class FixtureTestCase(test_base.PolicyBaseTestCase): def test_enforce_http_true(self): self.assertTrue(self._test_enforce_http(True)) def test_enforce_http_false(self): self.assertFalse(self._test_enforce_http(False)) def _test_enforce_http(self, return_value): self.useFixture(fixture.HttpCheckFixture(return_value=return_value)) action = self.getUniqueString() rules_json = { action: "http:" + self.getUniqueString() } rules = oslo_policy.Rules.load(json.dumps(rules_json)) self.enforcer.set_rules(rules) return self.enforcer.enforce(rule=action, target={}, creds={}) def test_enforce_https_true(self): self.assertTrue(self._test_enforce_http(True)) def test_enforce_https_false(self): self.assertFalse(self._test_enforce_http(False)) def _test_enforce_https(self, return_value): self.useFixture(fixture.HttpsCheckFixture(return_value=return_value)) action = self.getUniqueString() rules_json = { action: "https:" + self.getUniqueString() } rules = oslo_policy.Rules.load(json.dumps(rules_json)) self.enforcer.set_rules(rules) return self.enforcer.enforce(rule=action, target={}, creds={}) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/tests/test_generator.py0000664000175000017500000012010200000000000023161 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import operator from unittest import mock import warnings from oslo_config import cfg import stevedore import testtools import yaml from oslo_policy import generator from oslo_policy import policy from oslo_policy.tests import base from oslo_serialization import jsonutils OPTS = {'base_rules': [policy.RuleDefault('admin', 'is_admin:True', description='Basic admin check'), policy.DocumentedRuleDefault('owner', ('project_id:%' '(project_id)s'), 'This is a long ' 'description to check ' 'that line wrapping ' 'functions properly', [{'path': '/foo/', 'method': 'GET'}, {'path': '/test/', 'method': 'POST'}])], 'custom_field': [policy.RuleDefault('shared', 'field:networks:shared=True')], 'rules': [policy.RuleDefault('admin_or_owner', 'rule:admin or rule:owner')], } class GenerateSampleYAMLTestCase(base.PolicyBaseTestCase): def setUp(self): super(GenerateSampleYAMLTestCase, self).setUp() self.enforcer = policy.Enforcer(self.conf, policy_file='policy.yaml') def test_generate_loadable_yaml(self): extensions = [] for name, opts in OPTS.items(): ext = stevedore.extension.Extension(name=name, entry_point=None, plugin=None, obj=opts) extensions.append(ext) test_mgr = stevedore.named.NamedExtensionManager.make_test_instance( extensions=extensions, namespace=['base_rules', 'rules']) output_file = self.get_config_file_fullname('policy.yaml') with mock.patch('stevedore.named.NamedExtensionManager', return_value=test_mgr) as mock_ext_mgr: # generate sample-policy file with only rules generator._generate_sample(['base_rules', 'rules'], output_file, include_help=False) mock_ext_mgr.assert_called_once_with( 'oslo.policy.policies', names=['base_rules', 'rules'], on_load_failure_callback=generator.on_load_failure_callback, invoke_on_load=True) self.enforcer.load_rules() self.assertIn('owner', self.enforcer.rules) self.assertIn('admin', self.enforcer.rules) self.assertIn('admin_or_owner', self.enforcer.rules) self.assertEqual('project_id:%(project_id)s', str(self.enforcer.rules['owner'])) self.assertEqual('is_admin:True', str(self.enforcer.rules['admin'])) self.assertEqual('(rule:admin or rule:owner)', str(self.enforcer.rules['admin_or_owner'])) def test_expected_content(self): extensions = [] for name, opts in OPTS.items(): ext = stevedore.extension.Extension(name=name, entry_point=None, plugin=None, obj=opts) extensions.append(ext) test_mgr = stevedore.named.NamedExtensionManager.make_test_instance( extensions=extensions, namespace=['base_rules', 'rules']) expected = '''# Basic admin check #"admin": "is_admin:True" # This is a long description to check that line wrapping functions # properly # GET /foo/ # POST /test/ #"owner": "project_id:%(project_id)s" #"shared": "field:networks:shared=True" #"admin_or_owner": "rule:admin or rule:owner" ''' output_file = self.get_config_file_fullname('policy.yaml') with mock.patch('stevedore.named.NamedExtensionManager', return_value=test_mgr) as mock_ext_mgr: generator._generate_sample(['base_rules', 'rules'], output_file) mock_ext_mgr.assert_called_once_with( 'oslo.policy.policies', names=['base_rules', 'rules'], on_load_failure_callback=generator.on_load_failure_callback, invoke_on_load=True) with open(output_file, 'r') as written_file: written_policy = written_file.read() self.assertEqual(expected, written_policy) def test_expected_content_stdout(self): extensions = [] for name, opts in OPTS.items(): ext = stevedore.extension.Extension(name=name, entry_point=None, plugin=None, obj=opts) extensions.append(ext) test_mgr = 
stevedore.named.NamedExtensionManager.make_test_instance( extensions=extensions, namespace=['base_rules', 'rules']) expected = '''# Basic admin check #"admin": "is_admin:True" # This is a long description to check that line wrapping functions # properly # GET /foo/ # POST /test/ #"owner": "project_id:%(project_id)s" #"shared": "field:networks:shared=True" #"admin_or_owner": "rule:admin or rule:owner" ''' stdout = self._capture_stdout() with mock.patch('stevedore.named.NamedExtensionManager', return_value=test_mgr) as mock_ext_mgr: generator._generate_sample(['base_rules', 'rules'], output_file=None) mock_ext_mgr.assert_called_once_with( 'oslo.policy.policies', names=['base_rules', 'rules'], on_load_failure_callback=generator.on_load_failure_callback, invoke_on_load=True) self.assertEqual(expected, stdout.getvalue()) def test_policies_deprecated_for_removal(self): rule = policy.RuleDefault( name='foo:post_bar', check_str='role:fizz', description='Create a bar.', deprecated_for_removal=True, deprecated_reason='This policy is not used anymore', deprecated_since='N' ) opts = {'rules': [rule]} extensions = [] for name, opts, in opts.items(): ext = stevedore.extension.Extension(name=name, entry_point=None, plugin=None, obj=opts) extensions.append(ext) test_mgr = stevedore.named.NamedExtensionManager.make_test_instance( extensions=extensions, namespace=['rules'] ) expected = '''# DEPRECATED # "foo:post_bar" has been deprecated since N. # This policy is not used anymore # Create a bar. #"foo:post_bar": "role:fizz" ''' stdout = self._capture_stdout() with mock.patch('stevedore.named.NamedExtensionManager', return_value=test_mgr) as mock_ext_mgr: generator._generate_sample(['rules'], output_file=None) mock_ext_mgr.assert_called_once_with( 'oslo.policy.policies', names=['rules'], on_load_failure_callback=generator.on_load_failure_callback, invoke_on_load=True ) self.assertEqual(expected, stdout.getvalue()) def test_deprecated_policies_are_aliased_to_new_names(self): deprecated_rule = policy.DeprecatedRule( name='foo:post_bar', check_str='role:fizz', deprecated_reason=( 'foo:post_bar is being removed in favor of foo:create_bar' ), deprecated_since='N', ) new_rule = policy.RuleDefault( name='foo:create_bar', check_str='role:fizz', description='Create a bar.', deprecated_rule=deprecated_rule, ) opts = {'rules': [new_rule]} extensions = [] for name, opts in opts.items(): ext = stevedore.extension.Extension(name=name, entry_point=None, plugin=None, obj=opts) extensions.append(ext) test_mgr = stevedore.named.NamedExtensionManager.make_test_instance( extensions=extensions, namespace=['rules']) expected = '''# Create a bar. #"foo:create_bar": "role:fizz" # DEPRECATED # "foo:post_bar":"role:fizz" has been deprecated since N in favor of # "foo:create_bar":"role:fizz". 
# foo:post_bar is being removed in favor of foo:create_bar "foo:post_bar": "rule:foo:create_bar" ''' stdout = self._capture_stdout() with mock.patch('stevedore.named.NamedExtensionManager', return_value=test_mgr) as mock_ext_mgr: generator._generate_sample(['rules'], output_file=None) mock_ext_mgr.assert_called_once_with( 'oslo.policy.policies', names=['rules'], on_load_failure_callback=generator.on_load_failure_callback, invoke_on_load=True ) self.assertEqual(expected, stdout.getvalue()) def test_deprecated_policies_with_same_name(self): deprecated_rule = policy.DeprecatedRule( name='foo:create_bar', check_str='role:old', deprecated_reason=( 'role:fizz is a more sane default for foo:create_bar' ), deprecated_since='N', ) new_rule = policy.RuleDefault( name='foo:create_bar', check_str='role:fizz', description='Create a bar.', deprecated_rule=deprecated_rule, ) opts = {'rules': [new_rule]} extensions = [] for name, opts in opts.items(): ext = stevedore.extension.Extension(name=name, entry_point=None, plugin=None, obj=opts) extensions.append(ext) test_mgr = stevedore.named.NamedExtensionManager.make_test_instance( extensions=extensions, namespace=['rules']) expected = '''# Create a bar. #"foo:create_bar": "role:fizz" # DEPRECATED # "foo:create_bar":"role:old" has been deprecated since N in favor of # "foo:create_bar":"role:fizz". # role:fizz is a more sane default for foo:create_bar ''' stdout = self._capture_stdout() with mock.patch('stevedore.named.NamedExtensionManager', return_value=test_mgr) as mock_ext_mgr: generator._generate_sample(['rules'], output_file=None) mock_ext_mgr.assert_called_once_with( 'oslo.policy.policies', names=['rules'], on_load_failure_callback=generator.on_load_failure_callback, invoke_on_load=True ) self.assertEqual(expected, stdout.getvalue()) def _test_formatting(self, description, expected): rule = [policy.RuleDefault('admin', 'is_admin:True', description=description)] ext = stevedore.extension.Extension(name='check_rule', entry_point=None, plugin=None, obj=rule) test_mgr = stevedore.named.NamedExtensionManager.make_test_instance( extensions=[ext], namespace=['check_rule']) output_file = self.get_config_file_fullname('policy.yaml') with mock.patch('stevedore.named.NamedExtensionManager', return_value=test_mgr) as mock_ext_mgr: generator._generate_sample(['check_rule'], output_file) mock_ext_mgr.assert_called_once_with( 'oslo.policy.policies', names=['check_rule'], on_load_failure_callback=generator.on_load_failure_callback, invoke_on_load=True) with open(output_file, 'r') as written_file: written_policy = written_file.read() self.assertEqual(expected, written_policy) def test_empty_line_formatting(self): description = ('Check Summary \n' '\n' 'This is a description to ' 'check that empty line has ' 'no white spaces.') expected = """# Check Summary # # This is a description to check that empty line has no white spaces. #"admin": "is_admin:True" """ self._test_formatting(description, expected) def test_paragraph_formatting(self): description = """ Here's a neat description with a paragraph. We want to make sure that it wraps properly. """ expected = """# Here's a neat description with a paragraph. We want \ to make sure # that it wraps properly. #"admin": "is_admin:True" """ self._test_formatting(description, expected) def test_literal_block_formatting(self): description = """Here's another description. This one has a literal block. These lines should be kept apart. 
They should not be wrapped, even though they may be longer than 70 chars """ expected = """# Here's another description. # # This one has a literal block. # These lines should be kept apart. # They should not be wrapped, even though they may be longer than 70 chars #"admin": "is_admin:True" """ self._test_formatting(description, expected) def test_invalid_formatting(self): description = """Here's a broken description. We have some text... Followed by a literal block without any spaces. We don't support definition lists, so this is just wrong! """ expected = """# Here's a broken description. # # We have some text... # # Followed by a literal block without any spaces. # We don't support definition lists, so this is just wrong! #"admin": "is_admin:True" """ with warnings.catch_warnings(record=True) as warns: self._test_formatting(description, expected) self.assertEqual(1, len(warns)) self.assertTrue(issubclass(warns[-1].category, FutureWarning)) self.assertIn('Invalid policy description', str(warns[-1].message)) class GenerateSampleJSONTestCase(base.PolicyBaseTestCase): def setUp(self): super(GenerateSampleJSONTestCase, self).setUp() self.enforcer = policy.Enforcer(self.conf, policy_file='policy.json') def test_generate_loadable_json(self): extensions = [] for name, opts in OPTS.items(): ext = stevedore.extension.Extension(name=name, entry_point=None, plugin=None, obj=opts) extensions.append(ext) test_mgr = stevedore.named.NamedExtensionManager.make_test_instance( extensions=extensions, namespace=['base_rules', 'rules']) output_file = self.get_config_file_fullname('policy.json') with mock.patch('stevedore.named.NamedExtensionManager', return_value=test_mgr) as mock_ext_mgr: # generate sample-policy file with only rules generator._generate_sample(['base_rules', 'rules'], output_file, output_format='json', include_help=False) mock_ext_mgr.assert_called_once_with( 'oslo.policy.policies', names=['base_rules', 'rules'], on_load_failure_callback=generator.on_load_failure_callback, invoke_on_load=True) self.enforcer.load_rules() self.assertIn('owner', self.enforcer.rules) self.assertIn('admin', self.enforcer.rules) self.assertIn('admin_or_owner', self.enforcer.rules) self.assertEqual('project_id:%(project_id)s', str(self.enforcer.rules['owner'])) self.assertEqual('is_admin:True', str(self.enforcer.rules['admin'])) self.assertEqual('(rule:admin or rule:owner)', str(self.enforcer.rules['admin_or_owner'])) def test_expected_content(self): extensions = [] for name, opts in OPTS.items(): ext = stevedore.extension.Extension(name=name, entry_point=None, plugin=None, obj=opts) extensions.append(ext) test_mgr = stevedore.named.NamedExtensionManager.make_test_instance( extensions=extensions, namespace=['base_rules', 'rules']) expected = '''{ "admin": "is_admin:True", "owner": "project_id:%(project_id)s", "shared": "field:networks:shared=True", "admin_or_owner": "rule:admin or rule:owner" } ''' output_file = self.get_config_file_fullname('policy.json') with mock.patch('stevedore.named.NamedExtensionManager', return_value=test_mgr) as mock_ext_mgr: generator._generate_sample(['base_rules', 'rules'], output_file=output_file, output_format='json') mock_ext_mgr.assert_called_once_with( 'oslo.policy.policies', names=['base_rules', 'rules'], on_load_failure_callback=generator.on_load_failure_callback, invoke_on_load=True) with open(output_file, 'r') as written_file: written_policy = written_file.read() self.assertEqual(expected, written_policy) def test_expected_content_stdout(self): extensions = [] for name, opts 
in OPTS.items(): ext = stevedore.extension.Extension(name=name, entry_point=None, plugin=None, obj=opts) extensions.append(ext) test_mgr = stevedore.named.NamedExtensionManager.make_test_instance( extensions=extensions, namespace=['base_rules', 'rules']) expected = '''{ "admin": "is_admin:True", "owner": "project_id:%(project_id)s", "shared": "field:networks:shared=True", "admin_or_owner": "rule:admin or rule:owner" } ''' stdout = self._capture_stdout() with mock.patch('stevedore.named.NamedExtensionManager', return_value=test_mgr) as mock_ext_mgr: generator._generate_sample(['base_rules', 'rules'], output_file=None, output_format='json') mock_ext_mgr.assert_called_once_with( 'oslo.policy.policies', names=['base_rules', 'rules'], on_load_failure_callback=generator.on_load_failure_callback, invoke_on_load=True) self.assertEqual(expected, stdout.getvalue()) @mock.patch.object(generator, 'LOG') def test_generate_json_file_log_warning(self, mock_log): extensions = [] for name, opts in OPTS.items(): ext = stevedore.extension.Extension(name=name, entry_point=None, plugin=None, obj=opts) extensions.append(ext) test_mgr = stevedore.named.NamedExtensionManager.make_test_instance( extensions=extensions, namespace=['base_rules', 'rules']) output_file = self.get_config_file_fullname('policy.json') with mock.patch('stevedore.named.NamedExtensionManager', return_value=test_mgr): generator._generate_sample(['base_rules', 'rules'], output_file, output_format='json') mock_log.warning.assert_any_call(policy.WARN_JSON) class GeneratorRaiseErrorTestCase(testtools.TestCase): def test_generator_raises_error(self): """Verifies that errors from extension manager are not suppressed.""" class FakeException(Exception): pass class FakeEP(object): def __init__(self): self.name = 'callback_is_expected' self.require = self.resolve self.load = self.resolve def resolve(self, *args, **kwargs): raise FakeException() fake_ep = FakeEP() with mock.patch('stevedore.named.NamedExtensionManager', side_effect=FakeException()): self.assertRaises(FakeException, generator._generate_sample, fake_ep.name) def test_generator_call_with_no_arguments_raises_error(self): testargs = ['oslopolicy-sample-generator'] with mock.patch('sys.argv', testargs): local_conf = cfg.ConfigOpts() self.assertRaises(cfg.RequiredOptError, generator.generate_sample, [], local_conf) class GeneratePolicyTestCase(base.PolicyBaseTestCase): def setUp(self): super(GeneratePolicyTestCase, self).setUp() def test_merged_rules(self): extensions = [] for name, opts in OPTS.items(): ext = stevedore.extension.Extension(name=name, entry_point=None, plugin=None, obj=opts) extensions.append(ext) test_mgr = stevedore.named.NamedExtensionManager.make_test_instance( extensions=extensions, namespace=['base_rules', 'rules']) # Write the policy file for an enforcer to load sample_file = self.get_config_file_fullname('policy-sample.yaml') with mock.patch('stevedore.named.NamedExtensionManager', return_value=test_mgr): # generate sample-policy file with only rules generator._generate_sample(['base_rules', 'rules'], sample_file, include_help=False) enforcer = policy.Enforcer(self.conf, policy_file='policy-sample.yaml') # register an opt defined in the file enforcer.register_default(policy.RuleDefault('admin', 'is_admin:False')) # register a new opt enforcer.register_default(policy.RuleDefault('foo', 'role:foo')) # Mock out stevedore to return the configured enforcer ext = stevedore.extension.Extension(name='testing', entry_point=None, plugin=None, obj=enforcer) test_mgr = 
stevedore.named.NamedExtensionManager.make_test_instance( extensions=[ext], namespace='testing') # Generate a merged file merged_file = self.get_config_file_fullname('policy-merged.yaml') with mock.patch('stevedore.named.NamedExtensionManager', return_value=test_mgr) as mock_ext_mgr: generator._generate_policy(namespace='testing', output_file=merged_file) mock_ext_mgr.assert_called_once_with( 'oslo.policy.enforcer', names=['testing'], on_load_failure_callback=generator.on_load_failure_callback, invoke_on_load=True) # load the merged file with a new enforcer merged_enforcer = policy.Enforcer(self.conf, policy_file='policy-merged.yaml') merged_enforcer.load_rules() for rule in ['admin', 'owner', 'admin_or_owner', 'foo']: self.assertIn(rule, merged_enforcer.rules) self.assertEqual('is_admin:True', str(merged_enforcer.rules['admin'])) self.assertEqual('role:foo', str(merged_enforcer.rules['foo'])) class ListRedundantTestCase(base.PolicyBaseTestCase): def setUp(self): super(ListRedundantTestCase, self).setUp() @mock.patch('warnings.warn') def test_matched_rules(self, mock_warn): extensions = [] for name, opts in OPTS.items(): ext = stevedore.extension.Extension(name=name, entry_point=None, plugin=None, obj=opts) extensions.append(ext) test_mgr = stevedore.named.NamedExtensionManager.make_test_instance( extensions=extensions, namespace=['base_rules', 'rules']) # Write the policy file for an enforcer to load sample_file = self.get_config_file_fullname('policy-sample.yaml') with mock.patch('stevedore.named.NamedExtensionManager', return_value=test_mgr): # generate sample-policy file with only rules generator._generate_sample(['base_rules', 'rules'], sample_file, include_help=False) enforcer = policy.Enforcer(self.conf, policy_file='policy-sample.yaml') # register opts that match those defined in policy-sample.yaml enforcer.register_default(policy.RuleDefault('admin', 'is_admin:True')) enforcer.register_default( policy.RuleDefault('owner', 'project_id:%(project_id)s')) # register a new opt deprecated_rule = policy.DeprecatedRule( name='old_foo', check_str='role:bar', deprecated_reason='reason', deprecated_since='T' ) enforcer.register_default( policy.RuleDefault( name='foo', check_str='role:foo', deprecated_rule=deprecated_rule, ), ) # Mock out stevedore to return the configured enforcer ext = stevedore.extension.Extension(name='testing', entry_point=None, plugin=None, obj=enforcer) test_mgr = stevedore.named.NamedExtensionManager.make_test_instance( extensions=[ext], namespace='testing') stdout = self._capture_stdout() with mock.patch('stevedore.named.NamedExtensionManager', return_value=test_mgr) as mock_ext_mgr: generator._list_redundant(namespace='testing') mock_ext_mgr.assert_called_once_with( 'oslo.policy.enforcer', names=['testing'], on_load_failure_callback=generator.on_load_failure_callback, invoke_on_load=True) matches = [line.split(': ', 1) for line in stdout.getvalue().splitlines()] matches.sort(key=operator.itemgetter(0)) # Should be 'admin' opt0 = matches[0] self.assertEqual('"admin"', opt0[0]) self.assertEqual('"is_admin:True"', opt0[1]) # Should be 'owner' opt1 = matches[1] self.assertEqual('"owner"', opt1[0]) self.assertEqual('"project_id:%(project_id)s"', opt1[1]) self.assertFalse(mock_warn.called, 'Deprecation warnings not suppressed.') class UpgradePolicyTestCase(base.PolicyBaseTestCase): def setUp(self): super(UpgradePolicyTestCase, self).setUp() policy_json_contents = jsonutils.dumps({ "deprecated_name": "rule:admin" }) self.create_config_file('policy.json', 
policy_json_contents) deprecated_policy = policy.DeprecatedRule( name='deprecated_name', check_str='rule:admin', deprecated_reason='test', deprecated_since='Stein', ) self.new_policy = policy.DocumentedRuleDefault( name='new_policy_name', check_str='rule:admin', description='test_policy', operations=[{'path': '/test', 'method': 'GET'}], deprecated_rule=deprecated_policy, ) self.extensions = [] ext = stevedore.extension.Extension(name='test_upgrade', entry_point=None, plugin=None, obj=[self.new_policy]) self.extensions.append(ext) # Just used for cli opt parsing self.local_conf = cfg.ConfigOpts() def test_upgrade_policy_json_file(self): test_mgr = stevedore.named.NamedExtensionManager.make_test_instance( extensions=self.extensions, namespace='test_upgrade') with mock.patch('stevedore.named.NamedExtensionManager', return_value=test_mgr): testargs = ['olsopolicy-policy-upgrade', '--policy', self.get_config_file_fullname('policy.json'), '--namespace', 'test_upgrade', '--output-file', self.get_config_file_fullname('new_policy.json'), '--format', 'json'] with mock.patch('sys.argv', testargs): generator.upgrade_policy(conf=self.local_conf) new_file = self.get_config_file_fullname('new_policy.json') with open(new_file, 'r') as fh: new_policy = jsonutils.loads(fh.read()) self.assertIsNotNone(new_policy.get('new_policy_name')) self.assertIsNone(new_policy.get('deprecated_name')) @mock.patch.object(generator, 'LOG') def test_upgrade_policy_json_file_log_warning(self, mock_log): test_mgr = stevedore.named.NamedExtensionManager.make_test_instance( extensions=self.extensions, namespace='test_upgrade') with mock.patch('stevedore.named.NamedExtensionManager', return_value=test_mgr): testargs = ['olsopolicy-policy-upgrade', '--policy', self.get_config_file_fullname('policy.json'), '--namespace', 'test_upgrade', '--output-file', self.get_config_file_fullname('new_policy.json'), '--format', 'json'] with mock.patch('sys.argv', testargs): generator.upgrade_policy(conf=self.local_conf) mock_log.warning.assert_any_call(policy.WARN_JSON) def test_upgrade_policy_yaml_file(self): test_mgr = stevedore.named.NamedExtensionManager.make_test_instance( extensions=self.extensions, namespace='test_upgrade') with mock.patch('stevedore.named.NamedExtensionManager', return_value=test_mgr): testargs = ['olsopolicy-policy-upgrade', '--policy', self.get_config_file_fullname('policy.json'), '--namespace', 'test_upgrade', '--output-file', self.get_config_file_fullname('new_policy.yaml'), '--format', 'yaml'] with mock.patch('sys.argv', testargs): generator.upgrade_policy(conf=self.local_conf) new_file = self.get_config_file_fullname('new_policy.yaml') with open(new_file, 'r') as fh: new_policy = yaml.safe_load(fh) self.assertIsNotNone(new_policy.get('new_policy_name')) self.assertIsNone(new_policy.get('deprecated_name')) def test_upgrade_policy_json_stdout(self): test_mgr = stevedore.named.NamedExtensionManager.make_test_instance( extensions=self.extensions, namespace='test_upgrade') stdout = self._capture_stdout() with mock.patch('stevedore.named.NamedExtensionManager', return_value=test_mgr): testargs = ['olsopolicy-policy-upgrade', '--policy', self.get_config_file_fullname('policy.json'), '--namespace', 'test_upgrade', '--format', 'json'] with mock.patch('sys.argv', testargs): generator.upgrade_policy(conf=self.local_conf) expected = '''{ "new_policy_name": "rule:admin" }''' self.assertEqual(expected, stdout.getvalue()) def test_upgrade_policy_yaml_stdout(self): test_mgr = 
stevedore.named.NamedExtensionManager.make_test_instance( extensions=self.extensions, namespace='test_upgrade') stdout = self._capture_stdout() with mock.patch('stevedore.named.NamedExtensionManager', return_value=test_mgr): testargs = ['olsopolicy-policy-upgrade', '--policy', self.get_config_file_fullname('policy.json'), '--namespace', 'test_upgrade', '--format', 'yaml'] with mock.patch('sys.argv', testargs): generator.upgrade_policy(conf=self.local_conf) expected = '''new_policy_name: rule:admin ''' self.assertEqual(expected, stdout.getvalue()) @mock.patch('stevedore.named.NamedExtensionManager') class GetEnforcerTestCase(base.PolicyBaseTestCase): def test_get_enforcer(self, mock_manager): mock_instance = mock.MagicMock() mock_instance.__contains__.return_value = True mock_manager.return_value = mock_instance mock_item = mock.Mock() mock_item.obj = 'test' mock_instance.__getitem__.return_value = mock_item self.assertEqual('test', generator._get_enforcer('foo')) def test_get_enforcer_missing(self, mock_manager): mock_instance = mock.MagicMock() mock_instance.__contains__.return_value = False mock_manager.return_value = mock_instance self.assertRaises(KeyError, generator._get_enforcer, 'nonexistent') class ValidatorTestCase(base.PolicyBaseTestCase): def _get_test_enforcer(self): test_rules = [policy.RuleDefault('foo', 'foo:bar=baz'), policy.RuleDefault('bar', 'bar:foo=baz')] enforcer = policy.Enforcer(self.conf) enforcer.register_defaults(test_rules) return enforcer def _test_policy(self, rule, success=False, missing_file=False): policy_file = self.get_config_file_fullname('test.yaml') if missing_file: policy_file = 'bogus.yaml' self.create_config_file('test.yaml', rule) self.create_config_file('test.conf', '[oslo_policy]\npolicy_file=%s' % policy_file) # Reparse now that we've created our configs self.conf(args=['--config-dir', self.config_dir]) with mock.patch('oslo_policy.generator._get_enforcer') as ge: ge.return_value = self._get_test_enforcer() result = generator._validate_policy('test') if success: self.assertEqual(0, result) else: self.assertEqual(1, result) def test_success(self): self._test_policy('foo: rule:bar', success=True) def test_cyclical_reference(self): self._test_policy('foo: rule:bar\nbar: rule:foo') def test_invalid_syntax(self): self._test_policy('foo: (bar))') def test_false_okay(self): self._test_policy('foo: !', success=True) def test_reference_nonexistent(self): self._test_policy('foo: rule:baz') def test_nonexistent(self): self._test_policy('baz: rule:foo') def test_missing_policy_file(self): self._test_policy('', missing_file=True) class ConvertJsonToYamlTestCase(base.PolicyBaseTestCase): def setUp(self): super(ConvertJsonToYamlTestCase, self).setUp() policy_json_contents = jsonutils.dumps({ "rule1_name": "rule:admin", "rule2_name": "rule:overridden", "deprecated_rule1_name": "rule:admin" }) self.create_config_file('policy.json', policy_json_contents) self.output_file_path = self.get_config_file_fullname( 'converted_policy.yaml') deprecated_policy = policy.DeprecatedRule( name='deprecated_rule1_name', check_str='rule:admin', deprecated_reason='testing', deprecated_since='ussuri', ) self.registered_policy = [ policy.DocumentedRuleDefault( name='rule1_name', check_str='rule:admin', description='test_rule1', operations=[{'path': '/test', 'method': 'GET'}], deprecated_rule=deprecated_policy, scope_types=['system'], ), policy.RuleDefault( name='rule2_name', check_str='rule:admin', ) ] self.extensions = [] ext = stevedore.extension.Extension(name='test', 
entry_point=None, plugin=None, obj=self.registered_policy) self.extensions.append(ext) # Just used for cli opt parsing self.local_conf = cfg.ConfigOpts() self.expected = '''# test_rule1 # GET /test # Intended scope(s): system #"rule1_name": "rule:admin" # rule2_name "rule2_name": "rule:overridden" # WARNING: Below rules are either deprecated rules # or extra rules in policy file, it is strongly # recommended to switch to new rules. "deprecated_rule1_name": "rule:admin" ''' def _is_yaml(self, data): is_yaml = False try: jsonutils.loads(data) except ValueError: try: yaml.safe_load(data) is_yaml = True except yaml.scanner.ScannerError: pass return is_yaml def _test_convert_json_to_yaml_file(self, output_to_file=True): test_mgr = stevedore.named.NamedExtensionManager.make_test_instance( extensions=self.extensions, namespace='test') converted_policy_data = None with mock.patch('stevedore.named.NamedExtensionManager', return_value=test_mgr): testargs = ['oslopolicy-convert-json-to-yaml', '--namespace', 'test', '--policy-file', self.get_config_file_fullname('policy.json')] if output_to_file: testargs.extend(['--output-file', self.output_file_path]) with mock.patch('sys.argv', testargs): generator.convert_policy_json_to_yaml(conf=self.local_conf) if output_to_file: with open(self.output_file_path, 'r') as fh: converted_policy_data = fh.read() return converted_policy_data def test_convert_json_to_yaml_file(self): converted_policy_data = self._test_convert_json_to_yaml_file() self.assertTrue(self._is_yaml(converted_policy_data)) self.assertEqual(self.expected, converted_policy_data) def test_convert_policy_to_stdout(self): stdout = self._capture_stdout() self._test_convert_json_to_yaml_file(output_to_file=False) self.assertEqual(self.expected, stdout.getvalue()) def test_converted_yaml_is_loadable(self): self._test_convert_json_to_yaml_file() enforcer = policy.Enforcer(self.conf, policy_file=self.output_file_path) enforcer.load_rules() for rule in ['rule2_name', 'deprecated_rule1_name']: self.assertIn(rule, enforcer.rules) def test_default_rules_comment_out_in_yaml_file(self): converted_policy_data = self._test_convert_json_to_yaml_file() commented_default_rule = '''# test_rule1 # GET /test # Intended scope(s): system #"rule1_name": "rule:admin" ''' self.assertIn(commented_default_rule, converted_policy_data) def test_overridden_rules_uncommented_in_yaml_file(self): converted_policy_data = self._test_convert_json_to_yaml_file() uncommented_overridden_rule = '''# rule2_name "rule2_name": "rule:overridden" ''' self.assertIn(uncommented_overridden_rule, converted_policy_data) def test_existing_deprecated_rules_kept_uncommented_in_yaml_file(self): converted_policy_data = self._test_convert_json_to_yaml_file() existing_deprecated_rule_with_warning = '''# WARNING: Below rules are either deprecated rules # or extra rules in policy file, it is strongly # recommended to switch to new rules. "deprecated_rule1_name": "rule:admin" ''' self.assertIn(existing_deprecated_rule_with_warning, converted_policy_data) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/tests/test_opts.py0000664000175000017500000000435600000000000022174 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy from oslo_config import cfg from oslotest import base as test_base from oslo_policy import opts class OptsTestCase(test_base.BaseTestCase): def setUp(self): super(OptsTestCase, self).setUp() self.conf = cfg.ConfigOpts() self.original_opts = opts._options opts._options = copy.deepcopy(opts._options) def reset(): opts._options = self.original_opts self.addCleanup(reset) def test_set_defaults_policy_file(self): opts._register(self.conf) self.assertNotEqual('new-value.json', self.conf.oslo_policy.policy_file) opts.set_defaults(self.conf, policy_file='new-value.json') self.assertEqual('new-value.json', self.conf.oslo_policy.policy_file) def test_set_defaults_enforce_scope(self): opts._register(self.conf) self.assertEqual(False, self.conf.oslo_policy.enforce_scope) opts.set_defaults(self.conf, enforce_scope=True) self.assertEqual(True, self.conf.oslo_policy.enforce_scope) def test_set_defaults_two_opts(self): opts._register(self.conf) self.assertEqual(False, self.conf.oslo_policy.enforce_scope) self.assertEqual(False, self.conf.oslo_policy.enforce_new_defaults) opts.set_defaults(self.conf, enforce_scope=True, enforce_new_defaults=True) self.assertEqual(True, self.conf.oslo_policy.enforce_scope) self.assertEqual(True, self.conf.oslo_policy.enforce_new_defaults) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/tests/test_parser.py0000664000175000017500000005056500000000000022506 0ustar00zuulzuul00000000000000# Copyright (c) 2015 OpenStack Foundation. # All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
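# ---------------------------------------------------------------------------
# A minimal usage sketch (not part of the original test modules) of how a
# consuming service might apply the defaults exercised by test_opts.py above.
# The option names (policy_file, enforce_scope, enforce_new_defaults) are the
# ones the tests set; the surrounding service bootstrap code is assumed for
# illustration only.
from oslo_config import cfg
from oslo_policy import opts
from oslo_policy import policy

conf = cfg.ConfigOpts()
# Adjust the library defaults before any rules are loaded or enforced.
opts.set_defaults(conf, policy_file='policy.yaml',
                  enforce_scope=True, enforce_new_defaults=True)
# Constructing the Enforcer registers the [oslo_policy] options on conf,
# so the overridden defaults are visible afterwards.
enforcer = policy.Enforcer(conf)
assert conf.oslo_policy.enforce_scope is True
# ---------------------------------------------------------------------------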
from unittest import mock from oslotest import base as test_base from oslo_policy import _checks from oslo_policy import _parser from oslo_policy.tests import base class ParseCheckTestCase(test_base.BaseTestCase): def test_false(self): result = _parser._parse_check('!') self.assertIsInstance(result, _checks.FalseCheck) def test_true(self): result = _parser._parse_check('@') self.assertIsInstance(result, _checks.TrueCheck) @mock.patch.object(_parser, 'LOG') def test_bad_rule(self, mock_log): result = _parser._parse_check('foobar') self.assertIsInstance(result, _checks.FalseCheck) mock_log.exception.assert_called_once() @mock.patch.object(_checks, 'registered_checks', {}) @mock.patch.object(_parser, 'LOG') def test_no_handler(self, mock_log): result = _parser._parse_check('no:handler') self.assertIsInstance(result, _checks.FalseCheck) mock_log.error.assert_called() @mock.patch.object(_checks, 'registered_checks', { 'spam': mock.Mock(return_value='spam_check'), None: mock.Mock(return_value='none_check'), }) def test_check(self): result = _parser._parse_check('spam:handler') self.assertEqual('spam_check', result) _checks.registered_checks['spam'].assert_called_once_with('spam', 'handler') self.assertFalse(_checks.registered_checks[None].called) @mock.patch.object(_checks, 'registered_checks', { None: mock.Mock(return_value='none_check'), }) def test_check_default(self): result = _parser._parse_check('spam:handler') self.assertEqual('none_check', result) _checks.registered_checks[None].assert_called_once_with('spam', 'handler') class ParseListRuleTestCase(test_base.BaseTestCase): def test_empty(self): result = _parser._parse_list_rule([]) self.assertIsInstance(result, _checks.TrueCheck) self.assertEqual('@', str(result)) @mock.patch.object(_parser, '_parse_check', base.FakeCheck) def test_oneele_zeroele(self): result = _parser._parse_list_rule([[]]) self.assertIsInstance(result, _checks.FalseCheck) self.assertEqual('!', str(result)) @mock.patch.object(_parser, '_parse_check', base.FakeCheck) def test_oneele_bare(self): result = _parser._parse_list_rule(['rule']) self.assertIsInstance(result, base.FakeCheck) self.assertEqual('rule', result.result) self.assertEqual('rule', str(result)) @mock.patch.object(_parser, '_parse_check', base.FakeCheck) def test_oneele_oneele(self): result = _parser._parse_list_rule([['rule']]) self.assertIsInstance(result, base.FakeCheck) self.assertEqual('rule', result.result) self.assertEqual('rule', str(result)) @mock.patch.object(_parser, '_parse_check', base.FakeCheck) def test_oneele_multi(self): result = _parser._parse_list_rule([['rule1', 'rule2']]) self.assertIsInstance(result, _checks.AndCheck) self.assertEqual(2, len(result.rules)) for i, value in enumerate(['rule1', 'rule2']): self.assertIsInstance(result.rules[i], base.FakeCheck) self.assertEqual(value, result.rules[i].result) self.assertEqual('(rule1 and rule2)', str(result)) @mock.patch.object(_parser, '_parse_check', base.FakeCheck) def test_multi_oneele(self): result = _parser._parse_list_rule([['rule1'], ['rule2']]) self.assertIsInstance(result, _checks.OrCheck) self.assertEqual(2, len(result.rules)) for i, value in enumerate(['rule1', 'rule2']): self.assertIsInstance(result.rules[i], base.FakeCheck) self.assertEqual(value, result.rules[i].result) self.assertEqual('(rule1 or rule2)', str(result)) @mock.patch.object(_parser, '_parse_check', base.FakeCheck) def test_multi_multi(self): result = _parser._parse_list_rule([['rule1', 'rule2'], ['rule3', 'rule4']]) self.assertIsInstance(result, _checks.OrCheck) 
self.assertEqual(2, len(result.rules)) for i, values in enumerate([['rule1', 'rule2'], ['rule3', 'rule4']]): self.assertIsInstance(result.rules[i], _checks.AndCheck) self.assertEqual(2, len(result.rules[i].rules)) for j, value in enumerate(values): self.assertIsInstance(result.rules[i].rules[j], base.FakeCheck) self.assertEqual(value, result.rules[i].rules[j].result) self.assertEqual('((rule1 and rule2) or (rule3 and rule4))', str(result)) class ParseTokenizeTestCase(test_base.BaseTestCase): @mock.patch.object(_parser, '_parse_check', lambda x: x) def test_tokenize(self): exemplar = ("(( ( ((() And)) or ) (check:%(miss)s) not)) " "'a-string' \"another-string\"") expected = [ ('(', '('), ('(', '('), ('(', '('), ('(', '('), ('(', '('), ('(', '('), (')', ')'), ('and', 'And'), (')', ')'), (')', ')'), ('or', 'or'), (')', ')'), ('(', '('), ('check', 'check:%(miss)s'), (')', ')'), ('not', 'not'), (')', ')'), (')', ')'), ('string', 'a-string'), ('string', 'another-string'), ] result = list(_parser._parse_tokenize(exemplar)) self.assertEqual(expected, result) class ParseStateMetaTestCase(test_base.BaseTestCase): def test_reducer(self): @_parser.reducer('a', 'b', 'c') @_parser.reducer('d', 'e', 'f') def spam(): pass self.assertTrue(hasattr(spam, 'reducers')) self.assertEqual([['d', 'e', 'f'], ['a', 'b', 'c']], spam.reducers) def test_parse_state_meta(self): class FakeState(metaclass=_parser.ParseStateMeta): @_parser.reducer('a', 'b', 'c') @_parser.reducer('d', 'e', 'f') def reduce1(self): pass @_parser.reducer('g', 'h', 'i') def reduce2(self): pass self.assertTrue(hasattr(FakeState, 'reducers')) for reduction, reducer in FakeState.reducers: if (reduction == ['a', 'b', 'c'] or reduction == ['d', 'e', 'f']): self.assertEqual('reduce1', reducer) elif reduction == ['g', 'h', 'i']: self.assertEqual('reduce2', reducer) else: self.fail('Unrecognized reducer discovered') class ParseStateTestCase(test_base.BaseTestCase): def test_init(self): state = _parser.ParseState() self.assertEqual([], state.tokens) self.assertEqual([], state.values) @mock.patch.object(_parser.ParseState, 'reducers', [(['tok1'], 'meth')]) @mock.patch.object(_parser.ParseState, 'meth', create=True) def test_reduce_none(self, mock_meth): state = _parser.ParseState() state.tokens = ['tok2'] state.values = ['val2'] state.reduce() self.assertEqual(['tok2'], state.tokens) self.assertEqual(['val2'], state.values) self.assertFalse(mock_meth.called) @mock.patch.object(_parser.ParseState, 'reducers', [(['tok1', 'tok2'], 'meth')]) @mock.patch.object(_parser.ParseState, 'meth', create=True) def test_reduce_short(self, mock_meth): state = _parser.ParseState() state.tokens = ['tok1'] state.values = ['val1'] state.reduce() self.assertEqual(['tok1'], state.tokens) self.assertEqual(['val1'], state.values) self.assertFalse(mock_meth.called) @mock.patch.object(_parser.ParseState, 'reducers', [(['tok1', 'tok2'], 'meth')]) @mock.patch.object(_parser.ParseState, 'meth', create=True, return_value=[('tok3', 'val3')]) def test_reduce_one(self, mock_meth): state = _parser.ParseState() state.tokens = ['tok1', 'tok2'] state.values = ['val1', 'val2'] state.reduce() self.assertEqual(['tok3'], state.tokens) self.assertEqual(['val3'], state.values) mock_meth.assert_called_once_with('val1', 'val2') @mock.patch.object(_parser.ParseState, 'reducers', [ (['tok1', 'tok4'], 'meth2'), (['tok2', 'tok3'], 'meth1'), ]) @mock.patch.object(_parser.ParseState, 'meth1', create=True, return_value=[('tok4', 'val4')]) @mock.patch.object(_parser.ParseState, 'meth2', create=True, 
return_value=[('tok5', 'val5')]) def test_reduce_two(self, mock_meth2, mock_meth1): state = _parser.ParseState() state.tokens = ['tok1', 'tok2', 'tok3'] state.values = ['val1', 'val2', 'val3'] state.reduce() self.assertEqual(['tok5'], state.tokens) self.assertEqual(['val5'], state.values) mock_meth1.assert_called_once_with('val2', 'val3') mock_meth2.assert_called_once_with('val1', 'val4') @mock.patch.object(_parser.ParseState, 'reducers', [(['tok1', 'tok2'], 'meth')]) @mock.patch.object(_parser.ParseState, 'meth', create=True, return_value=[('tok3', 'val3'), ('tok4', 'val4')]) def test_reduce_multi(self, mock_meth): state = _parser.ParseState() state.tokens = ['tok1', 'tok2'] state.values = ['val1', 'val2'] state.reduce() self.assertEqual(['tok3', 'tok4'], state.tokens) self.assertEqual(['val3', 'val4'], state.values) mock_meth.assert_called_once_with('val1', 'val2') def test_shift(self): state = _parser.ParseState() with mock.patch.object(_parser.ParseState, 'reduce') as mock_reduce: state.shift('token', 'value') self.assertEqual(['token'], state.tokens) self.assertEqual(['value'], state.values) mock_reduce.assert_called_once_with() def test_result_empty(self): state = _parser.ParseState() self.assertRaises(ValueError, lambda: state.result) def test_result_unreduced(self): state = _parser.ParseState() state.tokens = ['tok1', 'tok2'] state.values = ['val1', 'val2'] self.assertRaises(ValueError, lambda: state.result) def test_result(self): state = _parser.ParseState() state.tokens = ['token'] state.values = ['value'] self.assertEqual('value', state.result) def test_wrap_check(self): state = _parser.ParseState() result = state._wrap_check('(', 'the_check', ')') self.assertEqual([('check', 'the_check')], result) @mock.patch.object(_checks, 'AndCheck', lambda x: x) def test_make_and_expr(self): state = _parser.ParseState() result = state._make_and_expr('check1', 'and', 'check2') self.assertEqual([('and_expr', ['check1', 'check2'])], result) def test_extend_and_expr(self): state = _parser.ParseState() mock_expr = mock.Mock() mock_expr.add_check.return_value = 'newcheck' result = state._extend_and_expr(mock_expr, 'and', 'check') self.assertEqual([('and_expr', 'newcheck')], result) mock_expr.add_check.assert_called_once_with('check') @mock.patch.object(_checks, 'OrCheck', lambda x: x) def test_make_or_expr(self): state = _parser.ParseState() result = state._make_or_expr('check1', 'or', 'check2') self.assertEqual([('or_expr', ['check1', 'check2'])], result) def test_extend_or_expr(self): state = _parser.ParseState() mock_expr = mock.Mock() mock_expr.add_check.return_value = 'newcheck' result = state._extend_or_expr(mock_expr, 'or', 'check') self.assertEqual([('or_expr', 'newcheck')], result) mock_expr.add_check.assert_called_once_with('check') @mock.patch.object(_checks, 'NotCheck', lambda x: 'not %s' % x) def test_make_not_expr(self): state = _parser.ParseState() result = state._make_not_expr('not', 'check') self.assertEqual([('check', 'not check')], result) class ParseTextRuleTestCase(test_base.BaseTestCase): def test_empty(self): result = _parser._parse_text_rule('') self.assertIsInstance(result, _checks.TrueCheck) @mock.patch.object(_parser, '_parse_tokenize', return_value=[('tok1', 'val1'), ('tok2', 'val2')]) @mock.patch.object(_parser.ParseState, 'shift') @mock.patch.object(_parser.ParseState, 'result', 'result') def test_shifts(self, mock_shift, mock_parse_tokenize): result = _parser._parse_text_rule('test rule') self.assertEqual('result', result) 
mock_parse_tokenize.assert_called_once_with('test rule') mock_shift.assert_has_calls( [mock.call('tok1', 'val1'), mock.call('tok2', 'val2')]) @mock.patch.object(_parser, 'LOG', new=mock.Mock()) @mock.patch.object(_parser, '_parse_tokenize', return_value=[]) def test_fail(self, mock_parse_tokenize): result = _parser._parse_text_rule('test rule') self.assertIsInstance(result, _checks.FalseCheck) mock_parse_tokenize.assert_called_once_with('test rule') def test_A_or_B_or_C(self): result = _parser._parse_text_rule('@ or ! or @') self.assertEqual('(@ or ! or @)', str(result)) def test_A_or_B_and_C(self): result = _parser._parse_text_rule('@ or ! and @') self.assertEqual('(@ or (! and @))', str(result)) def test_A_and_B_or_C(self): result = _parser._parse_text_rule('@ and ! or @') self.assertEqual('((@ and !) or @)', str(result)) def test_A_and_B_and_C(self): result = _parser._parse_text_rule('@ and ! and @') self.assertEqual('(@ and ! and @)', str(result)) def test_A_or_B_or_C_or_D(self): result = _parser._parse_text_rule('@ or ! or @ or !') self.assertEqual('(@ or ! or @ or !)', str(result)) def test_A_or_B_or_C_and_D(self): result = _parser._parse_text_rule('@ or ! or @ and !') self.assertEqual('(@ or ! or (@ and !))', str(result)) def test_A_or_B_and_C_or_D(self): result = _parser._parse_text_rule('@ or ! and @ or !') self.assertEqual('(@ or (! and @) or !)', str(result)) def test_A_or_B_and_C_and_D(self): result = _parser._parse_text_rule('@ or ! and @ and !') self.assertEqual('(@ or (! and @ and !))', str(result)) def test_A_and_B_or_C_or_D(self): result = _parser._parse_text_rule('@ and ! or @ or !') self.assertEqual('((@ and !) or @ or !)', str(result)) def test_A_and_B_or_C_and_D(self): result = _parser._parse_text_rule('@ and ! or @ and !') self.assertEqual('((@ and !) or (@ and !))', str(result)) def test_A_and_B_and_C_or_D(self): result = _parser._parse_text_rule('@ and ! and @ or !') self.assertEqual('((@ and ! and @) or !)', str(result)) def test_A_and_B_and_C_and_D(self): result = _parser._parse_text_rule('@ and ! and @ and !') self.assertEqual('(@ and ! and @ and !)', str(result)) def test_A_and_B_or_C_with_not_1(self): result = _parser._parse_text_rule('not @ and ! or @') self.assertEqual('((not @ and !) or @)', str(result)) def test_A_and_B_or_C_with_not_2(self): result = _parser._parse_text_rule('@ and not ! or @') self.assertEqual('((@ and not !) or @)', str(result)) def test_A_and_B_or_C_with_not_3(self): result = _parser._parse_text_rule('@ and ! or not @') self.assertEqual('((@ and !) or not @)', str(result)) def test_A_and_B_or_C_with_group_1(self): for expression in ['( @ ) and ! or @', '@ and ( ! ) or @', '@ and ! or ( @ )', '( @ ) and ! or ( @ )', '@ and ( ! ) or ( @ )', '( @ ) and ( ! ) or ( @ )', '( @ and ! ) or @', '( ( @ ) and ! ) or @', '( @ and ( ! ) ) or @', '( ( @ and ! ) ) or @', '( @ and ! or @ )']: result = _parser._parse_text_rule(expression) self.assertEqual('((@ and !) or @)', str(result)) def test_A_and_B_or_C_with_group_2(self): result = _parser._parse_text_rule('@ and ( ! or @ )') self.assertEqual('(@ and (! or @))', str(result)) def test_A_and_B_or_C_with_group_and_not_1(self): for expression in ['not ( @ ) and ! or @', 'not @ and ( ! ) or @', 'not @ and ! or ( @ )', '( not @ ) and ! or @', '( not @ and ! ) or @', '( not @ and ! or @ )']: result = _parser._parse_text_rule(expression) self.assertEqual('((not @ and !) or @)', str(result)) def test_A_and_B_or_C_with_group_and_not_2(self): result = _parser._parse_text_rule('not @ and ( ! 
or @ )') self.assertEqual('(not @ and (! or @))', str(result)) def test_A_and_B_or_C_with_group_and_not_3(self): result = _parser._parse_text_rule('not ( @ and ! or @ )') self.assertEqual('not ((@ and !) or @)', str(result)) def test_A_and_B_or_C_with_group_and_not_4(self): for expression in ['( @ ) and not ! or @', '@ and ( not ! ) or @', '@ and not ( ! ) or @', '@ and not ! or ( @ )', '( @ and not ! ) or @', '( @ and not ! or @ )']: result = _parser._parse_text_rule(expression) self.assertEqual('((@ and not !) or @)', str(result)) def test_A_and_B_or_C_with_group_and_not_5(self): result = _parser._parse_text_rule('@ and ( not ! or @ )') self.assertEqual('(@ and (not ! or @))', str(result)) def test_A_and_B_or_C_with_group_and_not_6(self): result = _parser._parse_text_rule('@ and not ( ! or @ )') self.assertEqual('(@ and not (! or @))', str(result)) def test_A_and_B_or_C_with_group_and_not_7(self): for expression in ['( @ ) and ! or not @', '@ and ( ! ) or not @', '@ and ! or not ( @ )', '@ and ! or ( not @ )', '( @ and ! ) or not @', '( @ and ! or not @ )']: result = _parser._parse_text_rule(expression) self.assertEqual('((@ and !) or not @)', str(result)) def test_A_and_B_or_C_with_group_and_not_8(self): result = _parser._parse_text_rule('@ and ( ! or not @ )') self.assertEqual('(@ and (! or not @))', str(result)) class ParseRuleTestCase(test_base.BaseTestCase): @mock.patch.object(_parser, '_parse_text_rule', return_value='text rule') @mock.patch.object(_parser, '_parse_list_rule', return_value='list rule') def test_parse_rule_string(self, mock_parse_list_rule, mock_parse_text_rule): result = _parser.parse_rule('a string') self.assertEqual('text rule', result) self.assertFalse(mock_parse_list_rule.called) mock_parse_text_rule.assert_called_once_with('a string') @mock.patch.object(_parser, '_parse_text_rule', return_value='text rule') @mock.patch.object(_parser, '_parse_list_rule', return_value='list rule') def test_parse_rule_list(self, mock_parse_list_rule, mock_parse_text_rule): result = _parser.parse_rule([['a'], ['list']]) self.assertEqual('list rule', result) self.assertFalse(mock_parse_text_rule.called) mock_parse_list_rule.assert_called_once_with([['a'], ['list']]) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/tests/test_policy.py0000664000175000017500000024501400000000000022504 0ustar00zuulzuul00000000000000# Copyright (c) 2012 OpenStack Foundation. # All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
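# ---------------------------------------------------------------------------
# A small illustrative sketch (not part of the original test modules) of the
# operator precedence asserted by ParseTextRuleTestCase above: 'and' binds
# tighter than 'or', '@' always passes and '!' always fails.  It uses the
# same private _parser module the tests exercise.
from oslo_policy import _parser

_check = _parser.parse_rule('@ and ! or @')
# The string form shows the grouping the parser produced.
assert str(_check) == '((@ and !) or @)'
# Parsed checks are callable with (target, creds, enforcer); the trailing
# '@' satisfies the 'or', so the whole expression passes.
assert _check({}, {}, None)
# ---------------------------------------------------------------------------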
"""Test of Policy Engine""" import os from unittest import mock import yaml import fixtures from oslo_config import cfg from oslo_context import context from oslo_serialization import jsonutils from oslotest import base as test_base from oslo_policy import _cache_handler from oslo_policy import _checks from oslo_policy import _parser from oslo_policy import policy from oslo_policy.tests import base POLICY_A_CONTENTS = jsonutils.dumps({"default": "role:fakeA"}) POLICY_B_CONTENTS = jsonutils.dumps({"default": "role:fakeB"}) POLICY_FAKE_CONTENTS = jsonutils.dumps({"default": "role:fakeC"}) POLICY_JSON_CONTENTS = jsonutils.dumps({ "default": "rule:admin", "admin": "is_admin:True" }) @_checks.register('field') class FieldCheck(_checks.Check): """A non reversible check. All oslo.policy defined checks have a __str__ method with the property that rule == str(_parser.parse_rule(rule)). Consumers of oslo.policy may have defined checks for which that does not hold true. This FieldCheck is not reversible so we can use it for testing to ensure that this type of check does not break anything. """ def __init__(self, kind, match): # Process the match resource, field_value = match.split(':', 1) field, value = field_value.split('=', 1) super(FieldCheck, self).__init__(kind, '%s:%s:%s' % (resource, field, value)) self.field = field self.value = value def __call__(self, target_dict, cred_dict, enforcer): return True class MyException(Exception): def __init__(self, *args, **kwargs): self.args = args self.kwargs = kwargs class RulesTestCase(test_base.BaseTestCase): def test_init_basic(self): rules = policy.Rules() self.assertEqual({}, rules) self.assertIsNone(rules.default_rule) def test_init(self): rules = policy.Rules(dict(a=1, b=2, c=3), 'a') self.assertEqual(dict(a=1, b=2, c=3), rules) self.assertEqual('a', rules.default_rule) def test_no_default(self): rules = policy.Rules(dict(a=1, b=2, c=3)) self.assertRaises(KeyError, lambda: rules['d']) def test_missing_default(self): rules = policy.Rules(dict(a=1, c=3), 'b') self.assertRaises(KeyError, lambda: rules['d']) def test_with_default(self): rules = policy.Rules(dict(a=1, b=2, c=3), 'b') self.assertEqual(2, rules['d']) def test_retrieval(self): rules = policy.Rules(dict(a=1, b=2, c=3), 'b') self.assertEqual(1, rules['a']) self.assertEqual(2, rules['b']) self.assertEqual(3, rules['c']) @mock.patch.object(_parser, 'parse_rule', lambda x: x) def test_load_json(self): exemplar = jsonutils.dumps({ "admin_or_owner": [["role:admin"], ["project_id:%(project_id)s"]], "default": [] }) rules = policy.Rules.load(exemplar, 'default') self.assertEqual('default', rules.default_rule) self.assertEqual(dict( admin_or_owner=[['role:admin'], ['project_id:%(project_id)s']], default=[], ), rules) @mock.patch.object(_parser, 'parse_rule', lambda x: x) def test_load_json_invalid_exc(self): # When the JSON isn't valid, ValueError is raised on load. exemplar = """{ "admin_or_owner": [["role:admin"], ["project_id:%(project_id)s"]], "default": [ }""" self.assertRaises(ValueError, policy.Rules.load, exemplar, 'default') # However, since change I43782d245d7652ba69613b26fe598ac79ec19929, # policy.Rules.load() first tries loading with the really fast # jsonutils.loads(), and if that fails, it tries loading with # yaml.safe_load(). Since YAML is a superset of JSON, some strictly # invalid JSON can be parsed correctly by policy.Rules.load() without # raising an exception. 
But that means that since 1.17.0, we've been # accepting (strictly speaking) illegal JSON policy files, and for # backward compatibility, we should continue to do so. Thus the # following are here to prevent regressions: # JSON requires double quotes, but the YAML parser doesn't care bad_but_acceptable = """{ 'admin_or_owner': [["role:admin"], ["project_id:%(project_id)s"]], 'default': [] }""" self.assertTrue(policy.Rules.load(bad_but_acceptable, 'default')) # JSON does not allow bare keys, but the YAML parser doesn't care bad_but_acceptable = """{ admin_or_owner: [["role:admin"], ["project_id:%(project_id)s"]], default: [] }""" self.assertTrue(policy.Rules.load(bad_but_acceptable, 'default')) # JSON is picky about commas, but the YAML parser is more forgiving # (Note the trailing , in the exemplar is invalid JSON.) bad_but_acceptable = """{ admin_or_owner: [["role:admin"], ["project_id:%(project_id)s"]], default: [], }""" self.assertTrue(policy.Rules.load(bad_but_acceptable, 'default')) @mock.patch.object(_parser, 'parse_rule', lambda x: x) def test_load_empty_data(self): result = policy.Rules.load('', 'default') self.assertEqual(result, {}) @mock.patch.object(_parser, 'parse_rule', lambda x: x) def test_load_yaml(self): # Test that simplified YAML can be used with load(). # Show that YAML allows useful comments. exemplar = """ # Define a custom rule. admin_or_owner: role:admin or project_id:%(project_id)s # The default rule is used when there's no action defined. default: [] """ rules = policy.Rules.load(exemplar, 'default') self.assertEqual('default', rules.default_rule) self.assertEqual(dict( admin_or_owner='role:admin or project_id:%(project_id)s', default=[], ), rules) @mock.patch.object(_parser, 'parse_rule', lambda x: x) def test_load_yaml_invalid_exc(self): # When the JSON is seriously invalid, ValueError is raised on load(). # (See test_load_json_invalid_exc for what 'seriously invalid' means.) exemplar = """{ # Define a custom rule. admin_or_owner: role:admin or project_id:%(project_id)s # The default rule is used when there's no action defined. default: [ }""" self.assertRaises(ValueError, policy.Rules.load, exemplar, 'default') @mock.patch.object(_parser, 'parse_rule', lambda x: x) def test_from_dict(self): expected = {'admin_or_owner': 'role:admin', 'default': '@'} rules = policy.Rules.from_dict(expected, 'default') self.assertEqual('default', rules.default_rule) self.assertEqual(expected, rules) def test_str(self): exemplar = jsonutils.dumps({ "admin_or_owner": "role:admin or project_id:%(project_id)s" }, indent=4) rules = policy.Rules(dict( admin_or_owner='role:admin or project_id:%(project_id)s', )) self.assertEqual(exemplar, str(rules)) def test_str_true(self): exemplar = jsonutils.dumps({ "admin_or_owner": "" }, indent=4) rules = policy.Rules(dict( admin_or_owner=_checks.TrueCheck(), )) self.assertEqual(exemplar, str(rules)) def test_load_json_deprecated(self): with self.assertWarnsRegex(DeprecationWarning, r'load_json\(\).*load\(\)'): policy.Rules.load_json(jsonutils.dumps({'default': ''}, 'default')) class EnforcerTest(base.PolicyBaseTestCase): def setUp(self): super(EnforcerTest, self).setUp() self.create_config_file('policy.json', POLICY_JSON_CONTENTS) def _test_scenario_with_opts_registered(self, scenario, *args, **kwargs): # This test registers some rules, calls the scenario and then checks # the registered rules. The scenario should be a method which loads # policy files containing POLICY_*_CONTENTS defined above. 
They should # be loaded on the self.enforcer object. # This should be overridden by the policy file self.enforcer.register_default(policy.RuleDefault(name='admin', check_str='is_admin:False')) # This is not in the policy file, only registered self.enforcer.register_default(policy.RuleDefault(name='owner', check_str='role:owner')) scenario(*args, **kwargs) self.assertIn('owner', self.enforcer.rules) self.assertEqual('role:owner', str(self.enforcer.rules['owner'])) self.assertEqual('is_admin:True', str(self.enforcer.rules['admin'])) self.assertIn('owner', self.enforcer.registered_rules) self.assertIn('admin', self.enforcer.registered_rules) self.assertNotIn('default', self.enforcer.registered_rules) self.assertNotIn('owner', self.enforcer.file_rules) self.assertIn('admin', self.enforcer.file_rules) self.assertIn('default', self.enforcer.file_rules) def test_load_file(self): self.conf.set_override('policy_dirs', [], group='oslo_policy') self.enforcer.load_rules(True) self.assertIsNotNone(self.enforcer.rules) self.assertIn('default', self.enforcer.rules) self.assertIn('admin', self.enforcer.rules) self.assertEqual('is_admin:True', str(self.enforcer.rules['admin'])) def test_load_file_opts_registered(self): self._test_scenario_with_opts_registered(self.test_load_file) def test_load_directory(self): self.create_config_file( os.path.join('policy.d', 'a.conf'), POLICY_A_CONTENTS) self.create_config_file( os.path.join('policy.d', 'b.conf'), POLICY_B_CONTENTS) self.enforcer.load_rules(True) self.assertIsNotNone(self.enforcer.rules) loaded_rules = jsonutils.loads(str(self.enforcer.rules)) self.assertEqual('role:fakeB', loaded_rules['default']) self.assertEqual('is_admin:True', loaded_rules['admin']) def test_load_directory_after_file_update(self): self.create_config_file( os.path.join('policy.d', 'a.conf'), POLICY_A_CONTENTS) self.enforcer.load_rules(True) self.assertIsNotNone(self.enforcer.rules) loaded_rules = jsonutils.loads(str(self.enforcer.rules)) self.assertEqual('role:fakeA', loaded_rules['default']) self.assertEqual('is_admin:True', loaded_rules['admin']) new_policy_json_contents = jsonutils.dumps({ "default": "rule:admin", "admin": "is_admin:True", "foo": "rule:bar", }) # Modify the policy.json file and then validate that the rules # from the policy directory are re-applied on top of the # new rules from the file. self.create_config_file('policy.json', new_policy_json_contents) policy_file_path = self.get_config_file_fullname('policy.json') # Force the mtime change since the unit test may write to this file # too fast for mtime to actually change. stinfo = os.stat(policy_file_path) os.utime(policy_file_path, (stinfo.st_atime + 42, stinfo.st_mtime + 42)) self.enforcer.load_rules() self.assertIsNotNone(self.enforcer.rules) loaded_rules = jsonutils.loads(str(self.enforcer.rules)) self.assertEqual('role:fakeA', loaded_rules['default']) self.assertEqual('is_admin:True', loaded_rules['admin']) self.assertEqual('rule:bar', loaded_rules['foo']) def test_load_directory_after_file_is_emptied(self): def dict_rules(enforcer_rules): """Converts enforcer rules to dictionary. 
:param enforcer_rules: enforcer rules represented as a class Rules :return: enforcer rules represented as a dictionary """ return jsonutils.loads(str(enforcer_rules)) self.assertEqual(self.enforcer.rules, {}) self.enforcer.load_rules() main_policy_file_rules = jsonutils.loads(POLICY_JSON_CONTENTS) self.assertEqual(main_policy_file_rules, dict_rules(self.enforcer.rules)) folder_policy_file = os.path.join('policy.d', 'a.conf') self.create_config_file(folder_policy_file, POLICY_A_CONTENTS) self.enforcer.load_rules() expected_rules = main_policy_file_rules.copy() expected_rules.update(jsonutils.loads(POLICY_A_CONTENTS)) self.assertEqual(expected_rules, dict_rules(self.enforcer.rules)) self.create_config_file(folder_policy_file, '{}') # Force the mtime change since the unit test may write to this file # too fast for mtime to actually change. absolute_folder_policy_file_path = self.get_config_file_fullname( folder_policy_file) stinfo = os.stat(absolute_folder_policy_file_path) os.utime(absolute_folder_policy_file_path, (stinfo.st_atime + 42, stinfo.st_mtime + 42)) self.enforcer.load_rules() self.assertEqual(main_policy_file_rules, dict_rules(self.enforcer.rules)) def test_load_directory_opts_registered(self): self._test_scenario_with_opts_registered(self.test_load_directory) def test_load_directory_caching_with_files_updated(self): self.create_config_file( os.path.join('policy.d', 'a.conf'), POLICY_A_CONTENTS) self.enforcer.load_rules(False) self.assertIsNotNone(self.enforcer.rules) old = next(iter(self.enforcer._policy_dir_mtimes)) self.assertEqual(1, len(self.enforcer._policy_dir_mtimes)) # Touch the file conf_path = os.path.join(self.config_dir, os.path.join( 'policy.d', 'a.conf')) stinfo = os.stat(conf_path) os.utime(conf_path, (stinfo.st_atime + 10, stinfo.st_mtime + 10)) self.enforcer.load_rules(False) self.assertEqual(1, len(self.enforcer._policy_dir_mtimes)) self.assertEqual(old, next(iter(self.enforcer._policy_dir_mtimes))) loaded_rules = jsonutils.loads(str(self.enforcer.rules)) self.assertEqual('is_admin:True', loaded_rules['admin']) def test_load_directory_caching_with_files_updated_opts_registered(self): self._test_scenario_with_opts_registered( self.test_load_directory_caching_with_files_updated) def test_load_directory_caching_with_files_same(self, overwrite=True): self.enforcer.overwrite = overwrite self.create_config_file( os.path.join('policy.d', 'a.conf'), POLICY_A_CONTENTS) self.enforcer.load_rules(False) self.assertIsNotNone(self.enforcer.rules) old = next(iter(self.enforcer._policy_dir_mtimes)) self.assertEqual(1, len(self.enforcer._policy_dir_mtimes)) self.enforcer.load_rules(False) self.assertEqual(1, len(self.enforcer._policy_dir_mtimes)) self.assertEqual(old, next(iter(self.enforcer._policy_dir_mtimes))) loaded_rules = jsonutils.loads(str(self.enforcer.rules)) self.assertEqual('is_admin:True', loaded_rules['admin']) def test_load_directory_caching_with_files_same_but_overwrite_false(self): self.test_load_directory_caching_with_files_same(overwrite=False) def test_load_directory_caching_with_files_same_opts_registered(self): self._test_scenario_with_opts_registered( self.test_load_directory_caching_with_files_same) def test_load_dir_caching_with_files_same_overwrite_false_opts_reg(self): # Very long test name makes this difficult test = getattr(self, 'test_load_directory_caching_with_files_same_but_overwrite_false') # NOQA self._test_scenario_with_opts_registered(test) @mock.patch.object(policy, 'LOG') def test_load_json_file_log_warning(self, mock_log): rules = 
jsonutils.dumps({'foo': 'rule:bar'}) self.create_config_file('policy.json', rules) self.enforcer.load_rules(True) mock_log.warning.assert_any_call(policy.WARN_JSON) @mock.patch.object(policy, 'LOG') def test_warning_on_redundant_file_rules(self, mock_log): rules = yaml.dump({'admin': 'is_admin:True'}) self.create_config_file('policy.yaml', rules) path = self.get_config_file_fullname('policy.yaml') enforcer = policy.Enforcer(self.conf, policy_file=path) # register same rule in default as present in file. enforcer.register_default(policy.RuleDefault(name='admin', check_str='is_admin:True')) enforcer.load_rules(True) warn_msg = ("Policy Rules %(names)s specified in policy files " "are the same as the defaults provided by the service. " "You can remove these rules from policy files which " "will make maintenance easier. You can detect these " "redundant rules by ``oslopolicy-list-redundant`` tool " "also.") mock_log.warning.assert_any_call(warn_msg, {'names': ['admin']}) def test_load_multiple_directories(self): self.create_config_file( os.path.join('policy.d', 'a.conf'), POLICY_A_CONTENTS) self.create_config_file( os.path.join('policy.d', 'b.conf'), POLICY_B_CONTENTS) self.create_config_file( os.path.join('policy.2.d', 'fake.conf'), POLICY_FAKE_CONTENTS) self.conf.set_override('policy_dirs', ['policy.d', 'policy.2.d'], group='oslo_policy') self.enforcer.load_rules(True) self.assertIsNotNone(self.enforcer.rules) loaded_rules = jsonutils.loads(str(self.enforcer.rules)) self.assertEqual('role:fakeC', loaded_rules['default']) self.assertEqual('is_admin:True', loaded_rules['admin']) def test_load_multiple_directories_opts_registered(self): self._test_scenario_with_opts_registered( self.test_load_multiple_directories) def test_load_non_existed_directory(self): self.create_config_file( os.path.join('policy.d', 'a.conf'), POLICY_A_CONTENTS) self.conf.set_override('policy_dirs', ['policy.d', 'policy.x.d'], group='oslo_policy') self.enforcer.load_rules(True) self.assertIsNotNone(self.enforcer.rules) self.assertIn('default', self.enforcer.rules) self.assertIn('admin', self.enforcer.rules) def test_load_non_existed_directory_opts_registered(self): self._test_scenario_with_opts_registered( self.test_load_non_existed_directory) def test_load_policy_dirs_with_non_directory(self): self.create_config_file( os.path.join('policy.d', 'a.conf'), POLICY_A_CONTENTS) self.conf.set_override('policy_dirs', [os.path.join('policy.d', 'a.conf')], group='oslo_policy') self.assertRaises(ValueError, self.enforcer.load_rules, True) self.assertRaises(ValueError, self.enforcer.load_rules, False) @mock.patch('oslo_policy.policy.Enforcer.check_rules') def test_load_rules_twice(self, mock_check_rules): self.enforcer.load_rules() self.enforcer.load_rules() self.assertEqual(1, mock_check_rules.call_count) @mock.patch('oslo_policy.policy.Enforcer.check_rules') def test_load_rules_twice_force(self, mock_check_rules): self.enforcer.load_rules(True) self.enforcer.load_rules(True) self.assertEqual(2, mock_check_rules.call_count) @mock.patch('oslo_policy.policy.Enforcer.check_rules') def test_load_rules_twice_clear(self, mock_check_rules): self.enforcer.load_rules() self.enforcer.clear() # NOTE(bnemec): It's weird that we have to pass True here, but clear # sets enforcer.use_conf to False, which causes load_rules to be a # noop when called with no parameters. This is probably a bug. 
self.enforcer.load_rules(True) self.assertEqual(2, mock_check_rules.call_count) @mock.patch('oslo_policy.policy.Enforcer.check_rules') def test_load_directory_twice(self, mock_check_rules): self.create_config_file( os.path.join('policy.d', 'a.conf'), POLICY_A_CONTENTS) self.create_config_file( os.path.join('policy.d', 'b.conf'), POLICY_B_CONTENTS) self.enforcer.load_rules() self.enforcer.load_rules() self.assertEqual(1, mock_check_rules.call_count) self.assertIsNotNone(self.enforcer.rules) @mock.patch('oslo_policy.policy.Enforcer.check_rules') def test_load_directory_twice_force(self, mock_check_rules): self.create_config_file( os.path.join('policy.d', 'a.conf'), POLICY_A_CONTENTS) self.create_config_file( os.path.join('policy.d', 'b.conf'), POLICY_B_CONTENTS) self.enforcer.load_rules(True) self.enforcer.load_rules(True) self.assertEqual(2, mock_check_rules.call_count) self.assertIsNotNone(self.enforcer.rules) @mock.patch('oslo_policy.policy.Enforcer.check_rules') def test_load_directory_twice_changed(self, mock_check_rules): self.create_config_file( os.path.join('policy.d', 'a.conf'), POLICY_A_CONTENTS) self.enforcer.load_rules() # Touch the file conf_path = os.path.join(self.config_dir, os.path.join( 'policy.d', 'a.conf')) stinfo = os.stat(conf_path) os.utime(conf_path, (stinfo.st_atime + 10, stinfo.st_mtime + 10)) self.enforcer.load_rules() self.assertEqual(2, mock_check_rules.call_count) self.assertIsNotNone(self.enforcer.rules) def test_set_rules_type(self): self.assertRaises(TypeError, self.enforcer.set_rules, 'dummy') @mock.patch.object(_cache_handler, 'delete_cached_file', mock.Mock()) def test_clear(self): # Make sure the rules are reset self.enforcer.rules = 'spam' self.enforcer.clear() self.assertEqual({}, self.enforcer.rules) self.assertIsNone(self.enforcer.default_rule) self.assertIsNone(self.enforcer.policy_path) def test_clear_opts_registered(self): # This should be overridden by the policy file self.enforcer.register_default(policy.RuleDefault(name='admin', check_str='is_admin:False')) # This is not in the policy file, only registered self.enforcer.register_default(policy.RuleDefault(name='owner', check_str='role:owner')) self.test_clear() self.assertEqual({}, self.enforcer.registered_rules) def test_rule_with_check(self): rules_json = jsonutils.dumps({ "deny_stack_user": "not role:stack_user", "cloudwatch:PutMetricData": "" }) rules = policy.Rules.load(rules_json) self.enforcer.set_rules(rules) action = 'cloudwatch:PutMetricData' creds = {'roles': ''} self.assertTrue(self.enforcer.enforce(action, {}, creds)) def test_enforcer_with_default_rule(self): rules_json = jsonutils.dumps({ "deny_stack_user": "not role:stack_user", "cloudwatch:PutMetricData": "" }) rules = policy.Rules.load(rules_json) default_rule = _checks.TrueCheck() enforcer = policy.Enforcer(self.conf, default_rule=default_rule) enforcer.set_rules(rules) action = 'cloudwatch:PutMetricData' creds = {'roles': ''} self.assertTrue(enforcer.enforce(action, {}, creds)) def test_enforcer_force_reload_with_overwrite(self, opts_registered=0): self.create_config_file( os.path.join('policy.d', 'a.conf'), POLICY_A_CONTENTS) self.create_config_file( os.path.join('policy.d', 'b.conf'), POLICY_B_CONTENTS) # Prepare in memory fake policies. self.enforcer.set_rules({'test': _parser.parse_rule('role:test')}, use_conf=True) self.enforcer.set_rules({'default': _parser.parse_rule('role:fakeZ')}, overwrite=False, # Keeps 'test' role. 
use_conf=True) self.enforcer.overwrite = True # Call enforce(), it will load rules from # policy configuration files, to overwrite # existing fake ones. self.assertFalse(self.enforcer.enforce('test', {}, {'roles': ['test']})) self.assertTrue(self.enforcer.enforce('default', {}, {'roles': ['fakeB']})) # Check against rule dict again from # enforcer object directly. self.assertNotIn('test', self.enforcer.rules) self.assertIn('default', self.enforcer.rules) self.assertIn('admin', self.enforcer.rules) loaded_rules = jsonutils.loads(str(self.enforcer.rules)) self.assertEqual(2 + opts_registered, len(loaded_rules)) self.assertIn('role:fakeB', loaded_rules['default']) self.assertIn('is_admin:True', loaded_rules['admin']) def test_enforcer_force_reload_with_overwrite_opts_registered(self): self._test_scenario_with_opts_registered( self.test_enforcer_force_reload_with_overwrite, opts_registered=1) def test_enforcer_force_reload_without_overwrite(self, opts_registered=0): self.create_config_file( os.path.join('policy.d', 'a.conf'), POLICY_A_CONTENTS) self.create_config_file( os.path.join('policy.d', 'b.conf'), POLICY_B_CONTENTS) # Prepare in memory fake policies. self.enforcer.set_rules({'test': _parser.parse_rule('role:test')}, use_conf=True) self.enforcer.set_rules({'default': _parser.parse_rule('role:fakeZ')}, overwrite=False, # Keeps 'test' role. use_conf=True) self.enforcer.overwrite = False self.enforcer._is_directory_updated = lambda x, y: True # Call enforce(), it will load rules from # policy configuration files, to merge with # existing fake ones. self.assertTrue(self.enforcer.enforce('test', {}, {'roles': ['test']})) # The existing rules have a same key with # new loaded ones will be overwrote. self.assertFalse(self.enforcer.enforce('default', {}, {'roles': ['fakeZ']})) # Check against rule dict again from # enforcer object directly. self.assertIn('test', self.enforcer.rules) self.assertIn('default', self.enforcer.rules) self.assertIn('admin', self.enforcer.rules) loaded_rules = jsonutils.loads(str(self.enforcer.rules)) self.assertEqual(3 + opts_registered, len(loaded_rules)) self.assertIn('role:test', loaded_rules['test']) self.assertIn('role:fakeB', loaded_rules['default']) self.assertIn('is_admin:True', loaded_rules['admin']) def test_enforcer_force_reload_without_overwrite_opts_registered(self): self._test_scenario_with_opts_registered( self.test_enforcer_force_reload_without_overwrite, opts_registered=1) def test_enforcer_keep_use_conf_flag_after_reload(self): self.create_config_file( os.path.join('policy.d', 'a.conf'), POLICY_A_CONTENTS) self.create_config_file( os.path.join('policy.d', 'b.conf'), POLICY_B_CONTENTS) self.assertTrue(self.enforcer.use_conf) self.assertTrue(self.enforcer.enforce('default', {}, {'roles': ['fakeB']})) self.assertFalse(self.enforcer.enforce('test', {}, {'roles': ['test']})) # After enforcement the flag should # be remained there. self.assertTrue(self.enforcer.use_conf) self.assertFalse(self.enforcer.enforce('_dynamic_test_rule', {}, {'roles': ['test']})) # Then if configure file got changed, # reloading will be triggered when calling # enforcer(), this case could happen only # when use_conf flag equals True. 
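# Rewrite the policy file on disk with an extra '_dynamic_test_rule' and
# force a reload so the newly written rule becomes enforceable below.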
rules = jsonutils.loads(str(self.enforcer.rules)) rules['_dynamic_test_rule'] = 'role:test' with open(self.enforcer.policy_path, 'w') as f: f.write(jsonutils.dumps(rules)) self.enforcer.load_rules(force_reload=True) self.assertTrue(self.enforcer.enforce('_dynamic_test_rule', {}, {'roles': ['test']})) def test_enforcer_keep_use_conf_flag_after_reload_opts_registered(self): # This test does not use _test_scenario_with_opts_registered because # it loads all rules and then dumps them to a policy file and reloads. # That breaks the ability to differentiate between registered and file # loaded policies. # This should be overridden by the policy file self.enforcer.register_default(policy.RuleDefault(name='admin', check_str='is_admin:False')) # This is not in the policy file, only registered self.enforcer.register_default(policy.RuleDefault(name='owner', check_str='role:owner')) self.test_enforcer_keep_use_conf_flag_after_reload() self.assertIn('owner', self.enforcer.rules) self.assertEqual('role:owner', str(self.enforcer.rules['owner'])) self.assertEqual('is_admin:True', str(self.enforcer.rules['admin'])) def test_enforcer_force_reload_false(self): self.enforcer.set_rules({'test': 'test'}) self.enforcer.load_rules(force_reload=False) self.assertIn('test', self.enforcer.rules) self.assertNotIn('default', self.enforcer.rules) self.assertNotIn('admin', self.enforcer.rules) def test_enforcer_overwrite_rules(self): self.enforcer.set_rules({'test': 'test'}) self.enforcer.set_rules({'test': 'test1'}, overwrite=True) self.assertEqual({'test': 'test1'}, self.enforcer.rules) def test_enforcer_update_rules(self): self.enforcer.set_rules({'test': 'test'}) self.enforcer.set_rules({'test1': 'test1'}, overwrite=False) self.assertEqual({'test': 'test', 'test1': 'test1'}, self.enforcer.rules) def test_enforcer_with_default_policy_file(self): enforcer = policy.Enforcer(self.conf) self.assertEqual(self.conf.oslo_policy.policy_file, enforcer.policy_file) def test_enforcer_with_policy_file(self): enforcer = policy.Enforcer(self.conf, policy_file='non-default.json') self.assertEqual('non-default.json', enforcer.policy_file) def test_get_policy_path_raises_exc(self): enforcer = policy.Enforcer(self.conf, policy_file='raise_error.json') e = self.assertRaises(cfg.ConfigFilesNotFoundError, enforcer._get_policy_path, enforcer.policy_file) self.assertEqual(('raise_error.json', ), e.config_files) def test_enforcer_set_rules(self): self.enforcer.load_rules() self.enforcer.set_rules({'test': 'test1'}) self.enforcer.load_rules() self.assertEqual({'test': 'test1'}, self.enforcer.rules) def test_enforcer_default_rule_name(self): enforcer = policy.Enforcer(self.conf, default_rule='foo_rule') self.assertEqual('foo_rule', enforcer.rules.default_rule) self.conf.set_override('policy_default_rule', 'bar_rule', group='oslo_policy') enforcer = policy.Enforcer(self.conf, default_rule='foo_rule') self.assertEqual('foo_rule', enforcer.rules.default_rule) enforcer = policy.Enforcer(self.conf, ) self.assertEqual('bar_rule', enforcer.rules.default_rule) def test_enforcer_register_twice_raises(self): self.enforcer.register_default(policy.RuleDefault(name='owner', check_str='role:owner')) self.assertRaises(policy.DuplicatePolicyError, self.enforcer.register_default, policy.RuleDefault(name='owner', check_str='role:owner')) def test_enforcer_does_not_modify_original_registered_rule(self): rule_original = policy.RuleDefault( name='test', check_str='role:owner',) self.enforcer.register_default(rule_original) 
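# Mutate the copy held internally by the enforcer; the assertions below
# verify the change does not leak back into the original RuleDefault object.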
self.enforcer.registered_rules['test']._check_str = 'role:admin' self.enforcer.registered_rules['test']._check = 'role:admin' self.assertEqual( self.enforcer.registered_rules['test'].check_str, 'role:admin') self.assertEqual( self.enforcer.registered_rules['test'].check, 'role:admin') self.assertEqual(rule_original.check_str, 'role:owner') self.assertEqual(rule_original.check.__str__(), 'role:owner') def test_non_reversible_check(self): self.create_config_file('policy.json', jsonutils.dumps( {'shared': 'field:networks:shared=True'})) # load_rules succeeding without error is the focus of this test self.enforcer.load_rules(True) self.assertIsNotNone(self.enforcer.rules) loaded_rules = jsonutils.loads(str(self.enforcer.rules)) self.assertNotEqual('field:networks:shared=True', loaded_rules['shared']) def test_authorize_opt_registered(self): self.enforcer.register_default(policy.RuleDefault(name='test', check_str='role:test')) self.assertTrue(self.enforcer.authorize('test', {}, {'roles': ['test']})) def test_authorize_opt_not_registered(self): self.assertRaises(policy.PolicyNotRegistered, self.enforcer.authorize, 'test', {}, {'roles': ['test']}) def test_enforcer_accepts_context_objects(self): rule = policy.RuleDefault(name='fake_rule', check_str='role:test') self.enforcer.register_default(rule) request_context = context.RequestContext() target_dict = {} self.enforcer.enforce('fake_rule', target_dict, request_context) def test_enforcer_accepts_subclassed_context_objects(self): rule = policy.RuleDefault(name='fake_rule', check_str='role:test') self.enforcer.register_default(rule) class SpecializedContext(context.RequestContext): pass request_context = SpecializedContext() target_dict = {} self.enforcer.enforce('fake_rule', target_dict, request_context) def test_enforcer_rejects_non_context_objects(self): rule = policy.RuleDefault(name='fake_rule', check_str='role:test') self.enforcer.register_default(rule) class InvalidContext(object): pass request_context = InvalidContext() target_dict = {} self.assertRaises( policy.InvalidContextObject, self.enforcer.enforce, 'fake_rule', target_dict, request_context ) @mock.patch.object(policy.Enforcer, '_map_context_attributes_into_creds') def test_enforcer_call_map_context_attributes(self, map_mock): map_mock.return_value = {} rule = policy.RuleDefault(name='fake_rule', check_str='role:test') self.enforcer.register_default(rule) request_context = context.RequestContext() target_dict = {} self.enforcer.enforce('fake_rule', target_dict, request_context) map_mock.assert_called_once_with(request_context) def test_enforcer_consolidates_context_attributes_with_creds(self): request_context = context.RequestContext() expected_creds = request_context.to_policy_values() creds = self.enforcer._map_context_attributes_into_creds( request_context ) # We don't use self.assertDictEqual here because to_policy_values # actaully returns a non-dict object that just behaves like a # dictionary, but does some special handling when people access # deprecated policy values. 
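# Illustrative sketch (not from the original suite): enforce() accepts an
# oslo.context RequestContext directly, and what the checks actually see is
# the mapping produced by to_policy_values(), which is what the comparison
# loop right below verifies key by key. Rule and role names are made up.
def _sketch_context_creds():
    from oslo_config import cfg
    from oslo_context import context
    from oslo_policy import policy

    conf = cfg.ConfigOpts()
    conf([], project='example')
    enforcer = policy.Enforcer(conf)
    enforcer.register_default(
        policy.RuleDefault(name='things:list', check_str='role:reader'))
    enforcer.load_rules(force_reload=True)
    ctx = context.RequestContext(roles=['reader'], project_id='p1')
    assert enforcer.enforce('things:list', {'project_id': 'p1'}, ctx)
    assert ctx.to_policy_values()['project_id'] == 'p1'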
for k, v in expected_creds.items(): self.assertEqual(expected_creds[k], creds[k]) def test_enforcer_accepts_policy_values_from_context(self): rule = policy.RuleDefault(name='fake_rule', check_str='role:test') self.enforcer.register_default(rule) request_context = context.RequestContext() policy_values = request_context.to_policy_values() target_dict = {} self.enforcer.enforce('fake_rule', target_dict, policy_values) def test_enforcer_understands_system_scope(self): self.conf.set_override('enforce_scope', True, group='oslo_policy') rule = policy.RuleDefault( name='fake_rule', check_str='role:test', scope_types=['system'] ) self.enforcer.register_default(rule) ctx = context.RequestContext(system_scope='all') target_dict = {} self.enforcer.enforce('fake_rule', target_dict, ctx) def test_enforcer_understands_system_scope_creds_dict(self): self.conf.set_override('enforce_scope', True, group='oslo_policy') rule = policy.RuleDefault( name='fake_rule', check_str='role:test', scope_types=['system'] ) self.enforcer.register_default(rule) ctx = context.RequestContext() creds = ctx.to_dict() creds['system_scope'] = 'all' target_dict = {} self.enforcer.enforce('fake_rule', target_dict, creds) def test_enforcer_raises_invalid_scope_with_system_scope_type(self): self.conf.set_override('enforce_scope', True, group='oslo_policy') rule = policy.RuleDefault( name='fake_rule', check_str='role:test', scope_types=['system'] ) self.enforcer.register_default(rule) # model a domain-scoped token, which should fail enforcement ctx = context.RequestContext(domain_id='fake') target_dict = {} self.assertRaises( policy.InvalidScope, self.enforcer.enforce, 'fake_rule', target_dict, ctx ) # model a project-scoped token, which should fail enforcement ctx = context.RequestContext(project_id='fake') self.assertRaises( policy.InvalidScope, self.enforcer.enforce, 'fake_rule', target_dict, ctx ) def test_enforcer_understands_domain_scope(self): self.conf.set_override('enforce_scope', True, group='oslo_policy') rule = policy.RuleDefault( name='fake_rule', check_str='role:test', scope_types=['domain'] ) self.enforcer.register_default(rule) ctx = context.RequestContext(domain_id='fake') target_dict = {} self.enforcer.enforce('fake_rule', target_dict, ctx) def test_enforcer_raises_invalid_scope_with_domain_scope_type(self): self.conf.set_override('enforce_scope', True, group='oslo_policy') rule = policy.RuleDefault( name='fake_rule', check_str='role:test', scope_types=['domain'] ) self.enforcer.register_default(rule) # model a system-scoped token, which should fail enforcement ctx = context.RequestContext(system_scope='all') target_dict = {} self.assertRaises( policy.InvalidScope, self.enforcer.enforce, 'fake_rule', target_dict, ctx ) # model a project-scoped token, which should fail enforcement ctx = context.RequestContext(project_id='fake') self.assertRaises( policy.InvalidScope, self.enforcer.enforce, 'fake_rule', target_dict, ctx ) def test_enforcer_understands_project_scope(self): self.conf.set_override('enforce_scope', True, group='oslo_policy') rule = policy.RuleDefault( name='fake_rule', check_str='role:test', scope_types=['project'] ) self.enforcer.register_default(rule) ctx = context.RequestContext(project_id='fake') target_dict = {} self.enforcer.enforce('fake_rule', target_dict, ctx) def test_enforcer_raises_invalid_scope_with_project_scope_type(self): self.conf.set_override('enforce_scope', True, group='oslo_policy') rule = policy.RuleDefault( name='fake_rule', check_str='role:test', scope_types=['project'] ) 
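# Illustrative sketch (not from the original suite) of the enforce_scope
# behaviour exercised here: a rule limited to scope_types=['project'] passes
# for a project-scoped context and raises InvalidScope for a system-scoped
# one once enforce_scope is turned on. All names are made up.
def _sketch_scope_enforcement():
    from oslo_config import cfg
    from oslo_context import context
    from oslo_policy import policy

    conf = cfg.ConfigOpts()
    conf([], project='example')
    enforcer = policy.Enforcer(conf)    # registers the [oslo_policy] opts
    conf.set_override('enforce_scope', True, group='oslo_policy')
    enforcer.register_default(policy.RuleDefault(
        name='things:delete', check_str='role:admin',
        scope_types=['project']))
    enforcer.load_rules(force_reload=True)
    project_ctx = context.RequestContext(project_id='p1', roles=['admin'])
    system_ctx = context.RequestContext(system_scope='all', roles=['admin'])
    assert enforcer.enforce('things:delete', {}, project_ctx)
    try:
        enforcer.enforce('things:delete', {}, system_ctx)   # wrong scope
    except policy.InvalidScope:
        pass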
self.enforcer.register_default(rule) # model a system-scoped token, which should fail enforcement ctx = context.RequestContext(system_scope='all') target_dict = {} self.assertRaises( policy.InvalidScope, self.enforcer.enforce, 'fake_rule', target_dict, ctx ) # model a domain-scoped token, which should fail enforcement ctx = context.RequestContext(domain_id='fake') self.assertRaises( policy.InvalidScope, self.enforcer.enforce, 'fake_rule', target_dict, ctx ) def test_enforce_scope_with_subclassed_checks_when_scope_not_set(self): self.conf.set_override('enforce_scope', True, group='oslo_policy') rule = _checks.TrueCheck() rule.scope_types = None ctx = context.RequestContext(system_scope='all', roles=['admin']) self.enforcer.enforce(rule, {}, ctx) def test_enforcer_raises_invalid_scope_with_subclassed_checks(self): self.conf.set_override('enforce_scope', True, group='oslo_policy') rule = _checks.TrueCheck() rule.scope_types = ['domain'] ctx = context.RequestContext(system_scope='all', roles=['admin']) self.assertRaises( policy.InvalidScope, self.enforcer.enforce, rule, {}, ctx) class EnforcerNoPolicyFileTest(base.PolicyBaseTestCase): def setUp(self): super(EnforcerNoPolicyFileTest, self).setUp() def test_load_rules(self): # Check that loading rules with no policy file does not error self.enforcer.load_rules(True) self.assertIsNotNone(self.enforcer.rules) self.assertEqual(0, len(self.enforcer.rules)) def test_opts_registered(self): self.enforcer.register_default(policy.RuleDefault(name='admin', check_str='is_admin:False')) self.enforcer.register_default(policy.RuleDefault(name='owner', check_str='role:owner')) self.enforcer.load_rules(True) self.assertEqual({}, self.enforcer.file_rules) self.assertEqual('role:owner', str(self.enforcer.rules['owner'])) self.assertEqual('is_admin:False', str(self.enforcer.rules['admin'])) def test_load_directory(self): self.create_config_file('policy.d/a.conf', POLICY_JSON_CONTENTS) self.create_config_file('policy.d/b.conf', POLICY_B_CONTENTS) self.enforcer.load_rules(True) self.assertIsNotNone(self.enforcer.rules) loaded_rules = jsonutils.loads(str(self.enforcer.rules)) self.assertEqual('role:fakeB', loaded_rules['default']) self.assertEqual('is_admin:True', loaded_rules['admin']) class CheckFunctionTestCase(base.PolicyBaseTestCase): def setUp(self): super(CheckFunctionTestCase, self).setUp() self.create_config_file('policy.json', POLICY_JSON_CONTENTS) def test_check_explicit(self): rule = base.FakeCheck() creds = {} result = self.enforcer.enforce(rule, 'target', creds) self.assertEqual(('target', creds, self.enforcer), result) def test_check_no_rules(self): # Clear the policy.json file created in setUp() self.create_config_file('policy.json', "{}") self.enforcer.default_rule = None self.enforcer.load_rules() creds = {} result = self.enforcer.enforce('rule', 'target', creds) self.assertFalse(result) def test_check_with_rule(self): self.enforcer.set_rules(dict(default=base.FakeCheck())) creds = {} result = self.enforcer.enforce('default', 'target', creds) self.assertEqual(('target', creds, self.enforcer), result) def test_check_rule_not_exist_not_empty_policy_file(self): # If the rule doesn't exist, then enforce() fails rather than KeyError. # This test needs a non-empty file otherwise the code short-circuits. 
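# Illustrative sketch (not from the original suite) of the do_raise/exc
# handling exercised by the raise tests that follow: a failed check normally
# returns False, but do_raise=True converts it into PolicyNotAuthorized (or
# a caller-supplied exception class). All names are made up.
def _sketch_do_raise():
    from oslo_config import cfg
    from oslo_policy import policy

    conf = cfg.ConfigOpts()
    conf([], project='example')
    enforcer = policy.Enforcer(conf)
    enforcer.register_default(
        policy.RuleDefault(name='things:purge', check_str='role:admin'))
    enforcer.load_rules(force_reload=True)
    assert not enforcer.enforce('things:purge', {}, {'roles': ['reader']})
    try:
        enforcer.enforce('things:purge', {}, {'roles': ['reader']},
                         do_raise=True)
    except policy.PolicyNotAuthorized:
        pass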
self.create_config_file('policy.json', jsonutils.dumps({"a_rule": []})) self.enforcer.default_rule = None self.enforcer.load_rules() creds = {} result = self.enforcer.enforce('rule', 'target', creds) self.assertFalse(result) def test_check_raise_default(self): # When do_raise=True and exc is not used then PolicyNotAuthorized is # raised. self.enforcer.set_rules(dict(default=_checks.FalseCheck())) creds = {} self.assertRaisesRegex(policy.PolicyNotAuthorized, " is disallowed by policy", self.enforcer.enforce, 'rule', 'target', creds, True) def test_check_raise_custom_exception(self): self.enforcer.set_rules(dict(default=_checks.FalseCheck())) creds = {} exc = self.assertRaises( MyException, self.enforcer.enforce, 'rule', 'target', creds, True, MyException, 'arg1', 'arg2', kw1='kwarg1', kw2='kwarg2') self.assertEqual(('arg1', 'arg2'), exc.args) self.assertEqual(dict(kw1='kwarg1', kw2='kwarg2'), exc.kwargs) class RegisterCheckTestCase(base.PolicyBaseTestCase): @mock.patch.object(_checks, 'registered_checks', {}) def test_register_check(self): class TestCheck(policy.Check): pass policy.register('spam', TestCheck) self.assertEqual(dict(spam=TestCheck), _checks.registered_checks) class BaseCheckTypesTestCase(base.PolicyBaseTestCase): @mock.patch.object(_checks, 'registered_checks', {}) def test_base_check_types_are_public(self): '''Check that those check types are part of public API. They are blessed to be used by library consumers. ''' for check_type in (policy.AndCheck, policy.NotCheck, policy.OrCheck, policy.RuleCheck): class TestCheck(check_type): pass check_str = str(check_type) policy.register(check_str, TestCheck) self.assertEqual( TestCheck, _checks.registered_checks[check_str], message='%s check type is not public.' % check_str) class RuleDefaultTestCase(base.PolicyBaseTestCase): def test_rule_is_parsed(self): opt = policy.RuleDefault(name='foo', check_str='rule:foo') self.assertIsInstance(opt.check, _checks.BaseCheck) self.assertEqual('rule:foo', str(opt.check)) def test_str(self): opt = policy.RuleDefault(name='foo', check_str='rule:foo') self.assertEqual('"foo": "rule:foo"', str(opt)) def test_equality_obvious(self): opt1 = policy.RuleDefault(name='foo', check_str='rule:foo', description='foo') opt2 = policy.RuleDefault(name='foo', check_str='rule:foo', description='bar') self.assertEqual(opt1, opt2) def test_equality_less_obvious(self): opt1 = policy.RuleDefault(name='foo', check_str='', description='foo') opt2 = policy.RuleDefault(name='foo', check_str='@', description='bar') self.assertEqual(opt1, opt2) def test_not_equal_check(self): opt1 = policy.RuleDefault(name='foo', check_str='rule:foo', description='foo') opt2 = policy.RuleDefault(name='foo', check_str='rule:bar', description='bar') self.assertNotEqual(opt1, opt2) def test_not_equal_name(self): opt1 = policy.RuleDefault(name='foo', check_str='rule:foo', description='foo') opt2 = policy.RuleDefault(name='bar', check_str='rule:foo', description='bar') self.assertNotEqual(opt1, opt2) def test_not_equal_class(self): class NotRuleDefault(object): def __init__(self, name, check_str): self.name = name self.check = _parser.parse_rule(check_str) opt1 = policy.RuleDefault(name='foo', check_str='rule:foo') opt2 = NotRuleDefault(name='foo', check_str='rule:foo') self.assertNotEqual(opt1, opt2) def test_equal_subclass(self): class RuleDefaultSub(policy.RuleDefault): pass opt1 = policy.RuleDefault(name='foo', check_str='rule:foo') opt2 = RuleDefaultSub(name='foo', check_str='rule:foo') self.assertEqual(opt1, opt2) def 
test_not_equal_subclass(self): class RuleDefaultSub(policy.RuleDefault): pass opt1 = policy.RuleDefault(name='foo', check_str='rule:foo') opt2 = RuleDefaultSub(name='bar', check_str='rule:foo') self.assertNotEqual(opt1, opt2) def test_create_opt_with_scope_types(self): scope_types = ['project'] opt = policy.RuleDefault( name='foo', check_str='role:bar', scope_types=scope_types ) self.assertEqual(opt.scope_types, scope_types) def test_create_opt_with_scope_type_strings_fails(self): self.assertRaises( ValueError, policy.RuleDefault, name='foo', check_str='role:bar', scope_types='project' ) def test_create_opt_with_multiple_scope_types(self): opt = policy.RuleDefault( name='foo', check_str='role:bar', scope_types=['project', 'domain', 'system'] ) self.assertEqual(opt.scope_types, ['project', 'domain', 'system']) def test_ensure_scope_types_are_unique(self): self.assertRaises( ValueError, policy.RuleDefault, name='foo', check_str='role:bar', scope_types=['project', 'project'] ) class DocumentedRuleDefaultDeprecationTestCase(base.PolicyBaseTestCase): @mock.patch('warnings.warn', new=mock.Mock()) def test_deprecate_a_policy_check_string(self): deprecated_rule = policy.DeprecatedRule( name='foo:create_bar', check_str='role:fizz', deprecated_reason='"role:bang" is a better default', deprecated_since='N' ) rule_list = [policy.DocumentedRuleDefault( name='foo:create_bar', check_str='role:bang', description='Create a bar.', operations=[{'path': '/v1/bars', 'method': 'POST'}], deprecated_rule=deprecated_rule, )] enforcer = policy.Enforcer(self.conf) enforcer.register_defaults(rule_list) expected_msg = ( 'Policy "foo:create_bar":"role:fizz" was deprecated in N in favor ' 'of "foo:create_bar":"role:bang". Reason: "role:bang" is a better ' 'default. Either ensure your deployment is ready for the new ' 'default or copy/paste the deprecated policy into your policy ' 'file and maintain it manually.' 
) with mock.patch('warnings.warn') as mock_warn: enforcer.load_rules() mock_warn.assert_called_once_with(expected_msg) self.assertTrue( enforcer.enforce('foo:create_bar', {}, {'roles': ['bang']}) ) self.assertTrue( enforcer.enforce('foo:create_bar', {}, {'roles': ['fizz']}) ) self.assertFalse( enforcer.enforce('foo:create_bar', {}, {'roles': ['baz']}) ) @mock.patch('warnings.warn', new=mock.Mock()) def test_deprecate_an_empty_policy_check_string(self): deprecated_rule = policy.DeprecatedRule( name='foo:create_bar', check_str='', deprecated_reason='because of reasons', deprecated_since='N', ) rule_list = [policy.DocumentedRuleDefault( name='foo:create_bar', check_str='role:bang', description='Create a bar.', operations=[{'path': '/v1/bars', 'method': 'POST'}], deprecated_rule=deprecated_rule, )] enforcer = policy.Enforcer(self.conf) enforcer.register_defaults(rule_list) with mock.patch('warnings.warn') as mock_warn: enforcer.load_rules() mock_warn.assert_called_once() enforcer.enforce('foo:create_bar', {}, {'roles': ['bang']}, do_raise=True) enforcer.enforce('foo:create_bar', {}, {'roles': ['fizz']}, do_raise=True) @mock.patch('warnings.warn', new=mock.Mock()) def test_deprecate_replace_with_empty_policy_check_string(self): deprecated_rule = policy.DeprecatedRule( name='foo:create_bar', check_str='role:fizz', deprecated_reason='because of reasons', deprecated_since='N', ) rule_list = [policy.DocumentedRuleDefault( name='foo:create_bar', check_str='', description='Create a bar.', operations=[{'path': '/v1/bars', 'method': 'POST'}], deprecated_rule=deprecated_rule, )] enforcer = policy.Enforcer(self.conf) enforcer.register_defaults(rule_list) with mock.patch('warnings.warn') as mock_warn: enforcer.load_rules() mock_warn.assert_called_once() enforcer.enforce('foo:create_bar', {}, {'roles': ['fizz']}, do_raise=True) enforcer.enforce('foo:create_bar', {}, {'roles': ['bang']}, do_raise=True) def test_deprecate_a_policy_name(self): deprecated_rule = policy.DeprecatedRule( name='foo:bar', check_str='role:baz', deprecated_reason=( '"foo:bar" is not granular enough. If your deployment has ' 'overridden "foo:bar", ensure you override the new policies ' 'with same role or rule. Not doing this will require the ' 'service to assume the new defaults for "foo:bar:create", ' '"foo:bar:update", "foo:bar:list", and "foo:bar:delete", ' 'which might be backwards incompatible for your deployment' ), deprecated_since='N', ) rule_list = [policy.DocumentedRuleDefault( name='foo:create_bar', check_str='role:baz', description='Create a bar.', operations=[{'path': '/v1/bars/', 'method': 'POST'}], deprecated_rule=deprecated_rule, )] expected_msg = ( 'Policy "foo:bar":"role:baz" was deprecated in N in favor of ' '"foo:create_bar":"role:baz". Reason: "foo:bar" is not granular ' 'enough. If your deployment has overridden "foo:bar", ensure you ' 'override the new policies with same role or rule. Not doing this ' 'will require the service to assume the new defaults for ' '"foo:bar:create", "foo:bar:update", "foo:bar:list", and ' '"foo:bar:delete", which might be backwards incompatible for your ' 'deployment. Either ensure your deployment is ready for the new ' 'default or copy/paste the deprecated policy into your policy ' 'file and maintain it manually.' 
) rules = jsonutils.dumps({'foo:bar': 'role:bang'}) self.create_config_file('policy.json', rules) enforcer = policy.Enforcer(self.conf) enforcer.register_defaults(rule_list) with mock.patch('warnings.warn') as mock_warn: enforcer.load_rules(True) mock_warn.assert_called_once_with(expected_msg) def test_deprecate_a_policy_for_removal_logs_warning_when_overridden(self): rule_list = [policy.DocumentedRuleDefault( name='foo:bar', check_str='role:baz', description='Create a foo.', operations=[{'path': '/v1/foos/', 'method': 'POST'}], deprecated_for_removal=True, deprecated_reason=( '"foo:bar" is no longer a policy used by the service' ), deprecated_since='N' )] expected_msg = ( 'Policy "foo:bar":"role:baz" was deprecated for removal in N. ' 'Reason: "foo:bar" is no longer a policy used by the service. Its ' 'value may be silently ignored in the future.' ) rules = jsonutils.dumps({'foo:bar': 'role:bang'}) self.create_config_file('policy.json', rules) enforcer = policy.Enforcer(self.conf) enforcer.register_defaults(rule_list) with mock.patch('warnings.warn') as mock_warn: enforcer.load_rules() mock_warn.assert_called_once_with(expected_msg) def test_deprecate_a_policy_for_removal_does_not_log_warning(self): # We should only log a warning for operators if they are supplying an # override for a policy that is deprecated for removal. rule_list = [policy.DocumentedRuleDefault( name='foo:bar', check_str='role:baz', description='Create a foo.', operations=[{'path': '/v1/foos/', 'method': 'POST'}], deprecated_for_removal=True, deprecated_reason=( '"foo:bar" is no longer a policy used by the service' ), deprecated_since='N' )] enforcer = policy.Enforcer(self.conf) enforcer.register_defaults(rule_list) with mock.patch('warnings.warn') as mock_warn: enforcer.load_rules() mock_warn.assert_not_called() def test_deprecate_check_str_suppress_does_not_log_warning(self): deprecated_rule = policy.DeprecatedRule( name='foo:create_bar', check_str='role:fizz', deprecated_reason='"role:bang" is a better default', deprecated_since='N' ) rule_list = [policy.DocumentedRuleDefault( name='foo:create_bar', check_str='role:bang', description='Create a bar.', operations=[{'path': '/v1/bars', 'method': 'POST'}], deprecated_rule=deprecated_rule, )] enforcer = policy.Enforcer(self.conf) enforcer.suppress_deprecation_warnings = True enforcer.register_defaults(rule_list) with mock.patch('warnings.warn') as mock_warn: enforcer.load_rules() mock_warn.assert_not_called() def test_deprecate_name_suppress_does_not_log_warning(self): deprecated_rule = policy.DeprecatedRule( name='foo:bar', check_str='role:baz', deprecated_reason='"foo:bar" is not granular enough.', deprecated_since='N', ) rule_list = [policy.DocumentedRuleDefault( name='foo:create_bar', check_str='role:baz', description='Create a bar.', operations=[{'path': '/v1/bars/', 'method': 'POST'}], deprecated_rule=deprecated_rule, )] rules = jsonutils.dumps({'foo:bar': 'role:bang'}) self.create_config_file('policy.json', rules) enforcer = policy.Enforcer(self.conf) enforcer.suppress_deprecation_warnings = True enforcer.register_defaults(rule_list) with mock.patch('warnings.warn') as mock_warn: enforcer.load_rules() mock_warn.assert_not_called() def test_deprecate_for_removal_suppress_does_not_log_warning(self): rule_list = [policy.DocumentedRuleDefault( name='foo:bar', check_str='role:baz', description='Create a foo.', operations=[{'path': '/v1/foos/', 'method': 'POST'}], deprecated_for_removal=True, deprecated_reason=( '"foo:bar" is no longer a policy used by the service' ), 
deprecated_since='N' )] rules = jsonutils.dumps({'foo:bar': 'role:bang'}) self.create_config_file('policy.json', rules) enforcer = policy.Enforcer(self.conf) enforcer.suppress_deprecation_warnings = True enforcer.register_defaults(rule_list) with mock.patch('warnings.warn') as mock_warn: enforcer.load_rules() mock_warn.assert_not_called() def test_suppress_default_change_warnings_flag_not_log_warning(self): deprecated_rule = policy.DeprecatedRule( name='foo:create_bar', check_str='role:fizz', deprecated_reason='"role:bang" is a better default', deprecated_since='N', ) rule_list = [policy.DocumentedRuleDefault( name='foo:create_bar', check_str='role:bang', description='Create a bar.', operations=[{'path': '/v1/bars', 'method': 'POST'}], deprecated_rule=deprecated_rule, )] enforcer = policy.Enforcer(self.conf) enforcer.suppress_default_change_warnings = True enforcer.register_defaults(rule_list) with mock.patch('warnings.warn') as mock_warn: enforcer.load_rules() mock_warn.assert_not_called() def test_deprecated_policy_for_removal_must_include_deprecated_meta(self): self.assertRaises( ValueError, policy.DocumentedRuleDefault, name='foo:bar', check_str='rule:baz', description='Create a foo.', operations=[{'path': '/v1/foos/', 'method': 'POST'}], deprecated_for_removal=True, deprecated_reason='Some reason.' # no deprecated_since ) def test_deprecated_policy_should_not_include_deprecated_meta(self): deprecated_rule = policy.DeprecatedRule( name='foo:bar', check_str='rule:baz' ) with mock.patch('warnings.warn') as mock_warn: policy.DocumentedRuleDefault( name='foo:bar', check_str='rule:baz', description='Create a foo.', operations=[{'path': '/v1/foos/', 'method': 'POST'}], deprecated_rule=deprecated_rule, deprecated_reason='Some reason.' ) mock_warn.assert_called_once() def test_deprecated_rule_requires_deprecated_rule_object(self): self.assertRaises( ValueError, policy.DocumentedRuleDefault, name='foo:bar', check_str='rule:baz', description='Create a foo.', operations=[{'path': '/v1/foos/', 'method': 'POST'}], deprecated_rule='foo:bar', deprecated_reason='Some reason.' ) def test_deprecated_policy_must_include_deprecated_reason(self): self.assertRaises( ValueError, policy.DocumentedRuleDefault, name='foo:bar', check_str='rule:baz', description='Create a foo.', operations=[{'path': '/v1/foos/', 'method': 'POST'}], deprecated_for_removal=True, deprecated_since='N' ) @mock.patch('warnings.warn', new=mock.Mock()) def test_override_deprecated_policy_with_old_name(self): # Simulate an operator overriding a policy rules = jsonutils.dumps({'foo:bar': 'role:bazz'}) self.create_config_file('policy.json', rules) # Deprecate the policy name and check string in favor of something # better. deprecated_rule = policy.DeprecatedRule( name='foo:bar', check_str='role:fizz', deprecated_reason='"role:bang" is a better default', deprecated_since='N', ) rule_list = [policy.DocumentedRuleDefault( name='foo:create_bar', check_str='role:bang', description='Create a bar.', operations=[{'path': '/v1/bars', 'method': 'POST'}], deprecated_rule=deprecated_rule, )] self.enforcer.register_defaults(rule_list) # Make sure the override supplied by the operator using the old policy # name is used in favor of the old or new default. 
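# Illustrative sketch (not from the original suite) of the deprecation
# wiring used throughout these tests: the replacement rule carries a
# DeprecatedRule describing the old name and check string, and operator
# overrides filed under the old name keep working during the transition.
# The rule names, roles and the 'X' release below are made up.
def _sketch_deprecated_rule():
    from oslo_policy import policy

    deprecated = policy.DeprecatedRule(
        name='things:old_get',
        check_str='role:admin',
        deprecated_reason='renamed for consistency',
        deprecated_since='X',
    )
    return policy.DocumentedRuleDefault(
        name='things:get',
        check_str='role:reader',
        description='Get a thing.',
        operations=[{'path': '/v1/things/{thing_id}', 'method': 'GET'}],
        deprecated_rule=deprecated,
    )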
self.assertFalse( self.enforcer.enforce('foo:create_bar', {}, {'roles': ['fizz']}) ) self.assertFalse( self.enforcer.enforce('foo:create_bar', {}, {'roles': ['bang']}) ) self.assertTrue( self.enforcer.enforce('foo:create_bar', {}, {'roles': ['bazz']}) ) def test_override_deprecated_policy_with_new_name(self): # Simulate an operator overriding a policy using the new policy name rules = jsonutils.dumps({'foo:create_bar': 'role:bazz'}) self.create_config_file('policy.json', rules) # Deprecate the policy name and check string in favor of something # better. deprecated_rule = policy.DeprecatedRule( name='foo:bar', check_str='role:fizz', deprecated_reason='"role:bang" is a better default', deprecated_since='N', ) rule_list = [policy.DocumentedRuleDefault( name='foo:create_bar', check_str='role:bang', description='Create a bar.', operations=[{'path': '/v1/bars', 'method': 'POST'}], deprecated_rule=deprecated_rule, )] self.enforcer.register_defaults(rule_list) # Make sure the override supplied by the operator is being used in # place of either default value. self.assertFalse( self.enforcer.enforce('foo:create_bar', {}, {'roles': ['fizz']}) ) self.assertFalse( self.enforcer.enforce('foo:create_bar', {}, {'roles': ['bang']}) ) self.assertTrue( self.enforcer.enforce('foo:create_bar', {}, {'roles': ['bazz']}) ) @mock.patch('warnings.warn', new=mock.Mock()) def test_override_both_new_and_old_policy(self): # Simulate an operator overriding a policy using both the the new and # old policy names. The following doesn't make a whole lot of sense # because the overrides are conflicting, but we want to make sure that # oslo.policy uses foo:create_bar instead of foo:bar. rules_dict = { 'foo:create_bar': 'role:bazz', 'foo:bar': 'role:wee' } rules = jsonutils.dumps(rules_dict) self.create_config_file('policy.json', rules) # Deprecate the policy name and check string in favor of something # better. deprecated_rule = policy.DeprecatedRule( name='foo:bar', check_str='role:fizz', deprecated_reason='"role:bang" is a better default', deprecated_since='N', ) rule_list = [policy.DocumentedRuleDefault( name='foo:create_bar', check_str='role:bang', description='Create a bar.', operations=[{'path': '/v1/bars', 'method': 'POST'}], deprecated_rule=deprecated_rule, )] self.enforcer.register_defaults(rule_list) # The default check string for the old policy name foo:bar should fail self.assertFalse( self.enforcer.enforce('foo:create_bar', {}, {'roles': ['fizz']}) ) # The default check string for the new policy name foo:create_bar # should fail self.assertFalse( self.enforcer.enforce('foo:create_bar', {}, {'roles': ['bang']}) ) # The override for the old policy name foo:bar should fail self.assertFalse( self.enforcer.enforce('foo:create_bar', {}, {'roles': ['wee']}) ) # The override for foo:create_bar should pass self.assertTrue( self.enforcer.enforce('foo:create_bar', {}, {'roles': ['bazz']}) ) @mock.patch('warnings.warn', new=mock.Mock()) def test_override_deprecated_policy_with_new_rule(self): # Simulate an operator overriding a deprecated policy with a reference # to the new policy, as done by the sample policy generator. rules = jsonutils.dumps({'old_rule': 'rule:new_rule'}) self.create_config_file('policy.json', rules) # Deprecate the policy name in favor of something better. 
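# Illustrative sketch (not from the original suite) of the "rule:<name>"
# indirection this test relies on: one policy can delegate to another, which
# is how the sample generator expresses "the old name now points at the new
# name". Rule and role names are made up; _parser is the same private
# helper this module already imports.
def _sketch_rule_reference():
    from oslo_config import cfg
    from oslo_policy import _parser
    from oslo_policy import policy

    conf = cfg.ConfigOpts()
    conf([], project='example')
    enforcer = policy.Enforcer(conf, use_conf=False)
    enforcer.set_rules({
        'new_rule': _parser.parse_rule('role:operator'),
        'old_rule': _parser.parse_rule('rule:new_rule'),
    })
    assert enforcer.enforce('old_rule', {}, {'roles': ['operator']})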
deprecated_rule = policy.DeprecatedRule( name='old_rule', check_str='role:bang', deprecated_reason='"old_rule" is a bad name', deprecated_since='N', ) rule_list = [policy.DocumentedRuleDefault( name='new_rule', check_str='role:bang', description='Replacement for old_rule.', operations=[{'path': '/v1/bars', 'method': 'POST'}], deprecated_rule=deprecated_rule, )] self.enforcer.register_defaults(rule_list) # Make sure the override supplied by the operator using the old policy # name is used in favor of the old or new default. self.assertFalse( self.enforcer.enforce('new_rule', {}, {'roles': ['fizz']}) ) self.assertTrue( self.enforcer.enforce('new_rule', {}, {'roles': ['bang']}) ) # Verify that we didn't overwrite the new rule. self.assertEqual('bang', self.enforcer.rules['new_rule'].match) def test_enforce_new_defaults_no_old_check_string(self): self.conf.set_override('enforce_new_defaults', True, group='oslo_policy') deprecated_rule = policy.DeprecatedRule( name='foo:create_bar', check_str='role:fizz', deprecated_reason='"role:bang" is a better default', deprecated_since='N', ) rule_list = [policy.DocumentedRuleDefault( name='foo:create_bar', check_str='role:bang', description='Create a bar.', operations=[{'path': '/v1/bars', 'method': 'POST'}], deprecated_rule=deprecated_rule, )] enforcer = policy.Enforcer(self.conf) enforcer.register_defaults(rule_list) with mock.patch('warnings.warn') as mock_warn: enforcer.load_rules() mock_warn.assert_not_called() self.assertTrue( enforcer.enforce('foo:create_bar', {}, {'roles': ['bang']}) ) self.assertFalse( enforcer.enforce('foo:create_bar', {}, {'roles': ['fizz']}) ) self.assertFalse( enforcer.enforce('foo:create_bar', {}, {'roles': ['baz']}) ) def test_deprecation_logic_is_only_performed_once_per_rule(self): deprecated_rule = policy.DeprecatedRule( name='foo:create_bar', check_str='role:fizz' ) rule = policy.DocumentedRuleDefault( name='foo:create_bar', check_str='role:bang', description='Create a bar.', operations=[{'path': '/v1/bars', 'method': 'POST'}], deprecated_rule=deprecated_rule, deprecated_reason='"role:bang" is a better default', deprecated_since='N' ) check = rule.check enforcer = policy.Enforcer(self.conf) enforcer.register_defaults([rule]) # Check that rule processing hasn't been done, yet self.assertEqual({}, enforcer.rules) # Load the rules enforcer.load_rules() # Loading the rules will store a version of the rule check string # logically ORed with the check string of the deprecated value. Make # sure this is happening but that the original rule check is unchanged expected_check = policy.OrCheck([ _parser.parse_rule(cs) for cs in [rule.check_str, deprecated_rule.check_str] ]) self.assertIn('foo:create_bar', enforcer.rules) self.assertEqual( str(enforcer.rules['foo:create_bar']), str(expected_check)) self.assertEqual(check, rule.check) # Hacky way to check whether _handle_deprecated_rule was called again. # If a second call to load_rules doesn't overwrite our dummy rule then # we know it didn't call the deprecated rule function again. 
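# Illustrative sketch (not from the original suite) of what the assertions
# above verify: while a deprecation is active, load_rules() evaluates
# roughly "new check or deprecated check", which can be modelled with the
# public OrCheck type. The check strings are made up; _parser is the
# private helper this module already imports.
def _sketch_or_of_defaults():
    from oslo_policy import _parser
    from oslo_policy import policy

    combined = policy.OrCheck([
        _parser.parse_rule('role:reader'),   # new default
        _parser.parse_rule('role:admin'),    # deprecated default
    ])
    return str(combined)   # "(role:reader or role:admin)"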
enforcer.rules['foo:create_bar'] = 'foo:bar' enforcer.load_rules() self.assertEqual('foo:bar', enforcer.rules['foo:create_bar']) class DocumentedRuleDefaultTestCase(base.PolicyBaseTestCase): def test_contain_operations(self): opt = policy.DocumentedRuleDefault( name='foo', check_str='rule:foo', description='foo_api', operations=[{'path': '/foo/', 'method': 'GET'}]) self.assertEqual(1, len(opt.operations)) def test_multiple_operations(self): opt = policy.DocumentedRuleDefault( name='foo', check_str='rule:foo', description='foo_api', operations=[{'path': '/foo/', 'method': 'GET'}, {'path': '/foo/', 'method': 'POST'}]) self.assertEqual(2, len(opt.operations)) def test_description_not_empty(self): invalid_desc = '' self.assertRaises(policy.InvalidRuleDefault, policy.DocumentedRuleDefault, name='foo', check_str='rule:foo', description=invalid_desc, operations=[{'path': '/foo/', 'method': 'GET'}]) def test_operation_not_empty_list(self): invalid_op = [] self.assertRaises(policy.InvalidRuleDefault, policy.DocumentedRuleDefault, name='foo', check_str='rule:foo', description='foo_api', operations=invalid_op) def test_operation_must_be_list(self): invalid_op = 'invalid_op' self.assertRaises(policy.InvalidRuleDefault, policy.DocumentedRuleDefault, name='foo', check_str='rule:foo', description='foo_api', operations=invalid_op) def test_operation_must_be_list_of_dicts(self): invalid_op = ['invalid_op'] self.assertRaises(policy.InvalidRuleDefault, policy.DocumentedRuleDefault, name='foo', check_str='rule:foo', description='foo_api', operations=invalid_op) def test_operation_must_have_path(self): invalid_op = [{'method': 'POST'}] self.assertRaises(policy.InvalidRuleDefault, policy.DocumentedRuleDefault, name='foo', check_str='rule:foo', description='foo_api', operations=invalid_op) def test_operation_must_have_method(self): invalid_op = [{'path': '/foo/path/'}] self.assertRaises(policy.InvalidRuleDefault, policy.DocumentedRuleDefault, name='foo', check_str='rule:foo', description='foo_api', operations=invalid_op) def test_operation_must_contain_method_and_path_only(self): invalid_op = [{'path': '/some/path/', 'method': 'GET', 'break': 'me'}] self.assertRaises(policy.InvalidRuleDefault, policy.DocumentedRuleDefault, name='foo', check_str='rule:foo', description='foo_api', operations=invalid_op) class DeprecatedRuleTestCase(base.PolicyBaseTestCase): def test_should_include_deprecated_meta(self): with mock.patch('warnings.warn') as mock_warn: policy.DeprecatedRule( name='foo:bar', check_str='rule:baz' ) mock_warn.assert_called_once() class EnforcerCheckRulesTest(base.PolicyBaseTestCase): def setUp(self): super(EnforcerCheckRulesTest, self).setUp() def test_no_violations(self): self.create_config_file('policy.json', POLICY_JSON_CONTENTS) self.enforcer.load_rules(True) self.assertTrue(self.enforcer.check_rules(raise_on_violation=True)) @mock.patch.object(policy, 'LOG') def test_undefined_rule(self, mock_log): rules = jsonutils.dumps({'foo': 'rule:bar'}) self.create_config_file('policy.json', rules) self.enforcer.load_rules(True) self.assertFalse(self.enforcer.check_rules()) mock_log.warning.assert_called() @mock.patch.object(policy, 'LOG') def test_undefined_rule_raises(self, mock_log): rules = jsonutils.dumps({'foo': 'rule:bar'}) self.create_config_file('policy.json', rules) self.enforcer.load_rules(True) self.assertRaises(policy.InvalidDefinitionError, self.enforcer.check_rules, raise_on_violation=True) mock_log.warning.assert_called() @mock.patch.object(policy, 'LOG') def test_cyclical_rules(self, 
mock_log): rules = jsonutils.dumps({'foo': 'rule:bar', 'bar': 'rule:foo'}) self.create_config_file('policy.json', rules) self.enforcer.load_rules(True) self.assertFalse(self.enforcer.check_rules()) mock_log.warning.assert_called() @mock.patch.object(policy, 'LOG') def test_cyclical_rules_raises(self, mock_log): rules = jsonutils.dumps({'foo': 'rule:bar', 'bar': 'rule:foo'}) self.create_config_file('policy.json', rules) self.enforcer.load_rules(True) self.assertRaises(policy.InvalidDefinitionError, self.enforcer.check_rules, raise_on_violation=True) mock_log.warning.assert_called() @mock.patch.object(policy, 'LOG') def test_complex_cyclical_rules_false(self, mock_log): rules = jsonutils.dumps({'foo': 'rule:bar', 'bar': 'rule:baz and role:admin', 'baz': 'rule:foo or role:user'}) self.create_config_file('policy.json', rules) self.enforcer.load_rules(True) self.assertFalse(self.enforcer.check_rules()) mock_log.warning.assert_called() def test_complex_cyclical_rules_true(self): rules = jsonutils.dumps({'foo': 'rule:bar or rule:baz', 'bar': 'role:admin', 'baz': 'rule:bar or role:user'}) self.create_config_file('policy.json', rules) self.enforcer.load_rules(True) self.assertTrue(self.enforcer.check_rules()) class PickPolicyFileTestCase(base.PolicyBaseTestCase): def setUp(self): super(PickPolicyFileTestCase, self).setUp() self.data = { 'rule_admin': 'True', 'rule_admin2': 'is_admin:True' } self.tmpdir = self.useFixture(fixtures.TempDir()) original_search_dirs = cfg._search_dirs def fake_search_dirs(dirs, name): dirs.append(self.tmpdir.path) return original_search_dirs(dirs, name) mock_search_dir = self.useFixture( fixtures.MockPatch('oslo_config.cfg._search_dirs')).mock mock_search_dir.side_effect = fake_search_dirs mock_cfg_location = self.useFixture( fixtures.MockPatchObject(self.conf, 'get_location')).mock mock_cfg_location.return_value = cfg.LocationInfo( cfg.Locations.set_default, 'None') def test_no_fallback_to_json_file(self): tmpfilename = 'policy.yaml' self.conf.set_override('policy_file', tmpfilename, group='oslo_policy') jsonfile = os.path.join(self.tmpdir.path, 'policy.json') with open(jsonfile, 'w') as fh: jsonutils.dump(self.data, fh) selected_policy_file = policy.pick_default_policy_file( self.conf, fallback_to_json_file=False) self.assertEqual(self.conf.oslo_policy.policy_file, tmpfilename) self.assertEqual(selected_policy_file, tmpfilename) def test_overridden_policy_file(self): tmpfilename = 'nova-policy.yaml' self.conf.set_override('policy_file', tmpfilename, group='oslo_policy') selected_policy_file = policy.pick_default_policy_file(self.conf) self.assertEqual(self.conf.oslo_policy.policy_file, tmpfilename) self.assertEqual(selected_policy_file, tmpfilename) def test_only_new_default_policy_file_exist(self): self.conf.set_override('policy_file', 'policy.yaml', group='oslo_policy') tmpfilename = os.path.join(self.tmpdir.path, 'policy.yaml') with open(tmpfilename, 'w') as fh: yaml.dump(self.data, fh) selected_policy_file = policy.pick_default_policy_file(self.conf) self.assertEqual(self.conf.oslo_policy.policy_file, 'policy.yaml') self.assertEqual(selected_policy_file, 'policy.yaml') def test_only_old_default_policy_file_exist(self): self.conf.set_override('policy_file', 'policy.yaml', group='oslo_policy') tmpfilename = os.path.join(self.tmpdir.path, 'policy.json') with open(tmpfilename, 'w') as fh: jsonutils.dump(self.data, fh) selected_policy_file = policy.pick_default_policy_file(self.conf) self.assertEqual(self.conf.oslo_policy.policy_file, 'policy.yaml') 
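# Illustrative sketch (not from the original suite) of the fallback being
# asserted here: with the new 'policy.yaml' default configured but only a
# legacy 'policy.json' present on disk, pick_default_policy_file() selects
# the JSON file unless fallback_to_json_file=False. The Enforcer is created
# first only to register the [oslo_policy] options on a bare ConfigOpts.
def _sketch_pick_policy_file():
    from oslo_config import cfg
    from oslo_policy import policy

    conf = cfg.ConfigOpts()
    conf([], project='example')
    policy.Enforcer(conf)
    return policy.pick_default_policy_file(conf, fallback_to_json_file=True)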
self.assertEqual(selected_policy_file, 'policy.json') def test_both_default_policy_file_exist(self): self.conf.set_override('policy_file', 'policy.yaml', group='oslo_policy') tmpfilename1 = os.path.join(self.tmpdir.path, 'policy.json') with open(tmpfilename1, 'w') as fh: jsonutils.dump(self.data, fh) tmpfilename2 = os.path.join(self.tmpdir.path, 'policy.yaml') with open(tmpfilename2, 'w') as fh: yaml.dump(self.data, fh) selected_policy_file = policy.pick_default_policy_file(self.conf) self.assertEqual(self.conf.oslo_policy.policy_file, 'policy.yaml') self.assertEqual(selected_policy_file, 'policy.yaml') ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/tests/test_shell.py0000664000175000017500000002045000000000000022307 0ustar00zuulzuul00000000000000# Copyright (c) 2018 OpenStack Foundation. # All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy from unittest import mock from oslo_serialization import jsonutils from oslo_policy import shell from oslo_policy.tests import base from oslo_policy.tests import token_fixture class CheckerTestCase(base.PolicyBaseTestCase): SAMPLE_POLICY = '''--- "sample_rule": "role:service" "sampleservice:sample_rule": "" ''' SAMPLE_POLICY_UNSORTED = '''--- "sample_rule": "role:service" "sampleservice:sample_rule2": "" "sampleservice:sample_rule0": "" "sampleservice:sample_rule1": "" ''' SAMPLE_POLICY_SCOPED = '''--- "sampleservice:sample_rule": "role:role1" "sampleservice:scoped_rule": "role:role1 and system_scope:all" ''' SAMPLE_POLICY_OWNER = '''--- "sampleservice:owner_rule": "user_id:%(user_id)s" ''' def setUp(self): super(CheckerTestCase, self).setUp() self.create_config_file("policy.yaml", self.SAMPLE_POLICY) self.create_config_file( "access.json", jsonutils.dumps(token_fixture.PROJECT_SCOPED_TOKEN_FIXTURE)) @mock.patch("oslo_policy._checks.TrueCheck.__call__") def test_pass_rule_parameters(self, call_mock): policy_file = self.get_config_file_fullname('policy.yaml') access_file = self.get_config_file_fullname('access.json') apply_rule = None is_admin = False stdout = self._capture_stdout() access_data = copy.deepcopy( token_fixture.PROJECT_SCOPED_TOKEN_FIXTURE["token"]) target = { 'user_id': access_data['user']['id'], 'project_id': access_data['project']['id'] } access_data['roles'] = [ role['name'] for role in access_data['roles']] access_data['user_id'] = access_data['user']['id'] access_data['project_id'] = access_data['project']['id'] access_data['is_admin'] = is_admin shell.tool(policy_file, access_file, apply_rule, is_admin) call_mock.assert_called_once_with( target, access_data, mock.ANY, current_rule="sampleservice:sample_rule") expected = '''passed: sampleservice:sample_rule ''' self.assertEqual(expected, stdout.getvalue()) def test_pass_rule_parameters_with_scope(self): self.create_config_file("policy.yaml", self.SAMPLE_POLICY_SCOPED) self.create_config_file( "access.json", jsonutils.dumps(token_fixture.SYSTEM_SCOPED_TOKEN_FIXTURE)) policy_file = 
self.get_config_file_fullname('policy.yaml') access_file = self.get_config_file_fullname('access.json') apply_rule = None is_admin = False stdout = self._capture_stdout() access_data = copy.deepcopy( token_fixture.SYSTEM_SCOPED_TOKEN_FIXTURE["token"]) access_data['roles'] = [ role['name'] for role in access_data['roles']] access_data['user_id'] = access_data['user']['id'] access_data['is_admin'] = is_admin shell.tool(policy_file, access_file, apply_rule, is_admin) expected = '''passed: sampleservice:sample_rule passed: sampleservice:scoped_rule ''' self.assertEqual(expected, stdout.getvalue()) def test_pass_rule_parameters_with_owner(self): self.create_config_file("policy.yaml", self.SAMPLE_POLICY_OWNER) self.create_config_file( "access.json", jsonutils.dumps(token_fixture.PROJECT_SCOPED_TOKEN_FIXTURE)) policy_file = self.get_config_file_fullname('policy.yaml') access_file = self.get_config_file_fullname('access.json') apply_rule = None is_admin = False stdout = self._capture_stdout() access_data = copy.deepcopy( token_fixture.PROJECT_SCOPED_TOKEN_FIXTURE["token"]) access_data['roles'] = [ role['name'] for role in access_data['roles']] access_data['user_id'] = access_data['user']['id'] access_data['project_id'] = access_data['project']['id'] access_data['is_admin'] = is_admin shell.tool(policy_file, access_file, apply_rule, is_admin) expected = '''passed: sampleservice:owner_rule ''' self.assertEqual(expected, stdout.getvalue()) def test_pass_rule_parameters_sorted(self): self.create_config_file("policy.yaml", self.SAMPLE_POLICY_UNSORTED) policy_file = self.get_config_file_fullname('policy.yaml') access_file = self.get_config_file_fullname('access.json') apply_rule = None is_admin = False stdout = self._capture_stdout() access_data = copy.deepcopy( token_fixture.PROJECT_SCOPED_TOKEN_FIXTURE["token"]) access_data['roles'] = [ role['name'] for role in access_data['roles']] access_data['user_id'] = access_data['user']['id'] access_data['project_id'] = access_data['project']['id'] access_data['is_admin'] = is_admin shell.tool(policy_file, access_file, apply_rule, is_admin) expected = '''passed: sampleservice:sample_rule0 passed: sampleservice:sample_rule1 passed: sampleservice:sample_rule2 ''' self.assertEqual(expected, stdout.getvalue()) @mock.patch("oslo_policy._checks.TrueCheck.__call__") def test_pass_rule_parameters_with_custom_target(self, call_mock): apply_rule = None is_admin = False access_data = copy.deepcopy( token_fixture.PROJECT_SCOPED_TOKEN_FIXTURE["token"]) access_data['roles'] = [ role['name'] for role in access_data['roles']] access_data['user_id'] = access_data['user']['id'] access_data['project_id'] = access_data['project']['id'] access_data['is_admin'] = is_admin sample_target = { "project_id": access_data["project"]["id"], "domain_id": access_data["project"]["domain"]["id"] } self.create_config_file( "target.json", jsonutils.dumps(sample_target)) policy_file = self.get_config_file_fullname('policy.yaml') access_file = self.get_config_file_fullname('access.json') target_file = self.get_config_file_fullname('target.json') stdout = self._capture_stdout() shell.tool(policy_file, access_file, apply_rule, is_admin, target_file) call_mock.assert_called_once_with( sample_target, access_data, mock.ANY, current_rule="sampleservice:sample_rule") expected = '''passed: sampleservice:sample_rule ''' self.assertEqual(expected, stdout.getvalue()) def test_all_nonadmin(self): policy_file = self.get_config_file_fullname('policy.yaml') access_file = 
self.get_config_file_fullname('access.json') apply_rule = None is_admin = False stdout = self._capture_stdout() shell.tool(policy_file, access_file, apply_rule, is_admin) expected = '''passed: sampleservice:sample_rule ''' self.assertEqual(expected, stdout.getvalue()) def test_flatten_from_dict(self): target = { "target": { "secret": { "project_id": "1234" } } } result = shell.flatten(target) self.assertEqual(result, {"target.secret.project_id": "1234"}) def test_flatten_from_file(self): target = { "target": { "secret": { "project_id": "1234" } } } self.create_config_file( "target.json", jsonutils.dumps(target)) with open(self.get_config_file_fullname('target.json'), 'r') as fh: target_from_file = fh.read() result = shell.flatten(jsonutils.loads(target_from_file)) self.assertEqual(result, {"target.secret.project_id": "1234"}) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/tests/test_sphinxext.py0000664000175000017500000000733100000000000023235 0ustar00zuulzuul00000000000000# Copyright 2017 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import textwrap from oslotest import base from oslo_policy import policy from oslo_policy import sphinxext class IndentTest(base.BaseTestCase): def test_indent(self): result = sphinxext._indent("foo\nbar") self.assertEqual(" foo\n bar", result) result = sphinxext._indent("") self.assertEqual("", result) result = sphinxext._indent("\n") self.assertEqual("\n", result) result = sphinxext._indent("test\ntesting\n\nafter blank") self.assertEqual(" test\n testing\n\n after blank", result) result = sphinxext._indent("\tfoo\nbar") self.assertEqual(" \tfoo\n bar", result) result = sphinxext._indent(" foo\nbar") self.assertEqual(" foo\n bar", result) result = sphinxext._indent("foo\n bar") self.assertEqual(" foo\n bar", result) result = sphinxext._indent("foo\n\n bar") self.assertEqual(" foo\n\n bar", result) self.assertRaises(AttributeError, sphinxext._indent, None) class FormatPolicyTest(base.BaseTestCase): def test_minimal(self): results = '\n'.join(list(sphinxext._format_policy_section( 'foo', [policy.RuleDefault('rule_a', '@')]))) self.assertEqual(textwrap.dedent(""" foo === ``rule_a`` :Default: ``@`` (no description provided) """).lstrip(), results) def test_with_description(self): results = '\n'.join(list(sphinxext._format_policy_section( 'foo', [policy.RuleDefault('rule_a', '@', 'My sample rule')] ))) self.assertEqual(textwrap.dedent(""" foo === ``rule_a`` :Default: ``@`` My sample rule """).lstrip(), results) def test_with_operations(self): results = '\n'.join(list(sphinxext._format_policy_section( 'foo', [policy.DocumentedRuleDefault( 'rule_a', '@', 'My sample rule', [ {'method': 'GET', 'path': '/foo'}, {'method': 'POST', 'path': '/some'}])] ))) self.assertEqual(textwrap.dedent(""" foo === ``rule_a`` :Default: ``@`` :Operations: - **GET** ``/foo`` - **POST** ``/some`` My sample rule """).lstrip(), results) def test_with_scope_types(self): operations = [ {'method': 
'GET', 'path': '/foo'}, {'method': 'POST', 'path': '/some'} ] scope_types = ['bar'] rule = policy.DocumentedRuleDefault( 'rule_a', '@', 'My sample rule', operations, scope_types=scope_types ) results = '\n'.join(list(sphinxext._format_policy_section( 'foo', [rule] ))) self.assertEqual(textwrap.dedent(""" foo === ``rule_a`` :Default: ``@`` :Operations: - **GET** ``/foo`` - **POST** ``/some`` :Scope Types: - **bar** My sample rule """).lstrip(), results) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/tests/test_sphinxpolicygen.py0000664000175000017500000000641400000000000024427 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from unittest import mock from oslotest import base from oslo_policy import sphinxpolicygen class SingleSampleGenerationTest(base.BaseTestCase): @mock.patch('os.path.isdir') @mock.patch('os.path.isfile') @mock.patch('oslo_policy.generator.generate_sample') def test_sample_gen_with_single_config_file(self, sample, isfile, isdir): isfile.side_effect = [False, True] isdir.return_value = True config = mock.Mock(policy_generator_config_file='nova.conf', sample_policy_basename='nova') app = mock.Mock(srcdir='/opt/nova', config=config) sphinxpolicygen.generate_sample(app) sample.assert_called_once_with(args=[ '--config-file', '/opt/nova/nova.conf', '--output-file', '/opt/nova/nova.policy.yaml.sample'], conf=mock.ANY) @mock.patch('os.path.isdir') @mock.patch('os.path.isfile') @mock.patch('oslo_policy.generator.generate_sample') def test_sample_gen_with_single_config_file_no_base(self, sample, isfile, isdir): isfile.side_effect = [False, True] isdir.return_value = True config = mock.Mock(policy_generator_config_file='nova.conf', sample_policy_basename=None) app = mock.Mock(srcdir='/opt/nova', config=config) sphinxpolicygen.generate_sample(app) sample.assert_called_once_with(args=[ '--config-file', '/opt/nova/nova.conf', '--output-file', '/opt/nova/sample.policy.yaml'], conf=mock.ANY) @mock.patch('os.path.isdir') @mock.patch('os.path.isfile') @mock.patch('oslo_policy.generator.generate_sample') def test_sample_gen_with_multiple_config_files(self, sample, isfile, isdir): # Tests the scenario that policy_generator_config_file is a list # of two-item tuples of the config file name and policy basename. 
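# Illustrative sketch (not from the original suite) of the Sphinx conf.py
# wiring this scenario models; the project names and paths are made up.
# Each (generator config, basename) pair produces
# "<basename>.policy.yaml.sample" under the documentation source dir, while
# a single config file with no basename produces "sample.policy.yaml".
extensions = ['oslo_policy.sphinxpolicygen']
policy_generator_config_file = [
    ('../../etc/nova/nova-policy-generator.conf', 'nova'),
    ('../../etc/placement/placement-policy-generator.conf', 'placement'),
]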
isfile.side_effect = [False, True] * 2 isdir.return_value = True config = mock.Mock(policy_generator_config_file=[ ('nova.conf', 'nova'), ('placement.conf', 'placement')]) app = mock.Mock(srcdir='/opt/nova', config=config) sphinxpolicygen.generate_sample(app) sample.assert_has_calls([ mock.call(args=[ '--config-file', '/opt/nova/nova.conf', '--output-file', '/opt/nova/nova.policy.yaml.sample'], conf=mock.ANY), mock.call(args=[ '--config-file', '/opt/nova/placement.conf', '--output-file', '/opt/nova/placement.policy.yaml.sample'], conf=mock.ANY)]) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/tests/token_fixture.py0000664000175000017500000002575400000000000023043 0ustar00zuulzuul00000000000000# Copyright (c) 2015 OpenStack Foundation. # All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. REGION_ONE_PUBLIC_KEYSTONE_ENDPOINT_ID = '8cd4b957090f4ca5842a22e9a74099cd' PROJECT_SCOPED_TOKEN_FIXTURE = { "token": { "methods": [ "password" ], "roles": [ { "id": "f03fda8f8a3249b2a70fb1f176a7b631", "name": "role1" }, { "id": "f03fda8f8a3249b2a70fb1f176a7b631", "name": "role2" } ], "issued_at": "2002-01-18T21:14:07Z", "expires_at": "2038-01-18T21:14:07Z", "project": { "id": "tenant_id1", "domain": { "id": "domain_id1", "name": "domain_name1" }, "enabled": True, "description": "no description available", "name": "tenant_name1" }, "catalog": [ { "endpoints": [ { "id": "3b5e554bcf114f2483e8a1be7a0506d1", "interface": "admin", "url": "http://127.0.0.1:8776/v1/" + "64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" }, { "id": "54abd2dc463c4ba4a72915498f8ecad1", "interface": "internal", "url": "http://127.0.0.1:8776/v1/" + "64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" }, { "id": "70a7efa4b1b941968357cc43ae1419ee", "interface": "public", "url": "http://127.0.0.1:8776/v1/" + "64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" } ], "id": "5707c3fc0a294703a3c638e9cf6a6c3a", "type": "volume", "name": "volume" }, { "endpoints": [ { "id": "92217a3b95394492859bc49fd474382f", "interface": "admin", "url": "http://127.0.0.1:9292/v1", "region": "regionOne" }, { "id": "f20563bdf66f4efa8a1f11d99b672be1", "interface": "internal", "url": "http://127.0.0.1:9292/v1", "region": "regionOne" }, { "id": "375f9ba459a447738fb60fe5fc26e9aa", "interface": "public", "url": "http://127.0.0.1:9292/v1", "region": "regionOne" } ], "id": "15c21aae6b274a8da52e0a068e908aac", "type": "image", "name": "glance" }, { "endpoints": [ { "id": "edbd9f50f66746ae9ed11dc3b1ae35da", "interface": "admin", "url": "http://127.0.0.1:8774/v1.1/" + "64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" }, { "id": "9e03c46c80a34a159cb39f5cb0498b92", "interface": "internal", "url": "http://127.0.0.1:8774/v1.1/" + "64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" }, { "id": "1df0b44d92634d59bd0e0d60cf7ce432", "interface": "public", "url": "http://127.0.0.1:8774/v1.1/" + "64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" } ], "id": 
"2f404fdb89154c589efbc10726b029ec", "type": "compute", "name": "nova" }, { "endpoints": [ { "id": "a4501e141a4b4e14bf282e7bffd81dc5", "interface": "admin", "url": "http://127.0.0.1:35357/v3", "region": "RegionOne" }, { "id": "3d17e3227bfc4483b58de5eaa584e360", "interface": "internal", "url": "http://127.0.0.1:35357/v3", "region": "RegionOne" }, { "id": REGION_ONE_PUBLIC_KEYSTONE_ENDPOINT_ID, "interface": "public", "url": "http://127.0.0.1:5000/v3", "region": "RegionOne" } ], "id": "c5d926d566424e4fba4f80c37916cde5", "type": "identity", "name": "keystone" } ], "user": { "domain": { "id": "domain_id1", "name": "domain_name1" }, "name": "user_name1", "id": "user_id1" } } } SYSTEM_SCOPED_TOKEN_FIXTURE = { "token": { "methods": [ "password" ], "expires_at": "2038-01-18T21:14:07Z", "issued_at": "2000-01-18T21:14:07Z", "roles": [ { "id": "41b1af9bb39241e8b8b79fae5906abcc", "name": "role1" }, { "id": "ac9add6b3c5a46dcaaf21390c4657949", "name": "role2" } ], "system": { "all": True }, "catalog": [ { "endpoints": [ { "id": "3b5e554bcf114f2483e8a1be7a0506d1", "interface": "admin", "url": "http://127.0.0.1:8776/v1/" + "64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" }, { "id": "54abd2dc463c4ba4a72915498f8ecad1", "interface": "internal", "url": "http://127.0.0.1:8776/v1/" + "64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" }, { "id": "70a7efa4b1b941968357cc43ae1419ee", "interface": "public", "url": "http://127.0.0.1:8776/v1/" + "64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" } ], "id": "5707c3fc0a294703a3c638e9cf6a6c3a", "type": "volume", "name": "volume" }, { "endpoints": [ { "id": "92217a3b95394492859bc49fd474382f", "interface": "admin", "url": "http://127.0.0.1:9292/v1", "region": "regionOne" }, { "id": "f20563bdf66f4efa8a1f11d99b672be1", "interface": "internal", "url": "http://127.0.0.1:9292/v1", "region": "regionOne" }, { "id": "375f9ba459a447738fb60fe5fc26e9aa", "interface": "public", "url": "http://127.0.0.1:9292/v1", "region": "regionOne" } ], "id": "15c21aae6b274a8da52e0a068e908aac", "type": "image", "name": "glance" }, { "endpoints": [ { "id": "edbd9f50f66746ae9ed11dc3b1ae35da", "interface": "admin", "url": "http://127.0.0.1:8774/v1.1/" + "64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" }, { "id": "9e03c46c80a34a159cb39f5cb0498b92", "interface": "internal", "url": "http://127.0.0.1:8774/v1.1/" + "64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" }, { "id": "1df0b44d92634d59bd0e0d60cf7ce432", "interface": "public", "url": "http://127.0.0.1:8774/v1.1/" + "64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" } ], "id": "2f404fdb89154c589efbc10726b029ec", "type": "compute", "name": "nova" }, { "endpoints": [ { "id": "a4501e141a4b4e14bf282e7bffd81dc5", "interface": "admin", "url": "http://127.0.0.1:35357/v3", "region": "RegionOne" }, { "id": "3d17e3227bfc4483b58de5eaa584e360", "interface": "internal", "url": "http://127.0.0.1:35357/v3", "region": "RegionOne" }, { "id": REGION_ONE_PUBLIC_KEYSTONE_ENDPOINT_ID, "interface": "public", "url": "http://127.0.0.1:5000/v3", "region": "RegionOne" } ], "id": "c5d926d566424e4fba4f80c37916cde5", "type": "identity", "name": "keystone" } ], "user": { "domain": { "id": "domain_id1", "name": "domain_name1" }, "name": "user_name1", "id": "user_id1" } } } ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/oslo_policy/version.py0000664000175000017500000000126300000000000020465 0ustar00zuulzuul00000000000000# Copyright 2016 OpenStack Foundation # # 
Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pbr.version version_info = pbr.version.VersionInfo('oslo.policy') ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1645118009.8395183 oslo.policy-3.11.0/releasenotes/0000775000175000017500000000000000000000000016562 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1645118009.855518 oslo.policy-3.11.0/releasenotes/notes/0000775000175000017500000000000000000000000017712 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/Fix-map-system-scope-for-creds-dict-e4cbec2f7495f22e.yaml0000664000175000017500000000022700000000000031717 0ustar00zuulzuul00000000000000--- fixes: - | Fixes the mapping of 'system_scope' to 'system' when enforce is called with a 'creds' dictionary instead of a RequestContext. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/Pass-target-dict-to-oslopolicy-checker-87185d40aec413ee.yaml0000664000175000017500000000034600000000000032345 0ustar00zuulzuul00000000000000--- features: - | oslopolicy-checker was added the ability to accept a file containing a hash that represents the target. This makes it possible to check policies that have non-conventional targets such as barbican. ././@PaxHeader0000000000000000000000000000020500000000000011452 xustar0000000000000000111 path=oslo.policy-3.11.0/releasenotes/notes/add-deprecated-metadata-to-DeprecatedRule-79d2e8a3f5d11743.yaml 22 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/add-deprecated-metadata-to-DeprecatedRule-79d2e8a3f5d11743.yam0000664000175000017500000000075500000000000032506 0ustar00zuulzuul00000000000000--- features: - | ``DeprecatedRule`` now accepts two new parameters: ``deprecated_reason`` and ``deprecated_since``. These should be used in place of the equivalent parameters on the rule that is replacing this rule in order to avoid confusion. upgrade: - | Users with a ``RuleDefault`` or ``DocumentedRuleDefault`` that have configured a ``deprecated_rule`` should move the ``deprecated_reason`` and ``deprecated_since`` parameters to this ``DeprecatedRule``. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/add-policy-convert-json-to-yaml-tool-3c93604aee79f58a.yaml0000664000175000017500000000042500000000000032034 0ustar00zuulzuul00000000000000--- features: - | Add ``oslopolicy-convert-json-to-yaml`` tool to convert the json formatted policy file to yaml format in compatible way. Refer to `this document `_ for details. 
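A minimal sketch of how the ``DeprecatedRule`` metadata described in the note above fits together with a ``DocumentedRuleDefault`` through its ``deprecated_rule`` parameter; the policy names, check strings, and ``deprecated_since`` value below are illustrative assumptions, not rules shipped in this source tree::

    # Illustrative only: the names, check strings and version string are
    # hypothetical and do not come from any packaged default policies.
    from oslo_policy import policy

    deprecated_get = policy.DeprecatedRule(
        name='service:get_resource',
        check_str='rule:admin_api',
        # The deprecation metadata now lives on the DeprecatedRule itself,
        # as recommended by the upgrade note above.
        deprecated_reason='The policy was renamed and given a scoped default.',
        deprecated_since='3.11.0',
    )

    rules = [
        policy.DocumentedRuleDefault(
            name='service:resource:get',
            check_str='role:reader and system_scope:all',
            description='Show details for a resource.',
            operations=[{'path': '/v2/resources/{id}', 'method': 'GET'}],
            scope_types=['system'],
            deprecated_rule=deprecated_get,
        ),
    ]

Keeping ``deprecated_reason`` and ``deprecated_since`` on the ``DeprecatedRule`` rather than on the replacement rule avoids the ambiguity the upgrade note warns about.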
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/add-policy-upgrade-command-a65bc4f760e5d8b1.yaml0000664000175000017500000000032700000000000030164 0ustar00zuulzuul00000000000000--- features: - | Add ``oslopolicy-policy-upgrade`` command to help operators upgrade their self-defined policy file to new release format. It will upgrade the deprected policy name with the new name. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/add-scope-types-to-sphinxext-cacd845c4575e965.yaml0000664000175000017500000000031400000000000030477 0ustar00zuulzuul00000000000000--- fixes: - | [`bug 1773473 `_] The ``sphinxext`` extension for rendering policy documentation now supports ``scope_types`` attributes. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/add-sphinxpolicygen-39e2f8fa24930b0c.yaml0000664000175000017500000000022000000000000026755 0ustar00zuulzuul00000000000000--- features: - | Add ``sphinxpolicygen`` Sphinx plugin, which can be used to generate a sample policy file for use in documentation. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/add_custom_rule_check_plugins-3c15c2c7ca5e.yaml0000664000175000017500000000027000000000000030532 0ustar00zuulzuul00000000000000--- features: - | Add support for custom rule check plugins. ``http`` and ``https`` external rule checks have been converted into stevedore plugins and serve as examples.././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/add_reno-3b4ae0789e9c45b4.yaml0000664000175000017500000000007100000000000024573 0ustar00zuulzuul00000000000000--- other: - Switch to reno for managing release notes.././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/bug-1779172-c1323c0f647bc44c.yaml0000664000175000017500000000176700000000000024430 0ustar00zuulzuul00000000000000--- features: - | [`bug 1779172 `_] The ``enforce()`` method now supports the ability to parse ``oslo.context`` objects if passed into ``enforce()`` as ``creds``. This provides more consistent policy enforcement for service developers by ensuring the attributes provided in policy enforcement are standardized. In this case they are being standardized through the ``oslo_context.context.RequestContext.to_policy_values()`` method. fixes: - | [`bug 1779172 `_] The ``enforce()`` method now supports the ability to parse ``oslo.context`` objects if passed into ``enforce()`` as ``creds``. This provides more consistent policy enforcement for service developers by ensuring the attributes provided in policy enforcement are standardized. In this case they are being standardized through the ``oslo_context.context.RequestContext.to_policy_values()`` method. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/bug-1880959-8f1370a59759d40d.yaml0000664000175000017500000000060100000000000024303 0ustar00zuulzuul00000000000000--- fixes: - | [`bug 1880959 `_] The behavior of policy file reloading from policy directories was fixed. 
Previously the rules from policy files located in the directories specified in the ``policy_dirs`` option were not reapplied after the rules from the primary policy file have been reapplied due to a change. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/bug-1913718-f1b46bbff3231d98.yaml0000664000175000017500000000040400000000000024502 0ustar00zuulzuul00000000000000--- fixes: - | [`bug 1913718 `_] The `Enforcer()` object now only processes deprecated rules once at load or enforcement time, improving performance for users that make extensive use of policy enforcement. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/bug-1943584-fc74f9205039883c.yaml0000664000175000017500000000044100000000000024302 0ustar00zuulzuul00000000000000--- fixes: - | [`bug 1943584 `_] If file in policy directory was emptied, rules were not re-calculated. The only workaround was to restart an application. Now rules are re-calculated "on the fly", without app restart. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/deprecate-policy-file-json-format-e1921f15b5d00287.yaml0000664000175000017500000000112100000000000031243 0ustar00zuulzuul00000000000000--- deprecations: - | ``policy_file`` support for JSON formatted file is deprecated. Use YAML formatted file which will be default in future. Use `oslopolicy-convert-json-to-yaml `_ tool to convert the existing JSON to YAML formatted policy file in backward compatible way. JSON format support and ``--format`` option in ``oslopolicy-sample-generator`` and ``oslopolicy-policy-upgrade`` tools are also deprecated. In future release, ``--format`` option will be removed. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/drop-python27-support-9aa06224812cc352.yaml0000664000175000017500000000017700000000000027012 0ustar00zuulzuul00000000000000--- upgrade: - | Support for Python 2.7 has been dropped. The minimum version of Python now supported is Python 3.6. ././@PaxHeader0000000000000000000000000000022100000000000011450 xustar0000000000000000123 path=oslo.policy-3.11.0/releasenotes/notes/enforce-scope-checks-always-when-rule-has-scope_types-8f983cdf70766e4f.yaml 22 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/enforce-scope-checks-always-when-rule-has-scope_types-8f983cdf0000664000175000017500000000027100000000000033365 0ustar00zuulzuul00000000000000--- other: - | Scope check is enforced for all rules, registered ones as well as the ones which are subclasses of the ``BaseCheck`` class if rule has ``scope_types`` set. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/enforce_new_defaults-6ae17d8b8d166a2c.yaml0000664000175000017500000000121100000000000027250 0ustar00zuulzuul00000000000000features: - | A new configuration option ``enforce_new_defaults`` has been added to the ``[oslo_policy]`` group to control whether or not to use the old deprecated defaults. If ``True``, the old deprecated defaults are not going to be evaluated which means if any existing token is allowed for old defaults but disallowed for new defaults it will be disallowed. 
It is encouraged to enable this flag along with the ``enforce_scope`` flag so that you can get the benefits of new defaults and ``scope_type`` together. This way operators can switch to new defaults without overwriting the rules in the policy file. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/enforce_scope_types-1e92f6a34e4173ef.yaml0000664000175000017500000000141200000000000027053 0ustar00zuulzuul00000000000000--- features: - | A new configuration option has been added to the ``[oslo_policy]`` group called ``enforce_scope``. When set to ``True``, oslo.policy will raise an ``InvalidScope`` exception if the context passed into the enforce method doesn't match the policy's ``scope_types``. If ``False``, a warning will be logged for operators. Note that operators should only enable this option once they've audited their users to ensure system users have roles on the system. This could potentially prevent some users from being able to make system-level API calls. This will also give other services the flexibility to fix long-standing RBAC issues in OpenStack once they start introducing ``scope_types`` for policies used in their service. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/expand-cli-docs-02c2f13adbe251c0.yaml0000664000175000017500000000033000000000000026011 0ustar00zuulzuul00000000000000--- fixes: - | [`bug 1741073 `_] Documentation has been improved to include ``oslopolicy-sample-generator`` and ``oslopolicy-list-redundant`` usage. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/fix-bug-1914095-fa71d81c9639ba94.yaml0000664000175000017500000000051000000000000025214 0ustar00zuulzuul00000000000000--- fixes: - | This fixes the Bug# 1914095. Policy engine has bug of modifying the registered rule original object which caused issue when there are multiple policy objects are processing rules in parallel. With this fix. policy engine will make copies of all the registered rules and process accordingly. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/fix-rendering-for-deprecated-rules-d465292e4155f483.yaml0000664000175000017500000000034200000000000031356 0ustar00zuulzuul00000000000000--- fixes: - | [`bug 1771442 `_] Policy rules that are deprecated for removal are now properly formatted when rendering sample policy files for documentation. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/list-redundant-deprecation-warnings-f84a06133efdaedd.yaml0000664000175000017500000000024700000000000032311 0ustar00zuulzuul00000000000000--- fixes: - | Deprecated policy warnings are now suppressed in the ``oslopolicy-list-redundant`` tool so that they don't overwhelm the relevant output. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/oslo-policy-descriptive-support-3ee688c5fa48d751.yaml0000664000175000017500000000107400000000000031336 0ustar00zuulzuul00000000000000--- features: - | Added the option to define a more descriptive policy rule by using `policy.DocumentedRuleDefault` class. 
When using this class it is required that the description and operations parameters are defined, unlike `policy.RuleDefault`. The operations parameter is a list of dictionaries that must contain the two keys 'path' and 'method' which represent the API URL and the HTTP REQUEST METHOD. More information can be found in the `policy usage documentation `_. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/policy-check-performance-fbad83c7a4afd7d7.yaml0000664000175000017500000000034600000000000030173 0ustar00zuulzuul00000000000000--- fixes: - | As reported in launchpad bug 1723030, under some circumstances policy checks caused a significant performance degradation. This release includes improved logic around rule validation to prevent that. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/notes/policy-file-validator-906d5cff864a2d51.yaml0000664000175000017500000000032000000000000027213 0ustar00zuulzuul00000000000000--- features: - | A new tool, ``oslopolicy-validator``, has been added. It allows deployers to easily run basic sanity checks against their policy files. See the documentation for full details. ././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1645118009.859518 oslo.policy-3.11.0/releasenotes/source/0000775000175000017500000000000000000000000020062 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1645118009.859518 oslo.policy-3.11.0/releasenotes/source/_static/0000775000175000017500000000000000000000000021510 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/source/_static/.placeholder0000664000175000017500000000000000000000000023761 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1645118009.859518 oslo.policy-3.11.0/releasenotes/source/_templates/0000775000175000017500000000000000000000000022217 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/source/_templates/.placeholder0000664000175000017500000000000000000000000024470 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/source/conf.py0000664000175000017500000002104500000000000021363 0ustar00zuulzuul00000000000000# -*- coding: utf-8 -*- # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. 
# # All configuration values have a default; values that are commented out # serve to show the default. # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'openstackdocstheme', 'reno.sphinxext', ] # openstackdocstheme options openstackdocs_repo_name = 'openstack/oslo.policy' openstackdocs_bug_project = 'oslo.policy' openstackdocs_bug_tag = '' # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. copyright = '2016, oslo.policy Developers' # Release notes do not need a version in the title, they span # multiple versions. # The full version, including alpha/beta/rc tags. release = '' # The short X.Y version. version = '' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all # documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'native' # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. # keep_warnings = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'openstackdocs' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. 
# html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. # html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'oslo.policyReleaseNotesDoc' # -- Options for LaTeX output --------------------------------------------- # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ('index', 'oslo.policyReleaseNotes.tex', 'oslo.policy Release Notes Documentation', 'oslo.policy Developers', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'oslo.policyReleaseNotes', 'oslo.policy Release Notes Documentation', ['oslo.policy Developers'], 1) ] # If true, show URL addresses after external links. 
# man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'oslo.policyReleaseNotes', 'oslo.policy Release Notes Documentation', 'oslo.policy Developers', 'oslo.policyReleaseNotes', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. # texinfo_appendices = [] # If false, no module index is generated. # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. # texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. # texinfo_no_detailmenu = False # -- Options for Internationalization output ------------------------------ locale_dirs = ['locale/'] ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/source/index.rst0000664000175000017500000000035100000000000021722 0ustar00zuulzuul00000000000000=========================== oslo.policy Release Notes =========================== .. toctree:: :maxdepth: 1 unreleased xena wallaby victoria ussuri train stein rocky queens pike ocata newton ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1645118009.8435183 oslo.policy-3.11.0/releasenotes/source/locale/0000775000175000017500000000000000000000000021321 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1645118009.8435183 oslo.policy-3.11.0/releasenotes/source/locale/en_GB/0000775000175000017500000000000000000000000022273 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1645118009.859518 oslo.policy-3.11.0/releasenotes/source/locale/en_GB/LC_MESSAGES/0000775000175000017500000000000000000000000024060 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/source/locale/en_GB/LC_MESSAGES/releasenotes.po0000664000175000017500000001656100000000000027122 0ustar00zuulzuul00000000000000# Andi Chandler , 2016. #zanata # Andi Chandler , 2017. #zanata # Andi Chandler , 2018. #zanata msgid "" msgstr "" "Project-Id-Version: oslo.policy\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2018-08-07 10:53+0000\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "PO-Revision-Date: 2018-08-08 09:50+0000\n" "Last-Translator: Andi Chandler \n" "Language-Team: English (United Kingdom)\n" "Language: en_GB\n" "X-Generator: Zanata 4.3.3\n" "Plural-Forms: nplurals=2; plural=(n != 1)\n" msgid "1.15.0" msgstr "1.15.0" msgid "1.22.1" msgstr "1.22.1" msgid "1.25.2-2" msgstr "1.25.2-2" msgid "1.29.0" msgstr "1.29.0" msgid "1.33.0" msgstr "1.33.0" msgid "1.33.1-4" msgstr "1.33.1-4" msgid "1.37.0" msgstr "1.37.0" msgid "1.38.0" msgstr "1.38.0" msgid "1.38.1" msgstr "1.38.1" msgid "1.9.0" msgstr "1.9.0" msgid "" "A new configuration option has been added to the ``[oslo_policy]`` group " "called ``enforce_scope``. When set to ``True``, oslo.policy will raise an " "``InvalidScope`` exception if the context passed into the enforce method " "doesn't match the policy's ``scope_types``. If ``False``, a warning will be " "logged for operators. 
Note that operators should only enable this option " "once they've audited their users to ensure system users have roles on the " "system. This could potentially prevent some users from being able to make " "system-level API calls. This will also give other services the flexibility " "to fix long-standing RBAC issues in OpenStack once they start introducing " "``scope_types`` for policies used in their service." msgstr "" "A new configuration option has been added to the ``[oslo_policy]`` group " "called ``enforce_scope``. When set to ``True``, oslo.policy will raise an " "``InvalidScope`` exception if the context passed into the enforce method " "doesn't match the policy's ``scope_types``. If ``False``, a warning will be " "logged for operators. Note that operators should only enable this option " "once they've audited their users to ensure system users have roles on the " "system. This could potentially prevent some users from being able to make " "system-level API calls. This will also give other services the flexibility " "to fix long-standing RBAC issues in OpenStack once they start introducing " "``scope_types`` for policies used in their service." msgid "" "Add ``sphinxpolicygen`` Sphinx plugin, which can be used to generate a " "sample policy file for use in documentation." msgstr "" "Add ``sphinxpolicygen`` Sphinx plugin, which can be used to generate a " "sample policy file for use in documentation." msgid "" "Add support for custom rule check plugins. ``http`` and ``https`` external " "rule checks have been converted into stevedore plugins and serve as examples." msgstr "" "Add support for custom rule check plugins. ``http`` and ``https`` external " "rule checks have been converted into stevedore plugins and serve as examples." msgid "" "Added the option to define a more descriptive policy rule by using `policy." "DocumentedRuleDefault` class. When using this class it is required that the " "description and operations parameters are defined, unlike `policy." "RuleDefault`. The operations parameter is a list of dictionaries that must " "contain the two keys 'path' and 'method' which represent the API URL and the " "HTTP REQUEST METHOD. More information can be found in the `policy usage " "documentation `_." msgstr "" "Added the option to define a more descriptive policy rule by using `policy." "DocumentedRuleDefault` class. When using this class it is required that the " "description and operations parameters are defined, unlike `policy." "RuleDefault`. The operations parameter is a list of dictionaries that must " "contain the two keys 'path' and 'method' which represent the API URL and the " "HTTP REQUEST METHOD. More information can be found in the `policy usage " "documentation `_." msgid "" "As reported in launchpad bug 1723030, under some circumstances policy checks " "caused a significant performance degradation. This release includes improved " "logic around rule validation to prevent that." msgstr "" "As reported in Launchpad bug 1723030, under some circumstances policy checks " "caused a significant performance degradation. This release includes improved " "logic around rule validation to prevent that." 
msgid "Bug Fixes" msgstr "Bug Fixes" msgid "New Features" msgstr "New Features" msgid "Newton Series Release Notes" msgstr "Newton Series Release Notes" msgid "Ocata Series Release Notes" msgstr "Ocata Series Release Notes" msgid "Other Notes" msgstr "Other Notes" msgid "Pike Series Release Notes" msgstr "Pike Series Release Notes" msgid "Queens Series Release Notes" msgstr "Queens Series Release Notes" msgid "Rocky Series Release Notes" msgstr "Rocky Series Release Notes" msgid "Switch to reno for managing release notes." msgstr "Switch to reno for managing release notes." msgid "Unreleased Release Notes" msgstr "Unreleased Release Notes" msgid "" "[`bug 1741073 `_] " "Documentation has been improved to include ``oslopolicy-sample-generator`` " "and ``oslopolicy-list-redundant`` usage." msgstr "" "[`bug 1741073 `_] " "Documentation has been improved to include ``oslopolicy-sample-generator`` " "and ``oslopolicy-list-redundant`` usage." msgid "" "[`bug 1771442 `_] " "Policy rules that are deprecated for removal are now properly formatted when " "rendering sample policy files for documentation." msgstr "" "[`bug 1771442 `_] " "Policy rules that are deprecated for removal are now properly formatted when " "rendering sample policy files for documentation." msgid "" "[`bug 1773473 `_] The " "``sphinxext`` extension for rendering policy documentation now supports " "``scope_types`` attributes." msgstr "" "[`bug 1773473 `_] The " "``sphinxext`` extension for rendering policy documentation now supports " "``scope_types`` attributes." msgid "" "[`bug 1779172 `_] The " "``enforce()`` method now supports the ability to parse ``oslo.context`` " "objects if passed into ``enforce()`` as ``creds``. This provides more " "consistent policy enforcement for service developers by ensuring the " "attributes provided in policy enforcement are standardized. In this case " "they are being standardized through the ``oslo_context.context." "RequestContext.to_policy_values()`` method." msgstr "" "[`bug 1779172 `_] The " "``enforce()`` method now supports the ability to parse ``oslo.context`` " "objects if passed into ``enforce()`` as ``creds``. This provides more " "consistent policy enforcement for service developers by ensuring the " "attributes provided in policy enforcement are standardised. In this case " "they are being standardised through the ``oslo_context.context." "RequestContext.to_policy_values()`` method." msgid "oslo.policy Release Notes" msgstr "oslo.policy Release Notes" ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1645118009.8435183 oslo.policy-3.11.0/releasenotes/source/locale/fr/0000775000175000017500000000000000000000000021730 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1645118009.859518 oslo.policy-3.11.0/releasenotes/source/locale/fr/LC_MESSAGES/0000775000175000017500000000000000000000000023515 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/source/locale/fr/LC_MESSAGES/releasenotes.po0000664000175000017500000000204000000000000026542 0ustar00zuulzuul00000000000000# Gérald LONLAS , 2016. 
#zanata msgid "" msgstr "" "Project-Id-Version: oslo.policy Release Notes 1.16.1\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2016-10-23 20:40+0000\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "PO-Revision-Date: 2016-10-22 06:02+0000\n" "Last-Translator: Gérald LONLAS \n" "Language-Team: French\n" "Language: fr\n" "X-Generator: Zanata 3.7.3\n" "Plural-Forms: nplurals=2; plural=(n > 1)\n" msgid "1.15.0" msgstr "1.15.0" msgid "1.9.0" msgstr "1.9.0" msgid "New Features" msgstr "Nouvelles fonctionnalités" msgid "Newton Series Release Notes" msgstr "Note de release pour Newton" msgid "Other Notes" msgstr "Autres notes" msgid "Switch to reno for managing release notes." msgstr "Commence à utiliser reno pour la gestion des notes de release" msgid "Unreleased Release Notes" msgstr "Note de release pour les changements non déployées" msgid "oslo.policy Release Notes" msgstr "Note de release pour oslo.policy" ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/source/newton.rst0000664000175000017500000000021600000000000022125 0ustar00zuulzuul00000000000000============================= Newton Series Release Notes ============================= .. release-notes:: :branch: origin/stable/newton ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/source/ocata.rst0000664000175000017500000000023000000000000021676 0ustar00zuulzuul00000000000000=================================== Ocata Series Release Notes =================================== .. release-notes:: :branch: origin/stable/ocata ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/source/pike.rst0000664000175000017500000000021700000000000021544 0ustar00zuulzuul00000000000000=================================== Pike Series Release Notes =================================== .. release-notes:: :branch: stable/pike ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/source/queens.rst0000664000175000017500000000022300000000000022111 0ustar00zuulzuul00000000000000=================================== Queens Series Release Notes =================================== .. release-notes:: :branch: stable/queens ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/source/rocky.rst0000664000175000017500000000022100000000000021736 0ustar00zuulzuul00000000000000=================================== Rocky Series Release Notes =================================== .. release-notes:: :branch: stable/rocky ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/source/stein.rst0000664000175000017500000000022100000000000021731 0ustar00zuulzuul00000000000000=================================== Stein Series Release Notes =================================== .. 
release-notes:: :branch: stable/stein ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/source/train.rst0000664000175000017500000000017600000000000021735 0ustar00zuulzuul00000000000000========================== Train Series Release Notes ========================== .. release-notes:: :branch: stable/train ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/source/unreleased.rst0000664000175000017500000000014400000000000022742 0ustar00zuulzuul00000000000000========================== Unreleased Release Notes ========================== .. release-notes:: ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/source/ussuri.rst0000664000175000017500000000020200000000000022140 0ustar00zuulzuul00000000000000=========================== Ussuri Series Release Notes =========================== .. release-notes:: :branch: stable/ussuri ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/source/victoria.rst0000664000175000017500000000021200000000000022427 0ustar00zuulzuul00000000000000============================= Victoria Series Release Notes ============================= .. release-notes:: :branch: stable/victoria ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/source/wallaby.rst0000664000175000017500000000020600000000000022245 0ustar00zuulzuul00000000000000============================ Wallaby Series Release Notes ============================ .. release-notes:: :branch: stable/wallaby ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/releasenotes/source/xena.rst0000664000175000017500000000017200000000000021547 0ustar00zuulzuul00000000000000========================= Xena Series Release Notes ========================= .. release-notes:: :branch: stable/xena ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/requirements.txt0000664000175000017500000000072400000000000017360 0ustar00zuulzuul00000000000000# The order of packages is significant, because pip processes them in the order # of appearance. Changing the order has an impact on the overall integration # process, which may cause wedges in the gate later. 
requests>=2.14.2 # Apache-2.0 oslo.config>=6.0.0 # Apache-2.0 oslo.context>=2.22.0 # Apache-2.0 oslo.i18n>=3.15.3 # Apache-2.0 oslo.serialization!=2.19.1,>=2.18.0 # Apache-2.0 PyYAML>=5.1 # MIT stevedore>=1.20.0 # Apache-2.0 oslo.utils>=3.40.0 # Apache-2.0 ././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1645118009.859518 oslo.policy-3.11.0/sample_data/0000775000175000017500000000000000000000000016343 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/sample_data/auth_v3_token_admin.json0000664000175000017500000001162000000000000023157 0ustar00zuulzuul00000000000000{ "token": { "methods": [ "password" ], "roles": [ { "name": "admin", "id":"41b1af9bb39241e8b8b79fae5906abcc" } ], "expires_at": "2038-01-18T21:14:07Z", "issued_at": "2000-01-18T21:14:07Z", "project": { "id": "tenant_id1", "domain": { "id": "domain_id1", "name": "domain_name1" }, "enabled": true, "description": null, "name": "tenant_name1" }, "catalog": [ { "endpoints": [ { "id": "f84e070735e54914b41e2b5cfa94dcf7", "interface": "admin", "url": "http://127.0.0.1:8776/v1/64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" }, { "id": "8220bba1d2844e0b81b171c6ede1155f", "interface": "internal", "url": "http://127.0.0.1:8776/v1/64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" }, { "id": "719b92ea82a04e7a9ff1107c62da10da", "interface": "public", "url": "http://127.0.0.1:8776/v1/64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" } ], "type": "volume", "name": "volume", "id":"547e9195d1914b5eb087bedbc98fccc3" }, { "endpoints": [ { "id": "44752324c0d44375bc854168ea22f1fc", "interface": "admin", "url": "http://127.0.0.1:9292/v1", "region": "regionOne" }, { "id": "a59b3734f57449078f1637c10f96c8e8", "interface": "internal", "url": "http://127.0.0.1:9292/v1", "region": "regionOne" }, { "id": "16c3ab1a4df640569812e432c98b2a48", "interface": "public", "url": "http://127.0.0.1:9292/v1", "region": "regionOne" } ], "type": "image", "name": "glance", "id": "22c15d232e55419eb4aeb3ebbd12aac2" }, { "endpoints": [ { "id": "9c2fdc2d45bb45c5a7f973e235e0f998", "interface": "admin", "url": "http://127.0.0.1:8774/v1.1/64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" }, { "id": "88ccfa8cbb7743998b38b998f4e6a720", "interface": "internal", "url": "http://127.0.0.1:8774/v1.1/64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" }, { "id": "113ee928c6934c92b9a12bd4e456c804", "interface": "public", "url": "http://127.0.0.1:8774/v1.1/64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" } ], "type": "compute", "name": "nova", "id": "fbf2afcdeb10473392636df9785d3fb5" }, { "endpoints": [ { "id": "c10a5cda00784049953296d18464aa38", "interface": "admin", "url": "http://127.0.0.1:35357/v3", "region": "RegionOne" }, { "id": "334650263e064428bb2f0b7c3c7a743c", "interface": "internal", "url": "http://127.0.0.1:35357/v3", "region": "RegionOne" }, { "id": "52ff54addc38430d9b656c7164e2caf8", "interface": "public", "url": "http://127.0.0.1:5000/v3", "region": "RegionOne" } ], "type": "identity", "name": "keystone", "id": "a0d9913a4bca4d5699e151804e0b5172" } ], "user": { "domain": { "id": "domain_id1", "name": "domain_name1" }, "name": "user_name1", "id": "user_id1" } } } ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/sample_data/auth_v3_token_member.json0000664000175000017500000001162600000000000023344 
0ustar00zuulzuul00000000000000{ "token": { "methods": [ "password" ], "roles": [ { "name": "member", "id": "bb8d0f54-a1c9-444f-81da-231aef4d7e03" } ], "expires_at": "2038-01-18T21:14:07Z", "issued_at": "2000-01-18T21:14:07Z", "project": { "id": "tenant_id1", "domain": { "id": "domain_id1", "name": "domain_name1" }, "enabled": true, "description": null, "name": "tenant_name1" }, "catalog": [ { "endpoints": [ { "id": "f84e070735e54914b41e2b5cfa94dcf7", "interface": "admin", "url": "http://127.0.0.1:8776/v1/64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" }, { "id": "8220bba1d2844e0b81b171c6ede1155f", "interface": "internal", "url": "http://127.0.0.1:8776/v1/64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" }, { "id": "719b92ea82a04e7a9ff1107c62da10da", "interface": "public", "url": "http://127.0.0.1:8776/v1/64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" } ], "type": "volume", "name": "volume", "id":"547e9195d1914b5eb087bedbc98fccc3" }, { "endpoints": [ { "id": "44752324c0d44375bc854168ea22f1fc", "interface": "admin", "url": "http://127.0.0.1:9292/v1", "region": "regionOne" }, { "id": "a59b3734f57449078f1637c10f96c8e8", "interface": "internal", "url": "http://127.0.0.1:9292/v1", "region": "regionOne" }, { "id": "16c3ab1a4df640569812e432c98b2a48", "interface": "public", "url": "http://127.0.0.1:9292/v1", "region": "regionOne" } ], "type": "image", "name": "glance", "id": "22c15d232e55419eb4aeb3ebbd12aac2" }, { "endpoints": [ { "id": "9c2fdc2d45bb45c5a7f973e235e0f998", "interface": "admin", "url": "http://127.0.0.1:8774/v1.1/64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" }, { "id": "88ccfa8cbb7743998b38b998f4e6a720", "interface": "internal", "url": "http://127.0.0.1:8774/v1.1/64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" }, { "id": "113ee928c6934c92b9a12bd4e456c804", "interface": "public", "url": "http://127.0.0.1:8774/v1.1/64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" } ], "type": "compute", "name": "nova", "id": "fbf2afcdeb10473392636df9785d3fb5" }, { "endpoints": [ { "id": "c10a5cda00784049953296d18464aa38", "interface": "admin", "url": "http://127.0.0.1:35357/v3", "region": "RegionOne" }, { "id": "334650263e064428bb2f0b7c3c7a743c", "interface": "internal", "url": "http://127.0.0.1:35357/v3", "region": "RegionOne" }, { "id": "52ff54addc38430d9b656c7164e2caf8", "interface": "public", "url": "http://127.0.0.1:5000/v3", "region": "RegionOne" } ], "type": "identity", "name": "keystone", "id": "a0d9913a4bca4d5699e151804e0b5172" } ], "user": { "domain": { "id": "domain_id1", "name": "domain_name1" }, "name": "user_name1", "id": "user_id1" } } } ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/sample_data/auth_v3_token_system_admin.json0000664000175000017500000001164400000000000024571 0ustar00zuulzuul00000000000000{ "token": { "methods": [ "password" ], "expires_at": "2038-01-18T21:14:07Z", "issued_at": "2000-01-18T21:14:07Z", "roles": [ { "id":"41b1af9bb39241e8b8b79fae5906abcc", "name": "admin" }, { "id": "ac9add6b3c5a46dcaaf21390c4657949", "name": "member" }, { "id": "b0cb8117845f4fd489865d498b80bab3", "name": "reader" } ], "system": { "all": true }, "catalog": [ { "endpoints": [ { "id": "f84e070735e54914b41e2b5cfa94dcf7", "interface": "admin", "url": "http://127.0.0.1:8776/v1/64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" }, { "id": "8220bba1d2844e0b81b171c6ede1155f", "interface": "internal", "url": 
"http://127.0.0.1:8776/v1/64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" }, { "id": "719b92ea82a04e7a9ff1107c62da10da", "interface": "public", "url": "http://127.0.0.1:8776/v1/64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" } ], "type": "volume", "name": "volume", "id":"547e9195d1914b5eb087bedbc98fccc3" }, { "endpoints": [ { "id": "44752324c0d44375bc854168ea22f1fc", "interface": "admin", "url": "http://127.0.0.1:9292/v1", "region": "regionOne" }, { "id": "a59b3734f57449078f1637c10f96c8e8", "interface": "internal", "url": "http://127.0.0.1:9292/v1", "region": "regionOne" }, { "id": "16c3ab1a4df640569812e432c98b2a48", "interface": "public", "url": "http://127.0.0.1:9292/v1", "region": "regionOne" } ], "type": "image", "name": "glance", "id": "22c15d232e55419eb4aeb3ebbd12aac2" }, { "endpoints": [ { "id": "9c2fdc2d45bb45c5a7f973e235e0f998", "interface": "admin", "url": "http://127.0.0.1:8774/v1.1/64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" }, { "id": "88ccfa8cbb7743998b38b998f4e6a720", "interface": "internal", "url": "http://127.0.0.1:8774/v1.1/64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" }, { "id": "113ee928c6934c92b9a12bd4e456c804", "interface": "public", "url": "http://127.0.0.1:8774/v1.1/64b6f3fbcc53435e8a60fcf89bb6617a", "region": "regionOne" } ], "type": "compute", "name": "nova", "id": "fbf2afcdeb10473392636df9785d3fb5" }, { "endpoints": [ { "id": "c10a5cda00784049953296d18464aa38", "interface": "admin", "url": "http://127.0.0.1:35357/v3", "region": "RegionOne" }, { "id": "334650263e064428bb2f0b7c3c7a743c", "interface": "internal", "url": "http://127.0.0.1:35357/v3", "region": "RegionOne" }, { "id": "52ff54addc38430d9b656c7164e2caf8", "interface": "public", "url": "http://127.0.0.1:5000/v3", "region": "RegionOne" } ], "type": "identity", "name": "keystone", "id": "a0d9913a4bca4d5699e151804e0b5172" } ], "user": { "domain": { "id": "domain_id1", "name": "domain_name1" }, "name": "user_name1", "id": "user_id1" } } } ././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1645118009.863518 oslo.policy-3.11.0/setup.cfg0000664000175000017500000000300000000000000015703 0ustar00zuulzuul00000000000000[metadata] name = oslo.policy summary = Oslo Policy library description_file = README.rst author = OpenStack author_email = openstack-discuss@lists.openstack.org home_page = https://docs.openstack.org/oslo.policy/latest/ python_requires = >=3.6 classifier = Environment :: OpenStack Intended Audience :: Information Technology Intended Audience :: System Administrators License :: OSI Approved :: Apache Software License Operating System :: POSIX :: Linux Programming Language :: Python Programming Language :: Python :: 3 Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3 :: Only Programming Language :: Python :: Implementation :: CPython [files] packages = oslo_policy [entry_points] oslo.config.opts = oslo.policy = oslo_policy.opts:list_opts console_scripts = oslopolicy-checker = oslo_policy.shell:main oslopolicy-sample-generator = oslo_policy.generator:generate_sample oslopolicy-policy-generator = oslo_policy.generator:generate_policy oslopolicy-list-redundant = oslo_policy.generator:list_redundant oslopolicy-policy-upgrade = oslo_policy.generator:upgrade_policy oslopolicy-validator = oslo_policy.generator:validate_policy oslopolicy-convert-json-to-yaml = 
oslo_policy.generator:convert_policy_json_to_yaml oslo.policy.rule_checks = http = oslo_policy._external:HttpCheck https = oslo_policy._external:HttpsCheck [egg_info] tag_build = tag_date = 0 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/setup.py0000664000175000017500000000127100000000000015604 0ustar00zuulzuul00000000000000# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import setuptools setuptools.setup( setup_requires=['pbr>=2.0.0'], pbr=True) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/test-requirements.txt0000664000175000017500000000062000000000000020330 0ustar00zuulzuul00000000000000# The order of packages is significant, because pip processes them in the order # of appearance. Changing the order has an impact on the overall integration # process, which may cause wedges in the gate later. oslotest>=3.2.0 # Apache-2.0 requests-mock>=1.2.0 # Apache-2.0 stestr>=2.0.0 # Apache-2.0 sphinx>=2.0.0,!=2.1.0 # BSD # computes code coverage percentages coverage!=4.4,>=4.0 # Apache-2.0 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1645117929.0 oslo.policy-3.11.0/tox.ini0000664000175000017500000000272100000000000015406 0ustar00zuulzuul00000000000000[tox] minversion = 3.18.0 envlist = py3,pep8,docs ignore_basepython_conflict = true [testenv] basepython = python3 deps = -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} -r{toxinidir}/test-requirements.txt -r{toxinidir}/requirements.txt -r{toxinidir}/doc/requirements.txt commands = stestr run --slowest {posargs} [testenv:pep8] deps = pre-commit>=2.6.0 # MIT bandit>=1.6.0,<1.7.0 # Apache-2.0 commands = pre-commit run -a # Run security linter bandit -r oslo_policy tests -n5 [testenv:venv] commands = {posargs} [testenv:docs] allowlist_externals = rm deps = {[testenv]deps} -r{toxinidir}/doc/requirements.txt commands = rm -rf doc/build doc/source/reference/api sphinx-build -W --keep-going -b html doc/source doc/build/html [testenv:cover] setenv = PYTHON=coverage run --source oslo_policy --parallel-mode commands = stestr run --slowest {posargs} coverage combine coverage html -d cover coverage report [flake8] show-source = True # W503 line break before binary operator # W504 line break after binary operator ignore = W503,W504 builtins = _ exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build [hacking] import_exceptions = oslo_policy._i18n [testenv:releasenotes] deps = -r{toxinidir}/doc/requirements.txt allowlist_externals = rm commands = rm -rf releasenotes/build sphinx-build -a -E -W -d releasenotes/build/doctrees --keep-going -b html releasenotes/source releasenotes/build/html