python-tackerclient-2.1.0/.coveragerc
[run]
branch = True
source = tackerclient
omit = tackerclient/openstack/*,tackerclient/tests/*

[report]
ignore_errors = True

python-tackerclient-2.1.0/.pylintrc
# The format of this file isn't really documented; just use --generate-rcfile
[MASTER]
# Add to the black list. It should be a base name, not a
# path. You may set this option multiple times.
ignore=test

[Messages Control]
# NOTE(justinsb): We might want to have a 2nd strict pylintrc in future
# C0111: Don't require docstrings on every method
# W0511: TODOs in code comments are fine.
# W0142: *args and **kwargs are fine.
# W0622: Redefining id is fine.
disable=C0111,W0511,W0142,W0622

[Basic]
# Variable names can be 1 to 31 characters long, with lowercase and underscores
variable-rgx=[a-z_][a-z0-9_]{0,30}$

# Argument names can be 2 to 31 characters long, with lowercase and underscores
argument-rgx=[a-z_][a-z0-9_]{1,30}$

# Method names should be at least 3 characters long
# and be lowercased with underscores
method-rgx=([a-z_][a-z0-9_]{2,50}|setUp|tearDown)$

# Module names matching quantum-* are ok (files in bin/)
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+)|(quantum-[a-z0-9_-]+))$

# Don't require docstrings on tests.
no-docstring-rgx=((__.*__)|([tT]est.*)|setUp|tearDown)$

[Design]
max-public-methods=100
min-public-methods=0
max-args=6

[Variables]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid defining new builtins when possible.
# _ is used by our localization additional-builtins=_ ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/.stestr.conf0000664000175000017500000000007200000000000017774 0ustar00zuulzuul00000000000000[DEFAULT] test_path=./tackerclient/tests/unit top_path=./ ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/.zuul.yaml0000664000175000017500000000023200000000000017462 0ustar00zuulzuul00000000000000- project: templates: - check-requirements - openstack-python3-jobs - publish-openstack-docs-pti - release-notes-jobs-python3 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864642.0 python-tackerclient-2.1.0/AUTHORS0000664000175000017500000001123500000000000016576 0ustar00zuulzuul0000000000000098k <18552437190@163.com> Ai Hamano Aldinson Esto Alfredo Moralejo Andreas Jaeger Anh Tran Anusree Ayumu Ueha Bharath Thiruveedula Bob HADDLETON Cong Phuoc Hoang Dharmendra Kushwaha Dimitrios Markou Dirk Mueller Donghun Cha Doug Hellmann Flavio Percoco Ghanshyam Mann Hervé Beraud Hideki Matsuda Hideki Saito Hirofumi Noguchi Hiromu Asahina Hiroo Kitamura Hiroya Nakaya Isaku Yamahata Itsuro Oda Janki Janki Janki Chhatbar Janki Chhatbar Janonymous Javier Pena Jeremy Stanley Joel Capitao KLuka Kanagaraj Manickam Kaori Mitani Kawaguchi Kentaro Koichi Edagawa Koji Shimizu LiangLu LongKB Lu lei Luka Krajger Manikantha Srinivas Tadi Manpreet Kaur Martin Oemke Monty Taylor Naoya Harada Neeldhwaj Pathak Nguyen Hai Nguyen Hai Truong Ondřej Nový OpenStack Release Bot Pooja Singla Renu Ronald Bradford Sairam Vengala Saju Madhavan Sean McGinnis ShangXiao Sharat Sharma Shrinath Suresh Shubham Shubham Potale Sridhar Ramaswamy Sripriya Steve Martinelli Swapnil Kulkarni (coolsvap) Thomas Bechtold Tim Rozet Tin Lam Tony Breeds Tony Xu Trevor McCasland Trinath Somanchi Trinh Nguyen Vieri <15050873171@163.com> Vishwanath Jayaraman Wataru Juso Yasufumi Ogawa Yi Feng ZhongShengping aksingh an.abdulrehman changzhi claire1006 dharmendra dharmendra dharmendra kushwaha digambar gaofei gecong1973 gong yong sheng gongysh howardlee huxining jacky06 janki ji-xuepeng kavithahr kexuesheng lingyongxu maaoyu nirajsingh pengyuesheng prankul mahajan psingla qingszhao rajat29 ricolin rtmdk shihanzhang shu-mutou sunqingliang6 vagrant venkatamahesh vish wangzihao wu.chunyang wu.chunyang xu-haiwei yatinkarel yong sheng gong zhangboye ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/CONTRIBUTING.rst0000664000175000017500000000105000000000000020161 0ustar00zuulzuul00000000000000If you would like to contribute to the development of OpenStack, you must follow the steps in this page: https://docs.openstack.org/infra/manual/developers.html Once those steps have been completed, changes to OpenStack should be submitted for review via the Gerrit tool, following the workflow documented at: https://docs.openstack.org/infra/manual/developers.html#development-workflow Pull requests submitted through GitHub will be ignored. 
Bugs should be filed on Launchpad, not GitHub: https://bugs.launchpad.net/python-tackerclient ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864642.0 python-tackerclient-2.1.0/ChangeLog0000664000175000017500000004165400000000000017310 0ustar00zuulzuul00000000000000CHANGES ======= 2.1.0 ----- * Update master for stable/2024.1 * reno: Update master for unmaintained/xena * reno: Update master for unmaintained/wallaby * reno: Update master for unmaintained/victoria * Fix output regarding PM Job response * Fix the handling of content-type in HTTPClient * reno: Update master for unmaintained/yoga 2.0.0 ----- * Obsolete Legacy APIs excluding VIM feature * Fix unit test error caused by cliff upgrade * Remove legacy documentation * Update master for stable/2023.2 1.14.0 ------ * Update metadata in setup.cfg * Update master for stable/2023.1 1.13.0 ------ * Fix list VNF/Subscription UT error * Extra param for OpenStack type when register vim * Add support cnf auto scale via threshold interface * Extends unit test code support for merged patch * Add deprecation notice for Legacy APIs * Fix tox4 error * Update python classifier for python 3.10 * Switch to 2023.1 Python3 unit tests and generic template name * Update master for stable/zed * Add docs for cnf auto heal and scale 1.12.0 ------ * Add support cnf auto heal and scale * Support listing all records at once with paging * Update Python-TackerClient Documentation * Support for 'extra' parameters for vim register * Add OpenID Connect Token Auth for k8s * Add vnflcm subscriptions APIs * Add Python3 zed unit tests 1.11.0 ------ * Update master for stable/yoga 1.10.0 ------ * Support ChangeVNFPackage command in tackerclient * Add vnflcm v2 APIs * Help message of heal cli modified * Fix in "vnflcm op list" CLI with exclude-fields 1.9.0 ----- * Update python testing classifier * Support of Cancel VNF command in openstackclient * Add Python3 yoga unit tests * Drop test for lower constraints * Update master for stable/xena 1.8.0 ----- * Multi version API support * Remove use of testtools.helpers.safe\_hasattr * Remove broken link from Python-TackerClient guide * Correct YAML load warning * vnfConfigurableProperties missing in VNF show 1.7.0 ----- * Use assertCountEqual instead of assertItemsEqual * Update min version of tox * setup.cfg: Replace dashes with underscores * Add Python3 xena unit tests * Adds support force delete VNFFG * Fix failing UT in TestListVnfLcmOp * Update master for stable/wallaby 1.6.0 ----- * CLI for Individual VNF LCM Operation Occurrence * Support CLI for Getting List of VNF LCM Operation Occurrences * Support of Retry VNF command in openstackclient * Support CLI of Change External VNF Connectivity * Support of Fail VNF command in openstackclient * Fix error message for nonexistent vnf package * Dropping explicit unicode literal * Fix old links in installation guide * Update TOX\_CONSTRAINTS\_FILE 1.5.0 ----- * Modify operation of scale parameters * Move python-tackerclient to new hacking 4.0.0 1.4.0 ----- * Drop six support * Modify the description of the command --help * Remove the unused coding style modules * Add py38 as a runtime in tox.ini * Remove six.moves.urllib * Add Python3 wallaby unit tests * Update master for stable/victoria 1.3.0 ----- * Support of RollbackVNF command in openstackclient * Deleting Network Service(NS) with force option bug fixes * Implementation Artifacts support in Tacker * Support of Scale command in openstackclient * Add missing argument in 
UpdateVNFFG at ../nfvo/vnffg.py * Support of UpdateVNF command in openstackclient * drop mock from lower-constraints * Replace assertItemsEqual with assertCountEqual * Stop to use the \_\_future\_\_ module 1.2.0 ----- * Fix exception message * Use unittest.mock instead of third party mock * Switch to newer openstackdocstheme and reno versions * Fix hacking min version to 3.0.1 * Cleanup deprecated iteritems of dict * Display correct fields in output for list command * Improve readability of vnflcm show command * Improve readability of vnf package show command * Add Python3 victoria unit tests * Update master for stable/ussuri 1.1.0 ----- * Enhance vnf package list command to support filtering of vnf packages * Update hacking for Python3 * Add command for Fetch VNF package API * Add command to read vnfd of vnf package API * Add command for update vnf package API * Cleanup py27 support * OSC support to heal vnf * OSC support to list vnfs * OSC support to delete and terminate vnf * OSC support to instantiate and show vnf * OSC support to create vnf using vnflcm API * Replace assertItemsEqual with assertCountEqual * Support updating VNF parameters in tackerclient 1.0.0 ----- * Drop python 2.7 support and testing * Show proper error message for non-existing vnf package * Update master for stable/train * Add restFul methods to fix backward compatibility issue 0.16.0 ------ * Add documentation for VNF Package commands * Add command for upload vnf package API * Add commands for list, show and delete vnf package API's * OSC support for VNF package APIs * fix:"openstack vnf set" command can't execute * Blacklist sphinx 2.1.0 (autodoc bug) * Add Python 3 Train unit tests * Adds support force delete for NS * fix:osc scale is failed * Replace git.openstack.org URLs with opendev.org URLs * update sphinx requirement * Drop py35 jobs * OpenDev Migration Patch * Update master for stable/stein * Update hacking version * add python 3.7 unit test job 0.15.0 ------ * Add upper-constraints.txt to releasenotes tox environment * Parameter changed from mgmt\_url to mgmt\_ip\_address * add python 3.6 unit test job * Use template for lower-constraints * Change openstack-dev to openstack-discuss * Add Python 3.6 classifier to setup.cfg * Adds support force delete resources * Add python 3.6 unit test job * Remove tacker command guide in docs * switch documentation job to new PTI * import zuul job settings from project-config * Update reno for stable/rocky 0.14.0 ------ * Add reno note for updates in NS and VNFFG list commands * Add reno: Fix the VNFFG update osc command * Trivial: Update HACKING.rst with stestr * Trivial: Fix a typo in release notes * Add reno: Fix cannot show the VNFFGD template 0.13.0 ------ * Fix errors in README * Add release note link in README * Updates in NS and VNFFG list commands * Add deprecation note for tacker CLI * Fix: Tackerclient failed to handle yaml load error * fix tox python3 overrides * Update homepage link in setup.cfg * Switch to using stestr * Fix the old doc links * add lower-constraints job * Fix some reST field lists in docstrings * Updated from global requirements 0.12.0 ------ * Add reno note for tacker support osc commands * Deperate tacker command lines * Complete VNF osc commands * Updated from global requirements * Fix the VNFFG update osc command * Add documentation for python-tackerclient * Updated from global requirements * Complete VNFFG & related VNFFG osc commands * Complete NS osc commands * Complete Event osc commands * Complete NSD osc commands * Do not have 
to mention ssl\_ca\_cert in vim config file (client) * Fix "F821 undefined name 'unicode' error when run tox pep8 * Complete VNFFGD osc commands * Add --tenant-id in VIM & VNFD osc commands * Update README.rst and add CONTRIBUTING.rst to repo * Fix cannot show the VNFFGD template * Fix typo * Complete VNFD osc commands * Revert "Add reno note for classifier name field in its list command" it should be at stable/queens branch This reverts commit df80486f32cf0011faadcbbc73e595e4afd4364c * Revert "Add reno note for cert\_verify in vim config file" this should be in queens branch This reverts commit e557ecde7274da77c7aa6fa77227f8eb2e5f9a9e * Revert "Add reno note for vnffg template updation command" * Implement Tacker Client to support VNF cluster features * Add reno note for cert\_verify in vim config file * Add reno note for vnffg template updation command * Add reno note for classifier name field in its list command * Update reno for stable/queens 0.11.0 ------ * Show classifier's name * Updated from global requirements * Complete VIM osc commands * Updated from global requirements * fix misspell * Updated from global requirements * Add "cert\_verify" in vim\_config file to support insecure VIM * Add "--vnffgd-template" to vnffg-update command * Base OpenStackClient(OSC) plugin support * Add doc migration framework * let hacking install its dependencies * Update the documentation link for doc migration * Updated from global requirements * Avoid tox\_install.sh for constraints support * Remove setting of version/release from releasenotes * Updated from global requirements * Updated from global requirements * Implement client to support Kubernetes as VIM * Updated from global requirements * Fixes symmetrical for vnffg update * Fixes passing boolean as string for symmetrical * Move oslosphinx and openstackdocstheme to test-requirements * Updated from global requirements * Updated from global requirements * Update creating directly VNFFG and NS from descriptor template * Update reno for stable/pike * Updated from global requirements * Updated from global requirements * Switch from oslosphinx to openstackdocstheme * Error handling for vnfd-create with empty vnfd-file * Updated from global requirements 0.10.0 ------ * Add release note for vim update without config-file argument * Replace six.iteritems() with .items() * Updated from global requirements * Updated from global requirements * Add releasenotes for bug 1629169 * Auth\_creds should show the user specified values * Add "is\_default" attribute in vim-list,and remove some unnecessary info * Updated from global requirements * Updated from global requirements * Updated from global requirements * 'update-vim' to update params without config file * Updated from global requirements * Updated from global requirements * Updated from global requirements * direct input for config was deprecated in ocata. 
Removing now * Updated from global requirements * Refactor mox references to use the mock library in pythontackerclient * Remove log translations * Revert "Switch to oslo\_log" * Switch to oslo\_log * vnfd-delete gives ambiguous success message * The Python 3.5 is added * Updated from global requirements * sync test-requirement with global requirements * Updated from global requirements * Updated from global requirements * Updated from global requirements * Update reno for stable/ocata 0.9.0 ----- * Add release notes for Ocata * Deprecate direct YAML input in tackerclient * Support VNF creation from inline VNF template * Removes unused utf-8 encoding declaration * Adds vnfd\_id coloumn in vnf-list * Remove unwanted 'required' parameters * Remove support for legacy VNFD templates * Improve help text for events-list command options * Fix invalid unit test code in python-tackerclient 0.8.0 ----- * Implement client support for NSD * Replaced e.message with str(e) * Support parameter input for vnffg template * Add Constraints support * Remove passing infra and mgmt driver from client * Fix oslo.i18n problems in python-tackerclient * Changed the help message of --config-file parameter * Updated from global requirements * Show team and repo badges on README * Updated from global requirements * Add \_\_ne\_\_ built-in function * Usability improvements of vim-update options * Updated from global requirements * Updated from global requirements * Add stevedore to requirements * Updated from global requirements * Revert "Remove unused import library" * make python34 CI job to pass * mox to mock refactor * Remove unused cliff tablib from test requirements * rename vm into vnfm * Enable release notes translation * Updated home-page info with the developer documentation * Updated from global requirements * Using assertIsInstance() instead of assertEqual(True, isinstance()) * Remove commented out code in test\_shell.py * Updated from global requirements * Updated from global requirements * Fix cString ImportError for py34 * Updated from global requirements * Updated from global requirements * Body can not be None while creating vnfd * Add support for multi delete * Update reno for stable/newton * Remove "else" branch in "create\_vnfd" function * Allow auth url without port for vim registration 0.7.0 ----- * Moving test files to vm directory * Updated from global requirements * Modify MyURLComparator to handle deep match on URL * Deprecate infra\_driver and mgmt\_driver * Remove list\_vnf\_resource\_pagination unit test * Removing test cases for already removed methods * Add client support for VNFFG in NFVO * cli: modify vnfd, param & config attr. 
to dict obj * Adds client commands for listing a VNF resources * Revert "Creates details API to fetch VNF detials" * Creates details API to fetch VNF detials 0.6.0 ----- * Adds audit support in client * Remove '--config' option when create/update a vim * Updated from global requirements * Add "Description" parameter while creating VNF with CLI * VNF scaling: CLI and python client * VNFD legacy template deprecation warning * Updated from global requirements 0.5.0 ----- * Add domain information into auth cred * Unified formats of log\_xxx function * Make VNFD/VNF/VIM Name Mandatory in Tacker CLI * Remove the mask password logic in vim list and vim show * Remove discover from test-requirements * Transition default VIM to API and DB operation * Remove unused import library * remove unused LOG * Updated from global requirements * Add .idea/ to python-tackerclient .gitignore * Fix ext-show command error * Updated from global requirements * Updated from global requirements 0.4.0 ----- * Change the initial letter to capitals in Tacker CLI help texts * Updated from global requirements * Add client side support for error\_reason * Change bind\_port to 9890 * Add reno support to python-tackerclient * Updated from global requirements * Updated from global requirements * Fix incompatible code with python3 for tox pep8 test * Capitalize help descriptions * Updated from global requirements * Updated from global requirements * Display Health status of VIM * Updated from global requirements * Updated from global requirements * py3.x: Use six.iteritems for Iterable dict items * Fix deprecation warning in tackerclient * Do not display password in VIM command outputs * Clean device from help message * Updated from global requirements * Updated from global requirements * Clean unused code * Cleanup copyright header * Updated from global requirements * Cleanup vim settings 0.3.0 ----- * Fix attr for vim update * Implement client support for multisite VIM * Cleanup Oslo Incubator code * Help message correction * Clean device related unused path * Clean up references to neutron * Code clean-up in client.py * Updated from global requirements * Removing "device" CLI from master branch * Fix summary and author in setup.cfg * Updated from global requirements * Updated from global requirements * Clean up flake8 ignore list * Code correction while calling base class method * Updated from global requirements * Remove argparse from requirements * Renamed 'servicevm' in python-tackerclient * Fix H238 errors in tackerclient code * use keystoneclient exceptions instead of oslo-incubator code * Fix H405 errors in tackerclient code * Put py34 first in the env order of tox * Fix H105 errors in tackerclient code * Fix E265, E129, E113 errors in tackerclient code * Updated from global requirements * Drop py33 support * Remove service instance related stuff and fix unittests * Deprecated tox -downloadcache option removed * Updated from global requirements * Remove py26 support * Update requirements according to global requirements * Added command to display VNFD's template * delete interface attach/detach commands 0.2.0 ----- * Fix for vnfd-delete with vnfd-name as arg * Escape yaml string to configure VDU * Update .gitreview for new namespace * Limit description length in vnfd-list * Implement Tacker MANO API client changes * Change ignore-errors to ignore\_errors * Fix drop of config values supplied by end user * Added validation for vnfd-create * Parameterization support added for VNFD templates * Support Tacker client in 
master branch * The vnf-create and vnfd-create output is hard to read * Add instructions to tackerclient * Add --vnfd-name option to vnf-create command * Clean up list output for device, vnf and vnfd * call \_super method for \_XtachInterface parent class * update\_vnf: support config as yaml file * vnfd: name and description was not supported * device, vnf: name column support * vnf-create: key error by tenant\_id * tackerclient: api for vnfd, vnd * vnfd, vnf: command options * teach vnf-create config * add vnf related command * add vnfd commends * remove commands for service * command: device-template and device * Revert "service instance: drop command related to service instance" * XML\_NS\_V20 -> XML\_NS\_V10 * catch up neutronclient change * use servicevm as service type for openstack client * rename --kwargs to --attributes * attach/detach interface command * service instance: drop command related to service instance * correct repo in .gitreview * Work toward Python 3.4 support and testing * implement servicevm related command * rename neutron to tacker * rename neutron to tacker * remove unnecessary neutronclient files * remove unnecessary neutron file under neutronclient/tests * setup logger name of NeutronCommand automatically * import python-neutronclient of 1bce6e437e2dd1fa5de6fc2ccdd0ee8ac3f44d18 * Added .gitreview ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/HACKING.rst0000664000175000017500000000227100000000000017324 0ustar00zuulzuul00000000000000Tacker Style Commandments ========================= - Step 1: Read the OpenStack Style Commandments https://docs.openstack.org/hacking/latest - Step 2: Read on Running Tests ------------- The testing system is based on a combination of tox and stestr. The canonical approach to running tests is to simply run the command ``tox``. This will create virtual environments, populate them with dependencies and run all of the tests that OpenStack CI systems run. Behind the scenes, tox is running ``stestr run``, but is set up such that you can supply any additional stestr arguments that are needed to tox. For example, you can run: ``tox -- --analyze-isolation`` to cause tox to tell stestr to add --analyze-isolation to its argument list. It is also possible to run the tests inside of a virtual environment you have created, or it is possible that you have all of the dependencies installed locally already. In this case, you can interact with the stestr command directly. Running ``stestr run`` will run the entire test suite. ``stestr run --concurrency=1`` will run tests serially (by default, stestr runs tests in parallel). More information about stestr can be found at: http://stestr.readthedocs.io/ ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/LICENSE0000664000175000017500000002363700000000000016544 0ustar00zuulzuul00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. 
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. 
This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 
python-tackerclient-2.1.0/MANIFEST.in
include tox.ini
include LICENSE README.rst HACKING.rst
include AUTHORS
include ChangeLog
include tools/*
recursive-include tests *

python-tackerclient-2.1.0/PKG-INFO
Metadata-Version: 1.2
Name: python-tackerclient
Version: 2.1.0
Summary: CLI and Client Library for OpenStack Tacker
Home-page: https://docs.openstack.org/python-tackerclient/
Author: OpenStack
Author-email: openstack-discuss@lists.openstack.org
License: UNKNOWN
Description: CLI and Client Library for OpenStack Tacker
Platform: UNKNOWN
Classifier: Environment :: OpenStack
Classifier: Intended Audience :: Developers
Classifier: Intended Audience :: Information Technology
Classifier: Intended Audience :: System Administrators
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Operating System :: POSIX :: Linux
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Requires-Python: >=3.6

python-tackerclient-2.1.0/README.rst
========================
Team and repository tags
========================

.. image:: https://governance.openstack.org/tc/badges/python-tackerclient.svg
    :target: https://governance.openstack.org/tc/reference/tags/index.html

.. Change things from this point on

NFV Orchestration (Tacker) Client
=================================

CLI and Client Library for OpenStack Tacker

Installation
============

**Note:** The paths used for configuration files in these steps refer to Ubuntu;
they may differ on other operating systems. Where ``<branch_name>`` appears in
the commands below, specify it as ``stable/<branch_name>`` for a stable branch
installation, for example ``stable/queens`` or ``stable/pike``. If unspecified,
the default is the master branch.

Using python install
--------------------

Clone python-tackerclient repository.

::

    $ cd ~/
    $ git clone https://github.com/openstack/python-tackerclient -b <branch_name>

Install python-tackerclient.
::

    $ cd python-tackerclient
    $ sudo python setup.py install

Using pip
---------

You can also install the latest version by using ``pip`` command:

::

    $ pip install python-tackerclient

Or, if it is needed to install ``python-tackerclient`` from master branch, type

::

    $ pip install git+https://github.com/openstack/python-tackerclient.git

More Information
================

* Python-tackerclient documentation: https://docs.openstack.org/python-tackerclient/latest/
* Tacker Documentation: https://docs.openstack.org/tacker/latest/
* Tacker Wiki: https://wiki.openstack.org/wiki/Tacker
* Release Notes: https://docs.openstack.org/releasenotes/python-tackerclient

python-tackerclient-2.1.0/doc/requirements.txt
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
# These are needed for docs generation
sphinx>=2.0.0,!=2.1.0 # BSD
openstackdocstheme>=2.2.1 # Apache-2.0
reno>=3.1.0 # Apache-2.0

python-tackerclient-2.1.0/doc/source/cli/commands.rst
..
   Licensed under the Apache License, Version 2.0 (the "License"); you may
   not use this file except in compliance with the License. You may obtain
   a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
   WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
   License for the specific language governing permissions and limitations
   under the License.

=============
 Command List
=============

The following list covers the extended commands for Tacker services available
in the **openstack** command. These commands can be listed with
**openstack help**, and the details of an individual command can be shown with
**openstack help <command>**.

.. code-block:: console

   [legacy]
     openstack vim list                            List VIM(s) that belong to a given tenant.
     openstack vim register                        Create a VIM.
     openstack vim show                            Show information of a given VIM.
     openstack vim set                             Update a given VIM.
     openstack vim delete                          Delete given VIM(s).

   [v1] --os-tacker-api-version 1
     openstack vnf package create                  Create a new individual VNF package resource.
     openstack vnf package delete                  Delete given VNF package(s).
     openstack vnf package list                    List all VNF packages.
     openstack vnf package show                    Show package details.
     openstack vnf package upload                  Upload a VNF package.
     openstack vnf package download                Download a VNF package.
     openstack vnf package artifact download       Download a VNF package artifact.
     openstack vnf package update                  Update a state of a VNF package.
     openstack vnflcm create                       Create a new VNF instance resource.
     openstack vnflcm instantiate                  Instantiate a VNF instance.
     openstack vnflcm list                         List VNF instance.
     openstack vnflcm show                         Show VNF instance.
     openstack vnflcm terminate                    Terminate a VNF instance.
     openstack vnflcm delete                       Delete a VNF instance resource.
     openstack vnflcm heal                         Heal a VNF instance.
     openstack vnflcm update                       Update information of a VNF instance.
     openstack vnflcm scale                        Scale a VNF instance.
     openstack vnflcm change-ext-conn              Change external VNF connectivity.
     openstack vnflcm op rollback                  Rollback a VNF LCM operation occurrence.
     openstack vnflcm op retry                     Retry a VNF LCM operation occurrence.
     openstack vnflcm op fail                      Fail a VNF LCM operation occurrence.
     openstack vnflcm op list                      List VNF LCM operation occurrence.
     openstack vnflcm op show                      Show VNF LCM operation occurrence.
     openstack vnflcm op cancel                    Cancel a VNF LCM operation occurrence.
     openstack vnflcm versions                     Show VNF LCM API versions.
     openstack vnflcm subsc create                 Create new subscription.
     openstack vnflcm subsc delete                 Delete subscription.
     openstack vnflcm subsc list                   List subscription.
     openstack vnflcm subsc show                   Show subscription.

   [v2] --os-tacker-api-version 2
     openstack vnflcm create                       Create a new VNF instance resource.
     openstack vnflcm instantiate                  Instantiate a VNF instance.
     openstack vnflcm list                         List VNF instance.
     openstack vnflcm show                         Show VNF instance.
     openstack vnflcm terminate                    Terminate a VNF instance.
     openstack vnflcm delete                       Delete a VNF instance resource.
     openstack vnflcm heal                         Heal a VNF instance.
     openstack vnflcm update                       Update information of a VNF instance.
     openstack vnflcm scale                        Scale a VNF instance.
     openstack vnflcm change-ext-conn              Change external VNF connectivity.
     openstack vnflcm change-vnfpkg                Change current VNF package.
     openstack vnflcm op rollback                  Rollback a VNF LCM operation occurrence.
     openstack vnflcm op retry                     Retry a VNF LCM operation occurrence.
     openstack vnflcm op fail                      Fail a VNF LCM operation occurrence.
     openstack vnflcm op list                      List VNF LCM operation occurrence.
     openstack vnflcm op show                      Show VNF LCM operation occurrence.
     openstack vnflcm versions                     Show VNF LCM API versions.
     openstack vnflcm subsc create                 Create new subscription.
     openstack vnflcm subsc delete                 Delete subscription.
     openstack vnflcm subsc list                   List subscription.
     openstack vnflcm subsc show                   Show subscription.
     openstack vnffm alarm list                    List alarm.
     openstack vnffm alarm show                    Show alarm.
     openstack vnffm alarm update                  Update alarm.
     openstack vnffm sub create                    Create FM subscription.
     openstack vnffm sub list                      List FM subscription.
     openstack vnffm sub show                      Show FM subscription.
     openstack vnffm sub delete                    Delete FM subscription.
     openstack vnfpm job create                    Create PM job.
     openstack vnfpm job list                      List PM job.
     openstack vnfpm job show                      Show PM job.
     openstack vnfpm job update                    Update PM job.
     openstack vnfpm job delete                    Delete PM job.
     openstack vnfpm report show                   Show PM report.
     openstack vnfpm threshold create              Create PM threshold.
     openstack vnfpm threshold list                List PM threshold.
     openstack vnfpm threshold show                Show PM threshold.
     openstack vnfpm threshold update              Update PM threshold.
     openstack vnfpm threshold delete              Delete PM threshold.
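
As a quick, hypothetical walkthrough (not output from a real deployment), the
following session sketches how a few of these commands fit together: on-board a
VNF package, create and instantiate a VNF instance, then call the version 2 API.
The package id, VNFD id, VNF instance id, file names and the parameter-file
argument below are illustrative placeholders; confirm the exact options with
**openstack help <command>** on your installed client.

.. code-block:: console

   # Create an empty VNF package resource, then upload a CSAR into it.
   $ openstack vnf package create
   $ openstack vnf package upload --path ./sample_csar.zip \
       6e6b7a6d-0ebe-4085-96c2-b34269d837f9

   # Create a VNF instance from the on-boarded VNFD and instantiate it
   # using an instantiation request file.
   $ openstack vnflcm create b1bb0ce7-ebca-4fa7-95ed-4840d70a1177
   $ openstack vnflcm instantiate 725f625e-f6b7-4bcd-b1b7-7184039fde45 \
       ./instantiate_param.json

   # The same client can target the version 2 VNF LCM API.
   $ openstack vnflcm list --os-tacker-api-version 2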
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/doc/source/cli/index.rst0000664000175000017500000000166300000000000022207 0ustar00zuulzuul00000000000000.. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ========= CLI Usage ========= Command List ------------ .. toctree:: commands Operations for ETSI NFV-SOL implementation ------------------------------------------ .. toctree:: vnf_package_commands vnflcm_commands vnffm_commands vnfpm_commands Operations for Legacy implementation ------------------------------------ .. toctree:: vim_commands ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/doc/source/cli/vim_commands.rst0000664000175000017500000000134600000000000023552 0ustar00zuulzuul00000000000000.. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ======================= VIM Management commands ======================= .. autoprogram-cliff:: openstack.tackerclient.v1 :command: vim * ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/doc/source/cli/vnf_package_commands.rst0000664000175000017500000000165500000000000025226 0ustar00zuulzuul00000000000000.. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==================== VNF Package commands ==================== VNF Package commands are CLI interface of VNF Package Management Interface in `ETSI NFV-SOL 005 `_. .. autoprogram-cliff:: openstack.tackerclient.v1 :command: vnf package * ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/doc/source/cli/vnffm_commands.rst0000664000175000017500000000226700000000000024076 0ustar00zuulzuul00000000000000.. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =============== VNF FM commands =============== VNF FM commands are CLI interface of VNF Fault Management interface in `ETSI NFV-SOL 002 `_ and `ETSI NFV-SOL 003 `_. .. note:: Commands only support calling version 2 vnffm APIs. You can use the commands with **\-\-os-tacker-api-version 2** to call version 2 vnffm APIs. .. autoprogram-cliff:: openstack.tackerclient.v2 :command: vnffm * ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/doc/source/cli/vnflcm_commands.rst0000664000175000017500000000276400000000000024251 0ustar00zuulzuul00000000000000.. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ================ VNF Lcm commands ================ VNF LCM commands are CLI interface of VNF Lifecycle Management Interface in `ETSI NFV-SOL 002 `_ and `ETSI NFV-SOL 003 `_. .. note:: Commands call version 1 vnflcm APIs by default. You can call the specific version of vnflcm APIs by using the option **\-\-os-tacker-api-version**. Commands with **\-\-os-tacker-api-version 2** call version 2 vnflcm APIs. **vnflcm op cancel** is included in only version 1 vnflcm APIs and **change-vnfpkg** is included in only version 2 vnflcm APIs. .. autoprogram-cliff:: openstack.tackerclient.v1 :command: vnflcm * .. autoprogram-cliff:: openstack.tackerclient.v2 :command: vnflcm change-vnfpkg ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/doc/source/cli/vnfpm_commands.rst0000664000175000017500000000227500000000000024107 0ustar00zuulzuul00000000000000.. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =============== VNF PM commands =============== VNF PM commands are CLI interface of VNF Performance Management interface in `ETSI NFV-SOL 002 `_ and `ETSI NFV-SOL 003 `_. .. note:: Commands only support calling version 2 vnfpm APIs. You can use the commands with **\-\-os-tacker-api-version 2** to call version 2 vnfpm APIs. .. 
autoprogram-cliff:: openstack.tackerclient.v2 :command: vnfpm * ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/doc/source/conf.py0000664000175000017500000000555100000000000021076 0ustar00zuulzuul00000000000000# -*- coding: utf-8 -*- # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # python-tackerclient documentation build configuration file import os import sys # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # sys.path.append(os.path.abspath('.')) sys.path.insert(0, os.path.abspath('../..')) # -- General configuration ---------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ 'sphinx.ext.autodoc', 'reno.sphinxext', 'openstackdocstheme', 'cliff.sphinxext', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The master toctree document. master_doc = 'index' # General information about the project. copyright = 'OpenStack Contributors' # If true, '()' will be appended to :func: etc. cross-reference text. add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). add_module_names = True # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'native' # -- Options for HTML output -------------------------------------------------- # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. html_theme = 'openstackdocs' # Output file base name for HTML help builder. htmlhelp_basename = 'tackerclientdoc' # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. 
# -- Options for manual page output ------------------------------------------- man_pages = [ ('cli/index', 'tacker', 'Client for Tacker API', ['OpenStack Contributors'], 1), ] # -- Options for openstackdocstheme ------------------------------------------- openstackdocs_repo_name = 'openstack/python-tackerclient' openstackdocs_bug_project = 'python-tackerclient' openstackdocs_bug_tag = 'doc' # -- Options for cliff.sphinxext plugin --------------------------------------- autoprogram_cliff_application = 'openstack' ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7226198 python-tackerclient-2.1.0/doc/source/contributor/0000775000175000017500000000000000000000000022143 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/doc/source/contributor/contributing.rst0000664000175000017500000000122500000000000025404 0ustar00zuulzuul00000000000000.. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ============ Contributing ============ .. include:: ../../../CONTRIBUTING.rst ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/doc/source/contributor/developing.rst0000664000175000017500000000301700000000000025032 0ustar00zuulzuul00000000000000.. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =================================== Developing with Python-TackerClient =================================== Project Info ============ * **Free software:** under the `Apache license `_ * **Tacker Service:** https://opendev.org/openstack/tacker * **Tacker Client Library:** https://opendev.org/openstack/python-tackerclient * **Tacker Service Bugs:** https://bugs.launchpad.net/tacker * **Client Bugs:** https://bugs.launchpad.net/python-tackerclient * **Blueprints:** https://blueprints.launchpad.net/tacker Meetings ======== For details please refer to the `OpenStack IRC meetings`_ page. .. _`OpenStack IRC meetings`: http://eavesdrop.openstack.org/#Tacker_(NFV_Orchestrator_and_VNF_Manager)_Team_Meeting Testing ======= For details please refer to the `Developing with OpenStackClient`_ page. .. _`Developing with OpenStackClient`: https://docs.openstack.org/python-openstackclient/latest/contributor/developing.html././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/doc/source/contributor/index.rst0000664000175000017500000000154100000000000024005 0ustar00zuulzuul00000000000000.. 
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ================= Contributor Guide ================= In the Contributor Guide, you will find information on tackerclient's lower level programming details or APIs as well as the transition to OpenStack client. .. toctree:: :maxdepth: 2 contributing.rst developing.rst ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/doc/source/index.rst0000664000175000017500000000231200000000000021430 0ustar00zuulzuul00000000000000.. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ================================= Python-TackerClient Documentation ================================= Python-TackerClient is a client for OpenStack NFV MANO (Tacker) API. It provides :doc:`Python API bindings ` (the tackerclient module) and :doc:`command-line interface (CLI) `. Contents -------- .. toctree:: :maxdepth: 2 install/index cli/index contributor/index reference/index Release Notes ------------- .. toctree:: :maxdepth: 1 Release Notes Indices and Tables ------------------ * :ref:`genindex` * :ref:`search` ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7226198 python-tackerclient-2.1.0/doc/source/install/0000775000175000017500000000000000000000000021237 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/doc/source/install/index.rst0000664000175000017500000000315100000000000023100 0ustar00zuulzuul00000000000000.. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ============ Installation ============ This document describes how to install python-tackerclient. .. note:: This installation guide contents are specific to Ubuntu distro. Using python install ==================== #. Clone python-tackerclient repository. You can use -b for specific release, optionally. .. code-block:: console $ cd ~/ $ git clone https://opendev.org/openstack/python-tackerclient -b .. 
note:: Make sure to replace the ```` in command example with specific branch name, such as ``stable/victoria``. #. Install python-tackerclient. .. code-block:: console $ cd python-tackerclient $ sudo python3 setup.py install Using pip ========= You can also install the latest version by using ``pip`` command: .. code-block:: console $ pip3 install python-tackerclient Or, if it is needed to install ``python-tackerclient`` from master branch, type .. code-block:: console $ pip3 install git+https://opendev.org/openstack/python-tackerclient ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7226198 python-tackerclient-2.1.0/doc/source/reference/0000775000175000017500000000000000000000000021527 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/doc/source/reference/index.rst0000664000175000017500000000140000000000000023363 0ustar00zuulzuul00000000000000.. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ========= Reference ========= - `Tacker API reference `_ - `Tacker CLI reference `_././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7266197 python-tackerclient-2.1.0/python_tackerclient.egg-info/0000775000175000017500000000000000000000000023267 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864642.0 python-tackerclient-2.1.0/python_tackerclient.egg-info/PKG-INFO0000664000175000017500000000215400000000000024366 0ustar00zuulzuul00000000000000Metadata-Version: 1.2 Name: python-tackerclient Version: 2.1.0 Summary: CLI and Client Library for OpenStack Tacker Home-page: https://docs.openstack.org/python-tackerclient/ Author: OpenStack Author-email: openstack-discuss@lists.openstack.org License: UNKNOWN Description: CLI and Client Library for OpenStack Tacker Platform: UNKNOWN Classifier: Environment :: OpenStack Classifier: Intended Audience :: Developers Classifier: Intended Audience :: Information Technology Classifier: Intended Audience :: System Administrators Classifier: License :: OSI Approved :: Apache Software License Classifier: Operating System :: POSIX :: Linux Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: 3 :: Only Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: 3.8 Classifier: Programming Language :: Python :: 3.9 Classifier: Programming Language :: Python :: 3.10 Requires-Python: >=3.6 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864642.0 python-tackerclient-2.1.0/python_tackerclient.egg-info/SOURCES.txt0000664000175000017500000002263200000000000025160 0ustar00zuulzuul00000000000000.coveragerc .pylintrc .stestr.conf 
.zuul.yaml AUTHORS CONTRIBUTING.rst ChangeLog HACKING.rst LICENSE MANIFEST.in README.rst requirements.txt setup.cfg setup.py test-requirements.txt tox.ini doc/requirements.txt doc/source/conf.py doc/source/index.rst doc/source/cli/commands.rst doc/source/cli/index.rst doc/source/cli/vim_commands.rst doc/source/cli/vnf_package_commands.rst doc/source/cli/vnffm_commands.rst doc/source/cli/vnflcm_commands.rst doc/source/cli/vnfpm_commands.rst doc/source/contributor/contributing.rst doc/source/contributor/developing.rst doc/source/contributor/index.rst doc/source/install/index.rst doc/source/reference/index.rst python_tackerclient.egg-info/PKG-INFO python_tackerclient.egg-info/SOURCES.txt python_tackerclient.egg-info/dependency_links.txt python_tackerclient.egg-info/entry_points.txt python_tackerclient.egg-info/not-zip-safe python_tackerclient.egg-info/pbr.json python_tackerclient.egg-info/requires.txt python_tackerclient.egg-info/top_level.txt releasenotes/notes/add-creating-ns-vnffg-from-template-213eee7f1820aa0c.yaml releasenotes/notes/add-vnffg-and-vnf-ids-to-ns-list-commands-9d462efc103f8ecb.yaml releasenotes/notes/bug-1750865-04c3ebd0c3f8af29.yaml releasenotes/notes/bug-1754556-53268d3081fa18d1.yaml releasenotes/notes/bug-1754793-54446bcd0a4e84aa.yaml releasenotes/notes/bug-1754926-06ac4d7ffd17b5ce.yaml releasenotes/notes/clustering-service-cli-e15cc6627de293fa.yaml releasenotes/notes/del-project_and_user_id-e9dd396f83a162d6.yaml releasenotes/notes/deprecate-direct-yaml-cli-input-812564bab1b99b4b.yaml releasenotes/notes/deprecate-infra-mgmt-driver-attributes-e371624c50accee8.yaml releasenotes/notes/deprecate-legacy-apis-excluding-vim-debaa69507f73179.yaml releasenotes/notes/deprecated-tacker-command-29121558bd748082.yaml releasenotes/notes/drop-py-2-7-b2052825c4b92b52.yaml releasenotes/notes/multi-delete-support-in-tacker-acd4a7e86114f0be.yaml releasenotes/notes/network-services-descriptor-06f6abe90adb40f3.yaml releasenotes/notes/new-commmand-vnf-resource-list-d5422ab917f0892f.yaml releasenotes/notes/obsolete-legacy-apis-excluding-vim-43d8dd73c3768fbb.yaml releasenotes/notes/remove-passing-mgmt-and-infra-driver-from-client-c9135f84480b2cae.yaml releasenotes/notes/tacker-support-python-openstackclient-b88b20b80b872229.yaml releasenotes/notes/update-vim-without-config-c3b637741889eff6.yaml releasenotes/notes/vnf-inline-template-25f6a0b66f7407a1.yaml releasenotes/notes/vnfd-vnf-vim-name-mandatory-in-tacker-cli-dfe802af6de5c80e.yaml releasenotes/notes/vnffg-client-abd7d7f06860b91d.yaml releasenotes/source/2023.1.rst releasenotes/source/2023.2.rst releasenotes/source/2024.1.rst releasenotes/source/conf.py releasenotes/source/index.rst releasenotes/source/newton.rst releasenotes/source/ocata.rst releasenotes/source/pike.rst releasenotes/source/queens.rst releasenotes/source/rocky.rst releasenotes/source/stein.rst releasenotes/source/train.rst releasenotes/source/unreleased.rst releasenotes/source/ussuri.rst releasenotes/source/victoria.rst releasenotes/source/wallaby.rst releasenotes/source/xena.rst releasenotes/source/yoga.rst releasenotes/source/zed.rst releasenotes/source/_static/.placeholder releasenotes/source/_templates/.placeholder tackerclient/__init__.py tackerclient/client.py tackerclient/i18n.py tackerclient/shell.py tackerclient/version.py tackerclient/common/__init__.py tackerclient/common/_i18n.py tackerclient/common/clientmanager.py tackerclient/common/command.py tackerclient/common/constants.py tackerclient/common/exceptions.py tackerclient/common/extension.py 
tackerclient/common/serializer.py tackerclient/common/utils.py tackerclient/common/validators.py tackerclient/osc/__init__.py tackerclient/osc/plugin.py tackerclient/osc/sdk_utils.py tackerclient/osc/utils.py tackerclient/osc/common/__init__.py tackerclient/osc/common/vnflcm/__init__.py tackerclient/osc/common/vnflcm/vnflcm_versions.py tackerclient/osc/v1/__init__.py tackerclient/osc/v1/nfvo/__init__.py tackerclient/osc/v1/nfvo/vim.py tackerclient/osc/v1/vnflcm/__init__.py tackerclient/osc/v1/vnflcm/vnflcm.py tackerclient/osc/v1/vnflcm/vnflcm_op_occs.py tackerclient/osc/v1/vnflcm/vnflcm_subsc.py tackerclient/osc/v1/vnflcm/samples/change_ext_conn_vnf_instance_param_sample.json tackerclient/osc/v1/vnflcm/samples/create_lccn_subscription_param_sample.json tackerclient/osc/v1/vnflcm/samples/heal_vnf_instance_param_sample.json tackerclient/osc/v1/vnflcm/samples/instantiate_vnf_instance_param_sample.json tackerclient/osc/v1/vnflcm/samples/scale_vnf_instance_param_sample.json tackerclient/osc/v1/vnflcm/samples/update_vnf_instance_param_sample.json tackerclient/osc/v1/vnfpkgm/__init__.py tackerclient/osc/v1/vnfpkgm/vnf_package.py tackerclient/osc/v2/__init__.py tackerclient/osc/v2/vnffm/__init__.py tackerclient/osc/v2/vnffm/vnffm_alarm.py tackerclient/osc/v2/vnffm/vnffm_sub.py tackerclient/osc/v2/vnffm/samples/create_vnf_fm_subscription_param_sample.json tackerclient/osc/v2/vnflcm/samples/change_vnfpkg_vnf_instance_param_sample.json tackerclient/osc/v2/vnfpm/__init__.py tackerclient/osc/v2/vnfpm/vnfpm_job.py tackerclient/osc/v2/vnfpm/vnfpm_report.py tackerclient/osc/v2/vnfpm/vnfpm_threshold.py tackerclient/osc/v2/vnfpm/samples/create_vnf_pm_job_param_sample.json tackerclient/osc/v2/vnfpm/samples/create_vnf_pm_threshold_param_sample.json tackerclient/osc/v2/vnfpm/samples/update_vnf_pm_job_param_sample.json tackerclient/osc/v2/vnfpm/samples/update_vnf_pm_threshold_param_sample.json tackerclient/tacker/__init__.py tackerclient/tacker/client.py tackerclient/tacker/v1_0/__init__.py tackerclient/tacker/v1_0/nfvo/__init__.py tackerclient/tacker/v1_0/nfvo/vim.py tackerclient/tacker/v1_0/nfvo/vim_utils.py tackerclient/tests/__init__.py tackerclient/tests/unit/__init__.py tackerclient/tests/unit/test_auth.py tackerclient/tests/unit/test_casual_args.py tackerclient/tests/unit/test_cli10.py tackerclient/tests/unit/test_command_meta.py tackerclient/tests/unit/test_http.py tackerclient/tests/unit/test_shell.py tackerclient/tests/unit/test_ssl.py tackerclient/tests/unit/test_utils.py tackerclient/tests/unit/test_validators.py tackerclient/tests/unit/osc/__init__.py tackerclient/tests/unit/osc/base.py tackerclient/tests/unit/osc/common/__init__.py tackerclient/tests/unit/osc/common/test_vnflcm_versions.py tackerclient/tests/unit/osc/v1/__init__.py tackerclient/tests/unit/osc/v1/test_vnf_package.py tackerclient/tests/unit/osc/v1/test_vnflcm.py tackerclient/tests/unit/osc/v1/test_vnflcm_op_occs.py tackerclient/tests/unit/osc/v1/test_vnflcm_subsc.py tackerclient/tests/unit/osc/v1/vnf_package_fakes.py tackerclient/tests/unit/osc/v1/vnflcm_fakes.py tackerclient/tests/unit/osc/v1/vnflcm_op_occs_fakes.py tackerclient/tests/unit/osc/v1/vnflcm_subsc_fakes.py tackerclient/tests/unit/osc/v1/fixture_data/__init__.py tackerclient/tests/unit/osc/v1/fixture_data/client.py tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package/Definitions/etsi_nfv_sol001_common_types.yaml tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package/Definitions/etsi_nfv_sol001_vnfd_types.yaml 
tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package/Definitions/helloworld3_df_simple.yaml tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package/Definitions/helloworld3_top.vnfd.yaml tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package/Definitions/helloworld3_types.yaml tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package/TOSCA-Metadata/TOSCA.meta tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/Definitions/etsi_nfv_sol001_common_types.yaml tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/Definitions/etsi_nfv_sol001_vnfd_types.yaml tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/Definitions/helloworld3_df_simple.yaml tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/Definitions/helloworld3_top.vnfd.yaml tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/Definitions/helloworld3_types.yaml tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/Scripts/install.sh tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/TOSCA-Metadata/TOSCA.meta tackerclient/tests/unit/osc/v2/__init__.py tackerclient/tests/unit/osc/v2/test_vnffm_alarm.py tackerclient/tests/unit/osc/v2/test_vnffm_sub.py tackerclient/tests/unit/osc/v2/test_vnflcm.py tackerclient/tests/unit/osc/v2/test_vnfpm_job.py tackerclient/tests/unit/osc/v2/test_vnfpm_report.py tackerclient/tests/unit/osc/v2/test_vnfpm_threshold.py tackerclient/tests/unit/osc/v2/vnffm_alarm_fakes.py tackerclient/tests/unit/osc/v2/vnffm_sub_fakes.py tackerclient/tests/unit/osc/v2/vnfpm_job_fakes.py tackerclient/tests/unit/osc/v2/vnfpm_report_fakes.py tackerclient/tests/unit/osc/v2/vnfpm_threshold_fakes.py tackerclient/tests/unit/vm/__init__.py tackerclient/tests/unit/vm/test_cli10_vim.py tackerclient/tests/unit/vm/test_cli10_vim_k8s.py tackerclient/tests/unit/vm/test_cli10_vim_k8s_with_bearer_token.py tackerclient/tests/unit/vm/test_vim_utils.py tackerclient/tests/unit/vm/samples/vim_config.yaml tackerclient/tests/unit/vm/samples/vim_config_with_false_cert_verify.yaml tackerclient/tests/unit/vm/samples/vim_config_without_auth_url.yaml tackerclient/tests/unit/vm/samples/vim_k8s_bearer_token.yaml tackerclient/tests/unit/vm/samples/vim_k8s_bearer_token_without_auth_url.yaml tackerclient/tests/unit/vm/samples/vim_k8s_config.yaml tackerclient/tests/unit/vm/samples/vim_k8s_config_without_auth_url.yaml tackerclient/v1_0/__init__.py tackerclient/v1_0/client.py tools/tacker.bash_completion././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864642.0 python-tackerclient-2.1.0/python_tackerclient.egg-info/dependency_links.txt0000664000175000017500000000000100000000000027335 0ustar00zuulzuul00000000000000 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864642.0 python-tackerclient-2.1.0/python_tackerclient.egg-info/entry_points.txt0000664000175000017500000001245500000000000026574 0ustar00zuulzuul00000000000000[console_scripts] tacker = tackerclient.shell:main [openstack.cli.extension] tackerclient = tackerclient.osc.plugin [openstack.tackerclient.v1] vim_delete = tackerclient.osc.v1.nfvo.vim:DeleteVIM vim_list = tackerclient.osc.v1.nfvo.vim:ListVIM vim_register = tackerclient.osc.v1.nfvo.vim:CreateVIM vim_set = tackerclient.osc.v1.nfvo.vim:UpdateVIM vim_show = tackerclient.osc.v1.nfvo.vim:ShowVIM vnf_package_artifact_download = 
tackerclient.osc.v1.vnfpkgm.vnf_package:DownloadVnfPackageArtifact vnf_package_create = tackerclient.osc.v1.vnfpkgm.vnf_package:CreateVnfPackage vnf_package_delete = tackerclient.osc.v1.vnfpkgm.vnf_package:DeleteVnfPackage vnf_package_download = tackerclient.osc.v1.vnfpkgm.vnf_package:DownloadVnfPackage vnf_package_list = tackerclient.osc.v1.vnfpkgm.vnf_package:ListVnfPackage vnf_package_show = tackerclient.osc.v1.vnfpkgm.vnf_package:ShowVnfPackage vnf_package_update = tackerclient.osc.v1.vnfpkgm.vnf_package:UpdateVnfPackage vnf_package_upload = tackerclient.osc.v1.vnfpkgm.vnf_package:UploadVnfPackage vnflcm_change-ext-conn = tackerclient.osc.v1.vnflcm.vnflcm:ChangeExtConnVnfLcm vnflcm_create = tackerclient.osc.v1.vnflcm.vnflcm:CreateVnfLcm vnflcm_delete = tackerclient.osc.v1.vnflcm.vnflcm:DeleteVnfLcm vnflcm_heal = tackerclient.osc.v1.vnflcm.vnflcm:HealVnfLcm vnflcm_instantiate = tackerclient.osc.v1.vnflcm.vnflcm:InstantiateVnfLcm vnflcm_list = tackerclient.osc.v1.vnflcm.vnflcm:ListVnfLcm vnflcm_op_cancel = tackerclient.osc.v1.vnflcm.vnflcm_op_occs:CancelVnfLcmOp vnflcm_op_fail = tackerclient.osc.v1.vnflcm.vnflcm_op_occs:FailVnfLcmOp vnflcm_op_list = tackerclient.osc.v1.vnflcm.vnflcm_op_occs:ListVnfLcmOp vnflcm_op_retry = tackerclient.osc.v1.vnflcm.vnflcm_op_occs:RetryVnfLcmOp vnflcm_op_rollback = tackerclient.osc.v1.vnflcm.vnflcm_op_occs:RollbackVnfLcmOp vnflcm_op_show = tackerclient.osc.v1.vnflcm.vnflcm_op_occs:ShowVnfLcmOp vnflcm_scale = tackerclient.osc.v1.vnflcm.vnflcm:ScaleVnfLcm vnflcm_show = tackerclient.osc.v1.vnflcm.vnflcm:ShowVnfLcm vnflcm_subsc_create = tackerclient.osc.v1.vnflcm.vnflcm_subsc:CreateLccnSubscription vnflcm_subsc_delete = tackerclient.osc.v1.vnflcm.vnflcm_subsc:DeleteLccnSubscription vnflcm_subsc_list = tackerclient.osc.v1.vnflcm.vnflcm_subsc:ListLccnSubscription vnflcm_subsc_show = tackerclient.osc.v1.vnflcm.vnflcm_subsc:ShowLccnSubscription vnflcm_terminate = tackerclient.osc.v1.vnflcm.vnflcm:TerminateVnfLcm vnflcm_update = tackerclient.osc.v1.vnflcm.vnflcm:UpdateVnfLcm vnflcm_versions = tackerclient.osc.common.vnflcm.vnflcm_versions:VnfLcmVersions [openstack.tackerclient.v2] vnffm_alarm_list = tackerclient.osc.v2.vnffm.vnffm_alarm:ListVnfFmAlarm vnffm_alarm_show = tackerclient.osc.v2.vnffm.vnffm_alarm:ShowVnfFmAlarm vnffm_alarm_update = tackerclient.osc.v2.vnffm.vnffm_alarm:UpdateVnfFmAlarm vnffm_sub_create = tackerclient.osc.v2.vnffm.vnffm_sub:CreateVnfFmSub vnffm_sub_delete = tackerclient.osc.v2.vnffm.vnffm_sub:DeleteVnfFmSub vnffm_sub_list = tackerclient.osc.v2.vnffm.vnffm_sub:ListVnfFmSub vnffm_sub_show = tackerclient.osc.v2.vnffm.vnffm_sub:ShowVnfFmSub vnflcm_change-ext-conn = tackerclient.osc.v1.vnflcm.vnflcm:ChangeExtConnVnfLcm vnflcm_change-vnfpkg = tackerclient.osc.v1.vnflcm.vnflcm:ChangeVnfPkgVnfLcm vnflcm_create = tackerclient.osc.v1.vnflcm.vnflcm:CreateVnfLcm vnflcm_delete = tackerclient.osc.v1.vnflcm.vnflcm:DeleteVnfLcm vnflcm_heal = tackerclient.osc.v1.vnflcm.vnflcm:HealVnfLcm vnflcm_instantiate = tackerclient.osc.v1.vnflcm.vnflcm:InstantiateVnfLcm vnflcm_list = tackerclient.osc.v1.vnflcm.vnflcm:ListVnfLcm vnflcm_op_fail = tackerclient.osc.v1.vnflcm.vnflcm_op_occs:FailVnfLcmOp vnflcm_op_list = tackerclient.osc.v1.vnflcm.vnflcm_op_occs:ListVnfLcmOp vnflcm_op_retry = tackerclient.osc.v1.vnflcm.vnflcm_op_occs:RetryVnfLcmOp vnflcm_op_rollback = tackerclient.osc.v1.vnflcm.vnflcm_op_occs:RollbackVnfLcmOp vnflcm_op_show = tackerclient.osc.v1.vnflcm.vnflcm_op_occs:ShowVnfLcmOp vnflcm_scale = tackerclient.osc.v1.vnflcm.vnflcm:ScaleVnfLcm vnflcm_show = 
tackerclient.osc.v1.vnflcm.vnflcm:ShowVnfLcm vnflcm_subsc_create = tackerclient.osc.v1.vnflcm.vnflcm_subsc:CreateLccnSubscription vnflcm_subsc_delete = tackerclient.osc.v1.vnflcm.vnflcm_subsc:DeleteLccnSubscription vnflcm_subsc_list = tackerclient.osc.v1.vnflcm.vnflcm_subsc:ListLccnSubscription vnflcm_subsc_show = tackerclient.osc.v1.vnflcm.vnflcm_subsc:ShowLccnSubscription vnflcm_terminate = tackerclient.osc.v1.vnflcm.vnflcm:TerminateVnfLcm vnflcm_update = tackerclient.osc.v1.vnflcm.vnflcm:UpdateVnfLcm vnflcm_versions = tackerclient.osc.common.vnflcm.vnflcm_versions:VnfLcmVersions vnfpm_job_create = tackerclient.osc.v2.vnfpm.vnfpm_job:CreateVnfPmJob vnfpm_job_delete = tackerclient.osc.v2.vnfpm.vnfpm_job:DeleteVnfPmJob vnfpm_job_list = tackerclient.osc.v2.vnfpm.vnfpm_job:ListVnfPmJob vnfpm_job_show = tackerclient.osc.v2.vnfpm.vnfpm_job:ShowVnfPmJob vnfpm_job_update = tackerclient.osc.v2.vnfpm.vnfpm_job:UpdateVnfPmJob vnfpm_report_show = tackerclient.osc.v2.vnfpm.vnfpm_report:ShowVnfPmReport vnfpm_threshold_create = tackerclient.osc.v2.vnfpm.vnfpm_threshold:CreateVnfPmThreshold vnfpm_threshold_delete = tackerclient.osc.v2.vnfpm.vnfpm_threshold:DeleteVnfPmThreshold vnfpm_threshold_list = tackerclient.osc.v2.vnfpm.vnfpm_threshold:ListVnfPmThreshold vnfpm_threshold_show = tackerclient.osc.v2.vnfpm.vnfpm_threshold:ShowVnfPmThreshold vnfpm_threshold_update = tackerclient.osc.v2.vnfpm.vnfpm_threshold:UpdateVnfPmThreshold ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864642.0 python-tackerclient-2.1.0/python_tackerclient.egg-info/not-zip-safe0000664000175000017500000000000100000000000025515 0ustar00zuulzuul00000000000000 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864642.0 python-tackerclient-2.1.0/python_tackerclient.egg-info/pbr.json0000664000175000017500000000005600000000000024746 0ustar00zuulzuul00000000000000{"git_version": "940b7a5", "is_release": true}././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864642.0 python-tackerclient-2.1.0/python_tackerclient.egg-info/requires.txt0000664000175000017500000000043000000000000025664 0ustar00zuulzuul00000000000000Babel!=2.4.0,>=2.3.4 cliff!=2.9.0,>=2.8.0 iso8601>=0.1.11 netaddr>=0.7.18 osc-lib>=1.8.0 oslo.i18n>=3.15.3 oslo.log>=3.36.0 oslo.serialization!=2.19.1,>=2.18.0 oslo.utils>=3.40.0 pbr!=2.1.0,>=2.0.0 python-keystoneclient>=3.8.0 requests>=2.14.2 simplejson>=3.5.1 stevedore>=1.20.0 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864642.0 python-tackerclient-2.1.0/python_tackerclient.egg-info/top_level.txt0000664000175000017500000000001500000000000026015 0ustar00zuulzuul00000000000000tackerclient ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7146199 python-tackerclient-2.1.0/releasenotes/0000775000175000017500000000000000000000000020215 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003200000000000011450 xustar000000000000000026 mtime=1715864642.73062 python-tackerclient-2.1.0/releasenotes/notes/0000775000175000017500000000000000000000000021345 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000020600000000000011453 xustar0000000000000000112 path=python-tackerclient-2.1.0/releasenotes/notes/add-creating-ns-vnffg-from-template-213eee7f1820aa0c.yaml 22 mtime=1715864589.0 
python-tackerclient-2.1.0/releasenotes/notes/add-creating-ns-vnffg-from-template-213eee7f1820aa0c.ya0000664000175000017500000000020100000000000032766 0ustar00zuulzuul00000000000000--- features: - | Support to create directly VNFFG and NS from its descriptor template without creating VNFFGD and NSD.././@PaxHeader0000000000000000000000000000021400000000000011452 xustar0000000000000000118 path=python-tackerclient-2.1.0/releasenotes/notes/add-vnffg-and-vnf-ids-to-ns-list-commands-9d462efc103f8ecb.yaml 22 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/notes/add-vnffg-and-vnf-ids-to-ns-list-commands-9d462efc103f80000664000175000017500000000043700000000000032750 0ustar00zuulzuul00000000000000--- features: - | Add 'vnf_ids' and 'vnffg_ids' fields in outputs from network service list command. Users can know which VNFs or VNFFGs, belongs to specific NS. Add 'ns_id' field to VNFFG list command, that shows the network service the current VNFFG belongs to. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/notes/bug-1750865-04c3ebd0c3f8af29.yaml0000664000175000017500000000010500000000000026207 0ustar00zuulzuul00000000000000--- fixes: - | The VNFFGD CLI cannot show the VNFFGD template. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/notes/bug-1754556-53268d3081fa18d1.yaml0000664000175000017500000000010000000000000025705 0ustar00zuulzuul00000000000000--- fixes: - | Add documentation for python-tackerclient. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/notes/bug-1754793-54446bcd0a4e84aa.yaml0000664000175000017500000000012600000000000026133 0ustar00zuulzuul00000000000000--- fixes: - | Fix the VNFFG update osc command misusing create_vnffg function. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/notes/bug-1754926-06ac4d7ffd17b5ce.yaml0000664000175000017500000000006400000000000026302 0ustar00zuulzuul00000000000000--- fixes: - | Fix local test fail with pypy. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/notes/clustering-service-cli-e15cc6627de293fa.yaml0000664000175000017500000000012300000000000031110 0ustar00zuulzuul00000000000000--- features: - | Enable CLI to support clustering service in Tacker Server. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/notes/del-project_and_user_id-e9dd396f83a162d6.yaml0000664000175000017500000000024500000000000031230 0ustar00zuulzuul00000000000000--- features: - As user gives input of project and user name in vim_config.yaml, delete the user and project id from the vim specific commands output. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/notes/deprecate-direct-yaml-cli-input-812564bab1b99b4b.yaml0000664000175000017500000000035000000000000032506 0ustar00zuulzuul00000000000000--- deprecations: - | Direct YAML input of any kind is now deprecated. Only file based YAML input is supported. 
This deprecation is across all resources like VNFFGD template, VNFD template and VNF configuration input. ././@PaxHeader0000000000000000000000000000021100000000000011447 xustar0000000000000000115 path=python-tackerclient-2.1.0/releasenotes/notes/deprecate-infra-mgmt-driver-attributes-e371624c50accee8.yaml 22 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/notes/deprecate-infra-mgmt-driver-attributes-e371624c50accee80000664000175000017500000000020700000000000033231 0ustar00zuulzuul00000000000000--- deprecations: - infra_driver and mgmt_driver attributes in VNFD client attribute is deprecated and will be removed in Ocata. ././@PaxHeader0000000000000000000000000000020600000000000011453 xustar0000000000000000112 path=python-tackerclient-2.1.0/releasenotes/notes/deprecate-legacy-apis-excluding-vim-debaa69507f73179.yaml 22 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/notes/deprecate-legacy-apis-excluding-vim-debaa69507f73179.ya0000664000175000017500000000030500000000000033025 0ustar00zuulzuul00000000000000--- deprecations: - | Legacy APIs excluding VIM feature are deprecated and will be removed in the first major release after the Tacker server version 9.0.0 (2023.1 Antelope release). ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/notes/deprecated-tacker-command-29121558bd748082.yaml0000664000175000017500000000033200000000000031133 0ustar00zuulzuul00000000000000--- deprecations: - | tacker CLI is deprecated, will be deleted after Rocky release. Use `openstack CLI`_ instead. .. _openstack CLI: https://docs.openstack.org/python-tackerclient/latest/cli/index.html ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/notes/drop-py-2-7-b2052825c4b92b52.yaml0000664000175000017500000000034400000000000026173 0ustar00zuulzuul00000000000000--- upgrade: - | Python 2.7 support has been dropped. Last release of python-tackerclient to support python 2.7 is OpenStack Train. The minimum version of Python now supported by python-tackerclient is Python 3.6. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/notes/multi-delete-support-in-tacker-acd4a7e86114f0be.yaml0000664000175000017500000000010600000000000032554 0ustar00zuulzuul00000000000000--- features: - Add support for multi delete feature for resources. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/notes/network-services-descriptor-06f6abe90adb40f3.yaml0000664000175000017500000000034100000000000032261 0ustar00zuulzuul00000000000000--- features: - | CLIs to onboard Network Services Descriptor (NSD) based on TOSCA Simple Profile for NFV and to create Network Services using NSD to create multiple related VNFs using a single TOSCA template. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/notes/new-commmand-vnf-resource-list-d5422ab917f0892f.yaml0000664000175000017500000000015400000000000032341 0ustar00zuulzuul00000000000000--- features: - Adds new CLI command 'vnf-resource-list' to view VNF resources, such as VDU, CP, etc. 
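The deprecation note above points users from the standalone ``tacker`` command to the ``openstack`` CLI plugin. A minimal console sketch of the two invocation styles, assuming both python-tackerclient and python-openstackclient are installed and the usual ``OS_*`` authentication variables are exported; the VIM listing command is used purely as an illustration:

.. code-block:: console

    # Legacy standalone client (deprecated in favour of the openstack CLI)
    $ tacker vim-list

    # Equivalent invocation through the OpenStackClient plugin
    $ openstack vim list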
././@PaxHeader0000000000000000000000000000020500000000000011452 xustar0000000000000000111 path=python-tackerclient-2.1.0/releasenotes/notes/obsolete-legacy-apis-excluding-vim-43d8dd73c3768fbb.yaml 22 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/notes/obsolete-legacy-apis-excluding-vim-43d8dd73c3768fbb.yam0000664000175000017500000000010700000000000033143 0ustar00zuulzuul00000000000000--- upgrade: - | Legacy APIs excluding VIM feature are obsoleted.././@PaxHeader0000000000000000000000000000022300000000000011452 xustar0000000000000000125 path=python-tackerclient-2.1.0/releasenotes/notes/remove-passing-mgmt-and-infra-driver-from-client-c9135f84480b2cae.yaml 22 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/notes/remove-passing-mgmt-and-infra-driver-from-client-c9135f0000664000175000017500000000010100000000000033434 0ustar00zuulzuul00000000000000--- fixes: - Remove passing mgmt and infra driver from client. ././@PaxHeader0000000000000000000000000000021000000000000011446 xustar0000000000000000114 path=python-tackerclient-2.1.0/releasenotes/notes/tacker-support-python-openstackclient-b88b20b80b872229.yaml 22 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/notes/tacker-support-python-openstackclient-b88b20b80b872229.0000664000175000017500000000013600000000000033111 0ustar00zuulzuul00000000000000--- features: - | Add python-vnfd, vnf, nsd, ns, vnffgd, vnffg, event commands support. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/notes/update-vim-without-config-c3b637741889eff6.yaml0000664000175000017500000000014000000000000031424 0ustar00zuulzuul00000000000000--- features: - VIM can be updated without config-file argument in tacker vim-update command. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/notes/vnf-inline-template-25f6a0b66f7407a1.yaml0000664000175000017500000000012200000000000030233 0ustar00zuulzuul00000000000000--- features: - Support to create VNF with direct VNFD template input from CLI. ././@PaxHeader0000000000000000000000000000021400000000000011452 xustar0000000000000000118 path=python-tackerclient-2.1.0/releasenotes/notes/vnfd-vnf-vim-name-mandatory-in-tacker-cli-dfe802af6de5c80e.yaml 22 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/notes/vnfd-vnf-vim-name-mandatory-in-tacker-cli-dfe802af6de5c0000664000175000017500000000010300000000000033255 0ustar00zuulzuul00000000000000--- features: - Made VNFD/VNF/VIM names mandatory in tacker CLI. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/notes/vnffg-client-abd7d7f06860b91d.yaml0000664000175000017500000000006000000000000027107 0ustar00zuulzuul00000000000000--- features: - Add client support for VNFFG. ././@PaxHeader0000000000000000000000000000003200000000000011450 xustar000000000000000026 mtime=1715864642.73062 python-tackerclient-2.1.0/releasenotes/source/0000775000175000017500000000000000000000000021515 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/source/2023.1.rst0000664000175000017500000000020200000000000022766 0ustar00zuulzuul00000000000000=========================== 2023.1 Series Release Notes =========================== .. 
release-notes:: :branch: stable/2023.1 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/source/2023.2.rst0000664000175000017500000000020200000000000022767 0ustar00zuulzuul00000000000000=========================== 2023.2 Series Release Notes =========================== .. release-notes:: :branch: stable/2023.2 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/source/2024.1.rst0000664000175000017500000000020200000000000022767 0ustar00zuulzuul00000000000000=========================== 2024.1 Series Release Notes =========================== .. release-notes:: :branch: stable/2024.1 ././@PaxHeader0000000000000000000000000000003200000000000011450 xustar000000000000000026 mtime=1715864642.73062 python-tackerclient-2.1.0/releasenotes/source/_static/0000775000175000017500000000000000000000000023143 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/source/_static/.placeholder0000664000175000017500000000000000000000000025414 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003200000000000011450 xustar000000000000000026 mtime=1715864642.73062 python-tackerclient-2.1.0/releasenotes/source/_templates/0000775000175000017500000000000000000000000023652 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/source/_templates/.placeholder0000664000175000017500000000000000000000000026123 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/source/conf.py0000664000175000017500000002047000000000000023017 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # # tacker client documentation build configuration file, created by # sphinx-quickstart on Tue May 31 19:07:30 2016. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. # # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. 
They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ 'reno.sphinxext', 'openstackdocstheme', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = 'Tacker Client Release Notes' copyright = '2016, Tacker Developers' # Release notes are version independent. release = '' version = '' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # The reST default role (used for this markup: `text`) to # use for all documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'native' # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'openstackdocs' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. 
# html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'tackerclientdoc' # -- Options for LaTeX output ------------------------------------------------ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # 'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass # [howto/manual]). latex_documents = [ ('index', 'TackerClientReleaseNotes.tex', 'Tacker Client Release Notes Documentation', 'Tacker Developers', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # -- Options for manual page output ------------------------------------------ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'tackerreleasenotes', 'Tacker Client Release Notes Documentation', ['Tacker Developers'], 1) ] # If true, show URL addresses after external links. # man_show_urls = False # -- Options for Texinfo output ---------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'TackerClientReleaseNotes', 'Tacker Client Release Notes Documentation', 'Tacker Developers', 'TackerClientReleaseNotes', 'Tacker Client Project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. # texinfo_appendices = [] # If false, no module index is generated. # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. 
# texinfo_show_urls = 'footnote' # -- Options for Internationalization output ------------------------------ locale_dirs = ['locale/'] # -- Options for openstackdocstheme ------------------------------------------- openstackdocs_repo_name = 'openstack/python-tackerclient' openstackdocs_bug_project = 'python-tackerclient' openstackdocs_bug_tag = '' openstackdocs_auto_name = False ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/source/index.rst0000664000175000017500000000042200000000000023354 0ustar00zuulzuul00000000000000Python-TackerClient Release Notes ================================= Contents: .. toctree:: :maxdepth: 2 unreleased 2024.1 2023.2 2023.1 zed yoga xena wallaby victoria ussuri train stein rocky queens pike ocata newton ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/source/newton.rst0000664000175000017500000000023200000000000023556 0ustar00zuulzuul00000000000000=================================== Newton Series Release Notes =================================== .. release-notes:: :branch: origin/stable/newton ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/source/ocata.rst0000664000175000017500000000023000000000000023331 0ustar00zuulzuul00000000000000=================================== Ocata Series Release Notes =================================== .. release-notes:: :branch: origin/stable/ocata ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/source/pike.rst0000664000175000017500000000021700000000000023177 0ustar00zuulzuul00000000000000=================================== Pike Series Release Notes =================================== .. release-notes:: :branch: stable/pike ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/source/queens.rst0000664000175000017500000000022300000000000023544 0ustar00zuulzuul00000000000000=================================== Queens Series Release Notes =================================== .. release-notes:: :branch: stable/queens ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/source/rocky.rst0000664000175000017500000000022100000000000023371 0ustar00zuulzuul00000000000000=================================== Rocky Series Release Notes =================================== .. release-notes:: :branch: stable/rocky ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/source/stein.rst0000664000175000017500000000022100000000000023364 0ustar00zuulzuul00000000000000=================================== Stein Series Release Notes =================================== .. release-notes:: :branch: stable/stein ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/source/train.rst0000664000175000017500000000017600000000000023370 0ustar00zuulzuul00000000000000========================== Train Series Release Notes ========================== .. 
release-notes:: :branch: stable/train ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/source/unreleased.rst0000664000175000017500000000015300000000000024375 0ustar00zuulzuul00000000000000============================ Current Series Release Notes ============================ .. release-notes:: ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/source/ussuri.rst0000664000175000017500000000020200000000000023573 0ustar00zuulzuul00000000000000=========================== Ussuri Series Release Notes =========================== .. release-notes:: :branch: stable/ussuri ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/source/victoria.rst0000664000175000017500000000022000000000000024061 0ustar00zuulzuul00000000000000============================= Victoria Series Release Notes ============================= .. release-notes:: :branch: unmaintained/victoria ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/source/wallaby.rst0000664000175000017500000000021400000000000023677 0ustar00zuulzuul00000000000000============================ Wallaby Series Release Notes ============================ .. release-notes:: :branch: unmaintained/wallaby ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/source/xena.rst0000664000175000017500000000020000000000000023172 0ustar00zuulzuul00000000000000========================= Xena Series Release Notes ========================= .. release-notes:: :branch: unmaintained/xena ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/source/yoga.rst0000664000175000017500000000020000000000000023176 0ustar00zuulzuul00000000000000========================= Yoga Series Release Notes ========================= .. release-notes:: :branch: unmaintained/yoga ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/releasenotes/source/zed.rst0000664000175000017500000000016600000000000023034 0ustar00zuulzuul00000000000000======================== Zed Series Release Notes ======================== .. release-notes:: :branch: stable/zed ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/requirements.txt0000664000175000017500000000120400000000000021005 0ustar00zuulzuul00000000000000# The order of packages is significant, because pip processes them in the order # of appearance. Changing the order has an impact on the overall integration # process, which may cause wedges in the gate later. 
pbr!=2.1.0,>=2.0.0 # Apache-2.0 cliff!=2.9.0,>=2.8.0 # Apache-2.0 iso8601>=0.1.11 # MIT netaddr>=0.7.18 # BSD requests>=2.14.2 # Apache-2.0 python-keystoneclient>=3.8.0 # Apache-2.0 simplejson>=3.5.1 # MIT stevedore>=1.20.0 # Apache-2.0 Babel!=2.4.0,>=2.3.4 # BSD oslo.i18n>=3.15.3 # Apache-2.0 osc-lib>=1.8.0 # Apache-2.0 oslo.log>=3.36.0 # Apache-2.0 oslo.utils>=3.40.0 # Apache-2.0 oslo.serialization!=2.19.1,>=2.18.0 # Apache-2.0 ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7586198 python-tackerclient-2.1.0/setup.cfg0000664000175000017500000001453200000000000017352 0ustar00zuulzuul00000000000000[metadata] name = python-tackerclient description = CLI and Client Library for OpenStack Tacker long_description = file: README.rst author = OpenStack author_email = openstack-discuss@lists.openstack.org url = https://docs.openstack.org/python-tackerclient/ python_requires = >=3.6 classifiers = Environment :: OpenStack Intended Audience :: Developers Intended Audience :: Information Technology Intended Audience :: System Administrators License :: OSI Approved :: Apache Software License Operating System :: POSIX :: Linux Programming Language :: Python Programming Language :: Python :: Implementation :: CPython Programming Language :: Python :: 3 :: Only Programming Language :: Python :: 3 Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 [files] packages = tackerclient [entry_points] console_scripts = tacker = tackerclient.shell:main openstack.cli.extension = tackerclient = tackerclient.osc.plugin openstack.tackerclient.v1 = vim_register = tackerclient.osc.v1.nfvo.vim:CreateVIM vim_list = tackerclient.osc.v1.nfvo.vim:ListVIM vim_set = tackerclient.osc.v1.nfvo.vim:UpdateVIM vim_delete = tackerclient.osc.v1.nfvo.vim:DeleteVIM vim_show = tackerclient.osc.v1.nfvo.vim:ShowVIM vnf_package_create = tackerclient.osc.v1.vnfpkgm.vnf_package:CreateVnfPackage vnf_package_list = tackerclient.osc.v1.vnfpkgm.vnf_package:ListVnfPackage vnf_package_show = tackerclient.osc.v1.vnfpkgm.vnf_package:ShowVnfPackage vnf_package_upload = tackerclient.osc.v1.vnfpkgm.vnf_package:UploadVnfPackage vnf_package_delete = tackerclient.osc.v1.vnfpkgm.vnf_package:DeleteVnfPackage vnf_package_update = tackerclient.osc.v1.vnfpkgm.vnf_package:UpdateVnfPackage vnf_package_download = tackerclient.osc.v1.vnfpkgm.vnf_package:DownloadVnfPackage vnf_package_artifact_download = tackerclient.osc.v1.vnfpkgm.vnf_package:DownloadVnfPackageArtifact vnflcm_create = tackerclient.osc.v1.vnflcm.vnflcm:CreateVnfLcm vnflcm_show = tackerclient.osc.v1.vnflcm.vnflcm:ShowVnfLcm vnflcm_list = tackerclient.osc.v1.vnflcm.vnflcm:ListVnfLcm vnflcm_instantiate = tackerclient.osc.v1.vnflcm.vnflcm:InstantiateVnfLcm vnflcm_terminate = tackerclient.osc.v1.vnflcm.vnflcm:TerminateVnfLcm vnflcm_delete = tackerclient.osc.v1.vnflcm.vnflcm:DeleteVnfLcm vnflcm_heal = tackerclient.osc.v1.vnflcm.vnflcm:HealVnfLcm vnflcm_update = tackerclient.osc.v1.vnflcm.vnflcm:UpdateVnfLcm vnflcm_scale = tackerclient.osc.v1.vnflcm.vnflcm:ScaleVnfLcm vnflcm_change-ext-conn = tackerclient.osc.v1.vnflcm.vnflcm:ChangeExtConnVnfLcm vnflcm_op_rollback = tackerclient.osc.v1.vnflcm.vnflcm_op_occs:RollbackVnfLcmOp vnflcm_op_cancel = tackerclient.osc.v1.vnflcm.vnflcm_op_occs:CancelVnfLcmOp vnflcm_op_fail = tackerclient.osc.v1.vnflcm.vnflcm_op_occs:FailVnfLcmOp vnflcm_op_retry = 
tackerclient.osc.v1.vnflcm.vnflcm_op_occs:RetryVnfLcmOp vnflcm_op_list = tackerclient.osc.v1.vnflcm.vnflcm_op_occs:ListVnfLcmOp vnflcm_op_show = tackerclient.osc.v1.vnflcm.vnflcm_op_occs:ShowVnfLcmOp vnflcm_subsc_create = tackerclient.osc.v1.vnflcm.vnflcm_subsc:CreateLccnSubscription vnflcm_subsc_delete = tackerclient.osc.v1.vnflcm.vnflcm_subsc:DeleteLccnSubscription vnflcm_subsc_list = tackerclient.osc.v1.vnflcm.vnflcm_subsc:ListLccnSubscription vnflcm_subsc_show = tackerclient.osc.v1.vnflcm.vnflcm_subsc:ShowLccnSubscription vnflcm_versions = tackerclient.osc.common.vnflcm.vnflcm_versions:VnfLcmVersions openstack.tackerclient.v2 = vnflcm_create = tackerclient.osc.v1.vnflcm.vnflcm:CreateVnfLcm vnflcm_show = tackerclient.osc.v1.vnflcm.vnflcm:ShowVnfLcm vnflcm_list = tackerclient.osc.v1.vnflcm.vnflcm:ListVnfLcm vnflcm_instantiate = tackerclient.osc.v1.vnflcm.vnflcm:InstantiateVnfLcm vnflcm_terminate = tackerclient.osc.v1.vnflcm.vnflcm:TerminateVnfLcm vnflcm_change-vnfpkg = tackerclient.osc.v1.vnflcm.vnflcm:ChangeVnfPkgVnfLcm vnflcm_delete = tackerclient.osc.v1.vnflcm.vnflcm:DeleteVnfLcm vnflcm_heal = tackerclient.osc.v1.vnflcm.vnflcm:HealVnfLcm vnflcm_update = tackerclient.osc.v1.vnflcm.vnflcm:UpdateVnfLcm vnflcm_scale = tackerclient.osc.v1.vnflcm.vnflcm:ScaleVnfLcm vnflcm_change-ext-conn = tackerclient.osc.v1.vnflcm.vnflcm:ChangeExtConnVnfLcm vnflcm_op_rollback = tackerclient.osc.v1.vnflcm.vnflcm_op_occs:RollbackVnfLcmOp vnflcm_op_fail = tackerclient.osc.v1.vnflcm.vnflcm_op_occs:FailVnfLcmOp vnflcm_op_retry = tackerclient.osc.v1.vnflcm.vnflcm_op_occs:RetryVnfLcmOp vnflcm_op_list = tackerclient.osc.v1.vnflcm.vnflcm_op_occs:ListVnfLcmOp vnflcm_op_show = tackerclient.osc.v1.vnflcm.vnflcm_op_occs:ShowVnfLcmOp vnflcm_subsc_create = tackerclient.osc.v1.vnflcm.vnflcm_subsc:CreateLccnSubscription vnflcm_subsc_delete = tackerclient.osc.v1.vnflcm.vnflcm_subsc:DeleteLccnSubscription vnflcm_subsc_list = tackerclient.osc.v1.vnflcm.vnflcm_subsc:ListLccnSubscription vnflcm_subsc_show = tackerclient.osc.v1.vnflcm.vnflcm_subsc:ShowLccnSubscription vnflcm_versions = tackerclient.osc.common.vnflcm.vnflcm_versions:VnfLcmVersions vnfpm_job_create = tackerclient.osc.v2.vnfpm.vnfpm_job:CreateVnfPmJob vnfpm_job_list = tackerclient.osc.v2.vnfpm.vnfpm_job:ListVnfPmJob vnfpm_job_show = tackerclient.osc.v2.vnfpm.vnfpm_job:ShowVnfPmJob vnfpm_job_update = tackerclient.osc.v2.vnfpm.vnfpm_job:UpdateVnfPmJob vnfpm_job_delete = tackerclient.osc.v2.vnfpm.vnfpm_job:DeleteVnfPmJob vnfpm_report_show = tackerclient.osc.v2.vnfpm.vnfpm_report:ShowVnfPmReport vnfpm_threshold_create = tackerclient.osc.v2.vnfpm.vnfpm_threshold:CreateVnfPmThreshold vnfpm_threshold_list = tackerclient.osc.v2.vnfpm.vnfpm_threshold:ListVnfPmThreshold vnfpm_threshold_show = tackerclient.osc.v2.vnfpm.vnfpm_threshold:ShowVnfPmThreshold vnfpm_threshold_update = tackerclient.osc.v2.vnfpm.vnfpm_threshold:UpdateVnfPmThreshold vnfpm_threshold_delete = tackerclient.osc.v2.vnfpm.vnfpm_threshold:DeleteVnfPmThreshold vnffm_alarm_list = tackerclient.osc.v2.vnffm.vnffm_alarm:ListVnfFmAlarm vnffm_alarm_show = tackerclient.osc.v2.vnffm.vnffm_alarm:ShowVnfFmAlarm vnffm_alarm_update = tackerclient.osc.v2.vnffm.vnffm_alarm:UpdateVnfFmAlarm vnffm_sub_create = tackerclient.osc.v2.vnffm.vnffm_sub:CreateVnfFmSub vnffm_sub_list = tackerclient.osc.v2.vnffm.vnffm_sub:ListVnfFmSub vnffm_sub_show = tackerclient.osc.v2.vnffm.vnffm_sub:ShowVnfFmSub vnffm_sub_delete = tackerclient.osc.v2.vnffm.vnffm_sub:DeleteVnfFmSub [egg_info] tag_build = tag_date = 0 
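The ``[entry_points]`` section above is what exposes these classes through OpenStackClient: each key under ``openstack.tackerclient.v1`` and ``openstack.tackerclient.v2`` becomes a sub-command, with the underscores in the key translated to spaces on the command line. A brief console sketch of that mapping, assuming a reachable Tacker service and ``OS_*`` credentials in the environment; identifiers in angle brackets are placeholders:

.. code-block:: console

    # vim_list -> "vim list"
    $ openstack vim list

    # vnf_package_create -> "vnf package create"
    $ openstack vnf package create

    # vnflcm_op_show -> "vnflcm op show"
    $ openstack vnflcm op show <vnf-lcm-op-occ-id>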
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/setup.py0000664000175000017500000000127100000000000017237 0ustar00zuulzuul00000000000000# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import setuptools setuptools.setup( setup_requires=['pbr>=2.0.0'], pbr=True) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7346199 python-tackerclient-2.1.0/tackerclient/0000775000175000017500000000000000000000000020174 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/__init__.py0000664000175000017500000000000000000000000022273 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/client.py0000664000175000017500000003510000000000000022023 0ustar00zuulzuul00000000000000# Copyright 2012 OpenStack Foundation. # All Rights Reserved # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
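# A minimal usage sketch of the HTTPClient class defined later in this module,
# kept as comments; the endpoint values, credentials and request path below are
# placeholders chosen for illustration, not part of the original module:
#
#   from tackerclient.client import HTTPClient
#
#   client = HTTPClient(username='admin', password='devstack',
#                       tenant_name='admin',
#                       auth_url='http://controller:5000/v2.0',
#                       region_name='RegionOne')
#   # Authenticates against Keystone on first use, then issues the request
#   # against the nfv-orchestration endpoint found in the service catalog.
#   resp, body = client.do_request('/v1.0/vims.json', 'GET')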
# try: import json except ImportError: import simplejson as json import logging import os from keystoneclient import access from keystoneclient import adapter import requests from tackerclient.common import exceptions from tackerclient.common import utils from tackerclient.i18n import _ _logger = logging.getLogger(__name__) if os.environ.get('TACKERCLIENT_DEBUG'): ch = logging.StreamHandler() _logger.setLevel(logging.DEBUG) _logger.addHandler(ch) _requests_log_level = logging.DEBUG else: _requests_log_level = logging.WARNING logging.getLogger("requests").setLevel(_requests_log_level) MAX_URI_LEN = 8192 class HTTPClient(object): """Handles the REST calls and responses, include authn.""" USER_AGENT = 'python-tackerclient' CONTENT_TYPE = 'application/json' def __init__(self, username=None, user_id=None, tenant_name=None, tenant_id=None, password=None, auth_url=None, token=None, region_name=None, timeout=None, endpoint_url=None, insecure=False, endpoint_type='publicURL', auth_strategy='keystone', ca_cert=None, log_credentials=False, service_type='nfv-orchestration', **kwargs): self.username = username self.user_id = user_id self.tenant_name = tenant_name self.tenant_id = tenant_id self.password = password self.auth_url = auth_url.rstrip('/') if auth_url else None self.service_type = service_type self.endpoint_type = endpoint_type self.region_name = region_name self.timeout = timeout self.auth_token = token self.auth_tenant_id = None self.auth_user_id = None self.endpoint_url = endpoint_url self.auth_strategy = auth_strategy self.log_credentials = log_credentials if insecure: self.verify_cert = False else: self.verify_cert = ca_cert if ca_cert else True def _cs_request(self, *args, **kwargs): kargs = {} kargs.setdefault('headers', kwargs.get('headers', {})) kargs['headers']['User-Agent'] = self.USER_AGENT if 'body' in kwargs: kargs['body'] = kwargs['body'] if 'content_type' in kwargs: kargs['content_type'] = kwargs['content_type'] if self.log_credentials: log_kargs = kargs else: log_kargs = self._strip_credentials(kargs) utils.http_log_req(_logger, args, log_kargs) try: resp, body = self.request(*args, **kargs) except requests.exceptions.SSLError as e: raise exceptions.SslCertificateValidationError(reason=e) except Exception as e: # Wrap the low-level connection error (socket timeout, redirect # limit, decompression error, etc) into our custom high-level # connection exception (it is excepted in the upper layers of code) _logger.debug("throwing ConnectionFailed : %s", e) raise exceptions.ConnectionFailed(reason=e) utils.http_log_resp(_logger, resp, body) if resp.status_code == 401: raise exceptions.Unauthorized(message=body) return resp, body def _strip_credentials(self, kwargs): if kwargs.get('body') and self.password: log_kwargs = kwargs.copy() log_kwargs['body'] = kwargs['body'].replace(self.password, 'REDACTED') return log_kwargs else: return kwargs def authenticate_and_fetch_endpoint_url(self): if not self.auth_token: self.authenticate() elif not self.endpoint_url: self.endpoint_url = self._get_endpoint_url() def request(self, url, method, body=None, headers=None, **kwargs): """Request without authentication.""" content_type = kwargs.pop('content_type', None) or 'application/json' headers = headers or {} headers.setdefault('Accept', content_type) if body: headers.setdefault('Content-Type', content_type) headers['User-Agent'] = self.USER_AGENT resp = requests.request( method, url, data=body, headers=headers, verify=self.verify_cert, timeout=self.timeout, **kwargs) if 
resp.headers.get('content-type') == 'application/zip': return resp, resp.content return resp, resp.text def _check_uri_length(self, action): uri_len = len(self.endpoint_url) + len(action) if uri_len > MAX_URI_LEN: raise exceptions.RequestURITooLong( excess=uri_len - MAX_URI_LEN) def do_request(self, url, method, **kwargs): # Ensure client always has correct uri - do not guesstimate anything self.authenticate_and_fetch_endpoint_url() self._check_uri_length(url) # Perform the request once. If we get a 401 back then it # might be because the auth token expired, so try to # re-authenticate and try again. If it still fails, bail. try: kwargs.setdefault('headers', {}) kwargs.setdefault('content_type', kwargs.get('content_type')) if self.auth_token is None: self.auth_token = "" kwargs['headers']['X-Auth-Token'] = self.auth_token resp, body = self._cs_request(self.endpoint_url + url, method, **kwargs) return resp, body except exceptions.Unauthorized: self.authenticate() resp, body = self._cs_request( self.endpoint_url + url, method, **kwargs) return resp, body def _extract_service_catalog(self, body): """Set the client's service catalog from the response data.""" self.auth_ref = access.AccessInfo.factory(body=body) self.service_catalog = self.auth_ref.service_catalog self.auth_token = self.auth_ref.auth_token self.auth_tenant_id = self.auth_ref.tenant_id self.auth_user_id = self.auth_ref.user_id if not self.endpoint_url: self.endpoint_url = self.service_catalog.url_for( region_name=self.region_name, service_type=self.service_type, endpoint_type=self.endpoint_type) def _authenticate_keystone(self): if self.user_id: creds = {'userId': self.user_id, 'password': self.password} else: creds = {'username': self.username, 'password': self.password} if self.tenant_id: body = {'auth': {'passwordCredentials': creds, 'tenantId': self.tenant_id, }, } else: body = {'auth': {'passwordCredentials': creds, 'tenantName': self.tenant_name, }, } if self.auth_url is None: raise exceptions.NoAuthURLProvided() token_url = self.auth_url + "/tokens" resp, resp_body = self._cs_request(token_url, "POST", body=json.dumps(body), content_type="application/json", allow_redirects=True) if resp.status_code != 200: raise exceptions.Unauthorized(message=resp_body) if resp_body: try: resp_body = json.loads(resp_body) except ValueError: pass else: resp_body = None self._extract_service_catalog(resp_body) def _authenticate_noauth(self): if not self.endpoint_url: message = _('For "noauth" authentication strategy, the endpoint ' 'must be specified either in the constructor or ' 'using --os-url') raise exceptions.Unauthorized(message=message) def authenticate(self): if self.auth_strategy == 'keystone': self._authenticate_keystone() elif self.auth_strategy == 'noauth': self._authenticate_noauth() else: err_msg = _('Unknown auth strategy: %s') % self.auth_strategy raise exceptions.Unauthorized(message=err_msg) def _get_endpoint_url(self): if self.auth_url is None: raise exceptions.NoAuthURLProvided() url = self.auth_url + '/tokens/%s/endpoints' % self.auth_token try: resp, body = self._cs_request(url, "GET") except exceptions.Unauthorized: # rollback to authenticate() to handle case when tacker client # is initialized just before the token is expired self.authenticate() return self.endpoint_url body = json.loads(body) for endpoint in body.get('endpoints', []): if (endpoint['type'] == 'nfv-orchestration' and endpoint.get('region') == self.region_name): if self.endpoint_type not in endpoint: raise exceptions.EndpointTypeNotFound( 
type_=self.endpoint_type) return endpoint[self.endpoint_type] raise exceptions.EndpointNotFound() def get_auth_info(self): return {'auth_token': self.auth_token, 'auth_tenant_id': self.auth_tenant_id, 'auth_user_id': self.auth_user_id, 'endpoint_url': self.endpoint_url} class SessionClient(adapter.Adapter): def request(self, *args, **kwargs): kwargs.setdefault('authenticated', False) kwargs.setdefault('raise_exc', False) content_type = kwargs.pop('content_type', None) or 'application/json' headers = kwargs.setdefault('headers', {}) headers.setdefault('Accept', content_type) try: kwargs.setdefault('data', kwargs.pop('body')) except KeyError: pass if kwargs.get('data'): headers.setdefault('Content-Type', content_type) resp = super(SessionClient, self).request(*args, **kwargs) if resp.headers.get('content-type') == 'application/zip': return resp, resp.content return resp, resp.text def _check_uri_length(self, url): uri_len = len(self.endpoint_url) + len(url) if uri_len > MAX_URI_LEN: raise exceptions.RequestURITooLong( excess=uri_len - MAX_URI_LEN) def do_request(self, url, method, **kwargs): kwargs.setdefault('authenticated', True) self._check_uri_length(url) return self.request(url, method, **kwargs) @property def endpoint_url(self): # NOTE(jamielennox): This is used purely by the CLI and should be # removed when the CLI gets smarter. return self.get_endpoint() @property def auth_token(self): # NOTE(jamielennox): This is used purely by the CLI and should be # removed when the CLI gets smarter. return self.get_token() def authenticate(self): # NOTE(jamielennox): This is used purely by the CLI and should be # removed when the CLI gets smarter. self.get_token() def get_auth_info(self): auth_info = {'auth_token': self.auth_token, 'endpoint_url': self.endpoint_url} # NOTE(jamielennox): This is the best we can do here. It will work # with identity plugins which is the primary case but we should # deprecate it's usage as much as possible. try: get_access = (self.auth or self.session.auth).get_access except AttributeError: pass else: auth_ref = get_access(self.session) auth_info['auth_tenant_id'] = auth_ref.project_id auth_info['auth_user_id'] = auth_ref.user_id return auth_info # FIXME(bklei): Should refactor this to use kwargs and only # explicitly list arguments that are not None. def construct_http_client(username=None, user_id=None, tenant_name=None, tenant_id=None, password=None, auth_url=None, token=None, region_name=None, timeout=None, endpoint_url=None, insecure=False, endpoint_type='publicURL', log_credentials=None, auth_strategy='keystone', ca_cert=None, service_type='nfv-orchestration', session=None, **kwargs): if session: kwargs.setdefault('user_agent', 'python-tackerclient') kwargs.setdefault('interface', endpoint_type) return SessionClient(session=session, service_type=service_type, region_name=region_name, **kwargs) else: # FIXME(bklei): username and password are now optional. Need # to test that they were provided in this mode. Should also # refactor to use kwargs. 
return HTTPClient(username=username, password=password, tenant_id=tenant_id, tenant_name=tenant_name, user_id=user_id, auth_url=auth_url, token=token, endpoint_url=endpoint_url, insecure=insecure, timeout=timeout, region_name=region_name, endpoint_type=endpoint_type, service_type=service_type, ca_cert=ca_cert, log_credentials=log_credentials, auth_strategy=auth_strategy) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7346199 python-tackerclient-2.1.0/tackerclient/common/0000775000175000017500000000000000000000000021464 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/common/__init__.py0000664000175000017500000000000000000000000023563 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/common/_i18n.py0000664000175000017500000000226500000000000022761 0ustar00zuulzuul00000000000000# Copyright 2016 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """oslo.i18n integration module. See https://docs.openstack.org/oslo.i18n/latest/user/index.html. """ import oslo_i18n DOMAIN = "tackerclient" _translators = oslo_i18n.TranslatorFactory(domain=DOMAIN) # The primary translation function using the well-known name "_" _ = _translators.primary # The contextual translation function using the name "_C" # requires oslo.i18n >=2.1.0 _C = _translators.contextual_form # The plural translation function using the name "_P" # requires oslo.i18n >=2.1.0 _P = _translators.plural_form def get_available_languages(): return oslo_i18n.get_available_languages(DOMAIN) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/common/clientmanager.py0000664000175000017500000000731100000000000024651 0ustar00zuulzuul00000000000000# Copyright 2012 OpenStack Foundation. # All Rights Reserved # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # """Manage access to the clients, including authenticating when needed. 
""" from tackerclient import client from tackerclient.tacker import client as tacker_client class ClientCache(object): """Descriptor class for caching created client handles.""" def __init__(self, factory): self.factory = factory self._handle = None def __get__(self, instance, owner): # Tell the ClientManager to login to keystone if self._handle is None: self._handle = self.factory(instance) return self._handle class ClientManager(object): """Manages access to API clients, including authentication.""" tacker = ClientCache(tacker_client.make_client) def __init__(self, token=None, url=None, auth_url=None, endpoint_type=None, tenant_name=None, tenant_id=None, username=None, user_id=None, password=None, region_name=None, api_version=None, auth_strategy=None, insecure=False, ca_cert=None, log_credentials=False, service_type=None, timeout=None, retries=0, raise_errors=True, session=None, auth=None, ): self._token = token self._url = url self._auth_url = auth_url self._service_type = service_type self._endpoint_type = endpoint_type self._tenant_name = tenant_name self._tenant_id = tenant_id self._username = username self._user_id = user_id self._password = password self._region_name = region_name self._api_version = api_version self._service_catalog = None self._auth_strategy = auth_strategy self._insecure = insecure self._ca_cert = ca_cert self._log_credentials = log_credentials self._timeout = timeout self._retries = retries self._raise_errors = raise_errors self._session = session self._auth = auth return def initialize(self): if not self._url: httpclient = client.construct_http_client( username=self._username, user_id=self._user_id, tenant_name=self._tenant_name, tenant_id=self._tenant_id, password=self._password, region_name=self._region_name, auth_url=self._auth_url, service_type=self._service_type, endpoint_type=self._endpoint_type, insecure=self._insecure, ca_cert=self._ca_cert, timeout=self._timeout, session=self._session, auth=self._auth, log_credentials=self._log_credentials) httpclient.authenticate() # Populate other password flow attributes self._token = httpclient.auth_token self._url = httpclient.endpoint_url ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/common/command.py0000664000175000017500000000230500000000000023454 0ustar00zuulzuul00000000000000# Copyright 2012 OpenStack Foundation. # All Rights Reserved # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
# from sys import stderr from cliff import command class OpenStackCommand(command.Command): """Base class for OpenStack commands.""" api = None def run(self, parsed_args): stderr.write("Deprecated: tacker command line is deprecated, " "will be deleted after Rocky is released.\n") if not self.api: return else: return super(OpenStackCommand, self).run(parsed_args) def get_data(self, parsed_args): pass def take_action(self, parsed_args): return self.get_data(parsed_args) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/common/constants.py0000664000175000017500000000125700000000000024057 0ustar00zuulzuul00000000000000# Copyright (c) 2012 OpenStack Foundation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. TYPE_BOOL = "bool" TYPE_INT = "int" TYPE_FLOAT = "float" TYPE_LIST = "list" TYPE_DICT = "dict" ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/common/exceptions.py0000664000175000017500000001177600000000000024233 0ustar00zuulzuul00000000000000# Copyright 2011 VMware, Inc # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from tackerclient.i18n import _ """ Tacker base exception handling. Exceptions are classified into three categories: * Exceptions corresponding to exceptions from tacker server: This type of exceptions should inherit one of exceptions in HTTP_EXCEPTION_MAP. * Exceptions from client library: This type of exceptions should inherit TackerClientException. * Exceptions from CLI code: This type of exceptions should inherit TackerCLIError. """ class TackerException(Exception): """Base Tacker Exception. To correctly use this class, inherit from it and define a 'message' property. That message will get printf'd with the keyword arguments provided to the constructor. """ message = _("An unknown exception occurred.") def __init__(self, message=None, **kwargs): if message: self.message = message try: self._error_string = self.message % kwargs except Exception: # at least get the core message out if something happened self._error_string = self.message def __str__(self): return self._error_string class TackerClientException(TackerException): """Base exception which exceptions from Tacker are mapped into. NOTE: on the client side, we use different exception types in order to allow client library users to handle server exceptions in try...except blocks. 
The actual error message is the one generated on the server side. """ status_code = 0 def __init__(self, message=None, **kwargs): if 'status_code' in kwargs: self.status_code = kwargs['status_code'] super(TackerClientException, self).__init__(message, **kwargs) # Base exceptions from Tacker class BadRequest(TackerClientException): status_code = 400 class Unauthorized(TackerClientException): status_code = 401 message = _("Unauthorized: bad credentials.") class Forbidden(TackerClientException): status_code = 403 message = _("Forbidden: your credentials don't give you access to this " "resource.") class NotFound(TackerClientException): status_code = 404 class Conflict(TackerClientException): status_code = 409 class InternalServerError(TackerClientException): status_code = 500 class ServiceUnavailable(TackerClientException): status_code = 503 HTTP_EXCEPTION_MAP = { 400: BadRequest, 401: Unauthorized, 403: Forbidden, 404: NotFound, 409: Conflict, 500: InternalServerError, 503: ServiceUnavailable, } # Exceptions from client library class NoAuthURLProvided(Unauthorized): message = _("auth_url was not provided to the Tacker client") class EndpointNotFound(TackerClientException): message = _("Could not find Service or Region in Service Catalog.") class EndpointTypeNotFound(TackerClientException): message = _("Could not find endpoint type %(type_)s in Service Catalog.") class RequestURITooLong(TackerClientException): """Raised when a request fails with HTTP error 414.""" def __init__(self, **kwargs): self.excess = kwargs.get('excess', 0) super(RequestURITooLong, self).__init__(**kwargs) class ConnectionFailed(TackerClientException): message = _("Connection to tacker failed: %(reason)s") class SslCertificateValidationError(TackerClientException): message = _("SSL certificate validation has failed: %(reason)s") class MalformedResponseBody(TackerClientException): message = _("Malformed response body: %(reason)s") class InvalidContentType(TackerClientException): message = _("Invalid content type %(content_type)s.") class InvalidInput(TackerClientException): message = _("Invalid input: %(reason)s") class EmptyInput(TackerClientException): message = _("Empty input: %(reason)s") class UnsupportedCommandVersion(TackerClientException): message = _("This command is not supported in version %(version)s") # Command line exceptions class TackerCLIError(TackerException): """Exception raised when command line parsing fails.""" pass class CommandError(TackerCLIError): pass class UnsupportedVersion(TackerCLIError): """Unsupported Version. Indicates that the user is trying to use an unsupported version of the API. """ pass class TackerClientNoUniqueMatch(TackerCLIError): message = _("Multiple %(resource)s matches found for name '%(name)s'," " use an ID to be more specific.") ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/common/extension.py0000664000175000017500000000645000000000000024057 0ustar00zuulzuul00000000000000# Copyright 2015 Rackspace Hosting Inc. # All Rights Reserved # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. # from stevedore import extension from tackerclient.tacker import v1_0 as tackerV10 def _discover_via_entry_points(): emgr = extension.ExtensionManager('tackerclient.extension', invoke_on_load=False) return ((ext.name, ext.plugin) for ext in emgr) class TackerClientExtension(tackerV10.TackerCommand): pagination_support = False _formatters = {} sorting_support = False class ClientExtensionShow(TackerClientExtension, tackerV10.ShowCommand): def get_data(self, parsed_args): # NOTE(mdietz): Calls 'execute' to provide a consistent pattern # for any implementers adding extensions with # regard to any other extension verb. return self.execute(parsed_args) def execute(self, parsed_args): return super(ClientExtensionShow, self).get_data(parsed_args) class ClientExtensionList(TackerClientExtension, tackerV10.ListCommand): def get_data(self, parsed_args): # NOTE(mdietz): Calls 'execute' to provide a consistent pattern # for any implementers adding extensions with # regard to any other extension verb. return self.execute(parsed_args) def execute(self, parsed_args): return super(ClientExtensionList, self).get_data(parsed_args) class ClientExtensionDelete(TackerClientExtension, tackerV10.DeleteCommand): def run(self, parsed_args): # NOTE(mdietz): Calls 'execute' to provide a consistent pattern # for any implementers adding extensions with # regard to any other extension verb. return self.execute(parsed_args) def execute(self, parsed_args): return super(ClientExtensionDelete, self).run(parsed_args) class ClientExtensionCreate(TackerClientExtension, tackerV10.CreateCommand): def get_data(self, parsed_args): # NOTE(mdietz): Calls 'execute' to provide a consistent pattern # for any implementers adding extensions with # regard to any other extension verb. return self.execute(parsed_args) def execute(self, parsed_args): return super(ClientExtensionCreate, self).get_data(parsed_args) class ClientExtensionUpdate(TackerClientExtension, tackerV10.UpdateCommand): def run(self, parsed_args): # NOTE(mdietz): Calls 'execute' to provide a consistent pattern # for any implementers adding extensions with # regard to any other extension verb. return self.execute(parsed_args) def execute(self, parsed_args): return super(ClientExtensionUpdate, self).run(parsed_args) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/common/serializer.py0000664000175000017500000000736100000000000024216 0ustar00zuulzuul00000000000000# Copyright 2013 OpenStack Foundation. # All Rights Reserved # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
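The serializer module that follows dispatches on MIME type but only wires up JSON handlers; any other content type raises InvalidContentType. A minimal round-trip sketch:

from tackerclient.common import serializer

s = serializer.Serializer()
body = s.serialize({'vim': {'name': 'vim1'}}, 'application/json')
# deserialize() wraps the decoded payload under a 'body' key.
data = s.deserialize(body, 'application/json')['body']
assert data['vim']['name'] == 'vim1'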
import logging from oslo_serialization import jsonutils from tackerclient.common import exceptions as exception from tackerclient.i18n import _ LOG = logging.getLogger(__name__) class ActionDispatcher(object): """Maps method name to local methods through action name.""" def dispatch(self, *args, **kwargs): """Find and call local method.""" action = kwargs.pop('action', 'default') action_method = getattr(self, str(action), self.default) return action_method(*args, **kwargs) def default(self, data): raise NotImplementedError() class DictSerializer(ActionDispatcher): """Default request body serialization.""" def serialize(self, data, action='default'): return self.dispatch(data, action=action) def default(self, data): return "" class JSONDictSerializer(DictSerializer): """Default JSON request body serialization.""" def default(self, data): def sanitizer(obj): return str(obj) return jsonutils.dumps(data, default=sanitizer) class TextDeserializer(ActionDispatcher): """Default request body deserialization.""" def deserialize(self, datastring, action='default'): return self.dispatch(datastring, action=action) def default(self, datastring): return {} class JSONDeserializer(TextDeserializer): def _from_json(self, datastring): try: return jsonutils.loads(datastring) except ValueError: msg = _("Cannot understand JSON") raise exception.MalformedResponseBody(reason=msg) def default(self, datastring): return {'body': self._from_json(datastring)} # NOTE(maru): this class is duplicated from tacker.wsgi class Serializer(object): """Serializes and deserializes dictionaries to certain MIME types.""" def __init__(self, metadata=None): """Create a serializer based on the given WSGI environment. 'metadata' is an optional dict mapping MIME types to information needed to serialize a dictionary to that type. """ self.metadata = metadata or {} def _get_serialize_handler(self, content_type): handlers = { 'application/json': JSONDictSerializer() } try: return handlers[content_type] except Exception: raise exception.InvalidContentType(content_type=content_type) def serialize(self, data, content_type): """Serialize a dictionary into the specified content type.""" return self._get_serialize_handler(content_type).serialize(data) def deserialize(self, datastring, content_type): """Deserialize a string to a dictionary. The string must be in the format of a supported MIME type. """ return self.get_deserialize_handler(content_type).deserialize( datastring) def get_deserialize_handler(self, content_type): handlers = { 'application/json': JSONDeserializer() } try: return handlers[content_type] except Exception: raise exception.InvalidContentType(content_type=content_type) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/common/utils.py0000664000175000017500000001325600000000000023205 0ustar00zuulzuul00000000000000# Copyright 2011, VMware, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
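The utilities module that follows is a grab bag of small CLI helpers. Two of them exercised in isolation, as a sketch:

from tackerclient.common import utils

# key=value pairs become a dict; values may themselves contain '='.
utils.str2dict('vendor=acme,version=1.0')
# -> {'vendor': 'acme', 'version': '1.0'}

# Field names are lower-cased to look up keys on dict-like items.
utils.get_item_properties({'id': 'abc123', 'name': 'vim1'}, ('ID', 'Name'))
# -> ('abc123', 'vim1')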
# # Borrowed from nova code base, more utilities will be added/borrowed as and # when needed. """Utilities and helper functions.""" import argparse import logging import os from oslo_log import versionutils from oslo_utils import encodeutils from oslo_utils import importutils from tackerclient.common import exceptions from tackerclient.i18n import _ def env(*vars, **kwargs): """Returns the first environment variable set. If none are non-empty, defaults to '' or keyword arg default. """ for v in vars: value = os.environ.get(v) if value: return value return kwargs.get('default', '') def get_client_class(api_name, version, version_map): """Returns the client class for the requested API version. :param api_name: the name of the API, e.g. 'compute', 'image', etc :param version: the requested API version :param version_map: a dict of client classes keyed by version :rtype: a client class for the requested API version """ try: client_path = version_map[str(version)] except (KeyError, ValueError): msg = _("Invalid %(api_name)s client version '%(version)s'. must be " "one of: %(map_keys)s") msg = msg % {'api_name': api_name, 'version': version, 'map_keys': ', '.join(version_map.keys())} raise exceptions.UnsupportedVersion(message=msg) return importutils.import_class(client_path) def get_item_properties(item, fields, mixed_case_fields=(), formatters=None): """Return a tuple containing the item properties. :param item: a single item resource (e.g. Server, Tenant, etc) :param fields: tuple of strings with the desired field names :param mixed_case_fields: tuple of field names to preserve case :param formatters: dictionary mapping field names to callables to format the values """ if formatters is None: formatters = {} row = [] for field in fields: if field in formatters: row.append(formatters[field](item)) else: if field in mixed_case_fields: field_name = field.replace(' ', '_') else: field_name = field.lower().replace(' ', '_') if not hasattr(item, field_name) and isinstance(item, dict): data = item[field_name] else: data = getattr(item, field_name, '') if data is None: data = '' row.append(data) return tuple(row) def str2bool(strbool): if strbool is None: return None return strbool.lower() == 'true' def str2dict(strdict): """Convert key1=value1,key2=value2,... string into dictionary. 
:param strdict: key1=value1,key2=value2 """ if not strdict: return {} return dict([kv.split('=', 1) for kv in strdict.split(',')]) def http_log_req(_logger, args, kwargs): if not _logger.isEnabledFor(logging.DEBUG): return string_parts = ['curl -i'] for element in args: if element in ('GET', 'POST', 'DELETE', 'PUT'): string_parts.append(' -X %s' % element) else: string_parts.append(' %s' % element) for element in kwargs['headers']: header = ' -H "%s: %s"' % (element, kwargs['headers'][element]) string_parts.append(header) if 'body' in kwargs and kwargs['body']: string_parts.append(" -d '%s'" % (kwargs['body'])) req = encodeutils.safe_encode("".join(string_parts)) _logger.debug("\nREQ: %s\n", req) def http_log_resp(_logger, resp, body): if not _logger.isEnabledFor(logging.DEBUG): return _logger.debug("RESP:%(code)s %(headers)s %(body)s\n", {'code': resp.status_code, 'headers': resp.headers, 'body': body}) def _safe_encode_without_obj(data): if isinstance(data, str): return encodeutils.safe_encode(data) return data def safe_encode_list(data): return list(map(_safe_encode_without_obj, data)) def safe_encode_dict(data): def _encode_item(item): k, v = item if isinstance(v, list): return (k, safe_encode_list(v)) elif isinstance(v, dict): return (k, safe_encode_dict(v)) return (k, _safe_encode_without_obj(v)) return dict(list(map(_encode_item, data.items()))) def add_boolean_argument(parser, name, **kwargs): for keyword in ('metavar', 'choices'): kwargs.pop(keyword, None) default = kwargs.pop('default', argparse.SUPPRESS) parser.add_argument( name, metavar='{True,False}', choices=['True', 'true', 'False', 'false'], default=default, **kwargs) def get_file_path(filename): file_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../%s' % filename)) return file_path def deprecate_warning(what, as_of, in_favor_of=None, remove_in=1): versionutils.deprecation_warning(as_of=as_of, what=what, in_favor_of=in_favor_of, remove_in=remove_in) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/common/validators.py0000664000175000017500000000506600000000000024215 0ustar00zuulzuul00000000000000# Copyright 2014 NEC Corporation # All Rights Reserved # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
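The validators module that follows signals problems by raising CommandError with a descriptive message rather than returning a status. A sketch driving both checks with a bare argparse.Namespace standing in for real parsed CLI options:

import argparse

from tackerclient.common import validators

args = argparse.Namespace(retries=3, cidr='10.0.0.0/24')
validators.validate_int_range(args, 'retries', min_value=0, max_value=10)
validators.validate_ip_subnet(args, 'cidr')
# both return None on success; out-of-range or malformed values raise
# tackerclient.common.exceptions.CommandError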
import netaddr from tackerclient.common import exceptions from tackerclient.i18n import _ def validate_int_range(parsed_args, attr_name, min_value=None, max_value=None): val = getattr(parsed_args, attr_name, None) if val is None: return try: if not isinstance(val, int): int_val = int(val, 0) else: int_val = val if ((min_value is None or min_value <= int_val) and (max_value is None or int_val <= max_value)): return except (ValueError, TypeError): pass if min_value is not None and max_value is not None: msg = (_('%(attr_name)s "%(val)s" should be an integer ' '[%(min)i:%(max)i].') % {'attr_name': attr_name.replace('_', '-'), 'val': val, 'min': min_value, 'max': max_value}) elif min_value is not None: msg = (_('%(attr_name)s "%(val)s" should be an integer ' 'greater than or equal to %(min)i.') % {'attr_name': attr_name.replace('_', '-'), 'val': val, 'min': min_value}) elif max_value is not None: msg = (_('%(attr_name)s "%(val)s" should be an integer ' 'smaller than or equal to %(max)i.') % {'attr_name': attr_name.replace('_', '-'), 'val': val, 'max': max_value}) else: msg = (_('%(attr_name)s "%(val)s" should be an integer.') % {'attr_name': attr_name.replace('_', '-'), 'val': val}) raise exceptions.CommandError(message=msg) def validate_ip_subnet(parsed_args, attr_name): val = getattr(parsed_args, attr_name) if not val: return try: netaddr.IPNetwork(val) except (netaddr.AddrFormatError, ValueError): raise exceptions.CommandError( message=(_('%(attr_name)s "%(val)s" is not a valid CIDR.') % {'attr_name': attr_name.replace('_', '-'), 'val': val})) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/i18n.py0000664000175000017500000000135700000000000021333 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
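The i18n module below supplies the "_" translation marker that wraps the message templates in common/exceptions.py earlier; those templates are rendered with the keyword arguments passed to the exception. A small sketch:

from tackerclient.common import exceptions

err = exceptions.ConnectionFailed(reason='connection timed out')
print(err)              # Connection to tacker failed: connection timed out
print(err.status_code)  # 0 - a client-side failure, no HTTP status attached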
import oslo_i18n as i18n _translators = i18n.TranslatorFactory(domain='tackerclient') # The primary translation function using the well-known name "_" _ = _translators.primary ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7346199 python-tackerclient-2.1.0/tackerclient/osc/0000775000175000017500000000000000000000000020760 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/__init__.py0000664000175000017500000000000000000000000023057 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7346199 python-tackerclient-2.1.0/tackerclient/osc/common/0000775000175000017500000000000000000000000022250 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/common/__init__.py0000664000175000017500000000000000000000000024347 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7346199 python-tackerclient-2.1.0/tackerclient/osc/common/vnflcm/0000775000175000017500000000000000000000000023535 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/common/vnflcm/__init__.py0000664000175000017500000000000000000000000025634 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/common/vnflcm/vnflcm_versions.py0000664000175000017500000000333600000000000027331 0ustar00zuulzuul00000000000000# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
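A little further below, the OSC plugin module maps both supported API versions to the same client class. A sketch confirming that mapping with the same osc-lib helper the plugin itself calls:

from osc_lib import utils

from tackerclient.osc import plugin

cls = utils.get_client_class(plugin.API_NAME, '1', plugin.API_VERSIONS)
print(cls)   # <class 'tackerclient.v1_0.client.Client'>
# versions '1' and '2' resolve to the same dotted path, hence the same class
assert cls is utils.get_client_class(plugin.API_NAME, '2', plugin.API_VERSIONS)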
from osc_lib.command import command from tackerclient.common import exceptions from tackerclient.i18n import _ SUPPORTED_VERSIONS = [1, 2] class VnfLcmVersions(command.ShowOne): _description = _("Show VnfLcm Api versions") def get_parser(self, prog_name): parser = super(VnfLcmVersions, self).get_parser(prog_name) parser.add_argument( '--major-version', metavar="", type=int, help=_('Show only specify major version.')) return parser def take_action(self, parsed_args): v = None if parsed_args.major_version: if parsed_args.major_version not in SUPPORTED_VERSIONS: msg = _("Major version %d is not supported") reason = msg % parsed_args.major_version raise exceptions.InvalidInput(reason=reason) v = "v{}".format(parsed_args.major_version) client = self.app.client_manager.tackerclient data = client.show_vnf_lcm_versions(v) return (tuple(data.keys()), tuple(data.values())) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/plugin.py0000664000175000017500000000357700000000000022644 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # """OpenStackClient plugin for nfv-orchestration service.""" import logging from osc_lib import utils LOG = logging.getLogger(__name__) # Required by the OSC plugin interface DEFAULT_TACKER_API_VERSION = '1' API_NAME = 'tackerclient' API_VERSION_OPTION = 'os_tacker_api_version' API_VERSIONS = { '1': 'tackerclient.v1_0.client.Client', '2': 'tackerclient.v1_0.client.Client', } def make_client(instance): """Returns a client to the ClientManager.""" api_version = instance._api_version[API_NAME] tacker_client = utils.get_client_class( API_NAME, api_version, API_VERSIONS) LOG.debug('Instantiating tacker client: %s', tacker_client) kwargs = {'service_type': 'nfv-orchestration', 'region_name': instance._region_name, 'endpoint_type': instance._interface, 'interface': instance._interface, 'session': instance.session, 'api_version': api_version } client = tacker_client(**kwargs) return client def build_option_parser(parser): """Hook to add global options.""" parser.add_argument( '--os-tacker-api-version', metavar='', default=utils.env( 'OS_TACKER_API_VERSION', default=DEFAULT_TACKER_API_VERSION)) return parser ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/sdk_utils.py0000664000175000017500000001037400000000000023340 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. import sys from oslo_utils import encodeutils def get_osc_show_columns_for_sdk_resource( sdk_resource, osc_column_map, invisible_columns=None ): """Get and filter the display and attribute columns for an SDK resource. Common utility function for preparing the output of an OSC show command. Some of the columns may need to get renamed, others made invisible. :param sdk_resource: An SDK resource :param osc_column_map: A hash of mappings for display column names :param invisible_columns: A list of invisible column names :returns: Two tuples containing the names of the display and attribute columns """ if getattr(sdk_resource, 'allow_get', None) is not None: resource_dict = sdk_resource.to_dict( body=True, headers=False, ignore_none=False) else: resource_dict = sdk_resource # Build the OSC column names to display for the SDK resource. attr_map = {} display_columns = list(resource_dict.keys()) invisible_columns = [] if invisible_columns is None else invisible_columns for col_name in invisible_columns: if col_name in display_columns: display_columns.remove(col_name) for sdk_attr, osc_attr in osc_column_map.items(): if sdk_attr in display_columns: attr_map[osc_attr] = sdk_attr display_columns.remove(sdk_attr) if osc_attr not in display_columns: display_columns.append(osc_attr) sorted_display_columns = sorted(display_columns) # Build the SDK attribute names for the OSC column names. attr_columns = [] for column in sorted_display_columns: new_column = attr_map[column] if column in attr_map else column attr_columns.append(new_column) return tuple(sorted_display_columns), tuple(attr_columns) class DictModel(dict): """Convert dict into an object that provides attribute access to values.""" def __init__(self, *args, **kwargs): """Convert dict values to DictModel values.""" super(DictModel, self).__init__(*args, **kwargs) def needs_upgrade(item): return isinstance(item, dict) and not isinstance(item, DictModel) def upgrade(item): """Upgrade item if it needs to be upgraded.""" if needs_upgrade(item): return DictModel(item) else: return item for key, value in self.items(): if isinstance(value, (list, tuple)): # Keep the same type but convert dicts to DictModels self[key] = type(value)( (upgrade(item) for item in value) ) elif needs_upgrade(value): # Change dict instance values to DictModel instance values self[key] = DictModel(value) def __getattr__(self, name): try: return self[name] except KeyError as e: raise AttributeError(e) def __setattr__(self, name, value): self[name] = value def __delattr__(self, name): del self[name] def __str__(self): pairs = ['%s=%s' % (k, v) for k, v in self.items()] return ', '.join(sorted(pairs)) def save_data(data, path): """Save data to the specified path. 
:param data: binary or string data :param path: file path to save data """ if path is None: vnfpackage = getattr(sys.stdout, 'buffer', sys.stdout) else: mode = 'wb' if isinstance(data, bytes) else 'w' vnfpackage = open(path, mode) try: vnfpackage.write(data) finally: vnfpackage.close() def exit(msg=None, exit_code=1): if msg: print(encodeutils.safe_decode(msg)) sys.exit(exit_code) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/utils.py0000664000175000017500000002074200000000000022477 0ustar00zuulzuul00000000000000# Copyright 2016 NEC Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """This module should contain OSC plugin generic methods. Methods in this module are candidates adopted to osc-lib. Stuffs specific to tackerclient OSC plugin should not be added to this module. They should go to tackerclient.osc.v1.utils. """ import json import operator import os from cliff import columns as cliff_columns from keystoneclient import exceptions as identity_exc from keystoneclient.v3 import domains from keystoneclient.v3 import projects from osc_lib import utils from oslo_serialization import jsonutils from tackerclient.common import exceptions from tackerclient.i18n import _ LIST_BOTH = 'both' LIST_SHORT_ONLY = 'short_only' LIST_LONG_ONLY = 'long_only' def format_dict_with_indention(data): """Return a formatted string of key value pairs :param data: a dict :rtype: a string formatted to key='value' """ if data is None: return None return jsonutils.dumps(data, indent=4) def get_column_definitions(attr_map, long_listing): """Return table headers and column names for a listing table. :param attr_map: a list of table entry definitions. Each entry should be a tuple consisting of (API attribute name, header name, listing mode). For example: (('id', 'ID', LIST_BOTH), ('name', 'Name', LIST_BOTH), ('tenant_id', 'Project', LIST_LONG_ONLY)) The third field of each tuple must be one of LIST_BOTH, LIST_LONG_ONLY (a corresponding column is shown only in a long mode), or LIST_SHORT_ONLY (a corresponding column is shown only in a short mode). :param long_listing: A boolean value which indicates a long listing or not. In most cases, parsed_args.long is passed to this argument. :return: A tuple of a list of table headers and a list of column names. """ if long_listing: headers = [hdr for col, hdr, listing_mode in attr_map if listing_mode in (LIST_BOTH, LIST_LONG_ONLY)] columns = [col for col, hdr, listing_mode in attr_map if listing_mode in (LIST_BOTH, LIST_LONG_ONLY)] else: headers = [hdr for col, hdr, listing_mode in attr_map if listing_mode in (LIST_BOTH, LIST_SHORT_ONLY)] columns = [col for col, hdr, listing_mode in attr_map if listing_mode in (LIST_BOTH, LIST_SHORT_ONLY)] return headers, columns def get_columns(item, attr_map=None): """Return pair of resource attributes and corresponding display names. Assume the following item and attr_map are passed. 
item: {'id': 'myid', 'name': 'myname', 'foo': 'bar', 'tenant_id': 'mytenan'} attr_map: (('id', 'ID', LIST_BOTH), ('name', 'Name', LIST_BOTH), ('tenant_id', 'Project', LIST_LONG_ONLY)) This method returns: (('id', 'name', 'tenant_id', 'foo'), # attributes ('ID', 'Name', 'Project', 'foo') # display names Both tuples of attributes and display names are sorted by display names in the alphabetical order. Attributes not found in a given attr_map are kept as-is. :param item: a dictionary which represents a resource. Keys of the dictionary are expected to be attributes of the resource. Values are not referred to by this method. :param attr_map: a list of mapping from attribute to display name. The same format is used as for get_column_definitions attr_map. :return: A pair of tuple of attributes and tuple of display names. """ attr_map = attr_map or tuple([]) _attr_map_dict = dict((col, hdr) for col, hdr, listing_mode in attr_map) columns = [(column, _attr_map_dict.get(column, column)) for column in item.keys()] columns = sorted(columns, key=operator.itemgetter(1)) return (tuple(col[0] for col in columns), tuple(col[1] for col in columns)) # TODO(amotoki): Use osc-lib version once osc-lib provides this. def add_project_owner_option_to_parser(parser): """Register project and project domain options. :param parser: argparse.Argument parser object. """ parser.add_argument( '--project', metavar='', help=_("Owner's project (name or ID)") ) # Borrowed from openstackclient.identity.common # as it is not exposed officially. parser.add_argument( '--project-domain', metavar='', help=_('Domain the project belongs to (name or ID). ' 'This can be used in case collisions between project names ' 'exist.'), ) # The following methods are borrowed from openstackclient.identity.common # as it is not exposed officially. # TODO(amotoki): Use osc-lib version once osc-lib provides this. def find_domain(identity_client, name_or_id): return _find_identity_resource(identity_client.domains, name_or_id, domains.Domain) def find_project(identity_client, name_or_id, domain_name_or_id=None): domain_id = _get_domain_id_if_requested(identity_client, domain_name_or_id) if not domain_id: return _find_identity_resource(identity_client.projects, name_or_id, projects.Project) else: return _find_identity_resource(identity_client.projects, name_or_id, projects.Project, domain_id=domain_id) def _get_domain_id_if_requested(identity_client, domain_name_or_id): if not domain_name_or_id: return None domain = find_domain(identity_client, domain_name_or_id) return domain.id def _find_identity_resource(identity_client_manager, name_or_id, resource_type, **kwargs): """Find a specific identity resource. Using keystoneclient's manager, attempt to find a specific resource by its name or ID. If Forbidden to find the resource (a common case if the user does not have permission), then return the resource by creating a local instance of keystoneclient's Resource. The parameter identity_client_manager is a keystoneclient manager, for example: keystoneclient.v3.users or keystoneclient.v3.projects. The parameter resource_type is a keystoneclient resource, for example: keystoneclient.v3.users.User or keystoneclient.v3.projects.Project. 
:param identity_client_manager: the manager that contains the resource :type identity_client_manager: `keystoneclient.base.CrudManager` :param name_or_id: the resources's name or ID :type name_or_id: string :param resource_type: class that represents the resource type :type resource_type: `keystoneclient.base.Resource` :returns: the resource in question :rtype: `keystoneclient.base.Resource` """ try: identity_resource = utils.find_resource(identity_client_manager, name_or_id, **kwargs) if identity_resource is not None: return identity_resource except identity_exc.Forbidden: pass return resource_type(None, {'id': name_or_id, 'name': name_or_id}) # The above are borrowed from openstackclient.identity.common. class FormatComplexDataColumn(cliff_columns.FormattableColumn): def human_readable(self): return format_dict_with_indention(self._value) def jsonfile2body(file_path): if file_path is None: msg = _("File %s does not exist") reason = msg % file_path raise exceptions.InvalidInput(reason=reason) if os.access(file_path, os.R_OK) is False: msg = _("User does not have read privileges to it") raise exceptions.InvalidInput(reason=msg) try: with open(file_path) as f: body = json.load(f) except (IOError, ValueError) as ex: msg = _("Failed to load parameter file. Error: %s") reason = msg % ex raise exceptions.InvalidInput(reason=reason) if not body: reason = _('The parameter file is empty') raise exceptions.EmptyInput(reason=reason) return body ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7346199 python-tackerclient-2.1.0/tackerclient/osc/v1/0000775000175000017500000000000000000000000021306 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v1/__init__.py0000664000175000017500000000000000000000000023405 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7386198 python-tackerclient-2.1.0/tackerclient/osc/v1/nfvo/0000775000175000017500000000000000000000000022256 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v1/nfvo/__init__.py0000664000175000017500000000000000000000000024355 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v1/nfvo/vim.py0000664000175000017500000002331300000000000023425 0ustar00zuulzuul00000000000000# Copyright 2016 Brocade Communications Systems Inc # All Rights Reserved. # # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
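The jsonfile2body() helper defined just above loads a JSON parameter file and raises InvalidInput or EmptyInput instead of silently returning nothing. A sketch exercising it against a throw-away temporary file with placeholder content:

import json
import tempfile

from tackerclient.osc import utils as osc_utils

with tempfile.NamedTemporaryFile('w', suffix='.json', delete=False) as f:
    json.dump({'flavourId': 'simple'}, f)   # placeholder request body

body = osc_utils.jsonfile2body(f.name)
assert body == {'flavourId': 'simple'}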
import yaml from osc_lib.command import command from osc_lib import utils from oslo_utils import strutils from tackerclient.common import exceptions from tackerclient.i18n import _ from tackerclient.osc import sdk_utils from tackerclient.osc import utils as tacker_osc_utils from tackerclient.tacker import v1_0 as tackerV10 from tackerclient.tacker.v1_0.nfvo import vim_utils _attr_map = ( ('id', 'ID', tacker_osc_utils.LIST_BOTH), ('name', 'Name', tacker_osc_utils.LIST_BOTH), ('tenant_id', 'Tenant_id', tacker_osc_utils.LIST_BOTH), ('type', 'Type', tacker_osc_utils.LIST_BOTH), ('is_default', 'Is Default', tacker_osc_utils.LIST_BOTH), ('placement_attr', 'Placement attribution', tacker_osc_utils.LIST_LONG_ONLY), ('status', 'Status', tacker_osc_utils.LIST_BOTH), ) _VIM = 'vim' class ListVIM(command.Lister): _description = _("List VIMs that belong to a given tenant.") def get_parser(self, prog_name): parser = super(ListVIM, self).get_parser(prog_name) parser.add_argument( '--long', action='store_true', help=_("List additional fields in output") ) return parser def take_action(self, parsed_args): client = self.app.client_manager.tackerclient data = client.list_vims() headers, columns = tacker_osc_utils.get_column_definitions( _attr_map, long_listing=parsed_args.long) return (headers, (utils.get_dict_properties( s, columns, ) for s in data[_VIM + 's'])) class ShowVIM(command.ShowOne): _description = _("Display VIM details") def get_parser(self, prog_name): parser = super(ShowVIM, self).get_parser(prog_name) parser.add_argument( _VIM, metavar="", help=_("VIM to display (name or ID)") ) return parser def take_action(self, parsed_args): client = self.app.client_manager.tackerclient obj_id = tackerV10.find_resourceid_by_name_or_id( client, _VIM, parsed_args.vim) obj = client.show_vim(obj_id) display_columns, columns = _get_columns(obj[_VIM]) data = utils.get_item_properties( sdk_utils.DictModel(obj[_VIM]), columns, formatters=_formatters) return (display_columns, data) class CreateVIM(command.ShowOne): _description = _("Register a new VIM") def get_parser(self, prog_name): parser = super(CreateVIM, self).get_parser(prog_name) parser.add_argument( 'name', metavar='NAME', help=_('Set a name for the VIM')) parser.add_argument( '--tenant-id', metavar='TENANT_ID', help=_('The owner tenant ID or project ID')) parser.add_argument( '--config-file', required=True, help=_('YAML file with VIM configuration parameters')) parser.add_argument( '--description', help=_('Set a description for the VIM')) parser.add_argument( '--is-default', action='store_true', default=False, help=_('Set as default VIM')) return parser def args2body(self, parsed_args): body = {_VIM: {}} if parsed_args.config_file: with open(parsed_args.config_file) as f: vim_config = f.read() try: config_param = yaml.load(vim_config, Loader=yaml.SafeLoader) except yaml.YAMLError as e: raise exceptions.InvalidInput(reason=e) vim_obj = body[_VIM] try: auth_url = config_param.pop('auth_url') except KeyError: raise exceptions.TackerClientException(message='Auth URL must be ' 'specified', status_code=404) vim_obj['auth_url'] = vim_utils.validate_auth_url(auth_url).geturl() vim_utils.args2body_vim(config_param, vim_obj) tackerV10.update_dict(parsed_args, body[_VIM], ['tenant_id', 'name', 'description', 'is_default']) return body def take_action(self, parsed_args): client = self.app.client_manager.tackerclient vim = client.create_vim(self.args2body(parsed_args)) display_columns, columns = _get_columns(vim[_VIM]) data = utils.get_item_properties( 
sdk_utils.DictModel(vim[_VIM]), columns, formatters=_formatters) return (display_columns, data) class DeleteVIM(command.Command): _description = _("Delete VIM(s).") def get_parser(self, prog_name): parser = super(DeleteVIM, self).get_parser(prog_name) parser.add_argument( _VIM, metavar="", nargs="+", help=_("VIM(s) to delete (name or ID)") ) return parser def take_action(self, parsed_args): client = self.app.client_manager.tackerclient failure = False deleted_ids = [] failed_items = {} for resource_id in parsed_args.vim: try: obj = tackerV10.find_resourceid_by_name_or_id( client, _VIM, resource_id) client.delete_vim(obj) deleted_ids.append(resource_id) except Exception as e: failure = True failed_items[resource_id] = e if failure: msg = '' if deleted_ids: msg = (_('Successfully deleted %(resource)s(s):' ' %(deleted_list)s') % {'deleted_list': ', '.join(deleted_ids), 'resource': _VIM}) err_msg = _("\n\nUnable to delete the below" " %s(s):") % _VIM for failed_id, error in failed_items.items(): err_msg += (_('\n Cannot delete %(failed_id)s: %(error)s') % {'failed_id': failed_id, 'error': error}) msg += err_msg raise exceptions.CommandError(message=msg) else: print((_('All specified %(resource)s(s) deleted successfully') % {'resource': _VIM})) return class UpdateVIM(command.ShowOne): _description = _("Update VIM.") def get_parser(self, prog_name): parser = super(UpdateVIM, self).get_parser(prog_name) parser.add_argument( 'id', metavar="VIM", help=_('ID or name of %s to update') % _VIM) parser.add_argument( '--config-file', required=False, help=_('YAML file with VIM configuration parameters')) parser.add_argument( '--name', help=_('New name for the VIM')) parser.add_argument( '--description', help=_('New description for the VIM')) parser.add_argument( '--is-default', type=strutils.bool_from_string, metavar='{True,False}', help=_('Indicate whether the VIM is used as default')) return parser def args2body(self, parsed_args): body = {_VIM: {}} config_param = None # config arg passed as data overrides config yaml when both args passed if parsed_args.config_file: with open(parsed_args.config_file) as f: config_yaml = f.read() try: config_param = yaml.load(config_yaml, Loader=yaml.SafeLoader) except yaml.YAMLError as e: raise exceptions.InvalidInput(reason=e) vim_obj = body[_VIM] if config_param is not None: vim_utils.args2body_vim(config_param, vim_obj) tackerV10.update_dict(parsed_args, body[_VIM], ['tenant_id', 'name', 'description', 'is_default']) # type attribute is read-only, it can't be updated, so remove it # in update method body[_VIM].pop('type', None) return body def take_action(self, parsed_args): client = self.app.client_manager.tackerclient obj_id = tackerV10.find_resourceid_by_name_or_id( client, _VIM, parsed_args.id) vim = client.update_vim(obj_id, self.args2body(parsed_args)) display_columns, columns = _get_columns(vim[_VIM]) data = utils.get_item_properties( sdk_utils.DictModel(vim[_VIM]), columns, formatters=_formatters) return (display_columns, data) _formatters = { 'auth_cred': tacker_osc_utils.format_dict_with_indention, 'placement_attr': tacker_osc_utils.format_dict_with_indention, 'vim_project': tacker_osc_utils.format_dict_with_indention, } def _get_columns(item): column_map = { 'tenant_id': 'project_id', } return sdk_utils.get_osc_show_columns_for_sdk_resource(item, column_map) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7386198 
python-tackerclient-2.1.0/tackerclient/osc/v1/vnflcm/0000775000175000017500000000000000000000000022573 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v1/vnflcm/__init__.py0000664000175000017500000000000000000000000024672 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7386198 python-tackerclient-2.1.0/tackerclient/osc/v1/vnflcm/samples/0000775000175000017500000000000000000000000024237 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000021300000000000011451 xustar0000000000000000117 path=python-tackerclient-2.1.0/tackerclient/osc/v1/vnflcm/samples/change_ext_conn_vnf_instance_param_sample.json 22 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v1/vnflcm/samples/change_ext_conn_vnf_instance_param_samp0000664000175000017500000000325300000000000034244 0ustar00zuulzuul00000000000000{ "extVirtualLinks": [ { "id": "ext-vl-uuid-VL1", "resourceId": "neutron-network-uuid_VL1", "extCps": [ { "cpdId": "CP1", "cpConfig": [ { "cpProtocolData": [ { "layerProtocol": "IP_OVER_ETHERNET", "ipOverEthernet": { "ipAddresses": [ { "type": "IPV4", "numDynamicAddresses": 1, "subnetId": "subnet-uuid" } ] } } ] } ] }, { "cpdId": "CP2", "cpConfig": [ { "cpProtocolData": [ { "layerProtocol": "IP_OVER_ETHERNET", "ipOverEthernet": { "ipAddresses": [ { "type": "IPV4", "fixedAddresses": [ "10.0.0.1" ], "subnetId": "subnet-uuid" } ] } } ] } ] } ] } ], "vimConnectionInfo": [ { "id": "vim-uuid", "vimType": "ETSINFV.OPENSTACK_KEYSTONE.v_2", "vimConnectionId": "dummy-vimid", "interfaceInfo": { "key1":"value1", "key2":"value2" }, "accessInfo": { "key1":"value1", "key2":"value2" } } ] } ././@PaxHeader0000000000000000000000000000020700000000000011454 xustar0000000000000000113 path=python-tackerclient-2.1.0/tackerclient/osc/v1/vnflcm/samples/create_lccn_subscription_param_sample.json 22 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v1/vnflcm/samples/create_lccn_subscription_param_sample.j0000664000175000017500000000774700000000000034220 0ustar00zuulzuul00000000000000{ "filter": { "vnfInstanceSubscriptionFilter": { "vnfdIds": [ "dummy-vnfdId-1", "dummy-vnfdId-2" ], "vnfProductsFromProviders": [ { "vnfProvider": "dummy-vnfProvider-1", "vnfProducts": [ { "vnfProductName": "dummy-vnfProductName-1-1", "versions": [ { "vnfSoftwareVersion": "1.0", "vnfdVersions": ["1.0", "2.0"] }, { "vnfSoftwareVersion": "1.1", "vnfdVersions": ["1.1", "2.1"] } ] }, { "vnfProductName": "dummy-vnfProductName-1-2", "versions": [ { "vnfSoftwareVersion": "1.0", "vnfdVersions": ["1.0", "2.0"] }, { "vnfSoftwareVersion": "1.1", "vnfdVersions": ["1.1", "2.1"] } ] } ] }, { "vnfProvider": "dummy-vnfProvider-2", "vnfProducts": [ { "vnfProductName": "dummy-vnfProductName-2-1", "versions": [ { "vnfSoftwareVersion": "1.0", "vnfdVersions": ["1.0", "2.0"] }, { "vnfSoftwareVersion": "1.1", "vnfdVersions": ["1.1", "2.1"] } ] }, { "vnfProductName": "dummy-vnfProductName-2-2", "versions": [ { "vnfSoftwareVersion": "1.0", "vnfdVersions": ["1.0", "2.0"] }, { "vnfSoftwareVersion": "1.1", "vnfdVersions": ["1.1", "2.1"] } ] } ] } ], "vnfInstanceIds": [ "dummy-vnfInstanceId-1", "dummy-vnfInstanceId-2" ], "vnfInstanceNames": [ "dummy-vnfInstanceName-1", "dummy-vnfInstanceName-2" ] }, "notificationTypes": [ "VnfLcmOperationOccurrenceNotification", "VnfIdentifierCreationNotification", 
"VnfIdentifierDeletionNotification" ], "operationTypes": [ "INSTANTIATE", "SCALE", "TERMINATE", "HEAL", "MODIFY_INFO", "CHANGE_EXT_CONN" ], "operationStates": [ "COMPLETED", "FAILED", "FAILED_TEMP", "PROCESSING", "ROLLING_BACK", "ROLLED_BACK", "STARTING" ] }, "callbackUri": "http://localhost:9990/notification/callback/test", "authentication": { "authType": [ "BASIC" ], "paramsBasic": { "password": "test_pass", "userName": "test_user" } } } ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v1/vnflcm/samples/heal_vnf_instance_param_sample.json0000664000175000017500000000005000000000000033314 0ustar00zuulzuul00000000000000{ "additionalParams": {"all": true} } ././@PaxHeader0000000000000000000000000000020700000000000011454 xustar0000000000000000113 path=python-tackerclient-2.1.0/tackerclient/osc/v1/vnflcm/samples/instantiate_vnf_instance_param_sample.json 22 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v1/vnflcm/samples/instantiate_vnf_instance_param_sample.j0000664000175000017500000000472500000000000034223 0ustar00zuulzuul00000000000000{ "flavourId":"simple", "instantiationLevelId":"instantiation_level_1", "extVirtualLinks":[ { "id":"ext-vl-uuid-VL1", "vimConnectionId":"vim-uuid", "resourceProviderId":"resource-provider-id", "resourceId":"neutron-network-uuid_VL1", "extCps":[ { "cpdId":"CP1", "cpConfig":[ { "cpInstanceId":"cp-instance-id", "linkPortId":"link-port-uuid_CP1", "cpProtocolData":[ { "layerProtocol":"IP_OVER_ETHERNET", "ipOverEthernet":{ "macAddress":"00:25:96:FF:FE:12:34:56", "ipAddresses":[ { "addressRange":{ "minAddress":"192.168.11.01", "maxAddress":"192.168.21.201" }, "subnetId":"neutron-subnet-uuid_CP1" } ] } } ] } ] } ], "extLinkPorts":[ { "id":"link-port-uuid_CP1", "resourceHandle":{ "vimConnectionId":"vim-uuid", "resourceProviderId":"resource-provider-id", "resourceId":"neutron-port-uuid_CP1", "vimLevelResourceType":"LINKPORT" } } ] } ], "extManagedVirtualLinks":[ { "id":"extMngVLnk-uuid_VL3", "vnfVirtualLinkDescId":"VL3", "vimConnectionId":"vim-uuid", "resourceProviderId":"resource-provider-id", "resourceId":"neutron-network-uuid_VL3" } ], "vimConnectionInfo":[ { "id":"vim-uuid", "vimId":"dummy-vimid", "vimType":"ETSINFV.OPENSTACK_KEYSTONE.v_2", "interfaceInfo":{ "key1":"value1", "key2":"value2" }, "accessInfo":{ "key1":"value1", "key2":"value2" }, "extra":{ "key1":"value1", "key2":"value2" } } ] }././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v1/vnflcm/samples/scale_vnf_instance_param_sample.json0000664000175000017500000000007600000000000033502 0ustar00zuulzuul00000000000000{ "additionalParams": {"key1":"value1", "key2":"value2"} }././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v1/vnflcm/samples/update_vnf_instance_param_sample.json0000664000175000017500000000016600000000000033675 0ustar00zuulzuul00000000000000{ "vnfInstanceName": "sample", "vnfInstanceDescription" : "sample_description", "vnfdId" : "sample_id" }././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v1/vnflcm/vnflcm.py0000664000175000017500000005270200000000000024440 0ustar00zuulzuul00000000000000# Copyright (C) 2020 NTT DATA # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import logging import time from osc_lib.command import command from osc_lib import utils from tackerclient.common import exceptions from tackerclient.i18n import _ from tackerclient.osc import sdk_utils from tackerclient.osc import utils as tacker_osc_utils _attr_map = ( ('id', 'ID', tacker_osc_utils.LIST_BOTH), ('vnfInstanceName', 'VNF Instance Name', tacker_osc_utils.LIST_BOTH), ('instantiationState', 'Instantiation State', tacker_osc_utils.LIST_BOTH), ('vnfProvider', 'VNF Provider', tacker_osc_utils.LIST_BOTH), ('vnfSoftwareVersion', 'VNF Software Version', tacker_osc_utils.LIST_BOTH), ('vnfProductName', 'VNF Product Name', tacker_osc_utils.LIST_BOTH), ('vnfdId', 'VNFD ID', tacker_osc_utils.LIST_BOTH) ) LOG = logging.getLogger(__name__) _mixed_case_fields = ('vnfInstanceName', 'vnfInstanceDescription', 'vnfdId', 'vnfProvider', 'vnfProductName', 'vnfSoftwareVersion', 'vnfdVersion', 'instantiationState', 'vimConnectionInfo', 'instantiatedVnfInfo', 'vnfConfigurableProperties') _VNF_INSTANCE = 'vnf_instance' VNF_INSTANCE_TERMINATION_TIMEOUT = 300 EXTRA_WAITING_TIME = 10 SLEEP_TIME = 1 formatters = {'vimConnectionInfo': tacker_osc_utils.FormatComplexDataColumn, 'instantiatedVnfInfo': tacker_osc_utils.FormatComplexDataColumn, '_links': tacker_osc_utils.FormatComplexDataColumn} def _get_columns(vnflcm_obj, action=None): column_map = { 'id': 'ID', 'vnfInstanceName': 'VNF Instance Name', 'vnfInstanceDescription': 'VNF Instance Description', 'vnfdId': 'VNFD ID', 'vnfProvider': 'VNF Provider', 'vnfProductName': 'VNF Product Name', 'vnfSoftwareVersion': 'VNF Software Version', 'vnfdVersion': 'VNFD Version', 'instantiationState': 'Instantiation State', '_links': 'Links', 'vnfConfigurableProperties': 'VNF Configurable Properties', } if action == 'show': if vnflcm_obj['instantiationState'] == 'INSTANTIATED': column_map.update( {'instantiatedVnfInfo': 'Instantiated Vnf Info'} ) column_map.update( {'vimConnectionInfo': 'VIM Connection Info', '_links': 'Links'} ) return sdk_utils.get_osc_show_columns_for_sdk_resource(vnflcm_obj, column_map) class CreateVnfLcm(command.ShowOne): _description = _("Create a new VNF Instance") def get_parser(self, prog_name): parser = super(CreateVnfLcm, self).get_parser(prog_name) parser.add_argument( 'vnfd_id', metavar="", help=_('Identifier that identifies the VNFD which defines the ' 'VNF instance to be created.')) parser.add_argument( '--name', metavar="", help=_('Name of the VNF instance to be created.')) parser.add_argument( '--description', metavar="", help=_('Description of the VNF instance to be created.')) parser.add_argument( '--I', metavar="", help=_("Instantiate VNF subsequently after it's creation. 
" "Specify instantiate request parameters in a json file.")) return parser def args2body(self, parsed_args, file_path=None): body = {} if file_path: return tacker_osc_utils.jsonfile2body(file_path) body['vnfdId'] = parsed_args.vnfd_id if parsed_args.description: body['vnfInstanceDescription'] = parsed_args.description if parsed_args.name: body['vnfInstanceName'] = parsed_args.name return body def take_action(self, parsed_args): client = self.app.client_manager.tackerclient vnf = client.create_vnf_instance(self.args2body(parsed_args)) if parsed_args.I: # Instantiate VNF instance. result = client.instantiate_vnf_instance( vnf['id'], self.args2body(parsed_args, file_path=parsed_args.I)) if not result: print((_('VNF Instance %(id)s is created and instantiation' ' request has been accepted.') % {'id': vnf['id']})) display_columns, columns = _get_columns(vnf) data = utils.get_item_properties(sdk_utils.DictModel(vnf), columns, formatters=formatters, mixed_case_fields=_mixed_case_fields) return (display_columns, data) class ShowVnfLcm(command.ShowOne): _description = _("Display VNF instance details") def get_parser(self, prog_name): parser = super(ShowVnfLcm, self).get_parser(prog_name) parser.add_argument( _VNF_INSTANCE, metavar="", help=_("VNF instance ID to display")) return parser def take_action(self, parsed_args): client = self.app.client_manager.tackerclient obj = client.show_vnf_instance(parsed_args.vnf_instance) display_columns, columns = _get_columns(obj, action='show') data = utils.get_item_properties( sdk_utils.DictModel(obj), columns, mixed_case_fields=_mixed_case_fields, formatters=formatters) return (display_columns, data) class ListVnfLcm(command.Lister): _description = _("List VNF Instance") def get_parser(self, prog_name): parser = super(ListVnfLcm, self).get_parser(prog_name) return parser def take_action(self, parsed_args): _params = {} client = self.app.client_manager.tackerclient vnf_instances = client.list_vnf_instances(**_params) headers, columns = tacker_osc_utils.get_column_definitions( _attr_map, long_listing=True) return (headers, (utils.get_dict_properties( s, columns, mixed_case_fields=_mixed_case_fields, ) for s in vnf_instances)) class InstantiateVnfLcm(command.Command): _description = _("Instantiate a VNF Instance") def get_parser(self, prog_name): parser = super(InstantiateVnfLcm, self).get_parser(prog_name) parser.add_argument( _VNF_INSTANCE, metavar="", help=_("VNF instance ID to instantiate")) parser.add_argument( 'instantiation_request_file', metavar="", help=_('Specify instantiate request parameters in a json file.')) return parser def take_action(self, parsed_args): client = self.app.client_manager.tackerclient result = client.instantiate_vnf_instance( parsed_args.vnf_instance, tacker_osc_utils.jsonfile2body( parsed_args.instantiation_request_file)) if not result: print((_('Instantiate request for VNF Instance %(id)s has been' ' accepted.') % {'id': parsed_args.vnf_instance})) class HealVnfLcm(command.Command): _description = _("Heal VNF Instance") def get_parser(self, prog_name): parser = super(HealVnfLcm, self).get_parser(prog_name) usage_message = ('''%(prog)s [-h] [--cause CAUSE] [--vnfc-instance ''' '''[ ...]] [--additional-param-file ] -- ''') parser.usage = usage_message parser.add_argument( _VNF_INSTANCE, metavar="", help=_("VNF instance ID to heal")) parser.add_argument( '--cause', help=_('Specify the reason why a healing procedure is required.')) parser.add_argument( '--vnfc-instance', metavar="", nargs="+", help=_("List of VNFC instances requiring 
a healing action.") ) parser.add_argument( '--additional-param-file', metavar="", help=_("Additional parameters passed by the NFVO as input " "to the healing process.")) return parser def args2body(self, parsed_args): body = {} if parsed_args.cause: body['cause'] = parsed_args.cause if parsed_args.vnfc_instance: body['vnfcInstanceId'] = parsed_args.vnfc_instance if parsed_args.additional_param_file: body.update(tacker_osc_utils.jsonfile2body( parsed_args.additional_param_file)) return body def take_action(self, parsed_args): client = self.app.client_manager.tackerclient result = client.heal_vnf_instance( parsed_args.vnf_instance, self.args2body(parsed_args)) if not result: print((_('Heal request for VNF Instance %(id)s has been' ' accepted.') % {'id': parsed_args.vnf_instance})) class TerminateVnfLcm(command.Command): _description = _("Terminate a VNF instance") def get_parser(self, prog_name): parser = super(TerminateVnfLcm, self).get_parser(prog_name) parser.add_argument( _VNF_INSTANCE, metavar="", help=_("VNF instance ID to terminate")) parser.add_argument( "--termination-type", default='GRACEFUL', metavar="", choices=['GRACEFUL', 'FORCEFUL'], help=_("Termination type can be 'GRACEFUL' or 'FORCEFUL'. " "Default is 'GRACEFUL'")) parser.add_argument( '--graceful-termination-timeout', metavar="", type=int, help=_('This attribute is only applicable in case of graceful ' 'termination. It defines the time to wait for the VNF to be' ' taken out of service before shutting down the VNF and ' 'releasing the resources. The unit is seconds.')) parser.add_argument( '--D', action='store_true', default=False, help=_("Delete VNF Instance subsequently after its termination"), ) return parser def args2body(self, parsed_args): body = {} body['terminationType'] = parsed_args.termination_type if parsed_args.graceful_termination_timeout: if parsed_args.termination_type == 'FORCEFUL': raise exceptions.InvalidInput(reason='--graceful-termination-timeout' ' argument is invalid for "FORCEFUL"' ' termination') body['gracefulTerminationTimeout'] = parsed_args.graceful_termination_timeout return body def take_action(self, parsed_args): client = self.app.client_manager.tackerclient result = client.terminate_vnf_instance(parsed_args.vnf_instance, self.args2body(parsed_args)) if not result: print(_("Terminate request for VNF Instance '%(id)s' has been" " accepted.") % {'id': parsed_args.vnf_instance}) if parsed_args.D: print(_("Waiting for vnf instance to be terminated before " "deleting")) self._wait_until_vnf_is_terminated( client, parsed_args.vnf_instance, graceful_timeout=parsed_args.graceful_termination_timeout) result = client.delete_vnf_instance(parsed_args.vnf_instance) if not result: print(_("VNF Instance '%(id)s' is deleted successfully") % {'id': parsed_args.vnf_instance}) def _wait_until_vnf_is_terminated(self, client, vnf_instance_id, graceful_timeout=None): # wait until vnf instance 'instantiationState' is set to # 'NOT_INSTANTIATED' if graceful_timeout: # If graceful_termination_timeout is provided, # terminate vnf will start after this timeout period. # Hence, it should wait for extra time of 10 seconds # after this graceful_termination_timeout period.
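# For example, passing --graceful-termination-timeout 120 results in an
# overall polling deadline of 130 seconds (120 + EXTRA_WAITING_TIME).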
timeout = graceful_timeout + EXTRA_WAITING_TIME else: timeout = VNF_INSTANCE_TERMINATION_TIMEOUT start_time = int(time.time()) while True: vnf_instance = client.show_vnf_instance(vnf_instance_id) if vnf_instance['instantiationState'] == 'NOT_INSTANTIATED': break if ((int(time.time()) - start_time) > timeout): msg = _("Couldn't verify vnf instance is terminated within " "'%(timeout)s' seconds. Unable to delete vnf instance " "%(id)s") raise exceptions.CommandError( message=msg % {'timeout': timeout, 'id': vnf_instance_id}) time.sleep(SLEEP_TIME) class DeleteVnfLcm(command.Command): """Vnf lcm delete DeleteVnfLcm class supports bulk deletion of vnf instances, and error handling. """ _description = _("Delete VNF Instance(s)") def get_parser(self, prog_name): parser = super(DeleteVnfLcm, self).get_parser(prog_name) parser.add_argument( 'vnf_instances', metavar="", nargs="+", help=_("VNF instance ID(s) to delete")) return parser def take_action(self, parsed_args): error_count = 0 client = self.app.client_manager.tackerclient vnf_instances = parsed_args.vnf_instances for vnf_instance in vnf_instances: try: client.delete_vnf_instance(vnf_instance) except Exception as e: error_count += 1 LOG.error(_("Failed to delete vnf instance with " "ID '%(vnf)s': %(e)s"), {'vnf': vnf_instance, 'e': e}) total = len(vnf_instances) if (error_count > 0): msg = (_("Failed to delete %(error_count)s of %(total)s " "vnf instances.") % {'error_count': error_count, 'total': total}) raise exceptions.CommandError(message=msg) else: if total > 1: print(_('All specified vnf instances are deleted ' 'successfully')) else: print(_("Vnf instance '%s' is deleted " "successfully") % vnf_instances[0]) class UpdateVnfLcm(command.Command): _description = _("Update VNF Instance") def get_parser(self, prog_name): """Add arguments to parser. Args: prog_name ([string]): program name Returns: parser([ArgumentParser]): [description] """ parser = super(UpdateVnfLcm, self).get_parser(prog_name) parser.add_argument( _VNF_INSTANCE, metavar="", help=_('VNF instance ID to update.')) parser.add_argument( '--I', metavar="", help=_("Specify update request parameters in a json file.")) return parser def args2body(self, file_path=None): """Call jsonfile2body to store request body to body(dict) Args: file_path ([string], optional): file path of param file(json). Defaults to None. Returns: body ([dict]): Request body is stored """ body = {} if file_path: return tacker_osc_utils.jsonfile2body(file_path) return body def take_action(self, parsed_args): """Execute update_vnf_instance and output result comment Args: parsed_args ([Namespace]): [description] """ client = self.app.client_manager.tackerclient if parsed_args.I: # Update VNF instance. 
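# NOTE (editorial): the request body is taken entirely from the --I json
# file via jsonfile2body(); no other command-line options are merged in.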
result = client.update_vnf_instance( parsed_args.vnf_instance, self.args2body(file_path=parsed_args.I)) if not result: print((_('Update vnf:%(id)s ') % {'id': parsed_args.vnf_instance})) class ScaleVnfLcm(command.Command): _description = _("Scale a VNF Instance") def get_parser(self, prog_name): parser = super(ScaleVnfLcm, self).get_parser(prog_name) parser.add_argument( _VNF_INSTANCE, metavar="", help=_('VNF instance ID to scale')) parser.add_argument( '--number-of-steps', metavar="", type=int, help=_("Number of scaling steps to be executed as part of " "this Scale VNF operation.")) parser.add_argument( '--additional-param-file', metavar="", help=_("Additional parameters passed by the NFVO as input " "to the scaling process.")) scale_require_parameters = parser.add_argument_group( "require arguments" ) scale_require_parameters.add_argument( '--type', metavar="", required=True, choices=['SCALE_OUT', 'SCALE_IN'], help=_("SCALE_OUT or SCALE_IN for type of scale operation.")) scale_require_parameters.add_argument( '--aspect-id', required=True, metavar="", help=_("Identifier of the scaling aspect.")) return parser def args2body(self, parsed_args): """To store request body, call jsonfile2body. Args: parsed_args ([Namespace]): arguments of CLI. Returns: body ([dict]): Request body is stored """ body = {'type': parsed_args.type, 'aspectId': parsed_args.aspect_id} if parsed_args.number_of_steps: body['numberOfSteps'] = parsed_args.number_of_steps if parsed_args.additional_param_file: body.update(tacker_osc_utils.jsonfile2body( parsed_args.additional_param_file)) return body def take_action(self, parsed_args): """Execute scale_vnf_instance and output result comment. Args: parsed_args ([Namespace]): arguments of CLI. """ client = self.app.client_manager.tackerclient result = client.scale_vnf_instance( parsed_args.vnf_instance, self.args2body(parsed_args)) if not result: print((_('Scale request for VNF Instance %s has been accepted.') % parsed_args.vnf_instance)) class ChangeExtConnVnfLcm(command.Command): _description = _("Change External VNF Connectivity") def get_parser(self, prog_name): parser = super(ChangeExtConnVnfLcm, self).get_parser(prog_name) parser.add_argument( _VNF_INSTANCE, metavar="", help=_("VNF instance ID to Change External VNF Connectivity")) parser.add_argument( 'request_file', metavar="", help=_("Specify change-ext-conn request parameters " "in a json file.")) return parser def take_action(self, parsed_args): client = self.app.client_manager.tackerclient result = client.change_ext_conn_vnf_instance( parsed_args.vnf_instance, tacker_osc_utils.jsonfile2body( parsed_args.request_file)) if not result: print((_('Change External VNF Connectivity for VNF Instance %s ' 'has been accepted.') % parsed_args.vnf_instance)) class ChangeVnfPkgVnfLcm(command.Command): _description = _("Change Current VNF Package") def get_parser(self, prog_name): parser = super(ChangeVnfPkgVnfLcm, self).get_parser(prog_name) parser.add_argument( _VNF_INSTANCE, metavar="", help=_("VNF instance ID to Change Current VNF Package")) parser.add_argument( 'request_file', metavar="", help=_("Specify change-vnfpkg request parameters " "in a json file.")) return parser def take_action(self, parsed_args): client = self.app.client_manager.tackerclient result = client.change_vnfpkg_vnf_instance( parsed_args.vnf_instance, tacker_osc_utils.jsonfile2body( parsed_args.request_file)) if not result: print((_('Change Current VNF Package for VNF Instance %s ' 'has been accepted.') % parsed_args.vnf_instance)) 
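# ---------------------------------------------------------------------------
# Editorial sketch (not part of the upstream module): how the terminate ->
# poll -> delete flow implemented by TerminateVnfLcm above could be driven
# programmatically. The ``client`` argument is assumed to be the same
# tackerclient handle the commands obtain from
# ``self.app.client_manager.tackerclient``; everything else reuses names that
# are already defined or imported in this module.
def terminate_and_delete(client, vnf_instance_id, graceful_timeout=None):
    body = {'terminationType': 'GRACEFUL'}
    if graceful_timeout:
        body['gracefulTerminationTimeout'] = graceful_timeout
    client.terminate_vnf_instance(vnf_instance_id, body)
    # Poll until the instance reports NOT_INSTANTIATED, mirroring
    # TerminateVnfLcm._wait_until_vnf_is_terminated().
    timeout = (graceful_timeout + EXTRA_WAITING_TIME if graceful_timeout
               else VNF_INSTANCE_TERMINATION_TIMEOUT)
    start_time = int(time.time())
    while True:
        vnf_instance = client.show_vnf_instance(vnf_instance_id)
        if vnf_instance['instantiationState'] == 'NOT_INSTANTIATED':
            break
        if (int(time.time()) - start_time) > timeout:
            raise exceptions.CommandError(
                message='Timed out waiting for termination of %s'
                        % vnf_instance_id)
        time.sleep(SLEEP_TIME)
    client.delete_vnf_instance(vnf_instance_id)
# ---------------------------------------------------------------------------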
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v1/vnflcm/vnflcm_op_occs.py0000664000175000017500000002720500000000000026145 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from osc_lib.command import command from osc_lib import utils from tackerclient.i18n import _ from tackerclient.osc import sdk_utils from tackerclient.osc import utils as tacker_osc_utils _VNF_LCM_OP_OCC_ID = 'vnf_lcm_op_occ_id' _MIXED_CASE_FIELDS = ['operationState', 'stateEnteredTime', 'startTime', 'vnfInstanceId', 'grantId', 'isAutomaticInvocation', 'isCancelPending', 'cancelMode', 'operationParams', 'resourceChanges', 'changedInfo', 'changedExtConnectivity'] _FORMATTERS = { 'operationParams': tacker_osc_utils.FormatComplexDataColumn, 'error': tacker_osc_utils.FormatComplexDataColumn, 'resourceChanges': tacker_osc_utils.FormatComplexDataColumn, 'changedInfo': tacker_osc_utils.FormatComplexDataColumn, 'changedExtConnectivity': tacker_osc_utils.FormatComplexDataColumn, '_links': tacker_osc_utils.FormatComplexDataColumn } _ATTR_MAP = ( ('id', 'id', tacker_osc_utils.LIST_BOTH), ('operationState', 'operationState', tacker_osc_utils.LIST_BOTH), ('vnfInstanceId', 'vnfInstanceId', tacker_osc_utils.LIST_BOTH), ('operation', 'operation', tacker_osc_utils.LIST_BOTH) ) def _get_columns(vnflcm_op_occ_obj, action=None): column_map = { 'id': 'ID', 'operationState': 'Operation State', 'stateEnteredTime': 'State Entered Time', 'startTime': 'Start Time', 'vnfInstanceId': 'VNF Instance ID', 'operation': 'Operation', 'isAutomaticInvocation': 'Is Automatic Invocation', 'isCancelPending': 'Is Cancel Pending', 'error': 'Error', '_links': 'Links' } if action == 'show': column_map.update( {'operationParams': 'Operation Parameters', 'grantId': 'Grant ID', 'resourceChanges': 'Resource Changes', 'changedInfo': 'Changed Info', 'cancelMode': 'Cancel Mode', 'changedExtConnectivity': 'Changed External Connectivity'} ) return sdk_utils.get_osc_show_columns_for_sdk_resource(vnflcm_op_occ_obj, column_map) class RollbackVnfLcmOp(command.Command): def get_parser(self, prog_name): """Add arguments to parser. Args: prog_name ([type]): program name Returns: parser([ArgumentParser]): """ parser = super(RollbackVnfLcmOp, self).get_parser(prog_name) parser.add_argument( _VNF_LCM_OP_OCC_ID, metavar="", help=_('VNF lifecycle management operation occurrence ID.')) return parser def take_action(self, parsed_args): """Execute rollback_vnf_instance and output comment. Args: parsed_args ([Namespace]): arguments of CLI. """ client = self.app.client_manager.tackerclient result = client.rollback_vnf_instance(parsed_args.vnf_lcm_op_occ_id) if not result: print((_('Rollback request for LCM operation %(id)s has been' ' accepted') % {'id': parsed_args.vnf_lcm_op_occ_id})) class CancelVnfLcmOp(command.ShowOne): _description = _("Cancel VNF Instance") def get_parser(self, prog_name): """Add arguments to parser. 
Args: prog_name ([type]): program name Returns: parser([ArgumentParser]): """ parser = super(CancelVnfLcmOp, self).get_parser(prog_name) parser.add_argument( _VNF_LCM_OP_OCC_ID, metavar="", help=_('VNF lifecycle management operation occurrence ID.')) parser.add_argument( "--cancel-mode", default='GRACEFUL', metavar="", choices=['GRACEFUL', 'FORCEFUL'], help=_("Cancel mode can be 'GRACEFUL' or 'FORCEFUL'. " "Default is 'GRACEFUL'")) return parser def take_action(self, parsed_args): """Execute cancel_vnf_instance and output comment. Args: parsed_args ([Namespace]): arguments of CLI. """ client = self.app.client_manager.tackerclient result = client.cancel_vnf_instance( parsed_args.vnf_lcm_op_occ_id, {'cancelMode': parsed_args.cancel_mode}) if not result: print((_('Cancel request for LCM operation %(id)s has been' ' accepted') % {'id': parsed_args.vnf_lcm_op_occ_id})) class FailVnfLcmOp(command.ShowOne): _description = _("Fail VNF Instance") def get_parser(self, prog_name): """Add arguments to parser. Args: prog_name ([type]): program name Returns: parser([ArgumentParser]): """ parser = super(FailVnfLcmOp, self).get_parser(prog_name) parser.add_argument( _VNF_LCM_OP_OCC_ID, metavar="", help=_('VNF lifecycle management operation occurrence ID.')) return parser def take_action(self, parsed_args): """Execute fail_vnf_instance and output response. Args: parsed_args ([Namespace]): arguments of CLI. """ client = self.app.client_manager.tackerclient obj = client.fail_vnf_instance(parsed_args.vnf_lcm_op_occ_id) display_columns, columns = _get_columns(obj) data = utils.get_item_properties( sdk_utils.DictModel(obj), columns, formatters=_FORMATTERS, mixed_case_fields=_MIXED_CASE_FIELDS) return (display_columns, data) class RetryVnfLcmOp(command.Command): _description = _("Retry VNF Instance") def get_parser(self, prog_name): """Add arguments to parser. Args: prog_name ([type]): program name Returns: parser([ArgumentParser]): """ parser = super(RetryVnfLcmOp, self).get_parser(prog_name) parser.add_argument( _VNF_LCM_OP_OCC_ID, metavar="", help=_('VNF lifecycle management operation occurrence ID.')) return parser def take_action(self, parsed_args): """Execute retry_vnf_instance and output comment. Args: parsed_args ([Namespace]): arguments of CLI. """ client = self.app.client_manager.tackerclient result = client.retry_vnf_instance(parsed_args.vnf_lcm_op_occ_id) if not result: print((_('Retry request for LCM operation %(id)s has been' ' accepted') % {'id': parsed_args.vnf_lcm_op_occ_id})) class ListVnfLcmOp(command.Lister): _description = _("List LCM Operation Occurrences") def get_parser(self, program_name): """Add arguments to parser. Args: program_name ([type]): program name Returns: parser([ArgumentParser]): """ parser = super(ListVnfLcmOp, self).get_parser(program_name) parser.add_argument( "--filter", metavar="", help=_("Attribute-based-filtering parameters"), ) fields_exclusive_group = parser.add_mutually_exclusive_group( required=False) fields_exclusive_group.add_argument( "--fields", metavar="", help=_("Complex attributes to be included into the response"), ) fields_exclusive_group.add_argument( "--exclude-fields", metavar="", help=_("Complex attributes to be excluded from the response"), ) return parser def get_attributes(self, exclude=None): """Get attributes. Args: exclude([exclude]): a list of fields which needs to exclude. Returns: attributes([attributes]): a list of table entry definitions. Each entry should be a tuple consisting of (API attribute name, header name, listing mode). 
""" fields = [ { "key": "id", "value": "ID" }, { "key": "operationState", "value": "Operation State" }, { "key": "vnfInstanceId", "value": "VNF Instance ID" }, { "key": "operation", "value": "Operation" } ] attributes = [] if exclude is None: exclude = [] for field in fields: if field['value'] not in exclude: attributes.extend([(field['key'], field['value'], tacker_osc_utils.LIST_BOTH)]) return tuple(attributes) def take_action(self, parsed_args): """Execute list_vnflcm_op_occs and output response. Args: parsed_args ([Namespace]): arguments of CLI. """ params = {} exclude_fields = [] extra_fields = [] if parsed_args.filter: params['filter'] = parsed_args.filter if parsed_args.fields: params['fields'] = parsed_args.fields fields = parsed_args.fields.split(',') for field in fields: extra_fields.append(field.split('/')[0]) if parsed_args.exclude_fields: params['exclude-fields'] = parsed_args.exclude_fields fields = parsed_args.exclude_fields.split(',') exclude_fields.extend(fields) client = self.app.client_manager.tackerclient vnflcm_op_occs = client.list_vnf_lcm_op_occs(**params) headers, columns = tacker_osc_utils.get_column_definitions( self.get_attributes(exclude=exclude_fields), long_listing=True) dictionary_properties = (utils.get_dict_properties( s, columns, mixed_case_fields=_MIXED_CASE_FIELDS) for s in vnflcm_op_occs ) return (headers, dictionary_properties) class ShowVnfLcmOp(command.ShowOne): _description = _("Display Operation Occurrence details") def get_parser(self, program_name): """Add arguments to parser. Args: program_name ([type]): program name Returns: parser([ArgumentParser]): """ parser = super(ShowVnfLcmOp, self).get_parser(program_name) parser.add_argument( _VNF_LCM_OP_OCC_ID, metavar="", help=_('VNF lifecycle management operation occurrence ID.')) return parser def take_action(self, parsed_args): """Execute show_vnf_lcm_op_occs and output response. Args: parsed_args ([Namespace]): arguments of CLI. """ client = self.app.client_manager.tackerclient obj = client.show_vnf_lcm_op_occs(parsed_args.vnf_lcm_op_occ_id) display_columns, columns = _get_columns(obj, action='show') data = utils.get_item_properties( sdk_utils.DictModel(obj), columns, formatters=_FORMATTERS, mixed_case_fields=_MIXED_CASE_FIELDS) return (display_columns, data) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v1/vnflcm/vnflcm_subsc.py0000664000175000017500000001433600000000000025640 0ustar00zuulzuul00000000000000# Copyright (C) 2022 Nippon Telegraph and Telephone Corporation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import logging from osc_lib.command import command from osc_lib import utils from tackerclient.common import exceptions from tackerclient.i18n import _ from tackerclient.osc import sdk_utils from tackerclient.osc import utils as tacker_osc_utils LOG = logging.getLogger(__name__) _LCCN_SUBSCRIPTION_ID = 'subscription_id' _MIXED_CASE_FIELDS = ['filter', 'callbackUri'] _FORMATTERS = { 'filter': tacker_osc_utils.FormatComplexDataColumn, '_links': tacker_osc_utils.FormatComplexDataColumn } def _get_columns(lccn_subsc_obj): column_map = { 'id': 'ID', 'filter': 'Filter', 'callbackUri': 'Callback URI', '_links': 'Links' } return sdk_utils.get_osc_show_columns_for_sdk_resource(lccn_subsc_obj, column_map) class CreateLccnSubscription(command.ShowOne): _description = _("Create a new Lccn Subscription") def get_parser(self, prog_name): parser = super(CreateLccnSubscription, self).get_parser(prog_name) parser.add_argument( 'create_request_file', metavar="", help=_('Specify create request parameters in a json file.')) return parser def take_action(self, parsed_args): client = self.app.client_manager.tackerclient subsc = client.create_lccn_subscription( tacker_osc_utils.jsonfile2body(parsed_args.create_request_file)) display_columns, columns = _get_columns(subsc) data = utils.get_item_properties(sdk_utils.DictModel(subsc), columns, formatters=_FORMATTERS, mixed_case_fields=_MIXED_CASE_FIELDS) return (display_columns, data) class DeleteLccnSubscription(command.Command): _description = _("Delete Lccn Subscription(s)") def get_parser(self, prog_name): parser = super(DeleteLccnSubscription, self).get_parser(prog_name) parser.add_argument( _LCCN_SUBSCRIPTION_ID, metavar="", nargs="+", help=_("Lccn Subscription ID(s) to delete")) return parser def take_action(self, parsed_args): error_count = 0 client = self.app.client_manager.tackerclient lccn_subscriptions = parsed_args.subscription_id for lccn_subscription in lccn_subscriptions: try: client.delete_lccn_subscription(lccn_subscription) except Exception as e: error_count += 1 LOG.error(_("Failed to delete Lccn Subscription with " "ID '%(subsc)s': %(e)s"), {'subsc': lccn_subscription, 'e': e}) total = len(lccn_subscriptions) if (error_count > 0): msg = (_("Failed to delete %(error_count)s of %(total)s " "Lccn Subscriptions.") % {'error_count': error_count, 'total': total}) raise exceptions.CommandError(message=msg) else: if total > 1: print(_('All specified Lccn Subscriptions are deleted ' 'successfully')) else: print(_("Lccn Subscription '%s' is deleted " "successfully") % lccn_subscriptions[0]) class ListLccnSubscription(command.Lister): _description = _("List Lccn Subscriptions") def get_parser(self, program_name): parser = super(ListLccnSubscription, self).get_parser(program_name) parser.add_argument( "--filter", metavar="", help=_("Attribute-based-filtering parameters"), ) return parser def get_attributes(self, exclude=None): fields = [ { "key": "id", "value": "ID" }, { "key": "callbackUri", "value": "Callback URI" } ] attributes = [] for field in fields: attributes.extend([(field['key'], field['value'], tacker_osc_utils.LIST_BOTH)]) return tuple(attributes) def take_action(self, parsed_args): params = {} if parsed_args.filter: params['filter'] = parsed_args.filter client = self.app.client_manager.tackerclient subscriptions = client.list_lccn_subscriptions(**params) headers, columns = tacker_osc_utils.get_column_definitions( self.get_attributes(), long_listing=True) dictionary_properties = (utils.get_dict_properties( s, columns, 
mixed_case_fields=_MIXED_CASE_FIELDS) for s in subscriptions ) return (headers, dictionary_properties) class ShowLccnSubscription(command.ShowOne): _description = _("Display Lccn Subscription details") def get_parser(self, program_name): parser = super(ShowLccnSubscription, self).get_parser(program_name) parser.add_argument( _LCCN_SUBSCRIPTION_ID, metavar="", help=_('Lccn Subscription ID to display')) return parser def take_action(self, parsed_args): client = self.app.client_manager.tackerclient obj = client.show_lccn_subscription(parsed_args.subscription_id) display_columns, columns = _get_columns(obj) data = utils.get_item_properties( sdk_utils.DictModel(obj), columns, formatters=_FORMATTERS, mixed_case_fields=_MIXED_CASE_FIELDS) return (display_columns, data) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7386198 python-tackerclient-2.1.0/tackerclient/osc/v1/vnfpkgm/0000775000175000017500000000000000000000000022756 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v1/vnfpkgm/__init__.py0000664000175000017500000000000000000000000025055 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v1/vnfpkgm/vnf_package.py0000664000175000017500000004603600000000000025605 0ustar00zuulzuul00000000000000# Copyright (C) 2019 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
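# Editorial sketch (not part of the upstream module): the typical onboarding
# flow driven by the commands in this file is "create package" followed by
# "upload package content". ``client`` is assumed to be the tackerclient
# handle the commands obtain from ``self.app.client_manager.tackerclient``;
# the two client calls used here appear verbatim in CreateVnfPackage and
# UploadVnfPackage below.
def onboard_vnf_package(client, csar_path, user_data=None):
    # Create the (empty) package resource first, optionally with
    # userDefinedData, then stream the local CSAR file into it.
    body = {'userDefinedData': user_data} if user_data else {}
    vnf_package = client.create_vnf_package(body)
    with open(csar_path, 'rb') as file_data:
        client.upload_vnf_package(vnf_package['id'], file_data)
    return vnf_package['id']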
from functools import reduce import logging import sys from osc_lib.cli import parseractions from osc_lib.command import command from osc_lib import utils from tackerclient.common import exceptions from tackerclient.i18n import _ from tackerclient.osc import sdk_utils from tackerclient.osc import utils as tacker_osc_utils LOG = logging.getLogger(__name__) formatters = {'softwareImages': tacker_osc_utils.FormatComplexDataColumn, 'checksum': tacker_osc_utils.FormatComplexDataColumn, '_links': tacker_osc_utils.FormatComplexDataColumn, 'userDefinedData': tacker_osc_utils.FormatComplexDataColumn, 'additionalArtifacts': tacker_osc_utils.FormatComplexDataColumn} _mixed_case_fields = ('usageState', 'onboardingState', 'operationalState', 'vnfProductName', 'softwareImages', 'userDefinedData', 'vnfdId', 'vnfdVersion', 'vnfSoftwareVersion', 'vnfProvider', 'additionalArtifacts') def _get_columns(vnf_package_obj): column_map = { '_links': 'Links', 'onboardingState': 'Onboarding State', 'operationalState': 'Operational State', 'usageState': 'Usage State', 'userDefinedData': 'User Defined Data', 'id': 'ID' } if vnf_package_obj['onboardingState'] == 'ONBOARDED': column_map.update({ 'softwareImages': 'Software Images', 'vnfProvider': 'VNF Provider', 'vnfSoftwareVersion': 'VNF Software Version', 'vnfProductName': 'VNF Product Name', 'vnfdId': 'VNFD ID', 'vnfdVersion': 'VNFD Version', 'checksum': 'Checksum', 'additionalArtifacts': 'Additional Artifacts' }) return sdk_utils.get_osc_show_columns_for_sdk_resource(vnf_package_obj, column_map) class CreateVnfPackage(command.ShowOne): _description = _("Create a new VNF Package") def get_parser(self, prog_name): LOG.debug('get_parser(%s)', prog_name) parser = super(CreateVnfPackage, self).get_parser(prog_name) parser.add_argument( '--user-data', metavar='', action=parseractions.KeyValueAction, help=_('User defined data for the VNF package ' '(repeat option to set multiple user defined data)'), ) return parser def args2body(self, parsed_args): body = {} if parsed_args.user_data: body["userDefinedData"] = parsed_args.user_data return body def take_action(self, parsed_args): client = self.app.client_manager.tackerclient vnf_package = client.create_vnf_package(self.args2body(parsed_args)) display_columns, columns = _get_columns(vnf_package) data = utils.get_item_properties( sdk_utils.DictModel(vnf_package), columns, formatters=formatters, mixed_case_fields=_mixed_case_fields) return (display_columns, data) class ListVnfPackage(command.Lister): _description = _("List VNF Packages") def get_parser(self, prog_name): LOG.debug('get_parser(%s)', prog_name) parser = super(ListVnfPackage, self).get_parser(prog_name) parser.add_argument( "--filter", metavar="", help=_("Atrribute-based-filtering parameters"), ) fields_exclusive_group = parser.add_mutually_exclusive_group( required=False) fields_exclusive_group.add_argument( "--all_fields", action="store_true", default=False, help=_("Include all complex attributes in the response"), ) fields_exclusive_group.add_argument( "--fields", metavar="fields", help=_("Complex attributes to be included into the response"), ) fields_exclusive_group.add_argument( "--exclude_fields", metavar="exclude-fields", help=_("Complex attributes to be excluded from the response"), ) parser.add_argument( "--exclude_default", action="store_true", default=False, help=_("Indicates to exclude all complex attributes" " from the response. This argument can be used alone or" " with --fields and --filter. 
For all other combinations" " tacker server will throw bad request error"), ) return parser def case_modify(self, field): return reduce( lambda x, y: x + (' ' if y.isupper() else '') + y, field).title() def get_attributes(self, extra_fields=None, all_fields=False, exclude_fields=None, exclude_default=False): fields = ['id', 'vnfProductName', 'onboardingState', 'usageState', 'operationalState', '_links'] complex_fields = [ 'checksum', 'softwareImages', 'userDefinedData', 'additionalArtifacts'] simple_fields = ['vnfdVersion', 'vnfProvider', 'vnfSoftwareVersion', 'vnfdId'] if extra_fields: fields.extend(extra_fields) if exclude_fields: fields.extend([field for field in complex_fields if field not in exclude_fields]) if all_fields: fields.extend(complex_fields) fields.extend(simple_fields) if exclude_default: fields.extend(simple_fields) attrs = [] for field in fields: if field == '_links': attrs.extend([(field, 'Links', tacker_osc_utils.LIST_BOTH)]) else: attrs.extend([(field, self.case_modify(field), tacker_osc_utils.LIST_BOTH)]) return tuple(attrs) def take_action(self, parsed_args): _params = {} extra_fields = [] exclude_fields = [] all_fields = False exclude_default = False if parsed_args.filter: _params['filter'] = parsed_args.filter if parsed_args.fields: _params['fields'] = parsed_args.fields fields = parsed_args.fields.split(',') for field in fields: extra_fields.append(field.split('/')[0]) if parsed_args.exclude_fields: _params['exclude_fields'] = parsed_args.exclude_fields fields = parsed_args.exclude_fields.split(',') exclude_fields.extend(fields) if parsed_args.exclude_default: _params['exclude_default'] = None exclude_default = True if parsed_args.all_fields: _params['all_fields'] = None all_fields = True client = self.app.client_manager.tackerclient data = client.list_vnf_packages(**_params) headers, columns = tacker_osc_utils.get_column_definitions( self.get_attributes(extra_fields, all_fields, exclude_fields, exclude_default), long_listing=True) return (headers, (utils.get_dict_properties( s, columns, formatters=formatters, mixed_case_fields=_mixed_case_fields, ) for s in data['vnf_packages'])) class ShowVnfPackage(command.ShowOne): _description = _("Show VNF Package Details") def get_parser(self, prog_name): LOG.debug('get_parser(%s)', prog_name) parser = super(ShowVnfPackage, self).get_parser(prog_name) parser.add_argument( 'vnf_package', metavar="", help=_("VNF package ID") ) return parser def take_action(self, parsed_args): client = self.app.client_manager.tackerclient vnf_package = client.show_vnf_package(parsed_args.vnf_package) display_columns, columns = _get_columns(vnf_package) data = utils.get_item_properties( sdk_utils.DictModel(vnf_package), columns, formatters=formatters, mixed_case_fields=_mixed_case_fields) return (display_columns, data) class UploadVnfPackage(command.Command): _description = _("Upload VNF Package") def get_parser(self, prog_name): LOG.debug('get_parser(%s)', prog_name) parser = super(UploadVnfPackage, self).get_parser(prog_name) parser.add_argument( 'vnf_package', metavar="", help=_("VNF package ID") ) file_source = parser.add_mutually_exclusive_group(required=True) file_source.add_argument( "--path", metavar="", help=_("Upload VNF CSAR package from local file"), ) file_source.add_argument( "--url", metavar="", help=_("Uri of the VNF package content"), ) parser.add_argument( "--user-name", metavar="", help=_("User name for authentication"), ) parser.add_argument( "--password", metavar="", help=_("Password for authentication"), ) return parser 
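# take_action() below opens the local CSAR with open(path, 'rb') only when
# --path is given (with --url no local file handle is created) and always
# closes the handle in the finally block, even if the upload request fails.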
def take_action(self, parsed_args): client = self.app.client_manager.tackerclient attrs = {} if parsed_args.user_name: attrs['userName'] = parsed_args.user_name if parsed_args.password: attrs['password'] = parsed_args.password if parsed_args.url: attrs['url'] = parsed_args.url file_data = None try: if parsed_args.path: file_data = open(parsed_args.path, 'rb') result = client.upload_vnf_package(parsed_args.vnf_package, file_data, **attrs) if not result: print((_('Upload request for VNF package %(id)s has been' ' accepted.') % {'id': parsed_args.vnf_package})) finally: if file_data: file_data.close() class DeleteVnfPackage(command.Command): """Vnf package delete Delete class supports bulk deletion of vnf packages, and error handling. """ _description = _("Delete VNF Package") resource = 'vnf-package' def get_parser(self, prog_name): LOG.debug('get_parser(%s)', prog_name) parser = super(DeleteVnfPackage, self).get_parser(prog_name) parser.add_argument( 'vnf-package', metavar="", nargs="+", help=_("Vnf package(s) ID to delete") ) return parser def take_action(self, parsed_args): client = self.app.client_manager.tackerclient failure = False deleted_ids = [] failed_items = {} resources = getattr(parsed_args, self.resource, []) for resource_id in resources: try: vnf_package = client.show_vnf_package(resource_id) client.delete_vnf_package(vnf_package['id']) deleted_ids.append(resource_id) except Exception as e: failure = True failed_items[resource_id] = e if failure: msg = '' if deleted_ids: msg = (_('Successfully deleted %(resource)s(s):' ' %(deleted_list)s') % {'deleted_list': ', '.join(deleted_ids), 'resource': self.resource}) err_msg = _("\n\nUnable to delete the below" " 'vnf_package'(s):") for failed_id, error in failed_items.items(): err_msg += (_('\n Cannot delete %(failed_id)s: %(error)s') % {'failed_id': failed_id, 'error': error}) msg += err_msg raise exceptions.CommandError(message=msg) else: print((_('All specified %(resource)s(s) deleted successfully') % {'resource': self.resource})) return class DownloadVnfPackage(command.Command): _description = _("Download VNF package contents or VNFD of an on-boarded " "VNF package.") def get_parser(self, prog_name): parser = super(DownloadVnfPackage, self).get_parser(prog_name) parser.add_argument( "vnf_package", metavar="", help=_("VNF package ID") ) parser.add_argument( "--file", metavar="", help=_("Local file to save downloaded VNF Package or VNFD data. " "If this is not specified and there is no redirection " "then data will not be saved.") ) parser.add_argument( "--vnfd", action="store_true", default=False, help=_("Download VNFD of an on-boarded vnf package."), ) parser.add_argument( "--type", default="application/zip", metavar="", choices=["text/plain", "application/zip", "both"], help=_("Provide text/plain when VNFD is implemented as a single " "YAML file otherwise use application/zip. If you are not " "aware whether VNFD is a single or multiple yaml files, " "then you can specify 'both' option value. " "Provide this option only when --vnfd is set.") ) return parser def take_action(self, parsed_args): client = self.app.client_manager.tackerclient if parsed_args.vnfd: if sys.stdout.isatty() and not (parsed_args.file and parsed_args.type != "text/plain"): msg = ("No redirection or local file specified for downloaded " "VNFD data. 
Please specify a local file with --file to " "save downloaded VNFD data or use redirection.") sdk_utils.exit(msg) body = client.download_vnfd_from_vnf_package( parsed_args.vnf_package, parsed_args.type) if not parsed_args.file: print(body) return else: body = client.download_vnf_package(parsed_args.vnf_package) sdk_utils.save_data(body, parsed_args.file) class DownloadVnfPackageArtifact(command.Command): _description = _("Download VNF package artifact of an on-boarded " "VNF package.") def get_parser(self, prog_name): parser = super(DownloadVnfPackageArtifact, self).get_parser(prog_name) parser.add_argument( "vnf_package", metavar="", help=_("VNF package ID") ) parser.add_argument( "artifact_path", metavar="", help=_("The artifact file's path") ) parser.add_argument( "--file", metavar="", help=_("Local file to save downloaded VNF Package artifact " "file data. If this is not specified and " "there is no redirection then data will not be saved.") ) return parser def take_action(self, parsed_args): client = self.app.client_manager.tackerclient if sys.stdout.isatty() and not (parsed_args.file): msg = ( "No redirection or local file specified for downloaded " "vnf package artifact data. Please specify a " "local file with --file to " "save downloaded vnf package artifact data " "or use redirection.") sdk_utils.exit(msg) body = client.download_artifact_from_vnf_package( parsed_args.vnf_package, parsed_args.artifact_path) if not parsed_args.file: print(body) return else: sdk_utils.save_data(body, parsed_args.file) class UpdateVnfPackage(command.ShowOne): _description = _("Update information about an individual VNF package") def get_parser(self, prog_name): LOG.debug('get_parser(%s)', prog_name) parser = super(UpdateVnfPackage, self).get_parser(prog_name) parser.add_argument( 'vnf_package', metavar="", help=_("VNF package ID") ) parser.add_argument( '--operational-state', metavar="", choices=['ENABLED', 'DISABLED'], help=_("Change the operational state of VNF Package, Valid values" " are 'ENABLED' or 'DISABLED'.") ) parser.add_argument( '--user-data', metavar='', action=parseractions.KeyValueAction, help=_('User defined data for the VNF package ' '(repeat option to set multiple user defined data)'), ) return parser def get_columns(self, updated_values): column_map = {} if updated_values.get('userDefinedData'): column_map.update({'userDefinedData': 'User Defined Data'}) if updated_values.get('operationalState'): column_map.update({'operationalState': 'Operational State'}) return sdk_utils.get_osc_show_columns_for_sdk_resource(updated_values, column_map) def args2body(self, parsed_args): body = {} if not parsed_args.user_data and not parsed_args.operational_state: msg = ('Provide at least one of the argument from "--user-data"' ' or "--operational-state"') sdk_utils.exit(msg) if parsed_args.user_data: body["userDefinedData"] = parsed_args.user_data if parsed_args.operational_state: body["operationalState"] = parsed_args.operational_state return body def take_action(self, parsed_args): client = self.app.client_manager.tackerclient updated_values = client.update_vnf_package( parsed_args.vnf_package, self.args2body(parsed_args)) display_columns, columns = self.get_columns(updated_values) data = utils.get_item_properties( sdk_utils.DictModel(updated_values), columns, formatters=formatters, mixed_case_fields=_mixed_case_fields) return (display_columns, data) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7386198 
python-tackerclient-2.1.0/tackerclient/osc/v2/0000775000175000017500000000000000000000000021307 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v2/__init__.py0000664000175000017500000000000000000000000023406 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7386198 python-tackerclient-2.1.0/tackerclient/osc/v2/vnffm/0000775000175000017500000000000000000000000022423 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v2/vnffm/__init__.py0000664000175000017500000000000000000000000024522 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7386198 python-tackerclient-2.1.0/tackerclient/osc/v2/vnffm/samples/0000775000175000017500000000000000000000000024067 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000021000000000000011446 xustar0000000000000000114 path=python-tackerclient-2.1.0/tackerclient/osc/v2/vnffm/samples/create_vnf_fm_subscription_param_sample.json 22 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v2/vnffm/samples/create_vnf_fm_subscription_param_sample.0000664000175000017500000000331700000000000034217 0ustar00zuulzuul00000000000000{ "filter": { "vnfInstanceSubscriptionFilter": { "vnfdIds": [ "dummy-vnfdId-1" ], "vnfProductsFromProviders": [ { "vnfProvider": "dummy-vnfProvider-1", "vnfProducts": [ { "vnfProductName": "dummy-vnfProductName-1-1", "versions": [ { "vnfSoftwareVersion": 1.0, "vnfdVersions": [1.0, 2.0] } ] } ] } ], "vnfInstanceIds": [ "dummy-vnfInstanceId-1" ], "vnfInstanceNames": [ "dummy-vnfInstanceName-1" ] }, "notificationTypes": [ "AlarmNotification" ], "faultyResourceTypes": [ "COMPUTE" ], "perceivedSeverities": [ "WARNING" ], "eventTypes": [ "EQUIPMENT_ALARM" ], "probableCauses": [ "The server cannot be connected." ] }, "callbackUri": "/nfvo/notify/alarm", "authentication": { "authType": [ "BASIC", "OAUTH2_CLIENT_CREDENTIALS" ], "paramsBasic": { "userName": "nfvo", "password": "nfvopwd" }, "paramsOauth2ClientCredentials": { "clientId": "auth_user_name", "clientPassword": "auth_password", "tokenEndpoint": "token_endpoint" } } } ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v2/vnffm/vnffm_alarm.py0000664000175000017500000001447600000000000025301 0ustar00zuulzuul00000000000000# Copyright (C) 2022 Fujitsu # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
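# NOTE (editorial): the alarm-handling flow exposed by this module is
# list -> show -> update. UpdateVnfFmAlarm sends only
# {'ackState': 'ACKNOWLEDGED' | 'UNACKNOWLEDGED'} as the request body via
# client.update_vnf_fm_alarm(); all other alarm attributes are read-only
# from the CLI's point of view.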
import logging from osc_lib.command import command from osc_lib import utils from tackerclient.i18n import _ from tackerclient.osc import sdk_utils from tackerclient.osc import utils as tacker_osc_utils LOG = logging.getLogger(__name__) _ATTR_MAP = ( ('id', 'ID', tacker_osc_utils.LIST_BOTH), ('managedObjectId', 'Managed Object Id', tacker_osc_utils.LIST_BOTH), ('ackState', 'Ack State', tacker_osc_utils.LIST_BOTH), ('eventType', 'Event Type', tacker_osc_utils.LIST_BOTH), ('perceivedSeverity', 'Perceived Severity', tacker_osc_utils.LIST_BOTH), ('probableCause', 'Probable Cause', tacker_osc_utils.LIST_BOTH) ) _FORMATTERS = { 'vnfcInstanceIds': tacker_osc_utils.FormatComplexDataColumn, 'rootCauseFaultyResource': tacker_osc_utils.FormatComplexDataColumn, 'correlatedAlarmIds': tacker_osc_utils.FormatComplexDataColumn, 'faultDetails': tacker_osc_utils.FormatComplexDataColumn, '_links': tacker_osc_utils.FormatComplexDataColumn } _MIXED_CASE_FIELDS = ( 'managedObjectId', 'rootCauseFaultyResource', 'vnfcInstanceIds', 'alarmRaisedTime', 'alarmChangedTime', 'alarmClearedTime', 'alarmAcknowledgedTime', 'ackState', 'perceivedSeverity', 'eventTime', 'eventType', 'faultType', 'probableCause', 'isRootCause', 'correlatedAlarmIds', 'faultDetails' ) _VNF_FM_ALARM_ID = 'vnf_fm_alarm_id' def _get_columns(vnffm_alarm_obj, action=None): if action == 'update': column_map = { 'ackState': 'Ack State' } else: column_map = { 'id': 'ID', 'managedObjectId': 'Managed Object Id', 'ackState': 'Ack State', 'perceivedSeverity': 'Perceived Severity', 'eventType': 'Event Type', 'probableCause': 'Probable Cause' } if action == 'show': column_map.update({ 'vnfcInstanceIds': 'Vnfc Instance Ids', 'rootCauseFaultyResource': 'Root Cause Faulty Resource', 'alarmRaisedTime': 'Alarm Raised Time', 'alarmChangedTime': 'Alarm Changed Time', 'alarmClearedTime': 'Alarm Cleared Time', 'alarmAcknowledgedTime': 'Alarm Acknowledged Time', 'eventTime': 'Event Time', 'faultType': 'Fault Type', 'isRootCause': 'Is Root Cause', 'correlatedAlarmIds': 'Correlated Alarm Ids', 'faultDetails': 'Fault Details', '_links': 'Links' }) return sdk_utils.get_osc_show_columns_for_sdk_resource( vnffm_alarm_obj, column_map) class ListVnfFmAlarm(command.Lister): _description = _("List VNF FM alarms") def get_parser(self, prog_name): LOG.debug('get_parser(%s)', prog_name) parser = super(ListVnfFmAlarm, self).get_parser(prog_name) parser.add_argument( "--filter", metavar="", help=_("Attribute-based-filtering parameters"), ) return parser def take_action(self, parsed_args): _params = {} if parsed_args.filter: _params['filter'] = parsed_args.filter client = self.app.client_manager.tackerclient data = client.list_vnf_fm_alarms(**_params) headers, columns = tacker_osc_utils.get_column_definitions( _ATTR_MAP, long_listing=True) return (headers, (utils.get_dict_properties( s, columns, formatters=_FORMATTERS, mixed_case_fields=_MIXED_CASE_FIELDS, ) for s in data['vnf_fm_alarms'])) class ShowVnfFmAlarm(command.ShowOne): _description = _("Display VNF FM alarm details") def get_parser(self, prog_name): parser = super(ShowVnfFmAlarm, self).get_parser(prog_name) parser.add_argument( _VNF_FM_ALARM_ID, metavar="", help=_("VNF FM alarm ID to display")) return parser def take_action(self, parsed_args): client = self.app.client_manager.tackerclient obj = client.show_vnf_fm_alarm(parsed_args.vnf_fm_alarm_id) display_columns, columns = _get_columns(obj, action='show') data = utils.get_item_properties( sdk_utils.DictModel(obj), columns, mixed_case_fields=_MIXED_CASE_FIELDS, 
formatters=_FORMATTERS) return (display_columns, data) class UpdateVnfFmAlarm(command.ShowOne): _description = _("Update information about an individual VNF FM alarm") def get_parser(self, prog_name): LOG.debug('get_parser(%s)', prog_name) parser = super(UpdateVnfFmAlarm, self).get_parser(prog_name) parser.add_argument( _VNF_FM_ALARM_ID, metavar="", help=_("VNF FM alarm ID to update.") ) update_require_parameters = parser.add_argument_group( "require arguments" ) update_require_parameters.add_argument( "--ack-state", metavar="", choices=['ACKNOWLEDGED', 'UNACKNOWLEDGED'], help=_("Ask state can be 'ACKNOWLEDGED' or 'UNACKNOWLEDGED'.")) return parser def args2body(self, parsed_args): body = {'ackState': parsed_args.ack_state} return body def take_action(self, parsed_args): client = self.app.client_manager.tackerclient updated_values = client.update_vnf_fm_alarm( parsed_args.vnf_fm_alarm_id, self.args2body(parsed_args)) display_columns, columns = _get_columns( updated_values, action='update') data = utils.get_item_properties( sdk_utils.DictModel(updated_values), columns, mixed_case_fields=_MIXED_CASE_FIELDS, formatters=_FORMATTERS) return (display_columns, data) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v2/vnffm/vnffm_sub.py0000664000175000017500000001315700000000000024771 0ustar00zuulzuul00000000000000# Copyright (C) 2022 Fujitsu # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
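# Editor's note: an illustrative sketch (not part of the upstream module) of
# creating a VNF FM subscription programmatically.  It assumes an
# authenticated tackerclient client object and a request-body JSON file such
# as the create_vnf_fm_subscription_param_sample.json shipped in the samples
# directory above; jsonfile2body() is the same helper CreateVnfFmSub uses.
def _example_create_fm_subscription(client, request_file):
    """Create a VNF FM subscription from a JSON parameter file."""
    from tackerclient.osc import utils as tacker_osc_utils

    body = tacker_osc_utils.jsonfile2body(request_file)
    return client.create_vnf_fm_sub(body)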
import logging from osc_lib.command import command from osc_lib import utils from tackerclient.common import exceptions from tackerclient.i18n import _ from tackerclient.osc import sdk_utils from tackerclient.osc import utils as tacker_osc_utils LOG = logging.getLogger(__name__) _ATTR_MAP = ( ('id', 'ID', tacker_osc_utils.LIST_BOTH), ('callbackUri', 'Callback Uri', tacker_osc_utils.LIST_BOTH) ) _FORMATTERS = { 'filter': tacker_osc_utils.FormatComplexDataColumn, '_links': tacker_osc_utils.FormatComplexDataColumn } _MIXED_CASE_FIELDS = ( 'callbackUri' ) _VNF_FM_SUB_ID = 'vnf_fm_sub_id' def _get_columns(vnffm_sub_obj): column_map = { 'id': 'ID', 'filter': 'Filter', 'callbackUri': 'Callback Uri', '_links': 'Links' } return sdk_utils.get_osc_show_columns_for_sdk_resource( vnffm_sub_obj, column_map) class CreateVnfFmSub(command.ShowOne): _description = _("Create a new VNF FM subscription") def get_parser(self, prog_name): parser = super(CreateVnfFmSub, self).get_parser(prog_name) parser.add_argument( 'request_file', metavar="", help=_('Specify create VNF FM subscription request ' 'parameters in a json file.')) return parser def take_action(self, parsed_args): client = self.app.client_manager.tackerclient vnf_fm_sub = client.create_vnf_fm_sub( tacker_osc_utils.jsonfile2body(parsed_args.request_file)) display_columns, columns = _get_columns(vnf_fm_sub) data = utils.get_item_properties( sdk_utils.DictModel(vnf_fm_sub), columns, formatters=_FORMATTERS, mixed_case_fields=_MIXED_CASE_FIELDS) return (display_columns, data) class ListVnfFmSub(command.Lister): _description = _("List VNF FM subs") def get_parser(self, prog_name): LOG.debug('get_parser(%s)', prog_name) parser = super(ListVnfFmSub, self).get_parser(prog_name) parser.add_argument( "--filter", metavar="", help=_("Attribute-based-filtering parameters"), ) return parser def take_action(self, parsed_args): _params = {} if parsed_args.filter: _params['filter'] = parsed_args.filter client = self.app.client_manager.tackerclient data = client.list_vnf_fm_subs(**_params) headers, columns = tacker_osc_utils.get_column_definitions( _ATTR_MAP, long_listing=True) return (headers, (utils.get_dict_properties( s, columns, formatters=_FORMATTERS, mixed_case_fields=_MIXED_CASE_FIELDS, ) for s in data['vnf_fm_subs'])) class ShowVnfFmSub(command.ShowOne): _description = _("Display VNF FM subscription details") def get_parser(self, prog_name): parser = super(ShowVnfFmSub, self).get_parser(prog_name) parser.add_argument( _VNF_FM_SUB_ID, metavar="", help=_("VNF FM subscription ID to display")) return parser def take_action(self, parsed_args): client = self.app.client_manager.tackerclient obj = client.show_vnf_fm_sub(parsed_args.vnf_fm_sub_id) display_columns, columns = _get_columns(obj) data = utils.get_item_properties( sdk_utils.DictModel(obj), columns, mixed_case_fields=_MIXED_CASE_FIELDS, formatters=_FORMATTERS) return (display_columns, data) class DeleteVnfFmSub(command.Command): _description = _("Delete VNF FM subscription(s)") def get_parser(self, prog_name): parser = super(DeleteVnfFmSub, self).get_parser(prog_name) parser.add_argument( _VNF_FM_SUB_ID, metavar="", nargs="+", help=_("VNF FM subscription ID(s) to delete")) return parser def take_action(self, parsed_args): error_count = 0 client = self.app.client_manager.tackerclient vnf_fm_sub_ids = parsed_args.vnf_fm_sub_id for sub_id in vnf_fm_sub_ids: try: client.delete_vnf_fm_sub(sub_id) except Exception as e: error_count += 1 LOG.error(_("Failed to delete VNF FM subscription with " "ID '%(sub_id)s': 
%(e)s"), {'sub_id': sub_id, 'e': e}) total = len(vnf_fm_sub_ids) if error_count > 0: msg = (_("Failed to delete %(error_count)s of %(total)s " "VNF FM subscriptions.") % {'error_count': error_count, 'total': total}) raise exceptions.CommandError(message=msg) if total > 1: print(_('All specified VNF FM subscriptions are deleted ' 'successfully')) else: print(_("VNF FM subscription '%s' deleted " "successfully") % vnf_fm_sub_ids[0]) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7146199 python-tackerclient-2.1.0/tackerclient/osc/v2/vnflcm/0000775000175000017500000000000000000000000022574 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7386198 python-tackerclient-2.1.0/tackerclient/osc/v2/vnflcm/samples/0000775000175000017500000000000000000000000024240 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000021100000000000011447 xustar0000000000000000115 path=python-tackerclient-2.1.0/tackerclient/osc/v2/vnflcm/samples/change_vnfpkg_vnf_instance_param_sample.json 22 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v2/vnflcm/samples/change_vnfpkg_vnf_instance_param_sample0000664000175000017500000000223300000000000034241 0ustar00zuulzuul00000000000000{ "vnfdId": "c6595341-a5bb-8246-53c4-7aeb843d60c5", "additionalParams": { "upgrade_type": "RollingUpdate", "lcm-operation-coordinate-old-vnf": "./Scripts/coordinate_old_vnf.py", "lcm-operation-coordinate-old-vnf-class": "CoordinateOldVnf", "lcm-operation-coordinate-new-vnf": "./Scripts/coordinate_new_vnf.py", "lcm-operation-coordinate-new-vnf-class": "CoordinateNewVnf", "vdu_params": [{ "vduId": "VDU1", "old_vnfc_param": { "cp_name": "VDU1_CP1", "username": "ubuntu", "password": "ubuntu" }, "new_vnfc_param": { "cp_name": "VDU1_CP1", "username": "ubuntu", "password": "ubuntu" } }, { "vduId": "VDU2", "old_vnfc_param": { "cp_name": "VDU2_CP1", "username": "ubuntu", "password": "ubuntu" }, "new_vnfc_param": { "cp_name": "VDU2_CP1", "username": "ubuntu", "password": "ubuntu" } }] } } ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7426198 python-tackerclient-2.1.0/tackerclient/osc/v2/vnfpm/0000775000175000017500000000000000000000000022435 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v2/vnfpm/__init__.py0000664000175000017500000000000000000000000024534 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7426198 python-tackerclient-2.1.0/tackerclient/osc/v2/vnfpm/samples/0000775000175000017500000000000000000000000024101 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v2/vnfpm/samples/create_vnf_pm_job_param_sample.json0000664000175000017500000000166400000000000033166 0ustar00zuulzuul00000000000000{ "objectType": "VNFC", "objectInstanceIds": [ "object-instance-id-1" ], "subObjectInstanceIds": [ "sub-object-instance-id-2" ], "criteria": { "performanceMetric": [ "VCpuUsageMeanVnf.object-instance-id-1" ], "performanceMetricGroup": [ "VirtualisedComputeResource" ], "collectionPeriod": "500", "reportingPeriod": "1000", "reportingBoundary": "2022/07/25 10:43:55" }, "callbackUri": 
"/nfvo/notify/job", "authentication": { "authType": [ "BASIC", "OAUTH2_CLIENT_CREDENTIALS" ], "paramsBasic": { "userName": "nfvo", "password": "nfvopwd" }, "paramsOauth2ClientCredentials": { "clientId": "auth_user_name", "clientPassword": "auth_password", "tokenEndpoint": "token_endpoint" } } } ././@PaxHeader0000000000000000000000000000020500000000000011452 xustar0000000000000000111 path=python-tackerclient-2.1.0/tackerclient/osc/v2/vnfpm/samples/create_vnf_pm_threshold_param_sample.json 22 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v2/vnfpm/samples/create_vnf_pm_threshold_param_sample.jso0000664000175000017500000000221500000000000034223 0ustar00zuulzuul00000000000000{ "objectType": "Vnfc", "objectInstanceId": "object-instance-id-1", "subObjectInstanceIds": [ "sub-object-instance-id-2" ], "criteria": { "performanceMetric": "VCpuUsageMeanVnf.object-instance-id-1", "thresholdType": "SIMPLE", "simpleThresholdDetails": { "thresholdValue": 400.5, "hysteresis": 10.3 } }, "callbackUri": "/nfvo/notify/threshold", "authentication": { "authType": [ "BASIC", "OAUTH2_CLIENT_CREDENTIALS", "OAUTH2_CLIENT_CERT" ], "paramsBasic": { "userName": "nfvo", "password": "nfvopwd" }, "paramsOauth2ClientCredentials": { "clientId": "auth_user_name", "clientPassword": "auth_password", "tokenEndpoint": "token_endpoint" }, "paramsOauth2ClientCert": { "clientId": "test", "certificateRef": { "type": "x5t#256", "value": "03c6e188d1fe5d3da8c9bc9a8dc531a2b3e" }, "tokenEndpoint": "http://127.0.0.1/token" } } } ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v2/vnfpm/samples/update_vnf_pm_job_param_sample.json0000664000175000017500000000071100000000000033175 0ustar00zuulzuul00000000000000{ "callbackUri": "/nfvo/notify/job", "authentication": { "authType": [ "BASIC", "OAUTH2_CLIENT_CREDENTIALS" ], "paramsBasic": { "userName": "nfvo", "password": "nfvopwd" }, "paramsOauth2ClientCredentials": { "clientId": "auth_user_name", "clientPassword": "auth_password", "tokenEndpoint": "token_endpoint" } } } ././@PaxHeader0000000000000000000000000000020500000000000011452 xustar0000000000000000111 path=python-tackerclient-2.1.0/tackerclient/osc/v2/vnfpm/samples/update_vnf_pm_threshold_param_sample.json 22 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v2/vnfpm/samples/update_vnf_pm_threshold_param_sample.jso0000664000175000017500000000140700000000000034244 0ustar00zuulzuul00000000000000{ "callbackUri": "/nfvo/notify/threshold", "authentication": { "authType": [ "BASIC", "OAUTH2_CLIENT_CREDENTIALS", "OAUTH2_CLIENT_CERT" ], "paramsBasic": { "userName": "nfvo", "password": "nfvopwd" }, "paramsOauth2ClientCredentials": { "clientId": "auth_user_name", "clientPassword": "auth_password", "tokenEndpoint": "token_endpoint" }, "paramsOauth2ClientCert": { "clientId": "test", "certificateRef": { "type": "x5t#256", "value": "03c6e188d1fe5d3da8c9bc9a8dc531a2b3e" }, "tokenEndpoint": "http://127.0.0.1/token" } } } ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v2/vnfpm/vnfpm_job.py0000664000175000017500000002466600000000000025005 0ustar00zuulzuul00000000000000# Copyright (C) 2022 Fujitsu # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import logging from functools import reduce from osc_lib.command import command from osc_lib import utils from tackerclient.common import exceptions from tackerclient.i18n import _ from tackerclient.osc import sdk_utils from tackerclient.osc import utils as tacker_osc_utils LOG = logging.getLogger(__name__) _FORMATTERS = { 'objectInstanceIds': tacker_osc_utils.FormatComplexDataColumn, 'subObjectInstanceIds': tacker_osc_utils.FormatComplexDataColumn, 'criteria': tacker_osc_utils.FormatComplexDataColumn, 'reports': tacker_osc_utils.FormatComplexDataColumn, '_links': tacker_osc_utils.FormatComplexDataColumn } _MIXED_CASE_FIELDS = ( 'objectType', 'objectInstanceIds', 'subObjectInstanceIds', 'callbackUri' ) _MIXED_CASE_FIELDS_UPDATE = ( 'callbackUri' ) _VNF_PM_JOB_ID = 'vnf_pm_job_id' def _get_columns(vnfpm_job_obj, action=None): if action == 'update': column_map = { 'callbackUri': 'Callback Uri' } else: column_map = { 'id': 'ID', 'objectType': 'Object Type', 'objectInstanceIds': 'Object Instance Ids', 'subObjectInstanceIds': 'Sub Object Instance Ids', 'criteria': 'Criteria', 'callbackUri': 'Callback Uri', 'reports': 'Reports', '_links': 'Links' } if action == 'show': column_map.update( {'reports': 'Reports'} ) return sdk_utils.get_osc_show_columns_for_sdk_resource( vnfpm_job_obj, column_map) class CreateVnfPmJob(command.ShowOne): _description = _("Create a new VNF PM job") def get_parser(self, prog_name): parser = super(CreateVnfPmJob, self).get_parser(prog_name) parser.add_argument( 'request_file', metavar="", help=_('Specify create VNF PM job request ' 'parameters in a json file.')) return parser def take_action(self, parsed_args): client = self.app.client_manager.tackerclient vnf_pm_job = client.create_vnf_pm_job( tacker_osc_utils.jsonfile2body(parsed_args.request_file)) display_columns, columns = _get_columns(vnf_pm_job) data = utils.get_item_properties( sdk_utils.DictModel(vnf_pm_job), columns, formatters=_FORMATTERS, mixed_case_fields=_MIXED_CASE_FIELDS) return (display_columns, data) class ListVnfPmJob(command.Lister): _description = _("List VNF PM jobs") def get_parser(self, prog_name): LOG.debug('get_parser(%s)', prog_name) parser = super(ListVnfPmJob, self).get_parser(prog_name) parser.add_argument( "--filter", metavar="", help=_("Attribute-based-filtering parameters"), ) fields_exclusive_group = parser.add_mutually_exclusive_group( required=False) fields_exclusive_group.add_argument( "--all_fields", action="store_true", default=False, help=_("Include all complex attributes in the response"), ) fields_exclusive_group.add_argument( "--fields", metavar="fields", help=_("Complex attributes to be included into the response"), ) fields_exclusive_group.add_argument( "--exclude_fields", metavar="exclude-fields", help=_("Complex attributes to be excluded from the response"), ) parser.add_argument( "--exclude_default", action="store_true", default=False, help=_("Indicates to exclude all complex attributes" " from the response. This argument can be used alone or" " with --fields and --filter. 
For all other combinations" " tacker server will throw bad request error"), ) return parser def case_modify(self, field): return reduce( lambda x, y: x + (' ' if y.isupper() else '') + y, field).title() def get_attributes(self, extra_fields=None, all_fields=False, exclude_fields=None, exclude_default=False): fields = ['id', 'objectType', '_links'] complex_fields = [ 'objectInstanceIds', 'subObjectInstanceIds', 'criteria', 'reports'] simple_fields = ['callbackUri'] if extra_fields: fields.extend(extra_fields) if exclude_fields: fields.extend([field for field in complex_fields if field not in exclude_fields]) if all_fields: fields.extend(complex_fields) fields.extend(simple_fields) if exclude_default: fields.extend(simple_fields) attrs = [] for field in fields: if field == '_links': attrs.extend([(field, 'Links', tacker_osc_utils.LIST_BOTH)]) else: attrs.extend([(field, self.case_modify(field), tacker_osc_utils.LIST_BOTH)]) return tuple(attrs) def take_action(self, parsed_args): _params = {} extra_fields = [] exclude_fields = [] all_fields = False exclude_default = False if parsed_args.filter: _params['filter'] = parsed_args.filter if parsed_args.fields: _params['fields'] = parsed_args.fields fields = parsed_args.fields.split(',') for field in fields: extra_fields.append(field.split('/')[0]) if parsed_args.exclude_fields: _params['exclude_fields'] = parsed_args.exclude_fields fields = parsed_args.exclude_fields.split(',') exclude_fields.extend(fields) if parsed_args.exclude_default: _params['exclude_default'] = None exclude_default = True if parsed_args.all_fields: _params['all_fields'] = None all_fields = True client = self.app.client_manager.tackerclient data = client.list_vnf_pm_jobs(**_params) headers, columns = tacker_osc_utils.get_column_definitions( self.get_attributes(extra_fields, all_fields, exclude_fields, exclude_default), long_listing=True) return (headers, (utils.get_dict_properties( s, columns, formatters=_FORMATTERS, mixed_case_fields=_MIXED_CASE_FIELDS, ) for s in data['vnf_pm_jobs'])) class ShowVnfPmJob(command.ShowOne): _description = _("Display VNF PM job details") def get_parser(self, prog_name): parser = super(ShowVnfPmJob, self).get_parser(prog_name) parser.add_argument( _VNF_PM_JOB_ID, metavar="", help=_("VNF PM job ID to display")) return parser def take_action(self, parsed_args): client = self.app.client_manager.tackerclient obj = client.show_vnf_pm_job(parsed_args.vnf_pm_job_id) display_columns, columns = _get_columns(obj, action='show') data = utils.get_item_properties( sdk_utils.DictModel(obj), columns, mixed_case_fields=_MIXED_CASE_FIELDS, formatters=_FORMATTERS) return (display_columns, data) class UpdateVnfPmJob(command.ShowOne): _description = _("Update information about an individual VNF PM job") def get_parser(self, prog_name): LOG.debug('get_parser(%s)', prog_name) parser = super(UpdateVnfPmJob, self).get_parser(prog_name) parser.add_argument( _VNF_PM_JOB_ID, metavar="", help=_("VNF PM job ID to update.") ) parser.add_argument( 'request_file', metavar="", help=_('Specify update PM job request ' 'parameters in a json file.')) return parser def take_action(self, parsed_args): client = self.app.client_manager.tackerclient updated_values = client.update_vnf_pm_job( parsed_args.vnf_pm_job_id, tacker_osc_utils.jsonfile2body(parsed_args.request_file)) display_columns, columns = _get_columns(updated_values, 'update') data = utils.get_item_properties( sdk_utils.DictModel(updated_values), columns, mixed_case_fields=_MIXED_CASE_FIELDS_UPDATE) return (display_columns, 
data) class DeleteVnfPmJob(command.Command): _description = _("Delete VNF PM job") def get_parser(self, prog_name): parser = super(DeleteVnfPmJob, self).get_parser(prog_name) parser.add_argument( _VNF_PM_JOB_ID, metavar="", nargs="+", help=_("VNF PM job ID(s) to delete")) return parser def take_action(self, parsed_args): error_count = 0 client = self.app.client_manager.tackerclient vnf_pm_job_ids = parsed_args.vnf_pm_job_id for job_id in vnf_pm_job_ids: try: client.delete_vnf_pm_job(job_id) except Exception as e: error_count += 1 LOG.error(_("Failed to delete VNF PM job with " "ID '%(job_id)s': %(e)s"), {'job_id': job_id, 'e': e}) total = len(vnf_pm_job_ids) if error_count > 0: msg = (_("Failed to delete %(error_count)s of %(total)s " "VNF PM jobs.") % {'error_count': error_count, 'total': total}) raise exceptions.CommandError(message=msg) if total > 1: print(_('All specified VNF PM jobs are deleted ' 'successfully')) else: print(_("VNF PM job '%s' deleted " "successfully") % vnf_pm_job_ids[0]) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v2/vnfpm/vnfpm_report.py0000664000175000017500000000424000000000000025530 0ustar00zuulzuul00000000000000# Copyright (C) 2022 Fujitsu # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import logging from osc_lib.command import command from osc_lib import utils from tackerclient.i18n import _ from tackerclient.osc import sdk_utils from tackerclient.osc import utils as tacker_osc_utils LOG = logging.getLogger(__name__) _FORMATTERS = { 'entries': tacker_osc_utils.FormatComplexDataColumn } _VNF_PM_JOB_ID = 'vnf_pm_job_id' _VNF_PM_REPORT_ID = 'vnf_pm_report_id' def _get_columns(vnfpm_report_obj): column_map = { 'entries': 'Entries' } return sdk_utils.get_osc_show_columns_for_sdk_resource( vnfpm_report_obj, column_map) class ShowVnfPmReport(command.ShowOne): _description = _("Display VNF PM report details") def get_parser(self, prog_name): parser = super(ShowVnfPmReport, self).get_parser(prog_name) parser.add_argument( _VNF_PM_JOB_ID, metavar="", help=_("VNF PM job id where the VNF PM report is located")) parser.add_argument( _VNF_PM_REPORT_ID, metavar="", help=_("VNF PM report ID to display")) return parser def take_action(self, parsed_args): client = self.app.client_manager.tackerclient obj = client.show_vnf_pm_report( parsed_args.vnf_pm_job_id, parsed_args.vnf_pm_report_id) display_columns, columns = _get_columns(obj) data = utils.get_item_properties( sdk_utils.DictModel(obj), columns, formatters=_FORMATTERS, mixed_case_fields=None) return (display_columns, data) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/osc/v2/vnfpm/vnfpm_threshold.py0000664000175000017500000001715300000000000026220 0ustar00zuulzuul00000000000000# Copyright (C) 2023 Fujitsu # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import logging from osc_lib.command import command from osc_lib import utils from tackerclient.common import exceptions from tackerclient.i18n import _ from tackerclient.osc import sdk_utils from tackerclient.osc import utils as tacker_osc_utils LOG = logging.getLogger(__name__) _ATTR_MAP = ( ('id', 'ID', tacker_osc_utils.LIST_BOTH), ('objectType', 'Object Type', tacker_osc_utils.LIST_BOTH), ('_links', 'Links', tacker_osc_utils.LIST_BOTH) ) _FORMATTERS = { 'subObjectInstanceIds': tacker_osc_utils.FormatComplexDataColumn, 'criteria': tacker_osc_utils.FormatComplexDataColumn, '_links': tacker_osc_utils.FormatComplexDataColumn } _MIXED_CASE_FIELDS = ( 'objectType', 'objectInstanceId', 'subObjectInstanceIds', 'callbackUri' ) _MIXED_CASE_FIELDS_UPDATE = ( 'callbackUri' ) _VNF_PM_THRESHOLD_ID = 'vnf_pm_threshold_id' def _get_columns(vnf_pm_threshold, action=None): if action == 'update': column_map = { 'callbackUri': 'Callback Uri' } else: column_map = { 'id': 'ID', 'objectType': 'Object Type', 'objectInstanceId': 'Object Instance Id', 'subObjectInstanceIds': 'Sub Object Instance Ids', 'criteria': 'Criteria', 'callbackUri': 'Callback Uri', '_links': 'Links' } return sdk_utils.get_osc_show_columns_for_sdk_resource( vnf_pm_threshold, column_map) class CreateVnfPmThreshold(command.ShowOne): _description = _("Create a new VNF PM threshold") def get_parser(self, prog_name): LOG.debug('get_parser(%s)', prog_name) parser = super(CreateVnfPmThreshold, self).get_parser(prog_name) parser.add_argument( 'request_file', metavar="", help=_('Specify create VNF PM threshold request ' 'parameters in a json file.')) return parser def take_action(self, parsed_args): client = self.app.client_manager.tackerclient vnf_pm_threshold = client.create_vnf_pm_threshold( tacker_osc_utils.jsonfile2body(parsed_args.request_file)) display_columns, columns = _get_columns(vnf_pm_threshold) data = utils.get_item_properties( sdk_utils.DictModel(vnf_pm_threshold), columns, formatters=_FORMATTERS, mixed_case_fields=_MIXED_CASE_FIELDS) return (display_columns, data) class ListVnfPmThreshold(command.Lister): _description = _("List VNF PM thresholds") def get_parser(self, prog_name): LOG.debug('get_parser(%s)', prog_name) parser = super(ListVnfPmThreshold, self).get_parser(prog_name) parser.add_argument( "--filter", metavar="", help=_("Attribute-based-filtering parameters"), ) return parser def take_action(self, parsed_args): _params = {} if parsed_args.filter: _params['filter'] = parsed_args.filter client = self.app.client_manager.tackerclient data = client.list_vnf_pm_thresholds(**_params) headers, columns = tacker_osc_utils.get_column_definitions( _ATTR_MAP, long_listing=True) return (headers, (utils.get_dict_properties( s, columns, formatters=_FORMATTERS, mixed_case_fields=_MIXED_CASE_FIELDS, ) for s in data['vnf_pm_thresholds'])) class ShowVnfPmThreshold(command.ShowOne): _description = _("Display VNF PM threshold details") def get_parser(self, prog_name): LOG.debug('get_parser(%s)', prog_name) 
parser = super(ShowVnfPmThreshold, self).get_parser(prog_name) parser.add_argument( _VNF_PM_THRESHOLD_ID, metavar="", help=_("VNF PM threshold ID to display")) return parser def take_action(self, parsed_args): client = self.app.client_manager.tackerclient obj = client.show_vnf_pm_threshold(parsed_args.vnf_pm_threshold_id) display_columns, columns = _get_columns(obj) data = utils.get_item_properties( sdk_utils.DictModel(obj), columns, mixed_case_fields=_MIXED_CASE_FIELDS, formatters=_FORMATTERS) return (display_columns, data) class UpdateVnfPmThreshold(command.ShowOne): _description = _("Update information about an individual VNF PM threshold") def get_parser(self, prog_name): LOG.debug('get_parser(%s)', prog_name) parser = super(UpdateVnfPmThreshold, self).get_parser(prog_name) parser.add_argument( _VNF_PM_THRESHOLD_ID, metavar="", help=_("VNF PM threshold ID to update.") ) parser.add_argument( 'request_file', metavar="", help=_('Specify update PM threshold request ' 'parameters in a json file.')) return parser def take_action(self, parsed_args): client = self.app.client_manager.tackerclient updated_values = client.update_vnf_pm_threshold( parsed_args.vnf_pm_threshold_id, tacker_osc_utils.jsonfile2body(parsed_args.request_file)) display_columns, columns = _get_columns(updated_values, 'update') data = utils.get_item_properties( sdk_utils.DictModel(updated_values), columns, mixed_case_fields=_MIXED_CASE_FIELDS_UPDATE) return (display_columns, data) class DeleteVnfPmThreshold(command.Command): _description = _("Delete VNF PM threshold") def get_parser(self, prog_name): LOG.debug('get_parser(%s)', prog_name) parser = super(DeleteVnfPmThreshold, self).get_parser(prog_name) parser.add_argument( _VNF_PM_THRESHOLD_ID, metavar="", nargs="+", help=_("VNF PM threshold ID(s) to delete")) return parser def take_action(self, parsed_args): error_count = 0 client = self.app.client_manager.tackerclient vnf_pm_threshold_ids = parsed_args.vnf_pm_threshold_id for threshold_id in vnf_pm_threshold_ids: try: client.delete_vnf_pm_threshold(threshold_id) except Exception as e: error_count += 1 LOG.error(_("Failed to delete VNF PM threshold with " "ID '%(threshold_id)s': %(e)s"), {'threshold_id': threshold_id, 'e': e}) total = len(vnf_pm_threshold_ids) if error_count > 0: msg = (_("Failed to delete %(error_count)s of %(total)s " "VNF PM thresholds.") % {'error_count': error_count, 'total': total}) raise exceptions.CommandError(message=msg) if total > 1: print(_('All specified VNF PM thresholds are deleted ' 'successfully')) return print(_("VNF PM threshold '%s' deleted " "successfully") % vnf_pm_threshold_ids[0]) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/shell.py0000664000175000017500000007457000000000000021672 0ustar00zuulzuul00000000000000# Copyright 2012 OpenStack Foundation. # All Rights Reserved # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
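# Editor's note: an illustrative sketch only (not part of the upstream
# module).  The shell can be driven programmatically through main();
# credentials are taken from the usual OS_* environment variables (or the
# --os-* options) exactly as on the command line, and the return value is
# the process exit status rather than a raised SystemExit.
def _example_run_vim_list():
    """Roughly equivalent to running "tacker vim-list" from a shell."""
    return main(['vim-list'])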
# """ Command-line interface to the Tacker APIs """ import argparse import getpass import inspect import itertools import logging import os import sys from urllib import parse as urlparse from cliff import app from cliff import commandmanager from keystoneclient.auth.identity import v2 as v2_auth from keystoneclient.auth.identity import v3 as v3_auth from keystoneclient import discover from keystoneclient import exceptions as ks_exc from keystoneclient import session from oslo_utils import encodeutils from tackerclient.common import clientmanager from tackerclient.common import command as openstack_command from tackerclient.common import exceptions as exc from tackerclient.common import extension as client_extension from tackerclient.common import utils from tackerclient.i18n import _ from tackerclient.tacker.v1_0.nfvo import vim from tackerclient.version import __version__ VERSION = '1.0' TACKER_API_VERSION = '1.0' def run_command(cmd, cmd_parser, sub_argv): _argv = sub_argv index = -1 values_specs = [] if '--' in sub_argv: index = sub_argv.index('--') _argv = sub_argv[:index] values_specs = sub_argv[index:] known_args, _values_specs = cmd_parser.parse_known_args(_argv) cmd.values_specs = (index == -1 and _values_specs or values_specs) return cmd.run(known_args) def env(*_vars, **kwargs): """Search for the first defined of possibly many env vars. Returns the first environment variable defined in vars, or returns the default defined in kwargs. """ for v in _vars: value = os.environ.get(v, None) if value: return value return kwargs.get('default', '') def check_non_negative_int(value): try: value = int(value) except ValueError: raise argparse.ArgumentTypeError(_("invalid int value: %r") % value) if value < 0: raise argparse.ArgumentTypeError(_("input value %d is negative") % value) return value class BashCompletionCommand(openstack_command.OpenStackCommand): """Prints all of the commands and options for bash-completion.""" resource = "bash_completion" COMMAND_V1 = { 'bash-completion': BashCompletionCommand, # MANO lingo 'vim-register': vim.CreateVIM, 'vim-update': vim.UpdateVIM, 'vim-delete': vim.DeleteVIM, 'vim-list': vim.ListVIM, 'vim-show': vim.ShowVIM } COMMANDS = {'1.0': COMMAND_V1} class HelpAction(argparse.Action): """Provides a custom action for the -h and --help options. The commands are determined by checking the CommandManager instance, passed in as the "default" value for the action. 
:returns: a list of the commands """ def __call__(self, parser, namespace, values, option_string=None): outputs = [] max_len = 0 app = self.default parser.print_help(app.stdout) app.stdout.write(_('\nCommands for API v%s:\n') % app.api_version) command_manager = app.command_manager for name, ep in sorted(command_manager): factory = ep.load() cmd = factory(self, None) one_liner = cmd.get_description().split('\n')[0] outputs.append((name, one_liner)) max_len = max(len(name), max_len) for (name, one_liner) in outputs: app.stdout.write(' %s %s\n' % (name.ljust(max_len), one_liner)) sys.exit(0) class TackerShell(app.App): # verbose logging levels WARNING_LEVEL = 0 INFO_LEVEL = 1 DEBUG_LEVEL = 2 CONSOLE_MESSAGE_FORMAT = '%(message)s' DEBUG_MESSAGE_FORMAT = '%(levelname)s: %(name)s %(message)s' log = logging.getLogger(__name__) def __init__(self, apiversion): super(TackerShell, self).__init__( description=__doc__.strip(), version=VERSION, command_manager=commandmanager.CommandManager('tacker.cli'), ) self.commands = COMMANDS for k, v in self.commands[apiversion].items(): self.command_manager.add_command(k, v) self._register_extensions(VERSION) # Pop the 'complete' to correct the outputs of 'tacker help'. self.command_manager.commands.pop('complete') # This is instantiated in initialize_app() only when using # password flow auth self.auth_client = None self.api_version = apiversion def build_option_parser(self, description, version): """Return an argparse option parser for this application. Subclasses may override this method to extend the parser with more global options. :param description: full description of the application :paramtype description: str :param version: version number for the application :paramtype version: str """ parser = argparse.ArgumentParser( description=description, add_help=False, ) parser.add_argument( '--version', action='version', version=__version__, ) parser.add_argument( '-v', '--verbose', '--debug', action='count', dest='verbose_level', default=self.DEFAULT_VERBOSE_LEVEL, help=_('Increase verbosity of output and show tracebacks on' ' errors. You can repeat this option.')) parser.add_argument( '-q', '--quiet', action='store_const', dest='verbose_level', const=0, help=_('Suppress output except warnings and errors.')) parser.add_argument( '-h', '--help', action=HelpAction, nargs=0, default=self, # tricky help=_("Show this help message and exit.")) parser.add_argument( '-r', '--retries', metavar="NUM", type=check_non_negative_int, default=0, help=_("How many times the request to the Tacker server should " "be retried if it fails.")) # FIXME(bklei): this method should come from python-keystoneclient self._append_global_identity_args(parser) return parser def _append_global_identity_args(self, parser): # FIXME(bklei): these are global identity (Keystone) arguments which # should be consistent and shared by all service clients. Therefore, # they should be provided by python-keystoneclient. We will need to # refactor this code once this functionality is available in # python-keystoneclient. # # Note: At that time we'll need to decide if we can just abandon # the deprecated args (--service-type and --endpoint-type). 
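        # Note: the value given via --os-service-type is preferred; the
        # deprecated --service-type value is only consulted as a fallback
        # when the ClientManager is built in authenticate_user() below.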
parser.add_argument( '--os-service-type', metavar='', default=env('OS_TACKER_SERVICE_TYPE', default='nfv-orchestration'), help=_('Defaults to env[OS_TACKER_SERVICE_TYPE] or \ nfv-orchestration.')) parser.add_argument( '--os-endpoint-type', metavar='', default=env('OS_ENDPOINT_TYPE', default='publicURL'), help=_('Defaults to env[OS_ENDPOINT_TYPE] or publicURL.')) # FIXME(bklei): --service-type is deprecated but kept in for # backward compatibility. parser.add_argument( '--service-type', metavar='', default=env('OS_TACKER_SERVICE_TYPE', default='nfv-orchestration'), help=_('DEPRECATED! Use --os-service-type.')) # FIXME(bklei): --endpoint-type is deprecated but kept in for # backward compatibility. parser.add_argument( '--endpoint-type', metavar='', default=env('OS_ENDPOINT_TYPE', default='publicURL'), help=_('DEPRECATED! Use --os-endpoint-type.')) parser.add_argument( '--os-auth-strategy', metavar='', default=env('OS_AUTH_STRATEGY', default='keystone'), help=_('DEPRECATED! Only keystone is supported.')) parser.add_argument( '--os_auth_strategy', help=argparse.SUPPRESS) parser.add_argument( '--os-auth-url', metavar='', default=env('OS_AUTH_URL'), help=_('Authentication URL, defaults to env[OS_AUTH_URL].')) parser.add_argument( '--os_auth_url', help=argparse.SUPPRESS) project_name_group = parser.add_mutually_exclusive_group() project_name_group.add_argument( '--os-tenant-name', metavar='', default=env('OS_TENANT_NAME'), help=_('Authentication tenant name, defaults to ' 'env[OS_TENANT_NAME].')) project_name_group.add_argument( '--os-project-name', metavar='', default=utils.env('OS_PROJECT_NAME'), help=_('Another way to specify tenant name. ' 'This option is mutually exclusive with ' ' --os-tenant-name. ' 'Defaults to env[OS_PROJECT_NAME].')) parser.add_argument( '--os_tenant_name', help=argparse.SUPPRESS) project_id_group = parser.add_mutually_exclusive_group() project_id_group.add_argument( '--os-tenant-id', metavar='', default=env('OS_TENANT_ID'), help=_('Authentication tenant ID, defaults to ' 'env[OS_TENANT_ID].')) project_id_group.add_argument( '--os-project-id', metavar='', default=utils.env('OS_PROJECT_ID'), help=_('Another way to specify tenant ID. ' 'This option is mutually exclusive with ' ' --os-tenant-id. ' 'Defaults to env[OS_PROJECT_ID].')) parser.add_argument( '--os-username', metavar='', default=utils.env('OS_USERNAME'), help=_('Authentication username, defaults to env[OS_USERNAME].')) parser.add_argument( '--os_username', help=argparse.SUPPRESS) parser.add_argument( '--os-user-id', metavar='', default=env('OS_USER_ID'), help=_('Authentication user ID (Env: OS_USER_ID)')) parser.add_argument( '--os_user_id', help=argparse.SUPPRESS) parser.add_argument( '--os-user-domain-id', metavar='', default=utils.env('OS_USER_DOMAIN_ID'), help=_('OpenStack user domain ID. ' 'Defaults to env[OS_USER_DOMAIN_ID].')) parser.add_argument( '--os_user_domain_id', help=argparse.SUPPRESS) parser.add_argument( '--os-user-domain-name', metavar='', default=utils.env('OS_USER_DOMAIN_NAME'), help=_('OpenStack user domain name. 
' 'Defaults to env[OS_USER_DOMAIN_NAME].')) parser.add_argument( '--os_user_domain_name', help=argparse.SUPPRESS) parser.add_argument( '--os_project_id', help=argparse.SUPPRESS) parser.add_argument( '--os_project_name', help=argparse.SUPPRESS) parser.add_argument( '--os-project-domain-id', metavar='', default=utils.env('OS_PROJECT_DOMAIN_ID'), help=_('Defaults to env[OS_PROJECT_DOMAIN_ID].')) parser.add_argument( '--os-project-domain-name', metavar='', default=utils.env('OS_PROJECT_DOMAIN_NAME'), help=_('Defaults to env[OS_PROJECT_DOMAIN_NAME].')) parser.add_argument( '--os-cert', metavar='', default=utils.env('OS_CERT'), help=_("Path of certificate file to use in SSL " "connection. This file can optionally be " "prepended with the private key. Defaults " "to env[OS_CERT].")) parser.add_argument( '--os-cacert', metavar='', default=env('OS_CACERT', default=None), help=_("Specify a CA bundle file to use in " "verifying a TLS (https) server certificate. " "Defaults to env[OS_CACERT].")) parser.add_argument( '--os-key', metavar='', default=utils.env('OS_KEY'), help=_("Path of client key to use in SSL " "connection. This option is not necessary " "if your key is prepended to your certificate " "file. Defaults to env[OS_KEY].")) parser.add_argument( '--os-password', metavar='', default=utils.env('OS_PASSWORD'), help=_('Authentication password, defaults to env[OS_PASSWORD].')) parser.add_argument( '--os_password', help=argparse.SUPPRESS) parser.add_argument( '--os-region-name', metavar='', default=env('OS_REGION_NAME'), help=_('Authentication region name, defaults to ' 'env[OS_REGION_NAME].')) parser.add_argument( '--os_region_name', help=argparse.SUPPRESS) parser.add_argument( '--os-token', metavar='', default=env('OS_TOKEN'), help=_('Authentication token, defaults to env[OS_TOKEN].')) parser.add_argument( '--os_token', help=argparse.SUPPRESS) parser.add_argument( '--http-timeout', metavar='', default=env('OS_NETWORK_TIMEOUT', default=None), type=float, help=_('Timeout in seconds to wait for an HTTP response. Defaults ' 'to env[OS_NETWORK_TIMEOUT] or None if not specified.')) parser.add_argument( '--os-url', metavar='', default=env('OS_URL'), help=_('Defaults to env[OS_URL].')) parser.add_argument( '--os_url', help=argparse.SUPPRESS) parser.add_argument( '--insecure', action='store_true', default=env('TACKERCLIENT_INSECURE', default=False), help=_("Explicitly allow tackerclient to perform \"insecure\" " "SSL (https) requests. The server's certificate will " "not be verified against any certificate authorities. 
" "This option should be used with caution.")) def _bash_completion(self): """Prints all of the commands and options for bash-completion.""" commands = set() options = set() for option, _action in self.parser._option_string_actions.items(): options.add(option) for command_name, command in self.command_manager: commands.add(command_name) cmd_factory = command.load() cmd = cmd_factory(self, None) cmd_parser = cmd.get_parser('') for option, _action in cmd_parser._option_string_actions.items(): options.add(option) print(' '.join(commands | options)) def _register_extensions(self, version): for name, module in itertools.chain( client_extension._discover_via_entry_points()): self._extend_shell_commands(module, version) def _extend_shell_commands(self, module, version): classes = inspect.getmembers(module, inspect.isclass) for cls_name, cls in classes: if (issubclass(cls, client_extension.TackerClientExtension) and hasattr(cls, 'shell_command')): cmd = cls.shell_command if hasattr(cls, 'versions'): if version not in cls.versions: continue try: self.command_manager.add_command(cmd, cls) self.commands[version][cmd] = cls except TypeError: pass def run(self, argv): """Equivalent to the main program for the application. :param argv: input arguments and options :paramtype argv: list of str """ try: index = 0 command_pos = -1 help_pos = -1 help_command_pos = -1 for arg in argv: if arg == 'bash-completion' and help_command_pos == -1: self._bash_completion() return 0 if arg in self.commands[self.api_version]: if command_pos == -1: command_pos = index elif arg in ('-h', '--help'): if help_pos == -1: help_pos = index elif arg == 'help': if help_command_pos == -1: help_command_pos = index index = index + 1 if command_pos > -1 and help_pos > command_pos: argv = ['help', argv[command_pos]] if help_command_pos > -1 and command_pos == -1: argv[help_command_pos] = '--help' self.options, remainder = self.parser.parse_known_args(argv) self.configure_logging() self.interactive_mode = not remainder self.initialize_app(remainder) except Exception as err: if self.options.verbose_level >= self.DEBUG_LEVEL: self.log.exception(err) raise else: self.log.error(err) return 1 if self.interactive_mode: _argv = [sys.argv[0]] sys.argv = _argv return self.interact() return self.run_subcommand(remainder) def run_subcommand(self, argv): subcommand = self.command_manager.find_command(argv) cmd_factory, cmd_name, sub_argv = subcommand cmd = cmd_factory(self, self.options) try: self.prepare_to_run_command(cmd) full_name = (cmd_name if self.interactive_mode else ' '.join([self.NAME, cmd_name]) ) cmd_parser = cmd.get_parser(full_name) return run_command(cmd, cmd_parser, sub_argv) except Exception as e: if self.options.verbose_level >= self.DEBUG_LEVEL: self.log.exception("%s", e) raise self.log.error("%s", e) return 1 def authenticate_user(self): """Authentication validation. Make sure the user has provided all of the authentication info we need. 
""" if self.options.os_auth_strategy == 'keystone': if self.options.os_token or self.options.os_url: # Token flow auth takes priority if not self.options.os_token: raise exc.CommandError( _("You must provide a token via" " either --os-token or env[OS_TOKEN]" " when providing a service URL")) if not self.options.os_url: raise exc.CommandError( _("You must provide a service URL via" " either --os-url or env[OS_URL]" " when providing a token")) else: # Validate password flow auth project_info = (self.options.os_tenant_name or self.options.os_tenant_id or (self.options.os_project_name and (self.options.os_project_domain_name or self.options.os_project_domain_id)) or self.options.os_project_id) if (not self.options.os_username and not self.options.os_user_id): raise exc.CommandError( _("You must provide a username or user ID via" " --os-username, env[OS_USERNAME] or" " --os-user-id, env[OS_USER_ID]")) if not self.options.os_password: # No password, If we've got a tty, try prompting for it if hasattr(sys.stdin, 'isatty') and sys.stdin.isatty(): # Check for Ctl-D try: self.options.os_password = getpass.getpass( 'OS Password: ') except EOFError: pass # No password because we didn't have a tty or the # user Ctl-D when prompted. if not self.options.os_password: raise exc.CommandError( _("You must provide a password via" " either --os-password or env[OS_PASSWORD]")) if (not project_info): # tenent is deprecated in Keystone v3. Use the latest # terminology instead. raise exc.CommandError( _("You must provide a project_id or project_name (" "with project_domain_name or project_domain_id) " "via " " --os-project-id (env[OS_PROJECT_ID])" " --os-project-name (env[OS_PROJECT_NAME])," " --os-project-domain-id " "(env[OS_PROJECT_DOMAIN_ID])" " --os-project-domain-name " "(env[OS_PROJECT_DOMAIN_NAME])")) if not self.options.os_auth_url: raise exc.CommandError( _("You must provide an auth url via" " either --os-auth-url or via env[OS_AUTH_URL]")) auth_session = self._get_keystone_session() auth = auth_session.auth else: # not keystone if not self.options.os_url: raise exc.CommandError( _("You must provide a service URL via" " either --os-url or env[OS_URL]")) auth_session = None auth = None self.client_manager = clientmanager.ClientManager( token=self.options.os_token, url=self.options.os_url, auth_url=self.options.os_auth_url, tenant_name=self.options.os_tenant_name, tenant_id=self.options.os_tenant_id, username=self.options.os_username, user_id=self.options.os_user_id, password=self.options.os_password, region_name=self.options.os_region_name, api_version=self.api_version, auth_strategy=self.options.os_auth_strategy, # FIXME (bklei) honor deprecated service_type and # endpoint type until they are removed service_type=self.options.os_service_type or self.options.service_type, endpoint_type=self.options.os_endpoint_type or self.endpoint_type, insecure=self.options.insecure, ca_cert=self.options.os_cacert, timeout=self.options.http_timeout, retries=self.options.retries, raise_errors=False, session=auth_session, auth=auth, log_credentials=True) return def initialize_app(self, argv): """Global app init bits: * set up API versions * validate authentication info """ super(TackerShell, self).initialize_app(argv) self.api_version = {'nfv-orchestration': self.api_version} # If the user is not asking for help, make sure they # have given us auth. 
cmd_name = None if argv: cmd_info = self.command_manager.find_command(argv) cmd_factory, cmd_name, sub_argv = cmd_info if self.interactive_mode or cmd_name != 'help': self.authenticate_user() def configure_logging(self): """Create logging handlers for any log output.""" root_logger = logging.getLogger('') # Set up logging to a file root_logger.setLevel(logging.DEBUG) # Send higher-level messages to the console via stderr console = logging.StreamHandler(self.stderr) console_level = {self.WARNING_LEVEL: logging.WARNING, self.INFO_LEVEL: logging.INFO, self.DEBUG_LEVEL: logging.DEBUG, }.get(self.options.verbose_level, logging.DEBUG) # The default log level is INFO, in this situation, set the # log level of the console to WARNING, to avoid displaying # useless messages. This equals using "--quiet" if console_level == logging.INFO: console.setLevel(logging.WARNING) else: console.setLevel(console_level) if logging.DEBUG == console_level: formatter = logging.Formatter(self.DEBUG_MESSAGE_FORMAT) else: formatter = logging.Formatter(self.CONSOLE_MESSAGE_FORMAT) logging.getLogger('iso8601.iso8601').setLevel(logging.WARNING) logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING) console.setFormatter(formatter) root_logger.addHandler(console) return def get_v2_auth(self, v2_auth_url): return v2_auth.Password( v2_auth_url, username=self.options.os_username, password=self.options.os_password, tenant_id=self.options.os_tenant_id, tenant_name=self.options.os_tenant_name) def get_v3_auth(self, v3_auth_url): project_id = self.options.os_project_id or self.options.os_tenant_id project_name = (self.options.os_project_name or self.options.os_tenant_name) return v3_auth.Password( v3_auth_url, username=self.options.os_username, password=self.options.os_password, user_id=self.options.os_user_id, user_domain_name=self.options.os_user_domain_name, user_domain_id=self.options.os_user_domain_id, project_id=project_id, project_name=project_name, project_domain_name=self.options.os_project_domain_name, project_domain_id=self.options.os_project_domain_id ) def _discover_auth_versions(self, session, auth_url): # discover the API versions the server is supporting base on the # given URL try: ks_discover = discover.Discover(session=session, auth_url=auth_url) return (ks_discover.url_for('2.0'), ks_discover.url_for('3.0')) except ks_exc.ClientException: # Identity service may not support discover API version. # Lets try to figure out the API version from the original URL. url_parts = urlparse.urlparse(auth_url) (scheme, netloc, path, params, query, fragment) = url_parts path = path.lower() if path.startswith('/v3'): return (None, auth_url) elif path.startswith('/v2'): return (auth_url, None) else: # not enough information to determine the auth version msg = _('Unable to determine the Keystone version ' 'to authenticate with using the given ' 'auth_url. Identity service may not support API ' 'version discovery. 
Please provide a versioned ' 'auth_url instead.') raise exc.CommandError(msg) def _get_keystone_session(self): # first create a Keystone session cacert = self.options.os_cacert or None cert = self.options.os_cert or None key = self.options.os_key or None insecure = self.options.insecure or False ks_session = session.Session.construct(dict(cacert=cacert, cert=cert, key=key, insecure=insecure)) # discover the supported keystone versions using the given url (v2_auth_url, v3_auth_url) = self._discover_auth_versions( session=ks_session, auth_url=self.options.os_auth_url) # Determine which authentication plugin to use. First inspect the # auth_url to see the supported version. If both v3 and v2 are # supported, then use the highest version if possible. user_domain_name = self.options.os_user_domain_name or None user_domain_id = self.options.os_user_domain_id or None project_domain_name = self.options.os_project_domain_name or None project_domain_id = self.options.os_project_domain_id or None domain_info = (user_domain_name or user_domain_id or project_domain_name or project_domain_id) if (v2_auth_url and not domain_info) or not v3_auth_url: ks_session.auth = self.get_v2_auth(v2_auth_url) else: ks_session.auth = self.get_v3_auth(v3_auth_url) return ks_session def main(argv=sys.argv[1:]): try: return TackerShell(TACKER_API_VERSION).run( list(map(encodeutils.safe_decode, argv))) except KeyboardInterrupt: print("... terminating tacker client", file=sys.stderr) return 130 except exc.TackerClientException: return 1 except Exception as e: print(e) return 1 if __name__ == "__main__": sys.exit(main(sys.argv[1:])) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7426198 python-tackerclient-2.1.0/tackerclient/tacker/0000775000175000017500000000000000000000000021445 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tacker/__init__.py0000664000175000017500000000000000000000000023544 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tacker/client.py0000664000175000017500000000477400000000000023311 0ustar00zuulzuul00000000000000# Copyright 2012 OpenStack Foundation. # All Rights Reserved # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
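# Editor's note: an illustrative sketch (not part of the upstream module) of
# building a client directly with the Client() factory defined below.  The
# token and endpoint URL arguments are placeholders; pre-authenticated token
# access is shown, but the same keyword arguments used by make_client()
# (username, password, auth_url and so on) may be passed instead.
def _example_direct_client(token, endpoint_url):
    """Return a v1.0 tacker client bound to an existing token/endpoint."""
    return Client('1.0', token=token, endpoint_url=endpoint_url)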
# from tackerclient.common._i18n import _ from tackerclient.common import exceptions from tackerclient.common import utils API_NAME = 'nfv-orchestration' API_VERSIONS = { '1.0': 'tackerclient.v1_0.client.Client', } def make_client(instance): """Returns an tacker client.""" tacker_client = utils.get_client_class( API_NAME, instance._api_version[API_NAME], API_VERSIONS, ) instance.initialize() url = instance._url url = url.rstrip("/") if '1.0' == instance._api_version[API_NAME]: client = tacker_client(username=instance._username, tenant_name=instance._tenant_name, password=instance._password, region_name=instance._region_name, auth_url=instance._auth_url, endpoint_url=url, endpoint_type=instance._endpoint_type, token=instance._token, auth_strategy=instance._auth_strategy, insecure=instance._insecure, ca_cert=instance._ca_cert, retries=instance._retries, raise_errors=instance._raise_errors, session=instance._session, auth=instance._auth) return client else: raise exceptions.UnsupportedVersion( reason=_("API version %s is not supported") % instance._api_version[API_NAME]) def Client(api_version, *args, **kwargs): """Return an tacker client. :param api_version: only 1.0 is supported now """ tacker_client = utils.get_client_class( API_NAME, api_version, API_VERSIONS, ) return tacker_client(*args, **kwargs) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7426198 python-tackerclient-2.1.0/tackerclient/tacker/v1_0/0000775000175000017500000000000000000000000022212 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tacker/v1_0/__init__.py0000664000175000017500000006366400000000000024342 0ustar00zuulzuul00000000000000# Copyright 2012 OpenStack Foundation. # All Rights Reserved # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
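#
# The module below provides the legacy CLI command framework plus helpers
# that resolve a user-supplied identifier (UUID or name) to a resource id.
# A small illustrative sketch, assuming an already constructed v1.0 client
# object named ``client``:
#
#     from tackerclient.tacker import v1_0 as tackerV10
#
#     vim_id = tackerV10.find_resourceid_by_name_or_id(client, 'vim', 'my-vim')
#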
# import abc import argparse import logging import re from cliff.formatters import table from cliff import lister from cliff import show from oslo_serialization import jsonutils from tackerclient.common._i18n import _ from tackerclient.common import command from tackerclient.common import exceptions from tackerclient.common import utils HEX_ELEM = '[0-9A-Fa-f]' UUID_PATTERN = '-'.join([HEX_ELEM + '{8}', HEX_ELEM + '{4}', HEX_ELEM + '{4}', HEX_ELEM + '{4}', HEX_ELEM + '{12}']) def _get_resource_plural(resource, client): plurals = getattr(client, 'EXTED_PLURALS', []) for k in plurals: if plurals[k] == resource: return k return resource + 's' def find_resourceid_by_id(client, resource, resource_id): resource_plural = _get_resource_plural(resource, client) obj_lister = getattr(client, "list_%s" % resource_plural) if resource == 'event': match = resource_id.isdigit() and resource_id != 0 else: match = re.match(UUID_PATTERN, resource_id) collection = resource_plural if match: data = obj_lister(id=resource_id, fields='id') if data and data[collection]: return data[collection][0]['id'] not_found_message = (_("Unable to find %(resource)s with id " "'%(id)s'") % {'resource': resource, 'id': resource_id}) # 404 is used to simulate server side behavior raise exceptions.TackerClientException( message=not_found_message, status_code=404) def _find_resourceid_by_name(client, resource, name): resource_plural = _get_resource_plural(resource, client) obj_lister = getattr(client, "list_%s" % resource_plural) data = obj_lister(name=name, fields='id') collection = resource_plural info = data[collection] if len(info) > 1: raise exceptions.TackerClientNoUniqueMatch(resource=resource, name=name) elif len(info) == 0: not_found_message = (_("Unable to find %(resource)s with name " "'%(name)s'") % {'resource': resource, 'name': name}) # 404 is used to simulate server side behavior raise exceptions.TackerClientException( message=not_found_message, status_code=404) else: return info[0]['id'] def find_resourceid_by_name_or_id(client, resource, name_or_id): try: return find_resourceid_by_id(client, resource, name_or_id) except exceptions.TackerClientException: return _find_resourceid_by_name(client, resource, name_or_id) def add_show_list_common_argument(parser): parser.add_argument( '-D', '--show-details', help=_('Show detailed info'), action='store_true', default=False,) parser.add_argument( '--show_details', action='store_true', help=argparse.SUPPRESS) parser.add_argument( '--fields', help=argparse.SUPPRESS, action='append', default=[]) parser.add_argument( '-F', '--field', dest='fields', metavar='FIELD', help=_('Specify the field(s) to be returned by server. You can ' 'repeat this option.'), action='append', default=[]) def add_pagination_argument(parser): parser.add_argument( '-P', '--page-size', dest='page_size', metavar='SIZE', type=int, help=_("Specify retrieve unit of each request, then split one request " "to several requests"), default=None) def add_sorting_argument(parser): parser.add_argument( '--sort-key', dest='sort_key', metavar='FIELD', action='append', help=_("Sorts the list by the specified fields in the specified " "directions. You can repeat this option, but you must " "specify an equal number of sort_dir and sort_key values. " "Extra sort_dir options are ignored. Missing sort_dir options " "use the default asc value."), default=[]) parser.add_argument( '--sort-dir', dest='sort_dir', metavar='{asc,desc}', help=_("Sorts the list in the specified direction. 
You can repeat " "this option."), action='append', default=[], choices=['asc', 'desc']) def is_number(s): try: float(s) # for int, long and float except ValueError: try: complex(s) # for complex except ValueError: return False return True def _process_previous_argument(current_arg, _value_number, current_type_str, _list_flag, _values_specs, _clear_flag, values_specs): if current_arg is not None: if _value_number == 0 and (current_type_str or _list_flag): # This kind of argument should have value raise exceptions.CommandError( message=_("Invalid values_specs %s") % ' '.join(values_specs)) if _value_number > 1 or _list_flag or current_type_str == 'list': current_arg.update({'nargs': '+'}) elif _value_number == 0: if _clear_flag: # if we have action=clear, we use argument's default # value None for argument _values_specs.pop() else: # We assume non value argument as bool one current_arg.update({'action': 'store_true'}) def parse_args_to_dict(values_specs): '''It is used to analyze the extra command options to command. Besides known options and arguments, our commands also support user to put more options to the end of command line. For example, list_nets -- --tag x y --key1 value1, where '-- --tag x y --key1 value1' is extra options to our list_nets. This feature can support V1.0 API's fields selection and filters. For example, to list networks which has name 'test4', we can have list_nets -- --name=test4. value spec is: --key type=int|bool|... value. Type is one of Python built-in types. By default, type is string. The key without value is a bool option. Key with two values will be a list option. ''' # values_specs for example: '-- --tag x y --key1 type=int value1' # -- is a pseudo argument values_specs_copy = values_specs[:] if values_specs_copy and values_specs_copy[0] == '--': del values_specs_copy[0] # converted ArgumentParser arguments for each of the options _options = {} # the argument part for current option in _options current_arg = None # the string after remove meta info in values_specs # for example, '--tag x y --key1 value1' _values_specs = [] # record the count of values for an option # for example: for '--tag x y', it is 2, while for '--key1 value1', it is 1 _value_number = 0 # list=true _list_flag = False # action=clear _clear_flag = False # the current item in values_specs current_item = None # the str after 'type=' current_type_str = None for _item in values_specs_copy: if _item.startswith('--'): # Deal with previous argument if any _process_previous_argument( current_arg, _value_number, current_type_str, _list_flag, _values_specs, _clear_flag, values_specs) # Init variables for current argument current_item = _item _list_flag = False _clear_flag = False current_type_str = None if "=" in _item: _value_number = 1 _item = _item.split('=')[0] else: _value_number = 0 if _item in _options: raise exceptions.CommandError( message=_("Duplicated " "options %s") % ' '.join(values_specs)) else: _options.update({_item: {}}) current_arg = _options[_item] _item = current_item elif _item.startswith('type='): if current_arg is None: raise exceptions.CommandError( message=_("Invalid " "values_specs %s") % ' '.join(values_specs)) if 'type' not in current_arg: current_type_str = _item.split('=', 2)[1] current_arg.update({'type': eval(current_type_str)}) if current_type_str == 'bool': current_arg.update({'type': utils.str2bool}) elif current_type_str == 'dict': current_arg.update({'type': utils.str2dict}) continue elif _item == 'list=true': _list_flag = True continue elif _item == 
'action=clear': _clear_flag = True continue if not _item.startswith('--'): # All others are value items # Make sure '--' occurs first and allow minus value if (not current_item or '=' in current_item or _item.startswith('-') and not is_number(_item)): raise exceptions.CommandError( message=_("Invalid " "values_specs %s") % ' '.join(values_specs)) _value_number += 1 _values_specs.append(_item) # Deal with last one argument _process_previous_argument( current_arg, _value_number, current_type_str, _list_flag, _values_specs, _clear_flag, values_specs) # populate the parser with arguments _parser = argparse.ArgumentParser(add_help=False) for opt, optspec in _options.items(): _parser.add_argument(opt, **optspec) _args = _parser.parse_args(_values_specs) result_dict = {} for opt in _options.keys(): _opt = opt.split('--', 2)[1] _opt = _opt.replace('-', '_') _value = getattr(_args, _opt) result_dict.update({_opt: _value}) return result_dict def _merge_args(qCmd, parsed_args, _extra_values, value_specs): """Merge arguments from _extra_values into parsed_args. If an argument value are provided in both and it is a list, the values in _extra_values will be merged into parsed_args. @param parsed_args: the parsed args from known options @param _extra_values: the other parsed arguments in unknown parts @param values_specs: the unparsed unknown parts """ temp_values = _extra_values.copy() for key, value in temp_values.items(): if hasattr(parsed_args, key): arg_value = getattr(parsed_args, key) if arg_value is not None and value is not None: if isinstance(arg_value, list): if value and isinstance(value, list): if (not arg_value or isinstance(arg_value[0], type(value[0]))): arg_value.extend(value) _extra_values.pop(key) def update_dict(obj, dict, attributes): """Update dict with fields from obj.attributes :param obj: the object updated into dict :param dict: the result dictionary :param attributes: a list of attributes belonging to obj """ for attribute in attributes: if hasattr(obj, attribute) and getattr(obj, attribute) is not None: dict[attribute] = getattr(obj, attribute) class TableFormater(table.TableFormatter): """This class is used to keep consistency with prettytable 0.6. https://bugs.launchpad.net/python-tackerclient/+bug/1165962 """ def emit_list(self, column_names, data, stdout, parsed_args): if column_names: super(TableFormater, self).emit_list(column_names, data, stdout, parsed_args) else: stdout.write('\n') # command.OpenStackCommand is abstract class so that metaclass of # subclass must be subclass of metaclass of all its base. # otherwise metaclass conflict exception is raised. class TackerCommandMeta(abc.ABCMeta): def __new__(cls, name, bases, cls_dict): if 'log' not in cls_dict: cls_dict['log'] = logging.getLogger( cls_dict['__module__'] + '.' 
+ name) return super(TackerCommandMeta, cls).__new__(cls, name, bases, cls_dict) class TackerCommand(command.OpenStackCommand, metaclass=TackerCommandMeta): api = 'nfv-orchestration' values_specs = [] json_indent = None def __init__(self, app, app_args): super(TackerCommand, self).__init__(app, app_args) # NOTE(markmcclain): This is no longer supported in cliff version 1.5.2 # see https://bugs.launchpad.net/python-tackerclient/+bug/1265926 # if hasattr(self, 'formatters'): # self.formatters['table'] = TableFormater() def get_client(self): return self.app.client_manager.tacker def get_parser(self, prog_name): parser = super(TackerCommand, self).get_parser(prog_name) parser.add_argument( '--request-format', help=_('The json request format'), default='json', choices=['json', ], ) parser.add_argument( '--request_format', choices=['json', ], help=argparse.SUPPRESS) return parser def format_output_data(self, data): # Modify data to make it more readable if self.resource in data: for k, v in data[self.resource].items(): if isinstance(v, list): value = '\n'.join(jsonutils.dumps( i, indent=self.json_indent) if isinstance(i, dict) else str(i) for i in v) data[self.resource][k] = value elif isinstance(v, dict): value = jsonutils.dumps(v, indent=self.json_indent) data[self.resource][k] = value elif v is None: data[self.resource][k] = '' def add_known_arguments(self, parser): pass def args2body(self, parsed_args): return {} class CreateCommand(TackerCommand, show.ShowOne): """Create a resource for a given tenant """ api = 'nfv-orchestration' resource = None log = None remove_output_fields = [] def get_parser(self, prog_name): parser = super(CreateCommand, self).get_parser(prog_name) parser.add_argument( '--tenant-id', metavar='TENANT_ID', help=_('The owner tenant ID'), ) parser.add_argument( '--tenant_id', help=argparse.SUPPRESS) self.add_known_arguments(parser) return parser def get_data(self, parsed_args): self.log.debug('get_data(%s)', parsed_args) tacker_client = self.get_client() tacker_client.format = parsed_args.request_format _extra_values = parse_args_to_dict(self.values_specs) _merge_args(self, parsed_args, _extra_values, self.values_specs) body = self.args2body(parsed_args) body[self.resource].update(_extra_values) obj_creator = getattr(tacker_client, "create_%s" % self.resource) data = obj_creator(body) self.format_output_data(data) # {u'network': {u'id': u'e9424a76-6db4-4c93-97b6-ec311cd51f19'}} info = self.resource in data and data[self.resource] or None if info: print(_('Created a new %s:') % self.resource, file=self.app.stdout) for f in self.remove_output_fields: if f in info: info.pop(f) else: info = {'': ''} return zip(*sorted(info.items())) class UpdateCommand(TackerCommand): """Update resource's information.""" api = 'nfv-orchestration' resource = None log = None allow_names = True def get_parser(self, prog_name): parser = super(UpdateCommand, self).get_parser(prog_name) parser.add_argument( 'id', metavar=self.resource.upper(), help=_('ID or name of %s to update') % self.resource) self.add_known_arguments(parser) return parser def run(self, parsed_args): self.log.debug('run(%s)', parsed_args) tacker_client = self.get_client() tacker_client.format = parsed_args.request_format _extra_values = parse_args_to_dict(self.values_specs) _merge_args(self, parsed_args, _extra_values, self.values_specs) body = self.args2body(parsed_args) if self.resource in body: body[self.resource].update(_extra_values) else: body[self.resource] = _extra_values if not body[self.resource]: raise 
exceptions.CommandError( message=_("Must specify new" " values to update %s") % self.resource) if self.allow_names: _id = find_resourceid_by_name_or_id( tacker_client, self.resource, parsed_args.id) else: _id = find_resourceid_by_id( tacker_client, self.resource, parsed_args.id) obj_updator = getattr(tacker_client, "update_%s" % self.resource) obj_updator(_id, body) print((_('Updated %(resource)s: %(id)s') % {'id': parsed_args.id, 'resource': self.resource}), file=self.app.stdout) return class DeleteCommand(TackerCommand): """Delete given resource(s) """ api = 'nfv-orchestration' resource = None log = None allow_names = True deleted_msg = {} def get_parser(self, prog_name): parser = super(DeleteCommand, self).get_parser(prog_name) if self.allow_names: help_str = _('IDs or names of %s to delete') else: help_str = _('IDs of %s to delete') parser.add_argument( 'ids', nargs='+', metavar=self.resource.upper(), help=help_str % self.resource) self.add_known_arguments(parser) return parser def run(self, parsed_args): failure = False deleted_ids = [] failed_items = {} tacker_client = self.get_client() tacker_client.format = parsed_args.request_format obj_deleter = getattr(tacker_client, "delete_%s" % self.resource) body = self.args2body(parsed_args) for resource_id in parsed_args.ids: try: if self.allow_names: _id = find_resourceid_by_name_or_id( tacker_client, self.resource, resource_id) else: _id = resource_id if body: obj_deleter(_id, body) else: obj_deleter(_id) deleted_ids.append(resource_id) except Exception as e: failure = True failed_items[resource_id] = e if failure: msg = '' if deleted_ids: status_msg = self.deleted_msg.get(self.resource, 'deleted') msg = (_('Successfully %(status_msg)s %(resource)s(s):' ' %(deleted_list)s') % {'status_msg': status_msg, 'deleted_list': ', '.join(deleted_ids), 'resource': self.resource}) err_msg = _("\n\nUnable to delete the below" " %s(s):") % self.resource for failed_id, error in failed_items.items(): err_msg += (_('\n Cannot delete %(failed_id)s: %(error)s') % {'failed_id': failed_id, 'error': error}) msg += err_msg raise exceptions.CommandError(message=msg) else: print((_('All specified %(resource)s(s) %(msg)s successfully') % {'msg': self.deleted_msg.get(self.resource, 'deleted'), 'resource': self.resource})) return class ListCommand(TackerCommand, lister.Lister): """List resources that belong to a given tenant """ api = 'nfv-orchestration' resource = None log = None _formatters = {} list_columns = [] unknown_parts_flag = True pagination_support = False sorting_support = False def get_parser(self, prog_name): parser = super(ListCommand, self).get_parser(prog_name) add_show_list_common_argument(parser) if self.pagination_support: add_pagination_argument(parser) if self.sorting_support: add_sorting_argument(parser) return parser def args2search_opts(self, parsed_args): search_opts = {} fields = parsed_args.fields if parsed_args.fields: search_opts.update({'fields': fields}) if parsed_args.show_details: search_opts.update({'verbose': 'True'}) return search_opts def call_server(self, tacker_client, search_opts, parsed_args): resource_plural = _get_resource_plural(self.resource, tacker_client) obj_lister = getattr(tacker_client, "list_%s" % resource_plural) data = obj_lister(**search_opts) return data def retrieve_list(self, parsed_args): """Retrieve a list of resources from Tacker server""" tacker_client = self.get_client() tacker_client.format = parsed_args.request_format _extra_values = parse_args_to_dict(self.values_specs) _merge_args(self, 
parsed_args, _extra_values, self.values_specs) search_opts = self.args2search_opts(parsed_args) search_opts.update(_extra_values) if self.pagination_support: page_size = parsed_args.page_size if page_size: search_opts.update({'limit': page_size}) if self.sorting_support: keys = parsed_args.sort_key if keys: search_opts.update({'sort_key': keys}) dirs = parsed_args.sort_dir len_diff = len(keys) - len(dirs) if len_diff > 0: dirs += ['asc'] * len_diff elif len_diff < 0: dirs = dirs[:len(keys)] if dirs: search_opts.update({'sort_dir': dirs}) data = self.call_server(tacker_client, search_opts, parsed_args) collection = _get_resource_plural(self.resource, tacker_client) return data.get(collection, []) def extend_list(self, data, parsed_args): """Update a retrieved list. This method provides a way to modify a original list returned from the tacker server. For example, you can add subnet cidr information to a list network. """ pass def setup_columns(self, info, parsed_args): _columns = len(info) > 0 and sorted(info[0].keys()) or [] if not _columns: # clean the parsed_args.columns so that cliff will not break parsed_args.columns = [] elif parsed_args.columns: _columns = [x for x in parsed_args.columns if x in _columns] elif self.list_columns: # if no -c(s) by user and list_columns, we use columns in # both list_columns and returned resource. # Also Keep their order the same as in list_columns _columns = [x for x in self.list_columns if x in _columns] return (_columns, (utils.get_item_properties( s, _columns, formatters=self._formatters, ) for s in info), ) def get_data(self, parsed_args): self.log.debug('get_data(%s)', parsed_args) data = self.retrieve_list(parsed_args) self.extend_list(data, parsed_args) return self.setup_columns(data, parsed_args) class ShowCommand(TackerCommand, show.ShowOne): """Show information of a given resource """ api = 'nfv-orchestration' resource = None log = None allow_names = True def get_id(self): if self.resource: return self.resource.upper() def get_parser(self, prog_name): parser = super(ShowCommand, self).get_parser(prog_name) add_show_list_common_argument(parser) if self.allow_names: help_str = _('ID or name of %s to look up') else: help_str = _('ID of %s to look up') parser.add_argument( 'id', metavar=self.get_id(), help=help_str % self.resource) return parser def get_data(self, parsed_args): self.log.debug('get_data(%s)', parsed_args) tacker_client = self.get_client() tacker_client.format = parsed_args.request_format params = {} if parsed_args.show_details: params = {'verbose': 'True'} if parsed_args.fields: params = {'fields': parsed_args.fields} if self.allow_names: _id = find_resourceid_by_name_or_id(tacker_client, self.resource, parsed_args.id) else: _id = parsed_args.id obj_shower = getattr(tacker_client, "show_%s" % self.resource) data = obj_shower(_id, **params) self.format_output_data(data) resource = data[self.resource] if self.resource in data: return zip(*sorted(resource.items())) else: return None ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7426198 python-tackerclient-2.1.0/tackerclient/tacker/v1_0/nfvo/0000775000175000017500000000000000000000000023162 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tacker/v1_0/nfvo/__init__.py0000664000175000017500000000000000000000000025261 
0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tacker/v1_0/nfvo/vim.py0000664000175000017500000001132600000000000024332 0ustar00zuulzuul00000000000000# Copyright 2016 Brocade Communications Systems Inc # All Rights Reserved. # # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import yaml from oslo_utils import strutils from tackerclient.common import exceptions from tackerclient.i18n import _ from tackerclient.tacker import v1_0 as tackerV10 from tackerclient.tacker.v1_0.nfvo import vim_utils _VIM = "vim" class ListVIM(tackerV10.ListCommand): """List VIMs that belong to a given tenant.""" resource = _VIM list_columns = ['id', 'tenant_id', 'name', 'type', 'is_default', 'placement_attr', 'status'] class ShowVIM(tackerV10.ShowCommand): """Show information of a given VIM.""" resource = _VIM class CreateVIM(tackerV10.CreateCommand): """Create a VIM.""" resource = _VIM def add_known_arguments(self, parser): parser.add_argument( '--config-file', required=True, help=_('YAML file with VIM configuration parameters')) parser.add_argument( 'name', metavar='NAME', help=_('Set a name for the VIM')) parser.add_argument( '--description', help=_('Set a description for the VIM')) parser.add_argument( '--is-default', action='store_true', default=False, help=_('Set as default VIM')) def args2body(self, parsed_args): body = {self.resource: {}} if parsed_args.config_file: with open(parsed_args.config_file) as f: vim_config = f.read() try: config_param = yaml.load(vim_config, Loader=yaml.SafeLoader) except yaml.YAMLError as e: raise exceptions.InvalidInput(reason=e) vim_obj = body[self.resource] try: auth_url = config_param.pop('auth_url') except KeyError: raise exceptions.TackerClientException(message='Auth URL must be ' 'specified', status_code=404) vim_obj['auth_url'] = vim_utils.validate_auth_url(auth_url).geturl() vim_utils.args2body_vim(config_param, vim_obj) tackerV10.update_dict(parsed_args, body[self.resource], ['tenant_id', 'name', 'description', 'is_default']) return body class UpdateVIM(tackerV10.UpdateCommand): """Update a given VIM.""" resource = _VIM def add_known_arguments(self, parser): parser.add_argument( '--config-file', required=False, help=_('YAML file with VIM configuration parameters')) parser.add_argument( '--name', help=_('New name for the VIM')) parser.add_argument( '--description', help=_('New description for the VIM')) parser.add_argument( '--is-default', type=strutils.bool_from_string, metavar='{True,False}', help=_('Indicate whether the VIM is used as default')) def args2body(self, parsed_args): body = {self.resource: {}} config_param = None # config arg passed as data overrides config yaml when both args passed if parsed_args.config_file: with open(parsed_args.config_file) as f: config_yaml = f.read() try: config_param = yaml.load(config_yaml, Loader=yaml.SafeLoader) except yaml.YAMLError as e: raise exceptions.InvalidInput(reason=e) vim_obj = body[self.resource] if 
config_param is not None: vim_utils.args2body_vim(config_param, vim_obj) tackerV10.update_dict(parsed_args, body[self.resource], ['tenant_id', 'name', 'description', 'is_default']) # type attribute is read-only, it can't be updated, so remove it # in update method body['vim'].pop('type', None) return body class DeleteVIM(tackerV10.DeleteCommand): """Delete given VIM(s).""" resource = _VIM ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tacker/v1_0/nfvo/vim_utils.py0000664000175000017500000001122500000000000025550 0ustar00zuulzuul00000000000000# Copyright 2016 Brocade Communications Systems Inc # All Rights Reserved. # # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from urllib import parse as urlparse from tackerclient.common import exceptions def args2body_vim(config_param, vim): """Create additional args to vim body :param vim: vim request object :return: vim body with args populated """ vim_type = ['openstack', 'kubernetes'] cert_verify_type = ['True', 'False'] if 'type' in config_param: vim['type'] = config_param.pop('type', '') if not vim['type'] in vim_type: raise exceptions.TackerClientException( message='Supported VIM types: openstack, kubernetes', status_code=400) else: vim['type'] = 'openstack' if vim['type'] == 'openstack': vim['vim_project'] = { 'name': config_param.pop('project_name', ''), 'project_domain_name': config_param.pop('project_domain_name', '')} if not vim['vim_project']['name']: raise exceptions.TackerClientException( message='Project name must be specified', status_code=404) cert_verify = config_param.pop('cert_verify', 'True') if cert_verify not in cert_verify_type: raise exceptions.TackerClientException( message='Supported cert_verify types: True, False', status_code=400) vim['auth_cred'] = {'username': config_param.pop('username', ''), 'password': config_param.pop('password', ''), 'user_domain_name': config_param.pop('user_domain_name', ''), 'cert_verify': cert_verify} elif vim['type'] == 'kubernetes': vim['vim_project'] = { 'name': config_param.pop('project_name', '')} if not vim['vim_project']['name']: raise exceptions.TackerClientException( message='Project name must be specified in Kubernetes VIM,' 'it is namespace in Kubernetes environment', status_code=404) if 'oidc_token_url' in config_param: if ('username' not in config_param or 'password' not in config_param or 'client_id' not in config_param): # the username, password, client_id are required. # client_secret is not required when client type is public. 
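                # For reference, the OIDC-related section of the --config-file
                # YAML consumed here is expected to look like the following
                # (values are placeholders; client_secret is optional):
                #   oidc_token_url: https://auth.example.com/token
                #   username: admin
                #   password: secret
                #   client_id: tacker
                #   client_secret: secret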
raise exceptions.TackerClientException( message='oidc_token_url must be specified with username,' ' password, client_id, client_secret(optional).', status_code=404) vim['auth_cred'] = { 'oidc_token_url': config_param.pop('oidc_token_url'), 'username': config_param.pop('username'), 'password': config_param.pop('password'), 'client_id': config_param.pop('client_id')} if 'client_secret' in config_param: vim['auth_cred']['client_secret'] = config_param.pop( 'client_secret') elif ('username' in config_param) and ('password' in config_param): vim['auth_cred'] = { 'username': config_param.pop('username', ''), 'password': config_param.pop('password', '')} elif 'bearer_token' in config_param: vim['auth_cred'] = { 'bearer_token': config_param.pop('bearer_token', '')} else: raise exceptions.TackerClientException( message='username and password or bearer_token must be' 'provided', status_code=404) ssl_ca_cert = config_param.pop('ssl_ca_cert', '') if ssl_ca_cert: vim['auth_cred']['ssl_ca_cert'] = ssl_ca_cert if 'extra' in config_param: vim['extra'] = config_param.pop('extra') def validate_auth_url(url): url_parts = urlparse.urlparse(url) if not url_parts.scheme or not url_parts.netloc: raise exceptions.TackerClientException(message='Invalid auth URL') return url_parts ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7426198 python-tackerclient-2.1.0/tackerclient/tests/0000775000175000017500000000000000000000000021336 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/__init__.py0000664000175000017500000000000000000000000023435 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003200000000000011450 xustar000000000000000026 mtime=1715864642.74662 python-tackerclient-2.1.0/tackerclient/tests/unit/0000775000175000017500000000000000000000000022315 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/__init__.py0000664000175000017500000000000000000000000024414 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003200000000000011450 xustar000000000000000026 mtime=1715864642.74662 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/0000775000175000017500000000000000000000000023101 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/__init__.py0000664000175000017500000000000000000000000025200 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/base.py0000664000175000017500000000534200000000000024371 0ustar00zuulzuul00000000000000# Copyright (C) 2019 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. from requests_mock.contrib import fixture as requests_mock_fixture import testtools from unittest import mock from cliff import columns as cliff_columns class FixturedTestCase(testtools.TestCase): client_fixture_class = None api_version = '1' def setUp(self): super(FixturedTestCase, self).setUp() self.app = mock.MagicMock() if self.client_fixture_class: self.requests_mock = self.useFixture(requests_mock_fixture. Fixture()) fix = self.client_fixture_class(self.requests_mock, api_version=self.api_version) self.cs = self.useFixture(fix).client def check_parser(self, cmd, args, verify_args): cmd_parser = cmd.get_parser('check_parser') try: parsed_args = cmd_parser.parse_args(args) except SystemExit: raise ParserException for av in verify_args: attr, value = av if attr: self.assertIn(attr, parsed_args) self.assertEqual(getattr(parsed_args, attr), value) return parsed_args def assertNotCalled(self, m, msg=None): """Assert a function was not called""" if m.called: if not msg: msg = 'method %s should not have been called' % m self.fail(msg) def assertListItemsEqual(self, expected, actual): """Assertion based on human_readable values of list items""" self.assertEqual(len(expected), len(actual)) for col_expected, col_actual in zip(expected, actual): if isinstance(col_actual, tuple): self.assertListItemsEqual(col_expected, col_actual) elif isinstance(col_expected, cliff_columns.FormattableColumn): self.assertIsInstance(col_actual, col_expected.__class__) self.assertEqual(col_expected.human_readable(), col_actual.human_readable()) else: self.assertEqual(col_expected, col_actual) class ParserException(Exception): pass ././@PaxHeader0000000000000000000000000000003200000000000011450 xustar000000000000000026 mtime=1715864642.74662 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/common/0000775000175000017500000000000000000000000024371 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/common/__init__.py0000664000175000017500000000000000000000000026470 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/common/test_vnflcm_versions.py0000664000175000017500000000757400000000000031234 0ustar00zuulzuul00000000000000# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import os import ddt from unittest import mock from tackerclient.common import exceptions from tackerclient.osc.common.vnflcm import vnflcm_versions from tackerclient.tests.unit.osc import base from tackerclient.tests.unit.osc.v1.fixture_data import client class TestVnfLcm(base.FixturedTestCase): client_fixture_class = client.ClientFixture def setUp(self): super(TestVnfLcm, self).setUp() self.url = client.TACKER_URL self.header = {'content-type': 'application/json'} self.app = mock.Mock() self.app_args = mock.Mock() self.client_manager = self.cs self.app.client_manager.tackerclient = self.client_manager @ddt.ddt class TestVnfLcmVersions(TestVnfLcm): def setUp(self): super(TestVnfLcmVersions, self).setUp() self.vnflcm_versions = vnflcm_versions.VnfLcmVersions( self.app, self.app_args, cmd_name='vnflcm versions') def _versions_response(self, major_version=None): if major_version is None: return {"uriPrefix": "/vnflcm", "apiVersions": [{"version": "1.3.0", "isDeprecated": False}, {"version": "2.0.0", "isDeprecated": False}]} elif major_version == "1": return {"uriPrefix": "/vnflcm/v1", "apiVersions": [{"version": "1.3.0", "isDeprecated": False}]} elif major_version == "2": return {"uriPrefix": "/vnflcm/v2", "apiVersions": [{"version": "2.0.0", "isDeprecated": False}]} def test_invalid_major_version(self): parser = self.vnflcm_versions.get_parser('vnflcm versions') parsed_args = parser.parse_args(["--major-version", "3"]) self.assertRaises(exceptions.InvalidInput, self.vnflcm_versions.take_action, parsed_args) def test_take_action_no_arg(self): parser = self.vnflcm_versions.get_parser('vnflcm versions') parsed_args = parser.parse_args([]) response = self._versions_response() self.requests_mock.register_uri( 'GET', os.path.join(self.url, 'vnflcm/api_versions'), json=response, headers=self.header) colmns, data = self.vnflcm_versions.take_action(parsed_args) self.assertEqual(colmns, tuple(response.keys())) self.assertEqual(data, tuple(response.values())) @ddt.data('1', '2') def test_take_action_with_major_version(self, major_version): parser = self.vnflcm_versions.get_parser('vnflcm versions') parsed_args = parser.parse_args(["--major-version", major_version]) response = self._versions_response(major_version) self.requests_mock.register_uri( 'GET', os.path.join(self.url, 'vnflcm/v{}/api_versions'.format(major_version)), json=response, headers=self.header) colmns, data = self.vnflcm_versions.take_action(parsed_args) self.assertEqual(colmns, tuple(response.keys())) self.assertEqual(data, tuple(response.values())) ././@PaxHeader0000000000000000000000000000003200000000000011450 xustar000000000000000026 mtime=1715864642.74662 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/0000775000175000017500000000000000000000000023427 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/__init__.py0000664000175000017500000000000000000000000025526 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003200000000000011450 xustar000000000000000026 mtime=1715864642.74662 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/0000775000175000017500000000000000000000000026106 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 
python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/__init__.py0000664000175000017500000000000000000000000030205 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/client.py0000664000175000017500000000476700000000000027754 0ustar00zuulzuul00000000000000# Copyright 2019 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import fixtures from keystoneauth1 import fixture from keystoneauth1 import loading from keystoneauth1 import session from tackerclient.v1_0 import client as proxy_client IDENTITY_URL = 'http://identityserver:5000/v3' TACKER_URL = 'http://nfv-orchestration' class ClientFixture(fixtures.Fixture): def __init__(self, requests_mock, identity_url=IDENTITY_URL, api_version='1'): super(ClientFixture, self).__init__() self.identity_url = identity_url self.client = None self.token = fixture.V2Token() self.token.set_scope() self.requests_mock = requests_mock self.discovery = fixture.V2Discovery(href=self.identity_url) s = self.token.add_service('nfv-orchestration') s.add_endpoint(TACKER_URL) self.api_version = api_version def setUp(self): super(ClientFixture, self).setUp() auth_url = '%s/tokens' % self.identity_url headers = {'X-Content-Type': 'application/json'} self.requests_mock.post(auth_url, json=self.token, headers=headers) self.requests_mock.get(self.identity_url, json=self.discovery, headers=headers) self.client = self.new_client() def new_client(self): self.session = session.Session() loader = loading.get_plugin_loader('password') self.session.auth = loader.load_from_options( auth_url=self.identity_url, username='xx', password='xx') return proxy_client.Client(service_type='nfv-orchestration', interface='public', endpoint_type='public', region_name='RegionOne', auth_url=self.identity_url, token=self.token.token_id, endpoint_url=TACKER_URL, api_version=self.api_version) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7186198 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package/0000775000175000017500000000000000000000000031713 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000021100000000000011447 xustar0000000000000000111 path=python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package/Definitions/ 26 mtime=1715864642.75062 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package/Definitions0000775000175000017500000000000000000000000034107 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000024600000000000011457 xustar0000000000000000144 path=python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package/Definitions/etsi_nfv_sol001_common_types.yaml 22 mtime=1715864589.0 
python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package/Definitions0000664000175000017500000002174700000000000034124 0ustar00zuulzuul00000000000000# TODO:Manually change from version 1.2 to 1.0 tosca_definitions_version: tosca_simple_yaml_1_0 #tosca_definitions_version: tosca_simple_yaml_1_2 description: ETSI NFV SOL 001 common types definitions version 2.6.1 metadata: template_name: etsi_nfv_sol001_common_types template_author: ETSI_NFV template_version: 2.6.1 data_types: tosca.datatypes.nfv.L2AddressData: derived_from: tosca.datatypes.Root description: Describes the information on the MAC addresses to be assigned to a connection point. properties: mac_address_assignment: type: boolean description: Specifies if the address assignment is the responsibility of management and orchestration function or not. If it is set to True, it is the management and orchestration function responsibility required: true tosca.datatypes.nfv.L3AddressData: derived_from: tosca.datatypes.Root description: Provides information about Layer 3 level addressing scheme and parameters applicable to a CP properties: ip_address_assignment: type: boolean description: Specifies if the address assignment is the responsibility of management and orchestration function or not. If it is set to True, it is the management and orchestration function responsibility required: true floating_ip_activated: type: boolean description: Specifies if the floating IP scheme is activated on the Connection Point or not required: true ip_address_type: type: string description: Defines address type. The address type should be aligned with the address type supported by the layer_protocols properties of the parent VnfExtCp required: false constraints: - valid_values: [ ipv4, ipv6 ] number_of_ip_address: type: integer description: Minimum number of IP addresses to be assigned required: false constraints: - greater_than: 0 tosca.datatypes.nfv.AddressData: derived_from: tosca.datatypes.Root description: Describes information about the addressing scheme and parameters applicable to a CP properties: address_type: type: string description: Describes the type of the address to be assigned to a connection point. The content type shall be aligned with the address type supported by the layerProtocol property of the connection point required: true constraints: - valid_values: [ mac_address, ip_address ] l2_address_data: type: tosca.datatypes.nfv.L2AddressData description: Provides the information on the MAC addresses to be assigned to a connection point. required: false l3_address_data: type: tosca.datatypes.nfv.L3AddressData description: Provides the information on the IP addresses to be assigned to a connection point required: false tosca.datatypes.nfv.ConnectivityType: derived_from: tosca.datatypes.Root description: describes additional connectivity information of a virtualLink properties: layer_protocols: type: list description: Identifies the protocol a virtualLink gives access to (ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire).The top layer protocol of the virtualLink protocol stack shall always be provided. The lower layer protocols may be included when there are specific requirements on these layers. 
required: true entry_schema: type: string constraints: - valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ] flow_pattern: type: string description: Identifies the flow pattern of the connectivity required: false constraints: - valid_values: [ line, tree, mesh ] tosca.datatypes.nfv.LinkBitrateRequirements: derived_from: tosca.datatypes.Root description: describes the requirements in terms of bitrate for a virtual link properties: root: type: integer # in bits per second description: Specifies the throughput requirement in bits per second of the link (e.g. bitrate of E-Line, root bitrate of E-Tree, aggregate capacity of E-LAN). required: true constraints: - greater_or_equal: 0 leaf: type: integer # in bits per second description: Specifies the throughput requirement in bits per second of leaf connections to the link when applicable to the connectivity type (e.g. for E-Tree and E LAN branches). required: false constraints: - greater_or_equal: 0 tosca.datatypes.nfv.CpProtocolData: derived_from: tosca.datatypes.Root description: Describes and associates the protocol layer that a CP uses together with other protocol and connection point information properties: associated_layer_protocol: type: string required: true description: One of the values of the property layer_protocols of the CP constraints: - valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ] address_data: type: list description: Provides information on the addresses to be assigned to the CP entry_schema: type: tosca.datatypes.nfv.AddressData required: false tosca.datatypes.nfv.VnfProfile: derived_from: tosca.datatypes.Root description: describes a profile for instantiating VNFs of a particular NS DF according to a specific VNFD and VNF DF. properties: instantiation_level: type: string description: Identifier of the instantiation level of the VNF DF to be used for instantiation. If not present, the default instantiation level as declared in the VNFD shall be used. required: false min_number_of_instances: type: integer description: Minimum number of instances of the VNF based on this VNFD that is permitted to exist for this VnfProfile. required: true constraints: - greater_or_equal: 0 max_number_of_instances: type: integer description: Maximum number of instances of the VNF based on this VNFD that is permitted to exist for this VnfProfile. 
required: true constraints: - greater_or_equal: 0 tosca.datatypes.nfv.Qos: derived_from: tosca.datatypes.Root description: describes QoS data for a given VL used in a VNF deployment flavour properties: latency: type: scalar-unit.time #Number description: Specifies the maximum latency required: true constraints: - greater_than: 0 s packet_delay_variation: type: scalar-unit.time #Number description: Specifies the maximum jitter required: true constraints: - greater_or_equal: 0 s packet_loss_ratio: type: float description: Specifies the maximum packet loss ratio required: false constraints: - in_range: [ 0.0, 1.0 ] capability_types: tosca.capabilities.nfv.VirtualLinkable: derived_from: tosca.capabilities.Node description: A node type that includes the VirtualLinkable capability indicates that it can be pointed by tosca.relationships.nfv.VirtualLinksTo relationship type relationship_types: tosca.relationships.nfv.VirtualLinksTo: derived_from: tosca.relationships.DependsOn description: Represents an association relationship between the VduCp and VnfVirtualLink node types valid_target_types: [ tosca.capabilities.nfv.VirtualLinkable ] node_types: tosca.nodes.nfv.Cp: derived_from: tosca.nodes.Root description: Provides information regarding the purpose of the connection point properties: layer_protocols: type: list description: Identifies which protocol the connection point uses for connectivity purposes required: true entry_schema: type: string constraints: - valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ] role: #Name in ETSI NFV IFA011 v0.7.3: cpRole type: string description: Identifies the role of the port in the context of the traffic flow patterns in the VNF or parent NS required: false constraints: - valid_values: [ root, leaf ] description: type: string description: Provides human-readable information on the purpose of the connection point required: false protocol: type: list description: Provides information on the addresses to be assigned to the connection point(s) instantiated from this Connection Point Descriptor required: false entry_schema: type: tosca.datatypes.nfv.CpProtocolData trunk_mode: type: boolean description: Provides information about whether the CP instantiated from this Cp is in Trunk mode (802.1Q or other), When operating in "trunk mode", the Cp is capable of carrying traffic for several VLANs. Absence of this property implies that trunkMode is not configured for the Cp i.e. It is equivalent to boolean value "false". 
required: false ././@PaxHeader0000000000000000000000000000024400000000000011455 xustar0000000000000000142 path=python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package/Definitions/etsi_nfv_sol001_vnfd_types.yaml 22 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package/Definitions0000664000175000017500000020325400000000000034117 0ustar00zuulzuul00000000000000# TODO:Manually change from version 1.2 to 1.0 tosca_definitions_version: tosca_simple_yaml_1_0 #tosca_definitions_version: tosca_simple_yaml_1_2 description: ETSI NFV SOL 001 vnfd types definitions version 2.6.1 metadata: template_name: etsi_nfv_sol001_vnfd_types template_author: ETSI_NFV template_version: 2.6.1 # TODO:Manually change from version 1.2 to 1.0 #imports: # - https://forge.etsi.org/rep/nfv/sol001/raw/v2.6.1/etsi_nfv_sol001_common_types.yaml data_types: tosca.datatypes.nfv.VirtualNetworkInterfaceRequirements: derived_from: tosca.datatypes.Root description: Describes requirements on a virtual network interface properties: name: type: string description: Provides a human readable name for the requirement. required: false description: type: string description: Provides a human readable description of the requirement. required: false support_mandatory: type: boolean description: Indicates whether fulfilling the constraint is mandatory (TRUE) for successful operation or desirable (FALSE). required: true network_interface_requirements: type: map description: The network interface requirements. A map of strings that contain a set of key-value pairs that describes the hardware platform specific network interface deployment requirements. required: true entry_schema: type: string nic_io_requirements: type: tosca.datatypes.nfv.LogicalNodeData description: references (couples) the CP with any logical node I/O requirements (for network devices) that may have been created. Linking these attributes is necessary so that so that I/O requirements that need to be articulated at the logical node level can be associated with the network interface requirements associated with the CP. required: false tosca.datatypes.nfv.RequestedAdditionalCapability: derived_from: tosca.datatypes.Root description: describes requested additional capability for a particular VDU properties: requested_additional_capability_name: type: string description: Identifies a requested additional capability for the VDU. required: true support_mandatory: type: boolean description: Indicates whether the requested additional capability is mandatory for successful operation. required: true min_requested_additional_capability_version: type: string description: Identifies the minimum version of the requested additional capability. required: false preferred_requested_additional_capability_version: type: string description: Identifies the preferred version of the requested additional capability. required: false target_performance_parameters: type: map description: Identifies specific attributes, dependent on the requested additional capability type. required: true entry_schema: type: string tosca.datatypes.nfv.VirtualMemory: derived_from: tosca.datatypes.Root description: supports the specification of requirements related to virtual memory of a virtual compute resource properties: virtual_mem_size: type: scalar-unit.size description: Amount of virtual memory. 
required: true virtual_mem_oversubscription_policy: type: string description: The memory core oversubscription policy in terms of virtual memory to physical memory on the platform. required: false vdu_mem_requirements: type: map description: The hardware platform specific VDU memory requirements. A map of strings that contains a set of key-value pairs that describes hardware platform specific VDU memory requirements. required: false entry_schema: type: string numa_enabled: type: boolean description: It specifies the memory allocation to be cognisant of the relevant process/core allocation. required: false default: false tosca.datatypes.nfv.VirtualCpu: derived_from: tosca.datatypes.Root description: Supports the specification of requirements related to virtual CPU(s) of a virtual compute resource properties: cpu_architecture: type: string description: CPU architecture type. Examples are x86, ARM required: false num_virtual_cpu: type: integer description: Number of virtual CPUs required: true constraints: - greater_than: 0 virtual_cpu_clock: type: scalar-unit.frequency description: Minimum virtual CPU clock rate required: false virtual_cpu_oversubscription_policy: type: string description: CPU core oversubscription policy e.g. the relation of virtual CPU cores to physical CPU cores/threads. required: false vdu_cpu_requirements: type: map description: The hardware platform specific VDU CPU requirements. A map of strings that contains a set of key-value pairs describing VDU CPU specific hardware platform requirements. required: false entry_schema: type: string virtual_cpu_pinning: type: tosca.datatypes.nfv.VirtualCpuPinning description: The virtual CPU pinning configuration for the virtualised compute resource. required: false tosca.datatypes.nfv.VirtualCpuPinning: derived_from: tosca.datatypes.Root description: Supports the specification of requirements related to the virtual CPU pinning configuration of a virtual compute resource properties: virtual_cpu_pinning_policy: type: string description: 'Indicates the policy for CPU pinning. The policy can take values of "static" or "dynamic". In case of "dynamic" the allocation of virtual CPU cores to logical CPU cores is decided by the VIM. (e.g.: SMT (Simultaneous Multi-Threading) requirements). In case of "static" the allocation is requested to be according to the virtual_cpu_pinning_rule.' required: false constraints: - valid_values: [ static, dynamic ] virtual_cpu_pinning_rule: type: list description: Provides the list of rules for allocating virtual CPU cores to logical CPU cores/threads required: false entry_schema: type: string tosca.datatypes.nfv.VnfcConfigurableProperties: derived_from: tosca.datatypes.Root description: Defines the configurable properties of a VNFC # properties: # additional_vnfc_configurable_properties: # type: tosca.datatypes.nfv.VnfcAdditionalConfigurableProperties # description: Describes additional configuration for VNFC that # can be modified using the ModifyVnfInfo operation # required: false # derived types are expected to introduce # additional_vnfc_configurable_properties with its type derived from # tosca.datatypes.nfv.VnfcAdditionalConfigurableProperties tosca.datatypes.nfv.VnfcAdditionalConfigurableProperties: derived_from: tosca.datatypes.Root description: VnfcAdditionalConfigurableProperties type is an empty base type for deriving data types for describing additional configurable properties for a given VNFC. 
tosca.datatypes.nfv.VduProfile: derived_from: tosca.datatypes.Root description: describes additional instantiation data for a given Vdu.Compute used in a specific deployment flavour. properties: min_number_of_instances: type: integer description: Minimum number of instances of the VNFC based on this Vdu.Compute that is permitted to exist for a particular VNF deployment flavour. required: true constraints: - greater_or_equal: 0 max_number_of_instances: type: integer description: Maximum number of instances of the VNFC based on this Vdu.Compute that is permitted to exist for a particular VNF deployment flavour. required: true constraints: - greater_or_equal: 0 tosca.datatypes.nfv.VlProfile: derived_from: tosca.datatypes.Root description: Describes additional instantiation data for a given VL used in a specific VNF deployment flavour. properties: max_bitrate_requirements: type: tosca.datatypes.nfv.LinkBitrateRequirements description: Specifies the maximum bitrate requirements for a VL instantiated according to this profile. required: true min_bitrate_requirements: type: tosca.datatypes.nfv.LinkBitrateRequirements description: Specifies the minimum bitrate requirements for a VL instantiated according to this profile. required: true qos: type: tosca.datatypes.nfv.Qos description: Specifies the QoS requirements of a VL instantiated according to this profile. required: false virtual_link_protocol_data: type: list description: Specifies the protocol data for a virtual link. required: false entry_schema: type: tosca.datatypes.nfv.VirtualLinkProtocolData tosca.datatypes.nfv.VirtualLinkProtocolData: derived_from: tosca.datatypes.Root description: describes one protocol layer and associated protocol data for a given virtual link used in a specific VNF deployment flavour properties: associated_layer_protocol: type: string description: Identifies one of the protocols a virtualLink gives access to (ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire) as specified by the connectivity_type property. required: true constraints: - valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ] l2_protocol_data: type: tosca.datatypes.nfv.L2ProtocolData description: Specifies the L2 protocol data for a virtual link. Shall be present when the associatedLayerProtocol attribute indicates a L2 protocol and shall be absent otherwise. required: false l3_protocol_data: type: tosca.datatypes.nfv.L3ProtocolData description: Specifies the L3 protocol data for this virtual link. Shall be present when the associatedLayerProtocol attribute indicates a L3 protocol and shall be absent otherwise. required: false tosca.datatypes.nfv.L2ProtocolData: derived_from: tosca.datatypes.Root description: describes L2 protocol data for a given virtual link used in a specific VNF deployment flavour. properties: name: type: string description: Identifies the network name associated with this L2 protocol. required: false network_type: type: string description: Specifies the network type for this L2 protocol.The value may be overridden at run-time. required: false constraints: - valid_values: [ flat, vlan, vxlan, gre ] vlan_transparent: type: boolean description: Specifies whether to support VLAN transparency for this L2 protocol or not. required: false default: false mtu: type: integer description: Specifies the maximum transmission unit (MTU) value for this L2 protocol. 
required: false constraints: - greater_than: 0 tosca.datatypes.nfv.L3ProtocolData: derived_from: tosca.datatypes.Root description: describes L3 protocol data for a given virtual link used in a specific VNF deployment flavour. properties: name: type: string description: Identifies the network name associated with this L3 protocol. required: false ip_version: type: string description: Specifies IP version of this L3 protocol.The value of the ip_version property shall be consistent with the value of the layer_protocol in the connectivity_type property of the virtual link node. required: true constraints: - valid_values: [ ipv4, ipv6 ] cidr: type: string description: Specifies the CIDR (Classless Inter-Domain Routing) of this L3 protocol. The value may be overridden at run-time. required: true ip_allocation_pools: type: list description: Specifies the allocation pools with start and end IP addresses for this L3 protocol. The value may be overridden at run-time. required: false entry_schema: type: tosca.datatypes.nfv.IpAllocationPool gateway_ip: type: string description: Specifies the gateway IP address for this L3 protocol. The value may be overridden at run-time. required: false dhcp_enabled: type: boolean description: Indicates whether DHCP (Dynamic Host Configuration Protocol) is enabled or disabled for this L3 protocol. The value may be overridden at run-time. required: false ipv6_address_mode: type: string description: Specifies IPv6 address mode. May be present when the value of the ipVersion attribute is "ipv6" and shall be absent otherwise. The value may be overridden at run-time. required: false constraints: - valid_values: [ slaac, dhcpv6-stateful, dhcpv6-stateless ] tosca.datatypes.nfv.IpAllocationPool: derived_from: tosca.datatypes.Root description: Specifies a range of IP addresses properties: start_ip_address: type: string description: The IP address to be used as the first one in a pool of addresses derived from the cidr block full IP range required: true end_ip_address: type: string description: The IP address to be used as the last one in a pool of addresses derived from the cidr block full IP range required: true tosca.datatypes.nfv.InstantiationLevel: derived_from: tosca.datatypes.Root description: Describes the scale level for each aspect that corresponds to a given level of resources to be instantiated within a deployment flavour in term of the number VNFC instances properties: description: type: string description: Human readable description of the level required: true scale_info: type: map # key: aspectId description: Represents for each aspect the scale level that corresponds to this instantiation level. scale_info shall be present if the VNF supports scaling. required: false entry_schema: type: tosca.datatypes.nfv.ScaleInfo tosca.datatypes.nfv.VduLevel: derived_from: tosca.datatypes.Root description: Indicates for a given Vdu.Compute in a given level the number of instances to deploy properties: number_of_instances: type: integer description: Number of instances of VNFC based on this VDU to deploy for this level. 
required: true constraints: - greater_or_equal: 0 tosca.datatypes.nfv.VnfLcmOperationsConfiguration: derived_from: tosca.datatypes.Root description: Represents information to configure lifecycle management operations properties: instantiate: type: tosca.datatypes.nfv.VnfInstantiateOperationConfiguration description: Configuration parameters for the InstantiateVnf operation required: false scale: type: tosca.datatypes.nfv.VnfScaleOperationConfiguration description: Configuration parameters for the ScaleVnf operation required: false scale_to_level: type: tosca.datatypes.nfv.VnfScaleToLevelOperationConfiguration description: Configuration parameters for the ScaleVnfToLevel operation required: false change_flavour: type: tosca.datatypes.nfv.VnfChangeFlavourOperationConfiguration description: Configuration parameters for the changeVnfFlavourOpConfig operation required: false heal: type: tosca.datatypes.nfv.VnfHealOperationConfiguration description: Configuration parameters for the HealVnf operation required: false terminate: type: tosca.datatypes.nfv.VnfTerminateOperationConfiguration description: Configuration parameters for the TerminateVnf operation required: false operate: type: tosca.datatypes.nfv.VnfOperateOperationConfiguration description: Configuration parameters for the OperateVnf operation required: false change_ext_connectivity: type: tosca.datatypes.nfv.VnfChangeExtConnectivityOperationConfiguration description: Configuration parameters for the changeExtVnfConnectivityOpConfig operation required: false tosca.datatypes.nfv.VnfInstantiateOperationConfiguration: derived_from: tosca.datatypes.Root description: represents information that affect the invocation of the InstantiateVnf operation. tosca.datatypes.nfv.VnfScaleOperationConfiguration: derived_from: tosca.datatypes.Root description: Represents information that affect the invocation of the ScaleVnf operation properties: scaling_by_more_than_one_step_supported: type: boolean description: Signals whether passing a value larger than one in the numScalingSteps parameter of the ScaleVnf operation is supported by this VNF. required: false default: false tosca.datatypes.nfv.VnfScaleToLevelOperationConfiguration: derived_from: tosca.datatypes.Root description: represents information that affect the invocation of the ScaleVnfToLevel operation properties: arbitrary_target_levels_supported: type: boolean description: Signals whether scaling according to the parameter "scaleInfo" is supported by this VNF required: true tosca.datatypes.nfv.VnfHealOperationConfiguration: derived_from: tosca.datatypes.Root description: represents information that affect the invocation of the HealVnf operation properties: causes: type: list description: Supported "cause" parameter values required: false entry_schema: type: string tosca.datatypes.nfv.VnfTerminateOperationConfiguration: derived_from: tosca.datatypes.Root description: represents information that affect the invocation of the TerminateVnf properties: min_graceful_termination_timeout: type: scalar-unit.time description: Minimum timeout value for graceful termination of a VNF instance required: true max_recommended_graceful_termination_timeout: type: scalar-unit.time description: Maximum recommended timeout value that can be needed to gracefully terminate a VNF instance of a particular type under certain conditions, such as maximum load condition. This is provided by VNF provider as information for the operator facilitating the selection of optimal timeout value. 
This value is not used as constraint required: false tosca.datatypes.nfv.VnfOperateOperationConfiguration: derived_from: tosca.datatypes.Root description: represents information that affect the invocation of the OperateVnf operation properties: min_graceful_stop_timeout: type: scalar-unit.time description: Minimum timeout value for graceful stop of a VNF instance required: true max_recommended_graceful_stop_timeout: type: scalar-unit.time description: Maximum recommended timeout value that can be needed to gracefully stop a VNF instance of a particular type under certain conditions, such as maximum load condition. This is provided by VNF provider as information for the operator facilitating the selection of optimal timeout value. This value is not used as constraint required: false tosca.datatypes.nfv.ScaleInfo: derived_from: tosca.datatypes.Root description: Indicates for a given scaleAspect the corresponding scaleLevel properties: scale_level: type: integer description: The scale level for a particular aspect required: true constraints: - greater_or_equal: 0 tosca.datatypes.nfv.ScalingAspect: derived_from: tosca.datatypes.Root properties: name: type: string required: true description: type: string required: true max_scale_level: type: integer # positiveInteger required: true constraints: - greater_or_equal: 0 step_deltas: type: list required: false entry_schema: type: string # Identifier tosca.datatypes.nfv.VnfConfigurableProperties: derived_from: tosca.datatypes.Root description: indicates configuration properties for a given VNF (e.g. related to auto scaling and auto healing). properties: is_autoscale_enabled: type: boolean description: It permits to enable (TRUE)/disable (FALSE) the auto-scaling functionality. If the properties is not present for configuring, then VNF property is not supported required: false is_autoheal_enabled: type: boolean description: It permits to enable (TRUE)/disable (FALSE) the auto-healing functionality. 
If the properties is not present for configuring, then VNF property is not supported required: false # additional_configurable_properties: # description: It provides VNF specific configurable properties that # can be modified using the ModifyVnfInfo operation # required: false # type: tosca.datatypes.nfv.VnfAdditionalConfigurableProperties # derived types are expected to introduce # additional_configurable_properties with its type derived from # tosca.datatypes.nfv.VnfAdditionalConfigurableProperties tosca.datatypes.nfv.VnfAdditionalConfigurableProperties: derived_from: tosca.datatypes.Root description: is an empty base type for deriving data types for describing additional configurable properties for a given VNF tosca.datatypes.nfv.VnfInfoModifiableAttributes: derived_from: tosca.datatypes.Root description: Describes VNF-specific extension and metadata for a given VNF #properties: #extensions: #type: tosca.datatypes.nfv.VnfInfoModifiableAttributesExtensions #description: "Extension" properties of VnfInfo that are writeable #required: false # derived types are expected to introduce # extensions with its type derived from # tosca.datatypes.nfv.VnfInfoModifiableAttributesExtensions #metadata: #type: tosca.datatypes.nfv.VnfInfoModifiableAttributesMetadata #description: "Metadata" properties of VnfInfo that are writeable #required: false # derived types are expected to introduce # metadata with its type derived from # tosca.datatypes.nfv.VnfInfoModifiableAttributesMetadata tosca.datatypes.nfv.VnfInfoModifiableAttributesExtensions: derived_from: tosca.datatypes.Root description: is an empty base type for deriving data types for describing VNF-specific extension tosca.datatypes.nfv.VnfInfoModifiableAttributesMetadata: derived_from: tosca.datatypes.Root description: is an empty base type for deriving data types for describing VNF-specific metadata tosca.datatypes.nfv.LogicalNodeData: derived_from: tosca.datatypes.Root description: Describes compute, memory and I/O requirements associated with a particular VDU. properties: logical_node_requirements: type: map description: The logical node-level compute, memory and I/O requirements. A map of strings that contains a set of key-value pairs that describes hardware platform specific deployment requirements, including the number of CPU cores on this logical node, a memory configuration specific to a logical node or a requirement related to the association of an I/O device with the logical node. 
required: false entry_schema: type: string tosca.datatypes.nfv.SwImageData: derived_from: tosca.datatypes.Root description: describes information related to a software image artifact properties: # in SOL001 v0.8.0: "properties or metadata:" name: type: string description: Name of this software image required: true version: type: string description: Version of this software image required: true checksum: type: tosca.datatypes.nfv.ChecksumData description: Checksum of the software image file required: true container_format: type: string description: The container format describes the container file format in which software image is provided required: true constraints: - valid_values: [ aki, ami, ari, bare, docker, ova, ovf ] disk_format: type: string description: The disk format of a software image is the format of the underlying disk image required: true constraints: - valid_values: [ aki, ami, ari, iso, qcow2, raw, vdi, vhd, vhdx, vmdk ] min_disk: type: scalar-unit.size # Number description: The minimal disk size requirement for this software image required: true constraints: - greater_or_equal: 0 B min_ram: type: scalar-unit.size # Number description: The minimal RAM requirement for this software image required: false constraints: - greater_or_equal: 0 B size: type: scalar-unit.size # Number description: The size of this software image required: true operating_system: type: string description: Identifies the operating system used in the software image required: false supported_virtualisation_environments: type: list description: Identifies the virtualisation environments (e.g. hypervisor) compatible with this software image required: false entry_schema: type: string tosca.datatypes.nfv.VirtualBlockStorageData: derived_from: tosca.datatypes.Root description: VirtualBlockStorageData describes block storage requirements associated with compute resources in a particular VDU, either as a local disk or as virtual attached storage properties: size_of_storage: type: scalar-unit.size description: Size of virtualised storage resource required: true constraints: - greater_or_equal: 0 B vdu_storage_requirements: type: map description: The hardware platform specific storage requirements. A map of strings that contains a set of key-value pairs that represents the hardware platform specific storage deployment requirements. required: false entry_schema: type: string rdma_enabled: type: boolean description: Indicates if the storage support RDMA required: false default: false tosca.datatypes.nfv.VirtualObjectStorageData: derived_from: tosca.datatypes.Root description: VirtualObjectStorageData describes object storage requirements associated with compute resources in a particular VDU properties: max_size_of_storage: type: scalar-unit.size description: Maximum size of virtualized storage resource required: false constraints: - greater_or_equal: 0 B tosca.datatypes.nfv.VirtualFileStorageData: derived_from: tosca.datatypes.Root description: VirtualFileStorageData describes file storage requirements associated with compute resources in a particular VDU properties: size_of_storage: type: scalar-unit.size description: Size of virtualized storage resource required: true constraints: - greater_or_equal: 0 B file_system_protocol: type: string description: The shared file system protocol (e.g. 
NFS, CIFS) required: true tosca.datatypes.nfv.VirtualLinkBitrateLevel: derived_from: tosca.datatypes.Root description: Describes bitrate requirements applicable to the virtual link instantiated from a particicular VnfVirtualLink properties: bitrate_requirements: type: tosca.datatypes.nfv.LinkBitrateRequirements description: Virtual link bitrate requirements for an instantiation level or bitrate delta for a scaling step required: true tosca.datatypes.nfv.VnfOperationAdditionalParameters: derived_from: tosca.datatypes.Root description: Is an empty base type for deriving data type for describing VNF-specific parameters to be passed when invoking lifecycle management operations #properties: tosca.datatypes.nfv.VnfChangeFlavourOperationConfiguration: derived_from: tosca.datatypes.Root description: represents information that affect the invocation of the ChangeVnfFlavour operation #properties: tosca.datatypes.nfv.VnfChangeExtConnectivityOperationConfiguration: derived_from: tosca.datatypes.Root description: represents information that affect the invocation of the ChangeExtVnfConnectivity operation #properties: tosca.datatypes.nfv.VnfMonitoringParameter: derived_from: tosca.datatypes.Root description: Represents information on virtualised resource related performance metrics applicable to the VNF. properties: name: type: string description: Human readable name of the monitoring parameter required: true performance_metric: type: string description: Identifies the performance metric, according to ETSI GS NFV-IFA 027. required: true constraints: - valid_values: [ v_cpu_usage_mean_vnf, v_cpu_usage_peak_vnf, v_memory_usage_mean_vnf, v_memory_usage_peak_vnf, v_disk_usage_mean_vnf, v_disk_usage_peak_vnf, byte_incoming_vnf_ext_cp, byte_outgoing_vnf_ext_cp, packet_incoming_vnf_ext_cp, packet_outgoing_vnf_ext_cp ] collection_period: type: scalar-unit.time description: Describes the periodicity at which to collect the performance information. required: false constraints: - greater_than: 0 s tosca.datatypes.nfv.VnfcMonitoringParameter: derived_from: tosca.datatypes.Root description: Represents information on virtualised resource related performance metrics applicable to the VNF. properties: name: type: string description: Human readable name of the monitoring parameter required: true performance_metric: type: string description: Identifies the performance metric, according to ETSI GS NFV-IFA 027. required: true constraints: - valid_values: [ v_cpu_usage_mean_vnf, v_cpu_usage_peak_vnf, v_memory_usage_mean_vnf, v_memory_usage_peak_vnf, v_disk_usage_mean_vnf, v_disk_usage_peak_vnf, byte_incoming_vnf_int_cp, byte_outgoing_vnf_int_cp, packet_incoming_vnf_int_cp, packet_outgoing_vnf_int_cp ] collection_period: type: scalar-unit.time description: Describes the periodicity at which to collect the performance information. required: false constraints: - greater_than: 0 s tosca.datatypes.nfv.VirtualLinkMonitoringParameter: derived_from: tosca.datatypes.Root description: Represents information on virtualised resource related performance metrics applicable to the VNF. 
properties: name: type: string description: Human readable name of the monitoring parameter required: true performance_metric: type: string description: Identifies a performance metric derived from those defined in ETSI GS NFV-IFA 027.The packetOutgoingVirtualLink and packetIncomingVirtualLink metrics shall be obtained by aggregation the PacketOutgoing and PacketIncoming measurements defined in clause 7.1 of GS NFV-IFA 027 of all virtual link ports attached to the virtual link to which the metrics apply. required: true constraints: - valid_values: [ packet_outgoing_virtual_link, packet_incoming_virtual_link ] collection_period: type: scalar-unit.time description: Describes the periodicity at which to collect the performance information. required: false constraints: - greater_than: 0 s tosca.datatypes.nfv.InterfaceDetails: derived_from: tosca.datatypes.Root description: information used to access an interface exposed by a VNF properties: uri_components: type: tosca.datatypes.nfv.UriComponents description: Provides components to build a Uniform Ressource Identifier (URI) where to access the interface end point. required: false interface_specific_data: type: map description: Provides additional details that are specific to the type of interface considered. required: false entry_schema: type: string tosca.datatypes.nfv.UriComponents: derived_from: tosca.datatypes.Root description: information used to build a URI that complies with IETF RFC 3986 [8]. properties: scheme: type: string # shall comply with IETF RFC3986 description: scheme component of a URI. required: true authority: type: tosca.datatypes.nfv.UriAuthority description: Authority component of a URI required: false path: type: string # shall comply with IETF RFC 3986 description: path component of a URI. required: false query: type: string # shall comply with IETF RFC 3986 description: query component of a URI. required: false fragment: type: string # shall comply with IETF RFC 3986 description: fragment component of a URI. required: false tosca.datatypes.nfv.UriAuthority: derived_from: tosca.datatypes.Root description: information that corresponds to the authority component of a URI as specified in IETF RFC 3986 [8] properties: user_info: type: string # shall comply with IETF RFC 3986 description: user_info field of the authority component of a URI required: false host: type: string # shall comply with IETF RFC 3986 description: host field of the authority component of a URI required: false port: type: string # shall comply with IETF RFC 3986 description: port field of the authority component of a URI required: false tosca.datatypes.nfv.ChecksumData: derived_from: tosca.datatypes.Root description: Describes information about the result of performing a checksum operation over some arbitrary data properties: algorithm: type: string description: Describes the algorithm used to obtain the checksum value required: true constraints: - valid_values: [sha-224, sha-256, sha-384, sha-512 ] hash: type: string description: Contains the result of applying the algorithm indicated by the algorithm property to the data to which this ChecksumData refers required: true artifact_types: tosca.artifacts.nfv.SwImage: derived_from: tosca.artifacts.Deployment.Image description: describes the software image which is directly loaded on the virtualisation container realizing of the VDU or is to be loaded on a virtual storage resource. 
tosca.artifacts.Implementation.nfv.Mistral: derived_from: tosca.artifacts.Implementation description: artifacts for Mistral workflows mime_type: application/x-yaml file_ext: [ yaml ] capability_types: tosca.capabilities.nfv.VirtualBindable: derived_from: tosca.capabilities.Node description: Indicates that the node that includes it can be pointed by a tosca.relationships.nfv.VirtualBindsTo relationship type which is used to model the VduHasCpd association tosca.capabilities.nfv.VirtualCompute: derived_from: tosca.capabilities.Node description: Describes the capabilities related to virtual compute resources properties: logical_node: type: map description: Describes the Logical Node requirements required: false entry_schema: type: tosca.datatypes.nfv.LogicalNodeData requested_additional_capabilities: type: map description: Describes additional capability for a particular VDU required: false entry_schema: type: tosca.datatypes.nfv.RequestedAdditionalCapability compute_requirements: type: map required: false entry_schema: type: string virtual_memory: type: tosca.datatypes.nfv.VirtualMemory description: Describes virtual memory of the virtualized compute required: true virtual_cpu: type: tosca.datatypes.nfv.VirtualCpu description: Describes virtual CPU(s) of the virtualized compute required: true virtual_local_storage: type: list description: A list of virtual system disks created and destroyed as part of the VM lifecycle required: false entry_schema: type: tosca.datatypes.nfv.VirtualBlockStorageData description: virtual system disk definition tosca.capabilities.nfv.VirtualStorage: derived_from: tosca.capabilities.Root description: Describes the attachment capabilities related to Vdu.Storage relationship_types: tosca.relationships.nfv.VirtualBindsTo: derived_from: tosca.relationships.DependsOn description: Represents an association relationship between Vdu.Compute and VduCp node types valid_target_types: [ tosca.capabilities.nfv.VirtualBindable ] tosca.relationships.nfv.AttachesTo: derived_from: tosca.relationships.Root description: Represents an association relationship between the Vdu.Compute and one of the node types, Vdu.VirtualBlockStorage, Vdu.VirtualObjectStorage or Vdu.VirtualFileStorage valid_target_types: [ tosca.capabilities.nfv.VirtualStorage ] interface_types: tosca.interfaces.nfv.Vnflcm: derived_from: tosca.interfaces.Root description: This interface encompasses a set of TOSCA operations corresponding to the VNF LCM operations defined in ETSI GS NFV-IFA 007 as well as to preamble and postamble procedures to the execution of the VNF LCM operations. 
instantiate: description: Invoked upon receipt of an Instantiate VNF request # inputs: # additional_parameters: # type: tosca.datatypes.nfv.VnfOperationAdditionalParameters # required: false # derived types are expected to introduce additional_parameters with # its type derived from # tosca.datatypes.nfv.VnfOperationAdditionalParameters instantiate_start: description: Invoked before instantiate instantiate_end: description: Invoked after instantiate terminate: description: Invoked upon receipt Terminate VNF request # inputs: # additional_parameters: # type: tosca.datatypes.nfv.VnfOperationAdditionalParameters # required: false # derived types are expected to introduce additional_parameters with # its type derived from # tosca.datatypes.nfv.VnfOperationAdditionalParameters terminate_start: description: Invoked before terminate terminate_end: description: Invoked after terminate modify_information: description: Invoked upon receipt of a Modify VNF Information request modify_information_start: description: Invoked before modify_information modify_information_end: description: Invoked after modify_information change_flavour: description: Invoked upon receipt of a Change VNF Flavour request # inputs: # additional_parameters: # type: tosca.datatypes.nfv.VnfOperationAdditionalParameters # required: false # derived types are expected to introduce additional_parameters with # its type derived from # tosca.datatypes.nfv.VnfOperationAdditionalParameters change_flavour_start: description: Invoked before change_flavour change_flavour_end: description: Invoked after change_flavour change_external_connectivity: description: Invoked upon receipt of a Change External VNF Connectivity request # inputs: # additional_parameters: # type: tosca.datatypes.nfv.VnfOperationAdditionalParameters # required: false # derived types are expected to introduce additional_parameters with # its type derived from # tosca.datatypes.nfv.VnfOperationAdditionalParameters change_external_connectivity_start: description: Invoked before change_external_connectivity change_external_connectivity_end: description: Invoked after change_external_connectivity operate: description: Invoked upon receipt of an Operate VNF request # inputs: # additional_parameters: # type: tosca.datatypes.nfv.VnfOperationAdditionalParameters # required: false # derived types are expected to introduce additional_parameters with # its type derived from # tosca.datatypes.nfv.VnfOperationAdditionalParameters operate_start: description: Invoked before operate operate_end: description: Invoked after operate heal: description: Invoked upon receipt of a Heal VNF request # inputs: # additional_parameters: # type: tosca.datatypes.nfv.VnfOperationAdditionalParameters # required: false # derived types are expected to introduce additional_parameters with # its type derived from # tosca.datatypes.nfv.VnfOperationAdditionalParameters heal_start: description: Invoked before heal heal_end: description: Invoked after heal scale: description: Invoked upon receipt of a Scale VNF request # inputs: # additional_parameters: # type: tosca.datatypes.nfv.VnfOperationAdditionalParameters # required: false # derived types are expected to introduce additional_parameters with # its type derived from # tosca.datatypes.nfv.VnfOperationAdditionalParameters scale_start: description: Invoked before scale scale_end: description: Invoked after scale scale_to_level: description: Invoked upon receipt of a Scale VNF to Level request # inputs: # additional_parameters: # type: 
tosca.datatypes.nfv.VnfOperationAdditionalParameters # required: false # derived types are expected to introduce additional_parameters with # its type derived from # tosca.datatypes.nfv.VnfOperationAdditionalParameters scale_to_level_start: description: Invoked before scale_to_level scale_to_level_end: description: Invoked after scale_to_level node_types: tosca.nodes.nfv.VNF: derived_from: tosca.nodes.Root description: The generic abstract type from which all VNF specific abstract node types shall be derived to form, together with other node types, the TOSCA service template(s) representing the VNFD properties: descriptor_id: # instead of vnfd_id type: string # GUID description: Globally unique identifier of the VNFD required: true descriptor_version: # instead of vnfd_version type: string description: Identifies the version of the VNFD required: true provider: # instead of vnf_provider type: string description: Provider of the VNF and of the VNFD required: true product_name: # instead of vnf_product_name type: string description: Human readable name for the VNF Product required: true software_version: # instead of vnf_software_version type: string description: Software version of the VNF required: true product_info_name: # instead of vnf_product_info_name type: string description: Human readable name for the VNF Product required: false product_info_description: # instead of vnf_product_info_description type: string description: Human readable description of the VNF Product required: false vnfm_info: type: list required: true description: Identifies VNFM(s) compatible with the VNF entry_schema: type: string constraints: - pattern: (^etsivnfm:v[0-9]?[0-9]\.[0-9]?[0-9]\.[0-9]?[0-9]$)|(^[0-9]+:[a-zA-Z0-9.-]+$) localization_languages: type: list description: Information about localization languages of the VNF required: false entry_schema: type: string #IETF RFC 5646 string default_localization_language: type: string #IETF RFC 5646 string description: Default localization language that is instantiated if no information about selected localization language is available required: false #configurable_properties: #type: tosca.datatypes.nfv.VnfConfigurableProperties #description: Describes the configurable properties of the VNF #required: false # derived types are expected to introduce configurable_properties # with its type derived from # tosca.datatypes.nfv.VnfConfigurableProperties #modifiable_attributes: #type: tosca.datatypes.nfv.VnfInfoModifiableAttributes #description: Describes the modifiable attributes of the VNF #required: false # derived types are expected to introduce modifiable_attributes # with its type derived from # tosca.datatypes.nfv.VnfInfoModifiableAttributes lcm_operations_configuration: type: tosca.datatypes.nfv.VnfLcmOperationsConfiguration description: Describes the configuration parameters for the VNF LCM operations required: false monitoring_parameters: type: list entry_schema: type: tosca.datatypes.nfv.VnfMonitoringParameter description: Describes monitoring parameters applicable to the VNF. 
required: false flavour_id: type: string description: Identifier of the Deployment Flavour within the VNFD required: true flavour_description: type: string description: Human readable description of the DF required: true vnf_profile: type: tosca.datatypes.nfv.VnfProfile description: Describes a profile for instantiating VNFs of a particular NS DF according to a specific VNFD and VNF DF required: false requirements: - virtual_link: capability: tosca.capabilities.nfv.VirtualLinkable relationship: tosca.relationships.nfv.VirtualLinksTo occurrences: [ 0, 1 ] # Additional requirements shall be defined in the VNF specific node type (deriving from tosca.nodes.nfv.VNF) corresponding to NS virtual links that need to connect to VnfExtCps interfaces: Vnflcm: type: tosca.interfaces.nfv.Vnflcm tosca.nodes.nfv.VnfExtCp: derived_from: tosca.nodes.nfv.Cp description: Describes a logical external connection point, exposed by the VNF enabling connection with an external Virtual Link properties: virtual_network_interface_requirements: type: list description: The actual virtual NIC requirements that is been assigned when instantiating the connection point required: false entry_schema: type: tosca.datatypes.nfv.VirtualNetworkInterfaceRequirements requirements: - external_virtual_link: capability: tosca.capabilities.nfv.VirtualLinkable relationship: tosca.relationships.nfv.VirtualLinksTo - internal_virtual_link: #name in ETSI NFV IFA011 v0.7.3: intVirtualLinkDesc capability: tosca.capabilities.nfv.VirtualLinkable relationship: tosca.relationships.nfv.VirtualLinksTo tosca.nodes.nfv.Vdu.Compute: derived_from: tosca.nodes.Root description: Describes the virtual compute part of a VDU which is a construct supporting the description of the deployment and operational behavior of a VNFC properties: name: type: string description: Human readable name of the VDU required: true description: type: string description: Human readable description of the VDU required: true boot_order: type: list # explicit index (boot index) not necessary, contrary to IFA011 description: References a node template name from which a valid boot device is created required: false entry_schema: type: string nfvi_constraints: type: list description: Describes constraints on the NFVI for the VNFC instance(s) created from this VDU required: false entry_schema: type: string monitoring_parameters: type: list description: Describes monitoring parameters applicable to a VNFC instantiated from this VDU required: false entry_schema: type: tosca.datatypes.nfv.VnfcMonitoringParameter #configurable_properties: #type: tosca.datatypes.nfv.VnfcConfigurableProperties #required: false # derived types are expected to introduce # configurable_properties with its type derived from # tosca.datatypes.nfv.VnfcConfigurableProperties vdu_profile: type: tosca.datatypes.nfv.VduProfile description: Defines additional instantiation data for the VDU.Compute node required: true sw_image_data: type: tosca.datatypes.nfv.SwImageData description: Defines information related to a SwImage artifact used by this Vdu.Compute node required: false # property is required when the node template has an associated artifact of type tosca.artifacts.nfv.SwImage and not required otherwise boot_data: type: string description: Contains a string or a URL to a file contained in the VNF package used to customize a virtualised compute resource at boot time. The bootData may contain variable parts that are replaced by deployment specific values before being sent to the VIM. 
required: false capabilities: virtual_compute: type: tosca.capabilities.nfv.VirtualCompute occurrences: [ 1, 1 ] virtual_binding: type: tosca.capabilities.nfv.VirtualBindable occurrences: [ 1, UNBOUNDED ] requirements: - virtual_storage: capability: tosca.capabilities.nfv.VirtualStorage relationship: tosca.relationships.nfv.AttachesTo occurrences: [ 0, UNBOUNDED ] tosca.nodes.nfv.Vdu.VirtualBlockStorage: derived_from: tosca.nodes.Root description: This node type describes the specifications of requirements related to virtual block storage resources properties: virtual_block_storage_data: type: tosca.datatypes.nfv.VirtualBlockStorageData description: Describes the block storage characteristics. required: true sw_image_data: type: tosca.datatypes.nfv.SwImageData description: Defines information related to a SwImage artifact used by this Vdu.Compute node. required: false # property is required when the node template has an associated artifact of type tosca.artifacts.nfv.SwImage and not required otherwise capabilities: virtual_storage: type: tosca.capabilities.nfv.VirtualStorage description: Defines the capabilities of virtual_storage. tosca.nodes.nfv.Vdu.VirtualObjectStorage: derived_from: tosca.nodes.Root description: This node type describes the specifications of requirements related to virtual object storage resources properties: virtual_object_storage_data: type: tosca.datatypes.nfv.VirtualObjectStorageData description: Describes the object storage characteristics. required: true capabilities: virtual_storage: type: tosca.capabilities.nfv.VirtualStorage description: Defines the capabilities of virtual_storage. tosca.nodes.nfv.Vdu.VirtualFileStorage: derived_from: tosca.nodes.Root description: This node type describes the specifications of requirements related to virtual file storage resources properties: virtual_file_storage_data: type: tosca.datatypes.nfv.VirtualFileStorageData description: Describes the file storage characteristics. required: true capabilities: virtual_storage: type: tosca.capabilities.nfv.VirtualStorage description: Defines the capabilities of virtual_storage. 
requirements: - virtual_link: capability: tosca.capabilities.nfv.VirtualLinkable relationship: tosca.relationships.nfv.VirtualLinksTo #description: Describes the requirements for linking to virtual link tosca.nodes.nfv.VduCp: derived_from: tosca.nodes.nfv.Cp description: describes network connectivity between a VNFC instance based on this VDU and an internal VL properties: bitrate_requirement: type: integer # in bits per second description: Bitrate requirement in bit per second on this connection point required: false constraints: - greater_or_equal: 0 virtual_network_interface_requirements: type: list description: Specifies requirements on a virtual network interface realising the CPs instantiated from this CPD required: false entry_schema: type: tosca.datatypes.nfv.VirtualNetworkInterfaceRequirements order: type: integer description: The order of the NIC on the compute instance (e.g.eth2) required: false constraints: - greater_or_equal: 0 vnic_type: type: string description: Describes the type of the virtual network interface realizing the CPs instantiated from this CPD required: false constraints: - valid_values: [ normal, virtio, direct-physical ] requirements: - virtual_link: capability: tosca.capabilities.nfv.VirtualLinkable relationship: tosca.relationships.nfv.VirtualLinksTo - virtual_binding: capability: tosca.capabilities.nfv.VirtualBindable relationship: tosca.relationships.nfv.VirtualBindsTo node: tosca.nodes.nfv.Vdu.Compute tosca.nodes.nfv.VnfVirtualLink: derived_from: tosca.nodes.Root description: Describes the information about an internal VNF VL properties: connectivity_type: type: tosca.datatypes.nfv.ConnectivityType description: Specifies the protocol exposed by the VL and the flow pattern supported by the VL required: true description: type: string description: Provides human-readable information on the purpose of the VL required: false test_access: type: list description: Test access facilities available on the VL required: false entry_schema: type: string constraints: - valid_values: [ passive_monitoring, active_loopback ] vl_profile: type: tosca.datatypes.nfv.VlProfile description: Defines additional data for the VL required: true monitoring_parameters: type: list description: Describes monitoring parameters applicable to the VL required: false entry_schema: type: tosca.datatypes.nfv.VirtualLinkMonitoringParameter capabilities: virtual_linkable: type: tosca.capabilities.nfv.VirtualLinkable group_types: tosca.groups.nfv.PlacementGroup: derived_from: tosca.groups.Root description: PlacementGroup is used for describing the affinity or anti-affinity relationship applicable between the virtualization containers to be created based on different VDUs, or between internal VLs to be created based on different VnfVirtualLinkDesc(s) properties: description: type: string description: Human readable description of the group required: true members: [ tosca.nodes.nfv.Vdu.Compute, tosca.nodes.nfv.VnfVirtualLink ] policy_types: tosca.policies.nfv.InstantiationLevels: derived_from: tosca.policies.Root description: The InstantiationLevels type is a policy type representing all the instantiation levels of resources to be instantiated within a deployment flavour and including default instantiation level in term of the number of VNFC instances to be created as defined in ETSI GS NFV-IFA 011 [1]. properties: levels: type: map # key: levelId description: Describes the various levels of resources that can be used to instantiate the VNF using this flavour. 
required: true entry_schema: type: tosca.datatypes.nfv.InstantiationLevel constraints: - min_length: 1 default_level: type: string # levelId description: The default instantiation level for this flavour. required: false # required if multiple entries in levels tosca.policies.nfv.VduInstantiationLevels: derived_from: tosca.policies.Root description: The VduInstantiationLevels type is a policy type representing all the instantiation levels of resources to be instantiated within a deployment flavour in term of the number of VNFC instances to be created from each vdu.Compute. as defined in ETSI GS NFV-IFA 011 [1] properties: levels: type: map # key: levelId description: Describes the Vdu.Compute levels of resources that can be used to instantiate the VNF using this flavour required: true entry_schema: type: tosca.datatypes.nfv.VduLevel constraints: - min_length: 1 targets: [ tosca.nodes.nfv.Vdu.Compute ] tosca.policies.nfv.VirtualLinkInstantiationLevels: derived_from: tosca.policies.Root description: The VirtualLinkInstantiationLevels type is a policy type representing all the instantiation levels of virtual link resources to be instantiated within a deployment flavour as defined in ETSI GS NFV-IFA 011 [1]. properties: levels: type: map # key: levelId description: Describes the virtual link levels of resources that can be used to instantiate the VNF using this flavour. required: true entry_schema: type: tosca.datatypes.nfv.VirtualLinkBitrateLevel constraints: - min_length: 1 targets: [ tosca.nodes.nfv.VnfVirtualLink ] tosca.policies.nfv.ScalingAspects: derived_from: tosca.policies.Root description: The ScalingAspects type is a policy type representing the scaling aspects used for horizontal scaling as defined in ETSI GS NFV-IFA 011 [1]. properties: aspects: type: map # key: aspectId description: Describe maximum scale level for total number of scaling steps that can be applied to a particular aspect required: true entry_schema: type: tosca.datatypes.nfv.ScalingAspect constraints: - min_length: 1 tosca.policies.nfv.VduScalingAspectDeltas: derived_from: tosca.policies.Root description: The VduScalingAspectDeltas type is a policy type representing the Vdu.Compute detail of an aspect deltas used for horizontal scaling, as defined in ETSI GS NFV-IFA 011 [1]. properties: aspect: type: string description: Represents the scaling aspect to which this policy applies required: true deltas: type: map # key: scalingDeltaId description: Describes the Vdu.Compute scaling deltas to be applied for every scaling steps of a particular aspect. required: true entry_schema: type: tosca.datatypes.nfv.VduLevel constraints: - min_length: 1 targets: [ tosca.nodes.nfv.Vdu.Compute ] tosca.policies.nfv.VirtualLinkBitrateScalingAspectDeltas: derived_from: tosca.policies.Root description: The VirtualLinkBitrateScalingAspectDeltas type is a policy type representing the VnfVirtualLink detail of an aspect deltas used for horizontal scaling, as defined in ETSI GS NFV-IFA 011 [1]. properties: aspect: type: string description: Represents the scaling aspect to which this policy applies. required: true deltas: type: map # key: scalingDeltaId description: Describes the VnfVirtualLink scaling deltas to be applied for every scaling steps of a particular aspect. 
required: true entry_schema: type: tosca.datatypes.nfv.VirtualLinkBitrateLevel constraints: - min_length: 1 targets: [ tosca.nodes.nfv.VnfVirtualLink ] tosca.policies.nfv.VduInitialDelta: derived_from: tosca.policies.Root description: The VduInitialDelta type is a policy type representing the Vdu.Compute detail of an initial delta used for horizontal scaling, as defined in ETSI GS NFV-IFA 011 [1]. properties: initial_delta: type: tosca.datatypes.nfv.VduLevel description: Represents the initial minimum size of the VNF. required: true targets: [ tosca.nodes.nfv.Vdu.Compute ] tosca.policies.nfv.VirtualLinkBitrateInitialDelta: derived_from: tosca.policies.Root description: The VirtualLinkBitrateInitialDelta type is a policy type representing the VnfVirtualLink detail of an initial deltas used for horizontal scaling, as defined in ETSI GS NFV-IFA 011 [1]. properties: initial_delta: type: tosca.datatypes.nfv.VirtualLinkBitrateLevel description: Represents the initial minimum size of the VNF. required: true targets: [ tosca.nodes.nfv.VnfVirtualLink ] tosca.policies.nfv.AffinityRule: derived_from: tosca.policies.Placement description: The AffinityRule describes the affinity rules applicable for the defined targets properties: scope: type: string description: scope of the rule is an NFVI_node, an NFVI_PoP, etc. required: true constraints: - valid_values: [ nfvi_node, zone, zone_group, nfvi_pop ] targets: [ tosca.nodes.nfv.Vdu.Compute, tosca.nodes.nfv.VnfVirtualLink, tosca.groups.nfv.PlacementGroup ] tosca.policies.nfv.AntiAffinityRule: derived_from: tosca.policies.Placement description: The AntiAffinityRule describes the anti-affinity rules applicable for the defined targets properties: scope: type: string description: scope of the rule is an NFVI_node, an NFVI_PoP, etc. required: true constraints: - valid_values: [ nfvi_node, zone, zone_group, nfvi_pop ] targets: [ tosca.nodes.nfv.Vdu.Compute, tosca.nodes.nfv.VnfVirtualLink, tosca.groups.nfv.PlacementGroup ] tosca.policies.nfv.SecurityGroupRule: derived_from: tosca.policies.Root description: The SecurityGroupRule type is a policy type specified the matching criteria for the ingress and/or egress traffic to/from visited connection points as defined in ETSI GS NFV-IFA 011 [1]. properties: description: type: string description: Human readable description of the security group rule. required: false direction: type: string description: The direction in which the security group rule is applied. The direction of 'ingress' or 'egress' is specified against the associated CP. I.e., 'ingress' means the packets entering a CP, while 'egress' means the packets sent out of a CP. required: false constraints: - valid_values: [ ingress, egress ] default: ingress ether_type: type: string description: Indicates the protocol carried over the Ethernet layer. required: false constraints: - valid_values: [ ipv4, ipv6 ] default: ipv4 protocol: type: string description: Indicates the protocol carried over the IP layer. Permitted values include any protocol defined in the IANA protocol registry, e.g. TCP, UDP, ICMP, etc. 
required: false constraints: - valid_values: [ hopopt, icmp, igmp, ggp, ipv4, st, tcp, cbt, egp, igp, bbn_rcc_mon, nvp_ii, pup, argus, emcon, xnet, chaos, udp, mux, dcn_meas, hmp, prm, xns_idp, trunk_1, trunk_2, leaf_1, leaf_2, rdp, irtp, iso_tp4, netblt, mfe_nsp, merit_inp, dccp, 3pc, idpr, xtp, ddp, idpr_cmtp, tp++, il, ipv6, sdrp, ipv6_route, ipv6_frag, idrp, rsvp, gre, dsr, bna, esp, ah, i_nlsp, swipe, narp, mobile, tlsp, skip, ipv6_icmp, ipv6_no_nxt, ipv6_opts, cftp, sat_expak, kryptolan, rvd, ippc, sat_mon, visa, ipcv, cpnx, cphb, wsn, pvp, br_sat_mon, sun_nd, wb_mon, wb_expak, iso_ip, vmtp, secure_vmtp, vines, ttp, iptm, nsfnet_igp, dgp, tcf, eigrp, ospfigp, sprite_rpc, larp, mtp, ax.25, ipip, micp, scc_sp, etherip, encap, gmtp, ifmp, pnni, pim, aris, scps, qnx, a/n, ip_comp, snp, compaq_peer, ipx_in_ip, vrrp, pgm, l2tp, ddx, iatp, stp, srp, uti, smp, sm, ptp, isis, fire, crtp, crudp, sscopmce, iplt, sps, pipe, sctp, fc, rsvp_e2e_ignore, mobility, udp_lite, mpls_in_ip, manet, hip, shim6, wesp, rohc ] default: tcp port_range_min: type: integer description: Indicates minimum port number in the range that is matched by the security group rule. If a value is provided at design-time, this value may be overridden at run-time based on other deployment requirements or constraints. required: false constraints: - greater_or_equal: 0 - less_or_equal: 65535 default: 0 port_range_max: type: integer description: Indicates maximum port number in the range that is matched by the security group rule. If a value is provided at design-time, this value may be overridden at run-time based on other deployment requirements or constraints. required: false constraints: - greater_or_equal: 0 - less_or_equal: 65535 default: 65535 targets: [ tosca.nodes.nfv.VduCp, tosca.nodes.nfv.VnfExtCp ] tosca.policies.nfv.SupportedVnfInterface: derived_from: tosca.policies.Root description: this policy type represents interfaces produced by a VNF, the details to access them and the applicable connection points to use to access these interfaces properties: interface_name: type: string description: Identifies an interface produced by the VNF. 
        required: true
        constraints:
          - valid_values: [ vnf_indicator, vnf_configuration ]
      details:
        type: tosca.datatypes.nfv.InterfaceDetails
        description: Provide additional data to access the interface endpoint
        required: false
    targets: [ tosca.nodes.nfv.VnfExtCp, tosca.nodes.nfv.VduCp ]
././@PaxHeader0000000000000000000000000000023700000000000011457 xustar0000000000000000137 path=python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package/Definitions/helloworld3_df_simple.yaml 22 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package/Definitions0000664000175000017500000001705600000000000034122 0ustar00zuulzuul00000000000000
tosca_definitions_version: tosca_simple_yaml_1_2

description: Simple deployment flavour for Sample VNF

imports:
  - etsi_nfv_sol001_common_types.yaml
  - etsi_nfv_sol001_vnfd_types.yaml
  - helloworld3_types.yaml

topology_template:
  inputs:
    descriptor_id:
      type: string
    descriptor_version:
      type: string
    provider:
      type: string
    product_name:
      type: string
    software_version:
      type: string
    vnfm_info:
      type: list
      entry_schema:
        type: string
    flavour_id:
      type: string
    flavour_description:
      type: string

  substitution_mappings:
    node_type: ntt.nslab.VNF
    properties:
      flavour_id: simple
    requirements:
      virtual_link_external: [ CP1, virtual_link ]

  node_templates:
    VNF:
      type: ntt.nslab.VNF
      properties:
        flavour_description: A simple flavour
      interfaces:
        Vnflcm:
          # supporting only 'instantiate', 'terminate', 'modify'
          # not supporting LCM script, supporting only default LCM
          instantiate: []
          instantiate_start: []
          instantiate_end: []
          terminate: []
          terminate_start: []
          terminate_end: []
          modify_information: []
          modify_information_start: []
          modify_information_end: []
          # change_flavour: []
          # change_flavour_start: []
          # change_flavour_end: []
          # change_external_connectivity: []
          # change_external_connectivity_start: []
          # change_external_connectivity_end: []
          # operate: []
          # operate_start: []
          # operate_end: []
          # heal: []
          # heal_start: []
          # heal_end: []
          # scale: []
          # scale_start: []
          # scale_end: []
          # scale_to_level: []
          # scale_to_level_start: []
          # scale_to_level_end: []

    VDU1:
      type: tosca.nodes.nfv.Vdu.Compute
      properties:
        name: VDU1
        description: VDU1 compute node
        vdu_profile:
          min_number_of_instances: 1
          max_number_of_instances: 1
        sw_image_data:
          name: Software of VDU1
          version: '0.4.0'
          checksum:
            algorithm: sha-256
            hash: b9c3036539fd7a5f87a1bf38eb05fdde8b556a1a7e664dbeda90ed3cd74b4f9d
          container_format: bare
          disk_format: qcow2
          min_disk: 1 GB
          size: 1 GB
      artifacts:
        sw_image:
          type: tosca.artifacts.nfv.SwImage
          file: cirros-0.4.0-x86_64-disk.img
          repository: http://download.cirros-cloud.net/0.4.0/
      capabilities:
        virtual_compute:
          properties:
            virtual_memory:
              virtual_mem_size: 512 MB
            virtual_cpu:
              num_virtual_cpu: 1
            virtual_local_storage:
              - size_of_storage: 1 GB

    VDU2:
      type: tosca.nodes.nfv.Vdu.Compute
      properties:
        name: VDU2
        description: VDU2 compute node
        vdu_profile:
          min_number_of_instances: 1
          max_number_of_instances: 3
      capabilities:
        virtual_compute:
          properties:
            virtual_memory:
              virtual_mem_size: 512 MB
            virtual_cpu:
              num_virtual_cpu: 1
            virtual_local_storage:
              - size_of_storage: 1 GB
      requirements:
        - virtual_storage: VirtualStorage

    VirtualStorage:
      type: tosca.nodes.nfv.Vdu.VirtualBlockStorage
      properties:
        virtual_block_storage_data:
          size_of_storage: 30 GB
          rdma_enabled: true
        sw_image_data:
          name: VrtualStorage
          version: '0.4.0'
          checksum:
            algorithm: sha-256
            hash: b9c3036539fd7a5f87a1bf38eb05fdde8b556a1a7e664dbeda90ed3cd74b4f9d
          container_format: bare
          disk_format: qcow2
          min_disk: 2 GB
          min_ram: 8192 MB
          size: 2 GB
      artifacts:
        sw_image:
          type: tosca.artifacts.nfv.SwImage
          file: cirros-0.4.0-x86_64-disk.img
          repository: http://download.cirros-cloud.net/0.4.0/

    CP1:
      type: tosca.nodes.nfv.VduCp
      properties:
        layer_protocols: [ ipv4 ]
        order: 0
        vnic_type: direct-physical
      requirements:
        - virtual_binding: VDU1
        #- virtual_link: # the target node is determined in the NSD

    CP2:
      type: tosca.nodes.nfv.VduCp
      properties:
        layer_protocols: [ ipv4 ]
        order: 1
      requirements:
        - virtual_binding: VDU1
        - virtual_link: internalVL2

    CP3:
      type: tosca.nodes.nfv.VduCp
      properties:
        layer_protocols: [ ipv4 ]
        order: 2
      requirements:
        - virtual_binding: VDU2
        - virtual_link: internalVL2

    internalVL2:
      type: tosca.nodes.nfv.VnfVirtualLink
      properties:
        connectivity_type:
          layer_protocols: [ ipv4 ]
        description: Internal Virtual link in the VNF
        vl_profile:
          max_bitrate_requirements:
            root: 1048576
            leaf: 1048576
          min_bitrate_requirements:
            root: 1048576
            leaf: 1048576
          virtual_link_protocol_data:
            - associated_layer_protocol: ipv4
              l3_protocol_data:
                ip_version: ipv4
                cidr: 11.11.0.0/24

  policies:
    - scaling_aspects:
        type: tosca.policies.nfv.ScalingAspects
        properties:
          aspects:
            worker_instance:
              name: worker_instance_aspect
              description: worker_instance scaling aspect
              max_scale_level: 2
              step_deltas:
                - delta_1

    - VDU2_initial_delta:
        type: tosca.policies.nfv.VduInitialDelta
        properties:
          initial_delta:
            number_of_instances: 1
        targets: [ VDU2 ]

    - VDU2_scaling_aspect_deltas:
        type: tosca.policies.nfv.VduScalingAspectDeltas
        properties:
          aspect: worker_instance
          deltas:
            delta_2:
              number_of_instances: 1
        targets: [ VDU2 ]

    - instantiation_levels:
        type: tosca.policies.nfv.InstantiationLevels
        properties:
          levels:
            instantiation_level_1:
              description: Smallest size
              scale_info:
                worker_instance:
                  scale_level: 0
            instantiation_level_2:
              description: Largest size
              scale_info:
                worker_instance:
                  scale_level: 2
          default_level: instantiation_level_1

    - VDU1_instantiation_levels:
        type: tosca.policies.nfv.VduInstantiationLevels
        properties:
          levels:
            instantiation_level_1:
              number_of_instances: 1
            instantiation_level_2:
              number_of_instances: 3
        targets: [ VDU1 ]

    - VDU2_instantiation_levels:
        type: tosca.policies.nfv.VduInstantiationLevels
        properties:
          levels:
            instantiation_level_1:
              number_of_instances: 1
            instantiation_level_2:
              number_of_instances: 1
        targets: [ VDU2 ]

    - internalVL2_instantiation_levels:
        type: tosca.policies.nfv.VirtualLinkInstantiationLevels
        properties:
          levels:
            instantiation_level_1:
              bitrate_requirements:
                root: 1048576
                leaf: 1048576
            instantiation_level_2:
              bitrate_requirements:
                root: 1048576
                leaf: 1048576
        targets: [ internalVL2 ]
././@PaxHeader0000000000000000000000000000023600000000000011456 xustar0000000000000000136 path=python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package/Definitions/helloworld3_top.vnfd.yaml 22 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package/Definitions0000664000175000017500000000165100000000000034114 0ustar00zuulzuul00000000000000
tosca_definitions_version: tosca_simple_yaml_1_0

description: Sample VNF of NTT NS lab.

imports:
  - etsi_nfv_sol001_common_types.yaml
  - etsi_nfv_sol001_vnfd_types.yaml
  - helloworld3_types.yaml
  - helloworld3_df_simple.yaml
  # - helloworld3_df_complex.yaml

topology_template:
  inputs:
    selected_flavour:
      type: string
      description: VNF deployment flavour selected by the consumer.
        It is provided in the API

  node_templates:
    VNF:
      type: ntt.nslab.VNF
      properties:
        flavour_id: { get_input: selected_flavour }
        descriptor_id: b1bb0ce7-ebca-4fa7-95ed-4840d70a1177
        provider: NTT NS lab
        product_name: Sample VNF
        software_version: '1.0'
        descriptor_version: '1.0'
        vnfm_info:
          - Tacker
      requirements:
        #- virtual_link_external # mapped in lower-level templates
        #- virtual_link_internal # mapped in lower-level templates
././@PaxHeader0000000000000000000000000000023300000000000011453 xustar0000000000000000133 path=python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package/Definitions/helloworld3_types.yaml 22 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package/Definitions0000664000175000017500000000300600000000000034110 0ustar00zuulzuul00000000000000
tosca_definitions_version: tosca_simple_yaml_1_0

description: ntt.nslab.VNF type definition

imports:
  - etsi_nfv_sol001_common_types.yaml
  - etsi_nfv_sol001_vnfd_types.yaml

node_types:
  ntt.nslab.VNF:
    derived_from: tosca.nodes.nfv.VNF
    properties:
      descriptor_id:
        type: string
        constraints: [ valid_values: [ b1bb0ce7-ebca-4fa7-95ed-4840d70a1177 ] ]
        default: b1bb0ce7-ebca-4fa7-95ed-4840d70a1177
      descriptor_version:
        type: string
        constraints: [ valid_values: [ '1.0' ] ]
        default: '1.0'
      provider:
        type: string
        constraints: [ valid_values: [ 'NTT NS lab' ] ]
        default: 'NTT NS lab'
      product_name:
        type: string
        constraints: [ valid_values: [ 'Sample VNF' ] ]
        default: 'Sample VNF'
      software_version:
        type: string
        constraints: [ valid_values: [ '1.0' ] ]
        default: '1.0'
      vnfm_info:
        type: list
        entry_schema:
          type: string
          constraints: [ valid_values: [ Tacker ] ]
        default: [ Tacker ]
      flavour_id:
        type: string
        constraints: [ valid_values: [ simple ] ]
        default: simple
      flavour_description:
        type: string
        default: ""
    requirements:
      - virtual_link_external:
          capability: tosca.capabilities.nfv.VirtualLinkable
      - virtual_link_internal:
          capability: tosca.capabilities.nfv.VirtualLinkable
    interfaces:
      Vnflcm:
        type: tosca.interfaces.nfv.Vnflcm
././@PaxHeader0000000000000000000000000000021400000000000011452 xustar0000000000000000114 path=python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package/TOSCA-Metadata/ 26 mtime=1715864642.75062 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package/TOSCA-Metad0000775000175000017500000000000000000000000033535 5ustar00zuulzuul00000000000000
././@PaxHeader0000000000000000000000000000022200000000000011451 xustar0000000000000000124 path=python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package/TOSCA-Metadata/TOSCA.meta 22 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package/TOSCA-Metad0000664000175000017500000000020000000000000033531 0ustar00zuulzuul00000000000000
TOSCA-Meta-File-Version: 1.0
Created-by: Hiroyuki JO
CSAR-Version: 1.1
Entry-Definitions: Definitions/helloworld3_top.vnfd.yaml
././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7186198 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/0000775000175000017500000000000000000000000033753 5ustar00zuulzuul00000000000000
././@PaxHeader0000000000000000000000000000022300000000000011452 xustar0000000000000000121 path=python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/Definitions/ 26 mtime=1715864642.75062
python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/D0000775000175000017500000000000000000000000034057 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000026000000000000011453 xustar0000000000000000154 path=python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/Definitions/etsi_nfv_sol001_common_types.yaml 22 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/D0000664000175000017500000002164500000000000034071 0ustar00zuulzuul00000000000000tosca_definitions_version: tosca_simple_yaml_1_2 description: ETSI NFV SOL 001 common types definitions version 2.6.1 metadata: template_name: etsi_nfv_sol001_common_types template_author: ETSI_NFV template_version: 2.6.1 data_types: tosca.datatypes.nfv.L2AddressData: derived_from: tosca.datatypes.Root description: Describes the information on the MAC addresses to be assigned to a connection point. properties: mac_address_assignment: type: boolean description: Specifies if the address assignment is the responsibility of management and orchestration function or not. If it is set to True, it is the management and orchestration function responsibility required: true tosca.datatypes.nfv.L3AddressData: derived_from: tosca.datatypes.Root description: Provides information about Layer 3 level addressing scheme and parameters applicable to a CP properties: ip_address_assignment: type: boolean description: Specifies if the address assignment is the responsibility of management and orchestration function or not. If it is set to True, it is the management and orchestration function responsibility required: true floating_ip_activated: type: boolean description: Specifies if the floating IP scheme is activated on the Connection Point or not required: true ip_address_type: type: string description: Defines address type. The address type should be aligned with the address type supported by the layer_protocols properties of the parent VnfExtCp required: false constraints: - valid_values: [ ipv4, ipv6 ] number_of_ip_address: type: integer description: Minimum number of IP addresses to be assigned required: false constraints: - greater_than: 0 tosca.datatypes.nfv.AddressData: derived_from: tosca.datatypes.Root description: Describes information about the addressing scheme and parameters applicable to a CP properties: address_type: type: string description: Describes the type of the address to be assigned to a connection point. The content type shall be aligned with the address type supported by the layerProtocol property of the connection point required: true constraints: - valid_values: [ mac_address, ip_address ] l2_address_data: type: tosca.datatypes.nfv.L2AddressData description: Provides the information on the MAC addresses to be assigned to a connection point. required: false l3_address_data: type: tosca.datatypes.nfv.L3AddressData description: Provides the information on the IP addresses to be assigned to a connection point required: false tosca.datatypes.nfv.ConnectivityType: derived_from: tosca.datatypes.Root description: describes additional connectivity information of a virtualLink properties: layer_protocols: type: list description: Identifies the protocol a virtualLink gives access to (ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire).The top layer protocol of the virtualLink protocol stack shall always be provided. 
The lower layer protocols may be included when there are specific requirements on these layers. required: true entry_schema: type: string constraints: - valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ] flow_pattern: type: string description: Identifies the flow pattern of the connectivity required: false constraints: - valid_values: [ line, tree, mesh ] tosca.datatypes.nfv.LinkBitrateRequirements: derived_from: tosca.datatypes.Root description: describes the requirements in terms of bitrate for a virtual link properties: root: type: integer # in bits per second description: Specifies the throughput requirement in bits per second of the link (e.g. bitrate of E-Line, root bitrate of E-Tree, aggregate capacity of E-LAN). required: true constraints: - greater_or_equal: 0 leaf: type: integer # in bits per second description: Specifies the throughput requirement in bits per second of leaf connections to the link when applicable to the connectivity type (e.g. for E-Tree and E LAN branches). required: false constraints: - greater_or_equal: 0 tosca.datatypes.nfv.CpProtocolData: derived_from: tosca.datatypes.Root description: Describes and associates the protocol layer that a CP uses together with other protocol and connection point information properties: associated_layer_protocol: type: string required: true description: One of the values of the property layer_protocols of the CP constraints: - valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ] address_data: type: list description: Provides information on the addresses to be assigned to the CP entry_schema: type: tosca.datatypes.nfv.AddressData required: false tosca.datatypes.nfv.VnfProfile: derived_from: tosca.datatypes.Root description: describes a profile for instantiating VNFs of a particular NS DF according to a specific VNFD and VNF DF. properties: instantiation_level: type: string description: Identifier of the instantiation level of the VNF DF to be used for instantiation. If not present, the default instantiation level as declared in the VNFD shall be used. required: false min_number_of_instances: type: integer description: Minimum number of instances of the VNF based on this VNFD that is permitted to exist for this VnfProfile. required: true constraints: - greater_or_equal: 0 max_number_of_instances: type: integer description: Maximum number of instances of the VNF based on this VNFD that is permitted to exist for this VnfProfile. 
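# NOTE: The lines below are an illustrative sketch, not part of the original
# fixture. They show how values of the tosca.datatypes.nfv.VnfProfile type
# defined above might be assigned on a VNF node in an NS deployment flavour;
# the instantiation level name and the instance counts are hypothetical.
#   vnf_profile:
#     instantiation_level: instantiation_level_1
#     min_number_of_instances: 1
#     max_number_of_instances: 2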
required: true constraints: - greater_or_equal: 0 tosca.datatypes.nfv.Qos: derived_from: tosca.datatypes.Root description: describes QoS data for a given VL used in a VNF deployment flavour properties: latency: type: scalar-unit.time #Number description: Specifies the maximum latency required: true constraints: - greater_than: 0 s packet_delay_variation: type: scalar-unit.time #Number description: Specifies the maximum jitter required: true constraints: - greater_or_equal: 0 s packet_loss_ratio: type: float description: Specifies the maximum packet loss ratio required: false constraints: - in_range: [ 0.0, 1.0 ] capability_types: tosca.capabilities.nfv.VirtualLinkable: derived_from: tosca.capabilities.Node description: A node type that includes the VirtualLinkable capability indicates that it can be pointed by tosca.relationships.nfv.VirtualLinksTo relationship type relationship_types: tosca.relationships.nfv.VirtualLinksTo: derived_from: tosca.relationships.DependsOn description: Represents an association relationship between the VduCp and VnfVirtualLink node types valid_target_types: [ tosca.capabilities.nfv.VirtualLinkable ] node_types: tosca.nodes.nfv.Cp: derived_from: tosca.nodes.Root description: Provides information regarding the purpose of the connection point properties: layer_protocols: type: list description: Identifies which protocol the connection point uses for connectivity purposes required: true entry_schema: type: string constraints: - valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ] role: #Name in ETSI NFV IFA011 v0.7.3: cpRole type: string description: Identifies the role of the port in the context of the traffic flow patterns in the VNF or parent NS required: false constraints: - valid_values: [ root, leaf ] description: type: string description: Provides human-readable information on the purpose of the connection point required: false protocol: type: list description: Provides information on the addresses to be assigned to the connection point(s) instantiated from this Connection Point Descriptor required: false entry_schema: type: tosca.datatypes.nfv.CpProtocolData trunk_mode: type: boolean description: Provides information about whether the CP instantiated from this Cp is in Trunk mode (802.1Q or other), When operating in "trunk mode", the Cp is capable of carrying traffic for several VLANs. Absence of this property implies that trunkMode is not configured for the Cp i.e. It is equivalent to boolean value "false". required: false ././@PaxHeader0000000000000000000000000000025600000000000011460 xustar0000000000000000152 path=python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/Definitions/etsi_nfv_sol001_vnfd_types.yaml 22 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/D0000664000175000017500000020321300000000000034062 0ustar00zuulzuul00000000000000tosca_definitions_version: tosca_simple_yaml_1_2 description: ETSI NFV SOL 001 vnfd types definitions version 2.6.1 metadata: template_name: etsi_nfv_sol001_vnfd_types template_author: ETSI_NFV template_version: 2.6.1 imports: - ./etsi_nfv_sol001_common_types.yaml data_types: tosca.datatypes.nfv.VirtualNetworkInterfaceRequirements: derived_from: tosca.datatypes.Root description: Describes requirements on a virtual network interface properties: name: type: string description: Provides a human readable name for the requirement. 
required: false description: type: string description: Provides a human readable description of the requirement. required: false support_mandatory: type: boolean description: Indicates whether fulfilling the constraint is mandatory (TRUE) for successful operation or desirable (FALSE). required: true network_interface_requirements: type: map description: The network interface requirements. A map of strings that contain a set of key-value pairs that describes the hardware platform specific network interface deployment requirements. required: true entry_schema: type: string nic_io_requirements: type: tosca.datatypes.nfv.LogicalNodeData description: references (couples) the CP with any logical node I/O requirements (for network devices) that may have been created. Linking these attributes is necessary so that so that I/O requirements that need to be articulated at the logical node level can be associated with the network interface requirements associated with the CP. required: false tosca.datatypes.nfv.RequestedAdditionalCapability: derived_from: tosca.datatypes.Root description: describes requested additional capability for a particular VDU properties: requested_additional_capability_name: type: string description: Identifies a requested additional capability for the VDU. required: true support_mandatory: type: boolean description: Indicates whether the requested additional capability is mandatory for successful operation. required: true min_requested_additional_capability_version: type: string description: Identifies the minimum version of the requested additional capability. required: false preferred_requested_additional_capability_version: type: string description: Identifies the preferred version of the requested additional capability. required: false target_performance_parameters: type: map description: Identifies specific attributes, dependent on the requested additional capability type. required: true entry_schema: type: string tosca.datatypes.nfv.VirtualMemory: derived_from: tosca.datatypes.Root description: supports the specification of requirements related to virtual memory of a virtual compute resource properties: virtual_mem_size: type: scalar-unit.size description: Amount of virtual memory. required: true virtual_mem_oversubscription_policy: type: string description: The memory core oversubscription policy in terms of virtual memory to physical memory on the platform. required: false vdu_mem_requirements: type: map description: The hardware platform specific VDU memory requirements. A map of strings that contains a set of key-value pairs that describes hardware platform specific VDU memory requirements. required: false entry_schema: type: string numa_enabled: type: boolean description: It specifies the memory allocation to be cognisant of the relevant process/core allocation. required: false default: false tosca.datatypes.nfv.VirtualCpu: derived_from: tosca.datatypes.Root description: Supports the specification of requirements related to virtual CPU(s) of a virtual compute resource properties: cpu_architecture: type: string description: CPU architecture type. Examples are x86, ARM required: false num_virtual_cpu: type: integer description: Number of virtual CPUs required: true constraints: - greater_than: 0 virtual_cpu_clock: type: scalar-unit.frequency description: Minimum virtual CPU clock rate required: false virtual_cpu_oversubscription_policy: type: string description: CPU core oversubscription policy e.g. the relation of virtual CPU cores to physical CPU cores/threads. 
required: false vdu_cpu_requirements: type: map description: The hardware platform specific VDU CPU requirements. A map of strings that contains a set of key-value pairs describing VDU CPU specific hardware platform requirements. required: false entry_schema: type: string virtual_cpu_pinning: type: tosca.datatypes.nfv.VirtualCpuPinning description: The virtual CPU pinning configuration for the virtualised compute resource. required: false tosca.datatypes.nfv.VirtualCpuPinning: derived_from: tosca.datatypes.Root description: Supports the specification of requirements related to the virtual CPU pinning configuration of a virtual compute resource properties: virtual_cpu_pinning_policy: type: string description: 'Indicates the policy for CPU pinning. The policy can take values of "static" or "dynamic". In case of "dynamic" the allocation of virtual CPU cores to logical CPU cores is decided by the VIM. (e.g.: SMT (Simultaneous Multi-Threading) requirements). In case of "static" the allocation is requested to be according to the virtual_cpu_pinning_rule.' required: false constraints: - valid_values: [ static, dynamic ] virtual_cpu_pinning_rule: type: list description: Provides the list of rules for allocating virtual CPU cores to logical CPU cores/threads required: false entry_schema: type: string tosca.datatypes.nfv.VnfcConfigurableProperties: derived_from: tosca.datatypes.Root description: Defines the configurable properties of a VNFC # properties: # additional_vnfc_configurable_properties: # type: tosca.datatypes.nfv.VnfcAdditionalConfigurableProperties # description: Describes additional configuration for VNFC that # can be modified using the ModifyVnfInfo operation # required: false # derived types are expected to introduce # additional_vnfc_configurable_properties with its type derived from # tosca.datatypes.nfv.VnfcAdditionalConfigurableProperties tosca.datatypes.nfv.VnfcAdditionalConfigurableProperties: derived_from: tosca.datatypes.Root description: VnfcAdditionalConfigurableProperties type is an empty base type for deriving data types for describing additional configurable properties for a given VNFC. tosca.datatypes.nfv.VduProfile: derived_from: tosca.datatypes.Root description: describes additional instantiation data for a given Vdu.Compute used in a specific deployment flavour. properties: min_number_of_instances: type: integer description: Minimum number of instances of the VNFC based on this Vdu.Compute that is permitted to exist for a particular VNF deployment flavour. required: true constraints: - greater_or_equal: 0 max_number_of_instances: type: integer description: Maximum number of instances of the VNFC based on this Vdu.Compute that is permitted to exist for a particular VNF deployment flavour. required: true constraints: - greater_or_equal: 0 tosca.datatypes.nfv.VlProfile: derived_from: tosca.datatypes.Root description: Describes additional instantiation data for a given VL used in a specific VNF deployment flavour. properties: max_bitrate_requirements: type: tosca.datatypes.nfv.LinkBitrateRequirements description: Specifies the maximum bitrate requirements for a VL instantiated according to this profile. required: true min_bitrate_requirements: type: tosca.datatypes.nfv.LinkBitrateRequirements description: Specifies the minimum bitrate requirements for a VL instantiated according to this profile. required: true qos: type: tosca.datatypes.nfv.Qos description: Specifies the QoS requirements of a VL instantiated according to this profile. 
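# NOTE: Illustrative sketch only, not part of the original fixture. It shows how
# the VirtualMemory and VirtualCpu data types defined above are typically filled
# in under the virtual_compute capability of a Vdu.Compute node (values mirror
# VDU1 in helloworld3_df_simple.yaml earlier in this package):
#   capabilities:
#     virtual_compute:
#       properties:
#         virtual_memory:
#           virtual_mem_size: 512 MB
#         virtual_cpu:
#           num_virtual_cpu: 1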
required: false virtual_link_protocol_data: type: list description: Specifies the protocol data for a virtual link. required: false entry_schema: type: tosca.datatypes.nfv.VirtualLinkProtocolData tosca.datatypes.nfv.VirtualLinkProtocolData: derived_from: tosca.datatypes.Root description: describes one protocol layer and associated protocol data for a given virtual link used in a specific VNF deployment flavour properties: associated_layer_protocol: type: string description: Identifies one of the protocols a virtualLink gives access to (ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire) as specified by the connectivity_type property. required: true constraints: - valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ] l2_protocol_data: type: tosca.datatypes.nfv.L2ProtocolData description: Specifies the L2 protocol data for a virtual link. Shall be present when the associatedLayerProtocol attribute indicates a L2 protocol and shall be absent otherwise. required: false l3_protocol_data: type: tosca.datatypes.nfv.L3ProtocolData description: Specifies the L3 protocol data for this virtual link. Shall be present when the associatedLayerProtocol attribute indicates a L3 protocol and shall be absent otherwise. required: false tosca.datatypes.nfv.L2ProtocolData: derived_from: tosca.datatypes.Root description: describes L2 protocol data for a given virtual link used in a specific VNF deployment flavour. properties: name: type: string description: Identifies the network name associated with this L2 protocol. required: false network_type: type: string description: Specifies the network type for this L2 protocol.The value may be overridden at run-time. required: false constraints: - valid_values: [ flat, vlan, vxlan, gre ] vlan_transparent: type: boolean description: Specifies whether to support VLAN transparency for this L2 protocol or not. required: false default: false mtu: type: integer description: Specifies the maximum transmission unit (MTU) value for this L2 protocol. required: false constraints: - greater_than: 0 tosca.datatypes.nfv.L3ProtocolData: derived_from: tosca.datatypes.Root description: describes L3 protocol data for a given virtual link used in a specific VNF deployment flavour. properties: name: type: string description: Identifies the network name associated with this L3 protocol. required: false ip_version: type: string description: Specifies IP version of this L3 protocol.The value of the ip_version property shall be consistent with the value of the layer_protocol in the connectivity_type property of the virtual link node. required: true constraints: - valid_values: [ ipv4, ipv6 ] cidr: type: string description: Specifies the CIDR (Classless Inter-Domain Routing) of this L3 protocol. The value may be overridden at run-time. required: true ip_allocation_pools: type: list description: Specifies the allocation pools with start and end IP addresses for this L3 protocol. The value may be overridden at run-time. required: false entry_schema: type: tosca.datatypes.nfv.IpAllocationPool gateway_ip: type: string description: Specifies the gateway IP address for this L3 protocol. The value may be overridden at run-time. required: false dhcp_enabled: type: boolean description: Indicates whether DHCP (Dynamic Host Configuration Protocol) is enabled or disabled for this L3 protocol. The value may be overridden at run-time. required: false ipv6_address_mode: type: string description: Specifies IPv6 address mode. 
May be present when the value of the ipVersion attribute is "ipv6" and shall be absent otherwise. The value may be overridden at run-time. required: false constraints: - valid_values: [ slaac, dhcpv6-stateful, dhcpv6-stateless ] tosca.datatypes.nfv.IpAllocationPool: derived_from: tosca.datatypes.Root description: Specifies a range of IP addresses properties: start_ip_address: type: string description: The IP address to be used as the first one in a pool of addresses derived from the cidr block full IP range required: true end_ip_address: type: string description: The IP address to be used as the last one in a pool of addresses derived from the cidr block full IP range required: true tosca.datatypes.nfv.InstantiationLevel: derived_from: tosca.datatypes.Root description: Describes the scale level for each aspect that corresponds to a given level of resources to be instantiated within a deployment flavour in term of the number VNFC instances properties: description: type: string description: Human readable description of the level required: true scale_info: type: map # key: aspectId description: Represents for each aspect the scale level that corresponds to this instantiation level. scale_info shall be present if the VNF supports scaling. required: false entry_schema: type: tosca.datatypes.nfv.ScaleInfo tosca.datatypes.nfv.VduLevel: derived_from: tosca.datatypes.Root description: Indicates for a given Vdu.Compute in a given level the number of instances to deploy properties: number_of_instances: type: integer description: Number of instances of VNFC based on this VDU to deploy for this level. required: true constraints: - greater_or_equal: 0 tosca.datatypes.nfv.VnfLcmOperationsConfiguration: derived_from: tosca.datatypes.Root description: Represents information to configure lifecycle management operations properties: instantiate: type: tosca.datatypes.nfv.VnfInstantiateOperationConfiguration description: Configuration parameters for the InstantiateVnf operation required: false scale: type: tosca.datatypes.nfv.VnfScaleOperationConfiguration description: Configuration parameters for the ScaleVnf operation required: false scale_to_level: type: tosca.datatypes.nfv.VnfScaleToLevelOperationConfiguration description: Configuration parameters for the ScaleVnfToLevel operation required: false change_flavour: type: tosca.datatypes.nfv.VnfChangeFlavourOperationConfiguration description: Configuration parameters for the changeVnfFlavourOpConfig operation required: false heal: type: tosca.datatypes.nfv.VnfHealOperationConfiguration description: Configuration parameters for the HealVnf operation required: false terminate: type: tosca.datatypes.nfv.VnfTerminateOperationConfiguration description: Configuration parameters for the TerminateVnf operation required: false operate: type: tosca.datatypes.nfv.VnfOperateOperationConfiguration description: Configuration parameters for the OperateVnf operation required: false change_ext_connectivity: type: tosca.datatypes.nfv.VnfChangeExtConnectivityOperationConfiguration description: Configuration parameters for the changeExtVnfConnectivityOpConfig operation required: false tosca.datatypes.nfv.VnfInstantiateOperationConfiguration: derived_from: tosca.datatypes.Root description: represents information that affect the invocation of the InstantiateVnf operation. 
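# NOTE: Illustrative sketch only, not part of the original fixture. It combines
# the VlProfile, VirtualLinkProtocolData, L3ProtocolData and IpAllocationPool
# types defined above; the CIDR and the address pool are hypothetical values:
#   vl_profile:
#     max_bitrate_requirements:
#       root: 1048576
#     min_bitrate_requirements:
#       root: 1048576
#     virtual_link_protocol_data:
#       - associated_layer_protocol: ipv4
#         l3_protocol_data:
#           ip_version: ipv4
#           cidr: 192.168.10.0/24
#           dhcp_enabled: true
#           ip_allocation_pools:
#             - start_ip_address: 192.168.10.10
#               end_ip_address: 192.168.10.100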
tosca.datatypes.nfv.VnfScaleOperationConfiguration: derived_from: tosca.datatypes.Root description: Represents information that affect the invocation of the ScaleVnf operation properties: scaling_by_more_than_one_step_supported: type: boolean description: Signals whether passing a value larger than one in the numScalingSteps parameter of the ScaleVnf operation is supported by this VNF. required: false default: false tosca.datatypes.nfv.VnfScaleToLevelOperationConfiguration: derived_from: tosca.datatypes.Root description: represents information that affect the invocation of the ScaleVnfToLevel operation properties: arbitrary_target_levels_supported: type: boolean description: Signals whether scaling according to the parameter "scaleInfo" is supported by this VNF required: true tosca.datatypes.nfv.VnfHealOperationConfiguration: derived_from: tosca.datatypes.Root description: represents information that affect the invocation of the HealVnf operation properties: causes: type: list description: Supported "cause" parameter values required: false entry_schema: type: string tosca.datatypes.nfv.VnfTerminateOperationConfiguration: derived_from: tosca.datatypes.Root description: represents information that affect the invocation of the TerminateVnf properties: min_graceful_termination_timeout: type: scalar-unit.time description: Minimum timeout value for graceful termination of a VNF instance required: true max_recommended_graceful_termination_timeout: type: scalar-unit.time description: Maximum recommended timeout value that can be needed to gracefully terminate a VNF instance of a particular type under certain conditions, such as maximum load condition. This is provided by VNF provider as information for the operator facilitating the selection of optimal timeout value. This value is not used as constraint required: false tosca.datatypes.nfv.VnfOperateOperationConfiguration: derived_from: tosca.datatypes.Root description: represents information that affect the invocation of the OperateVnf operation properties: min_graceful_stop_timeout: type: scalar-unit.time description: Minimum timeout value for graceful stop of a VNF instance required: true max_recommended_graceful_stop_timeout: type: scalar-unit.time description: Maximum recommended timeout value that can be needed to gracefully stop a VNF instance of a particular type under certain conditions, such as maximum load condition. This is provided by VNF provider as information for the operator facilitating the selection of optimal timeout value. This value is not used as constraint required: false tosca.datatypes.nfv.ScaleInfo: derived_from: tosca.datatypes.Root description: Indicates for a given scaleAspect the corresponding scaleLevel properties: scale_level: type: integer description: The scale level for a particular aspect required: true constraints: - greater_or_equal: 0 tosca.datatypes.nfv.ScalingAspect: derived_from: tosca.datatypes.Root properties: name: type: string required: true description: type: string required: true max_scale_level: type: integer # positiveInteger required: true constraints: - greater_or_equal: 0 step_deltas: type: list required: false entry_schema: type: string # Identifier tosca.datatypes.nfv.VnfConfigurableProperties: derived_from: tosca.datatypes.Root description: indicates configuration properties for a given VNF (e.g. related to auto scaling and auto healing). properties: is_autoscale_enabled: type: boolean description: It permits to enable (TRUE)/disable (FALSE) the auto-scaling functionality. 
If the properties is not present for configuring, then VNF property is not supported required: false is_autoheal_enabled: type: boolean description: It permits to enable (TRUE)/disable (FALSE) the auto-healing functionality. If the properties is not present for configuring, then VNF property is not supported required: false # additional_configurable_properties: # description: It provides VNF specific configurable properties that # can be modified using the ModifyVnfInfo operation # required: false # type: tosca.datatypes.nfv.VnfAdditionalConfigurableProperties # derived types are expected to introduce # additional_configurable_properties with its type derived from # tosca.datatypes.nfv.VnfAdditionalConfigurableProperties tosca.datatypes.nfv.VnfAdditionalConfigurableProperties: derived_from: tosca.datatypes.Root description: is an empty base type for deriving data types for describing additional configurable properties for a given VNF tosca.datatypes.nfv.VnfInfoModifiableAttributes: derived_from: tosca.datatypes.Root description: Describes VNF-specific extension and metadata for a given VNF #properties: #extensions: #type: tosca.datatypes.nfv.VnfInfoModifiableAttributesExtensions #description: "Extension" properties of VnfInfo that are writeable #required: false # derived types are expected to introduce # extensions with its type derived from # tosca.datatypes.nfv.VnfInfoModifiableAttributesExtensions #metadata: #type: tosca.datatypes.nfv.VnfInfoModifiableAttributesMetadata #description: "Metadata" properties of VnfInfo that are writeable #required: false # derived types are expected to introduce # metadata with its type derived from # tosca.datatypes.nfv.VnfInfoModifiableAttributesMetadata tosca.datatypes.nfv.VnfInfoModifiableAttributesExtensions: derived_from: tosca.datatypes.Root description: is an empty base type for deriving data types for describing VNF-specific extension tosca.datatypes.nfv.VnfInfoModifiableAttributesMetadata: derived_from: tosca.datatypes.Root description: is an empty base type for deriving data types for describing VNF-specific metadata tosca.datatypes.nfv.LogicalNodeData: derived_from: tosca.datatypes.Root description: Describes compute, memory and I/O requirements associated with a particular VDU. properties: logical_node_requirements: type: map description: The logical node-level compute, memory and I/O requirements. A map of strings that contains a set of key-value pairs that describes hardware platform specific deployment requirements, including the number of CPU cores on this logical node, a memory configuration specific to a logical node or a requirement related to the association of an I/O device with the logical node. 
required: false entry_schema: type: string tosca.datatypes.nfv.SwImageData: derived_from: tosca.datatypes.Root description: describes information related to a software image artifact properties: # in SOL001 v0.8.0: "properties or metadata:" name: type: string description: Name of this software image required: true version: type: string description: Version of this software image required: true checksum: type: tosca.datatypes.nfv.ChecksumData description: Checksum of the software image file required: true container_format: type: string description: The container format describes the container file format in which software image is provided required: true constraints: - valid_values: [ aki, ami, ari, bare, docker, ova, ovf ] disk_format: type: string description: The disk format of a software image is the format of the underlying disk image required: true constraints: - valid_values: [ aki, ami, ari, iso, qcow2, raw, vdi, vhd, vhdx, vmdk ] min_disk: type: scalar-unit.size # Number description: The minimal disk size requirement for this software image required: true constraints: - greater_or_equal: 0 B min_ram: type: scalar-unit.size # Number description: The minimal RAM requirement for this software image required: false constraints: - greater_or_equal: 0 B size: type: scalar-unit.size # Number description: The size of this software image required: true operating_system: type: string description: Identifies the operating system used in the software image required: false supported_virtualisation_environments: type: list description: Identifies the virtualisation environments (e.g. hypervisor) compatible with this software image required: false entry_schema: type: string tosca.datatypes.nfv.VirtualBlockStorageData: derived_from: tosca.datatypes.Root description: VirtualBlockStorageData describes block storage requirements associated with compute resources in a particular VDU, either as a local disk or as virtual attached storage properties: size_of_storage: type: scalar-unit.size description: Size of virtualised storage resource required: true constraints: - greater_or_equal: 0 B vdu_storage_requirements: type: map description: The hardware platform specific storage requirements. A map of strings that contains a set of key-value pairs that represents the hardware platform specific storage deployment requirements. required: false entry_schema: type: string rdma_enabled: type: boolean description: Indicates if the storage support RDMA required: false default: false tosca.datatypes.nfv.VirtualObjectStorageData: derived_from: tosca.datatypes.Root description: VirtualObjectStorageData describes object storage requirements associated with compute resources in a particular VDU properties: max_size_of_storage: type: scalar-unit.size description: Maximum size of virtualized storage resource required: false constraints: - greater_or_equal: 0 B tosca.datatypes.nfv.VirtualFileStorageData: derived_from: tosca.datatypes.Root description: VirtualFileStorageData describes file storage requirements associated with compute resources in a particular VDU properties: size_of_storage: type: scalar-unit.size description: Size of virtualized storage resource required: true constraints: - greater_or_equal: 0 B file_system_protocol: type: string description: The shared file system protocol (e.g. 
NFS, CIFS) required: true tosca.datatypes.nfv.VirtualLinkBitrateLevel: derived_from: tosca.datatypes.Root description: Describes bitrate requirements applicable to the virtual link instantiated from a particicular VnfVirtualLink properties: bitrate_requirements: type: tosca.datatypes.nfv.LinkBitrateRequirements description: Virtual link bitrate requirements for an instantiation level or bitrate delta for a scaling step required: true tosca.datatypes.nfv.VnfOperationAdditionalParameters: derived_from: tosca.datatypes.Root description: Is an empty base type for deriving data type for describing VNF-specific parameters to be passed when invoking lifecycle management operations #properties: tosca.datatypes.nfv.VnfChangeFlavourOperationConfiguration: derived_from: tosca.datatypes.Root description: represents information that affect the invocation of the ChangeVnfFlavour operation #properties: tosca.datatypes.nfv.VnfChangeExtConnectivityOperationConfiguration: derived_from: tosca.datatypes.Root description: represents information that affect the invocation of the ChangeExtVnfConnectivity operation #properties: tosca.datatypes.nfv.VnfMonitoringParameter: derived_from: tosca.datatypes.Root description: Represents information on virtualised resource related performance metrics applicable to the VNF. properties: name: type: string description: Human readable name of the monitoring parameter required: true performance_metric: type: string description: Identifies the performance metric, according to ETSI GS NFV-IFA 027. required: true constraints: - valid_values: [ v_cpu_usage_mean_vnf, v_cpu_usage_peak_vnf, v_memory_usage_mean_vnf, v_memory_usage_peak_vnf, v_disk_usage_mean_vnf, v_disk_usage_peak_vnf, byte_incoming_vnf_ext_cp, byte_outgoing_vnf_ext_cp, packet_incoming_vnf_ext_cp, packet_outgoing_vnf_ext_cp ] collection_period: type: scalar-unit.time description: Describes the periodicity at which to collect the performance information. required: false constraints: - greater_than: 0 s tosca.datatypes.nfv.VnfcMonitoringParameter: derived_from: tosca.datatypes.Root description: Represents information on virtualised resource related performance metrics applicable to the VNF. properties: name: type: string description: Human readable name of the monitoring parameter required: true performance_metric: type: string description: Identifies the performance metric, according to ETSI GS NFV-IFA 027. required: true constraints: - valid_values: [ v_cpu_usage_mean_vnf, v_cpu_usage_peak_vnf, v_memory_usage_mean_vnf, v_memory_usage_peak_vnf, v_disk_usage_mean_vnf, v_disk_usage_peak_vnf, byte_incoming_vnf_int_cp, byte_outgoing_vnf_int_cp, packet_incoming_vnf_int_cp, packet_outgoing_vnf_int_cp ] collection_period: type: scalar-unit.time description: Describes the periodicity at which to collect the performance information. required: false constraints: - greater_than: 0 s tosca.datatypes.nfv.VirtualLinkMonitoringParameter: derived_from: tosca.datatypes.Root description: Represents information on virtualised resource related performance metrics applicable to the VNF. 
properties: name: type: string description: Human readable name of the monitoring parameter required: true performance_metric: type: string description: Identifies a performance metric derived from those defined in ETSI GS NFV-IFA 027.The packetOutgoingVirtualLink and packetIncomingVirtualLink metrics shall be obtained by aggregation the PacketOutgoing and PacketIncoming measurements defined in clause 7.1 of GS NFV-IFA 027 of all virtual link ports attached to the virtual link to which the metrics apply. required: true constraints: - valid_values: [ packet_outgoing_virtual_link, packet_incoming_virtual_link ] collection_period: type: scalar-unit.time description: Describes the periodicity at which to collect the performance information. required: false constraints: - greater_than: 0 s tosca.datatypes.nfv.InterfaceDetails: derived_from: tosca.datatypes.Root description: information used to access an interface exposed by a VNF properties: uri_components: type: tosca.datatypes.nfv.UriComponents description: Provides components to build a Uniform Ressource Identifier (URI) where to access the interface end point. required: false interface_specific_data: type: map description: Provides additional details that are specific to the type of interface considered. required: false entry_schema: type: string tosca.datatypes.nfv.UriComponents: derived_from: tosca.datatypes.Root description: information used to build a URI that complies with IETF RFC 3986 [8]. properties: scheme: type: string # shall comply with IETF RFC3986 description: scheme component of a URI. required: true authority: type: tosca.datatypes.nfv.UriAuthority description: Authority component of a URI required: false path: type: string # shall comply with IETF RFC 3986 description: path component of a URI. required: false query: type: string # shall comply with IETF RFC 3986 description: query component of a URI. required: false fragment: type: string # shall comply with IETF RFC 3986 description: fragment component of a URI. required: false tosca.datatypes.nfv.UriAuthority: derived_from: tosca.datatypes.Root description: information that corresponds to the authority component of a URI as specified in IETF RFC 3986 [8] properties: user_info: type: string # shall comply with IETF RFC 3986 description: user_info field of the authority component of a URI required: false host: type: string # shall comply with IETF RFC 3986 description: host field of the authority component of a URI required: false port: type: string # shall comply with IETF RFC 3986 description: port field of the authority component of a URI required: false tosca.datatypes.nfv.ChecksumData: derived_from: tosca.datatypes.Root description: Describes information about the result of performing a checksum operation over some arbitrary data properties: algorithm: type: string description: Describes the algorithm used to obtain the checksum value required: true constraints: - valid_values: [sha-224, sha-256, sha-384, sha-512 ] hash: type: string description: Contains the result of applying the algorithm indicated by the algorithm property to the data to which this ChecksumData refers required: true artifact_types: tosca.artifacts.nfv.SwImage: derived_from: tosca.artifacts.Deployment.Image description: describes the software image which is directly loaded on the virtualisation container realizing of the VDU or is to be loaded on a virtual storage resource. 
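# NOTE: Illustrative sketch only, not part of the original fixture. It shows how
# the SwImageData and ChecksumData types and the tosca.artifacts.nfv.SwImage
# artifact type defined above are combined on a Vdu.Compute node (values mirror
# VDU1 in helloworld3_df_simple.yaml earlier in this package):
#   properties:
#     sw_image_data:
#       name: Software of VDU1
#       version: '0.4.0'
#       checksum:
#         algorithm: sha-256
#         hash: b9c3036539fd7a5f87a1bf38eb05fdde8b556a1a7e664dbeda90ed3cd74b4f9d
#       container_format: bare
#       disk_format: qcow2
#       min_disk: 1 GB
#       size: 1 GB
#   artifacts:
#     sw_image:
#       type: tosca.artifacts.nfv.SwImage
#       file: cirros-0.4.0-x86_64-disk.img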
tosca.artifacts.Implementation.nfv.Mistral: derived_from: tosca.artifacts.Implementation description: artifacts for Mistral workflows mime_type: application/x-yaml file_ext: [ yaml ] capability_types: tosca.capabilities.nfv.VirtualBindable: derived_from: tosca.capabilities.Node description: Indicates that the node that includes it can be pointed by a tosca.relationships.nfv.VirtualBindsTo relationship type which is used to model the VduHasCpd association tosca.capabilities.nfv.VirtualCompute: derived_from: tosca.capabilities.Node description: Describes the capabilities related to virtual compute resources properties: logical_node: type: map description: Describes the Logical Node requirements required: false entry_schema: type: tosca.datatypes.nfv.LogicalNodeData requested_additional_capabilities: type: map description: Describes additional capability for a particular VDU required: false entry_schema: type: tosca.datatypes.nfv.RequestedAdditionalCapability compute_requirements: type: map required: false entry_schema: type: string virtual_memory: type: tosca.datatypes.nfv.VirtualMemory description: Describes virtual memory of the virtualized compute required: true virtual_cpu: type: tosca.datatypes.nfv.VirtualCpu description: Describes virtual CPU(s) of the virtualized compute required: true virtual_local_storage: type: list description: A list of virtual system disks created and destroyed as part of the VM lifecycle required: false entry_schema: type: tosca.datatypes.nfv.VirtualBlockStorageData description: virtual system disk definition tosca.capabilities.nfv.VirtualStorage: derived_from: tosca.capabilities.Root description: Describes the attachment capabilities related to Vdu.Storage relationship_types: tosca.relationships.nfv.VirtualBindsTo: derived_from: tosca.relationships.DependsOn description: Represents an association relationship between Vdu.Compute and VduCp node types valid_target_types: [ tosca.capabilities.nfv.VirtualBindable ] tosca.relationships.nfv.AttachesTo: derived_from: tosca.relationships.Root description: Represents an association relationship between the Vdu.Compute and one of the node types, Vdu.VirtualBlockStorage, Vdu.VirtualObjectStorage or Vdu.VirtualFileStorage valid_target_types: [ tosca.capabilities.nfv.VirtualStorage ] interface_types: tosca.interfaces.nfv.Vnflcm: derived_from: tosca.interfaces.Root description: This interface encompasses a set of TOSCA operations corresponding to the VNF LCM operations defined in ETSI GS NFV-IFA 007 as well as to preamble and postamble procedures to the execution of the VNF LCM operations. 
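# NOTE: Illustrative sketch only, not part of the original fixture. It shows how
# a VNF node template binds the tosca.interfaces.nfv.Vnflcm interface type
# described above; empty operation bodies request the default LCM behaviour
# (this mirrors the VNF node in helloworld3_df_simple.yaml earlier in this
# package):
#   VNF:
#     type: ntt.nslab.VNF
#     interfaces:
#       Vnflcm:
#         instantiate: []
#         terminate: []
#         modify_information: []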
instantiate: description: Invoked upon receipt of an Instantiate VNF request # inputs: # additional_parameters: # type: tosca.datatypes.nfv.VnfOperationAdditionalParameters # required: false # derived types are expected to introduce additional_parameters with # its type derived from # tosca.datatypes.nfv.VnfOperationAdditionalParameters instantiate_start: description: Invoked before instantiate instantiate_end: description: Invoked after instantiate terminate: description: Invoked upon receipt Terminate VNF request # inputs: # additional_parameters: # type: tosca.datatypes.nfv.VnfOperationAdditionalParameters # required: false # derived types are expected to introduce additional_parameters with # its type derived from # tosca.datatypes.nfv.VnfOperationAdditionalParameters terminate_start: description: Invoked before terminate terminate_end: description: Invoked after terminate modify_information: description: Invoked upon receipt of a Modify VNF Information request modify_information_start: description: Invoked before modify_information modify_information_end: description: Invoked after modify_information change_flavour: description: Invoked upon receipt of a Change VNF Flavour request # inputs: # additional_parameters: # type: tosca.datatypes.nfv.VnfOperationAdditionalParameters # required: false # derived types are expected to introduce additional_parameters with # its type derived from # tosca.datatypes.nfv.VnfOperationAdditionalParameters change_flavour_start: description: Invoked before change_flavour change_flavour_end: description: Invoked after change_flavour change_external_connectivity: description: Invoked upon receipt of a Change External VNF Connectivity request # inputs: # additional_parameters: # type: tosca.datatypes.nfv.VnfOperationAdditionalParameters # required: false # derived types are expected to introduce additional_parameters with # its type derived from # tosca.datatypes.nfv.VnfOperationAdditionalParameters change_external_connectivity_start: description: Invoked before change_external_connectivity change_external_connectivity_end: description: Invoked after change_external_connectivity operate: description: Invoked upon receipt of an Operate VNF request # inputs: # additional_parameters: # type: tosca.datatypes.nfv.VnfOperationAdditionalParameters # required: false # derived types are expected to introduce additional_parameters with # its type derived from # tosca.datatypes.nfv.VnfOperationAdditionalParameters operate_start: description: Invoked before operate operate_end: description: Invoked after operate heal: description: Invoked upon receipt of a Heal VNF request # inputs: # additional_parameters: # type: tosca.datatypes.nfv.VnfOperationAdditionalParameters # required: false # derived types are expected to introduce additional_parameters with # its type derived from # tosca.datatypes.nfv.VnfOperationAdditionalParameters heal_start: description: Invoked before heal heal_end: description: Invoked after heal scale: description: Invoked upon receipt of a Scale VNF request # inputs: # additional_parameters: # type: tosca.datatypes.nfv.VnfOperationAdditionalParameters # required: false # derived types are expected to introduce additional_parameters with # its type derived from # tosca.datatypes.nfv.VnfOperationAdditionalParameters scale_start: description: Invoked before scale scale_end: description: Invoked after scale scale_to_level: description: Invoked upon receipt of a Scale VNF to Level request # inputs: # additional_parameters: # type: 
tosca.datatypes.nfv.VnfOperationAdditionalParameters # required: false # derived types are expected to introduce additional_parameters with # its type derived from # tosca.datatypes.nfv.VnfOperationAdditionalParameters scale_to_level_start: description: Invoked before scale_to_level scale_to_level_end: description: Invoked after scale_to_level node_types: tosca.nodes.nfv.VNF: derived_from: tosca.nodes.Root description: The generic abstract type from which all VNF specific abstract node types shall be derived to form, together with other node types, the TOSCA service template(s) representing the VNFD properties: descriptor_id: # instead of vnfd_id type: string # GUID description: Globally unique identifier of the VNFD required: true descriptor_version: # instead of vnfd_version type: string description: Identifies the version of the VNFD required: true provider: # instead of vnf_provider type: string description: Provider of the VNF and of the VNFD required: true product_name: # instead of vnf_product_name type: string description: Human readable name for the VNF Product required: true software_version: # instead of vnf_software_version type: string description: Software version of the VNF required: true product_info_name: # instead of vnf_product_info_name type: string description: Human readable name for the VNF Product required: false product_info_description: # instead of vnf_product_info_description type: string description: Human readable description of the VNF Product required: false vnfm_info: type: list required: true description: Identifies VNFM(s) compatible with the VNF entry_schema: type: string constraints: - pattern: (^etsivnfm:v[0-9]?[0-9]\.[0-9]?[0-9]\.[0-9]?[0-9]$)|(^[0-9]+:[a-zA-Z0-9.-]+$) localization_languages: type: list description: Information about localization languages of the VNF required: false entry_schema: type: string #IETF RFC 5646 string default_localization_language: type: string #IETF RFC 5646 string description: Default localization language that is instantiated if no information about selected localization language is available required: false #configurable_properties: #type: tosca.datatypes.nfv.VnfConfigurableProperties #description: Describes the configurable properties of the VNF #required: false # derived types are expected to introduce configurable_properties # with its type derived from # tosca.datatypes.nfv.VnfConfigurableProperties #modifiable_attributes: #type: tosca.datatypes.nfv.VnfInfoModifiableAttributes #description: Describes the modifiable attributes of the VNF #required: false # derived types are expected to introduce modifiable_attributes # with its type derived from # tosca.datatypes.nfv.VnfInfoModifiableAttributes lcm_operations_configuration: type: tosca.datatypes.nfv.VnfLcmOperationsConfiguration description: Describes the configuration parameters for the VNF LCM operations required: false monitoring_parameters: type: list entry_schema: type: tosca.datatypes.nfv.VnfMonitoringParameter description: Describes monitoring parameters applicable to the VNF. 
required: false flavour_id: type: string description: Identifier of the Deployment Flavour within the VNFD required: true flavour_description: type: string description: Human readable description of the DF required: true vnf_profile: type: tosca.datatypes.nfv.VnfProfile description: Describes a profile for instantiating VNFs of a particular NS DF according to a specific VNFD and VNF DF required: false requirements: - virtual_link: capability: tosca.capabilities.nfv.VirtualLinkable relationship: tosca.relationships.nfv.VirtualLinksTo occurrences: [ 0, 1 ] # Additional requirements shall be defined in the VNF specific node type (deriving from tosca.nodes.nfv.VNF) corresponding to NS virtual links that need to connect to VnfExtCps interfaces: Vnflcm: type: tosca.interfaces.nfv.Vnflcm tosca.nodes.nfv.VnfExtCp: derived_from: tosca.nodes.nfv.Cp description: Describes a logical external connection point, exposed by the VNF enabling connection with an external Virtual Link properties: virtual_network_interface_requirements: type: list description: The actual virtual NIC requirements that is been assigned when instantiating the connection point required: false entry_schema: type: tosca.datatypes.nfv.VirtualNetworkInterfaceRequirements requirements: - external_virtual_link: capability: tosca.capabilities.nfv.VirtualLinkable relationship: tosca.relationships.nfv.VirtualLinksTo - internal_virtual_link: #name in ETSI NFV IFA011 v0.7.3: intVirtualLinkDesc capability: tosca.capabilities.nfv.VirtualLinkable relationship: tosca.relationships.nfv.VirtualLinksTo tosca.nodes.nfv.Vdu.Compute: derived_from: tosca.nodes.Root description: Describes the virtual compute part of a VDU which is a construct supporting the description of the deployment and operational behavior of a VNFC properties: name: type: string description: Human readable name of the VDU required: true description: type: string description: Human readable description of the VDU required: true boot_order: type: list # explicit index (boot index) not necessary, contrary to IFA011 description: References a node template name from which a valid boot device is created required: false entry_schema: type: string nfvi_constraints: type: list description: Describes constraints on the NFVI for the VNFC instance(s) created from this VDU required: false entry_schema: type: string monitoring_parameters: type: list description: Describes monitoring parameters applicable to a VNFC instantiated from this VDU required: false entry_schema: type: tosca.datatypes.nfv.VnfcMonitoringParameter #configurable_properties: #type: tosca.datatypes.nfv.VnfcConfigurableProperties #required: false # derived types are expected to introduce # configurable_properties with its type derived from # tosca.datatypes.nfv.VnfcConfigurableProperties vdu_profile: type: tosca.datatypes.nfv.VduProfile description: Defines additional instantiation data for the VDU.Compute node required: true sw_image_data: type: tosca.datatypes.nfv.SwImageData description: Defines information related to a SwImage artifact used by this Vdu.Compute node required: false # property is required when the node template has an associated artifact of type tosca.artifacts.nfv.SwImage and not required otherwise boot_data: type: string description: Contains a string or a URL to a file contained in the VNF package used to customize a virtualised compute resource at boot time. The bootData may contain variable parts that are replaced by deployment specific values before being sent to the VIM. 
required: false capabilities: virtual_compute: type: tosca.capabilities.nfv.VirtualCompute occurrences: [ 1, 1 ] virtual_binding: type: tosca.capabilities.nfv.VirtualBindable occurrences: [ 1, UNBOUNDED ] requirements: - virtual_storage: capability: tosca.capabilities.nfv.VirtualStorage relationship: tosca.relationships.nfv.AttachesTo occurrences: [ 0, UNBOUNDED ] tosca.nodes.nfv.Vdu.VirtualBlockStorage: derived_from: tosca.nodes.Root description: This node type describes the specifications of requirements related to virtual block storage resources properties: virtual_block_storage_data: type: tosca.datatypes.nfv.VirtualBlockStorageData description: Describes the block storage characteristics. required: true sw_image_data: type: tosca.datatypes.nfv.SwImageData description: Defines information related to a SwImage artifact used by this Vdu.Compute node. required: false # property is required when the node template has an associated artifact of type tosca.artifacts.nfv.SwImage and not required otherwise capabilities: virtual_storage: type: tosca.capabilities.nfv.VirtualStorage description: Defines the capabilities of virtual_storage. tosca.nodes.nfv.Vdu.VirtualObjectStorage: derived_from: tosca.nodes.Root description: This node type describes the specifications of requirements related to virtual object storage resources properties: virtual_object_storage_data: type: tosca.datatypes.nfv.VirtualObjectStorageData description: Describes the object storage characteristics. required: true capabilities: virtual_storage: type: tosca.capabilities.nfv.VirtualStorage description: Defines the capabilities of virtual_storage. tosca.nodes.nfv.Vdu.VirtualFileStorage: derived_from: tosca.nodes.Root description: This node type describes the specifications of requirements related to virtual file storage resources properties: virtual_file_storage_data: type: tosca.datatypes.nfv.VirtualFileStorageData description: Describes the file storage characteristics. required: true capabilities: virtual_storage: type: tosca.capabilities.nfv.VirtualStorage description: Defines the capabilities of virtual_storage. 
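# NOTE: Illustrative sketch only, not part of the original fixture. It shows how
# a Vdu.Compute node attaches a Vdu.VirtualBlockStorage node through the
# virtual_storage requirement (tosca.relationships.nfv.AttachesTo), mirroring
# VDU2 and VirtualStorage in helloworld3_df_simple.yaml earlier in this package:
#   VDU2:
#     type: tosca.nodes.nfv.Vdu.Compute
#     requirements:
#       - virtual_storage: VirtualStorage
#   VirtualStorage:
#     type: tosca.nodes.nfv.Vdu.VirtualBlockStorage
#     properties:
#       virtual_block_storage_data:
#         size_of_storage: 30 GB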
requirements: - virtual_link: capability: tosca.capabilities.nfv.VirtualLinkable relationship: tosca.relationships.nfv.VirtualLinksTo #description: Describes the requirements for linking to virtual link tosca.nodes.nfv.VduCp: derived_from: tosca.nodes.nfv.Cp description: describes network connectivity between a VNFC instance based on this VDU and an internal VL properties: bitrate_requirement: type: integer # in bits per second description: Bitrate requirement in bit per second on this connection point required: false constraints: - greater_or_equal: 0 virtual_network_interface_requirements: type: list description: Specifies requirements on a virtual network interface realising the CPs instantiated from this CPD required: false entry_schema: type: tosca.datatypes.nfv.VirtualNetworkInterfaceRequirements order: type: integer description: The order of the NIC on the compute instance (e.g.eth2) required: false constraints: - greater_or_equal: 0 vnic_type: type: string description: Describes the type of the virtual network interface realizing the CPs instantiated from this CPD required: false constraints: - valid_values: [ normal, virtio, direct-physical ] requirements: - virtual_link: capability: tosca.capabilities.nfv.VirtualLinkable relationship: tosca.relationships.nfv.VirtualLinksTo - virtual_binding: capability: tosca.capabilities.nfv.VirtualBindable relationship: tosca.relationships.nfv.VirtualBindsTo node: tosca.nodes.nfv.Vdu.Compute tosca.nodes.nfv.VnfVirtualLink: derived_from: tosca.nodes.Root description: Describes the information about an internal VNF VL properties: connectivity_type: type: tosca.datatypes.nfv.ConnectivityType description: Specifies the protocol exposed by the VL and the flow pattern supported by the VL required: true description: type: string description: Provides human-readable information on the purpose of the VL required: false test_access: type: list description: Test access facilities available on the VL required: false entry_schema: type: string constraints: - valid_values: [ passive_monitoring, active_loopback ] vl_profile: type: tosca.datatypes.nfv.VlProfile description: Defines additional data for the VL required: true monitoring_parameters: type: list description: Describes monitoring parameters applicable to the VL required: false entry_schema: type: tosca.datatypes.nfv.VirtualLinkMonitoringParameter capabilities: virtual_linkable: type: tosca.capabilities.nfv.VirtualLinkable group_types: tosca.groups.nfv.PlacementGroup: derived_from: tosca.groups.Root description: PlacementGroup is used for describing the affinity or anti-affinity relationship applicable between the virtualization containers to be created based on different VDUs, or between internal VLs to be created based on different VnfVirtualLinkDesc(s) properties: description: type: string description: Human readable description of the group required: true members: [ tosca.nodes.nfv.Vdu.Compute, tosca.nodes.nfv.VnfVirtualLink ] policy_types: tosca.policies.nfv.InstantiationLevels: derived_from: tosca.policies.Root description: The InstantiationLevels type is a policy type representing all the instantiation levels of resources to be instantiated within a deployment flavour and including default instantiation level in term of the number of VNFC instances to be created as defined in ETSI GS NFV-IFA 011 [1]. properties: levels: type: map # key: levelId description: Describes the various levels of resources that can be used to instantiate the VNF using this flavour. 
required: true entry_schema: type: tosca.datatypes.nfv.InstantiationLevel constraints: - min_length: 1 default_level: type: string # levelId description: The default instantiation level for this flavour. required: false # required if multiple entries in levels tosca.policies.nfv.VduInstantiationLevels: derived_from: tosca.policies.Root description: The VduInstantiationLevels type is a policy type representing all the instantiation levels of resources to be instantiated within a deployment flavour in term of the number of VNFC instances to be created from each vdu.Compute. as defined in ETSI GS NFV-IFA 011 [1] properties: levels: type: map # key: levelId description: Describes the Vdu.Compute levels of resources that can be used to instantiate the VNF using this flavour required: true entry_schema: type: tosca.datatypes.nfv.VduLevel constraints: - min_length: 1 targets: [ tosca.nodes.nfv.Vdu.Compute ] tosca.policies.nfv.VirtualLinkInstantiationLevels: derived_from: tosca.policies.Root description: The VirtualLinkInstantiationLevels type is a policy type representing all the instantiation levels of virtual link resources to be instantiated within a deployment flavour as defined in ETSI GS NFV-IFA 011 [1]. properties: levels: type: map # key: levelId description: Describes the virtual link levels of resources that can be used to instantiate the VNF using this flavour. required: true entry_schema: type: tosca.datatypes.nfv.VirtualLinkBitrateLevel constraints: - min_length: 1 targets: [ tosca.nodes.nfv.VnfVirtualLink ] tosca.policies.nfv.ScalingAspects: derived_from: tosca.policies.Root description: The ScalingAspects type is a policy type representing the scaling aspects used for horizontal scaling as defined in ETSI GS NFV-IFA 011 [1]. properties: aspects: type: map # key: aspectId description: Describe maximum scale level for total number of scaling steps that can be applied to a particular aspect required: true entry_schema: type: tosca.datatypes.nfv.ScalingAspect constraints: - min_length: 1 tosca.policies.nfv.VduScalingAspectDeltas: derived_from: tosca.policies.Root description: The VduScalingAspectDeltas type is a policy type representing the Vdu.Compute detail of an aspect deltas used for horizontal scaling, as defined in ETSI GS NFV-IFA 011 [1]. properties: aspect: type: string description: Represents the scaling aspect to which this policy applies required: true deltas: type: map # key: scalingDeltaId description: Describes the Vdu.Compute scaling deltas to be applied for every scaling steps of a particular aspect. required: true entry_schema: type: tosca.datatypes.nfv.VduLevel constraints: - min_length: 1 targets: [ tosca.nodes.nfv.Vdu.Compute ] tosca.policies.nfv.VirtualLinkBitrateScalingAspectDeltas: derived_from: tosca.policies.Root description: The VirtualLinkBitrateScalingAspectDeltas type is a policy type representing the VnfVirtualLink detail of an aspect deltas used for horizontal scaling, as defined in ETSI GS NFV-IFA 011 [1]. properties: aspect: type: string description: Represents the scaling aspect to which this policy applies. required: true deltas: type: map # key: scalingDeltaId description: Describes the VnfVirtualLink scaling deltas to be applied for every scaling steps of a particular aspect. 
required: true entry_schema: type: tosca.datatypes.nfv.VirtualLinkBitrateLevel constraints: - min_length: 1 targets: [ tosca.nodes.nfv.VnfVirtualLink ] tosca.policies.nfv.VduInitialDelta: derived_from: tosca.policies.Root description: The VduInitialDelta type is a policy type representing the Vdu.Compute detail of an initial delta used for horizontal scaling, as defined in ETSI GS NFV-IFA 011 [1]. properties: initial_delta: type: tosca.datatypes.nfv.VduLevel description: Represents the initial minimum size of the VNF. required: true targets: [ tosca.nodes.nfv.Vdu.Compute ] tosca.policies.nfv.VirtualLinkBitrateInitialDelta: derived_from: tosca.policies.Root description: The VirtualLinkBitrateInitialDelta type is a policy type representing the VnfVirtualLink detail of an initial deltas used for horizontal scaling, as defined in ETSI GS NFV-IFA 011 [1]. properties: initial_delta: type: tosca.datatypes.nfv.VirtualLinkBitrateLevel description: Represents the initial minimum size of the VNF. required: true targets: [ tosca.nodes.nfv.VnfVirtualLink ] tosca.policies.nfv.AffinityRule: derived_from: tosca.policies.Placement description: The AffinityRule describes the affinity rules applicable for the defined targets properties: scope: type: string description: scope of the rule is an NFVI_node, an NFVI_PoP, etc. required: true constraints: - valid_values: [ nfvi_node, zone, zone_group, nfvi_pop ] targets: [ tosca.nodes.nfv.Vdu.Compute, tosca.nodes.nfv.VnfVirtualLink, tosca.groups.nfv.PlacementGroup ] tosca.policies.nfv.AntiAffinityRule: derived_from: tosca.policies.Placement description: The AntiAffinityRule describes the anti-affinity rules applicable for the defined targets properties: scope: type: string description: scope of the rule is an NFVI_node, an NFVI_PoP, etc. required: true constraints: - valid_values: [ nfvi_node, zone, zone_group, nfvi_pop ] targets: [ tosca.nodes.nfv.Vdu.Compute, tosca.nodes.nfv.VnfVirtualLink, tosca.groups.nfv.PlacementGroup ] tosca.policies.nfv.SecurityGroupRule: derived_from: tosca.policies.Root description: The SecurityGroupRule type is a policy type specified the matching criteria for the ingress and/or egress traffic to/from visited connection points as defined in ETSI GS NFV-IFA 011 [1]. properties: description: type: string description: Human readable description of the security group rule. required: false direction: type: string description: The direction in which the security group rule is applied. The direction of 'ingress' or 'egress' is specified against the associated CP. I.e., 'ingress' means the packets entering a CP, while 'egress' means the packets sent out of a CP. required: false constraints: - valid_values: [ ingress, egress ] default: ingress ether_type: type: string description: Indicates the protocol carried over the Ethernet layer. required: false constraints: - valid_values: [ ipv4, ipv6 ] default: ipv4 protocol: type: string description: Indicates the protocol carried over the IP layer. Permitted values include any protocol defined in the IANA protocol registry, e.g. TCP, UDP, ICMP, etc. 
required: false constraints: - valid_values: [ hopopt, icmp, igmp, ggp, ipv4, st, tcp, cbt, egp, igp, bbn_rcc_mon, nvp_ii, pup, argus, emcon, xnet, chaos, udp, mux, dcn_meas, hmp, prm, xns_idp, trunk_1, trunk_2, leaf_1, leaf_2, rdp, irtp, iso_tp4, netblt, mfe_nsp, merit_inp, dccp, 3pc, idpr, xtp, ddp, idpr_cmtp, tp++, il, ipv6, sdrp, ipv6_route, ipv6_frag, idrp, rsvp, gre, dsr, bna, esp, ah, i_nlsp, swipe, narp, mobile, tlsp, skip, ipv6_icmp, ipv6_no_nxt, ipv6_opts, cftp, sat_expak, kryptolan, rvd, ippc, sat_mon, visa, ipcv, cpnx, cphb, wsn, pvp, br_sat_mon, sun_nd, wb_mon, wb_expak, iso_ip, vmtp, secure_vmtp, vines, ttp, iptm, nsfnet_igp, dgp, tcf, eigrp, ospfigp, sprite_rpc, larp, mtp, ax.25, ipip, micp, scc_sp, etherip, encap, gmtp, ifmp, pnni, pim, aris, scps, qnx, a/n, ip_comp, snp, compaq_peer, ipx_in_ip, vrrp, pgm, l2tp, ddx, iatp, stp, srp, uti, smp, sm, ptp, isis, fire, crtp, crudp, sscopmce, iplt, sps, pipe, sctp, fc, rsvp_e2e_ignore, mobility, udp_lite, mpls_in_ip, manet, hip, shim6, wesp, rohc ] default: tcp port_range_min: type: integer description: Indicates minimum port number in the range that is matched by the security group rule. If a value is provided at design-time, this value may be overridden at run-time based on other deployment requirements or constraints. required: false constraints: - greater_or_equal: 0 - less_or_equal: 65535 default: 0 port_range_max: type: integer description: Indicates maximum port number in the range that is matched by the security group rule. If a value is provided at design-time, this value may be overridden at run-time based on other deployment requirements or constraints. required: false constraints: - greater_or_equal: 0 - less_or_equal: 65535 default: 65535 targets: [ tosca.nodes.nfv.VduCp, tosca.nodes.nfv.VnfExtCp ] tosca.policies.nfv.SupportedVnfInterface: derived_from: tosca.policies.Root description: this policy type represents interfaces produced by a VNF, the details to access them and the applicable connection points to use to access these interfaces properties: interface_name: type: string description: Identifies an interface produced by the VNF. 
required: true constraints: - valid_values: [ vnf_indicator, vnf_configuration ] details: type: tosca.datatypes.nfv.InterfaceDetails description: Provide additional data to access the interface endpoint required: false targets: [ tosca.nodes.nfv.VnfExtCp, tosca.nodes.nfv.VduCp ] ././@PaxHeader0000000000000000000000000000025100000000000011453 xustar0000000000000000147 path=python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/Definitions/helloworld3_df_simple.yaml 22 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/D0000664000175000017500000001705600000000000034072 0ustar00zuulzuul00000000000000tosca_definitions_version: tosca_simple_yaml_1_2 description: Simple deployment flavour for Sample VNF imports: - etsi_nfv_sol001_common_types.yaml - etsi_nfv_sol001_vnfd_types.yaml - helloworld3_types.yaml topology_template: inputs: descriptor_id: type: string descriptor_version: type: string provider: type: string product_name: type: string software_version: type: string vnfm_info: type: list entry_schema: type: string flavour_id: type: string flavour_description: type: string substitution_mappings: node_type: ntt.nslab.VNF properties: flavour_id: simple requirements: virtual_link_external: [ CP1, virtual_link ] node_templates: VNF: type: ntt.nslab.VNF properties: flavour_description: A simple flavour interfaces: Vnflcm: # supporting only 'instantiate', 'terminate', 'modify' # not supporting LCM script, supporting only default LCM instantiate: [] instantiate_start: [] instantiate_end: [] terminate: [] terminate_start: [] terminate_end: [] modify_information: [] modify_information_start: [] modify_information_end: [] # change_flavour: [] # change_flavour_start: [] # change_flavour_end: [] # change_external_connectivity: [] # change_external_connectivity_start: [] # change_external_connectivity_end: [] # operate: [] # operate_start: [] # operate_end: [] # heal: [] # heal_start: [] # heal_end: [] # scale: [] # scale_start: [] # scale_end: [] # scale_to_level: [] # scale_to_level_start: [] # scale_to_level_end: [] VDU1: type: tosca.nodes.nfv.Vdu.Compute properties: name: VDU1 description: VDU1 compute node vdu_profile: min_number_of_instances: 1 max_number_of_instances: 1 sw_image_data: name: Software of VDU1 version: '0.4.0' checksum: algorithm: sha-256 hash: b9c3036539fd7a5f87a1bf38eb05fdde8b556a1a7e664dbeda90ed3cd74b4f9d container_format: bare disk_format: qcow2 min_disk: 1 GB size: 1 GB artifacts: sw_image: type: tosca.artifacts.nfv.SwImage file: cirros-0.4.0-x86_64-disk.img repository: http://download.cirros-cloud.net/0.4.0/ capabilities: virtual_compute: properties: virtual_memory: virtual_mem_size: 512 MB virtual_cpu: num_virtual_cpu: 1 virtual_local_storage: - size_of_storage: 1 GB VDU2: type: tosca.nodes.nfv.Vdu.Compute properties: name: VDU2 description: VDU2 compute node vdu_profile: min_number_of_instances: 1 max_number_of_instances: 3 capabilities: virtual_compute: properties: virtual_memory: virtual_mem_size: 512 MB virtual_cpu: num_virtual_cpu: 1 virtual_local_storage: - size_of_storage: 1 GB requirements: - virtual_storage: VirtualStorage VirtualStorage: type: tosca.nodes.nfv.Vdu.VirtualBlockStorage properties: virtual_block_storage_data: size_of_storage: 30 GB rdma_enabled: true sw_image_data: name: VrtualStorage version: '0.4.0' checksum: algorithm: sha-256 hash: b9c3036539fd7a5f87a1bf38eb05fdde8b556a1a7e664dbeda90ed3cd74b4f9d container_format: bare disk_format: qcow2 
min_disk: 2 GB min_ram: 8192 MB size: 2 GB artifacts: sw_image: type: tosca.artifacts.nfv.SwImage file: cirros-0.4.0-x86_64-disk.img repository: http://download.cirros-cloud.net/0.4.0/ CP1: type: tosca.nodes.nfv.VduCp properties: layer_protocols: [ ipv4 ] order: 0 vnic_type: direct-physical requirements: - virtual_binding: VDU1 #- virtual_link: # the target node is determined in the NSD CP2: type: tosca.nodes.nfv.VduCp properties: layer_protocols: [ ipv4 ] order: 1 requirements: - virtual_binding: VDU1 - virtual_link: internalVL2 CP3: type: tosca.nodes.nfv.VduCp properties: layer_protocols: [ ipv4 ] order: 2 requirements: - virtual_binding: VDU2 - virtual_link: internalVL2 internalVL2: type: tosca.nodes.nfv.VnfVirtualLink properties: connectivity_type: layer_protocols: [ ipv4 ] description: Internal Virtual link in the VNF vl_profile: max_bitrate_requirements: root: 1048576 leaf: 1048576 min_bitrate_requirements: root: 1048576 leaf: 1048576 virtual_link_protocol_data: - associated_layer_protocol: ipv4 l3_protocol_data: ip_version: ipv4 cidr: 11.11.0.0/24 policies: - scaling_aspects: type: tosca.policies.nfv.ScalingAspects properties: aspects: worker_instance: name: worker_instance_aspect description: worker_instance scaling aspect max_scale_level: 2 step_deltas: - delta_1 - VDU2_initial_delta: type: tosca.policies.nfv.VduInitialDelta properties: initial_delta: number_of_instances: 1 targets: [ VDU2 ] - VDU2_scaling_aspect_deltas: type: tosca.policies.nfv.VduScalingAspectDeltas properties: aspect: worker_instance deltas: delta_2: number_of_instances: 1 targets: [ VDU2 ] - instantiation_levels: type: tosca.policies.nfv.InstantiationLevels properties: levels: instantiation_level_1: description: Smallest size scale_info: worker_instance: scale_level: 0 instantiation_level_2: description: Largest size scale_info: worker_instance: scale_level: 2 default_level: instantiation_level_1 - VDU1_instantiation_levels: type: tosca.policies.nfv.VduInstantiationLevels properties: levels: instantiation_level_1: number_of_instances: 1 instantiation_level_2: number_of_instances: 3 targets: [ VDU1 ] - VDU2_instantiation_levels: type: tosca.policies.nfv.VduInstantiationLevels properties: levels: instantiation_level_1: number_of_instances: 1 instantiation_level_2: number_of_instances: 1 targets: [ VDU2 ] - internalVL2_instantiation_levels: type: tosca.policies.nfv.VirtualLinkInstantiationLevels properties: levels: instantiation_level_1: bitrate_requirements: root: 1048576 leaf: 1048576 instantiation_level_2: bitrate_requirements: root: 1048576 leaf: 1048576 targets: [ internalVL2 ] ././@PaxHeader0000000000000000000000000000025000000000000011452 xustar0000000000000000146 path=python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/Definitions/helloworld3_top.vnfd.yaml 22 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/D0000664000175000017500000000165100000000000034064 0ustar00zuulzuul00000000000000tosca_definitions_version: tosca_simple_yaml_1_0 description: Sample VNF of NTT NS lab. imports: - etsi_nfv_sol001_common_types.yaml - etsi_nfv_sol001_vnfd_types.yaml - helloworld3_types.yaml - helloworld3_df_simple.yaml # - helloworld3_df_complex.yaml topology_template: inputs: selected_flavour: type: string description: VNF deployment flavour selected by the consumer. 
It is provided in the API node_templates: VNF: type: ntt.nslab.VNF properties: flavour_id: { get_input: selected_flavour } descriptor_id: b1bb0ce7-ebca-4fa7-95ed-4840d70a1177 provider: NTT NS lab product_name: Sample VNF software_version: '1.0' descriptor_version: '1.0' vnfm_info: - Tacker requirements: #- virtual_link_external # mapped in lower-level templates #- virtual_link_internal # mapped in lower-level templates ././@PaxHeader0000000000000000000000000000024500000000000011456 xustar0000000000000000143 path=python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/Definitions/helloworld3_types.yaml 22 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/D0000664000175000017500000000300600000000000034060 0ustar00zuulzuul00000000000000tosca_definitions_version: tosca_simple_yaml_1_0 description: ntt.nslab.VNF type definition imports: - etsi_nfv_sol001_common_types.yaml - etsi_nfv_sol001_vnfd_types.yaml node_types: ntt.nslab.VNF: derived_from: tosca.nodes.nfv.VNF properties: descriptor_id: type: string constraints: [ valid_values: [ b1bb0ce7-ebca-4fa7-95ed-4840d70a1177 ] ] default: b1bb0ce7-ebca-4fa7-95ed-4840d70a1177 descriptor_version: type: string constraints: [ valid_values: [ '1.0' ] ] default: '1.0' provider: type: string constraints: [ valid_values: [ 'NTT NS lab' ] ] default: 'NTT NS lab' product_name: type: string constraints: [ valid_values: [ 'Sample VNF' ] ] default: 'Sample VNF' software_version: type: string constraints: [ valid_values: [ '1.0' ] ] default: '1.0' vnfm_info: type: list entry_schema: type: string constraints: [ valid_values: [ Tacker ] ] default: [ Tacker ] flavour_id: type: string constraints: [ valid_values: [ simple ] ] default: simple flavour_description: type: string default: "" requirements: - virtual_link_external: capability: tosca.capabilities.nfv.VirtualLinkable - virtual_link_internal: capability: tosca.capabilities.nfv.VirtualLinkable interfaces: Vnflcm: type: tosca.interfaces.nfv.Vnflcm ././@PaxHeader0000000000000000000000000000021700000000000011455 xustar0000000000000000117 path=python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/Scripts/ 26 mtime=1715864642.75062 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/S0000775000175000017500000000000000000000000034076 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000022500000000000011454 xustar0000000000000000127 path=python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/Scripts/install.sh 22 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/S0000664000175000017500000000004200000000000034074 0ustar00zuulzuul00000000000000#!/bin/bash echo "Hello, World!" 
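The TOSCA.meta manifest in the next archive entry records a SHA-256 digest for the Scripts/install.sh artifact shown above. As an illustrative sketch only — not part of the packaged fixture or of python-tackerclient itself, with the helper name and paths chosen for the example — such an artifact digest could be computed and compared against the manifest value using nothing beyond the Python standard library:

# Hypothetical helper (not part of python-tackerclient): computes the hex
# SHA-256 digest of an artifact file so it can be compared with the value
# recorded in a CSAR TOSCA.meta "Hash:" entry.
import hashlib

def artifact_sha256(path, chunk_size=8192):
    digest = hashlib.sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()

# Example usage (paths assume the sample package layout in this fixture):
# expected = '27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b236ef7b6f50d6674b34f02a'
# assert artifact_sha256('Scripts/install.sh') == expected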
././@PaxHeader0000000000000000000000000000022600000000000011455 xustar0000000000000000124 path=python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/TOSCA-Metadata/ 26 mtime=1715864642.75062 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/T0000775000175000017500000000000000000000000034077 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000023400000000000011454 xustar0000000000000000134 path=python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/TOSCA-Metadata/TOSCA.meta 22 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/fixture_data/sample_vnf_package_artifacts/T0000664000175000017500000000036500000000000034105 0ustar00zuulzuul00000000000000TOSCA-Meta-File-Version: 1.0 Created-by: Dummy User CSAR-Version: 1.1 Entry-Definitions: Definitions/helloworld3_top.vnfd.yaml Source: Scripts/install.sh Algorithm: SHA-256 Hash: 27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b236ef7b6f50d6674b34f02a ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/test_vnf_package.py0000664000175000017500000010604300000000000027310 0ustar00zuulzuul00000000000000# Copyright (C) 2019 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import copy import filecmp import os import shutil import sys import tempfile from unittest import mock import ddt import zipfile from tackerclient.common import exceptions from tackerclient.osc import utils as tacker_osc_utils from tackerclient.osc.v1.vnfpkgm import vnf_package from tackerclient.tests.unit.osc import base from tackerclient.tests.unit.osc.v1.fixture_data import client from tackerclient.tests.unit.osc.v1 import vnf_package_fakes from tackerclient.v1_0 import client as proxy_client def _create_zip(): vnf_package_path = ('./tackerclient/tests//unit/osc/v1/fixture_data/' 'sample_vnf_package') tmpdir = tempfile.mkdtemp() tmparchive = os.path.join(tmpdir, 'sample_vnf_package') zip_file = shutil.make_archive(tmparchive, 'zip', vnf_package_path) return zip_file, tmpdir def _get_columns_vnf_package(action='list', vnf_package_obj=None): columns = [] if action == 'update': if vnf_package_obj.get('userDefinedData'): columns.extend(['User Defined Data']) if vnf_package_obj.get('operationalState'): columns.extend(['Operational State']) return columns columns.extend(['ID', 'Onboarding State', 'Operational State', 'Usage State', 'User Defined Data', 'Links']) if action in ['show', 'create']: if vnf_package_obj and vnf_package_obj[ 'onboardingState'] == 'ONBOARDED': columns.extend(['VNFD ID', 'VNF Provider', 'VNF Software Version', 'VNFD Version', 'Software Images', 'VNF Product Name', 'Checksum', 'Additional Artifacts']) return columns class TestVnfPackage(base.FixturedTestCase): client_fixture_class = client.ClientFixture def setUp(self): super(TestVnfPackage, self).setUp() self.url = client.TACKER_URL self.header = {'content-type': 'application/json'} self.app = mock.Mock() self.app_args = mock.Mock() self.client_manager = self.cs self.app.client_manager.tackerclient = self.client_manager @ddt.ddt class TestCreateVnfPackage(TestVnfPackage): def setUp(self): super(TestCreateVnfPackage, self).setUp() self.create_vnf_package = vnf_package.CreateVnfPackage( self.app, self.app_args, cmd_name='vnf package create') @ddt.data((["--user-data", 'Test_key=Test_value'], [('user_data', {'Test_key': 'Test_value'})]), ([], [])) @ddt.unpack def test_take_action(self, arglist, verifylist): # command param parsed_args = self.check_parser(self.create_vnf_package, arglist, verifylist) if arglist: json = vnf_package_fakes.vnf_package_obj( attrs={'userDefinedData': {'Test_key': 'Test_value'}}) else: json = vnf_package_fakes.vnf_package_obj() self.requests_mock.register_uri( 'POST', self.url + '/vnfpkgm/v1/vnf_packages', json=json, headers=self.header) columns, data = (self.create_vnf_package.take_action(parsed_args)) self.assertCountEqual(_get_columns_vnf_package(), columns) headers, attributes = vnf_package._get_columns(json) self.assertListItemsEqual(vnf_package_fakes.get_vnf_package_data( json, columns=attributes), data) @ddt.ddt class TestListVnfPackage(TestVnfPackage): def setUp(self): super(TestListVnfPackage, self).setUp() self.list_vnf_package = vnf_package.ListVnfPackage( self.app, self.app_args, cmd_name='vnf package list') self._vnf_packages = self._get_vnf_packages() def _get_vnf_packages(self, onboarded_vnf_package=False): return vnf_package_fakes.create_vnf_packages( count=3, onboarded_vnf_package=onboarded_vnf_package) def get_list_columns(self, all_fields=False, exclude_fields=None, extra_fields=None, exclude_default=False): columns = ['Id', 'Vnf Product Name', 'Onboarding State', 'Usage State', 'Operational State', 'Links'] complex_columns = [ 'Checksum', 'Software Images', 'User Defined 
Data', 'Additional Artifacts'] simple_columns = ['Vnfd Version', 'Vnf Provider', 'Vnfd Id', 'Vnf Software Version'] if extra_fields: columns.extend(extra_fields) if exclude_fields: columns.extend([field for field in complex_columns if field not in exclude_fields]) if all_fields: columns.extend(complex_columns) columns.extend(simple_columns) if exclude_default: columns.extend(simple_columns) return columns def _get_mock_response_for_list_vnf_packages( self, filter_attribute, json=None): self.requests_mock.register_uri( 'GET', self.url + '/vnfpkgm/v1/vnf_packages?' + filter_attribute, json=json if json else self._get_vnf_packages(), headers=self.header) def test_take_action_default_fields(self): parsed_args = self.check_parser(self.list_vnf_package, [], []) self.requests_mock.register_uri( 'GET', self.url + '/vnfpkgm/v1/vnf_packages', json=self._vnf_packages, headers=self.header) actual_columns, data = self.list_vnf_package.take_action(parsed_args) expected_data = [] headers, columns = tacker_osc_utils.get_column_definitions( self.list_vnf_package.get_attributes(), long_listing=True) for vnf_package_obj in self._vnf_packages['vnf_packages']: expected_data.append(vnf_package_fakes.get_vnf_package_data( vnf_package_obj, columns=columns, list_action=True)) self.assertCountEqual(self.get_list_columns(), actual_columns) self.assertListItemsEqual(expected_data, list(data)) @ddt.data('all_fields', 'exclude_default') def test_take_action(self, arg): parsed_args = self.check_parser( self.list_vnf_package, ["--" + arg, "--filter", '(eq,onboardingState,ONBOARDED)'], [(arg, True), ('filter', '(eq,onboardingState,ONBOARDED)')]) vnf_packages = self._get_vnf_packages(onboarded_vnf_package=True) self._get_mock_response_for_list_vnf_packages( 'filter=(eq,onboardingState,ONBOARDED)&' + arg, json=vnf_packages) actual_columns, data = self.list_vnf_package.take_action(parsed_args) expected_data = [] kwargs = {arg: True} headers, columns = tacker_osc_utils.get_column_definitions( self.list_vnf_package.get_attributes(**kwargs), long_listing=True) for vnf_package_obj in vnf_packages['vnf_packages']: expected_data.append(vnf_package_fakes.get_vnf_package_data( vnf_package_obj, columns=columns, list_action=True, **kwargs)) self.assertCountEqual(self.get_list_columns(**kwargs), actual_columns) self.assertListItemsEqual(expected_data, list(data)) def test_take_action_with_pagination(self): next_links_num = 3 parsed_args = self.check_parser(self.list_vnf_package, [], []) path = os.path.join(self.url, '/vnfpkgm/v1/vnf_packages?') links = [0] * next_links_num link_headers = [0] * next_links_num for i in range(next_links_num): links[i] = ( '{base_url}?nextpage_opaque_marker={vnf_package_id}'.format( base_url=path, vnf_package_id=self._vnf_packages ['vnf_packages'][i]['id'])) link_headers[i] = copy.deepcopy(self.header) link_headers[i]['Link'] = '<{link_url}>; rel="next"'.format( link_url=links[i]) self.requests_mock.register_uri( 'GET', path, json=[self._vnf_packages['vnf_packages'][0]], headers=link_headers[0]) self.requests_mock.register_uri( 'GET', links[0], json=[self._vnf_packages['vnf_packages'][1]], headers=link_headers[1]) self.requests_mock.register_uri( 'GET', links[1], json=[self._vnf_packages['vnf_packages'][2]], headers=link_headers[2]) self.requests_mock.register_uri( 'GET', links[2], json=[], headers=self.header) actual_columns, data = self.list_vnf_package.take_action(parsed_args) kwargs = {} headers, columns = tacker_osc_utils.get_column_definitions( self.list_vnf_package.get_attributes(**kwargs), 
long_listing=True) expected_data = [] for vnf_package_obj in self._vnf_packages['vnf_packages']: expected_data.append(vnf_package_fakes.get_vnf_package_data( vnf_package_obj, columns=columns, list_action=True, **kwargs)) self.assertCountEqual(self.get_list_columns(**kwargs), actual_columns) self.assertListItemsEqual(expected_data, list(data)) def test_take_action_with_exclude_fields(self): parsed_args = self.check_parser( self.list_vnf_package, ["--exclude_fields", 'softwareImages,checksum,' 'userDefinedData,additionalArtifacts', "--filter", '(eq,onboardingState,ONBOARDED)'], [('exclude_fields', 'softwareImages,checksum,' 'userDefinedData,additionalArtifacts'), ('filter', '(eq,onboardingState,ONBOARDED)')]) vnf_packages = self._get_vnf_packages(onboarded_vnf_package=True) updated_vnf_packages = {'vnf_packages': []} for vnf_pkg in vnf_packages['vnf_packages']: vnf_pkg.pop('softwareImages') vnf_pkg.pop('checksum') vnf_pkg.pop('userDefinedData') vnf_pkg.pop('additionalArtifacts') updated_vnf_packages['vnf_packages'].append(vnf_pkg) self._get_mock_response_for_list_vnf_packages( 'filter=(eq,onboardingState,ONBOARDED)&' 'exclude_fields=softwareImages,checksum,' 'userDefinedData,additionalArtifacts', json=updated_vnf_packages) actual_columns, data = self.list_vnf_package.take_action(parsed_args) expected_data = [] headers, columns = tacker_osc_utils.get_column_definitions( self.list_vnf_package.get_attributes( exclude_fields=['softwareImages', 'checksum', 'userDefinedData', 'additionalArtifacts']), long_listing=True) for vnf_package_obj in updated_vnf_packages['vnf_packages']: expected_data.append(vnf_package_fakes.get_vnf_package_data( vnf_package_obj, columns=columns, list_action=True)) expected_columns = self.get_list_columns( exclude_fields=['Software Images', 'Checksum', 'User Defined Data', 'Additional Artifacts']) self.assertCountEqual(expected_columns, actual_columns) self.assertListItemsEqual(expected_data, list(data)) @ddt.data((['--all_fields', '--fields', 'softwareImages'], [('all_fields', True), ('fields', 'softwareImages')]), (['--all_fields', '--exclude_fields', 'checksum'], [('all_fields', True), ('exclude_fields', 'checksum')]), (['--fields', 'softwareImages', '--exclude_fields', 'checksum'], [('fields', 'softwareImages'), ('exclude_fields', 'checksum')])) @ddt.unpack def test_take_action_with_invalid_combination(self, arglist, verifylist): self.assertRaises(base.ParserException, self.check_parser, self.list_vnf_package, arglist, verifylist) def test_take_action_with_valid_combination(self): parsed_args = self.check_parser( self.list_vnf_package, ["--fields", 'softwareImages,checksum', "--exclude_default"], [('fields', 'softwareImages,checksum'), ('exclude_default', True)]) vnf_packages = self._get_vnf_packages(onboarded_vnf_package=True) updated_vnf_packages = {'vnf_packages': []} for vnf_pkg in vnf_packages['vnf_packages']: vnf_pkg.pop('userDefinedData') updated_vnf_packages['vnf_packages'].append(vnf_pkg) self._get_mock_response_for_list_vnf_packages( 'exclude_default&fields=softwareImages,checksum', json=updated_vnf_packages) actual_columns, data = self.list_vnf_package.take_action(parsed_args) expected_data = [] headers, columns = tacker_osc_utils.get_column_definitions( self.list_vnf_package.get_attributes( extra_fields=['softwareImages', 'checksum'], exclude_default=True), long_listing=True) for vnf_package_obj in updated_vnf_packages['vnf_packages']: expected_data.append(vnf_package_fakes.get_vnf_package_data( vnf_package_obj, columns=columns, list_action=True, 
exclude_default=True)) self.assertCountEqual(self.get_list_columns( extra_fields=['Software Images', 'Checksum'], exclude_default=True), actual_columns) self.assertListItemsEqual(expected_data, list(data)) @ddt.ddt class TestShowVnfPackage(TestVnfPackage): def setUp(self): super(TestShowVnfPackage, self).setUp() self.show_vnf_package = vnf_package.ShowVnfPackage( self.app, self.app_args, cmd_name='vnf package show') @ddt.data(True, False) def test_take_action(self, onboarded): vnf_package_obj = vnf_package_fakes.vnf_package_obj( onboarded_state=onboarded) arglist = [vnf_package_obj['id']] verifylist = [('vnf_package', vnf_package_obj['id'])] parsed_args = self.check_parser(self.show_vnf_package, arglist, verifylist) url = self.url + '/vnfpkgm/v1/vnf_packages/' + vnf_package_obj['id'] self.requests_mock.register_uri('GET', url, json=vnf_package_obj, headers=self.header) columns, data = (self.show_vnf_package.take_action(parsed_args)) self.assertCountEqual(_get_columns_vnf_package( vnf_package_obj=vnf_package_obj, action='show'), columns) headers, attributes = vnf_package._get_columns(vnf_package_obj) self.assertListItemsEqual( vnf_package_fakes.get_vnf_package_data(vnf_package_obj, columns=attributes), data) def test_show_no_options(self): self.assertRaises(base.ParserException, self.check_parser, self.show_vnf_package, [], []) class TestDeleteVnfPackage(TestVnfPackage): def setUp(self): super(TestDeleteVnfPackage, self).setUp() self.delete_vnf_package = vnf_package.DeleteVnfPackage( self.app, self.app_args, cmd_name='vnf package delete') # The Vnf Package to delete self._vnf_package = vnf_package_fakes.create_vnf_packages(count=3) def _mock_request_url_for_delete(self, vnf_pkg_index): url = (self.url + '/vnfpkgm/v1/vnf_packages/' + self._vnf_package['vnf_packages'][vnf_pkg_index]['id']) json = self._vnf_package['vnf_packages'][vnf_pkg_index] self.requests_mock.register_uri('GET', url, json=json, headers=self.header) self.requests_mock.register_uri('DELETE', url, headers=self.header, json={}) def test_delete_one_vnf_package(self): arglist = [self._vnf_package['vnf_packages'][0]['id']] verifylist = [('vnf-package', [self._vnf_package['vnf_packages'] [0]['id']])] parsed_args = self.check_parser(self.delete_vnf_package, arglist, verifylist) self._mock_request_url_for_delete(0) result = self.delete_vnf_package.take_action(parsed_args) self.assertIsNone(result) def test_delete_multiple_vnf_package(self): arglist = [] for vnf_pkg in self._vnf_package['vnf_packages']: arglist.append(vnf_pkg['id']) verifylist = [('vnf-package', arglist)] parsed_args = self.check_parser(self.delete_vnf_package, arglist, verifylist) for i in range(0, 3): self._mock_request_url_for_delete(i) result = self.delete_vnf_package.take_action(parsed_args) self.assertIsNone(result) def test_delete_multiple_vnf_package_exception(self): arglist = [ self._vnf_package['vnf_packages'][0]['id'], 'xxxx-yyyy-zzzz', self._vnf_package['vnf_packages'][1]['id'], ] verifylist = [ ('vnf-package', arglist), ] parsed_args = self.check_parser(self.delete_vnf_package, arglist, verifylist) self._mock_request_url_for_delete(0) url = (self.url + '/vnfpkgm/v1/vnf_packages/' + 'xxxx-yyyy-zzzz') body = {"error": exceptions.NotFound('404')} self.requests_mock.register_uri('GET', url, body=body, status_code=404, headers=self.header) self._mock_request_url_for_delete(1) self.assertRaises(exceptions.CommandError, self.delete_vnf_package.take_action, parsed_args) @ddt.ddt class TestUploadVnfPackage(TestVnfPackage): # The new vnf package created. 
_vnf_package = vnf_package_fakes.vnf_package_obj( attrs={'userDefinedData': {'Test_key': 'Test_value'}}) def setUp(self): super(TestUploadVnfPackage, self).setUp() self.upload_vnf_package = vnf_package.UploadVnfPackage( self.app, self.app_args, cmd_name='vnf package upload') def test_upload_no_options(self): self.assertRaises(base.ParserException, self.check_parser, self.upload_vnf_package, [], []) def _mock_request_url_for_upload(self, method, status_code=202, body={}): if method == 'PUT': self.header = {'content-type': 'application/zip'} url = (self.url + '/vnfpkgm/v1/vnf_packages/' + self._vnf_package['id'] + '/package_content') else: url = (self.url + '/vnfpkgm/v1/vnf_packages/' + self._vnf_package['id'] + '/package_content/' 'upload_from_uri') self.requests_mock.register_uri(method, url, json=body, headers=self.header, status_code=status_code) def _get_arglist_and_verifylist(self, method, path): if method == 'path': arglist = [ self._vnf_package['id'], "--path", path ] verifylist = [ ('path', path), ('vnf_package', self._vnf_package['id']) ] else: arglist = [ self._vnf_package['id'], "--url", "http://uri:8000/test.zip", "--user-name", "Test_username", "--password", "Test_passoword", ] verifylist = [ ('url', "http://uri:8000/test.zip"), ('user_name', 'Test_username'), ('password', 'Test_passoword'), ('vnf_package', self._vnf_package['id']) ] return arglist, verifylist @ddt.data('path', 'url') def test_upload_vnf_package(self, method): path = None if method == 'path': zip_file, temp_dir = _create_zip() path = zip_file arglist, verifylist = self._get_arglist_and_verifylist(method, path) parsed_args = self.check_parser(self.upload_vnf_package, arglist, verifylist) with mock.patch.object(proxy_client.ClientBase, '_handle_fault_response') as m: if method == 'url': self._mock_request_url_for_upload('POST') self.upload_vnf_package.take_action(parsed_args) else: self._mock_request_url_for_upload('PUT') self.upload_vnf_package.take_action(parsed_args) # Delete temporary folder shutil.rmtree(temp_dir) # check no fault response is received self.assertNotCalled(m) def test_upload_vnf_package_with_conflict_error(self): # Scenario in which vnf package is already in on-boarded state zip_file, temp_dir = _create_zip() arglist, verifylist = self._get_arglist_and_verifylist('path', zip_file) parsed_args = self.check_parser(self.upload_vnf_package, arglist, verifylist) body = {"conflictingRequest": { "message": "VNF Package " + self._vnf_package['id'] + " onboarding state is not CREATED", "code": 409}} self._mock_request_url_for_upload('PUT', status_code=409, body=body) self.assertRaises(exceptions.TackerClientException, self.upload_vnf_package.take_action, parsed_args) # Delete temporary folder shutil.rmtree(temp_dir) def test_upload_vnf_package_failed_with_404_not_found(self): # Scenario in which vnf package is not found zip_file, temp_dir = _create_zip() arglist = [ 'dumy-id', "--path", zip_file ] verifylist = [ ('path', zip_file), ('vnf_package', 'dumy-id') ] parsed_args = self.check_parser(self.upload_vnf_package, arglist, verifylist) error_message = "Can not find requested vnf package: dummy-id" body = {"itemNotFound": {"message": error_message, "code": 404}} url = self.url + '/vnfpkgm/v1/vnf_packages/dumy-id/package_content' self.requests_mock.register_uri( 'PUT', url, json=body, status_code=404) exception = self.assertRaises( exceptions.TackerClientException, self.upload_vnf_package.take_action, parsed_args) self.assertEqual(error_message, exception.message) # Delete temporary folder 
shutil.rmtree(temp_dir) @ddt.ddt class TestUpdateVnfPackage(TestVnfPackage): def setUp(self): super(TestUpdateVnfPackage, self).setUp() self.update_vnf_package = vnf_package.UpdateVnfPackage( self.app, self.app_args, cmd_name='vnf package update') @ddt.data((["--user-data", 'Test_key=Test_value', "--operational-state", 'DISABLED'], [('user_data', {'Test_key': 'Test_value'}), ('operational_state', 'DISABLED')]), (["--user-data", 'Test_key=Test_value'], [('user_data', {'Test_key': 'Test_value'})]), (["--operational-state", 'DISABLED'], [('operational_state', 'DISABLED')])) @ddt.unpack def test_take_action(self, arglist, verifylist): vnf_package_obj = vnf_package_fakes.vnf_package_obj( onboarded_state=True) arglist.append(vnf_package_obj['id']) verifylist.append(('vnf_package', vnf_package_obj['id'])) parsed_args = self.check_parser(self.update_vnf_package, arglist, verifylist) url = os.path.join(self.url, 'vnfpkgm/v1/vnf_packages', vnf_package_obj['id']) fake_response = vnf_package_fakes.get_fake_update_vnf_package_obj( arglist) self.requests_mock.register_uri('PATCH', url, json=fake_response, headers=self.header) columns, data = self.update_vnf_package.take_action(parsed_args) self.assertCountEqual(_get_columns_vnf_package( vnf_package_obj=fake_response, action='update'), columns) self.assertListItemsEqual( vnf_package_fakes.get_vnf_package_data(fake_response), data) def test_update_no_options(self): self.assertRaises(base.ParserException, self.check_parser, self.update_vnf_package, [], []) def test_update_without_user_data_and_operational_state(self): vnf_package_obj = vnf_package_fakes.vnf_package_obj( onboarded_state=True) arglist = [vnf_package_obj['id']] verifylist = [('vnf_package', vnf_package_obj['id'])] parsed_args = self.check_parser(self.update_vnf_package, arglist, verifylist) self.assertRaises(SystemExit, self.update_vnf_package.take_action, parsed_args) @ddt.ddt class TestDownloadVnfPackage(TestVnfPackage): # The new vnf package created. 
_vnf_package = vnf_package_fakes.vnf_package_obj( attrs={'userDefinedData': {'Test_key': 'Test_value'}}) def setUp(self): super(TestDownloadVnfPackage, self).setUp() self.download_vnf_package = vnf_package.DownloadVnfPackage( self.app, self.app_args, cmd_name='vnf package download') def test_download_no_options(self): self.assertRaises(base.ParserException, self.check_parser, self.download_vnf_package, [], []) def _mock_request_url_for_download_vnfd(self, content_type, vnfd_data): self.header = {'content-type': content_type} url = os.path.join(self.url, 'vnfpkgm/v1/vnf_packages', self._vnf_package['id'], 'vnfd') if content_type == 'text/plain': self.requests_mock.register_uri('GET', url, headers=self.header, text=vnfd_data) else: self.requests_mock.register_uri('GET', url, headers=self.header, content=vnfd_data) def _get_arglist_and_verifylist(self, accept_type, file_name): arglist = [ self._vnf_package['id'], '--vnfd', '--type', accept_type, '--file', file_name ] verifylist = [ ('type', accept_type), ('vnfd', True), ('vnf_package', self._vnf_package['id']), ('file', file_name) ] return arglist, verifylist def test_download_vnfd_from_vnf_package_for_type_text_plain(self): test_file = ('./tackerclient/tests//unit/osc/v1/fixture_data/' 'sample_vnf_package/Definitions/' 'etsi_nfv_sol001_common_types.yaml') local_file = tempfile.NamedTemporaryFile(suffix='vnfd_data.yaml') vnfd_data = open(test_file, 'r').read() arglist, verifylist = self._get_arglist_and_verifylist( 'text/plain', local_file.name) parsed_args = self.check_parser(self.download_vnf_package, arglist, verifylist) self._mock_request_url_for_download_vnfd('text/plain', vnfd_data) self.download_vnf_package.take_action(parsed_args) self.assertTrue(filecmp.cmp(test_file, local_file.name), "Downloaded contents don't match test file") @ddt.data('application/zip', 'both') def test_download_vnfd_from_vnf_package(self, accept_type): test_file, temp_dir = _create_zip() # File in which VNFD data will be stored. # For testing purpose we are creating temporary zip file. local_file = tempfile.NamedTemporaryFile(suffix='vnfd_data.zip') vnfd_data = open(test_file, 'rb').read() arglist, verifylist = self._get_arglist_and_verifylist( accept_type, local_file.name) parsed_args = self.check_parser(self.download_vnf_package, arglist, verifylist) # When --type argument is set to 'both', then 'Accept' header in # request is set to 'text/plain,application/zip' now it is up to the # NFVO to choose the format to return for a single-file VNFD and for # a multi-file VNFD, a ZIP file shall be returned. Here we have taken # the example of multi-file vnfd hence its retuning zip file and # setting the 'Content-Type' as 'application/zip' in response header. self._mock_request_url_for_download_vnfd('application/zip', vnfd_data) self.download_vnf_package.take_action(parsed_args) self.assertTrue(filecmp.cmp(test_file, local_file.name), "Downloaded contents don't match test file") self.assertTrue(self._check_valid_zip_file(local_file.name)) shutil.rmtree(temp_dir) def _check_valid_zip_file(self, zip_file): with zipfile.ZipFile(zip_file) as zf: ret = zf.testzip() return False if ret else True @mock.patch('builtins.print') def test_download_vnfd_from_vnf_package_without_file_arg(self, mock_print): # --file argument is optional when --type is set to 'text/plain'. 
arglist = [ self._vnf_package['id'], '--vnfd', '--type', 'text/plain', ] verifylist = [ ('type', 'text/plain'), ('vnfd', True), ('vnf_package', self._vnf_package['id']), ] parsed_args = self.check_parser(self.download_vnf_package, arglist, verifylist) test_file = ('./tackerclient/tests//unit/osc/v1/fixture_data/' 'sample_vnf_package/Definitions/' 'etsi_nfv_sol001_common_types.yaml') vnfd_data = open(test_file, 'r').read() self._mock_request_url_for_download_vnfd('text/plain', vnfd_data) self.download_vnf_package.take_action(parsed_args) mock_print.assert_called_once_with(vnfd_data) @ddt.data('application/zip', 'both') def test_download_vnfd_from_vnf_package_failed_with_no_file_arg( self, accept_type): arglist = [ self._vnf_package['id'], '--vnfd', '--type', accept_type, ] verifylist = [ ('type', accept_type), ('vnfd', True), ('vnf_package', self._vnf_package['id']), ] parsed_args = self.check_parser(self.download_vnf_package, arglist, verifylist) with mock.patch.object(sys.stdout, "isatty") as mock_isatty: mock_isatty.return_value = True self.assertRaises(SystemExit, self.download_vnf_package.take_action, parsed_args) def test_download_vnf_package(self): file_name = 'vnf_package_data.zip' test_file, temp_dir = _create_zip() # file in which VNF Package data will be stored. # for testing purpose we are creating temporary zip file. local_file = tempfile.NamedTemporaryFile(suffix=file_name) vnf_package_data = open(test_file, 'rb').read() arglist = [ self._vnf_package['id'], '--file', local_file.name ] verifylist = [ ('vnf_package', self._vnf_package['id']), ('file', local_file.name) ] parsed_args = self.check_parser(self.download_vnf_package, arglist, verifylist) url = os.path.join(self.url, '/vnfpkgm/v1/vnf_packages', self._vnf_package['id'], 'package_content') self.requests_mock.register_uri( 'GET', url, headers={'content-type': 'application/zip'}, content=vnf_package_data) self.download_vnf_package.take_action(parsed_args) self.assertTrue(filecmp.cmp(test_file, local_file.name), "Downloaded contents don't match test file") self.assertTrue(self._check_valid_zip_file(local_file.name)) shutil.rmtree(temp_dir) @ddt.ddt class TestDownloadVnfPackageArtifact(TestVnfPackage): # The new vnf package created. 
_vnf_package = vnf_package_fakes.vnf_package_obj( attrs={'userDefinedData': {'Test_key': 'Test_value'}}) def setUp(self): super(TestDownloadVnfPackageArtifact, self).setUp() self.download_vnf_package_artifacts = vnf_package.\ DownloadVnfPackageArtifact( self.app, self.app_args, cmd_name='vnf package artifact download') def test_download_no_options(self): self.assertRaises(base.ParserException, self.check_parser, self.download_vnf_package_artifacts, [], []) def _mock_request_url_for_download_artifacts( self, artifact_path, artifact_data): self.header = {'content-type': 'text/plain'} url = os.path.join(self.url, 'vnfpkgm/v1/vnf_packages', self._vnf_package['id'], 'artifacts', artifact_path) self.requests_mock.register_uri('GET', url, headers=self.header, text=artifact_data) def _get_arglist_and_verifylist(self, localfile): arglist = [ self._vnf_package['id'], localfile.name[1:], '--file', localfile.name ] verifylist = [ ('vnf_package', self._vnf_package['id']), ('artifact_path', localfile.name[1:]), ('file', localfile.name) ] return arglist, verifylist def test_download_artifacts_from_vnf_package(self): test_file = ('./tackerclient/tests//unit/osc/v1/fixture_data/' 'sample_vnf_package_artifacts/Scripts/' 'install.sh') local_file = tempfile.NamedTemporaryFile(suffix='install.sh') artifact_data = open(test_file, 'r').read() arglist, verifylist = self._get_arglist_and_verifylist( local_file) parsed_args = self.check_parser( self.download_vnf_package_artifacts, arglist, verifylist) self._mock_request_url_for_download_artifacts( local_file.name[1:], artifact_data) self.download_vnf_package_artifacts.take_action(parsed_args) self.assertTrue(filecmp.cmp(test_file, local_file.name), "Downloaded contents don't match test file") ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/test_vnflcm.py0000664000175000017500000011620300000000000026330 0ustar00zuulzuul00000000000000# Copyright (C) 2020 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import copy from io import StringIO import os import sys from unittest import mock import ddt from oslo_utils.fixture import uuidsentinel from tackerclient.common import exceptions from tackerclient.osc import utils as tacker_osc_utils from tackerclient.osc.v1.vnflcm import vnflcm from tackerclient.tests.unit.osc import base from tackerclient.tests.unit.osc.v1.fixture_data import client from tackerclient.tests.unit.osc.v1 import vnflcm_fakes from tackerclient.v1_0 import client as proxy_client class TestVnfLcm(base.FixturedTestCase): client_fixture_class = client.ClientFixture def setUp(self): super(TestVnfLcm, self).setUp() self.url = client.TACKER_URL self.header = {'content-type': 'application/json'} self.app = mock.Mock() self.app_args = mock.Mock() self.client_manager = self.cs self.app.client_manager.tackerclient = self.client_manager def _get_columns_vnflcm(action='create'): columns = ['ID', 'Instantiation State', 'VNF Instance Description', 'VNF Instance Name', 'VNF Product Name', 'VNF Provider', 'VNF Software Version', 'VNFD ID', 'VNFD Version', 'Links', 'VNF Configurable Properties'] if action == 'show': columns.extend(['Instantiated Vnf Info', 'VIM Connection Info']) if action == 'list': columns = [ele for ele in columns if ele not in ['VNFD Version', 'VNF Instance Description', 'VNF Configurable Properties']] columns.remove('Links') return columns @ddt.ddt class TestCreateVnfLcm(TestVnfLcm): def setUp(self): super(TestCreateVnfLcm, self).setUp() self.create_vnf_lcm = vnflcm.CreateVnfLcm( self.app, self.app_args, cmd_name='vnflcm create') def test_create_no_args(self): self.assertRaises(base.ParserException, self.check_parser, self.create_vnf_lcm, [], []) @ddt.data({"optional_arguments": True, "instantiate": True}, {"optional_arguments": False, "instantiate": False}) @ddt.unpack def test_take_action(self, optional_arguments, instantiate): arglist = [uuidsentinel.vnf_package_vnfd_id] verifylist = [('vnfd_id', uuidsentinel.vnf_package_vnfd_id)] if optional_arguments: arglist.extend(['--name', 'test', '--description', 'test']) verifylist.extend([('name', 'test'), ('description', 'test')]) # command param if instantiate: param_file = ("./tackerclient/osc/v1/vnflcm/samples/" "instantiate_vnf_instance_param_sample.json") arglist.extend(['--I', param_file]) verifylist.append(('I', param_file)) parsed_args = self.check_parser(self.create_vnf_lcm, arglist, verifylist) json = vnflcm_fakes.vnf_instance_response() self.requests_mock.register_uri( 'POST', os.path.join(self.url, 'vnflcm/v1/vnf_instances'), json=json, headers=self.header) if instantiate: self.requests_mock.register_uri( 'POST', os.path.join(self.url, 'vnflcm/v1/vnf_instances', json['id'], 'instantiate'), json={}, headers=self.header) sys.stdout = buffer = StringIO() actual_columns, data = (self.create_vnf_lcm.take_action(parsed_args)) headers, attributes = vnflcm._get_columns(json) expected_message = ( 'VNF Instance ' + json['id'] + ' is created and instantiation ' 'request has been accepted.') if instantiate: self.assertEqual(expected_message, buffer.getvalue().strip()) self.assertCountEqual(_get_columns_vnflcm(), actual_columns) self.assertListItemsEqual(vnflcm_fakes.get_vnflcm_data( json, columns=attributes), data) class TestShowVnfLcm(TestVnfLcm): def setUp(self): super(TestShowVnfLcm, self).setUp() self.show_vnf_lcm = vnflcm.ShowVnfLcm( self.app, self.app_args, cmd_name='vnflcm show') def test_take_action(self): vnf_instance = vnflcm_fakes.vnf_instance_response( instantiation_state='INSTANTIATED') arglist = 
[vnf_instance['id']] verifylist = [('vnf_instance', vnf_instance['id'])] # command param parsed_args = self.check_parser(self.show_vnf_lcm, arglist, verifylist) self.requests_mock.register_uri( 'GET', os.path.join(self.url, 'vnflcm/v1/vnf_instances', vnf_instance['id']), json=vnf_instance, headers=self.header) columns, data = (self.show_vnf_lcm.take_action(parsed_args)) self.assertCountEqual(_get_columns_vnflcm(action='show'), columns) headers, attributes = vnflcm._get_columns(vnf_instance, action='show') self.assertListItemsEqual( vnflcm_fakes.get_vnflcm_data(vnf_instance, columns=attributes), data) class TestListVnfLcm(TestVnfLcm): def setUp(self): super(TestListVnfLcm, self).setUp() self.list_vnf_instance = vnflcm.ListVnfLcm( self.app, self.app_args, cmd_name='vnflcm list') def test_take_action(self): vnf_instances = vnflcm_fakes.create_vnf_instances(count=3) parsed_args = self.check_parser(self.list_vnf_instance, [], []) self.requests_mock.register_uri( 'GET', os.path.join(self.url, 'vnflcm/v1/vnf_instances'), json=vnf_instances, headers=self.header) actual_columns, data = self.list_vnf_instance.take_action(parsed_args) headers, columns = tacker_osc_utils.get_column_definitions( vnflcm._attr_map, long_listing=True) expected_data = [] for vnf_instance_obj in vnf_instances: expected_data.append(vnflcm_fakes.get_vnflcm_data( vnf_instance_obj, columns=columns, list_action=True)) self.assertCountEqual(_get_columns_vnflcm(action='list'), actual_columns) self.assertCountEqual(expected_data, list(data)) def test_take_action_with_pagination(self): vnf_instances = vnflcm_fakes.create_vnf_instances(count=3) next_links_num = 3 parsed_args = self.check_parser(self.list_vnf_instance, [], []) path = os.path.join(self.url, 'vnflcm/v1/vnf_instances') links = [0] * next_links_num link_headers = [0] * next_links_num for i in range(next_links_num): links[i] = ( '{base_url}?nextpage_opaque_marker={vnf_instance_id}'.format( base_url=path, vnf_instance_id=vnf_instances[i]['id'])) link_headers[i] = copy.deepcopy(self.header) link_headers[i]['Link'] = '<{link_url}>; rel="next"'.format( link_url=links[i]) self.requests_mock.register_uri( 'GET', path, json=[vnf_instances[0]], headers=link_headers[0]) self.requests_mock.register_uri( 'GET', links[0], json=[vnf_instances[1]], headers=link_headers[1]) self.requests_mock.register_uri( 'GET', links[1], json=[vnf_instances[2]], headers=link_headers[2]) self.requests_mock.register_uri( 'GET', links[2], json=[], headers=self.header) actual_columns, data = self.list_vnf_instance.take_action(parsed_args) headers, columns = tacker_osc_utils.get_column_definitions( vnflcm._attr_map, long_listing=True) expected_data = [] for vnf_instance_obj in vnf_instances: expected_data.append(vnflcm_fakes.get_vnflcm_data( vnf_instance_obj, columns=columns, list_action=True)) self.assertCountEqual(_get_columns_vnflcm(action='list'), actual_columns) self.assertCountEqual(expected_data, list(data)) class TestInstantiateVnfLcm(TestVnfLcm): def setUp(self): super(TestInstantiateVnfLcm, self).setUp() self.instantiate_vnf_lcm = vnflcm.InstantiateVnfLcm( self.app, self.app_args, cmd_name='vnflcm instantiate') def test_take_action(self): vnf_instance = vnflcm_fakes.vnf_instance_response() sample_param_file = ("./tackerclient/osc/v1/vnflcm/samples/" "instantiate_vnf_instance_param_sample.json") arglist = [vnf_instance['id'], sample_param_file] verifylist = [('vnf_instance', vnf_instance['id']), ('instantiation_request_file', sample_param_file)] # command param parsed_args = 
self.check_parser(self.instantiate_vnf_lcm, arglist, verifylist) url = os.path.join(self.url, 'vnflcm/v1/vnf_instances', vnf_instance['id'], 'instantiate') self.requests_mock.register_uri( 'POST', url, headers=self.header, json={}) sys.stdout = buffer = StringIO() with mock.patch.object(proxy_client.ClientBase, '_handle_fault_response') as m: self.instantiate_vnf_lcm.take_action(parsed_args) # check no fault response is received self.assertNotCalled(m) self.assertEqual( 'Instantiate request for VNF Instance ' + vnf_instance['id'] + ' has been accepted.', buffer.getvalue().strip()) def test_take_action_vnf_instance_not_found(self): vnf_instance = vnflcm_fakes.vnf_instance_response() sample_param_file = ("./tackerclient/osc/v1/vnflcm/samples/" "instantiate_vnf_instance_param_sample.json") arglist = [vnf_instance['id'], sample_param_file] verifylist = [('vnf_instance', vnf_instance['id']), ('instantiation_request_file', sample_param_file)] # command param parsed_args = self.check_parser(self.instantiate_vnf_lcm, arglist, verifylist) url = os.path.join(self.url, 'vnflcm/v1/vnf_instances', vnf_instance['id'], 'instantiate') self.requests_mock.register_uri( 'POST', url, headers=self.header, status_code=404, json={}) self.assertRaises(exceptions.TackerClientException, self.instantiate_vnf_lcm.take_action, parsed_args) def test_take_action_param_file_not_exists(self): vnf_instance = vnflcm_fakes.vnf_instance_response() sample_param_file = "./not_exists.json" arglist = [vnf_instance['id'], sample_param_file] verifylist = [('vnf_instance', vnf_instance['id']), ('instantiation_request_file', sample_param_file)] # command param parsed_args = self.check_parser(self.instantiate_vnf_lcm, arglist, verifylist) ex = self.assertRaises(exceptions.InvalidInput, self.instantiate_vnf_lcm.take_action, parsed_args) expected_msg = ("Invalid input: " "User does not have read privileges to it") self.assertEqual(expected_msg, str(ex)) @mock.patch("os.open") @mock.patch("os.access") def test_take_action_invalid_format_param_file(self, mock_open, mock_access): vnf_instance = vnflcm_fakes.vnf_instance_response() sample_param_file = "./invalid_param_file.json" arglist = [vnf_instance['id'], sample_param_file] verifylist = [('vnf_instance', vnf_instance['id']), ('instantiation_request_file', sample_param_file)] mock_open.return_value = "invalid_json_data" # command param parsed_args = self.check_parser(self.instantiate_vnf_lcm, arglist, verifylist) ex = self.assertRaises(exceptions.InvalidInput, self.instantiate_vnf_lcm.take_action, parsed_args) expected_msg = "Failed to load parameter file." 
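        # A rough sketch of why this scenario fails, assuming the command
        # decodes the parameter file as JSON (the exact loader is not shown
        # in this test): the mocked file content "invalid_json_data" cannot
        # be parsed, and the decode error is reported as InvalidInput with
        # the message asserted below, e.g.
        #
        #   import json
        #   json.loads("invalid_json_data")   # raises json.JSONDecodeError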
self.assertIn(expected_msg, str(ex)) @ddt.ddt class TestHealVnfLcm(TestVnfLcm): def setUp(self): super(TestHealVnfLcm, self).setUp() self.heal_vnf_lcm = vnflcm.HealVnfLcm( self.app, self.app_args, cmd_name='vnflcm heal') _heal_sample_param_file = ("./tackerclient/osc/v1/vnflcm/samples/" "heal_vnf_instance_param_sample.json") @ddt.data((['--cause', 'test-cause', "--vnfc-instance", 'vnfc-id-1', 'vnfc-id-2'], [('cause', 'test-cause'), ('vnfc_instance', ['vnfc-id-1', 'vnfc-id-2'])]), (['--cause', 'test-cause'], [('cause', 'test-cause')]), (["--vnfc-instance", 'vnfc-id-1', 'vnfc-id-2'], [('vnfc_instance', ['vnfc-id-1', 'vnfc-id-2'])]), (["--additional-param-file", _heal_sample_param_file], [('additional_param_file', _heal_sample_param_file)]), ([], [])) @ddt.unpack def test_take_action(self, arglist, verifylist): vnf_instance = vnflcm_fakes.vnf_instance_response() arglist.insert(0, vnf_instance['id']) verifylist.extend([('vnf_instance', vnf_instance['id'])]) # command param parsed_args = self.check_parser(self.heal_vnf_lcm, arglist, verifylist) url = os.path.join(self.url, 'vnflcm/v1/vnf_instances', vnf_instance['id'], 'heal') self.requests_mock.register_uri( 'POST', url, headers=self.header, json={}) sys.stdout = buffer = StringIO() result_error = self.heal_vnf_lcm.take_action(parsed_args) self.assertIsNone(result_error) actual_message = buffer.getvalue().strip() expected_message = ("Heal request for VNF Instance %s has been " "accepted.") % vnf_instance['id'] self.assertIn(expected_message, actual_message) def test_take_action_vnf_instance_not_found(self): vnf_instance = vnflcm_fakes.vnf_instance_response() arglist = [vnf_instance['id']] verifylist = [('vnf_instance', vnf_instance['id'])] # command param parsed_args = self.check_parser(self.heal_vnf_lcm, arglist, verifylist) url = os.path.join(self.url, 'vnflcm/v1/vnf_instances', vnf_instance['id'], 'heal') self.requests_mock.register_uri( 'POST', url, headers=self.header, status_code=404, json={}) self.assertRaises(exceptions.TackerClientException, self.heal_vnf_lcm.take_action, parsed_args) def test_take_action_param_file_not_exists(self): vnf_instance = vnflcm_fakes.vnf_instance_response() sample_param_file = "./not_exists.json" arglist = [vnf_instance['id'], '--additional-param-file', sample_param_file] verifylist = [('vnf_instance', vnf_instance['id']), ('additional_param_file', sample_param_file)] # command param parsed_args = self.check_parser(self.heal_vnf_lcm, arglist, verifylist) ex = self.assertRaises(exceptions.InvalidInput, self.heal_vnf_lcm.take_action, parsed_args) expected_msg = ("Invalid input: " "User does not have read privileges to it") self.assertEqual(expected_msg, str(ex)) @ddt.ddt class TestTerminateVnfLcm(TestVnfLcm): def setUp(self): super(TestTerminateVnfLcm, self).setUp() self.terminate_vnf_instance = vnflcm.TerminateVnfLcm( self.app, self.app_args, cmd_name='vnflcm terminate') @ddt.data({'termination_type': 'GRACEFUL', 'delete_vnf': True}, {'termination_type': 'FORCEFUL', 'delete_vnf': False}) @ddt.unpack def test_take_action(self, termination_type, delete_vnf): # argument 'delete_vnf' decides deletion of vnf instance post # termination. 
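        # The two ddt scenarios above roughly correspond to CLI invocations
        # such as the following (illustrative only; the command is actually
        # exercised through check_parser()/take_action() below, and the
        # 'openstack' entry point is assumed here):
        #
        #   openstack vnflcm terminate --termination-type GRACEFUL \
        #       --graceful-termination-timeout 60 --D <vnf-instance-id>
        #   openstack vnflcm terminate --termination-type FORCEFUL \
        #       <vnf-instance-id>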
vnf_instance = vnflcm_fakes.vnf_instance_response() arglist = ['--termination-type', termination_type, vnf_instance['id']] verifylist = [('termination_type', termination_type), ('vnf_instance', vnf_instance['id'])] if delete_vnf: arglist.extend(['--D']) verifylist.extend([('D', True)]) if termination_type == 'GRACEFUL': arglist.extend(['--graceful-termination-timeout', '60']) verifylist.append(('graceful_termination_timeout', 60)) parsed_args = self.check_parser(self.terminate_vnf_instance, arglist, verifylist) url = os.path.join(self.url, 'vnflcm/v1/vnf_instances', vnf_instance['id'], 'terminate') with mock.patch.object(proxy_client.ClientBase, '_handle_fault_response') as m: self.requests_mock.register_uri('POST', url, json={}, headers=self.header) if delete_vnf: self.requests_mock.register_uri( 'GET', os.path.join(self.url, 'vnflcm/v1/vnf_instances', vnf_instance['id']), json=vnf_instance, headers=self.header) self.requests_mock.register_uri( 'DELETE', os.path.join( self.url, 'vnflcm/v1/vnf_instances', vnf_instance['id']), json={}, headers=self.header) sys.stdout = buffer = StringIO() result = self.terminate_vnf_instance.take_action(parsed_args) actual_message = buffer.getvalue().strip() expected_message = ("Terminate request for VNF Instance '%s'" " has been accepted.") % vnf_instance['id'] self.assertIn(expected_message, actual_message) if delete_vnf: expected_message = ("VNF Instance '%s' is deleted successfully" % vnf_instance['id']) self.assertIn(expected_message, actual_message) self.assertIsNone(result) self.assertNotCalled(m) def test_take_action_terminate_and_delete_wait_failed(self): vnf_instance = vnflcm_fakes.vnf_instance_response() termination_type = 'GRACEFUL' arglist = ['--termination-type', termination_type, '--D', '--graceful-termination-timeout', '5', vnf_instance['id']] verifylist = [('termination_type', termination_type), ('D', True), ('graceful_termination_timeout', 5), ('vnf_instance', vnf_instance['id'])] parsed_args = self.check_parser(self.terminate_vnf_instance, arglist, verifylist) url = os.path.join(self.url, 'vnflcm/v1/vnf_instances', vnf_instance['id'], 'terminate') self.requests_mock.register_uri('POST', url, json={}, headers=self.header) # set the instantiateState to "INSTANTIATED", so that the # _wait_until_vnf_is_terminated will fail vnf_instance['instantiationState'] = 'INSTANTIATED' self.requests_mock.register_uri( 'GET', os.path.join(self.url, 'vnflcm/v1/vnf_instances', vnf_instance['id']), json=vnf_instance, headers=self.header) sys.stdout = buffer = StringIO() with mock.patch.object(self.app.client_manager.tackerclient, 'delete_vnf_instance') as mock_delete: result = self.assertRaises( exceptions.CommandError, self.terminate_vnf_instance.take_action, parsed_args) actual_message = buffer.getvalue().strip() # Terminate vnf instance verification expected_message = ("Terminate request for VNF Instance '%s'" " has been accepted.") % vnf_instance['id'] self.assertIn(expected_message, actual_message) # Verify it fails to wait for termination before delete expected_message = ("Couldn't verify vnf instance is terminated " "within '%(timeout)s' seconds. 
Unable to " "delete vnf instance %(id)s" % {'timeout': 15, 'id': vnf_instance['id']}) self.assertIn(expected_message, str(result)) self.assertNotCalled(mock_delete) def test_terminate_no_options(self): self.assertRaises(base.ParserException, self.check_parser, self.terminate_vnf_instance, [], []) def test_take_action_vnf_instance_not_found(self): vnf_instance = vnflcm_fakes.vnf_instance_response() termination_type = 'GRACEFUL' arglist = ['--termination-type', termination_type, '--D', '--graceful-termination-timeout', '5', vnf_instance['id']] verifylist = [('termination_type', termination_type), ('D', True), ('graceful_termination_timeout', 5), ('vnf_instance', vnf_instance['id'])] parsed_args = self.check_parser(self.terminate_vnf_instance, arglist, verifylist) url = os.path.join(self.url, 'vnflcm/v1/vnf_instances', vnf_instance['id'], 'terminate') self.requests_mock.register_uri('POST', url, headers=self.header, status_code=404, json={}) self.assertRaises(exceptions.TackerClientException, self.terminate_vnf_instance.take_action, parsed_args) class TestDeleteVnfLcm(TestVnfLcm): def setUp(self): super(TestDeleteVnfLcm, self).setUp() self.delete_vnf_instance = vnflcm.DeleteVnfLcm( self.app, self.app_args, cmd_name='vnflcm delete') # Vnf Instance to delete self.vnf_instances = vnflcm_fakes.create_vnf_instances(count=3) def _mock_request_url_for_delete(self, vnf_index): url = os.path.join(self.url, 'vnflcm/v1/vnf_instances', self.vnf_instances[vnf_index]['id']) json = self.vnf_instances[vnf_index] self.requests_mock.register_uri('GET', url, json=json, headers=self.header) self.requests_mock.register_uri('DELETE', url, headers=self.header, json={}) def test_delete_one_vnf_instance(self): arglist = [self.vnf_instances[0]['id']] verifylist = [('vnf_instances', [self.vnf_instances[0]['id']])] parsed_args = self.check_parser(self.delete_vnf_instance, arglist, verifylist) self._mock_request_url_for_delete(0) sys.stdout = buffer = StringIO() result = self.delete_vnf_instance.take_action(parsed_args) self.assertIsNone(result) self.assertEqual(("Vnf instance '%s' is deleted successfully") % self.vnf_instances[0]['id'], buffer.getvalue().strip()) def test_delete_multiple_vnf_instance(self): arglist = [] for vnf_pkg in self.vnf_instances: arglist.append(vnf_pkg['id']) verifylist = [('vnf_instances', arglist)] parsed_args = self.check_parser(self.delete_vnf_instance, arglist, verifylist) for i in range(0, 3): self._mock_request_url_for_delete(i) sys.stdout = buffer = StringIO() result = self.delete_vnf_instance.take_action(parsed_args) self.assertIsNone(result) self.assertEqual('All specified vnf instances are deleted ' 'successfully', buffer.getvalue().strip()) def test_delete_multiple_vnf_instance_exception(self): arglist = [ self.vnf_instances[0]['id'], 'xxxx-yyyy-zzzz', self.vnf_instances[1]['id'], ] verifylist = [('vnf_instances', arglist)] parsed_args = self.check_parser(self.delete_vnf_instance, arglist, verifylist) self._mock_request_url_for_delete(0) url = os.path.join(self.url, 'vnflcm/v1/vnf_instances', 'xxxx-yyyy-zzzz') self.requests_mock.register_uri( 'GET', url, exc=exceptions.ConnectionFailed) self._mock_request_url_for_delete(1) exception = self.assertRaises(exceptions.CommandError, self.delete_vnf_instance.take_action, parsed_args) self.assertEqual('Failed to delete 1 of 3 vnf instances.', exception.message) class TestUpdateVnfLcm(TestVnfLcm): def setUp(self): super(TestUpdateVnfLcm, self).setUp() self.update_vnf_lcm = vnflcm.UpdateVnfLcm( self.app, self.app_args, cmd_name='vnflcm 
modify') def test_take_action(self): vnf_instance = vnflcm_fakes.vnf_instance_response() sample_param_file = ("./tackerclient/osc/v1/vnflcm/samples/" "update_vnf_instance_param_sample.json") arglist = [vnf_instance['id'], '--I', sample_param_file] verifylist = [('vnf_instance', vnf_instance['id']), ('I', sample_param_file)] # command param parsed_args = self.check_parser( self.update_vnf_lcm, arglist, verifylist) url = os.path.join( self.url, 'vnflcm/v1/vnf_instances', vnf_instance['id']) self.requests_mock.register_uri( 'PATCH', url, headers=self.header, json={}) sys.stdout = buffer = StringIO() self.update_vnf_lcm.take_action(parsed_args) actual_message = buffer.getvalue().strip() expected_message = ('Update vnf:' + vnf_instance['id']) self.assertEqual(expected_message, actual_message) def test_take_action_param_file_not_exists(self): vnf_instance = vnflcm_fakes.vnf_instance_response() sample_param_file = "./not_exists.json" arglist = [vnf_instance['id'], '--I', sample_param_file] verifylist = [('vnf_instance', vnf_instance['id']), ('I', sample_param_file)] # command param parsed_args = self.check_parser(self.update_vnf_lcm, arglist, verifylist) self.assertRaises(exceptions.InvalidInput, self.update_vnf_lcm.take_action, parsed_args) @ddt.ddt class TestScaleVnfLcm(TestVnfLcm): def setUp(self): super(TestScaleVnfLcm, self).setUp() self.scale_vnf_lcm = vnflcm.ScaleVnfLcm( self.app, self.app_args, cmd_name='vnflcm scale') @ddt.data('SCALE_IN', 'SCALE_OUT') def test_take_action(self, scale_type): vnf_instance = vnflcm_fakes.vnf_instance_response() sample_param_file = ("./tackerclient/osc/v1/vnflcm/samples/" "scale_vnf_instance_param_sample.json") arglist = [vnf_instance['id'], '--aspect-id', uuidsentinel.aspect_id, '--number-of-steps', '1', '--type', scale_type, '--additional-param-file', sample_param_file] verifylist = [('vnf_instance', vnf_instance['id']), ('aspect_id', uuidsentinel.aspect_id), ('number_of_steps', 1), ('type', scale_type), ('additional_param_file', sample_param_file)] # command param parsed_args = self.check_parser(self.scale_vnf_lcm, arglist, verifylist) url = os.path.join( self.url, 'vnflcm/v1/vnf_instances', vnf_instance['id'], 'scale') self.requests_mock.register_uri( 'POST', url, headers=self.header, json={}) sys.stdout = buffer = StringIO() self.scale_vnf_lcm.take_action(parsed_args) actual_message = buffer.getvalue().strip() expected_message = ("Scale request for VNF Instance %s has been " "accepted.") % vnf_instance['id'] self.assertEqual(expected_message, actual_message) @ddt.data('SCALE_IN', 'SCALE_OUT') def test_take_action_no_param_file(self, scale_type): vnf_instance = vnflcm_fakes.vnf_instance_response() arglist = [vnf_instance['id'], '--aspect-id', uuidsentinel.aspect_id, '--number-of-steps', '1', '--type', scale_type] verifylist = [('vnf_instance', vnf_instance['id']), ('aspect_id', uuidsentinel.aspect_id), ('number_of_steps', 1), ('type', scale_type)] parsed_args = self.check_parser(self.scale_vnf_lcm, arglist, verifylist) url = os.path.join(self.url, 'vnflcm/v1/vnf_instances', vnf_instance['id'], 'scale') self.requests_mock.register_uri( 'POST', url, headers=self.header, json={}) sys.stdout = buffer = StringIO() self.scale_vnf_lcm.take_action(parsed_args) actual_message = buffer.getvalue().strip() expected_message = ("Scale request for VNF Instance %s has been " "accepted.") % vnf_instance['id'] self.assertEqual(expected_message, actual_message) @ddt.data('SCALE_IN', 'SCALE_OUT') def test_take_action_param_file_not_exists(self, scale_type): vnf_instance = 
vnflcm_fakes.vnf_instance_response() sample_param_file = "./not_exists.json" arglist = [vnf_instance['id'], '--aspect-id', uuidsentinel.aspect_id, '--number-of-steps', '2', '--type', scale_type, '--additional-param-file', sample_param_file] verifylist = [('vnf_instance', vnf_instance['id']), ('aspect_id', uuidsentinel.aspect_id), ('number_of_steps', 2), ('type', scale_type), ('additional_param_file', sample_param_file)] # command param parsed_args = self.check_parser(self.scale_vnf_lcm, arglist, verifylist) ex = self.assertRaises(exceptions.InvalidInput, self.scale_vnf_lcm.take_action, parsed_args) expected_msg = ("Invalid input: " "User does not have read privileges to it") self.assertEqual(expected_msg, str(ex)) @ddt.data('SCALE_IN', 'SCALE_OUT') def test_take_action_vnf_instance_not_found(self, scale_type): vnf_instance = vnflcm_fakes.vnf_instance_response() sample_param_file = ("./tackerclient/osc/v1/vnflcm/samples/" "update_vnf_instance_param_sample.json") arglist = [vnf_instance['id'], '--aspect-id', uuidsentinel.aspect_id, '--number-of-steps', '3', '--type', scale_type, '--additional-param-file', sample_param_file] verifylist = [('vnf_instance', vnf_instance['id']), ('aspect_id', uuidsentinel.aspect_id), ('number_of_steps', 3), ('type', scale_type), ('additional_param_file', sample_param_file)] # command param parsed_args = self.check_parser(self.scale_vnf_lcm, arglist, verifylist) url = os.path.join( self.url, 'vnflcm/v1/vnf_instances', vnf_instance['id']) self.requests_mock.register_uri( 'POST', url, headers=self.header, status_code=404, json={}) self.assertRaises(exceptions.TackerClientException, self.scale_vnf_lcm.take_action, parsed_args) class TestChangeExtConnVnfLcm(TestVnfLcm): def setUp(self): super(TestChangeExtConnVnfLcm, self).setUp() self.change_ext_conn_vnf_lcm = vnflcm.ChangeExtConnVnfLcm( self.app, self.app_args, cmd_name='vnflcm change-ext-conn') def test_take_action(self): vnf_instance = vnflcm_fakes.vnf_instance_response() sample_param_file = ("./tackerclient/osc/v1/vnflcm/samples/" "change_ext_conn_vnf_instance_param_sample.json") arglist = [vnf_instance['id'], sample_param_file] verifylist = [('vnf_instance', vnf_instance['id']), ('request_file', sample_param_file)] # command param parsed_args = self.check_parser(self.change_ext_conn_vnf_lcm, arglist, verifylist) url = os.path.join(self.url, 'vnflcm/v1/vnf_instances', vnf_instance['id'], 'change_ext_conn') self.requests_mock.register_uri( 'POST', url, headers=self.header, json={}) sys.stdout = buffer = StringIO() with mock.patch.object(proxy_client.ClientBase, '_handle_fault_response') as m: self.change_ext_conn_vnf_lcm.take_action(parsed_args) # check no fault response is received self.assertNotCalled(m) self.assertEqual( ('Change External VNF Connectivity for VNF Instance {0} ' 'has been accepted.'.format(vnf_instance['id'])), buffer.getvalue().strip()) def test_take_action_vnf_instance_not_found(self): vnf_instance = vnflcm_fakes.vnf_instance_response() sample_param_file = ("./tackerclient/osc/v1/vnflcm/samples/" "change_ext_conn_vnf_instance_param_sample.json") arglist = [vnf_instance['id'], sample_param_file] verifylist = [('vnf_instance', vnf_instance['id']), ('request_file', sample_param_file)] # command param parsed_args = self.check_parser(self.change_ext_conn_vnf_lcm, arglist, verifylist) url = os.path.join(self.url, 'vnflcm/v1/vnf_instances', vnf_instance['id'], 'change_ext_conn') self.requests_mock.register_uri( 'POST', url, headers=self.header, status_code=404, json={}) 
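        # The fixture returns HTTP 404 for the change_ext_conn endpoint, so
        # the client's fault handling (ClientBase._handle_fault_response,
        # mocked out in the happy-path test above) is expected to raise
        # TackerClientException, which the assertion below checks.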
self.assertRaises(exceptions.TackerClientException, self.change_ext_conn_vnf_lcm.take_action, parsed_args) def test_take_action_param_file_not_exists(self): vnf_instance = vnflcm_fakes.vnf_instance_response() sample_param_file = "./not_exists.json" arglist = [vnf_instance['id'], sample_param_file] verifylist = [('vnf_instance', vnf_instance['id']), ('request_file', sample_param_file)] # command param parsed_args = self.check_parser( self.change_ext_conn_vnf_lcm, arglist, verifylist) ex = self.assertRaises( exceptions.InvalidInput, self.change_ext_conn_vnf_lcm.take_action, parsed_args) expected_msg = ("Invalid input: " "User does not have read privileges to it") self.assertEqual(expected_msg, str(ex)) @mock.patch("os.open") @mock.patch("os.access") def test_take_action_invalid_format_param_file(self, mock_open, mock_access): vnf_instance = vnflcm_fakes.vnf_instance_response() sample_param_file = "./invalid_param_file.json" arglist = [vnf_instance['id'], sample_param_file] verifylist = [('vnf_instance', vnf_instance['id']), ('request_file', sample_param_file)] mock_open.return_value = "invalid_json_data" # command param parsed_args = self.check_parser(self.change_ext_conn_vnf_lcm, arglist, verifylist) ex = self.assertRaises( exceptions.InvalidInput, self.change_ext_conn_vnf_lcm.take_action, parsed_args) expected_msg = "Failed to load parameter file." self.assertIn(expected_msg, str(ex)) class TestChangeVnfPkgVnfLcm(TestVnfLcm): def setUp(self): super(TestChangeVnfPkgVnfLcm, self).setUp() self.change_vnfpkg_vnf_lcm = vnflcm.ChangeVnfPkgVnfLcm( self.app, self.app_args, cmd_name='vnflcm change-vnfpkg') def test_take_action_with_v1_version(self): vnf_instance = vnflcm_fakes.vnf_instance_response() sample_param_file = ("./tackerclient/osc/v2/vnflcm/samples/" "change_vnfpkg_vnf_instance_param_sample.json") arglist = [vnf_instance['id'], sample_param_file] verifylist = [('vnf_instance', vnf_instance['id']), ('request_file', sample_param_file)] # command param parsed_args = self.check_parser(self.change_vnfpkg_vnf_lcm, arglist, verifylist) url = os.path.join(self.url, 'vnflcm/v1/vnf_instances', vnf_instance['id'], 'change_vnfpkg') self.requests_mock.register_uri( 'POST', url, headers=self.header, status_code=400, json={}) ex = self.assertRaises(exceptions.UnsupportedCommandVersion, self.change_vnfpkg_vnf_lcm.take_action, parsed_args) expected_msg = "This command is not supported in version 1" self.assertEqual(expected_msg, str(ex)) class TestVnfLcmV1(base.FixturedTestCase): client_fixture_class = client.ClientFixture api_version = '1' def setUp(self): super(TestVnfLcmV1, self).setUp() def test_client_v2(self): self.assertEqual(self.cs.vnf_lcm_client.headers, {'Version': '1.3.0'}) self.assertEqual(self.cs.vnf_lcm_client.vnf_instances_path, '/vnflcm/v1/vnf_instances') # check of other paths is omitted. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/test_vnflcm_op_occs.py0000664000175000017500000005662300000000000030046 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. import copy from io import StringIO import os import sys import ddt from oslo_utils.fixture import uuidsentinel from unittest import mock from tackerclient.common import exceptions from tackerclient.osc import utils as tacker_osc_utils from tackerclient.osc.v1.vnflcm import vnflcm_op_occs from tackerclient.tests.unit.osc import base from tackerclient.tests.unit.osc.v1.fixture_data import client from tackerclient.tests.unit.osc.v1 import vnflcm_op_occs_fakes def _get_columns_vnflcm_op_occs(action='show', parameter=None): if action == 'fail': return ['ID', 'Operation State', 'State Entered Time', 'Start Time', 'VNF Instance ID', 'Operation', 'Is Automatic Invocation', 'Is Cancel Pending', 'Error', 'Links'] elif action == 'list': if parameter is not None: return ['ID', 'Operation'] else: return ['ID', 'Operation State', 'VNF Instance ID', 'Operation'] else: return ['ID', 'Operation State', 'State Entered Time', 'Start Time', 'VNF Instance ID', 'Grant ID', 'Operation', 'Is Automatic Invocation', 'Operation Parameters', 'Is Cancel Pending', 'Cancel Mode', 'Error', 'Resource Changes', 'Changed Info', 'Changed External Connectivity', 'Links'] class TestVnfLcm(base.FixturedTestCase): client_fixture_class = client.ClientFixture def setUp(self): super(TestVnfLcm, self).setUp() self.url = client.TACKER_URL self.header = {'content-type': 'application/json'} self.app = mock.Mock() self.app_args = mock.Mock() self.client_manager = self.cs self.app.client_manager.tackerclient = self.client_manager @ddt.ddt class TestCancelVnfLcmOp(TestVnfLcm): def setUp(self): super(TestCancelVnfLcmOp, self).setUp() self.cancel_vnf_lcm = vnflcm_op_occs.CancelVnfLcmOp( self.app, self.app_args, cmd_name='vnflcm op cancel') @ddt.data('GRACEFUL', 'FORCEFUL') def test_take_action(self, cancel_mode): """take_action normal system test""" arglist = ['--cancel-mode', cancel_mode, uuidsentinel.vnf_lcm_op_occ_id] verifylist = [('cancel_mode', cancel_mode), ('vnf_lcm_op_occ_id', uuidsentinel.vnf_lcm_op_occ_id)] parsed_args = self.check_parser( self.cancel_vnf_lcm, arglist, verifylist) url = os.path.join( self.url, 'vnflcm/v1/vnf_lcm_op_occs', uuidsentinel.vnf_lcm_op_occ_id, 'cancel') self.requests_mock.register_uri( 'POST', url, headers=self.header, json={}) sys.stdout = buffer = StringIO() self.cancel_vnf_lcm.take_action(parsed_args) actual_message = buffer.getvalue().strip() expected_message = ( 'Cancel request for LCM operation ' + uuidsentinel.vnf_lcm_op_occ_id + ' has been accepted') self.assertEqual(expected_message, actual_message) def test_terminate_no_options(self): self.assertRaises(base.ParserException, self.check_parser, self.cancel_vnf_lcm, [], []) def test_take_action_vnf_lcm_op_occ_id_not_found(self): """take_action abnomaly system test""" arglist = [uuidsentinel.vnf_lcm_op_occ_id] verifylist = [('vnf_lcm_op_occ_id', uuidsentinel.vnf_lcm_op_occ_id)] parsed_args = self.check_parser( self.cancel_vnf_lcm, arglist, verifylist) url = os.path.join( self.url, 'vnflcm/v1/vnf_lcm_op_occs', uuidsentinel.vnf_lcm_op_occ_id, 'cancel') self.requests_mock.register_uri( 'POST', url, headers=self.header, status_code=404, json={}) self.assertRaises(exceptions.TackerClientException, self.cancel_vnf_lcm.take_action, parsed_args) class TestRollbackVnfLcmOp(TestVnfLcm): def setUp(self): super(TestRollbackVnfLcmOp, self).setUp() self.rollback_vnf_lcm = vnflcm_op_occs.RollbackVnfLcmOp( self.app, self.app_args, cmd_name='vnflcm op rollback') 
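    # A minimal sketch of the server interaction mocked out by the tests
    # below, assuming a plain requests session against the fixture's
    # TACKER_URL (hypothetical code, for illustration only):
    #
    #   import requests
    #   resp = requests.post(
    #       TACKER_URL + '/vnflcm/v1/vnf_lcm_op_occs/' + op_occ_id +
    #       '/rollback')
    #   # success -> the command prints "Rollback request for LCM operation
    #   #            <id> has been accepted"
    #   # 404     -> surfaced by the client as TackerClientException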
def test_take_action(self): """take_action normal system test""" arglist = [uuidsentinel.vnf_lcm_op_occ_id] verifylist = [('vnf_lcm_op_occ_id', uuidsentinel.vnf_lcm_op_occ_id)] parsed_args = self.check_parser( self.rollback_vnf_lcm, arglist, verifylist) url = os.path.join( self.url, 'vnflcm/v1/vnf_lcm_op_occs', uuidsentinel.vnf_lcm_op_occ_id, 'rollback') self.requests_mock.register_uri( 'POST', url, headers=self.header, json={}) sys.stdout = buffer = StringIO() self.rollback_vnf_lcm.take_action(parsed_args) actual_message = buffer.getvalue().strip() expected_message = ( 'Rollback request for LCM operation ' + uuidsentinel.vnf_lcm_op_occ_id + ' has been accepted') self.assertEqual(expected_message, actual_message) def test_take_action_vnf_lcm_op_occ_id_not_found(self): """take_action abnomaly system test""" arglist = [uuidsentinel.vnf_lcm_op_occ_id] verifylist = [('vnf_lcm_op_occ_id', uuidsentinel.vnf_lcm_op_occ_id)] parsed_args = self.check_parser( self.rollback_vnf_lcm, arglist, verifylist) url = os.path.join( self.url, 'vnflcm/v1/vnf_lcm_op_occs', uuidsentinel.vnf_lcm_op_occ_id, 'rollback') self.requests_mock.register_uri( 'POST', url, headers=self.header, status_code=404, json={}) self.assertRaises(exceptions.TackerClientException, self.rollback_vnf_lcm.take_action, parsed_args) class TestFailVnfLcmOp(TestVnfLcm): def setUp(self): super(TestFailVnfLcmOp, self).setUp() self.fail_vnf_lcm = vnflcm_op_occs.FailVnfLcmOp( self.app, self.app_args, cmd_name='vnflcm op fail') def test_take_action(self): """Test of take_action()""" vnflcm_op_occ = vnflcm_op_occs_fakes.vnflcm_op_occ_response( action='fail') arg_list = [vnflcm_op_occ['id']] verify_list = [('vnf_lcm_op_occ_id', vnflcm_op_occ['id'])] # command param parsed_args = self.check_parser( self.fail_vnf_lcm, arg_list, verify_list) url = os.path.join( self.url, 'vnflcm/v1/vnf_lcm_op_occs', vnflcm_op_occ['id'], 'fail') self.requests_mock.register_uri( 'POST', url, headers=self.header, json=vnflcm_op_occ) columns, data = (self.fail_vnf_lcm.take_action(parsed_args)) expected_columns = _get_columns_vnflcm_op_occs(action='fail') self.assertCountEqual(expected_columns, columns) def test_take_action_vnf_lcm_op_occ_id_not_found(self): """Test if vnf-lcm-op-occ-id does not find""" arg_list = [uuidsentinel.vnf_lcm_op_occ_id] verify_list = [('vnf_lcm_op_occ_id', uuidsentinel.vnf_lcm_op_occ_id)] # command param parsed_args = self.check_parser( self.fail_vnf_lcm, arg_list, verify_list) url = os.path.join( self.url, 'vnflcm/v1/vnf_lcm_op_occs', uuidsentinel.vnf_lcm_op_occ_id, 'fail') self.requests_mock.register_uri( 'POST', url, headers=self.header, status_code=404, json={}) self.assertRaises(exceptions.TackerClientException, self.fail_vnf_lcm.take_action, parsed_args) def test_take_action_vnf_lcm_op_occ_state_is_conflict(self): """Test if vnf-lcm-op-occ state is conflict""" arg_list = [uuidsentinel.vnf_lcm_op_occ_id] verify_list = [('vnf_lcm_op_occ_id', uuidsentinel.vnf_lcm_op_occ_id)] # command param parsed_args = self.check_parser( self.fail_vnf_lcm, arg_list, verify_list) url = os.path.join( self.url, 'vnflcm/v1/vnf_lcm_op_occs', uuidsentinel.vnf_lcm_op_occ_id, 'fail') self.requests_mock.register_uri( 'POST', url, headers=self.header, status_code=409, json={}) self.assertRaises(exceptions.TackerClientException, self.fail_vnf_lcm.take_action, parsed_args) def test_take_action_vnf_lcm_op_occ_internal_server_error(self): """Test if request is internal server error""" arg_list = [uuidsentinel.vnf_lcm_op_occ_id] verify_list = [('vnf_lcm_op_occ_id', 
uuidsentinel.vnf_lcm_op_occ_id)] # command param parsed_args = self.check_parser( self.fail_vnf_lcm, arg_list, verify_list) url = os.path.join( self.url, 'vnflcm/v1/vnf_lcm_op_occs', uuidsentinel.vnf_lcm_op_occ_id, 'fail') self.requests_mock.register_uri( 'POST', url, headers=self.header, status_code=500, json={}) self.assertRaises(exceptions.TackerClientException, self.fail_vnf_lcm.take_action, parsed_args) def test_take_action_vnf_lcm_op_occ_missing_vnf_lcm_op_occ_id_argument( self): """Test if vnflcm_op_occ_id is not provided""" arg_list = [] verify_list = [('vnf_lcm_op_occ_id', arg_list)] self.assertRaises(base.ParserException, self.check_parser, self.fail_vnf_lcm, arg_list, verify_list) class TestRetryVnfLcmOp(TestVnfLcm): def setUp(self): super(TestRetryVnfLcmOp, self).setUp() self.retry_vnf_lcm = vnflcm_op_occs.RetryVnfLcmOp( self.app, self.app_args, cmd_name='vnflcm op retry') def test_take_action(self): """Test of take_action()""" arg_list = [uuidsentinel.vnf_lcm_op_occ_id] verify_list = [('vnf_lcm_op_occ_id', uuidsentinel.vnf_lcm_op_occ_id)] # command param parsed_args = self.check_parser( self.retry_vnf_lcm, arg_list, verify_list) url = os.path.join( self.url, 'vnflcm/v1/vnf_lcm_op_occs', uuidsentinel.vnf_lcm_op_occ_id, 'retry') self.requests_mock.register_uri( 'POST', url, headers=self.header, json={}) sys.stdout = buffer = StringIO() self.retry_vnf_lcm.take_action(parsed_args) actual_message = buffer.getvalue().strip() expected_message = ( 'Retry request for LCM operation ' + uuidsentinel.vnf_lcm_op_occ_id + ' has been accepted') self.assertEqual(expected_message, actual_message) def test_take_action_vnf_lcm_op_occ_id_not_found(self): """Test if vnf-lcm-op-occ-id is not found.""" arglist = [uuidsentinel.vnf_lcm_op_occ_id] verifylist = [('vnf_lcm_op_occ_id', uuidsentinel.vnf_lcm_op_occ_id)] # command param parsed_args = self.check_parser( self.retry_vnf_lcm, arglist, verifylist) url = os.path.join( self.url, 'vnflcm/v1/vnf_lcm_op_occs', uuidsentinel.vnf_lcm_op_occ_id, 'retry') self.requests_mock.register_uri( 'POST', url, headers=self.header, status_code=404, json={}) self.assertRaises(exceptions.TackerClientException, self.retry_vnf_lcm.take_action, parsed_args) def test_take_action_vnf_lcm_op_occ_state_is_conflict(self): """Test if vnf-lcm-op-occ state is conflict""" arg_list = [uuidsentinel.vnf_lcm_op_occ_id] verify_list = [('vnf_lcm_op_occ_id', uuidsentinel.vnf_lcm_op_occ_id)] # command param parsed_args = self.check_parser( self.retry_vnf_lcm, arg_list, verify_list) url = os.path.join( self.url, 'vnflcm/v1/vnf_lcm_op_occs', uuidsentinel.vnf_lcm_op_occ_id, 'retry') self.requests_mock.register_uri( 'POST', url, headers=self.header, status_code=409, json={}) self.assertRaises(exceptions.TackerClientException, self.retry_vnf_lcm.take_action, parsed_args) def test_take_action_vnf_lcm_op_occ_internal_server_error(self): """Test if request is internal server error""" arg_list = [uuidsentinel.vnf_lcm_op_occ_id] verify_list = [('vnf_lcm_op_occ_id', uuidsentinel.vnf_lcm_op_occ_id)] # command param parsed_args = self.check_parser( self.retry_vnf_lcm, arg_list, verify_list) url = os.path.join( self.url, 'vnflcm/v1/vnf_lcm_op_occs', uuidsentinel.vnf_lcm_op_occ_id, 'retry') self.requests_mock.register_uri( 'POST', url, headers=self.header, status_code=500, json={}) self.assertRaises(exceptions.TackerClientException, self.retry_vnf_lcm.take_action, parsed_args) def test_take_action_vnf_lcm_op_occ_missing_vnf_lcm_op_occ_id_argument( self): """Test if vnflcm_op_occ_id is not provided""" 
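        # With no positional vnf_lcm_op_occ_id the argparse layer rejects the
        # command line, and check_parser() converts that into
        # base.ParserException, which is what the assertion below expects.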
arg_list = [] verify_list = [('vnf_lcm_op_occ_id', arg_list)] self.assertRaises(base.ParserException, self.check_parser, self.retry_vnf_lcm, arg_list, verify_list) class TestListVnfLcmOp(TestVnfLcm): def setUp(self): super(TestListVnfLcmOp, self).setUp() self.list_vnflcm_op_occ = vnflcm_op_occs.ListVnfLcmOp( self.app, self.app_args, cmd_name='vnflcm op list') def test_take_action(self): vnflcm_op_occs_obj = vnflcm_op_occs_fakes.create_vnflcm_op_occs( count=3) parsed_args = self.check_parser(self.list_vnflcm_op_occ, [], []) self.requests_mock.register_uri( 'GET', os.path.join(self.url, 'vnflcm/v1/vnf_lcm_op_occs'), json=vnflcm_op_occs_obj, headers=self.header) actual_columns, data = self.list_vnflcm_op_occ.take_action(parsed_args) headers, columns = tacker_osc_utils.get_column_definitions( self.list_vnflcm_op_occ.get_attributes(), long_listing=True) expected_data = [] for vnflcm_op_occ_obj_idx in vnflcm_op_occs_obj: expected_data.append(vnflcm_op_occs_fakes.get_vnflcm_op_occ_data( vnflcm_op_occ_obj_idx, columns=columns)) self.assertCountEqual(_get_columns_vnflcm_op_occs(action='list'), actual_columns) self.assertCountEqual(expected_data, list(data)) def test_take_action_with_filter(self): vnflcm_op_occs_obj = vnflcm_op_occs_fakes.create_vnflcm_op_occs( count=3) parsed_args = self.check_parser( self.list_vnflcm_op_occ, ["--filter", '(eq,operationState,STARTING)'], [('filter', '(eq,operationState,STARTING)')]) self.requests_mock.register_uri( 'GET', os.path.join( self.url, 'vnflcm/v1/vnf_lcm_op_occs?' 'filter=(eq,operationState,STARTING)'), json=vnflcm_op_occs_obj, headers=self.header) actual_columns, data = self.list_vnflcm_op_occ.take_action(parsed_args) headers, columns = tacker_osc_utils.get_column_definitions( self.list_vnflcm_op_occ.get_attributes(), long_listing=True) expected_data = [] for vnflcm_op_occ_obj_idx in vnflcm_op_occs_obj: expected_data.append(vnflcm_op_occs_fakes.get_vnflcm_op_occ_data( vnflcm_op_occ_obj_idx, columns=columns)) self.assertCountEqual(_get_columns_vnflcm_op_occs(action='list'), actual_columns) self.assertListItemsEqual(expected_data, list(data)) def test_take_action_with_incorrect_filter(self): parsed_args = self.check_parser( self.list_vnflcm_op_occ, ["--filter", '(operationState)'], [('filter', '(operationState)')]) url = os.path.join( self.url, 'vnflcm/v1/vnf_lcm_op_occs?filter=(operationState)') self.requests_mock.register_uri( 'POST', url, headers=self.header, status_code=400, json={}) self.assertRaises(exceptions.TackerClientException, self.list_vnflcm_op_occ.take_action, parsed_args) def test_take_action_internal_server_error(self): parsed_args = self.check_parser( self.list_vnflcm_op_occ, ["--filter", '(eq,operationState,STARTING)'], [('filter', '(eq,operationState,STARTING)')]) url = os.path.join( self.url, 'vnflcm/v1/vnf_lcm_op_occs?' 'filter=(eq,operationState,STARTING)') self.requests_mock.register_uri( 'POST', url, headers=self.header, status_code=500, json={}) self.assertRaises(exceptions.TackerClientException, self.list_vnflcm_op_occ.take_action, parsed_args) def test_take_action_with_exclude_fields(self): vnflcm_op_occs_obj = vnflcm_op_occs_fakes.create_vnflcm_op_occs( count=3) parsed_args = self.check_parser( self.list_vnflcm_op_occ, ["--exclude-fields", 'VNF Instance ID,Operation State'], [('exclude_fields', 'VNF Instance ID,Operation State')]) self.requests_mock.register_uri( 'GET', os.path.join( self.url, 'vnflcm/v1/vnf_lcm_op_occs?' 
'exclude-fields=VNF Instance ID,Operation State'), json=vnflcm_op_occs_obj, headers=self.header) actual_columns, data = self.list_vnflcm_op_occ.take_action(parsed_args) headers, columns = tacker_osc_utils.get_column_definitions( self.list_vnflcm_op_occ.get_attributes( exclude=['VNF Instance ID', 'Operation State']), long_listing=True) expected_data = [] for vnflcm_op_occ_obj_idx in vnflcm_op_occs_obj: expected_data.append( vnflcm_op_occs_fakes.get_vnflcm_op_occ_data( vnflcm_op_occ_obj_idx, columns=columns)) self.assertCountEqual(_get_columns_vnflcm_op_occs( action='list', parameter="exclude_fields"), actual_columns) self.assertListItemsEqual(expected_data, list(data)) def test_take_action_with_pagination(self): next_links_num = 3 vnflcm_op_occs_obj = vnflcm_op_occs_fakes.create_vnflcm_op_occs( count=next_links_num) parsed_args = self.check_parser(self.list_vnflcm_op_occ, [], []) path = os.path.join(self.url, 'vnflcm/v1/vnf_lcm_op_occs') links = [0] * next_links_num link_headers = [0] * next_links_num for i in range(next_links_num): links[i] = ( '{base_url}?nextpage_opaque_marker={vnflcm_op_occ_id}'.format( base_url=path, vnflcm_op_occ_id=vnflcm_op_occs_obj[i]['id'])) link_headers[i] = copy.deepcopy(self.header) link_headers[i]['Link'] = '<{link_url}>; rel="next"'.format( link_url=links[i]) self.requests_mock.register_uri( 'GET', path, json=[vnflcm_op_occs_obj[0]], headers=link_headers[0]) self.requests_mock.register_uri( 'GET', links[0], json=[vnflcm_op_occs_obj[1]], headers=link_headers[1]) self.requests_mock.register_uri( 'GET', links[1], json=[vnflcm_op_occs_obj[2]], headers=link_headers[2]) self.requests_mock.register_uri( 'GET', links[2], json=[], headers=self.header) actual_columns, data = self.list_vnflcm_op_occ.take_action(parsed_args) headers, columns = tacker_osc_utils.get_column_definitions( self.list_vnflcm_op_occ.get_attributes(), long_listing=True) expected_data = [] for vnflcm_op_occ_obj_idx in vnflcm_op_occs_obj: expected_data.append(vnflcm_op_occs_fakes.get_vnflcm_op_occ_data( vnflcm_op_occ_obj_idx, columns=columns)) self.assertCountEqual(_get_columns_vnflcm_op_occs(action='list'), actual_columns) self.assertCountEqual(expected_data, list(data)) class TestShowVnfLcmOp(TestVnfLcm): def setUp(self): super(TestShowVnfLcmOp, self).setUp() self.show_vnf_lcm_op_occs = vnflcm_op_occs.ShowVnfLcmOp( self.app, self.app_args, cmd_name='vnflcm op show') def test_take_action(self): """Test of take_action()""" vnflcm_op_occ = vnflcm_op_occs_fakes.vnflcm_op_occ_response() arglist = [vnflcm_op_occ['id']] verifylist = [('vnf_lcm_op_occ_id', vnflcm_op_occ['id'])] # command param parsed_args = self.check_parser( self.show_vnf_lcm_op_occs, arglist, verifylist) url = os.path.join( self.url, 'vnflcm/v1/vnf_lcm_op_occs', vnflcm_op_occ['id']) self.requests_mock.register_uri( 'GET', url, headers=self.header, json=vnflcm_op_occ) columns, data = (self.show_vnf_lcm_op_occs.take_action(parsed_args)) self.assertCountEqual(_get_columns_vnflcm_op_occs(), columns) def test_take_action_vnf_lcm_op_occ_id_not_found(self): """Test if vnf-lcm-op-occ-id does not find.""" arglist = [uuidsentinel.vnf_lcm_op_occ_id] verifylist = [('vnf_lcm_op_occ_id', uuidsentinel.vnf_lcm_op_occ_id)] # command param parsed_args = self.check_parser( self.show_vnf_lcm_op_occs, arglist, verifylist) url = os.path.join( self.url, 'vnflcm/v1/vnf_lcm_op_occs', uuidsentinel.vnf_lcm_op_occ_id) self.requests_mock.register_uri( 'GET', url, headers=self.header, status_code=404, json={}) self.assertRaises(exceptions.TackerClientException, 
self.show_vnf_lcm_op_occs.take_action, parsed_args) def test_take_action_internal_server_error(self): """Test for internal server error.""" arglist = [uuidsentinel.vnf_lcm_op_occ_id] verifylist = [('vnf_lcm_op_occ_id', uuidsentinel.vnf_lcm_op_occ_id)] # command param parsed_args = self.check_parser( self.show_vnf_lcm_op_occs, arglist, verifylist) url = os.path.join( self.url, 'vnflcm/v1/vnf_lcm_op_occs', uuidsentinel.vnf_lcm_op_occ_id) self.requests_mock.register_uri( 'GET', url, headers=self.header, status_code=500, json={}) self.assertRaises(exceptions.TackerClientException, self.show_vnf_lcm_op_occs.take_action, parsed_args) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/test_vnflcm_subsc.py0000664000175000017500000002423200000000000027527 0ustar00zuulzuul00000000000000# Copyright (C) 2022 Nippon Telegraph and Telephone Corporation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy import os import sys from io import StringIO from tackerclient.common import exceptions from tackerclient.osc import utils as tacker_osc_utils from tackerclient.osc.v1.vnflcm import vnflcm_subsc from tackerclient.tests.unit.osc import base from tackerclient.tests.unit.osc.v1 import test_vnflcm from tackerclient.tests.unit.osc.v1 import vnflcm_subsc_fakes def _get_columns_vnflcm_subsc(action=None): columns = ['ID', 'Filter', 'Callback URI', 'Links'] if action == 'list': columns = [ele for ele in columns if ele not in ['Filter', 'Links']] return columns class TestCreateLccnSubscription(test_vnflcm.TestVnfLcm): def setUp(self): super(TestCreateLccnSubscription, self).setUp() self.create_subscription = vnflcm_subsc.CreateLccnSubscription( self.app, self.app_args, cmd_name='vnflcm subsc create') def test_create_no_args(self): self.assertRaises(base.ParserException, self.check_parser, self.create_subscription, [], []) def test_take_action(self): subscription = vnflcm_subsc_fakes.lccn_subsc_response() sample_param_file = ("./tackerclient/osc/v1/vnflcm/samples/" "create_lccn_subscription_param_sample.json") arglist = [sample_param_file] verifylist = [('create_request_file', sample_param_file)] # command param parsed_args = self.check_parser(self.create_subscription, arglist, verifylist) self.requests_mock.register_uri( 'POST', os.path.join(self.url, 'vnflcm/v1/subscriptions'), json=subscription, headers=self.header) actual_columns, data = (self.create_subscription.take_action( parsed_args)) headers, attributes = vnflcm_subsc._get_columns(subscription) self.assertCountEqual(_get_columns_vnflcm_subsc(), actual_columns) self.assertListItemsEqual(vnflcm_subsc_fakes.get_subscription_data( subscription, columns=attributes), data) class TestListLccnSubscription(test_vnflcm.TestVnfLcm): def setUp(self): super(TestListLccnSubscription, self).setUp() self.list_subscription = vnflcm_subsc.ListLccnSubscription( self.app, self.app_args, cmd_name='vnflcm subsc list') def test_take_action(self): 
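        # This test registers GET /vnflcm/v1/subscriptions with three fake
        # subscriptions and checks that the list view keeps only the reduced
        # column set from _get_columns_vnflcm_subsc(action='list'), i.e.
        # 'ID' and 'Callback URI' ('Filter' and 'Links' are dropped for
        # listings).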
subscriptions = vnflcm_subsc_fakes.create_subscriptions(count=3) parsed_args = self.check_parser(self.list_subscription, [], []) self.requests_mock.register_uri( 'GET', os.path.join(self.url, 'vnflcm/v1/subscriptions'), json=subscriptions, headers=self.header) actual_columns, data = self.list_subscription.take_action(parsed_args) headers, columns = tacker_osc_utils.get_column_definitions( self.list_subscription.get_attributes(), long_listing=True) expected_data = [] for subscription_obj in subscriptions: expected_data.append(vnflcm_subsc_fakes.get_subscription_data( subscription_obj, columns=columns, list_action=True)) self.assertCountEqual(_get_columns_vnflcm_subsc(action='list'), actual_columns) self.assertCountEqual(expected_data, list(data)) def test_take_action_with_pagination(self): subscriptions = vnflcm_subsc_fakes.create_subscriptions(count=3) next_links_num = 3 path = os.path.join(self.url, 'vnflcm/v1/subscriptions') parsed_args = self.check_parser(self.list_subscription, [], []) links = [0] * next_links_num link_headers = [0] * next_links_num for i in range(next_links_num): links[i] = ( '{base_url}?nextpage_opaque_marker={subscription_id}'.format( base_url=path, subscription_id=subscriptions[i]['id'])) link_headers[i] = copy.deepcopy(self.header) link_headers[i]['Link'] = '<{link_url}>; rel="next"'.format( link_url=links[i]) self.requests_mock.register_uri( 'GET', path, json=[subscriptions[0]], headers=link_headers[0]) self.requests_mock.register_uri( 'GET', links[0], json=[subscriptions[1]], headers=link_headers[1]) self.requests_mock.register_uri( 'GET', links[1], json=[subscriptions[2]], headers=link_headers[2]) self.requests_mock.register_uri( 'GET', links[2], json=[], headers=self.header) actual_columns, data = self.list_subscription.take_action(parsed_args) headers, columns = tacker_osc_utils.get_column_definitions( self.list_subscription.get_attributes(), long_listing=True) expected_data = [] for subscription_obj in subscriptions: expected_data.append(vnflcm_subsc_fakes.get_subscription_data( subscription_obj, columns=columns, list_action=True)) self.assertCountEqual(_get_columns_vnflcm_subsc(action='list'), actual_columns) self.assertCountEqual(expected_data, list(data)) class TestShowLccnSubscription(test_vnflcm.TestVnfLcm): def setUp(self): super(TestShowLccnSubscription, self).setUp() self.show_subscription = vnflcm_subsc.ShowLccnSubscription( self.app, self.app_args, cmd_name='vnflcm subsc show') def test_take_action(self): subscription = vnflcm_subsc_fakes.lccn_subsc_response() arglist = [subscription['id']] verifylist = [('subscription_id', subscription['id'])] # command param parsed_args = self.check_parser(self.show_subscription, arglist, verifylist) self.requests_mock.register_uri( 'GET', os.path.join(self.url, 'vnflcm/v1/subscriptions', subscription['id']), json=subscription, headers=self.header) columns, data = (self.show_subscription.take_action(parsed_args)) self.assertCountEqual(_get_columns_vnflcm_subsc(), columns) headers, attributes = vnflcm_subsc._get_columns(subscription) self.assertListItemsEqual( vnflcm_subsc_fakes.get_subscription_data(subscription, columns=attributes), data) class TestDeleteLccnSubscription(test_vnflcm.TestVnfLcm): subscriptions = vnflcm_subsc_fakes.create_subscriptions(count=3) def setUp(self): super(TestDeleteLccnSubscription, self).setUp() self.delete_subscription = vnflcm_subsc.DeleteLccnSubscription( self.app, self.app_args, cmd_name='vnflcm subsc delete') def _mock_request_url_for_delete(self, subsc_index): url = 
os.path.join(self.url, 'vnflcm/v1/subscriptions', self.subscriptions[subsc_index]['id']) json = self.subscriptions[subsc_index] self.requests_mock.register_uri('GET', url, json=json, headers=self.header) self.requests_mock.register_uri('DELETE', url, headers=self.header, json={}) def test_delete_one_subscription(self): arglist = [self.subscriptions[0]['id']] verifylist = [('subscription_id', [self.subscriptions[0]['id']])] parsed_args = self.check_parser(self.delete_subscription, arglist, verifylist) self._mock_request_url_for_delete(0) sys.stdout = buffer = StringIO() result = self.delete_subscription.take_action(parsed_args) self.assertIsNone(result) self.assertEqual(("Lccn Subscription '%s' is deleted successfully") % self.subscriptions[0]['id'], buffer.getvalue().strip()) def test_delete_multiple_subscription(self): arglist = [] for subscription in self.subscriptions: arglist.append(subscription['id']) verifylist = [('subscription_id', arglist)] parsed_args = self.check_parser(self.delete_subscription, arglist, verifylist) for i in range(0, 3): self._mock_request_url_for_delete(i) sys.stdout = buffer = StringIO() result = self.delete_subscription.take_action(parsed_args) self.assertIsNone(result) self.assertEqual('All specified Lccn Subscriptions are deleted ' 'successfully', buffer.getvalue().strip()) def test_delete_multiple_subscription_exception(self): arglist = [ self.subscriptions[0]['id'], 'xxxx-yyyy-zzzz', self.subscriptions[1]['id'], ] verifylist = [('subscription_id', arglist)] parsed_args = self.check_parser(self.delete_subscription, arglist, verifylist) self._mock_request_url_for_delete(0) url = os.path.join(self.url, 'vnflcm/v1/subscriptions', 'xxxx-yyyy-zzzz') self.requests_mock.register_uri( 'GET', url, exc=exceptions.ConnectionFailed) self._mock_request_url_for_delete(1) exception = self.assertRaises(exceptions.CommandError, self.delete_subscription.take_action, parsed_args) self.assertEqual('Failed to delete 1 of 3 Lccn Subscriptions.', exception.message) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/vnf_package_fakes.py0000664000175000017500000001515700000000000027427 0ustar00zuulzuul00000000000000# Copyright (C) 2019 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_utils import uuidutils from tackerclient.osc import utils as tacker_osc_utils def vnf_package_obj(attrs=None, onboarded_state=False): """Create a fake vnf package. :param Dictionary attrs: A dictionary with all attributes :return: A FakeVnfPackage dict """ attrs = attrs or {} # Set default attributes. 
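    # Two shapes are built below: a minimal package in the "CREATED"
    # onboarding state and, when onboarded_state=True, a fully "ONBOARDED"
    # package carrying vnfd, software image and artifact metadata.
    # Illustrative usage (values taken from the defaults below):
    #
    #   pkg = vnf_package_obj()
    #   pkg['onboardingState']                         # 'CREATED'
    #   pkg = vnf_package_obj(onboarded_state=True)
    #   pkg['usageState']                              # 'IN_USE'
    #   pkg = vnf_package_obj(attrs={'id': 'my-id'})   # attrs override keys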
fake_vnf_package = {"id": "60a6ac16-b50d-4e92-964b-b3cf98c7cf5c", "_links": {"self": {"href": "string"}, "packageContent": {"href": "string"} }, "onboardingState": "CREATED", "operationalState": "DISABLED", "usageState": "NOT_IN_USE", "userDefinedData": {'key': 'value'}} if onboarded_state: fake_vnf_package = {"id": "60a6ac16-b50d-4e92-964b-b3cf98c7cf5c", "vnfdId": "string", "vnfProvider": "string", "vnfProductName": "string", "vnfSoftwareVersion": "string", "vnfdVersion": "string", "softwareImages": [ { "id": "string", "name": "string", "provider": "string", "version": "string", "checksum": { "algorithm": "string", "hash": "string" }, "containerFormat": "AKI", "diskFormat": "AKI", "createdAt": "2015-06-03T18:49:19.000000", "minDisk": '0', "minRam": '0', "size": '0', "userMetadata": {}, "imagePath": "string" } ], "checksum": { "algorithm": "string", "hash": "string" }, "onboardingState": "ONBOARDED", "operationalState": "ENABLED", "usageState": "IN_USE", "userDefinedData": {'key': 'value'}, "_links": { "self": { "href": "string" }, "vnfd": { "href": "string" }, "packageContent": { "href": "string" } }, "additionalArtifacts": [ { "artifactPath": "string", "metadata": {}, "checksum": { "algorithm": "string", "hash": "string" } }] } # Overwrite default attributes. fake_vnf_package.update(attrs) return fake_vnf_package def get_vnf_package_data(vnf_package_obj, **kwargs): """Get the vnf package data from a FakeVnfPackage dict object. :param vnf_package_obj: A FakeVnfPackage dict object :return: A list which may include the following values: [{'packageContent': {'href': 'string'}, 'self': {'href': 'string'}, 'vnfd': {'href': 'string'}}, '60a6ac16-b50d-4e92-964b-b3cf98c7cf5c', 'CREATED', 'DISABLED', 'NOT_IN_USE', {'Test_key': 'Test_value'}] """ complex_attributes = ['softwareImages', 'checksum', '_links', 'userDefinedData', 'additionalArtifacts'] for attribute in complex_attributes: if vnf_package_obj.get(attribute): vnf_package_obj.update( {attribute: tacker_osc_utils.FormatComplexDataColumn( vnf_package_obj[attribute])}) if kwargs.get('list_action'): # In case of List VNF packages we get empty string as data for # 'vnfProductName' if onboardingState is CREATED. Hence to match # up with actual data we are adding here empty string. if not vnf_package_obj.get('vnfProductName'): vnf_package_obj['vnfProductName'] = '' if kwargs.get('columns'): # return the list of data as per column order return tuple([vnf_package_obj[key] for key in kwargs.get('columns')]) return tuple([vnf_package_obj[key] for key in sorted( vnf_package_obj.keys())]) def create_vnf_packages(count=2, onboarded_vnf_package=False): """Create multiple fake vnf packages. 
:param Dictionary attrs: A dictionary with all attributes :param int count: The number of vnf_packages to fake :return: A list of fake vnf packages dictionary """ vnf_packages = [] for i in range(0, count): unique_id = uuidutils.generate_uuid() vnf_packages.append(vnf_package_obj( attrs={'id': unique_id}, onboarded_state=onboarded_vnf_package)) return {'vnf_packages': vnf_packages} def get_fake_update_vnf_package_obj(arglist): fake_update_vnf_package_dict = {} if '--user-data' in arglist: fake_update_vnf_package_dict.update( {"userDefinedData": {'Test_key': 'Test_value'}}) if '--operational-state' in arglist: fake_update_vnf_package_dict.update({ "operationalState": "DISABLED", }) return fake_update_vnf_package_dict ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/vnflcm_fakes.py0000664000175000017500000001314700000000000026445 0ustar00zuulzuul00000000000000# Copyright (C) 2020 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_utils.fixture import uuidsentinel from oslo_utils import uuidutils from tackerclient.osc import utils as tacker_osc_utils def vnf_instance_response(attrs=None, instantiation_state='NOT_INSTANTIATED'): """Create a fake vnf instance. :param Dictionary attrs: A dictionary with all attributes :return: A vnf instance dict """ attrs = attrs or {} # Set default attributes. 
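    # The default response models a NOT_INSTANTIATED VNF instance; passing
    # instantiation_state='INSTANTIATED' additionally fills in
    # vimConnectionInfo and instantiatedVnfInfo via the update() further
    # down. Illustrative usage (matching how the tests call this helper):
    #
    #   vnf = vnf_instance_response()
    #   vnf['instantiationState']            # 'NOT_INSTANTIATED'
    #   vnf = vnf_instance_response(instantiation_state='INSTANTIATED')
    #   'instantiatedVnfInfo' in vnf         # True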
dummy_vnf_instance = { "id": uuidsentinel.vnf_instance_id, "vnfInstanceName": "Fake-VNF-Instance", "vnfInstanceDescription": "Fake VNF", "vnfdId": uuidsentinel.vnf_package_vnfd_id, "vnfProvider": "NTT NS lab", "vnfProductName": "Sample VNF", "vnfSoftwareVersion": "1.0", "vnfdVersion": "1.0", "_links": "vnflcm/v1/vnf_instances/" + uuidsentinel.vnf_instance_id + "/instantiate", "instantiationState": instantiation_state, "vnfConfigurableProperties": { "test": "test_value"}} if instantiation_state == 'INSTANTIATED': dummy_vnf_instance.update({ "vimConnectionInfo": [{ 'id': uuidsentinel.uuid, 'vimId': uuidsentinel.vimId, 'vimType': 'openstack', 'interfaceInfo': {'k': 'v'}, 'accessInfo': {'k': 'v'}, 'extra': {'k': 'v'} }], "instantiatedVnfInfo": { "flavourId": uuidsentinel.flavourId, "vnfState": "STARTED", "extCpInfo": [{ 'id': uuidsentinel.extCpInfo_uuid, 'cpdId': uuidsentinel.cpdId_uuid, 'cpProtocolInfo': [{ 'layerProtocol': 'IP_OVER_ETHERNET', 'ipOverEthernet': '{}' }], 'extLinkPortId': uuidsentinel.extLinkPortId_uuid, 'metadata': {'k': 'v'}, 'associatedVnfcCpId': uuidsentinel.associatedVnfcCpId_uuid }], "extVirtualLinkInfo": [{ 'id': uuidsentinel.extVirtualLinkInfo_uuid, 'resourceHandle': {}, 'extLinkPorts': [] }], "extManagedVirtualLinkInfo": [{ "id": uuidsentinel.extManagedVirtualLinkInfo_uuid, 'vnfVirtualLinkDescId': {}, 'networkResource': {}, 'vnfLinkPorts': [] }], "vnfcResourceInfo": [{ 'id': uuidsentinel.vnfcResourceInfo_uuid, 'vduId': uuidsentinel.vduId_uuid, 'computeResource': {}, 'storageResourceIds': [], 'reservationId': uuidsentinel.reservationId, }], "vnfVirtualLinkResourceInfo": [{ 'id': uuidsentinel.vnfVirtualLinkResourceInfo, 'vnfVirtualLinkDescId': 'VL4', 'networkResource': {}, 'reservationId': uuidsentinel.reservationId, 'vnfLinkPorts': [], 'metadata': {'k': 'v'} }], "virtualStorageResourceInfo": [{ 'id': uuidsentinel.virtualStorageResourceInfo, 'virtualStorageDescId': uuidsentinel.virtualStorageDescId, 'storageResource': {}, 'reservationId': uuidsentinel.reservationId, 'metadata': {'k': 'v'} }] }, "_links": { 'self': 'self_link', 'indicators': None, 'instantiate': 'instantiate_link' } }) # Overwrite default attributes. dummy_vnf_instance.update(attrs) return dummy_vnf_instance def get_vnflcm_data(vnf_instance, list_action=False, columns=None): """Get the vnf instance data. :return: A tuple object sorted based on the name of the columns. """ vnf = vnf_instance.copy() complex_attributes = ['vimConnectionInfo', 'instantiatedVnfInfo', '_links'] for attribute in complex_attributes: if vnf.get(attribute): vnf.update( {attribute: tacker_osc_utils.FormatComplexDataColumn( vnf[attribute])}) if list_action: for item in ['vnfInstanceDescription', 'vnfdVersion']: vnf.pop(item) # return the list of data as per column order if columns: return tuple([vnf[key] for key in columns]) return tuple([vnf[key] for key in sorted( vnf.keys())]) def create_vnf_instances(count=2): """Create multiple fake vnf instances. 
:param count: The number of vnf instances to fake :return: A list of fake vnf instances dictionary """ vnf_instances = [] for i in range(0, count): unique_id = uuidutils.generate_uuid() vnf_instances.append(vnf_instance_response(attrs={'id': unique_id})) return vnf_instances ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/vnflcm_op_occs_fakes.py0000664000175000017500000000645500000000000030156 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_utils.fixture import uuidsentinel from oslo_utils import uuidutils from tackerclient.osc import utils as tacker_osc_utils def vnflcm_op_occ_response(attrs=None, action=''): """Create a fake vnflcm op occurrence. :param Dictionary attrs: A dictionary with all attributes :return: A vnf lcm op occs dict """ attrs = attrs or {} # Set default attributes. dummy_vnf_lcm_op_occ = { "id": uuidsentinel.vnflcm_op_occ_id, "operationState": "STARTING", "stateEnteredTime": "2018-12-22T16:59:45.187Z", "startTime": "2018-12-22T16:59:45.187Z", "vnfInstanceId": "376f37f3-d4e9-4d41-8e6a-9b0ec98695cc", "grantId": "", "operation": "INSTANTIATE", "isAutomaticInvocation": "true", "operationParams": { "flavourId": "default", "instantiationLevelId": "n-mme-min" }, "isCancelPending": "true", "cancelMode": "", "error": { "status": "500", "detail": "internal server error" }, "resourceChanges": [], "changedInfo": [], "changedExtConnectivity": [], "_links": { "self": "" } } if action == 'fail': fail_not_needed_columns = [ 'grantId', 'operationParams', 'cancelMode', 'resourceChanges', 'changedInfo', 'changedExtConnectivity'] for key in fail_not_needed_columns: del dummy_vnf_lcm_op_occ[key] # Overwrite default attributes. dummy_vnf_lcm_op_occ.update(attrs) return dummy_vnf_lcm_op_occ def get_vnflcm_op_occ_data(vnf_lcm_op_occ, columns=None): """Get the vnflcm op occurrence. :return: A tuple object sorted based on the name of the columns. """ complex_attributes = [ 'operationParams', 'error', 'resourceChanges', 'changedInfo', 'changedExtConnectivity', 'links'] for attribute in complex_attributes: if vnf_lcm_op_occ.get(attribute): vnf_lcm_op_occ.update( {attribute: tacker_osc_utils.FormatComplexDataColumn( vnf_lcm_op_occ[attribute])}) # return the list of data as per column order if columns: return tuple([vnf_lcm_op_occ[key] for key in columns]) return tuple([vnf_lcm_op_occ[key] for key in sorted( vnf_lcm_op_occ.keys())]) def create_vnflcm_op_occs(count=2): """Create multiple fake vnflcm op occs. 
:param count: The number of vnflcm op occs to fake :return: A list of fake vnflcm op occs dictionary """ vnflcm_op_occs = [] for i in range(0, count): unique_id = uuidutils.generate_uuid() vnflcm_op_occs.append(vnflcm_op_occ_response(attrs={'id': unique_id})) return vnflcm_op_occs ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v1/vnflcm_subsc_fakes.py0000664000175000017500000001545500000000000027650 0ustar00zuulzuul00000000000000# Copyright (C) 2022 Nippon Telegraph and Telephone Corporation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_utils.fixture import uuidsentinel from oslo_utils import uuidutils from tackerclient.osc import utils as tacker_osc_utils def lccn_subsc_response(attrs=None): """Create a fake subscription. :param Dictionary attrs: A dictionary with all attributes :return: A subscription dict """ attrs = attrs or {} id = uuidsentinel.lccn_subsc_id # Set default attributes. dummy_subscription = { "id": id, "filter": { "vnfInstanceSubscriptionFilter": { "vnfdIds": [ "dummy-vnfdId-1", "dummy-vnfdId-2" ], "vnfProductsFromProviders": [ { "vnfProvider": "dummy-vnfProvider-1", "vnfProducts": [ { "vnfProductName": "dummy-vnfProductName-1-1", "versions": [ { "vnfSoftwareVersion": "1.0", "vnfdVersions": ["1.0", "2.0"] }, { "vnfSoftwareVersion": "1.1", "vnfdVersions": ["1.1", "2.1"] } ] }, { "vnfProductName": "dummy-vnfProductName-1-2", "versions": [ { "vnfSoftwareVersion": "1.0", "vnfdVersions": ["1.0", "2.0"] }, { "vnfSoftwareVersion": "1.1", "vnfdVersions": ["1.1", "2.1"] } ] } ] }, { "vnfProvider": "dummy-vnfProvider-2", "vnfProducts": [ { "vnfProductName": "dummy-vnfProductName-2-1", "versions": [ { "vnfSoftwareVersion": "1.0", "vnfdVersions": ["1.0", "2.0"] }, { "vnfSoftwareVersion": "1.1", "vnfdVersions": ["1.1", "2.1"] } ] }, { "vnfProductName": "dummy-vnfProductName-2-2", "versions": [ { "vnfSoftwareVersion": "1.0", "vnfdVersions": ["1.0", "2.0"] }, { "vnfSoftwareVersion": "1.1", "vnfdVersions": ["1.1", "2.1"] } ] } ] } ], "vnfInstanceIds": [ "dummy-vnfInstanceId-1", "dummy-vnfInstanceId-2" ], "vnfInstanceNames": [ "dummy-vnfInstanceName-1", "dummy-vnfInstanceName-2" ] }, "notificationTypes": [ "VnfLcmOperationOccurrenceNotification", "VnfIdentifierCreationNotification", "VnfIdentifierDeletionNotification" ], "operationTypes": [ "INSTANTIATE", "SCALE", "TERMINATE", "HEAL", "MODIFY_INFO", "CHANGE_EXT_CONN" ], "operationStates": [ "COMPLETED", "FAILED", "FAILED_TEMP", "PROCESSING", "ROLLING_BACK", "ROLLED_BACK", "STARTING" ] }, "callbackUri": "http://localhost:9990/notification/callback/test", "_links": { "self": { "href": "http://127.0.0.1:9890/vnflcm/v2/subscriptions/" + id } } } # Overwrite default attributes. dummy_subscription.update(attrs) return dummy_subscription def get_subscription_data(subscription, list_action=False, columns=None): """Get the subscription data. :return: A tuple object sorted based on the name of the columns. 
""" subsc = subscription.copy() complex_attributes = ['filter', '_links'] for attribute in complex_attributes: if subsc.get(attribute): subsc.update( {attribute: tacker_osc_utils.FormatComplexDataColumn( subsc[attribute])}) if list_action: for item in ['filter', '_links']: subsc.pop(item) # return the list of data as per column order if columns: return tuple([subsc[key] for key in columns]) return tuple([subsc[key] for key in sorted( subsc.keys())]) def create_subscriptions(count=2): """Create multiple fake subscriptions. :param count: The number of subscriptions to fake :return: A list of fake subscriptions dictionary """ uri = "http://localhost:9990/notification/callback/" subscriptions = [] for i in range(0, count): unique_id = uuidutils.generate_uuid() subscriptions.append(lccn_subsc_response( attrs={'id': unique_id, 'callbackUri': uri + str(i)})) return subscriptions ././@PaxHeader0000000000000000000000000000003200000000000011450 xustar000000000000000026 mtime=1715864642.75062 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v2/0000775000175000017500000000000000000000000023430 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v2/__init__.py0000664000175000017500000000000000000000000025527 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v2/test_vnffm_alarm.py0000664000175000017500000002612400000000000027336 0ustar00zuulzuul00000000000000# Copyright (C) 2022 Fujitsu # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import ddt import os from oslo_utils.fixture import uuidsentinel from unittest import mock from tackerclient.common import exceptions from tackerclient.osc import utils as tacker_osc_utils from tackerclient.osc.v2.vnffm import vnffm_alarm from tackerclient.tests.unit.osc import base from tackerclient.tests.unit.osc.v1.fixture_data import client from tackerclient.tests.unit.osc.v2 import vnffm_alarm_fakes class TestVnfFmAlarm(base.FixturedTestCase): client_fixture_class = client.ClientFixture def setUp(self): super(TestVnfFmAlarm, self).setUp() self.url = client.TACKER_URL self.header = {'content-type': 'application/json'} self.app = mock.Mock() self.app_args = mock.Mock() self.client_manager = self.cs self.app.client_manager.tackerclient = self.client_manager def _get_columns_vnffm_alarm(action=None): if action == 'update': columns = ['Ack State'] else: columns = ['ID', 'Managed Object Id', 'Ack State', 'Perceived Severity', 'Event Type', 'Probable Cause'] if action == 'show': columns.extend([ 'Vnfc Instance Ids', 'Root Cause Faulty Resource', 'Alarm Raised Time', 'Alarm Changed Time', 'Alarm Cleared Time', 'Alarm Acknowledged Time', 'Event Time', 'Fault Type', 'Is Root Cause', 'Correlated Alarm Ids', 'Fault Details', 'Links' ]) return columns class TestListVnfFmAlarm(TestVnfFmAlarm): def setUp(self): super(TestListVnfFmAlarm, self).setUp() self.list_vnf_fm_alarms = vnffm_alarm.ListVnfFmAlarm( self.app, self.app_args, cmd_name='vnffm alarm list') def test_take_action(self): vnffm_alarms_obj = vnffm_alarm_fakes.create_vnf_fm_alarms( count=3) parsed_args = self.check_parser(self.list_vnf_fm_alarms, [], []) self.requests_mock.register_uri( 'GET', os.path.join(self.url, 'vnffm/v1/alarms'), json=vnffm_alarms_obj, headers=self.header) actual_columns, data = self.list_vnf_fm_alarms.take_action(parsed_args) _, columns = tacker_osc_utils.get_column_definitions( vnffm_alarm._ATTR_MAP, long_listing=True) expected_data = [] for vnffm_alarm_obj_idx in vnffm_alarms_obj: expected_data.append(vnffm_alarm_fakes.get_vnffm_alarm_data( vnffm_alarm_obj_idx, columns=columns)) self.assertCountEqual(_get_columns_vnffm_alarm(action='list'), actual_columns) self.assertCountEqual(expected_data, list(data)) def test_take_action_with_filter(self): vnffm_alarms_obj = vnffm_alarm_fakes.create_vnf_fm_alarms( count=3) parsed_args = self.check_parser( self.list_vnf_fm_alarms, ["--filter", '(eq,perceivedSeverity,WARNING)'], [('filter', '(eq,perceivedSeverity,WARNING)')]) self.requests_mock.register_uri( 'GET', os.path.join( self.url, 'vnffm/v1/alarms?filter=(eq,perceivedSeverity,WARNING)'), json=vnffm_alarms_obj, headers=self.header) actual_columns, data = self.list_vnf_fm_alarms.take_action(parsed_args) _, columns = tacker_osc_utils.get_column_definitions( vnffm_alarm._ATTR_MAP, long_listing=True) expected_data = [] for vnffm_alarm_obj_idx in vnffm_alarms_obj: expected_data.append(vnffm_alarm_fakes.get_vnffm_alarm_data( vnffm_alarm_obj_idx, columns=columns)) self.assertCountEqual(_get_columns_vnffm_alarm(action='list'), actual_columns) self.assertListItemsEqual(expected_data, list(data)) def test_take_action_with_incorrect_filter(self): parsed_args = self.check_parser( self.list_vnf_fm_alarms, ["--filter", '(perceivedSeverity)'], [('filter', '(perceivedSeverity)')]) url = os.path.join( self.url, 'vnffm/v1/alarms?filter=(perceivedSeverity)') self.requests_mock.register_uri( 'GET', url, headers=self.header, status_code=400, json={}) self.assertRaises(exceptions.TackerClientException, self.list_vnf_fm_alarms.take_action, 
parsed_args) def test_take_action_internal_server_error(self): parsed_args = self.check_parser( self.list_vnf_fm_alarms, ["--filter", '(eq,perceivedSeverity,WARNING)'], [('filter', '(eq,perceivedSeverity,WARNING)')]) url = os.path.join( self.url, 'vnffm/v1/alarms?filter=(eq,perceivedSeverity,WARNING)') self.requests_mock.register_uri( 'GET', url, headers=self.header, status_code=500, json={}) self.assertRaises(exceptions.TackerClientException, self.list_vnf_fm_alarms.take_action, parsed_args) class TestShowVnfFmAlarm(TestVnfFmAlarm): def setUp(self): super(TestShowVnfFmAlarm, self).setUp() self.show_vnf_fm_alarm = vnffm_alarm.ShowVnfFmAlarm( self.app, self.app_args, cmd_name='vnffm alarm show') def test_take_action(self): """Test of take_action()""" vnffm_alarm_obj = vnffm_alarm_fakes.vnf_fm_alarm_response() arglist = [vnffm_alarm_obj['id']] verifylist = [('vnf_fm_alarm_id', vnffm_alarm_obj['id'])] # command param parsed_args = self.check_parser( self.show_vnf_fm_alarm, arglist, verifylist) url = os.path.join( self.url, 'vnffm/v1/alarms', vnffm_alarm_obj['id']) self.requests_mock.register_uri( 'GET', url, headers=self.header, json=vnffm_alarm_obj) columns, _ = (self.show_vnf_fm_alarm.take_action(parsed_args)) self.assertCountEqual(_get_columns_vnffm_alarm(action='show'), columns) def test_take_action_vnf_lcm_op_occ_id_not_found(self): """Test if vnf-lcm-op-occ-id does not find.""" arglist = [uuidsentinel.vnf_fm_alarm_id] verifylist = [('vnf_fm_alarm_id', uuidsentinel.vnf_fm_alarm_id)] # command param parsed_args = self.check_parser( self.show_vnf_fm_alarm, arglist, verifylist) url = os.path.join( self.url, 'vnffm/v1/alarms', uuidsentinel.vnf_fm_alarm_id) self.requests_mock.register_uri( 'GET', url, headers=self.header, status_code=404, json={}) self.assertRaises(exceptions.TackerClientException, self.show_vnf_fm_alarm.take_action, parsed_args) def test_take_action_internal_server_error(self): """Test for internal server error.""" arglist = [uuidsentinel.vnf_fm_alarm_id] verifylist = [('vnf_fm_alarm_id', uuidsentinel.vnf_fm_alarm_id)] # command param parsed_args = self.check_parser( self.show_vnf_fm_alarm, arglist, verifylist) url = os.path.join( self.url, 'vnffm/v1/alarms', uuidsentinel.vnf_fm_alarm_id) self.requests_mock.register_uri( 'GET', url, headers=self.header, status_code=500, json={}) self.assertRaises(exceptions.TackerClientException, self.show_vnf_fm_alarm.take_action, parsed_args) @ddt.ddt class TestUpdateVnfFmAlarm(TestVnfFmAlarm): def setUp(self): super(TestUpdateVnfFmAlarm, self).setUp() self.update_vnf_fm_alarm = vnffm_alarm.UpdateVnfFmAlarm( self.app, self.app_args, cmd_name='vnffm alarm update') @ddt.data('ACKNOWLEDGED', 'UNACKNOWLEDGED') def test_take_action(self, ack_state): """Test of take_action()""" vnffm_alarm_obj = vnffm_alarm_fakes.vnf_fm_alarm_response( None, 'update') arg_list = ['--ack-state', ack_state, uuidsentinel.vnf_fm_alarm_id] verify_list = [('ack_state', ack_state), ('vnf_fm_alarm_id', uuidsentinel.vnf_fm_alarm_id)] # command param parsed_args = self.check_parser( self.update_vnf_fm_alarm, arg_list, verify_list) url = os.path.join( self.url, 'vnffm/v1/alarms', uuidsentinel.vnf_fm_alarm_id) self.requests_mock.register_uri( 'PATCH', url, headers=self.header, json=vnffm_alarm_obj) actual_columns, data = ( self.update_vnf_fm_alarm.take_action(parsed_args)) expected_columns = _get_columns_vnffm_alarm(action='update') self.assertCountEqual(expected_columns, actual_columns) _, columns = vnffm_alarm._get_columns( vnffm_alarm_obj, action='update') expected_data 
= vnffm_alarm_fakes.get_vnffm_alarm_data( vnffm_alarm_obj, columns=columns) self.assertEqual(expected_data, data) @ddt.data('ACKNOWLEDGED') def test_take_action_vnf_lcm_op_occ_id_not_found(self, ack_state): """Test if vnf-lcm-op-occ-id does not find""" arg_list = ['--ack-state', ack_state, uuidsentinel.vnf_fm_alarm_id] verify_list = [('ack_state', ack_state), ('vnf_fm_alarm_id', uuidsentinel.vnf_fm_alarm_id)] # command param parsed_args = self.check_parser( self.update_vnf_fm_alarm, arg_list, verify_list) url = os.path.join( self.url, 'vnffm/v1/alarms', uuidsentinel.vnf_fm_alarm_id) self.requests_mock.register_uri( 'PATCH', url, headers=self.header, status_code=404, json={}) self.assertRaises(exceptions.TackerClientException, self.update_vnf_fm_alarm.take_action, parsed_args) @ddt.data('UNACKNOWLEDGED') def test_take_action_vnf_lcm_op_occ_state_is_conflict(self, ack_state): """Test if vnf-lcm-op-occ state is conflict""" arg_list = ['--ack-state', ack_state, uuidsentinel.vnf_fm_alarm_id] verify_list = [('ack_state', ack_state), ('vnf_fm_alarm_id', uuidsentinel.vnf_fm_alarm_id)] # command param parsed_args = self.check_parser( self.update_vnf_fm_alarm, arg_list, verify_list) url = os.path.join( self.url, 'vnffm/v1/alarms', uuidsentinel.vnf_fm_alarm_id) self.requests_mock.register_uri( 'PATCH', url, headers=self.header, status_code=409, json={}) self.assertRaises(exceptions.TackerClientException, self.update_vnf_fm_alarm.take_action, parsed_args) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v2/test_vnffm_sub.py0000664000175000017500000002751400000000000027037 0ustar00zuulzuul00000000000000# Copyright (C) 2022 Fujitsu # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
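# NOTE(editor): illustrative sketch only; not part of the upstream test suite.
# Every command test above follows the same flow: parse the CLI arguments with
# check_parser(), register a canned JSON body on the requests_mock fixture for
# the Tacker endpoint, run take_action(), and compare the returned columns and
# data against the fakes. Condensed into one hypothetical show-command test
# body (kept as a comment because it needs the FixturedTestCase context):
#
#     parsed_args = self.check_parser(
#         self.show_vnf_fm_alarm, [alarm['id']],
#         [('vnf_fm_alarm_id', alarm['id'])])
#     self.requests_mock.register_uri(
#         'GET', os.path.join(self.url, 'vnffm/v1/alarms', alarm['id']),
#         json=alarm, headers=self.header)
#     columns, data = self.show_vnf_fm_alarm.take_action(parsed_args)
#     self.assertCountEqual(_get_columns_vnffm_alarm(action='show'), columns)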
import ddt import os import sys from io import StringIO from oslo_utils.fixture import uuidsentinel from unittest import mock from tackerclient.common import exceptions from tackerclient.osc import utils as tacker_osc_utils from tackerclient.osc.v2.vnffm import vnffm_sub from tackerclient.tests.unit.osc import base from tackerclient.tests.unit.osc.v1.fixture_data import client from tackerclient.tests.unit.osc.v2 import vnffm_sub_fakes class TestVnfFmSub(base.FixturedTestCase): client_fixture_class = client.ClientFixture def setUp(self): super(TestVnfFmSub, self).setUp() self.url = client.TACKER_URL self.header = {'content-type': 'application/json'} self.app = mock.Mock() self.app_args = mock.Mock() self.client_manager = self.cs self.app.client_manager.tackerclient = self.client_manager def _get_columns_vnffm_sub(action=None): columns = ['ID', 'Callback Uri'] if action == 'show' or action == 'create': columns.extend(['Filter', 'Links']) return columns @ddt.ddt class TestCreateVnfFmSub(TestVnfFmSub): def setUp(self): super(TestCreateVnfFmSub, self).setUp() self.create_vnf_fm_sub = vnffm_sub.CreateVnfFmSub( self.app, self.app_args, cmd_name='vnffm sub create') def test_create_no_args(self): self.assertRaises(base.ParserException, self.check_parser, self.create_vnf_fm_sub, [], []) @ddt.unpack def test_take_action(self): param_file = ("./tackerclient/osc/v2/vnffm/samples/" "create_vnf_fm_subscription_param_sample.json") arg_list = [param_file] verify_list = [('request_file', param_file)] parsed_args = self.check_parser(self.create_vnf_fm_sub, arg_list, verify_list) json = vnffm_sub_fakes.vnf_fm_sub_response() self.requests_mock.register_uri( 'POST', os.path.join(self.url, 'vnffm/v1/subscriptions'), json=json, headers=self.header) actual_columns, data = ( self.create_vnf_fm_sub.take_action(parsed_args)) _, attributes = vnffm_sub._get_columns(json) self.assertCountEqual(_get_columns_vnffm_sub("create"), actual_columns) self.assertListItemsEqual(vnffm_sub_fakes.get_vnffm_sub_data( json, columns=attributes), data) class TestListVnfFmSub(TestVnfFmSub): def setUp(self): super(TestListVnfFmSub, self).setUp() self.list_vnffm_sub = vnffm_sub.ListVnfFmSub( self.app, self.app_args, cmd_name='vnffm sub list') def test_take_action(self): vnffm_subs_obj = vnffm_sub_fakes.create_vnf_fm_subs( count=3) parsed_args = self.check_parser(self.list_vnffm_sub, [], []) self.requests_mock.register_uri( 'GET', os.path.join(self.url, 'vnffm/v1/subscriptions'), json=vnffm_subs_obj, headers=self.header) actual_columns, data = self.list_vnffm_sub.take_action(parsed_args) _, columns = tacker_osc_utils.get_column_definitions( vnffm_sub._ATTR_MAP, long_listing=True) expected_data = [] for vnffm_sub_obj_idx in vnffm_subs_obj: expected_data.append(vnffm_sub_fakes.get_vnffm_sub_data( vnffm_sub_obj_idx, columns=columns)) self.assertCountEqual(_get_columns_vnffm_sub(action='list'), actual_columns) self.assertCountEqual(expected_data, list(data)) def test_take_action_with_filter(self): vnffm_subs_obj = vnffm_sub_fakes.create_vnf_fm_subs( count=3) parsed_args = self.check_parser( self.list_vnffm_sub, ["--filter", '(eq,callbackUri,/nfvo/notify/alarm)'], [('filter', '(eq,callbackUri,/nfvo/notify/alarm)')]) self.requests_mock.register_uri( 'GET', os.path.join( self.url, 'vnffm/v1/subscriptions?' 
'filter=(eq,callbackUri,/nfvo/notify/alarm)'), json=vnffm_subs_obj, headers=self.header) actual_columns, data = self.list_vnffm_sub.take_action(parsed_args) _, columns = tacker_osc_utils.get_column_definitions( vnffm_sub._ATTR_MAP, long_listing=True) expected_data = [] for vnffm_sub_obj_idx in vnffm_subs_obj: expected_data.append(vnffm_sub_fakes.get_vnffm_sub_data( vnffm_sub_obj_idx, columns=columns)) self.assertCountEqual(_get_columns_vnffm_sub(action='list'), actual_columns) self.assertListItemsEqual(expected_data, list(data)) def test_take_action_with_incorrect_filter(self): parsed_args = self.check_parser( self.list_vnffm_sub, ["--filter", '(callbackUri)'], [('filter', '(callbackUri)')]) url = os.path.join( self.url, 'vnffm/v1/subscriptions?filter=(callbackUri)') self.requests_mock.register_uri( 'POST', url, headers=self.header, status_code=400, json={}) self.assertRaises(exceptions.TackerClientException, self.list_vnffm_sub.take_action, parsed_args) def test_take_action_internal_server_error(self): parsed_args = self.check_parser( self.list_vnffm_sub, ["--filter", '(eq,callbackUri,/nfvo/notify/alarm)'], [('filter', '(eq,callbackUri,/nfvo/notify/alarm)')]) url = os.path.join( self.url, 'vnffm/v1/subscriptions?' 'filter=(eq,callbackUri,/nfvo/notify/alarm)') self.requests_mock.register_uri( 'POST', url, headers=self.header, status_code=500, json={}) self.assertRaises(exceptions.TackerClientException, self.list_vnffm_sub.take_action, parsed_args) class TestShowVnfFmSub(TestVnfFmSub): def setUp(self): super(TestShowVnfFmSub, self).setUp() self.show_vnf_fm_subs = vnffm_sub.ShowVnfFmSub( self.app, self.app_args, cmd_name='vnffm sub show') def test_take_action(self): """Test of take_action()""" vnffm_sub_obj = vnffm_sub_fakes.vnf_fm_sub_response() arg_list = [vnffm_sub_obj['id']] verify_list = [('vnf_fm_sub_id', vnffm_sub_obj['id'])] # command param parsed_args = self.check_parser( self.show_vnf_fm_subs, arg_list, verify_list) url = os.path.join( self.url, 'vnffm/v1/subscriptions', vnffm_sub_obj['id']) self.requests_mock.register_uri( 'GET', url, headers=self.header, json=vnffm_sub_obj) columns, _ = (self.show_vnf_fm_subs.take_action(parsed_args)) self.assertCountEqual(_get_columns_vnffm_sub('show'), columns) def test_take_action_vnf_fm_sub_id_not_found(self): """Test if vnf-lcm-op-occ-id does not find.""" arg_list = [uuidsentinel.vnf_fm_sub_id] verify_list = [('vnf_fm_sub_id', uuidsentinel.vnf_fm_sub_id)] # command param parsed_args = self.check_parser( self.show_vnf_fm_subs, arg_list, verify_list) url = os.path.join( self.url, 'vnffm/v1/subscriptions', uuidsentinel.vnf_fm_sub_id) self.requests_mock.register_uri( 'GET', url, headers=self.header, status_code=404, json={}) self.assertRaises(exceptions.TackerClientException, self.show_vnf_fm_subs.take_action, parsed_args) def test_take_action_internal_server_error(self): """Test for internal server error.""" arg_list = [uuidsentinel.vnf_fm_sub_id] verify_list = [('vnf_fm_sub_id', uuidsentinel.vnf_fm_sub_id)] # command param parsed_args = self.check_parser( self.show_vnf_fm_subs, arg_list, verify_list) url = os.path.join( self.url, 'vnffm/v1/subscriptions', uuidsentinel.vnf_fm_sub_id) self.requests_mock.register_uri( 'GET', url, headers=self.header, status_code=500, json={}) self.assertRaises(exceptions.TackerClientException, self.show_vnf_fm_subs.take_action, parsed_args) class TestDeleteVnfFmSub(TestVnfFmSub): def setUp(self): super(TestDeleteVnfFmSub, self).setUp() self.delete_vnf_fm_sub = vnffm_sub.DeleteVnfFmSub( self.app, self.app_args, 
cmd_name='vnffm sub delete') # Vnf Fm subscription to delete self.vnf_fm_subs = vnffm_sub_fakes.create_vnf_fm_subs(count=3) def _mock_request_url_for_delete(self, index): url = os.path.join(self.url, 'vnffm/v1/subscriptions', self.vnf_fm_subs[index]['id']) self.requests_mock.register_uri('DELETE', url, headers=self.header, json={}) def test_delete_one_vnf_fm_sub(self): arg_list = [self.vnf_fm_subs[0]['id']] verify_list = [('vnf_fm_sub_id', [self.vnf_fm_subs[0]['id']])] parsed_args = self.check_parser(self.delete_vnf_fm_sub, arg_list, verify_list) self._mock_request_url_for_delete(0) sys.stdout = buffer = StringIO() result = self.delete_vnf_fm_sub.take_action(parsed_args) self.assertIsNone(result) self.assertEqual( (f"VNF FM subscription '{self.vnf_fm_subs[0]['id']}' " f"deleted successfully"), buffer.getvalue().strip()) def test_delete_multiple_vnf_fm_sub(self): arg_list = [] for obj in self.vnf_fm_subs: arg_list.append(obj['id']) verify_list = [('vnf_fm_sub_id', arg_list)] parsed_args = self.check_parser(self.delete_vnf_fm_sub, arg_list, verify_list) for i in range(0, 3): self._mock_request_url_for_delete(i) sys.stdout = buffer = StringIO() result = self.delete_vnf_fm_sub.take_action(parsed_args) self.assertIsNone(result) self.assertEqual('All specified VNF FM subscriptions are deleted ' 'successfully', buffer.getvalue().strip()) def test_delete_multiple_vnf_fm_sub_exception(self): arg_list = [ self.vnf_fm_subs[0]['id'], 'xxxx-yyyy-zzzz', self.vnf_fm_subs[1]['id'], ] verify_list = [('vnf_fm_sub_id', arg_list)] parsed_args = self.check_parser(self.delete_vnf_fm_sub, arg_list, verify_list) self._mock_request_url_for_delete(0) url = os.path.join(self.url, 'vnffm/v1/subscriptions', 'xxxx-yyyy-zzzz') self.requests_mock.register_uri( 'GET', url, exc=exceptions.ConnectionFailed) self._mock_request_url_for_delete(1) exception = self.assertRaises(exceptions.CommandError, self.delete_vnf_fm_sub.take_action, parsed_args) self.assertEqual('Failed to delete 1 of 3 VNF FM subscriptions.', exception.message) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v2/test_vnflcm.py0000664000175000017500000001370300000000000026332 0ustar00zuulzuul00000000000000# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
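# NOTE(editor): illustrative sketch only; not part of the upstream test suite.
# The delete tests above check the command's console output by swapping
# sys.stdout for an io.StringIO buffer before calling take_action() and then
# asserting on buffer.getvalue(). A standalone helper demonstrating that
# capture pattern (and restoring the original stream, which the tests leave to
# the test runner) might look like:
import io
import sys


def _example_capture_stdout(func):
    """Run func() while capturing anything it prints; return the stripped text."""
    original = sys.stdout
    sys.stdout = buffer = io.StringIO()
    try:
        func()
    finally:
        sys.stdout = original
    return buffer.getvalue().strip()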
from io import StringIO import os import sys from unittest import mock from tackerclient.common import exceptions from tackerclient.osc.v1.vnflcm import vnflcm from tackerclient.tests.unit.osc import base from tackerclient.tests.unit.osc.v1.fixture_data import client from tackerclient.tests.unit.osc.v1 import test_vnflcm from tackerclient.tests.unit.osc.v1 import vnflcm_fakes from tackerclient.v1_0 import client as proxy_client class TestVnfLcmV2(base.FixturedTestCase): client_fixture_class = client.ClientFixture api_version = '2' def setUp(self): super(TestVnfLcmV2, self).setUp() def test_client_v2(self): self.assertEqual(self.cs.vnf_lcm_client.headers, {'Version': '2.0.0'}) self.assertEqual(self.cs.vnf_lcm_client.vnf_instances_path, '/vnflcm/v2/vnf_instances') # check of other paths is omitted. class TestChangeVnfPkgVnfLcm(test_vnflcm.TestVnfLcm): api_version = '2' def setUp(self): super(TestChangeVnfPkgVnfLcm, self).setUp() self.change_vnfpkg_vnf_lcm = vnflcm.ChangeVnfPkgVnfLcm( self.app, self.app_args, cmd_name='vnflcm change-vnfpkg') def test_take_action(self): vnf_instance = vnflcm_fakes.vnf_instance_response() sample_param_file = ("./tackerclient/osc/v2/vnflcm/samples/" "change_vnfpkg_vnf_instance_param_sample.json") arglist = [vnf_instance['id'], sample_param_file] verifylist = [('vnf_instance', vnf_instance['id']), ('request_file', sample_param_file)] # command param parsed_args = self.check_parser(self.change_vnfpkg_vnf_lcm, arglist, verifylist) url = os.path.join(self.url, 'vnflcm/v2/vnf_instances', vnf_instance['id'], 'change_vnfpkg') self.requests_mock.register_uri( 'POST', url, headers=self.header, json={}) sys.stdout = buffer = StringIO() with mock.patch.object(proxy_client.ClientBase, '_handle_fault_response') as m: self.change_vnfpkg_vnf_lcm.take_action(parsed_args) # check no fault response is received self.assertNotCalled(m) self.assertEqual( ('Change Current VNF Package for VNF Instance {0} ' 'has been accepted.'.format(vnf_instance['id'])), buffer.getvalue().strip()) def test_take_action_vnf_instance_not_found(self): vnf_instance = vnflcm_fakes.vnf_instance_response() sample_param_file = ("./tackerclient/osc/v1/vnflcm/samples/" "change_vnfpkg_vnf_instance_param_sample.json") arglist = [vnf_instance['id'], sample_param_file] verifylist = [('vnf_instance', vnf_instance['id']), ('request_file', sample_param_file)] # command param parsed_args = self.check_parser(self.change_vnfpkg_vnf_lcm, arglist, verifylist) url = os.path.join(self.url, 'vnflcm/v2/vnf_instances', vnf_instance['id'], 'change_vnfpkg') self.requests_mock.register_uri( 'POST', url, headers=self.header, status_code=404, json={}) self.assertRaises(exceptions.TackerClientException, self.change_vnfpkg_vnf_lcm.take_action, parsed_args) def test_take_action_param_file_not_exists(self): vnf_instance = vnflcm_fakes.vnf_instance_response() sample_param_file = "./not_exists.json" arglist = [vnf_instance['id'], sample_param_file] verifylist = [('vnf_instance', vnf_instance['id']), ('request_file', sample_param_file)] # command param parsed_args = self.check_parser( self.change_vnfpkg_vnf_lcm, arglist, verifylist) ex = self.assertRaises( exceptions.InvalidInput, self.change_vnfpkg_vnf_lcm.take_action, parsed_args) expected_msg = ("Invalid input: " "User does not have read privileges to it") self.assertEqual(expected_msg, str(ex)) @mock.patch("os.open") @mock.patch("os.access") def test_take_action_invalid_format_param_file(self, mock_access, mock_open): vnf_instance = vnflcm_fakes.vnf_instance_response() 
sample_param_file = "./invalid_param_file.json" arglist = [vnf_instance['id'], sample_param_file] verifylist = [('vnf_instance', vnf_instance['id']), ('request_file', sample_param_file)] mock_open.return_value = "invalid_json_data" mock_access.return_value = True # command param parsed_args = self.check_parser(self.change_vnfpkg_vnf_lcm, arglist, verifylist) ex = self.assertRaises( exceptions.InvalidInput, self.change_vnfpkg_vnf_lcm.take_action, parsed_args) expected_msg = "Failed to load parameter file." self.assertIn(expected_msg, str(ex)) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v2/test_vnfpm_job.py0000664000175000017500000004442500000000000027032 0ustar00zuulzuul00000000000000# Copyright (C) 2022 Fujitsu # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import ddt import os import sys from io import StringIO from oslo_utils.fixture import uuidsentinel from unittest import mock from tackerclient.common import exceptions from tackerclient.osc import utils as tacker_osc_utils from tackerclient.osc.v2.vnfpm import vnfpm_job from tackerclient.tests.unit.osc import base from tackerclient.tests.unit.osc.v1.fixture_data import client from tackerclient.tests.unit.osc.v2 import vnfpm_job_fakes class TestVnfPmJob(base.FixturedTestCase): client_fixture_class = client.ClientFixture def setUp(self): super(TestVnfPmJob, self).setUp() self.url = client.TACKER_URL self.header = {'content-type': 'application/json'} self.app = mock.Mock() self.app_args = mock.Mock() self.client_manager = self.cs self.app.client_manager.tackerclient = self.client_manager def _get_columns_vnfpm_job(action=None): if action == 'update': columns = ['Callback Uri'] else: columns = ['ID', 'Object Type', 'Object Instance Ids', 'Sub Object Instance Ids', 'Criteria', 'Callback Uri', 'Reports', 'Links'] if action == 'list': columns = [ ele for ele in columns if ele not in [ 'Criteria', 'Sub Object Instance Ids', 'Reports', 'Links' ] ] return columns @ddt.ddt class TestCreateVnfPmJob(TestVnfPmJob): def setUp(self): super(TestCreateVnfPmJob, self).setUp() self.create_vnf_pm_job = vnfpm_job.CreateVnfPmJob( self.app, self.app_args, cmd_name='vnfpm job create') def test_create_no_args(self): self.assertRaises(base.ParserException, self.check_parser, self.create_vnf_pm_job, [], []) @ddt.unpack def test_take_action(self): param_file = ("./tackerclient/osc/v2/vnfpm/samples/" "create_vnf_pm_job_param_sample.json") arg_list = [param_file] verify_list = [('request_file', param_file)] parsed_args = self.check_parser(self.create_vnf_pm_job, arg_list, verify_list) json = vnfpm_job_fakes.vnf_pm_job_response() self.requests_mock.register_uri( 'POST', os.path.join(self.url, 'vnfpm/v2/pm_jobs'), json=json, headers=self.header) actual_columns, data = ( self.create_vnf_pm_job.take_action(parsed_args)) self.assertCountEqual(_get_columns_vnfpm_job(), actual_columns) _, attributes = vnfpm_job._get_columns(json) expected_data = 
vnfpm_job_fakes.get_vnfpm_job_data( json, columns=attributes) self.assertListItemsEqual(expected_data, data) @ddt.ddt class TestListVnfPmJob(TestVnfPmJob): def setUp(self): super(TestListVnfPmJob, self).setUp() self.list_vnf_pm_jobs = vnfpm_job.ListVnfPmJob( self.app, self.app_args, cmd_name='vnfpm job list') self._vnf_pm_jobs = self._get_vnf_pm_jobs() def _get_vnf_pm_jobs(self): return vnfpm_job_fakes.create_vnf_pm_jobs(count=3) def get_list_columns(self, all_fields=False, exclude_fields=None, extra_fields=None, exclude_default=False): columns = ['Id', 'Object Type', 'Links'] complex_columns = [ 'Object Instance Ids', 'Sub Object Instance Ids', 'Criteria', 'Reports' ] simple_columns = ['Callback Uri'] if extra_fields: columns.extend(extra_fields) if exclude_fields: columns.extend([field for field in complex_columns if field not in exclude_fields]) if all_fields: columns.extend(complex_columns) columns.extend(simple_columns) if exclude_default: columns.extend(simple_columns) return columns def _get_mock_response_for_list_vnf_pm_jobs( self, filter_attribute, json=None): self.requests_mock.register_uri( 'GET', self.url + '/vnfpm/v2/pm_jobs?' + filter_attribute, json=json if json else self._get_vnf_pm_jobs(), headers=self.header) def test_take_action_default_fields(self): parsed_args = self.check_parser(self.list_vnf_pm_jobs, [], []) self.requests_mock.register_uri( 'GET', self.url + '/vnfpm/v2/pm_jobs', json=self._vnf_pm_jobs, headers=self.header) actual_columns, data = self.list_vnf_pm_jobs.take_action(parsed_args) expected_data = [] _, columns = tacker_osc_utils.get_column_definitions( self.list_vnf_pm_jobs.get_attributes(), long_listing=True) for vnf_pm_job_obj in self._vnf_pm_jobs['vnf_pm_jobs']: expected_data.append(vnfpm_job_fakes.get_vnfpm_job_data( vnf_pm_job_obj, columns=columns)) self.assertCountEqual(self.get_list_columns(), actual_columns) self.assertListItemsEqual(expected_data, list(data)) @ddt.data('all_fields', 'exclude_default') def test_take_action(self, arg): parsed_args = self.check_parser( self.list_vnf_pm_jobs, ["--" + arg, "--filter", '(eq,objectType,VNFC)'], [(arg, True), ('filter', '(eq,objectType,VNFC)')]) vnf_pm_jobs = self._get_vnf_pm_jobs() self._get_mock_response_for_list_vnf_pm_jobs( 'filter=(eq,objectType,VNFC)&' + arg, json=vnf_pm_jobs) actual_columns, data = self.list_vnf_pm_jobs.take_action(parsed_args) expected_data = [] kwargs = {arg: True} _, columns = tacker_osc_utils.get_column_definitions( self.list_vnf_pm_jobs.get_attributes(**kwargs), long_listing=True) for vnf_pm_job_obj in vnf_pm_jobs['vnf_pm_jobs']: expected_data.append(vnfpm_job_fakes.get_vnfpm_job_data( vnf_pm_job_obj, columns=columns)) self.assertCountEqual(self.get_list_columns(**kwargs), actual_columns) self.assertListItemsEqual(expected_data, list(data)) def test_take_action_with_exclude_fields(self): parsed_args = self.check_parser( self.list_vnf_pm_jobs, ["--exclude_fields", 'objectInstanceIds,criteria', "--filter", '(eq,objectType,VNFC)'], [('exclude_fields', 'objectInstanceIds,criteria'), ('filter', '(eq,objectType,VNFC)')]) vnf_pm_jobs = self._get_vnf_pm_jobs() updated_vnf_pm_jobs = {'vnf_pm_jobs': []} for vnf_pm_job_obj in vnf_pm_jobs['vnf_pm_jobs']: vnf_pm_job_obj.pop('objectInstanceIds') vnf_pm_job_obj.pop('criteria') updated_vnf_pm_jobs['vnf_pm_jobs'].append(vnf_pm_job_obj) self._get_mock_response_for_list_vnf_pm_jobs( 'filter=(eq,objectType,VNFC)&' 'exclude_fields=objectInstanceIds,criteria', json=updated_vnf_pm_jobs) actual_columns, data = 
self.list_vnf_pm_jobs.take_action(parsed_args) expected_data = [] _, columns = tacker_osc_utils.get_column_definitions( self.list_vnf_pm_jobs.get_attributes( exclude_fields=['objectInstanceIds', 'criteria']), long_listing=True) for updated_vnf_pm_obj in updated_vnf_pm_jobs['vnf_pm_jobs']: expected_data.append(vnfpm_job_fakes.get_vnfpm_job_data( updated_vnf_pm_obj, columns=columns)) expected_columns = self.get_list_columns( exclude_fields=['Object Instance Ids', 'Criteria']) self.assertCountEqual(expected_columns, actual_columns) self.assertListItemsEqual(expected_data, list(data)) @ddt.data((['--all_fields', '--fields', 'objectInstanceIds'], [('all_fields', True), ('fields', 'objectInstanceIds')]), (['--all_fields', '--exclude_fields', 'criteria'], [('all_fields', True), ('exclude_fields', 'criteria')]), (['--fields', 'objectInstanceIds', '--exclude_fields', 'criteria'], [('fields', 'objectInstanceIds'), ('exclude_fields', 'criteria')])) @ddt.unpack def test_take_action_with_invalid_combination(self, arglist, verifylist): self.assertRaises(base.ParserException, self.check_parser, self.list_vnf_pm_jobs, arglist, verifylist) def test_take_action_with_valid_combination(self): parsed_args = self.check_parser( self.list_vnf_pm_jobs, [ "--fields", 'subObjectInstanceIds,criteria', "--exclude_default" ], [ ('fields', 'subObjectInstanceIds,criteria'), ('exclude_default', True) ]) vnf_pm_jobs = self._get_vnf_pm_jobs() updated_vnf_pm_jobs = {'vnf_pm_jobs': []} for vnf_pm_job_obj in vnf_pm_jobs['vnf_pm_jobs']: # vnf_pm_job_obj.pop('userDefinedData') updated_vnf_pm_jobs['vnf_pm_jobs'].append(vnf_pm_job_obj) self._get_mock_response_for_list_vnf_pm_jobs( 'exclude_default&fields=subObjectInstanceIds,criteria', json=updated_vnf_pm_jobs) actual_columns, data = self.list_vnf_pm_jobs.take_action(parsed_args) expected_data = [] _, columns = tacker_osc_utils.get_column_definitions( self.list_vnf_pm_jobs.get_attributes( extra_fields=['subObjectInstanceIds', 'criteria'], exclude_default=True), long_listing=True) for updated_vnf_pm_job_obj in updated_vnf_pm_jobs['vnf_pm_jobs']: expected_data.append(vnfpm_job_fakes.get_vnfpm_job_data( updated_vnf_pm_job_obj, columns=columns)) expected_columns = self.get_list_columns( extra_fields=['Sub Object Instance Ids', 'Criteria'], exclude_default=True) self.assertCountEqual(expected_columns, actual_columns) self.assertListItemsEqual(expected_data, list(data)) class TestShowVnfPmJob(TestVnfPmJob): def setUp(self): super(TestShowVnfPmJob, self).setUp() self.show_vnf_pm_jobs = vnfpm_job.ShowVnfPmJob( self.app, self.app_args, cmd_name='vnfpm job show') def test_take_action(self): """Test of take_action()""" vnfpm_job_obj = vnfpm_job_fakes.vnf_pm_job_response() arg_list = [vnfpm_job_obj['id']] verify_list = [('vnf_pm_job_id', vnfpm_job_obj['id'])] # command param parsed_args = self.check_parser( self.show_vnf_pm_jobs, arg_list, verify_list) url = os.path.join( self.url, 'vnfpm/v2/pm_jobs', vnfpm_job_obj['id']) self.requests_mock.register_uri( 'GET', url, headers=self.header, json=vnfpm_job_obj) columns, data = (self.show_vnf_pm_jobs.take_action(parsed_args)) self.assertCountEqual(_get_columns_vnfpm_job('show'), columns) _, attributes = vnfpm_job._get_columns(vnfpm_job_obj) self.assertListItemsEqual( vnfpm_job_fakes.get_vnfpm_job_data( vnfpm_job_obj, columns=attributes), data) def test_take_action_vnf_pm_job_id_not_found(self): """Test if vnf-pm-job-id does not find.""" arg_list = [uuidsentinel.vnf_pm_job_id] verify_list = [('vnf_pm_job_id', uuidsentinel.vnf_pm_job_id)] # command 
param parsed_args = self.check_parser( self.show_vnf_pm_jobs, arg_list, verify_list) url = os.path.join( self.url, 'vnfpm/v2/pm_jobs', uuidsentinel.vnf_pm_job_id) self.requests_mock.register_uri( 'GET', url, headers=self.header, status_code=404, json={}) self.assertRaises(exceptions.TackerClientException, self.show_vnf_pm_jobs.take_action, parsed_args) def test_take_action_internal_server_error(self): """Test for internal server error.""" arg_list = [uuidsentinel.vnf_pm_job_id] verify_list = [('vnf_pm_job_id', uuidsentinel.vnf_pm_job_id)] # command param parsed_args = self.check_parser( self.show_vnf_pm_jobs, arg_list, verify_list) url = os.path.join( self.url, 'vnfpm/v2/pm_jobs', uuidsentinel.vnf_pm_job_id) self.requests_mock.register_uri( 'GET', url, headers=self.header, status_code=500, json={}) self.assertRaises(exceptions.TackerClientException, self.show_vnf_pm_jobs.take_action, parsed_args) @ddt.ddt class TestUpdateVnfPmJob(TestVnfPmJob): def setUp(self): super(TestUpdateVnfPmJob, self).setUp() self.update_vnf_pm_job = vnfpm_job.UpdateVnfPmJob( self.app, self.app_args, cmd_name='vnfpm job update') def test_take_action(self): """Test of take_action()""" param_file = ("./tackerclient/osc/v2/vnfpm/samples/" "update_vnf_pm_job_param_sample.json") arg_list = [uuidsentinel.vnf_pm_job_id, param_file] verify_list = [ ('vnf_pm_job_id', uuidsentinel.vnf_pm_job_id), ('request_file', param_file) ] vnfpm_job_obj = vnfpm_job_fakes.vnf_pm_job_response( None, 'update') # command param parsed_args = self.check_parser( self.update_vnf_pm_job, arg_list, verify_list) url = os.path.join( self.url, 'vnfpm/v2/pm_jobs', uuidsentinel.vnf_pm_job_id) self.requests_mock.register_uri( 'PATCH', url, headers=self.header, json=vnfpm_job_obj) actual_columns, data = ( self.update_vnf_pm_job.take_action(parsed_args)) expected_columns = _get_columns_vnfpm_job(action='update') self.assertCountEqual(expected_columns, actual_columns) _, columns = vnfpm_job._get_columns( vnfpm_job_obj, action='update') expected_data = vnfpm_job_fakes.get_vnfpm_job_data( vnfpm_job_obj, columns=columns) self.assertEqual(expected_data, data) def test_take_action_vnf_pm_job_id_not_found(self): """Test if vnf-pm-job-id does not find""" param_file = ("./tackerclient/osc/v2/vnfpm/samples/" "update_vnf_pm_job_param_sample.json") arg_list = [uuidsentinel.vnf_pm_job_id, param_file] verify_list = [ ('vnf_pm_job_id', uuidsentinel.vnf_pm_job_id), ('request_file', param_file) ] # command param parsed_args = self.check_parser( self.update_vnf_pm_job, arg_list, verify_list) url = os.path.join( self.url, 'vnfpm/v2/pm_jobs', uuidsentinel.vnf_pm_job_id) self.requests_mock.register_uri( 'PATCH', url, headers=self.header, status_code=404, json={}) self.assertRaises(exceptions.TackerClientException, self.update_vnf_pm_job.take_action, parsed_args) class TestDeleteVnfPmJob(TestVnfPmJob): def setUp(self): super(TestDeleteVnfPmJob, self).setUp() self.delete_vnf_pm_job = vnfpm_job.DeleteVnfPmJob( self.app, self.app_args, cmd_name='vnfpm job delete') # Vnf Fm job to delete self.vnf_pm_jobs = vnfpm_job_fakes.create_vnf_pm_jobs( count=3)['vnf_pm_jobs'] def _mock_request_url_for_delete(self, index): url = os.path.join(self.url, 'vnfpm/v2/pm_jobs', self.vnf_pm_jobs[index]['id']) self.requests_mock.register_uri('DELETE', url, headers=self.header, json={}) def test_delete_one_vnf_pm_job(self): arg_list = [self.vnf_pm_jobs[0]['id']] verify_list = [('vnf_pm_job_id', [self.vnf_pm_jobs[0]['id']])] parsed_args = self.check_parser(self.delete_vnf_pm_job, arg_list, verify_list) 
self._mock_request_url_for_delete(0) sys.stdout = buffer = StringIO() result = self.delete_vnf_pm_job.take_action(parsed_args) self.assertIsNone(result) self.assertEqual( (f"VNF PM job '{self.vnf_pm_jobs[0]['id']}' " f"deleted successfully"), buffer.getvalue().strip()) def test_delete_multiple_vnf_pm_job(self): arg_list = [] for obj in self.vnf_pm_jobs: arg_list.append(obj['id']) verify_list = [('vnf_pm_job_id', arg_list)] parsed_args = self.check_parser(self.delete_vnf_pm_job, arg_list, verify_list) for i in range(0, 3): self._mock_request_url_for_delete(i) sys.stdout = buffer = StringIO() result = self.delete_vnf_pm_job.take_action(parsed_args) self.assertIsNone(result) self.assertEqual('All specified VNF PM jobs are deleted ' 'successfully', buffer.getvalue().strip()) def test_delete_multiple_vnf_pm_job_exception(self): arg_list = [ self.vnf_pm_jobs[0]['id'], 'xxxx-yyyy-zzzz', self.vnf_pm_jobs[1]['id'], ] verify_list = [('vnf_pm_job_id', arg_list)] parsed_args = self.check_parser(self.delete_vnf_pm_job, arg_list, verify_list) self._mock_request_url_for_delete(0) url = os.path.join(self.url, 'vnfpm/v2/jobs', 'xxxx-yyyy-zzzz') self.requests_mock.register_uri( 'GET', url, exc=exceptions.ConnectionFailed) self._mock_request_url_for_delete(1) exception = self.assertRaises(exceptions.CommandError, self.delete_vnf_pm_job.take_action, parsed_args) self.assertEqual('Failed to delete 1 of 3 VNF PM jobs.', exception.message) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v2/test_vnfpm_report.py0000664000175000017500000001135200000000000027564 0ustar00zuulzuul00000000000000# Copyright (C) 2022 Fujitsu # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
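# NOTE(editor): illustrative sketch only; not part of the upstream test suite.
# The PM-job list tests above drive the ETSI-style list query parameters
# (filter, all_fields, fields, exclude_fields, exclude_default) by appending
# them to the 'vnfpm/v2/pm_jobs' URL that is registered on requests_mock.
# A small hypothetical helper showing how such a query string can be built:
def _example_build_list_query(filter_expr=None, fields=None,
                              exclude_fields=None, exclude_default=False,
                              all_fields=False):
    """Join optional ETSI list-query parameters into a URL query string."""
    params = []
    if filter_expr:
        params.append('filter=' + filter_expr)
    if all_fields:
        params.append('all_fields')
    if exclude_default:
        params.append('exclude_default')
    if fields:
        params.append('fields=' + ','.join(fields))
    if exclude_fields:
        params.append('exclude_fields=' + ','.join(exclude_fields))
    return '&'.join(params)
# e.g. _example_build_list_query(filter_expr='(eq,objectType,VNFC)',
#                                exclude_fields=['objectInstanceIds'])
# -> 'filter=(eq,objectType,VNFC)&exclude_fields=objectInstanceIds'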
import os from oslo_utils.fixture import uuidsentinel from unittest import mock from tackerclient.common import exceptions from tackerclient.osc.v2.vnfpm import vnfpm_report from tackerclient.tests.unit.osc import base from tackerclient.tests.unit.osc.v1.fixture_data import client from tackerclient.tests.unit.osc.v2 import vnfpm_report_fakes class TestVnfPmReport(base.FixturedTestCase): client_fixture_class = client.ClientFixture def setUp(self): super(TestVnfPmReport, self).setUp() self.url = client.TACKER_URL self.header = {'content-type': 'application/json'} self.app = mock.Mock() self.app_args = mock.Mock() self.client_manager = self.cs self.app.client_manager.tackerclient = self.client_manager def _get_columns_vnfpm_report(): columns = ['Entries'] return columns class TestShowVnfPmReport(TestVnfPmReport): def setUp(self): super(TestShowVnfPmReport, self).setUp() self.show_vnf_pm_reports = vnfpm_report.ShowVnfPmReport( self.app, self.app_args, cmd_name='vnfpm report show') def test_take_action(self): """Test of take_action()""" vnfpm_report_obj = vnfpm_report_fakes.vnf_pm_report_response() vnf_pm_job_id = uuidsentinel.vnf_pm_job_id vnf_pm_report_id = uuidsentinel.vnfpm_report_obj arg_list = [vnf_pm_job_id, vnf_pm_report_id] verify_list = [ ('vnf_pm_job_id', vnf_pm_job_id), ('vnf_pm_report_id', vnf_pm_report_id) ] # command param parsed_args = self.check_parser( self.show_vnf_pm_reports, arg_list, verify_list) url = os.path.join( self.url, 'vnfpm/v2/pm_jobs', vnf_pm_job_id, 'reports', vnf_pm_report_id) self.requests_mock.register_uri( 'GET', url, headers=self.header, json=vnfpm_report_obj) columns, data = (self.show_vnf_pm_reports.take_action(parsed_args)) self.assertCountEqual(_get_columns_vnfpm_report(), columns) _, attributes = vnfpm_report._get_columns(vnfpm_report_obj) expected_data = vnfpm_report_fakes.get_vnfpm_report_data( vnfpm_report_obj, columns=attributes) print(f'123, {expected_data}') print(f'456, {data}') self.assertListItemsEqual(expected_data, data) def test_take_action_vnf_pm_report_id_not_found(self): """Test if vnf-pm-report-id does not find.""" vnf_pm_job_id = uuidsentinel.vnf_pm_job_id vnf_pm_report_id = uuidsentinel.vnf_pm_report_id arg_list = [vnf_pm_job_id, vnf_pm_report_id] verify_list = [ ('vnf_pm_job_id', vnf_pm_job_id), ('vnf_pm_report_id', vnf_pm_report_id) ] # command param parsed_args = self.check_parser( self.show_vnf_pm_reports, arg_list, verify_list) url = os.path.join( self.url, 'vnfpm/v2/pm_jobs', vnf_pm_job_id, 'reports', vnf_pm_report_id) self.requests_mock.register_uri( 'GET', url, headers=self.header, status_code=404, json={}) self.assertRaises(exceptions.TackerClientException, self.show_vnf_pm_reports.take_action, parsed_args) def test_take_action_internal_server_error(self): """Test for internal server error.""" vnf_pm_job_id = uuidsentinel.vnf_pm_job_id vnf_pm_report_id = uuidsentinel.vnf_pm_report_id arg_list = [vnf_pm_job_id, vnf_pm_report_id] verify_list = [ ('vnf_pm_job_id', vnf_pm_job_id), ('vnf_pm_report_id', vnf_pm_report_id) ] # command param parsed_args = self.check_parser( self.show_vnf_pm_reports, arg_list, verify_list) url = os.path.join( self.url, 'vnfpm/v2/pm_jobs', vnf_pm_job_id, 'reports', vnf_pm_report_id) self.requests_mock.register_uri( 'GET', url, headers=self.header, status_code=500, json={}) self.assertRaises(exceptions.TackerClientException, self.show_vnf_pm_reports.take_action, parsed_args) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 
python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v2/test_vnfpm_threshold.py0000664000175000017500000003670300000000000030254 0ustar00zuulzuul00000000000000# Copyright (C) 2023 Fujitsu # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import ddt import os import sys from io import StringIO from oslo_utils.fixture import uuidsentinel from unittest import mock from tackerclient.common import exceptions from tackerclient.osc import utils as tacker_osc_utils from tackerclient.osc.v2.vnfpm import vnfpm_threshold from tackerclient.tests.unit.osc import base from tackerclient.tests.unit.osc.v1.fixture_data import client from tackerclient.tests.unit.osc.v2 import vnfpm_threshold_fakes class TestVnfPmThreshold(base.FixturedTestCase): client_fixture_class = client.ClientFixture def setUp(self): super(TestVnfPmThreshold, self).setUp() self.url = client.TACKER_URL self.header = {'content-type': 'application/json'} self.app = mock.Mock() self.app_args = mock.Mock() self.client_manager = self.cs self.app.client_manager.tackerclient = self.client_manager def _get_columns_vnfpm_threshold(action=None): if action == 'update': columns = ['Callback Uri'] else: columns = ['ID', 'Object Type', 'Object Instance Id', 'Sub Object Instance Ids', 'Criteria', 'Callback Uri', 'Links'] if action == 'list': columns = [ 'ID', 'Object Type', 'Links' ] return columns @ddt.ddt class TestCreateVnfPmThreshold(TestVnfPmThreshold): def setUp(self): super(TestCreateVnfPmThreshold, self).setUp() self.create_vnf_pm_threshold = vnfpm_threshold.CreateVnfPmThreshold( self.app, self.app_args, cmd_name='vnfpm threshold create') def test_create_no_args(self): self.assertRaises(base.ParserException, self.check_parser, self.create_vnf_pm_threshold, [], []) @ddt.unpack def test_take_action(self): param_file = ("./tackerclient/osc/v2/vnfpm/samples/" "create_vnf_pm_threshold_param_sample.json") arg_list = [param_file] verify_list = [('request_file', param_file)] parsed_args = self.check_parser(self.create_vnf_pm_threshold, arg_list, verify_list) response_json = vnfpm_threshold_fakes.vnf_pm_threshold_response() self.requests_mock.register_uri( 'POST', os.path.join(self.url, 'vnfpm/v2/thresholds'), json=response_json, headers=self.header) actual_columns, data = ( self.create_vnf_pm_threshold.take_action(parsed_args)) self.assertCountEqual(_get_columns_vnfpm_threshold(), actual_columns) _, attributes = vnfpm_threshold._get_columns(response_json) expected_data = vnfpm_threshold_fakes.get_vnfpm_threshold_data( response_json, columns=attributes) self.assertListItemsEqual(expected_data, data) @ddt.ddt class TestListVnfPmThreshold(TestVnfPmThreshold): def setUp(self): super(TestListVnfPmThreshold, self).setUp() self.list_vnf_pm_thresholds = vnfpm_threshold.ListVnfPmThreshold( self.app, self.app_args, cmd_name='vnfpm threshold list') def test_take_action(self): vnf_pm_threshold_objs = vnfpm_threshold_fakes.create_vnf_pm_thresholds( count=3) parsed_args = self.check_parser(self.list_vnf_pm_thresholds, [], []) self.requests_mock.register_uri( 
'GET', os.path.join(self.url, 'vnfpm/v2/thresholds'), json=vnf_pm_threshold_objs, headers=self.header) actual_columns, data = self.list_vnf_pm_thresholds.take_action( parsed_args) _, columns = tacker_osc_utils.get_column_definitions( vnfpm_threshold._ATTR_MAP, long_listing=True) expected_data = [] for vnf_pm_threshold_obj_idx in vnf_pm_threshold_objs: expected_data.append( vnfpm_threshold_fakes.get_vnfpm_threshold_data( vnf_pm_threshold_obj_idx, columns=columns)) self.assertCountEqual(_get_columns_vnfpm_threshold(action='list'), actual_columns) self.assertCountEqual(expected_data, list(data)) def test_take_action_with_filter(self): vnf_pm_threshold_objs = vnfpm_threshold_fakes.create_vnf_pm_thresholds( count=3) parsed_args = self.check_parser( self.list_vnf_pm_thresholds, ["--filter", '(eq,perceivedSeverity,WARNING)'], [('filter', '(eq,perceivedSeverity,WARNING)')]) self.requests_mock.register_uri( 'GET', os.path.join( self.url, 'vnfpm/v2/thresholds?filter=(eq,perceivedSeverity,WARNING)'), json=vnf_pm_threshold_objs, headers=self.header) actual_columns, data = self.list_vnf_pm_thresholds.take_action( parsed_args) _, columns = tacker_osc_utils.get_column_definitions( vnfpm_threshold._ATTR_MAP, long_listing=True) expected_data = [] for vnf_pm_threshold_obj_idx in vnf_pm_threshold_objs: expected_data.append( vnfpm_threshold_fakes.get_vnfpm_threshold_data( vnf_pm_threshold_obj_idx, columns=columns)) self.assertCountEqual(_get_columns_vnfpm_threshold(action='list'), actual_columns) self.assertListItemsEqual(expected_data, list(data)) def test_take_action_with_incorrect_filter(self): parsed_args = self.check_parser( self.list_vnf_pm_thresholds, ["--filter", '(perceivedSeverity)'], [('filter', '(perceivedSeverity)')]) url = os.path.join( self.url, 'vnfpm/v2/thresholds?filter=(perceivedSeverity)') self.requests_mock.register_uri( 'GET', url, headers=self.header, status_code=400, json={}) self.assertRaises(exceptions.TackerClientException, self.list_vnf_pm_thresholds.take_action, parsed_args) def test_take_action_internal_server_error(self): parsed_args = self.check_parser( self.list_vnf_pm_thresholds, ["--filter", '(eq,perceivedSeverity,WARNING)'], [('filter', '(eq,perceivedSeverity,WARNING)')]) url = os.path.join( self.url, 'vnfpm/v2/thresholds?filter=(eq,perceivedSeverity,WARNING)') self.requests_mock.register_uri( 'GET', url, headers=self.header, status_code=500, json={}) self.assertRaises(exceptions.TackerClientException, self.list_vnf_pm_thresholds.take_action, parsed_args) class TestShowVnfPmThreshold(TestVnfPmThreshold): def setUp(self): super(TestShowVnfPmThreshold, self).setUp() self.show_vnf_pm_thresholds = vnfpm_threshold.ShowVnfPmThreshold( self.app, self.app_args, cmd_name='vnfpm threshold show') def test_take_action(self): vnfpm_threshold_obj = vnfpm_threshold_fakes.vnf_pm_threshold_response() arg_list = [vnfpm_threshold_obj['id']] verify_list = [('vnf_pm_threshold_id', vnfpm_threshold_obj['id'])] # command param parsed_args = self.check_parser( self.show_vnf_pm_thresholds, arg_list, verify_list) url = os.path.join( self.url, 'vnfpm/v2/thresholds', vnfpm_threshold_obj['id']) self.requests_mock.register_uri( 'GET', url, headers=self.header, json=vnfpm_threshold_obj) columns, data = (self.show_vnf_pm_thresholds.take_action(parsed_args)) self.assertCountEqual(_get_columns_vnfpm_threshold('show'), columns) _, attributes = vnfpm_threshold._get_columns(vnfpm_threshold_obj) self.assertListItemsEqual( vnfpm_threshold_fakes.get_vnfpm_threshold_data( vnfpm_threshold_obj, columns=attributes), 
data) def test_take_action_vnf_pm_threshold_id_not_found(self): arg_list = [uuidsentinel.vnf_pm_threshold_id] verify_list = [('vnf_pm_threshold_id', uuidsentinel.vnf_pm_threshold_id)] # command param parsed_args = self.check_parser( self.show_vnf_pm_thresholds, arg_list, verify_list) url = os.path.join( self.url, 'vnfpm/v2/thresholds', uuidsentinel.vnf_pm_threshold_id) self.requests_mock.register_uri( 'GET', url, headers=self.header, status_code=404, json={}) self.assertRaises(exceptions.TackerClientException, self.show_vnf_pm_thresholds.take_action, parsed_args) def test_take_action_internal_server_error(self): arg_list = [uuidsentinel.vnf_pm_threshold_id] verify_list = [('vnf_pm_threshold_id', uuidsentinel.vnf_pm_threshold_id)] # command param parsed_args = self.check_parser( self.show_vnf_pm_thresholds, arg_list, verify_list) url = os.path.join( self.url, 'vnfpm/v2/thresholds', uuidsentinel.vnf_pm_threshold_id) self.requests_mock.register_uri( 'GET', url, headers=self.header, status_code=500, json={}) self.assertRaises(exceptions.TackerClientException, self.show_vnf_pm_thresholds.take_action, parsed_args) @ddt.ddt class TestUpdateVnfPmThreshold(TestVnfPmThreshold): def setUp(self): super(TestUpdateVnfPmThreshold, self).setUp() self.update_vnf_pm_threshold = vnfpm_threshold.UpdateVnfPmThreshold( self.app, self.app_args, cmd_name='vnfpm threshold update') def test_take_action(self): param_file = ("./tackerclient/osc/v2/vnfpm/samples/" "update_vnf_pm_threshold_param_sample.json") arg_list = [uuidsentinel.vnf_pm_threshold_id, param_file] verify_list = [ ('vnf_pm_threshold_id', uuidsentinel.vnf_pm_threshold_id), ('request_file', param_file) ] vnfpm_threshold_obj = vnfpm_threshold_fakes.vnf_pm_threshold_response( None, 'update') # command param parsed_args = self.check_parser( self.update_vnf_pm_threshold, arg_list, verify_list) url = os.path.join( self.url, 'vnfpm/v2/thresholds', uuidsentinel.vnf_pm_threshold_id) self.requests_mock.register_uri( 'PATCH', url, headers=self.header, json=vnfpm_threshold_obj) actual_columns, data = ( self.update_vnf_pm_threshold.take_action(parsed_args)) expected_columns = _get_columns_vnfpm_threshold(action='update') self.assertCountEqual(expected_columns, actual_columns) _, columns = vnfpm_threshold._get_columns( vnfpm_threshold_obj, action='update') expected_data = vnfpm_threshold_fakes.get_vnfpm_threshold_data( vnfpm_threshold_obj, columns=columns) self.assertEqual(expected_data, data) def test_take_action_vnf_pm_threshold_id_not_found(self): param_file = ("./tackerclient/osc/v2/vnfpm/samples/" "update_vnf_pm_threshold_param_sample.json") arg_list = [uuidsentinel.vnf_pm_threshold_id, param_file] verify_list = [ ('vnf_pm_threshold_id', uuidsentinel.vnf_pm_threshold_id), ('request_file', param_file) ] # command param parsed_args = self.check_parser( self.update_vnf_pm_threshold, arg_list, verify_list) url = os.path.join( self.url, 'vnfpm/v2/thresholds', uuidsentinel.vnf_pm_threshold_id) self.requests_mock.register_uri( 'PATCH', url, headers=self.header, status_code=404, json={}) self.assertRaises(exceptions.TackerClientException, self.update_vnf_pm_threshold.take_action, parsed_args) class TestDeleteVnfPmThreshold(TestVnfPmThreshold): def setUp(self): super(TestDeleteVnfPmThreshold, self).setUp() self.delete_vnf_pm_threshold = vnfpm_threshold.DeleteVnfPmThreshold( self.app, self.app_args, cmd_name='vnfpm threshold delete') # Vnf Pm threshold to delete self.vnf_pm_thresholds = ( vnfpm_threshold_fakes.create_vnf_pm_thresholds(count=3)) def 
_mock_request_url_for_delete(self, index): url = os.path.join(self.url, 'vnfpm/v2/thresholds', self.vnf_pm_thresholds[index]['id']) self.requests_mock.register_uri('DELETE', url, headers=self.header, json={}) def test_delete_one_vnf_pm_threshold(self): arg_list = [self.vnf_pm_thresholds[0]['id']] verify_list = [('vnf_pm_threshold_id', [self.vnf_pm_thresholds[0]['id']])] parsed_args = self.check_parser(self.delete_vnf_pm_threshold, arg_list, verify_list) self._mock_request_url_for_delete(0) sys.stdout = buffer = StringIO() result = self.delete_vnf_pm_threshold.take_action(parsed_args) self.assertIsNone(result) self.assertEqual( (f"VNF PM threshold '{self.vnf_pm_thresholds[0]['id']}' " f"deleted successfully"), buffer.getvalue().strip()) def test_delete_multiple_vnf_pm_threshold(self): arg_list = [] for obj in self.vnf_pm_thresholds: arg_list.append(obj['id']) verify_list = [('vnf_pm_threshold_id', arg_list)] parsed_args = self.check_parser(self.delete_vnf_pm_threshold, arg_list, verify_list) for i in range(0, len(self.vnf_pm_thresholds)): self._mock_request_url_for_delete(i) sys.stdout = buffer = StringIO() result = self.delete_vnf_pm_threshold.take_action(parsed_args) self.assertIsNone(result) self.assertEqual('All specified VNF PM thresholds are deleted ' 'successfully', buffer.getvalue().strip()) def test_delete_multiple_vnf_pm_threshold_exception(self): arg_list = [ self.vnf_pm_thresholds[0]['id'], 'xxxx-yyyy-zzzz', self.vnf_pm_thresholds[1]['id'], ] verify_list = [('vnf_pm_threshold_id', arg_list)] parsed_args = self.check_parser(self.delete_vnf_pm_threshold, arg_list, verify_list) self._mock_request_url_for_delete(0) url = os.path.join(self.url, 'vnfpm/v2/thresholds', 'xxxx-yyyy-zzzz') self.requests_mock.register_uri( 'GET', url, exc=exceptions.ConnectionFailed) self._mock_request_url_for_delete(1) exception = self.assertRaises(exceptions.CommandError, self.delete_vnf_pm_threshold.take_action, parsed_args) self.assertEqual( f'Failed to delete 1 of {len(self.vnf_pm_thresholds)} ' 'VNF PM thresholds.', exception.message) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v2/vnffm_alarm_fakes.py0000664000175000017500000000771500000000000027455 0ustar00zuulzuul00000000000000# Copyright (C) 2022 Fujitsu # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_utils import uuidutils from tackerclient.osc import utils as tacker_osc_utils def create_vnf_fm_alarms(count=2): """Create multiple fake vnf packages. :param int count: The number of vnf_fm_alarms to fake :return: A list of fake vnf fm alarms dictionary """ vnf_fm_alarms = [] for i in range(0, count): unique_id = uuidutils.generate_uuid() vnf_fm_alarms.append(vnf_fm_alarm_response(attrs={'id': unique_id})) return vnf_fm_alarms def vnf_fm_alarm_response(attrs=None, action=None): """Create a fake vnf fm alarm. 
:param Dictionary attrs: A dictionary with all attributes :return: A FakeVnfFmAlarm dict """ if action == 'update': fake_vnf_fm_alarm = { "ackState": "UNACKNOWLEDGED" } return fake_vnf_fm_alarm attrs = attrs or {} # Set default attributes. fake_vnf_fm_alarm = { "id": "78a39661-60a8-4824-b989-88c1b0c3534a", "managedObjectId": "c61314d0-f583-4ab3-a457-46426bce02d3", "vnfcInstanceIds": "0e5f3086-4e79-47ed-a694-54c29155fa26", "rootCauseFaultyResource": { "faultyResource": { "vimConnectionId": "0d57e928-86a4-4445-a4bd-1634edae73f3", "resourceId": "4e6ccbe1-38ec-4b1b-a278-64de09ba01b3", "vimLevelResourceType": "OS::Nova::Server" }, "faultyResourceType": "COMPUTE" }, "alarmRaisedTime": "2021-09-03 10:21:03", "alarmChangedTime": "2021-09-04 10:21:03", "alarmClearedTime": "2021-09-05 10:21:03", "alarmAcknowledgedTime": "2021-09-06 10:21:03", "ackState": "UNACKNOWLEDGED", "perceivedSeverity": "WARNING", "eventTime": "2021-09-07 10:06:03", "eventType": "EQUIPMENT_ALARM", "faultType": "Fault Type", "probableCause": "The server cannot be connected.", "isRootCause": False, "correlatedAlarmIds": [ "c88b624e-e997-4b17-b674-10ca2bab62e0", "c16d41fd-12e2-49a6-bb17-72faf702353f" ], "faultDetails": [ "Fault", "Details" ], "_links": { "self": { "href": "/vnffm/v1/alarms/" "78a39661-60a8-4824-b989-88c1b0c3534a" }, "objectInstance": { "href": "/vnflcm/v1/vnf_instances/" "0e5f3086-4e79-47ed-a694-54c29155fa26" } } } # Overwrite default attributes. fake_vnf_fm_alarm.update(attrs) return fake_vnf_fm_alarm def get_vnffm_alarm_data(vnf_fm_alarm, columns=None): """Get the vnffm alarm. :return: A tuple object sorted based on the name of the columns. """ complex_attributes = [ 'vnfcInstanceIds', 'rootCauseFaultyResource', 'correlatedAlarmIds', 'faultDetails', '_links' ] for attribute in complex_attributes: if vnf_fm_alarm.get(attribute): vnf_fm_alarm.update( {attribute: tacker_osc_utils.FormatComplexDataColumn( vnf_fm_alarm[attribute])}) # return the list of data as per column order if columns: return tuple([vnf_fm_alarm[key] for key in columns]) return tuple([vnf_fm_alarm[key] for key in sorted( vnf_fm_alarm.keys())]) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v2/vnffm_sub_fakes.py0000664000175000017500000000745000000000000027146 0ustar00zuulzuul00000000000000# Copyright (C) 2022 Fujitsu # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_utils import uuidutils from tackerclient.osc import utils as tacker_osc_utils def create_vnf_fm_subs(count=2): """Create multiple fake vnf packages. :param int count: The number of vnf_fm_subs to fake :return: A list of fake vnf fm subs dictionary """ vnf_fm_subs = [] for i in range(0, count): unique_id = uuidutils.generate_uuid() vnf_fm_subs.append(vnf_fm_sub_response(attrs={'id': unique_id})) return vnf_fm_subs def vnf_fm_sub_response(attrs=None): """Create a fake vnf fm sub. 
:param Dictionary attrs: A dictionary with all attributes :return: A FakeVnfFmAlarm dict """ attrs = attrs or {} # Set default attributes. fake_vnf_fm_sub = { "id": "78a39661-60a8-4824-b989-88c1b0c3534a", "filter": { "vnfInstanceSubscriptionFilter": { "vnfdIds": [ "dummy-vnfdId-1" ], "vnfProductsFromProviders": [ { "vnfProvider": "dummy-vnfProvider-1", "vnfProducts": [ { "vnfProductName": "dummy-vnfProductName-1-1", "versions": [ { "vnfSoftwareVersion": 1.0, "vnfdVersions": [1.0, 2.0] } ] } ] } ], "vnfInstanceIds": [ "dummy-vnfInstanceId-1" ], "vnfInstanceNames": [ "dummy-vnfInstanceName-1" ] }, "notificationTypes": [ "AlarmNotification" ], "faultyResourceTypes": [ "COMPUTE" ], "perceivedSeverities": [ "WARNING" ], "eventTypes": [ "EQUIPMENT_ALARM" ], "probableCauses": [ "The server cannot be connected." ] }, "callbackUri": "/nfvo/notify/alarm", "_links": { "self": { "href": "/vnffm/v1/subscriptions/" "78a39661-60a8-4824-b989-88c1b0c3534a" } } } # Overwrite default attributes. fake_vnf_fm_sub.update(attrs) return fake_vnf_fm_sub def get_vnffm_sub_data(vnf_fm_sub, columns=None): """Get the vnffm sub. :return: A tuple object sorted based on the name of the columns. """ complex_attributes = ['filter', '_links'] for attribute in complex_attributes: if vnf_fm_sub.get(attribute): vnf_fm_sub.update( {attribute: tacker_osc_utils.FormatComplexDataColumn( vnf_fm_sub[attribute])}) # return the list of data as per column order if columns: return tuple([vnf_fm_sub[key] for key in columns]) return tuple([vnf_fm_sub[key] for key in sorted( vnf_fm_sub.keys())]) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v2/vnfpm_job_fakes.py0000664000175000017500000000712400000000000027137 0ustar00zuulzuul00000000000000# Copyright (C) 2022 Fujitsu # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_utils import uuidutils from tackerclient.osc import utils as tacker_osc_utils def create_vnf_pm_jobs(count=2): """Create multiple fake vnf pm jobs. :param int count: The number of vnf_pm_jobs to fake :return: A list of fake vnf pm jobs dictionary """ vnf_pm_jobs = [] for i in range(0, count): unique_id = uuidutils.generate_uuid() vnf_pm_jobs.append(vnf_pm_job_response(attrs={'id': unique_id})) return {'vnf_pm_jobs': vnf_pm_jobs} def vnf_pm_job_response(attrs=None, action=None): """Create a fake vnf pm job. :param Dictionary attrs: A dictionary with all attributes :return: A pm job dict """ if action == 'update': fake_vnf_pm_job = { "callbackUri": "/nfvo/notify/job" } return fake_vnf_pm_job attrs = attrs or {} # Set default attributes. 
fake_vnf_pm_job = { "id": "2bb72d78-b1d9-48fe-8c64-332654ffeb5d", "objectType": "VNFC", "objectInstanceIds": [ "object-instance-id-1" ], "subObjectInstanceIds": [ "sub-object-instance-id-2" ], "criteria": { "performanceMetric": [ "VCpuUsageMeanVnf.object-instance-id-1" ], "performanceMetricGroup": [ "VirtualisedComputeResource" ], "collectionPeriod": 500, "reportingPeriod": 1000, "reportingBoundary": "2022/07/25 10:43:55" }, "callbackUri": "/nfvo/notify/job", "reports": [{ "href": "/vnfpm/v2/pm_jobs/2bb72d78-b1d9-48fe-8c64-332654ffeb5d/" "reports/09d46aed-3ec2-45d9-bfa2-add431e069b3", "readyTime": "2022/07/25 10:43:55", "expiryTime": "2022/07/25 10:43:55", "fileSize": 9999 }], "_links": { "self": { "href": "/vnfpm/v2/pm_jobs/" "78a39661-60a8-4824-b989-88c1b0c3534a" }, "objects": [{ "href": "/vnflcm/v1/vnf_instances/" "0e5f3086-4e79-47ed-a694-54c29155fa26" }] } } # Overwrite default attributes. fake_vnf_pm_job.update(attrs) return fake_vnf_pm_job def get_vnfpm_job_data(vnf_pm_job, columns=None): """Get the vnfpm job. :return: A tuple object sorted based on the name of the columns. """ complex_attributes = [ 'objectInstanceIds', 'subObjectInstanceIds', 'criteria', 'reports', '_links', 'authentication' ] for attribute in complex_attributes: if vnf_pm_job.get(attribute): vnf_pm_job.update( {attribute: tacker_osc_utils.FormatComplexDataColumn( vnf_pm_job[attribute])}) # return the list of data as per column order if columns is None: columns = sorted(vnf_pm_job.keys()) return tuple([vnf_pm_job[key] for key in columns]) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v2/vnfpm_report_fakes.py0000664000175000017500000000437600000000000027706 0ustar00zuulzuul00000000000000# Copyright (C) 2022 Fujitsu # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from tackerclient.osc import utils as tacker_osc_utils def vnf_pm_report_response(attrs=None): """Create a fake vnf pm report. :param Dictionary attrs: A dictionary with all attributes :return: A pm report dict """ attrs = attrs or {} # Set default attributes. fake_vnf_pm_report = { "entries": [ { "objectType": "VNFC", "objectInstanceId": "2bb72d78-b1d9-48fe-8c64-332654ffeb5d", "subObjectInstanceId": "09d46aed-3ec2-45d9-bfa2-add431e069b3", "performanceMetric": "VCpuUsagePeakVnf.2bb72d78-b1d9-48fe-8c64-332654ffeb5d,", "performanceValues": [ { "timeStamp": "2022/07/27 08:58:58", "value": "1.88", "context": { "key": "value" } } ] } ] } # Overwrite default attributes. fake_vnf_pm_report.update(attrs) return fake_vnf_pm_report def get_vnfpm_report_data(vnf_pm_report, columns=None): """Get the vnfpm report. :return: A tuple object sorted based on the name of the columns. 
""" attribute = 'entries' if vnf_pm_report.get(attribute): vnf_pm_report.update( {attribute: tacker_osc_utils.FormatComplexDataColumn( vnf_pm_report[attribute])}) # return the list of data as per column order if columns is None: columns = sorted(vnf_pm_report.keys()) return tuple([vnf_pm_report[key] for key in columns]) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/osc/v2/vnfpm_threshold_fakes.py0000664000175000017500000000676400000000000030372 0ustar00zuulzuul00000000000000# Copyright (C) 2023 Fujitsu # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_utils import uuidutils from tackerclient.osc import utils as tacker_osc_utils def create_vnf_pm_thresholds(count=2): """Create multiple fake vnf pm thresholds. :param int count: The number of vnf_pm_thresholds to fake :return: A list of fake vnf pm thresholds dictionary """ vnf_pm_thresholds = [] for _ in range(0, count): unique_id = uuidutils.generate_uuid() vnf_pm_thresholds.append(vnf_pm_threshold_response( attrs={'id': unique_id})) return vnf_pm_thresholds def vnf_pm_threshold_response(attrs=None, action=None): """Create a fake vnf pm threshold. :param Dictionary attrs: A dictionary with all attributes :param String action: The operation performed on threshold :return: A pm threshold dict """ if action == 'update': fake_vnf_pm_threshold = { "callbackUri": "/nfvo/notify/threshold", } return fake_vnf_pm_threshold attrs = attrs or {} # Set default attributes. fake_vnf_pm_threshold = { "id": "2bb72d78-b1d9-48fe-8c64-332654ffeb5d", "objectType": "Vnfc", "objectInstanceId": "object-instance-id-1", "subObjectInstanceIds": [ "sub-object-instance-id-2" ], "criteria": { "performanceMetric": "VCpuUsageMeanVnf.object-instance-id-1", "thresholdType": "SIMPLE", "simpleThresholdDetails": { "thresholdValue": 500.5, "hysteresis": 10.5 } }, "callbackUri": "/nfvo/notify/threshold", "_links": { "self": { "href": "/vnfpm/v2/thresholds/" "78a39661-60a8-4824-b989-88c1b0c3534a" }, "object": { "href": "/vnflcm/v1/vnf_instances/" "0e5f3086-4e79-47ed-a694-54c29155fa26" } } } # Overwrite default attributes. fake_vnf_pm_threshold.update(attrs) return fake_vnf_pm_threshold def get_vnfpm_threshold_data(vnf_pm_threshold, columns=None): """Get the vnfpm threshold. :param Dictionary vnf_pm_threshold: A dictionary with vnf_pm_threshold :param List columns: A list of column names :return: A tuple object sorted based on the name of the columns. 
""" complex_attributes = ['subObjectInstanceIds', 'criteria', '_links', 'authentication'] for attribute in complex_attributes: if vnf_pm_threshold.get(attribute): vnf_pm_threshold.update( {attribute: tacker_osc_utils.FormatComplexDataColumn( vnf_pm_threshold[attribute])}) # return the list of data as per column order if columns is None: columns = sorted(vnf_pm_threshold.keys()) return tuple([vnf_pm_threshold[key] for key in columns]) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/test_auth.py0000664000175000017500000003453000000000000024674 0ustar00zuulzuul00000000000000# Copyright 2012 NEC Corporation # All Rights Reserved # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # import copy import json from unittest import mock import uuid from keystoneclient import exceptions as k_exceptions import requests import testtools from tackerclient import client from tackerclient.common import exceptions USERNAME = 'testuser' USER_ID = 'testuser_id' TENANT_NAME = 'testtenant' TENANT_ID = 'testtenant_id' PASSWORD = 'password' AUTH_URL = 'authurl' ENDPOINT_URL = 'localurl' ENDPOINT_OVERRIDE = 'otherurl' TOKEN = 'tokentoken' REGION = 'RegionTest' NOAUTH = 'noauth' KS_TOKEN_RESULT = { 'access': { 'token': {'id': TOKEN, 'expires': '2012-08-11T07:49:01Z', 'tenant': {'id': str(uuid.uuid1())}}, 'user': {'id': str(uuid.uuid1())}, 'serviceCatalog': [ {'endpoints_links': [], 'endpoints': [{'adminURL': ENDPOINT_URL, 'internalURL': ENDPOINT_URL, 'publicURL': ENDPOINT_URL, 'region': REGION}], 'type': 'nfv-orchestration', 'name': 'Tacker Service'} ] } } ENDPOINTS_RESULT = { 'endpoints': [{ 'type': 'nfv-orchestration', 'name': 'Tacker Service', 'region': REGION, 'adminURL': ENDPOINT_URL, 'internalURL': ENDPOINT_URL, 'publicURL': ENDPOINT_URL }] } def get_response(status_code, headers=None): response = mock.Mock().CreateMock(requests.Response) response.headers = headers or {} response.status_code = status_code return response resp_200 = get_response(200) resp_401 = get_response(401) headers = {'X-Auth-Token': '', 'User-Agent': 'python-tackerclient'} expected_headers = {'X-Auth-Token': TOKEN, 'User-Agent': 'python-tackerclient'} agent_header = {'User-Agent': 'python-tackerclient'} class CLITestAuthNoAuth(testtools.TestCase): def setUp(self): """Prepare the test environment.""" super(CLITestAuthNoAuth, self).setUp() self.client = client.HTTPClient(username=USERNAME, tenant_name=TENANT_NAME, password=PASSWORD, endpoint_url=ENDPOINT_URL, auth_strategy=NOAUTH, region_name=REGION) self.addCleanup(mock.patch.stopall) @mock.patch('tackerclient.client.HTTPClient.request') def test_get_noauth(self, mock_request): mock_request.return_value = (resp_200, '') self.client.do_request('/resource', 'GET', headers=headers) mock_request.assert_called_once_with( ENDPOINT_URL + '/resource', 'GET', headers=headers, content_type=None) self.assertEqual(self.client.endpoint_url, ENDPOINT_URL) class CLITestAuthKeystone(testtools.TestCase): # Auth Body 
expected auth_body = ('{"auth": {"tenantName": "testtenant", ' '"passwordCredentials": ' '{"username": "testuser", "password": "password"}}}') def setUp(self): """Prepare the test environment.""" super(CLITestAuthKeystone, self).setUp() self.client = client.HTTPClient(username=USERNAME, tenant_name=TENANT_NAME, password=PASSWORD, auth_url=AUTH_URL, region_name=REGION) self.addCleanup(mock.patch.stopall) def test_reused_token_get_auth_info(self): """Test Client.get_auth_info(). Test that Client.get_auth_info() works even if client was instantiated with predefined token. """ client_ = client.HTTPClient(username=USERNAME, tenant_name=TENANT_NAME, token=TOKEN, password=PASSWORD, auth_url=AUTH_URL, region_name=REGION) expected = {'auth_token': TOKEN, 'auth_tenant_id': None, 'auth_user_id': None, 'endpoint_url': self.client.endpoint_url} self.assertEqual(client_.get_auth_info(), expected) @mock.patch('tackerclient.client.HTTPClient.request') def test_get_token(self, mock_request): mock_request.return_value = (resp_200, json.dumps(KS_TOKEN_RESULT)) self.client.do_request('/resource', 'GET') mock_request.assert_called_with( ENDPOINT_URL + '/resource', 'GET', headers=expected_headers, content_type=None) self.assertEqual(self.client.endpoint_url, ENDPOINT_URL) self.assertEqual(self.client.auth_token, TOKEN) @mock.patch('tackerclient.client.HTTPClient.request') def test_refresh_token(self, mock_request): self.client.auth_token = TOKEN self.client.endpoint_url = ENDPOINT_URL # If a token is expired, tacker server returns 401 mock_request.return_value = (resp_401, '') self.assertRaises(exceptions.Unauthorized, self.client.do_request, '/resource', 'GET') mock_request.return_value = (resp_200, json.dumps(KS_TOKEN_RESULT)) self.client.do_request('/resource', 'GET') mock_request.assert_called_with( ENDPOINT_URL + '/resource', 'GET', headers=expected_headers, content_type=None) @mock.patch('tackerclient.client.HTTPClient.request') def test_refresh_token_no_auth_url(self, mock_request): self.client.auth_url = None self.client.auth_token = TOKEN self.client.endpoint_url = ENDPOINT_URL # If a token is expired, tacker server returns 401 mock_request.return_value = (resp_401, '') self.assertRaises(exceptions.NoAuthURLProvided, self.client.do_request, '/resource', 'GET') expected_url = ENDPOINT_URL + '/resource' mock_request.assert_called_with(expected_url, 'GET', headers=expected_headers, content_type=None) def test_get_endpoint_url_with_invalid_auth_url(self): # Handle the case when auth_url is not provided self.client.auth_url = None self.assertRaises(exceptions.NoAuthURLProvided, self.client._get_endpoint_url) @mock.patch('tackerclient.client.HTTPClient.request') def test_get_endpoint_url(self, mock_request): self.client.auth_token = TOKEN mock_request.return_value = (resp_200, json.dumps(ENDPOINTS_RESULT)) self.client.do_request('/resource', 'GET') mock_request.assert_called_with( ENDPOINT_URL + '/resource', 'GET', headers=expected_headers, content_type=None) mock_request.return_value = (resp_200, '') self.client.do_request('/resource', 'GET', headers=headers) mock_request.assert_called_with( ENDPOINT_URL + '/resource', 'GET', headers=headers, content_type=None) @mock.patch('tackerclient.client.HTTPClient.request') def test_use_given_endpoint_url(self, mock_request): self.client = client.HTTPClient( username=USERNAME, tenant_name=TENANT_NAME, password=PASSWORD, auth_url=AUTH_URL, region_name=REGION, endpoint_url=ENDPOINT_OVERRIDE) self.assertEqual(self.client.endpoint_url, ENDPOINT_OVERRIDE) 
self.client.auth_token = TOKEN mock_request.return_value = (resp_200, '') self.client.do_request('/resource', 'GET', headers=headers) mock_request.assert_called_with( ENDPOINT_OVERRIDE + '/resource', 'GET', headers=headers, content_type=None) self.assertEqual(self.client.endpoint_url, ENDPOINT_OVERRIDE) @mock.patch('tackerclient.client.HTTPClient.request') def test_get_endpoint_url_other(self, mock_request): self.client = client.HTTPClient( username=USERNAME, tenant_name=TENANT_NAME, password=PASSWORD, auth_url=AUTH_URL, region_name=REGION, endpoint_type='otherURL') self.client.auth_token = TOKEN mock_request.return_value = (resp_200, json.dumps(ENDPOINTS_RESULT)) self.assertRaises(exceptions.EndpointTypeNotFound, self.client.do_request, '/resource', 'GET') expected_url = AUTH_URL + '/tokens/%s/endpoints' % TOKEN headers = {'User-Agent': 'python-tackerclient'} mock_request.assert_called_with(expected_url, 'GET', headers=headers) @mock.patch('tackerclient.client.HTTPClient.request') def test_get_endpoint_url_failed(self, mock_request): self.client.auth_token = TOKEN self.client.auth_url = AUTH_URL + '/tokens/%s/endpoints' % TOKEN mock_request.return_value = (resp_401, '') self.assertRaises(exceptions.Unauthorized, self.client.do_request, '/resource', 'GET') def test_endpoint_type(self): resources = copy.deepcopy(KS_TOKEN_RESULT) endpoints = resources['access']['serviceCatalog'][0]['endpoints'][0] endpoints['internalURL'] = 'internal' endpoints['adminURL'] = 'admin' endpoints['publicURL'] = 'public' # Test default behavior is to choose public. self.client = client.HTTPClient( username=USERNAME, tenant_name=TENANT_NAME, password=PASSWORD, auth_url=AUTH_URL, region_name=REGION) self.client._extract_service_catalog(resources) self.assertEqual(self.client.endpoint_url, 'public') # Test admin url self.client = client.HTTPClient( username=USERNAME, tenant_name=TENANT_NAME, password=PASSWORD, auth_url=AUTH_URL, region_name=REGION, endpoint_type='adminURL') self.client._extract_service_catalog(resources) self.assertEqual(self.client.endpoint_url, 'admin') # Test public url self.client = client.HTTPClient( username=USERNAME, tenant_name=TENANT_NAME, password=PASSWORD, auth_url=AUTH_URL, region_name=REGION, endpoint_type='publicURL') self.client._extract_service_catalog(resources) self.assertEqual(self.client.endpoint_url, 'public') # Test internal url self.client = client.HTTPClient( username=USERNAME, tenant_name=TENANT_NAME, password=PASSWORD, auth_url=AUTH_URL, region_name=REGION, endpoint_type='internalURL') self.client._extract_service_catalog(resources) self.assertEqual(self.client.endpoint_url, 'internal') # Test url that isn't found in the service catalog self.client = client.HTTPClient( username=USERNAME, tenant_name=TENANT_NAME, password=PASSWORD, auth_url=AUTH_URL, region_name=REGION, endpoint_type='privateURL') self.assertRaises(k_exceptions.EndpointNotFound, self.client._extract_service_catalog, resources) @mock.patch('tackerclient.client.HTTPClient.request') @mock.patch('tackerclient.common.utils.http_log_req') def test_strip_credentials_from_log(self, mock_http_log_req, mock_request,): body = ('{"auth": {"tenantId": "testtenant_id",' '"passwordCredentials": {"password": "password",' '"userId": "testuser_id"}}}') expected_body = ('{"auth": {"tenantId": "testtenant_id",' '"REDACTEDCredentials": {"REDACTED": "REDACTED",' '"userId": "testuser_id"}}}') _headers = {'headers': expected_headers, 'body': expected_body, 'content_type': None} mock_request.return_value = (resp_200, 
json.dumps(KS_TOKEN_RESULT)) self.client.do_request('/resource', 'GET', body=body) args, kwargs = mock_http_log_req.call_args # Check that credentials are stripped while logging. self.assertEqual(_headers, args[2]) class CLITestAuthKeystoneWithId(CLITestAuthKeystone): # Auth Body expected auth_body = ('{"auth": {"passwordCredentials": ' '{"password": "password", "userId": "testuser_id"}, ' '"tenantId": "testtenant_id"}}') def setUp(self): """Prepare the test environment.""" super(CLITestAuthKeystoneWithId, self).setUp() self.client = client.HTTPClient(user_id=USER_ID, tenant_id=TENANT_ID, password=PASSWORD, auth_url=AUTH_URL, region_name=REGION) class CLITestAuthKeystoneWithIdandName(CLITestAuthKeystone): # Auth Body expected auth_body = ('{"auth": {"passwordCredentials": ' '{"password": "password", "userId": "testuser_id"}, ' '"tenantId": "testtenant_id"}}') def setUp(self): """Prepare the test environment.""" super(CLITestAuthKeystoneWithIdandName, self).setUp() self.client = client.HTTPClient(username=USERNAME, user_id=USER_ID, tenant_id=TENANT_ID, tenant_name=TENANT_NAME, password=PASSWORD, auth_url=AUTH_URL, region_name=REGION) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/test_casual_args.py0000664000175000017500000001123400000000000026213 0ustar00zuulzuul00000000000000# Copyright 2012 OpenStack Foundation. # All Rights Reserved # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
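#
# The CLITestArgs cases below exercise tackerclient.tacker.v1_0.parse_args_to_dict,
# which turns the free-form arguments left after option parsing into a Python dict.
# A minimal illustrative call (a sketch only, mirroring test_arg and test_dict_arg
# below; names come from this module's own imports):
#
#     from tackerclient.tacker import v1_0 as tackerV10
#     tackerV10.parse_args_to_dict(['--tag=t', '--arg1', 'value1'])['arg1']
#     # -> 'value1'
#     tackerV10.parse_args_to_dict(
#         ['--tag=t', '--arg1', 'type=dict', 'key1=value1,key2=value2'])['arg1']
#     # -> {'key1': 'value1', 'key2': 'value2'}
#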
# import testtools from tackerclient.common import exceptions from tackerclient.tacker import v1_0 as tackerV10 class CLITestArgs(testtools.TestCase): def test_empty(self): _mydict = tackerV10.parse_args_to_dict([]) self.assertEqual({}, _mydict) def test_default_bool(self): _specs = ['--my_bool', '--arg1', 'value1'] _mydict = tackerV10.parse_args_to_dict(_specs) self.assertTrue(_mydict['my_bool']) def test_bool_true(self): _specs = ['--my-bool', 'type=bool', 'true', '--arg1', 'value1'] _mydict = tackerV10.parse_args_to_dict(_specs) self.assertTrue(_mydict['my_bool']) def test_bool_false(self): _specs = ['--my_bool', 'type=bool', 'false', '--arg1', 'value1'] _mydict = tackerV10.parse_args_to_dict(_specs) self.assertFalse(_mydict['my_bool']) def test_nargs(self): _specs = ['--tag', 'x', 'y', '--arg1', 'value1'] _mydict = tackerV10.parse_args_to_dict(_specs) self.assertIn('x', _mydict['tag']) self.assertIn('y', _mydict['tag']) def test_badarg(self): _specs = ['--tag=t', 'x', 'y', '--arg1', 'value1'] self.assertRaises(exceptions.CommandError, tackerV10.parse_args_to_dict, _specs) def test_badarg_with_minus(self): _specs = ['--arg1', 'value1', '-D'] self.assertRaises(exceptions.CommandError, tackerV10.parse_args_to_dict, _specs) def test_goodarg_with_minus_number(self): _specs = ['--arg1', 'value1', '-1', '-1.0'] _mydict = tackerV10.parse_args_to_dict(_specs) self.assertEqual(['value1', '-1', '-1.0'], _mydict['arg1']) def test_badarg_duplicate(self): _specs = ['--tag=t', '--arg1', 'value1', '--arg1', 'value1'] self.assertRaises(exceptions.CommandError, tackerV10.parse_args_to_dict, _specs) def test_badarg_early_type_specification(self): _specs = ['type=dict', 'key=value'] self.assertRaises(exceptions.CommandError, tackerV10.parse_args_to_dict, _specs) def test_arg(self): _specs = ['--tag=t', '--arg1', 'value1'] self.assertEqual('value1', tackerV10.parse_args_to_dict(_specs)['arg1']) def test_dict_arg(self): _specs = ['--tag=t', '--arg1', 'type=dict', 'key1=value1,key2=value2'] arg1 = tackerV10.parse_args_to_dict(_specs)['arg1'] self.assertEqual('value1', arg1['key1']) self.assertEqual('value2', arg1['key2']) def test_dict_arg_with_attribute_named_type(self): _specs = ['--tag=t', '--arg1', 'type=dict', 'type=value1,key2=value2'] arg1 = tackerV10.parse_args_to_dict(_specs)['arg1'] self.assertEqual('value1', arg1['type']) self.assertEqual('value2', arg1['key2']) def test_list_of_dict_arg(self): _specs = ['--tag=t', '--arg1', 'type=dict', 'list=true', 'key1=value1,key2=value2'] arg1 = tackerV10.parse_args_to_dict(_specs)['arg1'] self.assertEqual('value1', arg1[0]['key1']) self.assertEqual('value2', arg1[0]['key2']) def test_clear_action(self): _specs = ['--anyarg', 'action=clear'] args = tackerV10.parse_args_to_dict(_specs) self.assertIsNone(args['anyarg']) def test_bad_values_str(self): _specs = ['--strarg', 'type=str'] self.assertRaises(exceptions.CommandError, tackerV10.parse_args_to_dict, _specs) def test_bad_values_list(self): _specs = ['--listarg', 'list=true', 'type=str'] self.assertRaises(exceptions.CommandError, tackerV10.parse_args_to_dict, _specs) _specs = ['--listarg', 'type=list'] self.assertRaises(exceptions.CommandError, tackerV10.parse_args_to_dict, _specs) _specs = ['--listarg', 'type=list', 'action=clear'] self.assertRaises(exceptions.CommandError, tackerV10.parse_args_to_dict, _specs) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 
python-tackerclient-2.1.0/tackerclient/tests/unit/test_cli10.py0000664000175000017500000007672300000000000024655 0ustar00zuulzuul00000000000000# Copyright 2012 OpenStack Foundation. # All Rights Reserved # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # import contextlib import fixtures import io import sys import testtools from unittest import mock import urllib from urllib import parse as urlparse from tackerclient.common import exceptions from tackerclient import shell from tackerclient.tacker import v1_0 as tackerV1_0 from tackerclient.tacker.v1_0 import TackerCommand from tackerclient.tests.unit import test_utils from tackerclient.v1_0 import client API_VERSION = "1.0" FORMAT = 'json' TOKEN = 'testtoken' ENDURL = 'localurl' @contextlib.contextmanager def capture_std_streams(): fake_stdout, fake_stderr = io.StringIO(), io.StringIO() stdout, stderr = sys.stdout, sys.stderr try: sys.stdout, sys.stderr = fake_stdout, fake_stderr yield fake_stdout, fake_stderr finally: sys.stdout, sys.stderr = stdout, stderr class FakeStdout(io.IOBase): def __init__(self): self.content = [] def write(self, text): self.content.append(text) def make_string(self): result = '' for line in self.content: result = result + line return result class MyResp(object): def __init__(self, status_code, headers=None, reason=None): self.status_code = status_code self.headers = headers or {} self.reason = reason class MyApp(object): def __init__(self, _stdout): self.stdout = _stdout def end_url(path, query=None, format=FORMAT): _url_str = ENDURL + "/v" + API_VERSION + path + "." + format return query and _url_str + "?" 
+ query or _url_str class MyUrlComparator(object): def __init__(self, lhs, client): self.lhs = lhs self.client = client def equals(self, rhs): lhsp = urlparse.urlparse(self.lhs) rhsp = urlparse.urlparse(rhs) lhs_qs = urlparse.parse_qsl(lhsp.query) rhs_qs = urlparse.parse_qsl(rhsp.query) return (lhsp.scheme == rhsp.scheme and lhsp.netloc == rhsp.netloc and lhsp.path == rhsp.path and len(lhs_qs) == len(rhs_qs) and set(lhs_qs) == set(rhs_qs)) def __str__(self): if self.client and self.client.format != FORMAT: lhs_parts = self.lhs.split("?", 1) if len(lhs_parts) == 2: lhs = ("%s.%s?%s" % (lhs_parts[0][:-4], self.client.format, lhs_parts[1])) else: lhs = ("%s.%s" % (lhs_parts[0][:-4], self.client.format)) return lhs return self.lhs def __repr__(self): return str(self) def __eq__(self, rhs): return self.equals(rhs) def __ne__(self, rhs): return not self.__eq__(rhs) class MyComparator(object): def __init__(self, lhs, client): self.lhs = lhs self.client = client def _com_dict(self, lhs, rhs): if len(lhs) != len(rhs): return False for key, value in lhs.items(): if key not in rhs: return False rhs_value = rhs[key] if not self._com(value, rhs_value): return False return True def _com_list(self, lhs, rhs): if len(lhs) != len(rhs): return False for lhs_value in lhs: if lhs_value not in rhs: return False return True def _com(self, lhs, rhs): if lhs is None: return rhs is None if isinstance(lhs, dict): if not isinstance(rhs, dict): return False return self._com_dict(lhs, rhs) if isinstance(lhs, list): if not isinstance(rhs, list): return False return self._com_list(lhs, rhs) if isinstance(lhs, tuple): if not isinstance(rhs, tuple): return False return self._com_list(lhs, rhs) return lhs == rhs def equals(self, rhs): if self.client: rhs = self.client.deserialize(rhs, 200) return self._com(self.lhs, rhs) def __repr__(self): if self.client: return self.client.serialize(self.lhs) return str(self.lhs) def __eq__(self, rhs): return self.equals(rhs) def __ne__(self, rhs): return not self.__eq__(rhs) class CLITestV10Base(testtools.TestCase): format = 'json' test_id = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa' id_field = 'id' def _find_resourceid(self, client, resource, name_or_id): return name_or_id def setUp(self, plurals={}): """Prepare the test environment.""" super(CLITestV10Base, self).setUp() self.endurl = ENDURL self.fake_stdout = FakeStdout() self.useFixture(fixtures.MonkeyPatch('sys.stdout', self.fake_stdout)) self.useFixture(fixtures.MonkeyPatch( 'tackerclient.tacker.v1_0.find_resourceid_by_name_or_id', self._find_resourceid)) self.useFixture(fixtures.MonkeyPatch( 'tackerclient.tacker.v1_0.find_resourceid_by_id', self._find_resourceid)) self.client = client.LegacyClient(token=TOKEN, endpoint_url=self.endurl) @mock.patch.object(TackerCommand, 'get_client') def _test_create_resource(self, resource, cmd, name, myid, args, position_names, position_values, mock_get, tenant_id=None, get_client_called_count=1, tags=None, admin_state_up=True, extra_body=None, **kwargs): mock_get.return_value = self.client non_admin_status_resources = ['vim'] if (resource in non_admin_status_resources): body = {resource: {}, } else: body = {resource: {'admin_state_up': admin_state_up, }, } if tenant_id: body[resource].update({'tenant_id': tenant_id}) if tags: body[resource].update({'tags': tags}) if extra_body: body[resource].update(extra_body) body[resource].update(kwargs) for i in range(len(position_names)): body[resource].update({position_names[i]: position_values[i]}) ress = {resource: {self.id_field: myid}, } if name: 
ress[resource].update({'name': name}) self.client.format = self.format resstr = self.client.serialize(ress) # url method body resource_plural = tackerV1_0._get_resource_plural(resource, self.client) path = getattr(self.client, resource_plural + "_path") # Work around for LP #1217791. XML deserializer called from # MyComparator does not decodes XML string correctly. if self.format == 'json': _body = MyComparator(body, self.client) _content_type = 'application/json' else: _body = self.client.serialize(body) _content_type = 'application/zip' with mock.patch.object(self.client.httpclient, 'request') as mock_req: mock_req.return_value = (MyResp(200), resstr) args.extend(['--request-format', self.format]) cmd_parser = cmd.get_parser('create_' + resource) shell.run_command(cmd, cmd_parser, args) mock_req.assert_called_once_with( end_url(path, format=self.format), 'POST', body=_body, headers=test_utils.ContainsKeyValue('X-Auth-Token', TOKEN), content_type=_content_type) self.assertEqual(get_client_called_count, mock_get.call_count) _str = self.fake_stdout.make_string() self.assertIn(myid, _str) if name: self.assertIn(name, _str) @mock.patch.object(TackerCommand, 'get_client') def _test_list_columns(self, cmd, resources_collection, resources_out, mock_get, args=['-f', 'json']): mock_get.return_value = self.client self.client.format = self.format resstr = self.client.serialize(resources_out) path = getattr(self.client, resources_collection + "_path") with mock.patch.object(self.client.httpclient, 'request') as mock_req: mock_req.return_value = (MyResp(200), resstr) args.extend(['--request-format', self.format]) cmd_parser = cmd.get_parser("list_" + resources_collection) shell.run_command(cmd, cmd_parser, args) mock_req.assert_called_once_with( end_url(path, format=self.format), 'GET', body=None, headers=test_utils.ContainsKeyValue('X-Auth-Token', TOKEN), content_type='application/json') mock_get.assert_called_once_with() def _test_list_resources(self, resources, cmd, detail=False, tags=[], fields_1=[], fields_2=[], page_size=None, sort_key=[], sort_dir=[], response_contents=None, base_args=None, path=None, template_source=None): if response_contents is None: contents = [{self.id_field: 'myid1', }, {self.id_field: 'myid2', }, ] else: contents = response_contents reses = {resources: contents} self.client.format = self.format resstr = self.client.serialize(reses) # url method body query = "" args = base_args if base_args is not None else [] if detail: args.append('-D') args.extend(['--request-format', self.format]) if fields_1: for field in fields_1: args.append('--fields') args.append(field) if template_source is not None: args.append("--template-source") args.append(template_source) query += 'template_source=' + template_source if tags: args.append('--') args.append("--tag") for tag in tags: args.append(tag) if isinstance(tag, str): tag = urllib.quote(tag.encode('utf-8')) if query: query += "&tag=" + tag else: query = "tag=" + tag if (not tags) and fields_2: args.append('--') if fields_2: args.append("--fields") for field in fields_2: args.append(field) if detail: query = query and query + '&verbose=True' or 'verbose=True' fields_1.extend(fields_2) for field in fields_1: if query: query += "&fields=" + field else: query = "fields=" + field if page_size: args.append("--page-size") args.append(str(page_size)) if query: query += "&limit=%s" % page_size else: query = "limit=%s" % page_size if sort_key: for key in sort_key: args.append('--sort-key') args.append(key) if query: query += '&' query += 
'sort_key=%s' % key if sort_dir: len_diff = len(sort_key) - len(sort_dir) if len_diff > 0: sort_dir += ['asc'] * len_diff elif len_diff < 0: sort_dir = sort_dir[:len(sort_key)] for dir in sort_dir: args.append('--sort-dir') args.append(dir) if query: query += '&' query += 'sort_dir=%s' % dir if path is None: path = getattr(self.client, resources + "_path") with mock.patch.object(self.client.httpclient, 'request') as mock_req: mock_req.return_value = (MyResp(200), resstr) with mock.patch.object(TackerCommand, 'get_client') as mock_get: mock_get.return_value = self.client cmd_parser = cmd.get_parser("list_" + resources) shell.run_command(cmd, cmd_parser, args) mock_req.assert_called_once_with( MyUrlComparator(end_url(path, query, format=self.format), self.client), 'GET', body=None, headers=test_utils.ContainsKeyValue('X-Auth-Token', TOKEN), content_type='application/json') _str = self.fake_stdout.make_string() if response_contents is None: self.assertIn('myid1', _str) return _str @mock.patch.object(TackerCommand, 'get_client') def _test_list_sub_resources(self, resources, api_resource, cmd, myid, mock_get, detail=False, tags=[], fields_1=[], fields_2=[], page_size=None, sort_key=[], sort_dir=[], response_contents=None, base_args=None, path=None): mock_get.return_value = self.client if response_contents is None: contents = [{self.id_field: 'myid1', }, {self.id_field: 'myid2', }, ] else: contents = response_contents reses = {api_resource: contents} self.client.format = self.format resstr = self.client.serialize(reses) # url method body query = "" args = base_args if base_args is not None else [] if detail: args.append('-D') args.extend(['--request-format', self.format]) if fields_1: for field in fields_1: args.append('--fields') args.append(field) if tags: args.append('--') args.append("--tag") for tag in tags: args.append(tag) if isinstance(tag, str): tag = urllib.quote(tag.encode('utf-8')) if query: query += "&tag=" + tag else: query = "tag=" + tag if (not tags) and fields_2: args.append('--') if fields_2: args.append("--fields") for field in fields_2: args.append(field) if detail: query = query and query + '&verbose=True' or 'verbose=True' fields_1.extend(fields_2) for field in fields_1: if query: query += "&fields=" + field else: query = "fields=" + field if page_size: args.append("--page-size") args.append(str(page_size)) if query: query += "&limit=%s" % page_size else: query = "limit=%s" % page_size if sort_key: for key in sort_key: args.append('--sort-key') args.append(key) if query: query += '&' query += 'sort_key=%s' % key if sort_dir: len_diff = len(sort_key) - len(sort_dir) if len_diff > 0: sort_dir += ['asc'] * len_diff elif len_diff < 0: sort_dir = sort_dir[:len(sort_key)] for dir in sort_dir: args.append('--sort-dir') args.append(dir) if query: query += '&' query += 'sort_dir=%s' % dir if path is None: path = getattr(self.client, resources + "_path") with mock.patch.object(self.client.httpclient, 'request') as mock_req: mock_req.return_value = (MyResp(200), resstr) comparator = MyUrlComparator( end_url(path % myid, query=query, format=self.format), self.client) args.extend(['--request-format', self.format]) cmd_parser = cmd.get_parser("list_" + resources) shell.run_command(cmd, cmd_parser, args) mock_req.assert_called_once_with( comparator, 'GET', body=None, headers=test_utils.ContainsKeyValue('X-Auth-Token', TOKEN), content_type='application/json') _str = self.fake_stdout.make_string() if response_contents is None: self.assertIn('myid1', _str) return _str # TODO(gongysh) add 
pagination unit test BUG 1633255 # def _test_list_sub_resources_with_pagination( # self, resources, api_resource, cmd, myid): # self.mox.StubOutWithMock(cmd, "get_client") # self.mox.StubOutWithMock(self.client.httpclient, "request") # cmd.get_client().MultipleTimes().AndReturn(self.client) # path = getattr(self.client, resources + "_path") # fake_query = "marker=myid2&limit=2" # reses1 = {api_resource: [{'id': 'myid1', }, # {'id': 'myid2', }], # '%s_links' % api_resource: [ # {'href': end_url(path % myid, fake_query), # 'rel': 'next'}] # } # reses2 = {api_resource: [{'id': 'myid3', }, # {'id': 'myid4', }]} # self.client.format = self.format # resstr1 = self.client.serialize(reses1) # resstr2 = self.client.serialize(reses2) # self.client.httpclient.request( # end_url(path % myid, "", format=self.format), 'GET', # body=None, # headers=mox.ContainsKeyValue( # 'X-Auth-Token', TOKEN)).AndReturn((MyResp(200), resstr1)) # self.client.httpclient.request( # MyUrlComparator(end_url(path % myid, fake_query, # format=self.format), self.client), 'GET', # body=None, headers=mox.ContainsKeyValue( # 'X-Auth-Token', TOKEN)).AndReturn((MyResp(200), resstr2)) # self.mox.ReplayAll() # cmd_parser = cmd.get_parser("list_" + resources) # args = [myid, '--request-format', self.format] # shell.run_command(cmd, cmd_parser, args) # self.mox.VerifyAll() # self.mox.UnsetStubs() # def _test_list_resources_with_pagination(self, resources, cmd): # self.mox.StubOutWithMock(cmd, "get_client") # self.mox.StubOutWithMock(self.client.httpclient, "request") # cmd.get_client().MultipleTimes().AndReturn(self.client) # path = getattr(self.client, resources + "_path") # fake_query = "marker=myid2&limit=2" # reses1 = {resources: [{'id': 'myid1', }, # {'id': 'myid2', }], # '%s_links' % resources: [ # {'href': end_url(path, fake_query), # 'rel': 'next'}]} # reses2 = {resources: [{'id': 'myid3', }, # {'id': 'myid4', }]} # self.client.format = self.format # resstr1 = self.client.serialize(reses1) # resstr2 = self.client.serialize(reses2) # self.client.httpclient.request( # end_url(path, "", format=self.format), 'GET', # body=None, # headers=mox.ContainsKeyValue( # 'X-Auth-Token', TOKEN)).AndReturn((MyResp(200), resstr1)) # self.client.httpclient.request( # MyUrlComparator(end_url(path, fake_query, format=self.format), # self.client), 'GET', body=None, # headers=mox.ContainsKeyValue( # 'X-Auth-Token', TOKEN)).AndReturn((MyResp(200), resstr2)) # self.mox.ReplayAll() # cmd_parser = cmd.get_parser("list_" + resources) # args = ['--request-format', self.format] # shell.run_command(cmd, cmd_parser, args) # self.mox.VerifyAll() # self.mox.UnsetStubs() @mock.patch.object(TackerCommand, 'get_client') def _test_update_resource(self, resource, cmd, myid, args, extrafields, mock_get, get_client_called_count=1): mock_get.return_value = self.client body = {resource: extrafields} path = getattr(self.client, resource + "_path") self.client.format = self.format # Work around for LP #1217791. XML deserializer called from # MyComparator does not decodes XML string correctly. 
if self.format == 'json': _body = MyComparator(body, self.client) _content_type = 'application/json' else: _body = self.client.serialize(body) _content_type = 'application/zip' with mock.patch.object(self.client.httpclient, 'request') as mock_req: comparator = MyUrlComparator( end_url(path % myid, format=self.format), self.client) mock_req.return_value = (MyResp(204), None) args.extend(['--request-format', self.format]) cmd_parser = cmd.get_parser("update_" + resource) shell.run_command(cmd, cmd_parser, args) mock_req.assert_called_once_with( comparator, 'PUT', body=_body, headers=test_utils.ContainsKeyValue('X-Auth-Token', TOKEN), content_type=_content_type) self.assertEqual(get_client_called_count, mock_get.call_count) _str = self.fake_stdout.make_string() self.assertIn(myid, _str) def _test_show_resource(self, resource, cmd, myid, args, fields=[]): with mock.patch.object(cmd, 'get_client') as mock_get: mock_get.return_value = self.client query = "&".join(["fields=%s" % field for field in fields]) expected_res = {resource: {self.id_field: myid, 'name': 'myname', }, } self.client.format = self.format resstr = self.client.serialize(expected_res) path = getattr(self.client, resource + "_path") with mock.patch.object(self.client.httpclient, 'request') as\ mock_req: mock_req.return_value = (MyResp(200), resstr) args.extend(['--request-format', self.format]) cmd_parser = cmd.get_parser("show_" + resource) shell.run_command(cmd, cmd_parser, args) mock_req.assert_called_once_with( end_url(path % myid, query, format=self.format), 'GET', body=None, headers=test_utils.ContainsKeyValue('X-Auth-Token', TOKEN), content_type='application/json') _str = self.fake_stdout.make_string() mock_get.assert_called_once_with() self.assertIn(myid, _str) self.assertIn('myname', _str) @mock.patch.object(TackerCommand, 'get_client') def _test_delete_resource(self, resource, cmd, myid, args, mock_get): deleted_msg = {'vnf': 'delete initiated'} mock_get.return_value = self.client path = getattr(self.client, resource + "_path") with mock.patch.object(self.client.httpclient, 'request') as mock_req: mock_req.return_value = (MyResp(204), None) args.extend(['--request-format', self.format]) cmd_parser = cmd.get_parser("delete_" + resource) shell.run_command(cmd, cmd_parser, args) if '--force' in args: body_str = '{"' + resource + \ '": {"attributes": {"force": true}}}' mock_req.assert_called_once_with( end_url(path % myid, format=self.format), 'DELETE', body=body_str, headers=test_utils.ContainsKeyValue('X-Auth-Token', TOKEN), content_type='application/json') else: mock_req.assert_called_once_with( end_url(path % myid, format=self.format), 'DELETE', body=None, headers=test_utils.ContainsKeyValue('X-Auth-Token', TOKEN), content_type='application/json') mock_get.assert_called_once_with() _str = self.fake_stdout.make_string() msg = 'All specified %(resource)s(s) %(msg)s successfully\n' % { 'msg': deleted_msg.get(resource, 'deleted'), 'resource': resource} self.assertEqual(msg, _str) @mock.patch.object(TackerCommand, 'get_client') def _test_update_resource_action(self, resource, cmd, myid, action, args, body, mock_get, retval=None): mock_get.return_value = self.client path = getattr(self.client, resource + "_path") path_action = '%s/%s' % (myid, action) with mock.patch.object(self.client.httpclient, 'request') as mock_req: mock_req.return_value = (MyResp(204), retval) args.extend(['--request-format', self.format]) cmd_parser = cmd.get_parser("delete_" + resource) shell.run_command(cmd, cmd_parser, args) 
mock_req.assert_called_once_with( end_url(path % path_action, format=self.format), 'PUT', body=MyComparator(body, self.client), headers=test_utils.ContainsKeyValue('X-Auth-Token', TOKEN), content_type='application/json') _str = self.fake_stdout.make_string() self.assertIn(myid, _str) class ClientV1TestJson(CLITestV10Base): def test_do_request_unicode(self): self.client.format = self.format unicode_text = '\u7f51\u7edc' action = '/test' params = {'test': unicode_text} body = params expect_body = self.client.serialize(body) self.client.httpclient.auth_token = unicode_text with mock.patch.object(self.client.httpclient, 'request') as mock_req: mock_req.return_value = (MyResp(200), expect_body) res_body = self.client.do_request('PUT', action, body=body, params=params) expected_uri = 'localurl/v1.0/test.json?test=%E7%BD%91%E7%BB%9C' mock_req.assert_called_with( expected_uri, 'PUT', body=expect_body, headers={'X-Auth-Token': unicode_text, 'User-Agent': 'python-tackerclient'}, content_type='application/json') # test response with unicode self.assertEqual(res_body, body) def test_do_request_error_without_response_body(self): self.client.format = self.format params = {'test': 'value'} expect_query = urlparse.urlencode(params) self.client.httpclient.auth_token = 'token' with mock.patch.object(self.client.httpclient, 'request') as mock_req: mock_req.return_value = (MyResp(400, reason='An error'), '') self.client.httpclient.request( end_url('/test', query=expect_query, format=self.format), 'PUT', body='', headers={'X-Auth-Token': 'token'} ) error = self.assertRaises(exceptions.TackerClientException, self.client.do_request, 'PUT', '/test', body='', params=params) self.assertEqual("400-tackerFault", str(error)) class CLITestV10ExceptionHandler(CLITestV10Base): def _test_exception_handler_v10( self, expected_exception, status_code, expected_msg, error_type=None, error_msg=None, error_detail=None, error_content=None): if error_content is None: error_content = {'TackerError': {'type': error_type, 'message': error_msg, 'detail': error_detail}} e = self.assertRaises(expected_exception, client.exception_handler_v10, status_code, error_content) self.assertEqual(status_code, e.status_code) self.assertEqual(expected_exception.__name__, e.__class__.__name__) if expected_msg is None: if error_detail: expected_msg = '\n'.join([error_msg, error_detail]) else: expected_msg = error_msg self.assertEqual(expected_msg, e.message) def test_exception_handler_v10_unknown_error_to_per_code_exception(self): for status_code, client_exc in exceptions.HTTP_EXCEPTION_MAP.items(): error_msg = 'Unknown error' error_detail = 'This is detail' self._test_exception_handler_v10( client_exc, status_code, error_msg + '\n' + error_detail, 'UnknownError', error_msg, error_detail) def test_exception_handler_v10_tacker_unknown_status_code(self): error_msg = 'Unknown error' error_detail = 'This is detail' self._test_exception_handler_v10( exceptions.TackerClientException, 501, error_msg + '\n' + error_detail, 'UnknownError', error_msg, error_detail) def test_exception_handler_v10_bad_tacker_error(self): error_content = {'TackerError': {'unknown_key': 'UNKNOWN'}} self._test_exception_handler_v10( exceptions.TackerClientException, 500, expected_msg={'unknown_key': 'UNKNOWN'}, error_content=error_content) def test_exception_handler_v10_error_dict_contains_message(self): error_content = {'message': 'This is an error message'} self._test_exception_handler_v10( exceptions.TackerClientException, 500, expected_msg='500-tackerFault', 
error_content=error_content) def test_exception_handler_v10_error_dict_not_contain_message(self): error_content = 'tackerFault' expected_msg = '%s-%s' % (500, error_content) self._test_exception_handler_v10( exceptions.TackerClientException, 500, expected_msg=expected_msg, error_content=error_content) def test_exception_handler_v10_default_fallback(self): error_content = 'This is an error message' expected_msg = '%s-%s' % (500, error_content) self._test_exception_handler_v10( exceptions.TackerClientException, 500, expected_msg=expected_msg, error_content=error_content) def test_exception_handler_v10_tacker_etsi_error(self): """Test ETSI error response""" known_error_map = [ ({ "status": "status 1", "detail": "sample 1" }, 400), ({ "status": "status 2", "detail": "sample 2" }, 404), ({ "status": "status 3", "detail": "sample 3" }, 409) ] for error_content, status_code in known_error_map: self._test_exception_handler_v10( exceptions.TackerClientException, status_code, expected_msg=error_content['detail'], error_content=error_content) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/test_command_meta.py0000664000175000017500000000244500000000000026357 0ustar00zuulzuul00000000000000# Copyright 2013 Intel Corporation # All Rights Reserved. # # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import logging import testtools from tackerclient.tacker import v1_0 as tackerV10 class TestCommandMeta(testtools.TestCase): def test_tacker_command_meta_defines_log(self): class FakeCommand(tackerV10.TackerCommand): pass self.assertTrue(hasattr(FakeCommand, 'log')) self.assertIsInstance(FakeCommand.log, logging.getLoggerClass()) self.assertEqual(FakeCommand.log.name, __name__ + ".FakeCommand") def test_tacker_command_log_defined_explicitly(self): class FakeCommand(tackerV10.TackerCommand): log = None self.assertTrue(hasattr(FakeCommand, 'log')) self.assertIsNone(FakeCommand.log) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/test_http.py0000664000175000017500000000475500000000000024720 0ustar00zuulzuul00000000000000# Copyright (C) 2013 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
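
The exception-handler tests above drive exception_handler_v10 with several error-body shapes: a 'TackerError' dict, a dict carrying only 'message', a plain string, and an ETSI-style status/detail dict. A minimal sketch of the behaviour those tests pin down, using only the modules and values that appear in the tests (the 501 case has no per-status-code exception mapping, so the generic TackerClientException is raised):

    from tackerclient.common import exceptions
    from tackerclient.v1_0 import client

    # Error body in the "TackerError" shape; values mirror the
    # unknown-status-code test case above.
    error_content = {'TackerError': {'type': 'UnknownError',
                                     'message': 'Unknown error',
                                     'detail': 'This is detail'}}
    try:
        client.exception_handler_v10(501, error_content)
    except exceptions.TackerClientException as e:
        # The handler joins 'message' and 'detail' and keeps the status code.
        assert e.status_code == 501
        assert e.message == 'Unknown error\nThis is detail'
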
import testtools from unittest import mock from tackerclient.client import HTTPClient from tackerclient.common import exceptions from tackerclient.tests.unit.test_cli10 import MyResp AUTH_TOKEN = 'test_token' END_URL = 'test_url' METHOD = 'GET' URL = 'http://test.test:1234/v1.0/test' headers = {'User-Agent': 'python-tackerclient'} class TestHTTPClient(testtools.TestCase): def setUp(self): super(TestHTTPClient, self).setUp() self.addCleanup(mock.patch.stopall) self.http = HTTPClient(token=AUTH_TOKEN, endpoint_url=END_URL) @mock.patch('tackerclient.client.HTTPClient.request') def test_request_error(self, mock_request): mock_request.side_effect = Exception('error msg') self.assertRaises( exceptions.ConnectionFailed, self.http._cs_request, URL, METHOD ) @mock.patch('tackerclient.client.HTTPClient.request') def test_request_success(self, mock_request): rv_should_be = MyResp(200), 'test content' mock_request.return_value = rv_should_be self.assertEqual(rv_should_be, self.http._cs_request(URL, METHOD)) @mock.patch('tackerclient.client.HTTPClient.request') def test_request_unauthorized(self, mock_request): mock_request.return_value = MyResp(401), 'unauthorized message' e = self.assertRaises(exceptions.Unauthorized, self.http._cs_request, URL, METHOD) self.assertEqual('unauthorized message', str(e)) mock_request.assert_called_with(URL, METHOD, headers=headers) @mock.patch('tackerclient.client.HTTPClient.request') def test_request_forbidden_is_returned_to_caller(self, mock_request): rv_should_be = MyResp(403), 'forbidden message' mock_request.return_value = rv_should_be self.assertEqual(rv_should_be, self.http._cs_request(URL, METHOD)) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/test_shell.py0000664000175000017500000001575000000000000025045 0ustar00zuulzuul00000000000000# Copyright (C) 2013 Yahoo! Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import argparse import fixtures import io import logging import os import re import sys import testtools from testtools import matchers from unittest import mock from keystoneclient import session from tackerclient.common import clientmanager from tackerclient import shell as openstack_shell DEFAULT_USERNAME = 'username' DEFAULT_PASSWORD = 'password' DEFAULT_TENANT_ID = 'tenant_id' DEFAULT_TENANT_NAME = 'tenant_name' DEFAULT_AUTH_URL = 'http://127.0.0.1:5000/v1.0/' DEFAULT_TOKEN = '3bcc3d3a03f44e3d8377f9247b0ad155' DEFAULT_URL = 'http://tacker.example.org:9890/' DEFAULT_API_VERSION = '1.0' class ShellTest(testtools.TestCase): FAKE_ENV = { 'OS_USERNAME': DEFAULT_USERNAME, 'OS_PASSWORD': DEFAULT_PASSWORD, 'OS_TENANT_ID': DEFAULT_TENANT_ID, 'OS_TENANT_NAME': DEFAULT_TENANT_NAME, 'OS_AUTH_URL': DEFAULT_AUTH_URL} # Patch os.environ to avoid required auth info. 
def setUp(self): super(ShellTest, self).setUp() for var in self.FAKE_ENV: self.useFixture( fixtures.EnvironmentVariable( var, self.FAKE_ENV[var])) def shell(self, argstr, check=False): orig = (sys.stdout, sys.stderr) clean_env = {} _old_env, os.environ = os.environ, clean_env.copy() try: sys.stdout = io.StringIO() sys.stderr = io.StringIO() _shell = openstack_shell.TackerShell(DEFAULT_API_VERSION) _shell.run(argstr.split()) except SystemExit: exc_type, exc_value, exc_traceback = sys.exc_info() self.assertEqual(exc_value.code, 0) finally: stdout = sys.stdout.getvalue() stderr = sys.stderr.getvalue() sys.stdout.close() sys.stderr.close() sys.stdout, sys.stderr = orig os.environ = _old_env return stdout, stderr def test_run_unknown_command(self): self.useFixture(fixtures.FakeLogger(level=logging.DEBUG)) stdout, stderr = self.shell('fake', check=True) self.assertFalse(stdout) self.assertEqual("Unknown command ['fake']", stderr.strip()) def test_help(self): required = 'usage:' help_text, stderr = self.shell('help') self.assertThat( help_text, matchers.MatchesRegex(required)) self.assertFalse(stderr) def test_help_on_subcommand(self): required = [ '.*?^usage: .* vim-list'] stdout, stderr = self.shell('help vim-list') for r in required: self.assertThat( stdout, matchers.MatchesRegex(r, re.DOTALL | re.MULTILINE)) self.assertFalse(stderr) def test_help_command(self): required = 'usage:' help_text, stderr = self.shell('help vim-create') self.assertThat( help_text, matchers.MatchesRegex(required)) self.assertFalse(stderr) def test_unknown_auth_strategy(self): self.useFixture(fixtures.FakeLogger(level=logging.DEBUG)) stdout, stderr = self.shell('--os-auth-strategy fake ' 'vim-list') self.assertFalse(stdout) def test_auth(self): with mock.patch.object(openstack_shell.TackerShell, 'run_subcommand'), \ mock.patch.object(session, 'Session'), \ mock.patch.object(clientmanager, 'ClientManager') as mock_cmgr: shell = openstack_shell.TackerShell(DEFAULT_API_VERSION) shell.options = mock.Mock() auth_session = shell._get_keystone_session() cmdline = ('--os-username test ' '--os-password test ' '--os-tenant-name test ' '--os-auth-url http://127.0.0.1:5000/ ' '--os-auth-strategy keystone vim-list') shell.authenticate_user() shell.run(cmdline.split()) mock_cmgr.assert_called_with( raise_errors=False, retries=0, timeout=None, token='', url='', auth_url='http://127.0.0.1:5000/', tenant_name='test', tenant_id='tenant_id', username='test', user_id='', password='test', region_name='', api_version={'nfv-orchestration': '1.0'}, auth_strategy='keystone', service_type='nfv-orchestration', endpoint_type='publicURL', insecure=False, ca_cert=None, log_credentials=True, session=auth_session, auth=auth_session.auth) def test_build_option_parser(self): tacker_shell = openstack_shell.TackerShell(DEFAULT_API_VERSION) result = tacker_shell.build_option_parser('descr', DEFAULT_API_VERSION) self.assertIsInstance(result, argparse.ArgumentParser) @mock.patch.object(openstack_shell.TackerShell, 'run') def test_main_with_unicode(self, mock_run): mock_run.return_value = 0 unicode_text = '\u7f51\u7edc' argv = ['net-list', unicode_text, unicode_text.encode('utf-8')] ret = openstack_shell.main(argv=argv) mock_run.assert_called_once_with(['net-list', unicode_text, unicode_text]) self.assertEqual(0, ret) def test_endpoint_option(self): shell = openstack_shell.TackerShell(DEFAULT_API_VERSION) parser = shell.build_option_parser('descr', DEFAULT_API_VERSION) # Neither $OS_ENDPOINT_TYPE nor --endpoint-type namespace = parser.parse_args([]) 
self.assertEqual('publicURL', namespace.endpoint_type) # --endpoint-type but not $OS_ENDPOINT_TYPE namespace = parser.parse_args(['--endpoint-type=admin']) self.assertEqual('admin', namespace.endpoint_type) def test_endpoint_environment_variable(self): fixture = fixtures.EnvironmentVariable("OS_ENDPOINT_TYPE", "public") self.useFixture(fixture) shell = openstack_shell.TackerShell(DEFAULT_API_VERSION) parser = shell.build_option_parser('descr', DEFAULT_API_VERSION) # $OS_ENDPOINT_TYPE but not --endpoint-type namespace = parser.parse_args([]) self.assertEqual("public", namespace.endpoint_type) # --endpoint-type and $OS_ENDPOINT_TYPE namespace = parser.parse_args(['--endpoint-type=admin']) self.assertEqual('admin', namespace.endpoint_type) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/test_ssl.py0000664000175000017500000000605400000000000024534 0ustar00zuulzuul00000000000000# Copyright (C) 2013 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from unittest import mock import fixtures from keystoneclient import session import requests import testtools from tackerclient import client from tackerclient.common import clientmanager from tackerclient.common import exceptions from tackerclient import shell as openstack_shell AUTH_TOKEN = 'test_token' END_URL = 'test_url' METHOD = 'GET' URL = 'http://test.test:1234/v1.0/' CA_CERT = '/tmp/test/path' DEFAULT_API_VERSION = '1.0' class TestSSL(testtools.TestCase): def setUp(self): super(TestSSL, self).setUp() self.useFixture(fixtures.EnvironmentVariable('OS_TOKEN', AUTH_TOKEN)) self.useFixture(fixtures.EnvironmentVariable('OS_URL', END_URL)) self.addCleanup(mock.patch.stopall) def _test_verify_client_manager(self, cacert): with mock.patch.object(session, 'Session'), \ mock.patch.object(clientmanager, 'ClientManager') as mock_cmgr: mock_cmgr.return_value = 0 shell = openstack_shell.TackerShell(DEFAULT_API_VERSION) shell.options = mock.Mock() auth_session = shell._get_keystone_session() shell.run(cacert) mock_cmgr.assert_called_with( api_version={'nfv-orchestration': '1.0'}, auth=auth_session.auth, auth_strategy='keystone', auth_url='', ca_cert=CA_CERT, endpoint_type='publicURL', insecure=False, log_credentials=True, password='', raise_errors=False, region_name='', retries=0, service_type='nfv-orchestration', session=auth_session, tenant_id='', tenant_name='', timeout=None, token='test_token', url='test_url', user_id='', username='') def test_ca_cert_passed(self): cacert = ['--os-cacert', CA_CERT] self._test_verify_client_manager(cacert) def test_ca_cert_passed_as_env_var(self): self.useFixture(fixtures.EnvironmentVariable('OS_CACERT', CA_CERT)) self._test_verify_client_manager([]) @mock.patch.object(client.HTTPClient, 'request') def test_proper_exception_is_raised_when_cert_validation_fails(self, mock_req): http = client.HTTPClient(token=AUTH_TOKEN, endpoint_url=END_URL) mock_req.side_effect = 
requests.exceptions.SSLError() self.assertRaises( exceptions.SslCertificateValidationError, http._cs_request, URL, METHOD ) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/test_utils.py0000664000175000017500000001174200000000000025073 0ustar00zuulzuul00000000000000# Copyright (C) 2013 Yahoo! Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import testtools from tackerclient.common import exceptions from tackerclient.common import utils class TestUtils(testtools.TestCase): def test_string_to_bool_true(self): self.assertTrue(utils.str2bool('true')) def test_string_to_bool_false(self): self.assertFalse(utils.str2bool('false')) def test_string_to_bool_None(self): self.assertIsNone(utils.str2bool(None)) def test_string_to_dictionary(self): input_str = 'key1=value1,key2=value2' expected = {'key1': 'value1', 'key2': 'value2'} self.assertEqual(expected, utils.str2dict(input_str)) def test_none_string_to_dictionary(self): input_str = '' expected = {} self.assertEqual(expected, utils.str2dict(input_str)) input_str = None expected = {} self.assertEqual(expected, utils.str2dict(input_str)) def test_get_dict_item_properties(self): item = {'name': 'test_name', 'id': 'test_id'} fields = ('name', 'id') actual = utils.get_item_properties(item=item, fields=fields) self.assertEqual(('test_name', 'test_id'), actual) def test_get_object_item_properties_mixed_case_fields(self): class Fake(object): def __init__(self): self.id = 'test_id' self.name = 'test_name' self.test_user = 'test' fields = ('name', 'id', 'test user') mixed_fields = ('test user', 'ID') item = Fake() actual = utils.get_item_properties(item, fields, mixed_fields) self.assertEqual(('test_name', 'test_id', 'test'), actual) def test_get_object_item_desired_fields_differ_from_item(self): class Fake(object): def __init__(self): self.id = 'test_id_1' self.name = 'test_name' self.test_user = 'test' fields = ('name', 'id', 'test user') item = Fake() actual = utils.get_item_properties(item, fields) self.assertNotEqual(('test_name', 'test_id', 'test'), actual) def test_get_object_item_desired_fields_is_empty(self): class Fake(object): def __init__(self): self.id = 'test_id_1' self.name = 'test_name' self.test_user = 'test' fields = [] item = Fake() actual = utils.get_item_properties(item, fields) self.assertEqual((), actual) def test_get_object_item_with_formatters(self): class Fake(object): def __init__(self): self.id = 'test_id' self.name = 'test_name' self.test_user = 'test' class FakeCallable(object): def __call__(self, *args, **kwargs): return 'pass' fields = ('name', 'id', 'test user', 'is_public') formatters = {'is_public': FakeCallable()} item = Fake() act = utils.get_item_properties(item, fields, formatters=formatters) self.assertEqual(('test_name', 'test_id', 'test', 'pass'), act) class ImportClassTestCase(testtools.TestCase): def test_get_client_class_invalid_version(self): self.assertRaises( exceptions.UnsupportedVersion, 
utils.get_client_class, 'image', '2', {'image': '2'}) class ContainsKeyValue(object): """Checks whether a key/value pair is in a dict parameter. The ContainsKeyValue class is a helper for mock.assert_*() method. It enables strict check than the built in mock.ANY helper, and is the equivalent of the mox.ContainsKeyValue() function from the legacy mox library Example usage could be: mock_some_method.assert_called_once_with( "hello", ContainsKeyValue('foo', bar), mock.ANY, "world", ContainsKeyValue('hello', world)) """ def __init__(self, wantkey, wantvalue): self.wantkey = wantkey self.wantvalue = wantvalue def __eq__(self, other): try: return other[self.wantkey] == self.wantvalue except (KeyError, TypeError): return False def __ne__(self, other): try: return other[self.wantkey] != self.wantvalue except (KeyError, TypeError): return True def __repr__(self): return "" ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/test_validators.py0000664000175000017500000001000300000000000026070 0ustar00zuulzuul00000000000000# Copyright 2014 NEC Corporation # All Rights Reserved # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import testtools from tackerclient.common import exceptions from tackerclient.common import validators class FakeParsedArgs(object): pass class ValidatorTest(testtools.TestCase): def _test_validate_int(self, attr_val, attr_name='attr1', min_value=1, max_value=10): obj = FakeParsedArgs() setattr(obj, attr_name, attr_val) ret = validators.validate_int_range(obj, attr_name, min_value, max_value) # Come here only if there is no exception. 
self.assertIsNone(ret) def _test_validate_int_error(self, attr_val, expected_msg, attr_name='attr1', expected_exc=None, min_value=1, max_value=10): if expected_exc is None: expected_exc = exceptions.CommandError e = self.assertRaises(expected_exc, self._test_validate_int, attr_val, attr_name, min_value, max_value) self.assertEqual(expected_msg, str(e)) def test_validate_int_min_max(self): self._test_validate_int(1) self._test_validate_int(10) self._test_validate_int('1') self._test_validate_int('10') self._test_validate_int('0x0a') self._test_validate_int_error( 0, 'attr1 "0" should be an integer [1:10].') self._test_validate_int_error( 11, 'attr1 "11" should be an integer [1:10].') self._test_validate_int_error( '0x10', 'attr1 "0x10" should be an integer [1:10].') def test_validate_int_min_only(self): self._test_validate_int(1, max_value=None) self._test_validate_int(10, max_value=None) self._test_validate_int(11, max_value=None) self._test_validate_int_error( 0, 'attr1 "0" should be an integer greater than or equal to 1.', max_value=None) def test_validate_int_max_only(self): self._test_validate_int(0, min_value=None) self._test_validate_int(1, min_value=None) self._test_validate_int(10, min_value=None) self._test_validate_int_error( 11, 'attr1 "11" should be an integer smaller than or equal to 10.', min_value=None) def test_validate_int_no_limit(self): self._test_validate_int(0, min_value=None, max_value=None) self._test_validate_int(1, min_value=None, max_value=None) self._test_validate_int(10, min_value=None, max_value=None) self._test_validate_int(11, min_value=None, max_value=None) self._test_validate_int_error( 'abc', 'attr1 "abc" should be an integer.', min_value=None, max_value=None) def _test_validate_subnet(self, attr_val, attr_name='attr1'): obj = FakeParsedArgs() setattr(obj, attr_name, attr_val) ret = validators.validate_ip_subnet(obj, attr_name) # Come here only if there is no exception. 
self.assertIsNone(ret) def test_validate_ip_subnet(self): self._test_validate_subnet('192.168.2.0/24') self._test_validate_subnet('192.168.2.3/20') self._test_validate_subnet('192.168.2.1') e = self.assertRaises(exceptions.CommandError, self._test_validate_subnet, '192.168.2.256') self.assertEqual('attr1 "192.168.2.256" is not a valid CIDR.', str(e)) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7546198 python-tackerclient-2.1.0/tackerclient/tests/unit/vm/0000775000175000017500000000000000000000000022737 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/vm/__init__.py0000664000175000017500000000000000000000000025036 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7546198 python-tackerclient-2.1.0/tackerclient/tests/unit/vm/samples/0000775000175000017500000000000000000000000024403 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/vm/samples/vim_config.yaml0000664000175000017500000000026300000000000027410 0ustar00zuulzuul00000000000000auth_url: 'http://1.2.3.4:5000' username: 'xyz' password: '12345' project_name: 'abc' project_domain_name: 'prj_domain_name' user_domain_name: 'user_domain_name' type: 'openstack'././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/vm/samples/vim_config_with_false_cert_verify.yaml0000664000175000017500000000031000000000000034207 0ustar00zuulzuul00000000000000auth_url: 'http://1.2.3.4:5000' username: 'xyz' password: '12345' project_name: 'abc' project_domain_name: 'prj_domain_name' user_domain_name: 'user_domain_name' cert_verify: 'False' type: 'openstack'././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/vm/samples/vim_config_without_auth_url.yaml0000664000175000017500000000022400000000000033073 0ustar00zuulzuul00000000000000username: 'xyz' password: '12345' project_name: 'abc' project_domain_name: 'prj_domain_name' user_domain_name: 'user_domain_name' type: 'openstack' ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/vm/samples/vim_k8s_bearer_token.yaml0000664000175000017500000000016100000000000031365 0ustar00zuulzuul00000000000000auth_url: 'https://1.2.3.4:6443' bearer_token: 'xyz' ssl_ca_cert: None project_name: 'default' type: 'kubernetes'././@PaxHeader0000000000000000000000000000020700000000000011454 xustar0000000000000000113 path=python-tackerclient-2.1.0/tackerclient/tests/unit/vm/samples/vim_k8s_bearer_token_without_auth_url.yaml 22 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/vm/samples/vim_k8s_bearer_token_without_auth_url.y0000664000175000017500000000012000000000000034354 0ustar00zuulzuul00000000000000bearer_token: 'xyz' ssl_ca_cert: None project_name: 'default' type: 'kubernetes'././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 
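
The validator tests above set an attribute on a bare parsed-args object and expect validate_int_range and validate_ip_subnet either to return None or to raise CommandError. A minimal sketch of that calling pattern, assuming only what the tests demonstrate; the argparse.Namespace object and attribute names here are illustrative stand-ins for the FakeParsedArgs used in the tests:

    import argparse

    from tackerclient.common import exceptions
    from tackerclient.common import validators

    # Any object carrying the named attribute works; argparse.Namespace
    # mirrors what a real parser would hand to the validators.
    parsed_args = argparse.Namespace(retries='3', cidr='192.168.2.0/24')

    # Returns None when the value parses as an integer within [0, 10].
    validators.validate_int_range(parsed_args, 'retries', 0, 10)

    # Returns None for a valid CIDR or plain address.
    validators.validate_ip_subnet(parsed_args, 'cidr')

    try:
        bad = argparse.Namespace(retries='abc')
        validators.validate_int_range(bad, 'retries', 0, 10)
    except exceptions.CommandError as e:
        # e.g. 'retries "abc" should be an integer [0:10].'
        print(e)
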
python-tackerclient-2.1.0/tackerclient/tests/unit/vm/samples/vim_k8s_config.yaml0000664000175000017500000000020300000000000030167 0ustar00zuulzuul00000000000000auth_url: 'https://1.2.3.4:6443' username: 'xyz' password: '12345' ssl_ca_cert: 'abcxyz' project_name: 'default' type: 'kubernetes'././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/vm/samples/vim_k8s_config_without_auth_url.yaml0000664000175000017500000000014200000000000033657 0ustar00zuulzuul00000000000000username: 'xyz' password: '12345' ssl_ca_cert: 'abcxyz' project_name: 'default' type: 'kubernetes'././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/vm/test_cli10_vim.py0000664000175000017500000002011600000000000026133 0ustar00zuulzuul00000000000000# Copyright 2015-2016 Brocade Communications Systems Inc # All Rights Reserved. # # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import sys from tackerclient.common import exceptions from tackerclient.common import utils from tackerclient.tacker.v1_0.nfvo import vim from tackerclient.tests.unit import test_cli10 API_VERSION = "1.0" FORMAT = 'json' TOKEN = 'testtoken' ENDURL = 'localurl' class CLITestV10VIMJSON(test_cli10.CLITestV10Base): _RESOURCE = 'vim' _RESOURCES = 'vims' def setUp(self): plurals = {'vims': 'vim'} super(CLITestV10VIMJSON, self).setUp(plurals=plurals) self.vim_project = { 'name': 'abc', 'project_domain_name': 'prj_domain_name'} self.auth_cred = {'username': 'xyz', 'password': '12345', 'user_domain_name': 'user_domain_name', 'cert_verify': 'True'} self.auth_url = 'http://1.2.3.4:5000' self.type = 'openstack' def test_register_vim_all_params(self): cmd = vim.CreateVIM(test_cli10.MyApp(sys.stdout), None) name = 'my-name' my_id = 'my-id' description = 'Vim Description' vim_config = utils.get_file_path( 'tests/unit/vm/samples/vim_config.yaml') args = [ name, '--config-file', vim_config, '--description', description] position_names = ['auth_cred', 'vim_project', 'auth_url', 'type'] position_values = [self.auth_cred, self.vim_project, self.auth_url, self.type] extra_body = {'type': 'openstack', 'name': name, 'description': description, 'is_default': False} self._test_create_resource(self._RESOURCE, cmd, None, my_id, args, position_names, position_values, extra_body=extra_body) def test_register_vim_with_false_cert_verify(self): cmd = vim.CreateVIM(test_cli10.MyApp(sys.stdout), None) name = 'my-name' my_id = 'my-id' # change cert_verify to False self.auth_cred = {'username': 'xyz', 'password': '12345', 'user_domain_name': 'user_domain_name', 'cert_verify': 'False'} description = 'Vim Description' vim_config = utils.get_file_path( 'tests/unit/vm/samples/vim_config_with_false_cert_verify.yaml') args = [ name, '--config-file', vim_config, '--description', description] position_names = ['auth_cred', 'vim_project', 'auth_url', 'type'] position_values = [self.auth_cred, 
self.vim_project, self.auth_url, self.type] extra_body = {'type': 'openstack', 'name': name, 'description': description, 'is_default': False} self._test_create_resource(self._RESOURCE, cmd, None, my_id, args, position_names, position_values, extra_body=extra_body) def test_register_vim_with_no_auth_url(self): cmd = vim.CreateVIM(test_cli10.MyApp(sys.stdout), None) my_id = 'my-id' name = 'test_vim' description = 'Vim Description' vim_config = utils.get_file_path( 'tests/unit/vm/samples/vim_config_without_auth_url.yaml') args = [ name, '--config-file', vim_config, '--description', description] position_names = ['auth_cred', 'vim_project', 'auth_url', 'type'] position_values = [self.auth_cred, self.vim_project, self.auth_url, self.type] extra_body = {'type': 'openstack', 'name': name, 'description': description, 'is_default': False} message = 'Auth URL must be specified' ex = self.assertRaises(exceptions.TackerClientException, self._test_create_resource, self._RESOURCE, cmd, None, my_id, args, position_names, position_values, extra_body=extra_body) self.assertEqual(message, ex.message) self.assertEqual(404, ex.status_code) def test_register_vim_with_mandatory_params(self): cmd = vim.CreateVIM(test_cli10.MyApp(sys.stdout), None) name = 'my-name' my_id = 'my-id' vim_config = utils.get_file_path( 'tests/unit/vm/samples/vim_config.yaml') args = [ name, '--config-file', vim_config, ] position_names = ['auth_cred', 'vim_project', 'auth_url', 'type'] position_values = [ self.auth_cred, self.vim_project, self.auth_url, self.type ] extra_body = {'type': 'openstack', 'name': name, 'is_default': False} self._test_create_resource(self._RESOURCE, cmd, name, my_id, args, position_names, position_values, extra_body=extra_body) def test_list_vims(self): cmd = vim.ListVIM(test_cli10.MyApp(sys.stdout), None) self._test_list_resources(self._RESOURCES, cmd, True) def test_show_vim_id(self): cmd = vim.ShowVIM(test_cli10.MyApp(sys.stdout), None) args = ['--fields', 'id', self.test_id] self._test_show_resource(self._RESOURCE, cmd, self.test_id, args, ['id']) def test_show_vim_id_name(self): cmd = vim.ShowVIM(test_cli10.MyApp(sys.stdout), None) args = ['--fields', 'id', '--fields', 'name', self.test_id] self._test_show_resource(self._RESOURCE, cmd, self.test_id, args, ['id', 'name']) def test_update_vim_all_params(self): cmd = vim.UpdateVIM(test_cli10.MyApp(sys.stdout), None) update_config = utils.get_file_path( 'tests/unit/vm/samples/vim_config_without_auth_url.yaml') my_id = 'my-id' name = 'new_name' description = 'new_description' is_default = 'True' args = [ my_id, '--config-file', str(update_config), '--name', name, '--description', description, '--is_default', is_default] extra_fields = {'vim_project': self.vim_project, 'auth_cred': self.auth_cred, 'is_default': 'True', 'name': name, 'description': description} self._test_update_resource(self._RESOURCE, cmd, my_id, args, extra_fields) def test_update_vim_with_mandatory_params(self): cmd = vim.UpdateVIM(test_cli10.MyApp(sys.stdout), None) update_config = utils.get_file_path( 'tests/unit/vm/samples/vim_config_without_auth_url.yaml') my_id = 'my-id' args = [ my_id, '--config-file', str(update_config)] extra_fields = {'vim_project': self.vim_project, 'auth_cred': self.auth_cred} self._test_update_resource(self._RESOURCE, cmd, my_id, args, extra_fields) def test_delete_vim(self): cmd = vim.DeleteVIM(test_cli10.MyApp(sys.stdout), None) my_id = 'my-id' args = [my_id] self._test_delete_resource(self._RESOURCE, cmd, my_id, args) def test_multi_delete_vim(self): cmd = 
vim.DeleteVIM(test_cli10.MyApp(sys.stdout), None) vim_ids = 'my-id1 my-id2 my-id3' args = [vim_ids] self._test_delete_resource(self._RESOURCE, cmd, vim_ids, args) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/vm/test_cli10_vim_k8s.py0000664000175000017500000001554300000000000026730 0ustar00zuulzuul00000000000000# Copyright 2015-2016 Brocade Communications Systems Inc # All Rights Reserved. # # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import sys from tackerclient.common import exceptions from tackerclient.common import utils from tackerclient.tacker.v1_0.nfvo import vim from tackerclient.tests.unit import test_cli10 API_VERSION = "1.0" FORMAT = 'json' TOKEN = 'testtoken' ENDURL = 'localurl' class CLITestV10VIMJSON(test_cli10.CLITestV10Base): _RESOURCE = 'vim' _RESOURCES = 'vims' def setUp(self): plurals = {'vims': 'vim'} super(CLITestV10VIMJSON, self).setUp(plurals=plurals) self.vim_project = {'name': 'default'} self.auth_cred = {'username': 'xyz', 'password': '12345', 'ssl_ca_cert': 'abcxyz'} self.auth_url = 'https://1.2.3.4:6443' self.type = 'kubernetes' def test_register_vim_all_params(self): cmd = vim.CreateVIM(test_cli10.MyApp(sys.stdout), None) name = 'my-name' my_id = 'my-id' description = 'Vim Description' vim_config = utils.get_file_path( 'tests/unit/vm/samples/vim_k8s_config.yaml') args = [ name, '--config-file', vim_config, '--description', description] position_names = ['auth_cred', 'vim_project', 'auth_url', 'type'] position_values = [self.auth_cred, self.vim_project, self.auth_url, self.type] extra_body = {'type': 'kubernetes', 'name': name, 'description': description, 'is_default': False} self._test_create_resource(self._RESOURCE, cmd, None, my_id, args, position_names, position_values, extra_body=extra_body) def test_register_vim_with_no_auth_url(self): cmd = vim.CreateVIM(test_cli10.MyApp(sys.stdout), None) my_id = 'my-id' name = 'test_vim' description = 'Vim Description' vim_config = utils.get_file_path( 'tests/unit/vm/samples/vim_k8s_config_without_auth_url.yaml') args = [ name, '--config-file', vim_config, '--description', description] position_names = ['auth_cred', 'vim_project', 'auth_url', 'type'] position_values = [self.auth_cred, self.vim_project, self.auth_url, self.type] extra_body = {'type': 'kubernetes', 'name': name, 'description': description, 'is_default': False} message = 'Auth URL must be specified' ex = self.assertRaises(exceptions.TackerClientException, self._test_create_resource, self._RESOURCE, cmd, None, my_id, args, position_names, position_values, extra_body=extra_body) self.assertEqual(message, ex.message) self.assertEqual(404, ex.status_code) def test_register_vim_with_mandatory_params(self): cmd = vim.CreateVIM(test_cli10.MyApp(sys.stdout), None) name = 'my-name' my_id = 'my-id' vim_config = utils.get_file_path( 'tests/unit/vm/samples/vim_k8s_config.yaml') args = [ name, '--config-file', vim_config, ] position_names = ['auth_cred', 'vim_project', 
'auth_url', 'type'] position_values = [ self.auth_cred, self.vim_project, self.auth_url, self.type ] extra_body = {'type': 'kubernetes', 'name': name, 'is_default': False} self._test_create_resource(self._RESOURCE, cmd, name, my_id, args, position_names, position_values, extra_body=extra_body) def test_list_vims(self): cmd = vim.ListVIM(test_cli10.MyApp(sys.stdout), None) self._test_list_resources(self._RESOURCES, cmd, True) def test_show_vim_id(self): cmd = vim.ShowVIM(test_cli10.MyApp(sys.stdout), None) args = ['--fields', 'id', self.test_id] self._test_show_resource(self._RESOURCE, cmd, self.test_id, args, ['id']) def test_show_vim_id_name(self): cmd = vim.ShowVIM(test_cli10.MyApp(sys.stdout), None) args = ['--fields', 'id', '--fields', 'name', self.test_id] self._test_show_resource(self._RESOURCE, cmd, self.test_id, args, ['id', 'name']) def test_update_vim_all_params(self): cmd = vim.UpdateVIM(test_cli10.MyApp(sys.stdout), None) update_config = utils.get_file_path( 'tests/unit/vm/samples/vim_k8s_config_without_auth_url.yaml') my_id = 'my-id' name = 'new_name' name = 'new_name' description = 'new_description' is_default = 'True' args = [ my_id, '--config-file', str(update_config), '--name', name, '--description', description, '--is_default', is_default] extra_fields = {'vim_project': self.vim_project, 'auth_cred': self.auth_cred, 'is_default': 'True', 'name': name, 'description': description} self._test_update_resource(self._RESOURCE, cmd, my_id, args, extra_fields) def test_update_vim_with_mandatory_params(self): cmd = vim.UpdateVIM(test_cli10.MyApp(sys.stdout), None) update_config = utils.get_file_path( 'tests/unit/vm/samples/vim_k8s_config_without_auth_url.yaml') my_id = 'my-id' args = [ my_id, '--config-file', str(update_config)] extra_fields = {'vim_project': self.vim_project, 'auth_cred': self.auth_cred} self._test_update_resource(self._RESOURCE, cmd, my_id, args, extra_fields) def test_delete_vim(self): cmd = vim.DeleteVIM(test_cli10.MyApp(sys.stdout), None) my_id = 'my-id' args = [my_id] self._test_delete_resource(self._RESOURCE, cmd, my_id, args) def test_multi_delete_vim(self): cmd = vim.DeleteVIM(test_cli10.MyApp(sys.stdout), None) vim_ids = 'my-id1 my-id2 my-id3' args = [vim_ids] self._test_delete_resource(self._RESOURCE, cmd, vim_ids, args) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/vm/test_cli10_vim_k8s_with_bearer_token.py0000664000175000017500000001547200000000000032504 0ustar00zuulzuul00000000000000# Copyright 2015-2016 Brocade Communications Systems Inc # All Rights Reserved. # # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import sys from tackerclient.common import exceptions from tackerclient.common import utils from tackerclient.tacker.v1_0.nfvo import vim from tackerclient.tests.unit import test_cli10 API_VERSION = "1.0" FORMAT = 'json' TOKEN = 'testtoken' ENDURL = 'localurl' class CLITestV10VIMJSON(test_cli10.CLITestV10Base): _RESOURCE = 'vim' _RESOURCES = 'vims' def setUp(self): plurals = {'vims': 'vim'} super(CLITestV10VIMJSON, self).setUp(plurals=plurals) self.vim_project = {'name': 'default'} self.auth_cred = {'bearer_token': 'xyz', 'ssl_ca_cert': "None"} self.auth_url = 'https://1.2.3.4:6443' self.type = 'kubernetes' def test_register_vim_all_params(self): cmd = vim.CreateVIM(test_cli10.MyApp(sys.stdout), None) name = 'my-name' my_id = 'my-id' description = 'Vim Description' vim_config = utils.get_file_path( 'tests/unit/vm/samples/vim_k8s_bearer_token.yaml') args = [ name, '--config-file', vim_config, '--description', description] position_names = ['auth_cred', 'vim_project', 'auth_url', 'type'] position_values = [self.auth_cred, self.vim_project, self.auth_url, self.type] extra_body = {'type': 'kubernetes', 'name': name, 'description': description, 'is_default': False} self._test_create_resource(self._RESOURCE, cmd, None, my_id, args, position_names, position_values, extra_body=extra_body) def test_register_vim_with_no_auth_url(self): cmd = vim.CreateVIM(test_cli10.MyApp(sys.stdout), None) my_id = 'my-id' name = 'test_vim' description = 'Vim Description' vim_config = utils.get_file_path( 'tests/unit/vm/samples/vim_k8s_bearer_token_without_auth_url.yaml') args = [ name, '--config-file', vim_config, '--description', description] position_names = ['auth_cred', 'vim_project', 'auth_url', 'type'] position_values = [self.auth_cred, self.vim_project, self.auth_url, self.type] extra_body = {'type': 'kubernetes', 'name': name, 'description': description, 'is_default': False} message = 'Auth URL must be specified' ex = self.assertRaises(exceptions.TackerClientException, self._test_create_resource, self._RESOURCE, cmd, None, my_id, args, position_names, position_values, extra_body=extra_body) self.assertEqual(message, ex.message) self.assertEqual(404, ex.status_code) def test_register_vim_with_mandatory_params(self): cmd = vim.CreateVIM(test_cli10.MyApp(sys.stdout), None) name = 'my-name' my_id = 'my-id' vim_config = utils.get_file_path( 'tests/unit/vm/samples/vim_k8s_bearer_token.yaml') args = [ name, '--config-file', vim_config, ] position_names = ['auth_cred', 'vim_project', 'auth_url', 'type'] position_values = [ self.auth_cred, self.vim_project, self.auth_url, self.type ] extra_body = {'type': 'kubernetes', 'name': name, 'is_default': False} self._test_create_resource(self._RESOURCE, cmd, name, my_id, args, position_names, position_values, extra_body=extra_body) def test_list_vims(self): cmd = vim.ListVIM(test_cli10.MyApp(sys.stdout), None) self._test_list_resources(self._RESOURCES, cmd, True) def test_show_vim_id(self): cmd = vim.ShowVIM(test_cli10.MyApp(sys.stdout), None) args = ['--fields', 'id', self.test_id] self._test_show_resource(self._RESOURCE, cmd, self.test_id, args, ['id']) def test_show_vim_id_name(self): cmd = vim.ShowVIM(test_cli10.MyApp(sys.stdout), None) args = ['--fields', 'id', '--fields', 'name', self.test_id] self._test_show_resource(self._RESOURCE, cmd, self.test_id, args, ['id', 'name']) def test_update_vim_all_params(self): cmd = vim.UpdateVIM(test_cli10.MyApp(sys.stdout), None) update_config = utils.get_file_path( 
'tests/unit/vm/samples/vim_k8s_bearer_token_without_auth_url.yaml') my_id = 'my-id' name = 'new_name' description = 'new_description' is_default = 'True' args = [ my_id, '--config-file', str(update_config), '--name', name, '--description', description, '--is_default', is_default] extra_fields = {'vim_project': self.vim_project, 'auth_cred': self.auth_cred, 'is_default': 'True', 'name': name, 'description': description} self._test_update_resource(self._RESOURCE, cmd, my_id, args, extra_fields) def test_update_vim_with_mandatory_params(self): cmd = vim.UpdateVIM(test_cli10.MyApp(sys.stdout), None) update_config = utils.get_file_path( 'tests/unit/vm/samples/vim_k8s_bearer_token_without_auth_url.yaml') my_id = 'my-id' args = [ my_id, '--config-file', str(update_config)] extra_fields = {'vim_project': self.vim_project, 'auth_cred': self.auth_cred} self._test_update_resource(self._RESOURCE, cmd, my_id, args, extra_fields) def test_delete_vim(self): cmd = vim.DeleteVIM(test_cli10.MyApp(sys.stdout), None) my_id = 'my-id' args = [my_id] self._test_delete_resource(self._RESOURCE, cmd, my_id, args) def test_multi_delete_vim(self): cmd = vim.DeleteVIM(test_cli10.MyApp(sys.stdout), None) vim_ids = 'my-id1 my-id2 my-id3' args = [vim_ids] self._test_delete_resource(self._RESOURCE, cmd, vim_ids, args) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/tests/unit/vm/test_vim_utils.py0000664000175000017500000001776400000000000026402 0ustar00zuulzuul00000000000000# Copyright 2016 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
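
The test module that follows exercises vim_utils.args2body_vim, the helper that turns a parsed --config-file dictionary into the vim body sent to the server. A minimal sketch of the transformation those tests verify, substituting literal values from the sample vim_config.yaml for the mock sentinels used in the tests:

    from tackerclient.tacker.v1_0.nfvo import vim_utils

    config_param = {'project_name': 'abc',
                    'project_domain_name': 'prj_domain_name',
                    'username': 'xyz',
                    'password': '12345',
                    'user_domain_name': 'user_domain_name',
                    'cert_verify': 'True',
                    'type': 'openstack'}
    vim = {}
    vim_utils.args2body_vim(config_param.copy(), vim)

    # Project keys move under 'vim_project', the remaining credential keys
    # become 'auth_cred', and 'type' is copied through unchanged.
    assert vim == {'auth_cred': {'username': 'xyz',
                                 'password': '12345',
                                 'user_domain_name': 'user_domain_name',
                                 'cert_verify': 'True'},
                   'vim_project': {'name': 'abc',
                                   'project_domain_name': 'prj_domain_name'},
                   'type': 'openstack'}
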
import testtools from tackerclient.common import exceptions from tackerclient.tacker.v1_0.nfvo import vim_utils from unittest.mock import sentinel class TestVIMUtils(testtools.TestCase): def test_args2body_vim(self): config_param = {'project_name': sentinel.prj_name, 'username': sentinel.usrname1, 'password': sentinel.password1, 'project_domain_name': sentinel.prj_domain_name1, 'user_domain_name': sentinel.user_domain.name, 'cert_verify': 'True', 'type': 'openstack'} vim = {} auth_cred = config_param.copy() auth_cred.pop('project_name') auth_cred.pop('project_domain_name') auth_cred.pop('type') expected_vim = {'auth_cred': auth_cred, 'vim_project': {'name': sentinel.prj_name, 'project_domain_name': sentinel.prj_domain_name1}, 'type': 'openstack'} vim_utils.args2body_vim(config_param.copy(), vim) self.assertEqual(expected_vim, vim) def test_args2body_vim_extra(self): auth_cred = {'username': sentinel.usrname1, 'password': sentinel.password1, 'user_domain_name': sentinel.user_domain.name, 'cert_verify': 'True'} config_param = {'project_name': sentinel.prj_name, 'project_domain_name': sentinel.prj_domain_name1, 'type': 'openstack', 'extra': {'area': 'area_A@region_A'}, **auth_cred} vim = {} expected_vim = {'auth_cred': auth_cred, 'vim_project': {'name': sentinel.prj_name, 'project_domain_name': sentinel.prj_domain_name1}, 'type': 'openstack', 'extra': {'area': 'area_A@region_A'}} vim_utils.args2body_vim(config_param.copy(), vim) self.assertEqual(expected_vim, vim) def test_args2body_kubernetes_vim(self): config_param = {'username': sentinel.usrname1, 'password': sentinel.password1, 'ssl_ca_cert': 'abcxyz', 'project_name': sentinel.prj_name, 'type': 'kubernetes'} vim = {} auth_cred = config_param.copy() auth_cred.pop('project_name') auth_cred.pop('type') expected_vim = {'auth_cred': auth_cred, 'vim_project': {'name': sentinel.prj_name}, 'type': 'kubernetes'} vim_utils.args2body_vim(config_param.copy(), vim) self.assertEqual(expected_vim, vim) def test_args2body_kubernetes_vim_bearer(self): config_param = {'bearer_token': sentinel.bearer_token, 'ssl_ca_cert': "None", 'project_name': sentinel.prj_name, 'type': 'kubernetes'} vim = {} auth_cred = config_param.copy() auth_cred.pop('project_name') auth_cred.pop('type') expected_vim = {'auth_cred': auth_cred, 'vim_project': {'name': sentinel.prj_name}, 'type': 'kubernetes'} vim_utils.args2body_vim(config_param.copy(), vim) self.assertEqual(expected_vim, vim) def test_args2body_kubernetes_vim_oidc(self): config_param = {'oidc_token_url': sentinel.oidc_token_url, 'username': sentinel.username, 'password': sentinel.password, 'client_id': sentinel.client_id, 'client_secret': sentinel.client_secret, 'ssl_ca_cert': "None", 'project_name': sentinel.prj_name, 'type': 'kubernetes'} vim = {} auth_cred = config_param.copy() auth_cred.pop('project_name') auth_cred.pop('type') expected_vim = {'auth_cred': auth_cred, 'vim_project': {'name': sentinel.prj_name}, 'type': 'kubernetes'} vim_utils.args2body_vim(config_param.copy(), vim) self.assertEqual(expected_vim, vim) def test_args2body_kubernetes_vim_extra(self): extra_param = { 'helm_info': { 'masternode_ip': [ '192.168.10.110' ], 'masternode_username': 'helm_user', 'masternode_password': 'helm_pass' }} config_param = {'username': sentinel.usrname1, 'password': sentinel.password1, 'ssl_ca_cert': 'abcxyz', 'project_name': sentinel.prj_name, 'type': 'kubernetes', 'extra': extra_param} vim = {} auth_cred = config_param.copy() auth_cred.pop('project_name') auth_cred.pop('type') auth_cred.pop('extra') expected_vim = 
{'auth_cred': auth_cred, 'vim_project': {'name': sentinel.prj_name}, 'type': 'kubernetes', 'extra': extra_param} vim_utils.args2body_vim(config_param.copy(), vim) self.assertEqual(expected_vim, vim) def test_args2body_kubernetes_vim_oidc_no_username(self): config_param = {'oidc_token_url': sentinel.oidc_token_url, 'password': sentinel.password, 'client_id': sentinel.client_id, 'client_secret': sentinel.client_secret, 'ssl_ca_cert': "None", 'project_name': sentinel.prj_name, 'type': 'kubernetes'} vim = {} self.assertRaises(exceptions.TackerClientException, vim_utils.args2body_vim, config_param, vim) def test_args2body_vim_no_project(self): config_param = {'username': sentinel.usrname1, 'password': sentinel.password1, 'user_domain_name': sentinel.user_domain.name, 'cert_verify': 'True', 'type': 'openstack'} vim = {} self.assertRaises(exceptions.TackerClientException, vim_utils.args2body_vim, config_param, vim) def test_validate_auth_url_with_port(self): auth_url = "http://localhost:8000/test" url_parts = vim_utils.validate_auth_url(auth_url) self.assertEqual('http', url_parts.scheme) self.assertEqual('localhost:8000', url_parts.netloc) self.assertEqual(8000, url_parts.port) def test_validate_auth_url_without_port(self): auth_url = "http://localhost/test" url_parts = vim_utils.validate_auth_url(auth_url) self.assertEqual('http', url_parts.scheme) self.assertEqual('localhost', url_parts.netloc) def test_validate_auth_url_exception(self): auth_url = "localhost/test" self.assertRaises(exceptions.TackerClientException, vim_utils.validate_auth_url, auth_url) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1715864642.7546198 python-tackerclient-2.1.0/tackerclient/v1_0/0000775000175000017500000000000000000000000020741 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/v1_0/__init__.py0000664000175000017500000000000000000000000023040 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1715864589.0 python-tackerclient-2.1.0/tackerclient/v1_0/client.py0000664000175000017500000011647500000000000022607 0ustar00zuulzuul00000000000000# Copyright 2012 OpenStack Foundation. # Copyright 2015 Hewlett-Packard Development Company, L.P. # All Rights Reserved # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
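
validate_auth_url, exercised at the end of the tests above, parses the configured auth URL and rejects values without a scheme. A small sketch of the behaviour those tests pin down, using the same URLs that appear in the tests:

    from tackerclient.common import exceptions
    from tackerclient.tacker.v1_0.nfvo import vim_utils

    url_parts = vim_utils.validate_auth_url('http://localhost:8000/test')
    assert url_parts.scheme == 'http'
    assert url_parts.netloc == 'localhost:8000'
    assert url_parts.port == 8000

    try:
        # No scheme, so the helper rejects the URL.
        vim_utils.validate_auth_url('localhost/test')
    except exceptions.TackerClientException:
        pass
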
# import logging import re import time import requests from urllib import parse as urlparse from tackerclient import client from tackerclient.common import exceptions from tackerclient.common import serializer from tackerclient.common import utils from tackerclient.i18n import _ _logger = logging.getLogger(__name__) DEFAULT_DESC_LENGTH = 25 DEFAULT_ERROR_REASON_LENGTH = 100 STATUS_CODE_MAP = { 400: "badRequest", 401: "unauthorized", 403: "forbidden", 404: "itemNotFound", 405: "badMethod", 409: "conflictingRequest", 413: "overLimit", 415: "badMediaType", 429: "overLimit", 501: "notImplemented", 503: "serviceUnavailable"} def exception_handler_v10(status_code, error_content): """Exception handler for API v1.0 client. This routine generates the appropriate Tacker exception according to the contents of the response body. :param status_code: HTTP error status code :param error_content: deserialized body of error response """ etsi_error_content = error_content error_dict = None if isinstance(error_content, dict): error_dict = error_content.get('TackerError') if not error_dict: error_content = error_content.get(STATUS_CODE_MAP.get(status_code), 'tackerFault') # Find real error type bad_tacker_error_flag = False if error_dict: # If Tacker key is found, it will definitely contain # a 'message' and 'type' keys? try: error_type = error_dict['type'] error_message = error_dict['message'] if error_dict['detail']: error_message += "\n" + error_dict['detail'] except Exception: bad_tacker_error_flag = True if not bad_tacker_error_flag: # If corresponding exception is defined, use it. client_exc = getattr(exceptions, '%sClient' % error_type, None) # Otherwise look up per status-code client exception if not client_exc: client_exc = exceptions.HTTP_EXCEPTION_MAP.get(status_code) if client_exc: raise client_exc(message=error_message, status_code=status_code) else: raise exceptions.TackerClientException( status_code=status_code, message=error_message) else: raise exceptions.TackerClientException(status_code=status_code, message=error_dict) else: message = None if isinstance(error_content, dict): message = error_content.get('message') if message: raise exceptions.TackerClientException(status_code=status_code, message=message) # ETSI error response if isinstance(etsi_error_content, dict): if etsi_error_content.get('status') and \ etsi_error_content.get('detail'): message = etsi_error_content.get('detail') raise exceptions.TackerClientException(status_code=status_code, message=message) # If we end up here the exception was not a tacker error msg = "%s-%s" % (status_code, error_content) raise exceptions.TackerClientException(status_code=status_code, message=msg) class APIParamsCall(object): """A Decorator to support formating and tenant overriding and filters.""" def __init__(self, function): self.function = function def __get__(self, instance, owner): def with_params(*args, **kwargs): _format = instance.format if 'format' in kwargs: instance.format = kwargs['format'] ret = self.function(instance, *args, **kwargs) instance.format = _format return ret return with_params class ClientBase(object): """Client for the OpenStack Tacker v1.0 API. :param string username: Username for authentication. (optional) :param string user_id: User ID for authentication. (optional) :param string password: Password for authentication. (optional) :param string token: Token for authentication. (optional) :param string tenant_name: Tenant name. (optional) :param string tenant_id: Tenant id. 
(optional) :param string auth_strategy: 'keystone' by default, 'noauth' for no authentication against keystone. (optional) :param string auth_url: Keystone service endpoint for authorization. :param string service_type: Network service type to pull from the keystone catalog (e.g. 'network') (optional) :param string endpoint_type: Network service endpoint type to pull from the keystone catalog (e.g. 'publicURL', 'internalURL', or 'adminURL') (optional) :param string region_name: Name of a region to select when choosing an endpoint from the service catalog. :param string endpoint_url: A user-supplied endpoint URL for the tacker service. Lazy-authentication is possible for API service calls if endpoint is set at instantiation.(optional) :param integer timeout: Allows customization of the timeout for client http requests. (optional) :param bool insecure: SSL certificate validation. (optional) :param bool log_credentials: Allow for logging of passwords or not. Defaults to False. (optional) :param string ca_cert: SSL CA bundle file to use. (optional) :param integer retries: How many times idempotent (GET, PUT, DELETE) requests to Tacker server should be retried if they fail (default: 0). :param bool raise_errors: If True then exceptions caused by connection failure are propagated to the caller. (default: True) :param session: Keystone client auth session to use. (optional) :param auth: Keystone auth plugin to use. (optional) """ # API has no way to report plurals, so we have to hard code them # This variable should be overridden by a child class. EXTED_PLURALS = {} def __init__(self, **kwargs): """Initialize a new client for the Tacker v1.0 API.""" super(ClientBase, self).__init__() self.retries = kwargs.pop('retries', 0) self.raise_errors = kwargs.pop('raise_errors', True) self.httpclient = client.construct_http_client(**kwargs) self.version = '1.0' self.format = 'json' self.action_prefix = "/v%s" % (self.version) self.retry_interval = 1 self.rel = None self.params = None def _handle_fault_response(self, status_code, response_body): # Create exception with HTTP status code and message _logger.debug("Error message: %s", response_body) # Add deserialized error message to exception arguments try: des_error_body = self.deserialize(response_body, status_code) except Exception: # If unable to deserialized body it is probably not a # Tacker error des_error_body = {'message': response_body} # Raise the appropriate exception exception_handler_v10(status_code, des_error_body) def build_action(self, action): action += ".%s" % self.format action = self.action_prefix + action return action def _build_params_query(self, params=None): flag_params = [] keyval_params = {} for key, value in params.items(): if value is None: flag_params.append(key) else: keyval_params[key] = value flags_encoded = utils.safe_encode_list(flag_params) \ if flag_params else "" keyval_encoded = utils.safe_encode_dict(keyval_params) \ if keyval_params else "" query = "" for flag in flags_encoded: query = query + urlparse.quote_plus(flag) + '&' query = query + urlparse.urlencode(keyval_encoded, doseq=1) return query.strip('&') def do_request(self, method, action, body=None, headers=None, params=None): action = self.build_action(action) # Add format and tenant_id if type(params) is dict and params: query = self._build_params_query(params) action += '?' + query if body or body == {}: body = self.serialize(body) if headers is None: # self.httpclient.do_request is not accept 'headers=None'. 
    def do_request(self, method, action, body=None,
                   headers=None, params=None):
        action = self.build_action(action)
        # Add query string parameters, if any
        if type(params) is dict and params:
            query = self._build_params_query(params)
            action += '?' + query
        if body or body == {}:
            body = self.serialize(body)
        if headers is None:
            # self.httpclient.do_request does not accept 'headers=None'.
            headers = {}

        resp, replybody = self.httpclient.do_request(
            action, method, body=body, headers=headers,
            content_type=self.content_type())

        if 'application/zip' == resp.headers.get('Content-Type'):
            self.format = 'zip'
        elif 'text/plain' == resp.headers.get('Content-Type'):
            self.format = 'text'
        elif 'artifacts' in action:
            self.format = 'any'
        else:
            self.format = 'json'

        url = None
        rel = None
        link = resp.headers.get('Link', None)
        if link is not None:
            url = re.findall('<(.*)>', link)[0]
            rel = re.findall('rel="(.*)"', link)[0]
        if rel == 'next':
            self.rel = 'next'
            query_str = urlparse.urlparse(url).query
            self.params = urlparse.parse_qs(query_str)

        status_code = resp.status_code
        if status_code in (requests.codes.ok,
                           requests.codes.created,
                           requests.codes.accepted,
                           requests.codes.no_content):
            return self.deserialize(replybody, status_code)
        else:
            if not replybody:
                replybody = resp.reason
            self._handle_fault_response(status_code, replybody)

    def get_auth_info(self):
        return self.httpclient.get_auth_info()

    def serialize(self, data):
        """Serializes a dictionary into JSON.

        A dictionary with a single key can be passed and it can contain any
        structure.
        """
        if data is None:
            return None
        elif self.format in ('zip', 'text'):
            return data
        elif type(data) is dict:
            return serializer.Serializer().serialize(data,
                                                     'application/json')
        else:
            raise Exception(_("Unable to serialize object of type = '%s'") %
                            type(data))

    def deserialize(self, data, status_code):
        """Deserializes a JSON string into a dictionary."""
        if status_code in (204, 202) or self.format in ('zip', 'text', 'any'):
            return data
        return serializer.Serializer().deserialize(
            data, 'application/json')['body']

    def content_type(self, _format=None):
        """Returns the mime-type for either 'json', 'text', or 'zip'.

        Defaults to the currently set format.
        """
        _format = _format or self.format
        if self.format == 'text':
            return "text/plain"
        elif self.format == 'both':
            return "text/plain,application/zip"
        else:
            return "application/%s" % (_format)

    def retry_request(self, method, action, body=None,
                      headers=None, params=None):
        """Call do_request with the default retry configuration.

        Only idempotent requests should retry failed connection attempts.

        :raises ConnectionFailed: if the maximum # of retries is exceeded
        """
        max_attempts = self.retries + 1
        for i in range(max_attempts):
            try:
                return self.do_request(method, action, body=body,
                                       headers=headers, params=params)
            except exceptions.ConnectionFailed:
                # Exception has already been logged by do_request()
                if i < self.retries:
                    _logger.debug('Retrying connection to Tacker service')
                    time.sleep(self.retry_interval)
                elif self.raise_errors:
                    raise

        if self.retries:
            msg = (_("Failed to connect to Tacker server after %d attempts")
                   % max_attempts)
        else:
            msg = _("Failed to connect to Tacker server")
        raise exceptions.ConnectionFailed(reason=msg)
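    # Illustrative sketch (not part of the upstream module): how the retry
    # settings interact.  With retries=3 a failed idempotent call is attempted
    # up to 4 times, sleeping retry_interval (1 second) between attempts;
    # POST requests are never retried (see post() below).
    #
    #     from tackerclient.v1_0 import client as tacker_client
    #     c = tacker_client.Client(retries=3, ...)   # '...' = auth arguments
    #     c.list_vims()          # GET, retried on ConnectionFailed
    #     c.create_vim(body)     # POST, never retried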
return self.do_request("POST", action, body=body, headers=headers, params=params) def put(self, action, body=None, headers=None, params=None): return self.retry_request("PUT", action, body=body, headers=headers, params=params) def patch(self, action, body=None, headers=None, params=None): return self.retry_request("PATCH", action, body=body, headers=headers, params=params) def list(self, collection, path, retrieve_all=True, headers=None, **params): if retrieve_all: res = [] for r in self._pagination(collection, path, headers, **params): if type(r) is list: res.extend(r) else: res.extend(r[collection]) return {collection: res} if collection else res else: return self._pagination(collection, path, headers, **params) def _pagination(self, collection, path, headers, **params): if params.get('page_reverse', False): linkrel = 'previous' else: linkrel = 'next' next = True while next: self.rel = None res = self.get(path, headers=headers, params=params) yield res next = False try: if type(res) is list: if self.rel == 'next': params = self.params next = True else: for link in res['%s_links' % collection]: if link['rel'] == linkrel: query_str = urlparse.urlparse(link['href']).query params = urlparse.parse_qs(query_str) next = True break except KeyError: break class LegacyClient(ClientBase): vims_path = '/vims' vim_path = '/vims/%s' # API has no way to report plurals, so we have to hard code them # EXTED_PLURALS = {} @APIParamsCall def show_vim(self, vim, **_params): return self.get(self.vim_path % vim, params=_params) _VIM = "vim" @APIParamsCall def create_vim(self, body): return self.post(self.vims_path, body=body) @APIParamsCall def delete_vim(self, vim): return self.delete(self.vim_path % vim) @APIParamsCall def update_vim(self, vim, body): return self.put(self.vim_path % vim, body=body) @APIParamsCall def list_vims(self, retrieve_all=True, **_params): return self.list('vims', self.vims_path, retrieve_all, **_params) class VnfPackageClient(ClientBase): """Client for vnfpackage APIs. Purpose of this class is to create required request url for vnfpackage APIs. 
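# Illustrative sketch (not part of the upstream module): list() either drains
# the pagination generator or hands it back to the caller.  A minimal sketch,
# assuming 'c' is an authenticated Client instance:
#
#     # one aggregated dict: {'vims': [...]}
#     all_vims = c.list_vims()
#
#     # page by page: each item yielded by the generator is one API response
#     for page in c.list_vims(retrieve_all=False):
#         handle(page['vims'])       # 'handle' is a hypothetical callback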
""" vnfpackages_path = '/vnfpkgm/v1/vnf_packages' vnfpackage_path = '/vnfpkgm/v1/vnf_packages/%s' vnfpackage_vnfd_path = '/vnfpkgm/v1/vnf_packages/%s/vnfd' vnfpackage_download_path = '/vnfpkgm/v1/vnf_packages/%s/package_content' vnfpakcage_artifact_path = '/vnfpkgm/v1/vnf_packages/%(id)s/artifacts/' \ '%(artifact_path)s' def build_action(self, action): return action @APIParamsCall def create_vnf_package(self, body): return self.post(self.vnfpackages_path, body=body) @APIParamsCall def list_vnf_packages(self, retrieve_all=True, **_params): vnf_packages = self.list("vnf_packages", self.vnfpackages_path, retrieve_all, **_params) return vnf_packages @APIParamsCall def show_vnf_package(self, vnf_package, **_params): return self.get(self.vnfpackage_path % vnf_package, params=_params) @APIParamsCall def delete_vnf_package(self, vnf_package): return self.delete(self.vnfpackage_path % vnf_package) @APIParamsCall def upload_vnf_package(self, vnf_package, file_data=None, **attrs): if attrs.get('url'): json = {'addressInformation': attrs.get('url')} for key in ['userName', 'password']: if attrs.get(key): json.update({key: attrs.get(key)}) return self.post( '{base_path}/{id}/package_content/upload_from_uri'.format( id=vnf_package, base_path=self.vnfpackages_path), body=json) else: self.format = 'zip' return self.put('{base_path}/{id}/package_content'.format( id=vnf_package, base_path=self.vnfpackages_path), body=file_data) @APIParamsCall def download_vnf_package(self, vnf_package): self.format = 'zip' return self.get(self.vnfpackage_download_path % vnf_package) @APIParamsCall def download_vnfd_from_vnf_package(self, vnf_package, accept): """Read VNFD of an on-boarded VNF Package. :param vnf_package: The value can be either the ID of a vnf package or a :class:`~openstack.nfv_orchestration.v1. vnf_package` instance. :param accept: Valid values are 'text/plain', 'application/zip' and 'both'. According to these values 'Accept' header will be set as 'text/plain', 'application/zip', 'text/plain,application/zip' respectively. :returns: If the VNFD is implemented in the form of multiple files, a ZIP file embedding these files shall be returned. If the VNFD is implemented as a single file, either that file or a ZIP file embedding that file shall be returned. """ if accept == 'text/plain': self.format = 'text' elif accept == 'application/zip': self.format = 'zip' else: self.format = 'both' return self.get(self.vnfpackage_vnfd_path % vnf_package) @APIParamsCall def download_artifact_from_vnf_package(self, vnf_package, artifact_path): return self.get(self.vnfpakcage_artifact_path % {'id': vnf_package, 'artifact_path': artifact_path}) @APIParamsCall def update_vnf_package(self, vnf_package, body): return self.patch(self.vnfpackage_path % vnf_package, body=body) class VnfLCMClient(ClientBase): """Client for vnflcm APIs. Purpose of this class is to create required request url for vnflcm APIs. 
""" def __init__(self, api_version, **kwargs): super(VnfLCMClient, self).__init__(**kwargs) self.headers = {'Version': '1.3.0'} sol_api_version = 'v1' if api_version == '2': self.headers = {'Version': '2.0.0'} sol_api_version = 'v2' self.vnf_instances_path = ( '/vnflcm/{}/vnf_instances'.format(sol_api_version)) self.vnf_instance_path = ( '/vnflcm/{}/vnf_instances/%s'.format(sol_api_version)) self.vnf_lcm_op_occurrences_path = ( '/vnflcm/{}/vnf_lcm_op_occs'.format(sol_api_version)) self.vnf_lcm_op_occs_path = ( '/vnflcm/{}/vnf_lcm_op_occs/%s'.format(sol_api_version)) self.lccn_subscriptions_path = ( '/vnflcm/{}/subscriptions'.format(sol_api_version)) self.lccn_subscription_path = ( '/vnflcm/{}/subscriptions/%s'.format(sol_api_version)) def build_action(self, action): return action @APIParamsCall def create_vnf_instance(self, body): return self.post(self.vnf_instances_path, body=body, headers=self.headers) @APIParamsCall def show_vnf_instance(self, vnf_id, **_params): return self.get(self.vnf_instance_path % vnf_id, headers=self.headers, params=_params) @APIParamsCall def list_vnf_instances(self, retrieve_all=True, **_params): vnf_instances = self.list(None, self.vnf_instances_path, retrieve_all, headers=self.headers, **_params) return vnf_instances @APIParamsCall def instantiate_vnf_instance(self, vnf_id, body): return self.post((self.vnf_instance_path + "/instantiate") % vnf_id, body=body, headers=self.headers) @APIParamsCall def heal_vnf_instance(self, vnf_id, body): return self.post((self.vnf_instance_path + "/heal") % vnf_id, body=body, headers=self.headers) @APIParamsCall def terminate_vnf_instance(self, vnf_id, body): return self.post((self.vnf_instance_path + "/terminate") % vnf_id, body=body, headers=self.headers) @APIParamsCall def delete_vnf_instance(self, vnf_id): return self.delete(self.vnf_instance_path % vnf_id, headers=self.headers) @APIParamsCall def update_vnf_instance(self, vnf_id, body): return self.patch(self.vnf_instance_path % vnf_id, body=body, headers=self.headers) @APIParamsCall def scale_vnf_instance(self, vnf_id, body): return self.post((self.vnf_instance_path + "/scale") % vnf_id, body=body, headers=self.headers) @APIParamsCall def rollback_vnf_instance(self, occ_id): return self.post((self.vnf_lcm_op_occs_path + "/rollback") % occ_id, headers=self.headers) @APIParamsCall def cancel_vnf_instance(self, occ_id, body): return self.post((self.vnf_lcm_op_occs_path + "/cancel") % occ_id, body=body) @APIParamsCall def fail_vnf_instance(self, occ_id): return self.post((self.vnf_lcm_op_occs_path + "/fail") % occ_id, headers=self.headers) @APIParamsCall def change_ext_conn_vnf_instance(self, vnf_id, body): return self.post((self.vnf_instance_path + "/change_ext_conn") % vnf_id, body=body, headers=self.headers) @APIParamsCall def change_vnfpkg_vnf_instance(self, vnf_id, body): # NOTE: it is only supported by V2-API. 
class VnfFMClient(ClientBase):
    headers = {'Version': '1.3.0'}

    vnf_fm_alarms_path = '/vnffm/v1/alarms'
    vnf_fm_alarm_path = '/vnffm/v1/alarms/%s'
    vnf_fm_subs_path = '/vnffm/v1/subscriptions'
    vnf_fm_sub_path = '/vnffm/v1/subscriptions/%s'

    def build_action(self, action):
        return action

    @APIParamsCall
    def list_vnf_fm_alarms(self, retrieve_all=True, **_params):
        vnf_fm_alarms = self.list(
            "vnf_fm_alarms", self.vnf_fm_alarms_path, retrieve_all,
            headers=self.headers, **_params)
        return vnf_fm_alarms

    @APIParamsCall
    def show_vnf_fm_alarm(self, vnf_fm_alarm_id):
        return self.get(
            self.vnf_fm_alarm_path % vnf_fm_alarm_id, headers=self.headers)

    @APIParamsCall
    def update_vnf_fm_alarm(self, vnf_fm_alarm_id, body):
        return self.patch(
            self.vnf_fm_alarm_path % vnf_fm_alarm_id, body=body,
            headers=self.headers)

    @APIParamsCall
    def create_vnf_fm_sub(self, body):
        return self.post(
            self.vnf_fm_subs_path, body=body, headers=self.headers)

    @APIParamsCall
    def list_vnf_fm_subs(self, retrieve_all=True, **_params):
        vnf_fm_subs = self.list("vnf_fm_subs", self.vnf_fm_subs_path,
                                retrieve_all, headers=self.headers,
                                **_params)
        return vnf_fm_subs

    @APIParamsCall
    def show_vnf_fm_sub(self, vnf_fm_sub_id):
        return self.get(
            self.vnf_fm_sub_path % vnf_fm_sub_id, headers=self.headers)

    @APIParamsCall
    def delete_vnf_fm_sub(self, vnf_fm_sub_id):
        return self.delete(
            self.vnf_fm_sub_path % vnf_fm_sub_id, headers=self.headers)
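# Illustrative sketch (not part of the upstream module): fault-management
# calls through the unified Client facade, assuming 'c' is an authenticated
# Client instance.  The 'ackState' body follows the ETSI SOL alarm
# modification format.
#
#     alarms = c.list_vnf_fm_alarms()
#     for alarm in alarms['vnf_fm_alarms']:
#         c.update_vnf_fm_alarm(alarm['id'], {'ackState': 'ACKNOWLEDGED'})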
class VnfPMClient(ClientBase):
    headers = {'Version': '2.1.0'}

    vnf_pm_jobs_path = '/vnfpm/v2/pm_jobs'
    vnf_pm_job_path = '/vnfpm/v2/pm_jobs/%s'
    vnf_pm_reports_path = '/vnfpm/v2/pm_jobs/%(job_id)s/reports/%(report_id)s'
    vnf_pm_thresholds_path = '/vnfpm/v2/thresholds'
    vnf_pm_threshold_path = '/vnfpm/v2/thresholds/%s'

    def build_action(self, action):
        return action

    @APIParamsCall
    def create_vnf_pm_job(self, body):
        return self.post(
            self.vnf_pm_jobs_path, body=body, headers=self.headers)

    @APIParamsCall
    def list_vnf_pm_jobs(self, retrieve_all=True, **_params):
        vnf_pm_jobs = self.list(
            "vnf_pm_jobs", self.vnf_pm_jobs_path, retrieve_all,
            headers=self.headers, **_params)
        return vnf_pm_jobs

    @APIParamsCall
    def show_vnf_pm_job(self, vnf_pm_job_id):
        return self.get(
            self.vnf_pm_job_path % vnf_pm_job_id, headers=self.headers)

    @APIParamsCall
    def update_vnf_pm_job(self, vnf_pm_job_id, body):
        return self.patch(
            self.vnf_pm_job_path % vnf_pm_job_id, body=body,
            headers=self.headers)

    @APIParamsCall
    def delete_vnf_pm_job(self, vnf_pm_job_id):
        return self.delete(
            self.vnf_pm_job_path % vnf_pm_job_id, headers=self.headers)

    @APIParamsCall
    def show_vnf_pm_report(self, vnf_pm_job_id, vnf_pm_report_id):
        return self.get(
            self.vnf_pm_reports_path % {
                'job_id': vnf_pm_job_id, 'report_id': vnf_pm_report_id
            },
            headers=self.headers)

    @APIParamsCall
    def create_vnf_pm_threshold(self, body):
        return self.post(
            self.vnf_pm_thresholds_path, body=body, headers=self.headers)

    @APIParamsCall
    def list_vnf_pm_thresholds(self, retrieve_all=True, **_params):
        return self.list(
            "vnf_pm_thresholds", self.vnf_pm_thresholds_path, retrieve_all,
            headers=self.headers, **_params)

    @APIParamsCall
    def show_vnf_pm_threshold(self, vnf_pm_threshold_id):
        return self.get(
            self.vnf_pm_threshold_path % vnf_pm_threshold_id,
            headers=self.headers)

    @APIParamsCall
    def update_vnf_pm_threshold(self, vnf_pm_threshold_id, body):
        return self.patch(
            self.vnf_pm_threshold_path % vnf_pm_threshold_id, body=body,
            headers=self.headers)

    @APIParamsCall
    def delete_vnf_pm_threshold(self, vnf_pm_threshold_id):
        return self.delete(
            self.vnf_pm_threshold_path % vnf_pm_threshold_id,
            headers=self.headers)
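# Illustrative sketch (not part of the upstream module): creating a PM job and
# fetching one of its reports via the unified Client facade.  The body below
# is a trimmed example of an ETSI SOL performance-management job request, not
# a complete one, and 'c' is assumed to be an authenticated Client instance.
#
#     job = c.create_vnf_pm_job(
#         {'objectType': 'Vnf',
#          'objectInstanceIds': ['<vnf-instance-id>'],
#          'criteria': {'performanceMetric': ['VCpuUsageMeanVnf.<id>'],
#                       'collectionPeriod': 600,
#                       'reportingPeriod': 1800},
#          'callbackUri': 'http://localhost:9990/notification/callback'})
#     report = c.show_vnf_pm_report(job['id'], '<report-id>')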
""" def __init__(self, **kwargs): api_version = kwargs.pop('api_version', '1') self.vnf_lcm_client = VnfLCMClient(api_version, **kwargs) self.vnf_fm_client = VnfFMClient(**kwargs) self.vnf_pm_client = VnfPMClient(**kwargs) self.vnf_package_client = VnfPackageClient(**kwargs) self.legacy_client = LegacyClient(**kwargs) # LegacyClient methods def delete(self, action, body=None, headers=None, params=None): return self.legacy_client.delete(action, body=body, headers=headers, params=params) def get(self, action, body=None, headers=None, params=None): return self.legacy_client.get(action, body=body, headers=headers, params=params) def post(self, action, body=None, headers=None, params=None): return self.legacy_client.post(action, body=body, headers=headers, params=params) def put(self, action, body=None, headers=None, params=None): return self.legacy_client.put(action, body=body, headers=headers, params=params) def list(self, collection, path, retrieve_all=True, **params): return self.legacy_client.list(collection, path, retrieve_all=retrieve_all, **params) def show_vim(self, vim, **_params): return self.legacy_client.show_vim(vim, **_params) def create_vim(self, body): return self.legacy_client.create_vim(body) def delete_vim(self, vim): return self.legacy_client.delete_vim(vim) def update_vim(self, vim, body): return self.legacy_client.update_vim(vim, body) def list_vims(self, retrieve_all=True, **_params): return self.legacy_client.list_vims(retrieve_all=retrieve_all, **_params) # VnfPackageClient methods def create_vnf_package(self, body): return self.vnf_package_client.create_vnf_package(body) def list_vnf_packages(self, retrieve_all=True, query_parameter=None, **_params): return self.vnf_package_client.list_vnf_packages( retrieve_all=retrieve_all, **_params) def show_vnf_package(self, vnf_package, **_params): return self.vnf_package_client.show_vnf_package(vnf_package, **_params) def upload_vnf_package(self, vnf_package, file_data=None, **_params): return self.vnf_package_client.upload_vnf_package( vnf_package, file_data=file_data, **_params) def delete_vnf_package(self, vnf_package): return self.vnf_package_client.delete_vnf_package(vnf_package) # VnfLCMClient methods. 
    def create_vnf_instance(self, body):
        return self.vnf_lcm_client.create_vnf_instance(body)

    def show_vnf_instance(self, vnf_instance, **_params):
        return self.vnf_lcm_client.show_vnf_instance(vnf_instance, **_params)

    def list_vnf_instances(self, retrieve_all=True, **_params):
        return self.vnf_lcm_client.list_vnf_instances(
            retrieve_all=retrieve_all, **_params)

    def instantiate_vnf_instance(self, vnf_id, body):
        return self.vnf_lcm_client.instantiate_vnf_instance(vnf_id, body)

    def heal_vnf_instance(self, vnf_id, body):
        return self.vnf_lcm_client.heal_vnf_instance(vnf_id, body)

    def terminate_vnf_instance(self, vnf_id, body):
        return self.vnf_lcm_client.terminate_vnf_instance(vnf_id, body)

    def scale_vnf_instance(self, vnf_id, body):
        return self.vnf_lcm_client.scale_vnf_instance(vnf_id, body)

    def change_ext_conn_vnf_instance(self, vnf_id, body):
        return self.vnf_lcm_client.change_ext_conn_vnf_instance(vnf_id, body)

    def change_vnfpkg_vnf_instance(self, vnf_id, body):
        return self.vnf_lcm_client.change_vnfpkg_vnf_instance(vnf_id, body)

    def delete_vnf_instance(self, vnf_id):
        return self.vnf_lcm_client.delete_vnf_instance(vnf_id)

    def update_vnf_instance(self, vnf_id, body):
        return self.vnf_lcm_client.update_vnf_instance(vnf_id, body)

    def rollback_vnf_instance(self, occ_id):
        return self.vnf_lcm_client.rollback_vnf_instance(occ_id)

    def cancel_vnf_instance(self, occ_id, body):
        return self.vnf_lcm_client.cancel_vnf_instance(occ_id, body)

    def fail_vnf_instance(self, occ_id):
        return self.vnf_lcm_client.fail_vnf_instance(occ_id)

    def retry_vnf_instance(self, occ_id):
        return self.vnf_lcm_client.retry_vnf_instance(occ_id)

    def update_vnf_package(self, vnf_package, body):
        return self.vnf_package_client.update_vnf_package(vnf_package, body)

    def download_vnfd_from_vnf_package(self, vnf_package, accept):
        return self.vnf_package_client.download_vnfd_from_vnf_package(
            vnf_package, accept)

    def download_artifact_from_vnf_package(self, vnf_package, artifact_path):
        return self.vnf_package_client.download_artifact_from_vnf_package(
            vnf_package, artifact_path)

    def download_vnf_package(self, vnf_package):
        return self.vnf_package_client.download_vnf_package(vnf_package)

    def list_vnf_lcm_op_occs(self, retrieve_all=True, **_params):
        return self.vnf_lcm_client.list_vnf_lcm_op_occs(
            retrieve_all=retrieve_all, **_params)

    def show_vnf_lcm_op_occs(self, occ_id):
        return self.vnf_lcm_client.show_vnf_lcm_op_occs(occ_id)

    def create_lccn_subscription(self, body):
        return self.vnf_lcm_client.create_lccn_subscription(body)

    def delete_lccn_subscription(self, subsc_id):
        return self.vnf_lcm_client.delete_lccn_subscription(subsc_id)

    def list_lccn_subscriptions(self, retrieve_all=True, **_params):
        return self.vnf_lcm_client.list_lccn_subscriptions(
            retrieve_all=retrieve_all, **_params)

    def show_lccn_subscription(self, subsc_id):
        return self.vnf_lcm_client.show_lccn_subscription(subsc_id)

    def show_vnf_lcm_versions(self, major_version):
        return self.vnf_lcm_client.show_vnf_lcm_versions(major_version)

    # VnfFMClient methods.
    def list_vnf_fm_alarms(self, retrieve_all=True, **_params):
        return self.vnf_fm_client.list_vnf_fm_alarms(
            retrieve_all=retrieve_all, **_params)

    def show_vnf_fm_alarm(self, vnf_fm_alarm_id):
        return self.vnf_fm_client.show_vnf_fm_alarm(vnf_fm_alarm_id)

    def update_vnf_fm_alarm(self, vnf_fm_alarm_id, body):
        return self.vnf_fm_client.update_vnf_fm_alarm(vnf_fm_alarm_id, body)

    def create_vnf_fm_sub(self, body):
        return self.vnf_fm_client.create_vnf_fm_sub(body)

    def list_vnf_fm_subs(self, retrieve_all=True, **_params):
        return self.vnf_fm_client.list_vnf_fm_subs(
            retrieve_all=retrieve_all, **_params)

    def show_vnf_fm_sub(self, vnf_fm_sub_id):
        return self.vnf_fm_client.show_vnf_fm_sub(vnf_fm_sub_id)

    def delete_vnf_fm_sub(self, vnf_fm_sub_id):
        return self.vnf_fm_client.delete_vnf_fm_sub(vnf_fm_sub_id)

    # VnfPMClient methods.

    def create_vnf_pm_job(self, body):
        return self.vnf_pm_client.create_vnf_pm_job(body)

    def list_vnf_pm_jobs(self, retrieve_all=True, **_params):
        return self.vnf_pm_client.list_vnf_pm_jobs(
            retrieve_all=retrieve_all, **_params)

    def show_vnf_pm_job(self, vnf_pm_job_id):
        return self.vnf_pm_client.show_vnf_pm_job(vnf_pm_job_id)

    def update_vnf_pm_job(self, vnf_pm_job_id, body):
        return self.vnf_pm_client.update_vnf_pm_job(vnf_pm_job_id, body)

    def delete_vnf_pm_job(self, vnf_pm_job_id):
        return self.vnf_pm_client.delete_vnf_pm_job(vnf_pm_job_id)

    def show_vnf_pm_report(self, vnf_pm_job_id, vnf_pm_report_id):
        return self.vnf_pm_client.show_vnf_pm_report(
            vnf_pm_job_id, vnf_pm_report_id)

    def create_vnf_pm_threshold(self, body):
        return self.vnf_pm_client.create_vnf_pm_threshold(body)

    def list_vnf_pm_thresholds(self, retrieve_all=True, **_params):
        return self.vnf_pm_client.list_vnf_pm_thresholds(
            retrieve_all=retrieve_all, **_params)

    def show_vnf_pm_threshold(self, vnf_pm_threshold_id):
        return self.vnf_pm_client.show_vnf_pm_threshold(vnf_pm_threshold_id)

    def update_vnf_pm_threshold(self, vnf_pm_threshold_id, body):
        return self.vnf_pm_client.update_vnf_pm_threshold(
            vnf_pm_threshold_id, body)

    def delete_vnf_pm_threshold(self, vnf_pm_threshold_id):
        return self.vnf_pm_client.delete_vnf_pm_threshold(vnf_pm_threshold_id)

python-tackerclient-2.1.0/tackerclient/version.py

# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import pbr.version

__version__ = pbr.version.VersionInfo('python-tackerclient').version_string()

python-tackerclient-2.1.0/test-requirements.txt

# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
hacking>=4.0.0,<4.1.0 # Apache-2.0

coverage!=4.4,>=4.0 # Apache-2.0
ddt>=1.0.1 # MIT
fixtures>=3.0.0 # Apache-2.0/BSD
python-subunit>=1.0.0 # Apache-2.0/BSD
requests-mock>=1.2.0 # Apache-2.0
stestr>=2.0.0 # Apache-2.0
testtools>=2.2.0 # MIT

python-tackerclient-2.1.0/tools/

python-tackerclient-2.1.0/tools/tacker.bash_completion

_tacker_opts="" # lazy init
_tacker_flags="" # lazy init
_tacker_opts_exp="" # lazy init

_tacker()
{
    local cur prev nbc cflags
    COMPREPLY=()
    cur="${COMP_WORDS[COMP_CWORD]}"
    prev="${COMP_WORDS[COMP_CWORD-1]}"

    if [ "x$_tacker_opts" == "x" ] ; then
        nbc="`tacker bash-completion`"
        _tacker_opts="`echo "$nbc" | sed -e "s/--[a-z0-9_-]*//g" -e "s/\s\s*/ /g"`"
        _tacker_flags="`echo " $nbc" | sed -e "s/ [^-][^-][a-z0-9_-]*//g" -e "s/\s\s*/ /g"`"
        _tacker_opts_exp="`echo "$_tacker_opts" | sed -e "s/\s/|/g"`"
    fi

    if [[ " ${COMP_WORDS[@]} " =~ " "($_tacker_opts_exp)" " && "$prev" != "help" ]] ; then
        COMPLETION_CACHE=~/.tackerclient/*/*-cache
        cflags="$_tacker_flags "$(cat $COMPLETION_CACHE 2> /dev/null | tr '\n' ' ')
        COMPREPLY=($(compgen -W "${cflags}" -- ${cur}))
    else
        COMPREPLY=($(compgen -W "${_tacker_opts}" -- ${cur}))
    fi
    return 0
}

complete -F _tacker tacker

python-tackerclient-2.1.0/tox.ini

[tox]
envlist = py39,py38,py36,pep8,docs
minversion = 3.18.0
ignore_basepython_conflict = True

[testenv]
basepython = python3
setenv = VIRTUAL_ENV={envdir}
         LANG=en_US.UTF-8
         LANGUAGE=en_US:en
         LC_ALL=C
usedevelop = True
deps =
  -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master}
  -r{toxinidir}/requirements.txt
  -r{toxinidir}/test-requirements.txt
commands = stestr run --slowest {posargs}

[testenv:pep8]
commands = flake8
distribute = false

[testenv:venv]
commands = {posargs}

[testenv:docs]
deps = -r{toxinidir}/doc/requirements.txt
commands = sphinx-build -W -b html doc/source doc/build/html

[testenv:releasenotes]
deps =
  -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master}
  -r{toxinidir}/doc/requirements.txt
commands =
  sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html

[testenv:cover]
setenv =
  PYTHON=coverage run --source tackerclient --parallel-mode
commands =
  stestr run {posargs}
  coverage combine
  coverage html -d cover
  coverage xml -o cover/coverage.xml

[flake8]
# E125 continuation line does not distinguish itself from next logical line
# W504 line break after binary operator
ignore = E125,W504
show-source = true
exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,tools
# F821 undefined name 'unicode'
# if isinstance(config, str) or isinstance(config, unicode):
builtins = unicode