python-saharaclient-3.1.0/0000775000175000017500000000000013643577103015523 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/.zuul.yaml0000664000175000017500000000327113643576737017503 0ustar zuulzuul00000000000000- project: templates: - openstack-cover-jobs - openstack-lower-constraints-jobs - openstack-python3-ussuri-jobs - publish-openstack-docs-pti - check-requirements - release-notes-jobs-python3 - openstackclient-plugin-jobs check: jobs: - python-saharaclient-scenario - python-saharaclient-scenario-v2 - python-saharaclient-tempest - python-saharaclient-tempest-v2 gate: queue: sahara jobs: - python-saharaclient-scenario - python-saharaclient-scenario-v2 - python-saharaclient-tempest - python-saharaclient-tempest-v2 experimental: jobs: - openstack-tox-pypy - job: name: python-saharaclient-scenario description: | Run scenario tests for Sahara against python-saharaclient changes. parent: sahara-tests-scenario required-projects: - openstack/python-saharaclient - job: name: python-saharaclient-scenario-v2 description: | Run scenario tests for Sahara on API v2 against python-saharaclient changes. parent: sahara-tests-scenario-v2 required-projects: - openstack/python-saharaclient - job: name: python-saharaclient-tempest description: | Run Tempest tests from the Sahara plugin against python-saharaclient changes. parent: sahara-tests-tempest required-projects: - openstack/python-saharaclient - job: name: python-saharaclient-tempest-v2 description: | Run Tempest tests from the Sahara plugin on API v2 against python-saharaclient changes. parent: sahara-tests-tempest-v2 required-projects: - openstack/python-saharaclient python-saharaclient-3.1.0/AUTHORS0000664000175000017500000000711713643577103016601 0ustar zuulzuul00000000000000Adrien Vergé Alexander Ignatov Alexander Kuznetsov Alina Nesterova Andreas Jaeger Andreas Jaeger Andrew Lazarev Andrey Pavlov Chad Roberts Charles Short Chen Christian Berendt Cyril Roelandt Davanum Srinivas Denis Egorenko Dmitry Mescheryakov Doug Hellmann Ethan Gafford Flavio Percoco Hangdong Zhang Ivan Udovichenko Iwona Kotlarska James E. 
Blair Jamie Lennox Jaxon Wang Jeremy Freudberg Jeremy Liu Jeremy Stanley KATO Tomoyuki Khanh-Toan Tran Li, Chen Longgeek Luigi Toscano Matthew Farrellee Michael Ionkin Michael McCune Mikhail Lelyakin Mina Park Minkyung Maisy Kim Monty Taylor Ngo Quoc Cuong Nguyen Hai Nikita Konovalov Nikolay Mahotkin Ondřej Nový OpenStack Release Bot Pavlo Shchelokovskyy PavlovAndrey Petr Kovar Pritesh Kothari Renat Akhmerov Rui Chen Sergey Galkin Sergey Lukjanov Sergey Reshetnyak Sergey Reshetnyak Shrirang Phadke Shu Yingya Steve Martinelli Swapnil Kulkarni (coolsvap) Tang Chen Telles Nobrega Telles Nobrega Tetiana Lashchova Thomas Bechtold Thomas Goirand Tony Breeds Trevor McKay Vieri <15050873171@163.com> Vitaly Gridnev Vu Cong Tuan Yaroslav Lobankov Zhiqiang Fan bhagyashris caishan chioleong daohanli henriquetruta jiasen.lin kangyufei kavithahr llg8212 luhuichun malei qingszhao ricolin ricolin shu-mutou ting.wang venkatamahesh wu.chunyang yankee yuhara.motoki zemuvier zhouyunfeng zhu.rong python-saharaclient-3.1.0/python_saharaclient.egg-info/0000775000175000017500000000000013643577103023254 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/python_saharaclient.egg-info/entry_points.txt0000664000175000017500000002235713643577103026563 0ustar zuulzuul00000000000000[openstack.cli.extension] data_processing = saharaclient.osc.plugin [openstack.data_processing.v1] dataprocessing_cluster_create = saharaclient.osc.v1.clusters:CreateCluster dataprocessing_cluster_delete = saharaclient.osc.v1.clusters:DeleteCluster dataprocessing_cluster_list = saharaclient.osc.v1.clusters:ListClusters dataprocessing_cluster_scale = saharaclient.osc.v1.clusters:ScaleCluster dataprocessing_cluster_show = saharaclient.osc.v1.clusters:ShowCluster dataprocessing_cluster_template_create = saharaclient.osc.v1.cluster_templates:CreateClusterTemplate dataprocessing_cluster_template_delete = saharaclient.osc.v1.cluster_templates:DeleteClusterTemplate dataprocessing_cluster_template_export = saharaclient.osc.v1.cluster_templates:ExportClusterTemplate dataprocessing_cluster_template_import = saharaclient.osc.v1.cluster_templates:ImportClusterTemplate dataprocessing_cluster_template_list = saharaclient.osc.v1.cluster_templates:ListClusterTemplates dataprocessing_cluster_template_show = saharaclient.osc.v1.cluster_templates:ShowClusterTemplate dataprocessing_cluster_template_update = saharaclient.osc.v1.cluster_templates:UpdateClusterTemplate dataprocessing_cluster_update = saharaclient.osc.v1.clusters:UpdateCluster dataprocessing_cluster_verification = saharaclient.osc.v1.clusters:VerificationUpdateCluster dataprocessing_data_source_create = saharaclient.osc.v1.data_sources:CreateDataSource dataprocessing_data_source_delete = saharaclient.osc.v1.data_sources:DeleteDataSource dataprocessing_data_source_list = saharaclient.osc.v1.data_sources:ListDataSources dataprocessing_data_source_show = saharaclient.osc.v1.data_sources:ShowDataSource dataprocessing_data_source_update = saharaclient.osc.v1.data_sources:UpdateDataSource dataprocessing_image_list = saharaclient.osc.v1.images:ListImages dataprocessing_image_register = saharaclient.osc.v1.images:RegisterImage dataprocessing_image_show = saharaclient.osc.v1.images:ShowImage dataprocessing_image_tags_add = saharaclient.osc.v1.images:AddImageTags dataprocessing_image_tags_remove = saharaclient.osc.v1.images:RemoveImageTags dataprocessing_image_tags_set = saharaclient.osc.v1.images:SetImageTags dataprocessing_image_unregister = saharaclient.osc.v1.images:UnregisterImage 
dataprocessing_job_binary_create = saharaclient.osc.v1.job_binaries:CreateJobBinary dataprocessing_job_binary_delete = saharaclient.osc.v1.job_binaries:DeleteJobBinary dataprocessing_job_binary_download = saharaclient.osc.v1.job_binaries:DownloadJobBinary dataprocessing_job_binary_list = saharaclient.osc.v1.job_binaries:ListJobBinaries dataprocessing_job_binary_show = saharaclient.osc.v1.job_binaries:ShowJobBinary dataprocessing_job_binary_update = saharaclient.osc.v1.job_binaries:UpdateJobBinary dataprocessing_job_delete = saharaclient.osc.v1.jobs:DeleteJob dataprocessing_job_execute = saharaclient.osc.v1.jobs:ExecuteJob dataprocessing_job_list = saharaclient.osc.v1.jobs:ListJobs dataprocessing_job_show = saharaclient.osc.v1.jobs:ShowJob dataprocessing_job_template_create = saharaclient.osc.v1.job_templates:CreateJobTemplate dataprocessing_job_template_delete = saharaclient.osc.v1.job_templates:DeleteJobTemplate dataprocessing_job_template_list = saharaclient.osc.v1.job_templates:ListJobTemplates dataprocessing_job_template_show = saharaclient.osc.v1.job_templates:ShowJobTemplate dataprocessing_job_template_update = saharaclient.osc.v1.job_templates:UpdateJobTemplate dataprocessing_job_type_configs_get = saharaclient.osc.v1.job_types:GetJobTypeConfigs dataprocessing_job_type_list = saharaclient.osc.v1.job_types:ListJobTypes dataprocessing_job_update = saharaclient.osc.v1.jobs:UpdateJob dataprocessing_node_group_template_create = saharaclient.osc.v1.node_group_templates:CreateNodeGroupTemplate dataprocessing_node_group_template_delete = saharaclient.osc.v1.node_group_templates:DeleteNodeGroupTemplate dataprocessing_node_group_template_export = saharaclient.osc.v1.node_group_templates:ExportNodeGroupTemplate dataprocessing_node_group_template_import = saharaclient.osc.v1.node_group_templates:ImportNodeGroupTemplate dataprocessing_node_group_template_list = saharaclient.osc.v1.node_group_templates:ListNodeGroupTemplates dataprocessing_node_group_template_show = saharaclient.osc.v1.node_group_templates:ShowNodeGroupTemplate dataprocessing_node_group_template_update = saharaclient.osc.v1.node_group_templates:UpdateNodeGroupTemplate dataprocessing_plugin_configs_get = saharaclient.osc.v1.plugins:GetPluginConfigs dataprocessing_plugin_list = saharaclient.osc.v1.plugins:ListPlugins dataprocessing_plugin_show = saharaclient.osc.v1.plugins:ShowPlugin dataprocessing_plugin_update = saharaclient.osc.v1.plugins:UpdatePlugin [openstack.data_processing.v2] dataprocessing_cluster_create = saharaclient.osc.v2.clusters:CreateCluster dataprocessing_cluster_delete = saharaclient.osc.v2.clusters:DeleteCluster dataprocessing_cluster_list = saharaclient.osc.v2.clusters:ListClusters dataprocessing_cluster_scale = saharaclient.osc.v2.clusters:ScaleCluster dataprocessing_cluster_show = saharaclient.osc.v2.clusters:ShowCluster dataprocessing_cluster_template_create = saharaclient.osc.v2.cluster_templates:CreateClusterTemplate dataprocessing_cluster_template_delete = saharaclient.osc.v2.cluster_templates:DeleteClusterTemplate dataprocessing_cluster_template_export = saharaclient.osc.v2.cluster_templates:ExportClusterTemplate dataprocessing_cluster_template_import = saharaclient.osc.v2.cluster_templates:ImportClusterTemplate dataprocessing_cluster_template_list = saharaclient.osc.v2.cluster_templates:ListClusterTemplates dataprocessing_cluster_template_show = saharaclient.osc.v2.cluster_templates:ShowClusterTemplate dataprocessing_cluster_template_update = saharaclient.osc.v2.cluster_templates:UpdateClusterTemplate 
dataprocessing_cluster_update = saharaclient.osc.v2.clusters:UpdateCluster dataprocessing_cluster_update_keypair = saharaclient.osc.v2.clusters:UpdateKeypairCluster dataprocessing_cluster_verification = saharaclient.osc.v2.clusters:VerificationUpdateCluster dataprocessing_data_source_create = saharaclient.osc.v2.data_sources:CreateDataSource dataprocessing_data_source_delete = saharaclient.osc.v2.data_sources:DeleteDataSource dataprocessing_data_source_list = saharaclient.osc.v2.data_sources:ListDataSources dataprocessing_data_source_show = saharaclient.osc.v2.data_sources:ShowDataSource dataprocessing_data_source_update = saharaclient.osc.v2.data_sources:UpdateDataSource dataprocessing_image_list = saharaclient.osc.v2.images:ListImages dataprocessing_image_register = saharaclient.osc.v2.images:RegisterImage dataprocessing_image_show = saharaclient.osc.v2.images:ShowImage dataprocessing_image_tags_add = saharaclient.osc.v2.images:AddImageTags dataprocessing_image_tags_remove = saharaclient.osc.v2.images:RemoveImageTags dataprocessing_image_tags_set = saharaclient.osc.v2.images:SetImageTags dataprocessing_image_unregister = saharaclient.osc.v2.images:UnregisterImage dataprocessing_job_binary_create = saharaclient.osc.v2.job_binaries:CreateJobBinary dataprocessing_job_binary_delete = saharaclient.osc.v2.job_binaries:DeleteJobBinary dataprocessing_job_binary_download = saharaclient.osc.v2.job_binaries:DownloadJobBinary dataprocessing_job_binary_list = saharaclient.osc.v2.job_binaries:ListJobBinaries dataprocessing_job_binary_show = saharaclient.osc.v2.job_binaries:ShowJobBinary dataprocessing_job_binary_update = saharaclient.osc.v2.job_binaries:UpdateJobBinary dataprocessing_job_delete = saharaclient.osc.v2.jobs:DeleteJob dataprocessing_job_execute = saharaclient.osc.v2.jobs:ExecuteJob dataprocessing_job_list = saharaclient.osc.v2.jobs:ListJobs dataprocessing_job_show = saharaclient.osc.v2.jobs:ShowJob dataprocessing_job_template_create = saharaclient.osc.v2.job_templates:CreateJobTemplate dataprocessing_job_template_delete = saharaclient.osc.v2.job_templates:DeleteJobTemplate dataprocessing_job_template_list = saharaclient.osc.v2.job_templates:ListJobTemplates dataprocessing_job_template_show = saharaclient.osc.v2.job_templates:ShowJobTemplate dataprocessing_job_template_update = saharaclient.osc.v2.job_templates:UpdateJobTemplate dataprocessing_job_type_configs_get = saharaclient.osc.v2.job_types:GetJobTypeConfigs dataprocessing_job_type_list = saharaclient.osc.v2.job_types:ListJobTypes dataprocessing_job_update = saharaclient.osc.v2.jobs:UpdateJob dataprocessing_node_group_template_create = saharaclient.osc.v2.node_group_templates:CreateNodeGroupTemplate dataprocessing_node_group_template_delete = saharaclient.osc.v2.node_group_templates:DeleteNodeGroupTemplate dataprocessing_node_group_template_export = saharaclient.osc.v2.node_group_templates:ExportNodeGroupTemplate dataprocessing_node_group_template_import = saharaclient.osc.v2.node_group_templates:ImportNodeGroupTemplate dataprocessing_node_group_template_list = saharaclient.osc.v2.node_group_templates:ListNodeGroupTemplates dataprocessing_node_group_template_show = saharaclient.osc.v2.node_group_templates:ShowNodeGroupTemplate dataprocessing_node_group_template_update = saharaclient.osc.v2.node_group_templates:UpdateNodeGroupTemplate dataprocessing_plugin_configs_get = saharaclient.osc.v2.plugins:GetPluginConfigs dataprocessing_plugin_list = saharaclient.osc.v2.plugins:ListPlugins dataprocessing_plugin_show = 
saharaclient.osc.v2.plugins:ShowPlugin dataprocessing_plugin_update = saharaclient.osc.v2.plugins:UpdatePlugin python-saharaclient-3.1.0/python_saharaclient.egg-info/pbr.json0000664000175000017500000000005613643577103024733 0ustar zuulzuul00000000000000{"git_version": "830d460", "is_release": true}python-saharaclient-3.1.0/python_saharaclient.egg-info/SOURCES.txt0000664000175000017500000001555613643577103025154 0ustar zuulzuul00000000000000.coveragerc .stestr.conf .zuul.yaml AUTHORS CONTRIBUTING.rst ChangeLog HACKING.rst LICENSE README.rst lower-constraints.txt requirements.txt setup.cfg setup.py test-requirements.txt tox.ini doc/requirements.txt doc/ext/__init__.py doc/ext/cli.py doc/ext/ext.py doc/ext/parser.py doc/source/conf.py doc/source/index.rst doc/source/_templates/sidebarlinks.html doc/source/_theme_rtd/layout.html doc/source/_theme_rtd/theme.conf doc/source/cli/index.rst doc/source/cli/intro.rst doc/source/cli/reference.rst doc/source/contributor/index.rst doc/source/reference/index.rst doc/source/reference/pythonclient.rst doc/source/reference/pythonclient_v2.rst python_saharaclient.egg-info/PKG-INFO python_saharaclient.egg-info/SOURCES.txt python_saharaclient.egg-info/dependency_links.txt python_saharaclient.egg-info/entry_points.txt python_saharaclient.egg-info/not-zip-safe python_saharaclient.egg-info/pbr.json python_saharaclient.egg-info/requires.txt python_saharaclient.egg-info/top_level.txt releasenotes/notes/.placeholder releasenotes/notes/api-v2-features-650eb8cc0f50a729.yaml releasenotes/notes/autogenerated-api-docs-3bc8513e63bfe610.yaml releasenotes/notes/autogenerated-cli-docs-c1e89ec6ea66c4a9.yaml releasenotes/notes/cli-deprecation-da0e7b6dfe77af52.yaml releasenotes/notes/designate-integration-16c59a6b57dbcfa4.yaml releasenotes/notes/drop-py2-7-862abe2ec0c32c5f.yaml releasenotes/notes/event-logs-c6d286e25dc7d9b1.yaml releasenotes/notes/experimental-v2-support-67ccf699e056ed78.yaml releasenotes/notes/fields-unset-068db4c3e680c37d.yaml releasenotes/notes/fix-job-binary-download-py3-5592eca2345305bd.yaml releasenotes/notes/fix-osc-520-regression-a92dff38f04e6a57.yaml releasenotes/notes/implement-pagination-2ba52769d240a3ce.yaml releasenotes/notes/job-binary-create-optional-bc0f9ee6426c5659.yaml releasenotes/notes/job-create-optional-034307a6b5db2cf2.yaml releasenotes/notes/job-execution-create-optional-1014a403e5ffa7ac.yaml releasenotes/notes/job-job-template-apiv2-change-93ffbf2b1360cddc.yaml releasenotes/notes/multiple-clusters-change-69a15f00597739d7.yaml releasenotes/notes/new-cli-6119bf8a4fb24ab6.yaml releasenotes/notes/osc-apiv2-4079c8cdb839ae42.yaml releasenotes/notes/plugin-api-f650c26a030b7df8.yaml releasenotes/notes/remove-functional-tests-c4b9d43c2c32d121.yaml releasenotes/notes/remove-old-cli-commands-06b9936ce044dd0f.yaml releasenotes/notes/remove-py26-dad75fc8d602b3c5.yaml releasenotes/notes/remove-py33-8364cb4805391750.yaml releasenotes/notes/rename_version_to_plugin-version-20cfe17530446391.yaml releasenotes/notes/rework-auth-c3e13a68a935671e.yaml releasenotes/notes/shares-update-d6f7e28acd27aa7f.yaml releasenotes/notes/start-using-reno-1f3418c11785c9ab.yaml releasenotes/notes/tags-update-c794416bcc035cb8.yaml releasenotes/notes/update-image-optional-f83c5746d88507cd.yaml releasenotes/notes/volume-mount-prefix-b6ef396a357cddd0.yaml releasenotes/source/conf.py releasenotes/source/index.rst releasenotes/source/mitaka.rst releasenotes/source/newton.rst releasenotes/source/ocata.rst releasenotes/source/pike.rst releasenotes/source/queens.rst 
releasenotes/source/rocky.rst releasenotes/source/stein.rst releasenotes/source/train.rst releasenotes/source/unreleased.rst releasenotes/source/_static/.placeholder releasenotes/source/_templates/.placeholder saharaclient/__init__.py saharaclient/_i18n.py saharaclient/client.py saharaclient/version.py saharaclient/api/__init__.py saharaclient/api/base.py saharaclient/api/client.py saharaclient/api/cluster_templates.py saharaclient/api/clusters.py saharaclient/api/data_sources.py saharaclient/api/helpers.py saharaclient/api/images.py saharaclient/api/job_binaries.py saharaclient/api/job_binary_internals.py saharaclient/api/job_executions.py saharaclient/api/job_types.py saharaclient/api/jobs.py saharaclient/api/node_group_templates.py saharaclient/api/parameters.py saharaclient/api/plugins.py saharaclient/api/v2/__init__.py saharaclient/api/v2/job_templates.py saharaclient/api/v2/jobs.py saharaclient/osc/__init__.py saharaclient/osc/plugin.py saharaclient/osc/utils.py saharaclient/osc/v1/__init__.py saharaclient/osc/v1/cluster_templates.py saharaclient/osc/v1/clusters.py saharaclient/osc/v1/data_sources.py saharaclient/osc/v1/images.py saharaclient/osc/v1/job_binaries.py saharaclient/osc/v1/job_templates.py saharaclient/osc/v1/job_types.py saharaclient/osc/v1/jobs.py saharaclient/osc/v1/node_group_templates.py saharaclient/osc/v1/plugins.py saharaclient/osc/v2/__init__.py saharaclient/osc/v2/cluster_templates.py saharaclient/osc/v2/clusters.py saharaclient/osc/v2/data_sources.py saharaclient/osc/v2/images.py saharaclient/osc/v2/job_binaries.py saharaclient/osc/v2/job_templates.py saharaclient/osc/v2/job_types.py saharaclient/osc/v2/jobs.py saharaclient/osc/v2/node_group_templates.py saharaclient/osc/v2/plugins.py saharaclient/tests/__init__.py saharaclient/tests/hacking/__init__.py saharaclient/tests/hacking/checks.py saharaclient/tests/hacking/commit_message.py saharaclient/tests/hacking/logging_checks.py saharaclient/tests/unit/__init__.py saharaclient/tests/unit/base.py saharaclient/tests/unit/test_base.py saharaclient/tests/unit/test_cluster_templates.py saharaclient/tests/unit/test_clusters.py saharaclient/tests/unit/test_data_sources.py saharaclient/tests/unit/test_hacking.py saharaclient/tests/unit/test_images.py saharaclient/tests/unit/test_job_binaries.py saharaclient/tests/unit/test_job_binary_internals.py saharaclient/tests/unit/test_job_executions.py saharaclient/tests/unit/test_job_types.py saharaclient/tests/unit/test_jobs.py saharaclient/tests/unit/test_manager.py saharaclient/tests/unit/test_node_group_templates.py saharaclient/tests/unit/test_plugins.py saharaclient/tests/unit/test_resource.py saharaclient/tests/unit/osc/__init__.py saharaclient/tests/unit/osc/test_plugin.py saharaclient/tests/unit/osc/v1/__init__.py saharaclient/tests/unit/osc/v1/fakes.py saharaclient/tests/unit/osc/v1/test_cluster_templates.py saharaclient/tests/unit/osc/v1/test_clusters.py saharaclient/tests/unit/osc/v1/test_data_sources.py saharaclient/tests/unit/osc/v1/test_images.py saharaclient/tests/unit/osc/v1/test_job_binaries.py saharaclient/tests/unit/osc/v1/test_job_templates.py saharaclient/tests/unit/osc/v1/test_job_types.py saharaclient/tests/unit/osc/v1/test_jobs.py saharaclient/tests/unit/osc/v1/test_node_group_templates.py saharaclient/tests/unit/osc/v1/test_plugins.py saharaclient/tests/unit/osc/v1/test_utils.py saharaclient/tests/unit/osc/v2/__init__.py saharaclient/tests/unit/osc/v2/test_cluster_templates.py saharaclient/tests/unit/osc/v2/test_clusters.py 
saharaclient/tests/unit/osc/v2/test_data_sources.py saharaclient/tests/unit/osc/v2/test_images.py saharaclient/tests/unit/osc/v2/test_job_binaries.py saharaclient/tests/unit/osc/v2/test_job_templates.py saharaclient/tests/unit/osc/v2/test_job_types.py saharaclient/tests/unit/osc/v2/test_jobs.py saharaclient/tests/unit/osc/v2/test_node_group_templates.py saharaclient/tests/unit/osc/v2/test_plugins.pypython-saharaclient-3.1.0/python_saharaclient.egg-info/PKG-INFO0000664000175000017500000000571313643577103024357 0ustar zuulzuul00000000000000Metadata-Version: 1.2 Name: python-saharaclient Version: 3.1.0 Summary: Client library for Sahara API Home-page: https://docs.openstack.org/python-saharaclient/latest/ Author: OpenStack Author-email: openstack-discuss@lists.openstack.org License: Apache License, Version 2.0 Description: ======================== Team and repository tags ======================== .. image:: https://governance.openstack.org/tc/badges/python-saharaclient.svg :target: https://governance.openstack.org/tc/reference/tags/index.html .. Change things from this point on Python bindings to the OpenStack Sahara API =========================================== .. image:: https://img.shields.io/pypi/v/python-saharaclient.svg :target: https://pypi.org/project/python-saharaclient/ :alt: Latest Version This is a client for the OpenStack Sahara API. There's a Python API (the ``saharaclient`` module), and a command-line script (``sahara``). Each implements the OpenStack Sahara API. You can find documentation for both Python bindings and CLI in `Docs`_. Development takes place via the usual OpenStack processes as outlined in the `developer guide `_. .. _Docs: https://docs.openstack.org/python-saharaclient/latest/ * License: Apache License, Version 2.0 * `PyPi`_ - package installation * `Online Documentation`_ * `Blueprints`_ - feature specifications * `Bugs`_ - stories and issue tracking * `Source`_ * `Specs`_ * `How to Contribute`_ .. _PyPi: https://pypi.org/project/python-saharaclient .. _Online Documentation: https://docs.openstack.org/python-saharaclient/latest/ .. _Blueprints: http://specs.openstack.org/openstack/sahara-specs/ .. _Bugs: https://storyboard.openstack.org/#!/project/934 .. _Source: https://opendev.org/openstack/python-saharaclient .. _How to Contribute: https://docs.openstack.org/infra/manual/developers.html .. _Specs: https://specs.openstack.org/openstack/sahara-specs/ .. 
_Release Notes: https://docs.openstack.org/releasenotes/python-saharaclient Platform: UNKNOWN Classifier: Environment :: OpenStack Classifier: Intended Audience :: Information Technology Classifier: Intended Audience :: System Administrators Classifier: License :: OSI Approved :: Apache Software License Classifier: Operating System :: POSIX :: Linux Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: 3 :: Only Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Requires-Python: >=3.6 python-saharaclient-3.1.0/python_saharaclient.egg-info/dependency_links.txt0000664000175000017500000000000113643577103027322 0ustar zuulzuul00000000000000 python-saharaclient-3.1.0/python_saharaclient.egg-info/top_level.txt0000664000175000017500000000001513643577103026002 0ustar zuulzuul00000000000000saharaclient python-saharaclient-3.1.0/python_saharaclient.egg-info/not-zip-safe0000664000175000017500000000000113643577103025502 0ustar zuulzuul00000000000000 python-saharaclient-3.1.0/python_saharaclient.egg-info/requires.txt0000664000175000017500000000034113643577103025652 0ustar zuulzuul00000000000000pbr!=2.1.0,>=2.0.0 Babel!=2.4.0,>=2.3.4 keystoneauth1>=3.4.0 osc-lib>=2.0.0 oslo.log>=3.36.0 oslo.serialization!=2.19.1,>=2.18.0 oslo.i18n>=3.15.3 oslo.utils>=3.33.0 python-openstackclient>=5.2.0 requests>=2.14.2 six>=1.10.0 python-saharaclient-3.1.0/setup.cfg0000664000175000017500000002435213643577103017352 0ustar zuulzuul00000000000000[metadata] name = python-saharaclient summary = Client library for Sahara API description-file = README.rst license = Apache License, Version 2.0 author = OpenStack author-email = openstack-discuss@lists.openstack.org home-page = https://docs.openstack.org/python-saharaclient/latest/ python-requires = >=3.6 classifier = Environment :: OpenStack Intended Audience :: Information Technology Intended Audience :: System Administrators License :: OSI Approved :: Apache Software License Operating System :: POSIX :: Linux Programming Language :: Python Programming Language :: Python :: Implementation :: CPython Programming Language :: Python :: 3 :: Only Programming Language :: Python :: 3 Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.7 [files] packages = saharaclient [entry_points] openstack.cli.extension = data_processing = saharaclient.osc.plugin openstack.data_processing.v1 = dataprocessing_plugin_list = saharaclient.osc.v1.plugins:ListPlugins dataprocessing_plugin_show = saharaclient.osc.v1.plugins:ShowPlugin dataprocessing_plugin_configs_get = saharaclient.osc.v1.plugins:GetPluginConfigs dataprocessing_plugin_update = saharaclient.osc.v1.plugins:UpdatePlugin dataprocessing_data_source_create = saharaclient.osc.v1.data_sources:CreateDataSource dataprocessing_data_source_list = saharaclient.osc.v1.data_sources:ListDataSources dataprocessing_data_source_show = saharaclient.osc.v1.data_sources:ShowDataSource dataprocessing_data_source_delete = saharaclient.osc.v1.data_sources:DeleteDataSource dataprocessing_data_source_update = saharaclient.osc.v1.data_sources:UpdateDataSource dataprocessing_image_list = saharaclient.osc.v1.images:ListImages dataprocessing_image_show = saharaclient.osc.v1.images:ShowImage dataprocessing_image_register = saharaclient.osc.v1.images:RegisterImage dataprocessing_image_unregister = saharaclient.osc.v1.images:UnregisterImage 
dataprocessing_image_tags_add = saharaclient.osc.v1.images:AddImageTags dataprocessing_image_tags_remove = saharaclient.osc.v1.images:RemoveImageTags dataprocessing_image_tags_set = saharaclient.osc.v1.images:SetImageTags dataprocessing_node_group_template_create = saharaclient.osc.v1.node_group_templates:CreateNodeGroupTemplate dataprocessing_node_group_template_list = saharaclient.osc.v1.node_group_templates:ListNodeGroupTemplates dataprocessing_node_group_template_show = saharaclient.osc.v1.node_group_templates:ShowNodeGroupTemplate dataprocessing_node_group_template_update = saharaclient.osc.v1.node_group_templates:UpdateNodeGroupTemplate dataprocessing_node_group_template_delete = saharaclient.osc.v1.node_group_templates:DeleteNodeGroupTemplate dataprocessing_node_group_template_import = saharaclient.osc.v1.node_group_templates:ImportNodeGroupTemplate dataprocessing_node_group_template_export = saharaclient.osc.v1.node_group_templates:ExportNodeGroupTemplate dataprocessing_cluster_template_create = saharaclient.osc.v1.cluster_templates:CreateClusterTemplate dataprocessing_cluster_template_list = saharaclient.osc.v1.cluster_templates:ListClusterTemplates dataprocessing_cluster_template_show = saharaclient.osc.v1.cluster_templates:ShowClusterTemplate dataprocessing_cluster_template_update = saharaclient.osc.v1.cluster_templates:UpdateClusterTemplate dataprocessing_cluster_template_delete = saharaclient.osc.v1.cluster_templates:DeleteClusterTemplate dataprocessing_cluster_template_import = saharaclient.osc.v1.cluster_templates:ImportClusterTemplate dataprocessing_cluster_template_export = saharaclient.osc.v1.cluster_templates:ExportClusterTemplate dataprocessing_cluster_create = saharaclient.osc.v1.clusters:CreateCluster dataprocessing_cluster_list = saharaclient.osc.v1.clusters:ListClusters dataprocessing_cluster_show = saharaclient.osc.v1.clusters:ShowCluster dataprocessing_cluster_update = saharaclient.osc.v1.clusters:UpdateCluster dataprocessing_cluster_delete = saharaclient.osc.v1.clusters:DeleteCluster dataprocessing_cluster_scale = saharaclient.osc.v1.clusters:ScaleCluster dataprocessing_cluster_verification = saharaclient.osc.v1.clusters:VerificationUpdateCluster dataprocessing_job_template_create = saharaclient.osc.v1.job_templates:CreateJobTemplate dataprocessing_job_template_list = saharaclient.osc.v1.job_templates:ListJobTemplates dataprocessing_job_template_show = saharaclient.osc.v1.job_templates:ShowJobTemplate dataprocessing_job_template_update = saharaclient.osc.v1.job_templates:UpdateJobTemplate dataprocessing_job_template_delete = saharaclient.osc.v1.job_templates:DeleteJobTemplate dataprocessing_job_type_list = saharaclient.osc.v1.job_types:ListJobTypes dataprocessing_job_type_configs_get = saharaclient.osc.v1.job_types:GetJobTypeConfigs dataprocessing_job_execute = saharaclient.osc.v1.jobs:ExecuteJob dataprocessing_job_list = saharaclient.osc.v1.jobs:ListJobs dataprocessing_job_show = saharaclient.osc.v1.jobs:ShowJob dataprocessing_job_update = saharaclient.osc.v1.jobs:UpdateJob dataprocessing_job_delete = saharaclient.osc.v1.jobs:DeleteJob dataprocessing_job_binary_create = saharaclient.osc.v1.job_binaries:CreateJobBinary dataprocessing_job_binary_list = saharaclient.osc.v1.job_binaries:ListJobBinaries dataprocessing_job_binary_show = saharaclient.osc.v1.job_binaries:ShowJobBinary dataprocessing_job_binary_update = saharaclient.osc.v1.job_binaries:UpdateJobBinary dataprocessing_job_binary_delete = saharaclient.osc.v1.job_binaries:DeleteJobBinary 
dataprocessing_job_binary_download = saharaclient.osc.v1.job_binaries:DownloadJobBinary openstack.data_processing.v2 = dataprocessing_node_group_template_create = saharaclient.osc.v2.node_group_templates:CreateNodeGroupTemplate dataprocessing_node_group_template_list = saharaclient.osc.v2.node_group_templates:ListNodeGroupTemplates dataprocessing_node_group_template_show = saharaclient.osc.v2.node_group_templates:ShowNodeGroupTemplate dataprocessing_node_group_template_update = saharaclient.osc.v2.node_group_templates:UpdateNodeGroupTemplate dataprocessing_node_group_template_delete = saharaclient.osc.v2.node_group_templates:DeleteNodeGroupTemplate dataprocessing_node_group_template_import = saharaclient.osc.v2.node_group_templates:ImportNodeGroupTemplate dataprocessing_node_group_template_export = saharaclient.osc.v2.node_group_templates:ExportNodeGroupTemplate dataprocessing_plugin_list = saharaclient.osc.v2.plugins:ListPlugins dataprocessing_plugin_show = saharaclient.osc.v2.plugins:ShowPlugin dataprocessing_plugin_configs_get = saharaclient.osc.v2.plugins:GetPluginConfigs dataprocessing_plugin_update = saharaclient.osc.v2.plugins:UpdatePlugin dataprocessing_data_source_create = saharaclient.osc.v2.data_sources:CreateDataSource dataprocessing_data_source_list = saharaclient.osc.v2.data_sources:ListDataSources dataprocessing_data_source_show = saharaclient.osc.v2.data_sources:ShowDataSource dataprocessing_data_source_delete = saharaclient.osc.v2.data_sources:DeleteDataSource dataprocessing_data_source_update = saharaclient.osc.v2.data_sources:UpdateDataSource dataprocessing_image_list = saharaclient.osc.v2.images:ListImages dataprocessing_image_show = saharaclient.osc.v2.images:ShowImage dataprocessing_image_register = saharaclient.osc.v2.images:RegisterImage dataprocessing_image_unregister = saharaclient.osc.v2.images:UnregisterImage dataprocessing_image_tags_add = saharaclient.osc.v2.images:AddImageTags dataprocessing_image_tags_remove = saharaclient.osc.v2.images:RemoveImageTags dataprocessing_image_tags_set = saharaclient.osc.v2.images:SetImageTags dataprocessing_cluster_template_create = saharaclient.osc.v2.cluster_templates:CreateClusterTemplate dataprocessing_cluster_template_list = saharaclient.osc.v2.cluster_templates:ListClusterTemplates dataprocessing_cluster_template_show = saharaclient.osc.v2.cluster_templates:ShowClusterTemplate dataprocessing_cluster_template_update = saharaclient.osc.v2.cluster_templates:UpdateClusterTemplate dataprocessing_cluster_template_delete = saharaclient.osc.v2.cluster_templates:DeleteClusterTemplate dataprocessing_cluster_template_import = saharaclient.osc.v2.cluster_templates:ImportClusterTemplate dataprocessing_cluster_template_export = saharaclient.osc.v2.cluster_templates:ExportClusterTemplate dataprocessing_cluster_create = saharaclient.osc.v2.clusters:CreateCluster dataprocessing_cluster_list = saharaclient.osc.v2.clusters:ListClusters dataprocessing_cluster_show = saharaclient.osc.v2.clusters:ShowCluster dataprocessing_cluster_update = saharaclient.osc.v2.clusters:UpdateCluster dataprocessing_cluster_delete = saharaclient.osc.v2.clusters:DeleteCluster dataprocessing_cluster_scale = saharaclient.osc.v2.clusters:ScaleCluster dataprocessing_cluster_verification = saharaclient.osc.v2.clusters:VerificationUpdateCluster dataprocessing_cluster_update_keypair = saharaclient.osc.v2.clusters:UpdateKeypairCluster dataprocessing_job_template_create = saharaclient.osc.v2.job_templates:CreateJobTemplate dataprocessing_job_template_list = 
saharaclient.osc.v2.job_templates:ListJobTemplates
    dataprocessing_job_template_show = saharaclient.osc.v2.job_templates:ShowJobTemplate
    dataprocessing_job_template_update = saharaclient.osc.v2.job_templates:UpdateJobTemplate
    dataprocessing_job_template_delete = saharaclient.osc.v2.job_templates:DeleteJobTemplate
    dataprocessing_job_type_list = saharaclient.osc.v2.job_types:ListJobTypes
    dataprocessing_job_type_configs_get = saharaclient.osc.v2.job_types:GetJobTypeConfigs
    dataprocessing_job_execute = saharaclient.osc.v2.jobs:ExecuteJob
    dataprocessing_job_list = saharaclient.osc.v2.jobs:ListJobs
    dataprocessing_job_show = saharaclient.osc.v2.jobs:ShowJob
    dataprocessing_job_update = saharaclient.osc.v2.jobs:UpdateJob
    dataprocessing_job_delete = saharaclient.osc.v2.jobs:DeleteJob
    dataprocessing_job_binary_create = saharaclient.osc.v2.job_binaries:CreateJobBinary
    dataprocessing_job_binary_list = saharaclient.osc.v2.job_binaries:ListJobBinaries
    dataprocessing_job_binary_show = saharaclient.osc.v2.job_binaries:ShowJobBinary
    dataprocessing_job_binary_update = saharaclient.osc.v2.job_binaries:UpdateJobBinary
    dataprocessing_job_binary_delete = saharaclient.osc.v2.job_binaries:DeleteJobBinary
    dataprocessing_job_binary_download = saharaclient.osc.v2.job_binaries:DownloadJobBinary

[egg_info]
tag_build = 
tag_date = 0
python-saharaclient-3.1.0/HACKING.rst0000664000175000017500000000241713643576737017331 0ustar zuulzuul00000000000000Sahara Style Commandments
=========================

- Step 1: Read the OpenStack Style Commandments https://docs.openstack.org/hacking/latest/
- Step 2: Read on

Sahara Specific Commandments
----------------------------

Commit Messages
---------------

Using a common format for commit messages will help keep our git history readable. Follow these guidelines:

- [S365] First, provide a brief summary of 50 characters or less. Summaries of greater than 72 characters will be rejected by the gate.
- [S364] The first line of the commit message should provide an accurate description of the change, not just a reference to a bug or blueprint.

Imports
-------

- [S366, S367] Organize your imports according to the ``Import order``

Dictionaries/Lists
------------------

- [S360] Ensure default arguments are not mutable.
- [S368] Must use a dict comprehension instead of a dict constructor with a sequence of key-value pairs. For more information, please refer to http://legacy.python.org/dev/peps/pep-0274/
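A minimal sketch illustrating both rules above; the function and variable names are made up for the example:

.. sourcecode:: python

    # [S360] Bad: the mutable default list is shared across calls.
    def collect_tags_bad(tags=[]):
        tags.append('stable')
        return tags

    # Good: default to None and build a fresh list inside the function.
    def collect_tags(tags=None):
        tags = list(tags) if tags is not None else []
        tags.append('stable')
        return tags

    # [S368] Bad: a dict constructor fed a sequence of key-value pairs.
    sizes = dict([(name, len(name)) for name in ('master', 'worker')])

    # Good: the equivalent dict comprehension.
    sizes = {name: len(name) for name in ('master', 'worker')}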
Logs
----

- [S373] Don't translate log messages.
- [S374] Don't use deprecated log levels.

Importing json
--------------

- [S375] Prefer ``jsonutils`` from ``oslo_serialization`` over the standard ``json`` module when operating with ``json`` objects.
python-saharaclient-3.1.0/setup.py0000664000175000017500000000127113643576737017252 0ustar zuulzuul00000000000000# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import setuptools

setuptools.setup(
    setup_requires=['pbr>=2.0.0'],
    pbr=True)
python-saharaclient-3.1.0/requirements.txt0000664000175000017500000000106613643576737021026 0ustar zuulzuul00000000000000# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
pbr!=2.1.0,>=2.0.0 # Apache-2.0
Babel!=2.4.0,>=2.3.4 # BSD
keystoneauth1>=3.4.0 # Apache-2.0
osc-lib>=2.0.0 # Apache-2.0
oslo.log>=3.36.0 # Apache-2.0
oslo.serialization!=2.19.1,>=2.18.0 # Apache-2.0
oslo.i18n>=3.15.3 # Apache-2.0
oslo.utils>=3.33.0 # Apache-2.0
python-openstackclient>=5.2.0 # Apache-2.0
requests>=2.14.2 # Apache-2.0
six>=1.10.0 # MIT
python-saharaclient-3.1.0/LICENSE0000664000175000017500000002363713643576737016543 0ustar zuulzuul00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner.
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 
python-saharaclient-3.1.0/doc/0000775000175000017500000000000013643577103016270 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/doc/source/0000775000175000017500000000000013643577103017570 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/doc/source/conf.py0000664000175000017500000002110613643576737021103 0ustar zuulzuul00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2013 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import subprocess import sys import os import warnings on_rtd = os.environ.get('READTHEDOCS', None) == 'True' # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) sys.path.insert(0, os.path.abspath('../../saharaclient')) sys.path.append(os.path.abspath('..')) sys.path.append(os.path.abspath('../bin')) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.viewcode', 'ext.cli', 'openstackdocstheme'] # openstackdocstheme options repository_name = 'openstack/python-saharaclient' bug_project = '934' bug_tag = 'doc' html_last_updated_fmt = '%Y-%m-%d %H:%M' # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Sahara Client' copyright = u'2013, OpenStack Foundation' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # Version info from saharaclient.version import version_info as saharaclient_version release = saharaclient_version.release_string() # The short X.Y version. version = saharaclient_version.version_string() # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. 
#add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'openstackdocs' if on_rtd: html_theme_path = ['.'] html_theme = '_theme_rtd' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". html_title = 'Sahara Client' # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". # html_static_path = ['_static'] # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. html_sidebars = { 'index': ['sidebarlinks.html', 'localtoc.html', 'searchbox.html', 'sourcelink.html'], '**': ['localtoc.html', 'relations.html', 'searchbox.html', 'sourcelink.html'] } # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'SaharaClientDoc' # -- Options for LaTeX output -------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. 
#'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'saharaclientdoc.tex', u'Sahara Client', u'OpenStack Foundation', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'saharaclient', u'Sahara Client', [u'OpenStack Foundation'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'Sahara Client', u'Sahara Client', u'OpenStack Foundation', 'Sahara Client', 'Sahara Client', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' python-saharaclient-3.1.0/doc/source/reference/0000775000175000017500000000000013643577103021526 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/doc/source/reference/pythonclient_v2.rst0000664000175000017500000000277513643576737025436 0ustar zuulzuul00000000000000Python Sahara client for APIv2 ============================== Overview -------- There is also support for Sahara's experimental APIv2. Supported operations -------------------- Plugin ops ~~~~~~~~~~ .. autoclass:: saharaclient.api.plugins.PluginManagerV2 :members: :inherited-members: Image Registry ops ~~~~~~~~~~~~~~~~~~ .. autoclass:: saharaclient.api.images.ImageManagerV2 :members: :inherited-members: Node Group Template ops ~~~~~~~~~~~~~~~~~~~~~~~ .. autoclass:: saharaclient.api.node_group_templates.NodeGroupTemplateManagerV2 :members: :inherited-members: Cluster Template ops ~~~~~~~~~~~~~~~~~~~~ .. autoclass:: saharaclient.api.cluster_templates.ClusterTemplateManagerV2 :members: :inherited-members: Cluster ops ~~~~~~~~~~~ .. autoclass:: saharaclient.api.clusters.ClusterManagerV2 :members: :inherited-members: Data Source ops ~~~~~~~~~~~~~~~ .. autoclass:: saharaclient.api.data_sources.DataSourceManagerV2 :members: :inherited-members: Job Binary ops ~~~~~~~~~~~~~~ .. autoclass:: saharaclient.api.job_binaries.JobBinariesManagerV2 :members: :inherited-members: Job Template ops ~~~~~~~~~~~~~~~~ .. autoclass:: saharaclient.api.v2.job_templates.JobTemplatesManagerV2 :members: :inherited-members: Job ops ~~~~~~~ .. autoclass:: saharaclient.api.v2.jobs.JobsManagerV2 :members: :inherited-members: Job Types ops ~~~~~~~~~~~~~ .. 
autoclass:: saharaclient.api.job_types.JobTypesManager
   :members:
   :inherited-members:
   :noindex:
python-saharaclient-3.1.0/doc/source/reference/pythonclient.rst0000664000175000017500000001042213643576737025013 0ustar zuulzuul00000000000000Python Sahara client
====================

Overview
--------

The Sahara Client provides a set of Python interfaces for communicating with the Sahara REST API. It enables users to perform most of the existing operations, such as retrieving template lists, creating clusters, and submitting EDP jobs.

Instantiating a Client
----------------------

To start using the Sahara Client, create an instance of the `Client` class. The client constructor accepts a list of parameters used to authenticate and to locate the Sahara endpoint.

.. autoclass:: saharaclient.api.client.Client
   :members:

**Important!** It is not mandatory to provide all of the parameters above; the minimum needed to determine the Sahara endpoint, authenticate the user, and select the tenant to operate in is enough.

Authentication check
~~~~~~~~~~~~~~~~~~~~

Passing authentication parameters directly to the Sahara Client is deprecated. A Keystone Session object should be used instead. For example:

.. sourcecode:: python

    from keystoneauth1.identity import v2
    from keystoneauth1 import session
    from saharaclient import client

    auth = v2.Password(auth_url=AUTH_URL,
                       username=USERNAME,
                       password=PASSWORD,
                       tenant_name=PROJECT_ID)
    ses = session.Session(auth=auth)
    sahara = client.Client('1.1', session=ses)
..

For more information about Keystone Sessions, see `Using Sessions`_.

.. _Using Sessions: https://docs.openstack.org/python-keystoneclient/latest/using-sessions.html

Sahara endpoint discovery
~~~~~~~~~~~~~~~~~~~~~~~~~

If the user has a direct URL pointing to the Sahara REST API, it may be specified as `sahara_url`. If this parameter is missing, the Sahara client will use the Keystone Service Catalog to find the endpoint. Two parameters, `service_type` and `endpoint_type`, configure the endpoint search; both have default values.

.. sourcecode:: python

    from keystoneauth1.identity import v2
    from keystoneauth1 import session
    from saharaclient import client

    auth = v2.Password(auth_url=AUTH_URL,
                       username=USERNAME,
                       password=PASSWORD,
                       tenant_name=PROJECT_ID)
    ses = session.Session(auth=auth)
    sahara = client.Client('1.1', session=ses,
                           service_type="non-default-service-type",
                           endpoint_type="internalURL")
..

Object managers
---------------

The Sahara Client has a list of fields to operate with:

* plugins
* clusters
* cluster_templates
* node_group_templates
* images
* data_sources
* job_binaries
* job_binary_internals
* job_executions
* job_types

Each of these fields is a reference to a Manager for a corresponding group of REST calls.
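For example, once a `Client` has been instantiated as shown above, each manager can be called directly. A minimal sketch (``CLUSTER_ID`` is a placeholder; the attributes available on each returned resource depend on the API version):

.. sourcecode:: python

    # `sahara` is the Client instance created in the previous examples
    for plugin in sahara.plugins.list():
        print(plugin.name)

    # fetch a single cluster by its id and inspect it
    cluster = sahara.clusters.get(CLUSTER_ID)
    print(cluster.status)
..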
autoclass:: saharaclient.api.data_sources.DataSourceManagerV1 :members: :inherited-members: Job Binary Internal ops ~~~~~~~~~~~~~~~~~~~~~~~ .. autoclass:: saharaclient.api.job_binary_internals.JobBinaryInternalsManager :members: create, update Job Binary ops ~~~~~~~~~~~~~~ .. autoclass:: saharaclient.api.job_binaries.JobBinariesManagerV1 :members: :inherited-members: Job ops ~~~~~~~ .. autoclass:: saharaclient.api.jobs.JobsManagerV1 :members: :inherited-members: Job Execution ops ~~~~~~~~~~~~~~~~~ .. autoclass:: saharaclient.api.job_executions.JobExecutionsManager :members: :inherited-members: Job Types ops ~~~~~~~~~~~~~ .. autoclass:: saharaclient.api.job_types.JobTypesManager :members: :inherited-members: python-saharaclient-3.1.0/doc/source/reference/index.rst0000664000175000017500000000016213643576737023402 0ustar zuulzuul00000000000000=============== Reference guide =============== .. toctree:: :maxdepth: 2 pythonclient pythonclient_v2 python-saharaclient-3.1.0/doc/source/contributor/0000775000175000017500000000000013643577103022142 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/doc/source/contributor/index.rst0000664000175000017500000000132513643576737024020 0ustar zuulzuul00000000000000Contributing ============ ``python-saharaclient`` is part of the Sahara project. It has a separate `storyboard`_ page which should be used to report bugs. Like the other projects of the OpenStack community, code contribution happens through `gerrit`_. Please refer to the `Sahara documentation`_ and its `How to Participate section`_ for more information on how to contribute to the project. .. _Sahara documentation: https://docs.openstack.org/sahara/latest/ .. _How to Participate section: https://docs.openstack.org/sahara/latest/contributor/how-to-participate.html .. _storyboard: https://storyboard.openstack.org/#!/project/934 .. _gerrit: https://docs.openstack.org/infra/manual/developers.html#development-workflow python-saharaclient-3.1.0/doc/source/_templates/0000775000175000017500000000000013643577103021725 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/doc/source/_templates/sidebarlinks.html0000664000175000017500000000051213643576737025277 0ustar zuulzuul00000000000000

Useful Links

{% if READTHEDOCS %} {% endif %} python-saharaclient-3.1.0/doc/source/_theme_rtd/0000775000175000017500000000000013643577103021702 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/doc/source/_theme_rtd/theme.conf0000664000175000017500000000010713643576737023665 0ustar zuulzuul00000000000000[theme] inherit = nature stylesheet = nature.css pygments_style = tangopython-saharaclient-3.1.0/doc/source/_theme_rtd/layout.html0000664000175000017500000000020513643576737024116 0ustar zuulzuul00000000000000{% extends "basic/layout.html" %} {% set css_files = css_files + ['_static/tweaks.css'] %} {% block relbar1 %}{% endblock relbar1 %}python-saharaclient-3.1.0/doc/source/cli/0000775000175000017500000000000013643577103020337 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/doc/source/cli/reference.rst0000664000175000017500000000155713643576737023053 0ustar zuulzuul00000000000000CLI Reference ============= The following commands are currently supported by the Sahara CLI: Plugins ------- .. cli:: :module: saharaclient.osc.v1.plugins Images ------ .. cli:: :module: saharaclient.osc.v1.images Node Group Templates -------------------- .. cli:: :module: saharaclient.osc.v1.node_group_templates Cluster Templates ----------------- .. cli:: :module: saharaclient.osc.v1.cluster_templates Clusters -------- .. cli:: :module: saharaclient.osc.v1.clusters Data Sources ------------ .. cli:: :module: saharaclient.osc.v1.data_sources Job Binaries ------------ .. cli:: :module: saharaclient.osc.v1.job_binaries Job Types --------- .. cli:: :module: saharaclient.osc.v1.job_types Job Templates ------------- .. cli:: :module: saharaclient.osc.v1.job_templates Jobs ---- .. cli:: :module: saharaclient.osc.v1.jobs python-saharaclient-3.1.0/doc/source/cli/intro.rst0000664000175000017500000000317513643576737022236 0ustar zuulzuul00000000000000Introduction ============ The Sahara shell utility is now part of the OpenStackClient, so all shell commands take the following form: .. code-block:: bash $ openstack dataprocessing <command> [arguments...] To get a list of all possible commands you can run: .. code-block:: bash $ openstack help dataprocessing To get detailed help for a command you can run: .. code-block:: bash $ openstack help dataprocessing <command> For more information about commands and their parameters you can refer to :doc:`the Sahara CLI commands <reference>`. For more information about the abilities and features of the OpenStackClient CLI you can refer to the `OpenStackClient documentation <https://docs.openstack.org/python-openstackclient/latest/>`_ Configuration ------------- The CLI is configured via environment variables and command-line options, which are described in https://docs.openstack.org/python-openstackclient/latest/cli/authentication.html. Authentication using username/password is most commonly used and can be provided with environment variables: .. code-block:: bash export OS_AUTH_URL=<url-to-openstack-identity> export OS_PROJECT_NAME=<project-name> export OS_USERNAME=<username> export OS_PASSWORD=<password> # (optional) or command-line options: .. code-block:: bash --os-auth-url <url> --os-project-name <project-name> --os-username <username> [--os-password <password>] Additionally the :program:`sahara` API URL can be configured with the parameter: .. code-block:: bash --os-data-processing-url <url> or with the environment variable: .. code-block:: bash export OS_DATA_PROCESSING_URL=<url> python-saharaclient-3.1.0/doc/source/cli/index.rst0000664000175000017500000000015313643576737022213 0ustar zuulzuul00000000000000================= Sahara CLI client ================= ..
toctree:: :maxdepth: 2 intro reference python-saharaclient-3.1.0/doc/source/index.rst0000664000175000017500000000144413643576737021440 0ustar zuulzuul00000000000000Python bindings to the OpenStack Sahara API =========================================== This is a client for the OpenStack Sahara API. There's :doc:`a Python API client <reference/index>` (the :mod:`saharaclient` module), and a :doc:`command-line utility <cli/index>` (installed as an OpenStackClient plugin). Each implements the entire OpenStack Sahara API. To use the sahara client, you'll need credentials for an OpenStack cloud that implements the Data Processing API. You may want to read the `OpenStack Sahara Docs`__ -- the overview, at least -- to get an idea of the concepts. Once you understand the concepts, this library should make more sense. __ https://docs.openstack.org/sahara/latest/ Contents: .. toctree:: :maxdepth: 2 reference/index cli/index contributor/index python-saharaclient-3.1.0/doc/requirements.txt0000664000175000017500000000047713643576737021570 0ustar zuulzuul00000000000000# The order of packages is significant, because pip processes them in the order # of appearance. Changing the order has an impact on the overall integration # process, which may cause wedges in the gate later. openstackdocstheme>=1.18.1 # Apache-2.0 reno>=2.5.0 # Apache-2.0 sphinx!=1.6.6,!=1.6.7,!=2.1.0,>=1.6.2 # BSD python-saharaclient-3.1.0/doc/ext/0000775000175000017500000000000013643577103017070 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/doc/ext/parser.py0000664000175000017500000001163313643576737020756 0ustar zuulzuul00000000000000# Copyright (c) 2013 Alex Rudakov # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from argparse import _HelpAction, _SubParsersAction import re class NavigationException(Exception): pass def parser_navigate(parser_result, path, current_path=None): if isinstance(path, str): if path == '': return parser_result path = re.split(r'\s+', path) current_path = current_path or [] if len(path) == 0: return parser_result if 'children' not in parser_result: raise NavigationException( 'Current parser has no child elements. (path: %s)' % ' '.join(current_path)) next_hop = path.pop(0) for child in parser_result['children']: if child['name'] == next_hop: current_path.append(next_hop) return parser_navigate(child, path, current_path) raise NavigationException( 'Current parser has no child element with name: %s (path: %s)' % ( next_hop, ' '.join(current_path))) def _try_add_parser_attribute(data, parser, attribname): attribval = getattr(parser, attribname, None) if attribval is None: return if not isinstance(attribval, str): return if len(attribval) > 0: data[attribname] = attribval def _format_usage_without_prefix(parser): """ Use private argparse APIs to get the usage string without the 'usage: ' prefix.
""" fmt = parser._get_formatter() fmt.add_usage(parser.usage, parser._actions, parser._mutually_exclusive_groups, prefix='') return fmt.format_help().strip() def parse_parser(parser, data=None, **kwargs): if data is None: data = { 'name': '', 'usage': parser.format_usage().strip(), 'bare_usage': _format_usage_without_prefix(parser), 'prog': parser.prog, } _try_add_parser_attribute(data, parser, 'description') _try_add_parser_attribute(data, parser, 'epilog') for action in parser._get_positional_actions(): if isinstance(action, _HelpAction): continue if isinstance(action, _SubParsersAction): helps = {} for item in action._choices_actions: helps[item.dest] = item.help # commands which share an existing parser are an alias, # don't duplicate docs subsection_alias = {} subsection_alias_names = set() for name, subaction in action._name_parser_map.items(): if subaction not in subsection_alias: subsection_alias[subaction] = [] else: subsection_alias[subaction].append(name) subsection_alias_names.add(name) for name, subaction in action._name_parser_map.items(): if name in subsection_alias_names: continue subalias = subsection_alias[subaction] subaction.prog = '%s %s' % (parser.prog, name) subdata = { 'name': name if not subalias else '%s (%s)' % (name, ', '.join(subalias)), 'help': helps.get(name, ''), 'usage': subaction.format_usage().strip(), 'bare_usage': _format_usage_without_prefix(subaction), } parse_parser(subaction, subdata, **kwargs) data.setdefault('children', []).append(subdata) continue if 'args' not in data: data['args'] = [] arg = { 'name': action.dest, 'help': action.help or '', 'metavar': action.metavar } if action.choices: arg['choices'] = action.choices data['args'].append(arg) show_defaults = ( ('skip_default_values' not in kwargs) or (kwargs['skip_default_values'] is False)) for action in parser._get_optional_actions(): if isinstance(action, _HelpAction): continue if 'options' not in data: data['options'] = [] option = { 'name': action.option_strings, 'default': action.default if show_defaults else '==SUPPRESS==', 'help': action.help or '' } if action.choices: option['choices'] = action.choices if "==SUPPRESS==" not in option['help']: data['options'].append(option) return data python-saharaclient-3.1.0/doc/ext/cli.py0000664000175000017500000000601513643576737020227 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import inspect import os import sys from docutils import nodes from . 
import ext def _get_command(classes): """Associates each command class with command depending on setup.cfg """ commands = {} setup_file = os.path.join( os.path.abspath(os.path.join(os.path.dirname(__file__), '../..')), 'setup.cfg') for line in open(setup_file, 'r'): for cl in classes: if cl in line: commands[cl] = line.split(' = ')[0].strip().replace('_', ' ') return commands class ArgParseDirectiveOSC(ext.ArgParseDirective): """Sphinx extension that automatically documents commands and options of the module that contains OpenstackClient/cliff command objects Usage example: .. cli:: :module: saharaclient.osc.v1.clusters """ def run(self): module_name = self.options['module'] mod = __import__(module_name, globals(), locals()) classes = inspect.getmembers(sys.modules[module_name], inspect.isclass) classes_names = [cl[0] for cl in classes] commands = _get_command(classes_names) items = [] for cl in classes: parser = cl[1](None, None).get_parser(None) parser.prog = commands[cl[0]] items.append(nodes.subtitle(text=commands[cl[0]])) result = ext.parse_parser( parser, skip_default_values='nodefault' in self.options) result = ext.parser_navigate(result, '') nested_content = ext.nodes.paragraph() self.state.nested_parse( self.content, self.content_offset, nested_content) nested_content = nested_content.children for item in nested_content: if not isinstance(item, ext.nodes.definition_list): items.append(item) if 'description' in result: items.append(self._nested_parse_paragraph(result['description'])) items.append(ext.nodes.literal_block(text=result['usage'])) items.append(ext.print_command_args_and_opts( ext.print_arg_list(result, nested_content), ext.print_opt_list(result, nested_content), ext.print_subcommand_list(result, nested_content) )) if 'epilog' in result: items.append(self._nested_parse_paragraph(result['epilog'])) return items def setup(app): app.add_directive('cli', ArgParseDirectiveOSC) python-saharaclient-3.1.0/doc/ext/ext.py0000664000175000017500000003714613643576737020271 0ustar zuulzuul00000000000000# Copyright (c) 2013 Alex Rudakov # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
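# This module provides the generic `argparse` Sphinx directive reused by
# cli.py: it imports the target module, obtains an ArgumentParser (directly,
# from a factory function, or by passing a fresh parser in), walks the
# description produced by parser.parse_parser()/parser_navigate(), and
# renders arguments, options and sub-commands as docutils nodes, optionally
# in a man-page-style layout.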
from argparse import ArgumentParser import os from docutils import nodes from docutils.statemachine import StringList from docutils.parsers.rst.directives import flag, unchanged from docutils.parsers.rst import Directive from sphinx.util.nodes import nested_parse_with_titles from .parser import parse_parser, parser_navigate def map_nested_definitions(nested_content): if nested_content is None: raise Exception('Nested content should be iterable, not null') # build definition dictionary definitions = {} for item in nested_content: if not isinstance(item, nodes.definition_list): continue for subitem in item: if not isinstance(subitem, nodes.definition_list_item): continue if not len(subitem.children) > 0: continue classifier = '@after' idx = subitem.first_child_matching_class(nodes.classifier) if idx is not None: ci = subitem[idx] if len(ci.children) > 0: classifier = ci.children[0].astext() if classifier is not None and classifier not in ( '@replace', '@before', '@after'): raise Exception('Unknown classifier: %s' % classifier) idx = subitem.first_child_matching_class(nodes.term) if idx is not None: ch = subitem[idx] if len(ch.children) > 0: term = ch.children[0].astext() idx = subitem.first_child_matching_class(nodes.definition) if idx is not None: def_node = subitem[idx] def_node.attributes['classifier'] = classifier definitions[term] = def_node return definitions def print_arg_list(data, nested_content): definitions = map_nested_definitions(nested_content) items = [] if 'args' in data: for arg in data['args']: my_def = [nodes.paragraph(text=arg['help'])] if arg['help'] else [] name = arg['name'] my_def = apply_definition(definitions, my_def, name) if len(my_def) == 0: my_def.append(nodes.paragraph(text='Undocumented')) if 'choices' in arg: my_def.append(nodes.paragraph( text=('Possible choices: %s' % ', '.join([str(c) for c in arg['choices']])))) items.append( nodes.option_list_item( '', nodes.option_group('', nodes.option_string(text=name)), nodes.description('', *my_def))) return nodes.option_list('', *items) if items else None def print_opt_list(data, nested_content): definitions = map_nested_definitions(nested_content) items = [] if 'options' in data: for opt in data['options']: names = [] my_def = [nodes.paragraph(text=opt['help'])] if opt['help'] else [] for name in opt['name']: option_declaration = [nodes.option_string(text=name)] if opt['default'] is not None \ and opt['default'] != '==SUPPRESS==': option_declaration += nodes.option_argument( '', text='=' + str(opt['default'])) names.append(nodes.option('', *option_declaration)) my_def = apply_definition(definitions, my_def, name) if len(my_def) == 0: my_def.append(nodes.paragraph(text='Undocumented')) if 'choices' in opt: my_def.append(nodes.paragraph( text=('Possible choices: %s' % ', '.join([str(c) for c in opt['choices']])))) items.append( nodes.option_list_item( '', nodes.option_group('', *names), nodes.description('', *my_def))) return nodes.option_list('', *items) if items else None def print_command_args_and_opts(arg_list, opt_list, sub_list=None): items = [] if arg_list: items.append(nodes.definition_list_item( '', nodes.term(text='Positional arguments:'), nodes.definition('', arg_list))) if opt_list: items.append(nodes.definition_list_item( '', nodes.term(text='Options:'), nodes.definition('', opt_list))) if sub_list and len(sub_list): items.append(nodes.definition_list_item( '', nodes.term(text='Sub-commands:'), nodes.definition('', sub_list))) return nodes.definition_list('', *items) def apply_definition(definitions, 
my_def, name): if name in definitions: definition = definitions[name] classifier = definition['classifier'] if classifier == '@replace': return definition.children if classifier == '@after': return my_def + definition.children if classifier == '@before': return definition.children + my_def raise Exception('Unknown classifier: %s' % classifier) return my_def def print_subcommand_list(data, nested_content): definitions = map_nested_definitions(nested_content) items = [] if 'children' in data: for child in data['children']: my_def = [nodes.paragraph( text=child['help'])] if child['help'] else [] name = child['name'] my_def = apply_definition(definitions, my_def, name) if len(my_def) == 0: my_def.append(nodes.paragraph(text='Undocumented')) if 'description' in child: my_def.append(nodes.paragraph(text=child['description'])) my_def.append(nodes.literal_block(text=child['usage'])) my_def.append(print_command_args_and_opts( print_arg_list(child, nested_content), print_opt_list(child, nested_content), print_subcommand_list(child, nested_content) )) items.append( nodes.definition_list_item( '', nodes.term('', '', nodes.strong(text=name)), nodes.definition('', *my_def) ) ) return nodes.definition_list('', *items) class ArgParseDirective(Directive): has_content = True option_spec = dict(module=unchanged, func=unchanged, ref=unchanged, prog=unchanged, path=unchanged, nodefault=flag, manpage=unchanged, nosubcommands=unchanged, passparser=flag) def _construct_manpage_specific_structure(self, parser_info): """ Construct a typical man page consisting of the following elements: NAME (automatically generated, out of our control) SYNOPSIS DESCRIPTION OPTIONS FILES SEE ALSO BUGS """ # SYNOPSIS section synopsis_section = nodes.section( '', nodes.title(text='Synopsis'), nodes.literal_block(text=parser_info["bare_usage"]), ids=['synopsis-section']) # DESCRIPTION section description_section = nodes.section( '', nodes.title(text='Description'), nodes.paragraph(text=parser_info.get( 'description', parser_info.get( 'help', "undocumented").capitalize())), ids=['description-section']) nested_parse_with_titles( self.state, self.content, description_section) if parser_info.get('epilog'): # TODO: do whatever sphinx does to understand ReST inside # docstrings magically imported from other places. The nested # parse method invoked above seem to be able to do this but # I haven't found a way to do it for arbitrary text description_section += nodes.paragraph( text=parser_info['epilog']) # OPTIONS section options_section = nodes.section( '', nodes.title(text='Options'), ids=['options-section']) if 'args' in parser_info: options_section += nodes.paragraph() options_section += nodes.subtitle(text='Positional arguments:') options_section += self._format_positional_arguments(parser_info) if 'options' in parser_info: options_section += nodes.paragraph() options_section += nodes.subtitle(text='Optional arguments:') options_section += self._format_optional_arguments(parser_info) items = [ # NOTE: we cannot generate NAME ourselves. 
It is generated by # docutils.writers.manpage synopsis_section, description_section, # TODO: files # TODO: see also # TODO: bugs ] if len(options_section.children) > 1: items.append(options_section) if 'nosubcommands' not in self.options: # SUBCOMMANDS section (non-standard) subcommands_section = nodes.section( '', nodes.title(text='Sub-Commands'), ids=['subcommands-section']) if 'children' in parser_info: subcommands_section += self._format_subcommands(parser_info) if len(subcommands_section) > 1: items.append(subcommands_section) if os.getenv("INCLUDE_DEBUG_SECTION"): import json # DEBUG section (non-standard) debug_section = nodes.section( '', nodes.title(text="Argparse + Sphinx Debugging"), nodes.literal_block(text=json.dumps(parser_info, indent=' ')), ids=['debug-section']) items.append(debug_section) return items def _format_positional_arguments(self, parser_info): assert 'args' in parser_info items = [] for arg in parser_info['args']: arg_items = [] if arg['help']: arg_items.append(nodes.paragraph(text=arg['help'])) else: arg_items.append(nodes.paragraph(text='Undocumented')) if 'choices' in arg: arg_items.append( nodes.paragraph( text='Possible choices: ' + ', '.join(arg['choices']))) items.append( nodes.option_list_item( '', nodes.option_group( '', nodes.option( '', nodes.option_string(text=arg['metavar']) ) ), nodes.description('', *arg_items))) return nodes.option_list('', *items) def _format_optional_arguments(self, parser_info): assert 'options' in parser_info items = [] for opt in parser_info['options']: names = [] opt_items = [] for name in opt['name']: option_declaration = [nodes.option_string(text=name)] if opt['default'] is not None \ and opt['default'] != '==SUPPRESS==': option_declaration += nodes.option_argument( '', text='=' + str(opt['default'])) names.append(nodes.option('', *option_declaration)) if opt['help']: opt_items.append(nodes.paragraph(text=opt['help'])) else: opt_items.append(nodes.paragraph(text='Undocumented')) if 'choices' in opt: opt_items.append( nodes.paragraph( text='Possible choices: ' + ', '.join(opt['choices']))) items.append( nodes.option_list_item( '', nodes.option_group('', *names), nodes.description('', *opt_items))) return nodes.option_list('', *items) def _format_subcommands(self, parser_info): assert 'children' in parser_info items = [] for subcmd in parser_info['children']: subcmd_items = [] if subcmd['help']: subcmd_items.append(nodes.paragraph(text=subcmd['help'])) else: subcmd_items.append(nodes.paragraph(text='Undocumented')) items.append( nodes.definition_list_item( '', nodes.term('', '', nodes.strong( text=subcmd['bare_usage'])), nodes.definition('', *subcmd_items))) return nodes.definition_list('', *items) def _nested_parse_paragraph(self, text): content = nodes.paragraph() self.state.nested_parse(StringList(text.split("\n")), 0, content) return content def run(self): if 'module' in self.options and 'func' in self.options: module_name = self.options['module'] attr_name = self.options['func'] elif 'ref' in self.options: _parts = self.options['ref'].split('.') module_name = '.'.join(_parts[0:-1]) attr_name = _parts[-1] else: raise self.error( ':module: and :func: should be specified, or :ref:') mod = __import__(module_name, globals(), locals(), [attr_name]) if not hasattr(mod, attr_name): raise self.error(( 'Module "%s" has no attribute "%s"\n' 'Incorrect argparse :module: or :func: values?' 
) % (module_name, attr_name)) func = getattr(mod, attr_name) if isinstance(func, ArgumentParser): parser = func elif 'passparser' in self.options: parser = ArgumentParser() func(parser) else: parser = func() if 'path' not in self.options: self.options['path'] = '' path = str(self.options['path']) if 'prog' in self.options: parser.prog = self.options['prog'] result = parse_parser( parser, skip_default_values='nodefault' in self.options) result = parser_navigate(result, path) if 'manpage' in self.options: return self._construct_manpage_specific_structure(result) nested_content = nodes.paragraph() self.state.nested_parse( self.content, self.content_offset, nested_content) nested_content = nested_content.children items = [] # add common content between for item in nested_content: if not isinstance(item, nodes.definition_list): items.append(item) if 'description' in result: items.append(self._nested_parse_paragraph(result['description'])) items.append(nodes.literal_block(text=result['usage'])) items.append(print_command_args_and_opts( print_arg_list(result, nested_content), print_opt_list(result, nested_content), print_subcommand_list(result, nested_content) )) if 'epilog' in result: items.append(self._nested_parse_paragraph(result['epilog'])) return items def setup(app): app.add_directive('argparse', ArgParseDirective)python-saharaclient-3.1.0/doc/ext/__init__.py0000664000175000017500000000000013643576737021203 0ustar zuulzuul00000000000000python-saharaclient-3.1.0/PKG-INFO0000664000175000017500000000571313643577103016626 0ustar zuulzuul00000000000000Metadata-Version: 1.2 Name: python-saharaclient Version: 3.1.0 Summary: Client library for Sahara API Home-page: https://docs.openstack.org/python-saharaclient/latest/ Author: OpenStack Author-email: openstack-discuss@lists.openstack.org License: Apache License, Version 2.0 Description: ======================== Team and repository tags ======================== .. image:: https://governance.openstack.org/tc/badges/python-saharaclient.svg :target: https://governance.openstack.org/tc/reference/tags/index.html .. Change things from this point on Python bindings to the OpenStack Sahara API =========================================== .. image:: https://img.shields.io/pypi/v/python-saharaclient.svg :target: https://pypi.org/project/python-saharaclient/ :alt: Latest Version This is a client for the OpenStack Sahara API. There's a Python API (the ``saharaclient`` module), and a command-line script (``sahara``). Each implements the OpenStack Sahara API. You can find documentation for both Python bindings and CLI in `Docs`_. Development takes place via the usual OpenStack processes as outlined in the `developer guide `_. .. _Docs: https://docs.openstack.org/python-saharaclient/latest/ * License: Apache License, Version 2.0 * `PyPi`_ - package installation * `Online Documentation`_ * `Blueprints`_ - feature specifications * `Bugs`_ - stories and issue tracking * `Source`_ * `Specs`_ * `How to Contribute`_ .. _PyPi: https://pypi.org/project/python-saharaclient .. _Online Documentation: https://docs.openstack.org/python-saharaclient/latest/ .. _Blueprints: http://specs.openstack.org/openstack/sahara-specs/ .. _Bugs: https://storyboard.openstack.org/#!/project/934 .. _Source: https://opendev.org/openstack/python-saharaclient .. _How to Contribute: https://docs.openstack.org/infra/manual/developers.html .. _Specs: https://specs.openstack.org/openstack/sahara-specs/ .. 
_Release Notes: https://docs.openstack.org/releasenotes/python-saharaclient Platform: UNKNOWN Classifier: Environment :: OpenStack Classifier: Intended Audience :: Information Technology Classifier: Intended Audience :: System Administrators Classifier: License :: OSI Approved :: Apache Software License Classifier: Operating System :: POSIX :: Linux Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: 3 :: Only Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Requires-Python: >=3.6 python-saharaclient-3.1.0/releasenotes/0000775000175000017500000000000013643577103020214 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/releasenotes/source/0000775000175000017500000000000013643577103021514 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/releasenotes/source/unreleased.rst0000664000175000017500000000016013643576737024406 0ustar zuulzuul00000000000000============================== Current Series Release Notes ============================== .. release-notes:: python-saharaclient-3.1.0/releasenotes/source/newton.rst0000664000175000017500000000023213643576737023571 0ustar zuulzuul00000000000000=================================== Newton Series Release Notes =================================== .. release-notes:: :branch: origin/stable/newton python-saharaclient-3.1.0/releasenotes/source/ocata.rst0000664000175000017500000000023013643576737023344 0ustar zuulzuul00000000000000=================================== Ocata Series Release Notes =================================== .. release-notes:: :branch: origin/stable/ocata python-saharaclient-3.1.0/releasenotes/source/conf.py0000664000175000017500000001571013643576737023033 0ustar zuulzuul00000000000000# -*- coding: utf-8 -*- # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # Sahara Client Release Notes documentation build configuration file extensions = [ 'reno.sphinxext', 'openstackdocstheme' ] # openstackdocstheme options repository_name = 'openstack/python-saharaclient' bug_project = '934' bug_tag = 'releasenotes' html_last_updated_fmt = '%Y-%m-%d %H:%M' # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Saharaclient Release Notes' copyright = u'2015, Sahara Developers' # Release notes are version independent. # The full version, including alpha/beta/rc tags. release = '' # The short X.Y version. version = '' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # The name of the Pygments (syntax highlighting) style to use. 
pygments_style = 'sphinx' # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'openstackdocs' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. # html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'SaharaClientReleaseNotesdoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # 'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). 
latex_documents = [ ('index', 'SaharaClientReleaseNotes.tex', u'Sahara Client Release Notes Documentation', u'Sahara Client Developers', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'saharaclientreleasenotes', u'Sahara Client Release Notes Documentation', [u'Sahara Developers'], 1) ] # If true, show URL addresses after external links. # man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'SaharaClientReleaseNotes', u'Sahara Client Release Notes Documentation', u'Sahara Developers', 'SaharaClientReleaseNotes', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. # texinfo_appendices = [] # If false, no module index is generated. # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. # texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. # texinfo_no_detailmenu = False # -- Options for Internationalization output ------------------------------ locale_dirs = ['locale/'] python-saharaclient-3.1.0/releasenotes/source/_static/0000775000175000017500000000000013643577103023142 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/releasenotes/source/_static/.placeholder0000664000175000017500000000000013643576737025427 0ustar zuulzuul00000000000000python-saharaclient-3.1.0/releasenotes/source/rocky.rst0000664000175000017500000000022113643576737023404 0ustar zuulzuul00000000000000=================================== Rocky Series Release Notes =================================== .. release-notes:: :branch: stable/rocky python-saharaclient-3.1.0/releasenotes/source/mitaka.rst0000664000175000017500000000023213643576737023525 0ustar zuulzuul00000000000000=================================== Mitaka Series Release Notes =================================== .. release-notes:: :branch: origin/stable/mitaka python-saharaclient-3.1.0/releasenotes/source/train.rst0000664000175000017500000000017613643576737023403 0ustar zuulzuul00000000000000========================== Train Series Release Notes ========================== .. 
release-notes:: :branch: stable/train python-saharaclient-3.1.0/releasenotes/source/_templates/0000775000175000017500000000000013643577103023651 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/releasenotes/source/_templates/.placeholder0000664000175000017500000000000013643576737026136 0ustar zuulzuul00000000000000python-saharaclient-3.1.0/releasenotes/source/stein.rst0000664000175000017500000000022113643576737023377 0ustar zuulzuul00000000000000=================================== Stein Series Release Notes =================================== .. release-notes:: :branch: stable/stein python-saharaclient-3.1.0/releasenotes/source/pike.rst0000664000175000017500000000021713643576737023212 0ustar zuulzuul00000000000000=================================== Pike Series Release Notes =================================== .. release-notes:: :branch: stable/pike python-saharaclient-3.1.0/releasenotes/source/queens.rst0000664000175000017500000000022313643576737023557 0ustar zuulzuul00000000000000=================================== Queens Series Release Notes =================================== .. release-notes:: :branch: stable/queens python-saharaclient-3.1.0/releasenotes/source/index.rst0000664000175000017500000000031313643576737023366 0ustar zuulzuul00000000000000=========================== Saharaclient Release Notes =========================== .. toctree:: :maxdepth: 1 unreleased train stein rocky queens pike ocata newton mitaka python-saharaclient-3.1.0/releasenotes/notes/0000775000175000017500000000000013643577103021344 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/releasenotes/notes/drop-py2-7-862abe2ec0c32c5f.yaml0000664000175000017500000000034413643576737026432 0ustar zuulzuul00000000000000--- upgrade: - | Python 2.7 support has been dropped. The last release of python-saharaclient to support Python 2.7 is OpenStack Train. The minimum version of Python now supported by python-saharaclient is Python 3.6. python-saharaclient-3.1.0/releasenotes/notes/plugin-api-f650c26a030b7df8.yaml0000664000175000017500000000020613643576737026517 0ustar zuulzuul00000000000000--- features: - Plugin updates are now supported in saharaclient. Information about plugin labels is also available to users. python-saharaclient-3.1.0/releasenotes/notes/start-using-reno-1f3418c11785c9ab.yaml0000664000175000017500000000007713643576737027625 0ustar zuulzuul00000000000000--- other: - > Start using reno to manage release notes. python-saharaclient-3.1.0/releasenotes/notes/.placeholder0000664000175000017500000000000013643576737023631 0ustar zuulzuul00000000000000python-saharaclient-3.1.0/releasenotes/notes/osc-apiv2-4079c8cdb839ae42.yaml0000664000175000017500000000016513643576737026276 0ustar zuulzuul00000000000000--- features: - | Added the ability for the CLI to communicate with OpenStack Sahara using the new APIv2. python-saharaclient-3.1.0/releasenotes/notes/autogenerated-api-docs-3bc8513e63bfe610.yaml0000664000175000017500000000013613643576737031000 0ustar zuulzuul00000000000000--- features: - > Automatically generated documentation for saharaclient API was added. python-saharaclient-3.1.0/releasenotes/notes/job-create-optional-034307a6b5db2cf2.yaml0000664000175000017500000000031413643576737030277 0ustar zuulzuul00000000000000--- fixes: - > [`bug 1506448 `_] Now ``mains``, ``libs`` and ``description`` parameters of jobs ``create`` method are optional.
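As a short illustration of the optional-parameter change above, here is a minimal hedged sketch of a job ``create`` call; the ``sahara`` client object and the job name/type values are illustrative assumptions, not taken from this repository:

.. sourcecode:: python

    # With mains/libs/description now optional, a job template can be
    # created from just its name and type. The 'sahara' client object is
    # assumed to be built as shown in the pythonclient documentation above.
    job = sahara.jobs.create(name='example-job', type='Pig')
..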
python-saharaclient-3.1.0/releasenotes/notes/remove-old-cli-commands-06b9936ce044dd0f.yaml0000664000175000017500000000026113643576737031076 0ustar zuulzuul00000000000000--- prelude: > Old CLI commands are removed. Please use OpenStackClient instead. deprecations: - Old CLI commands are removed. Please use OpenStackClient instead. python-saharaclient-3.1.0/releasenotes/notes/fix-job-binary-download-py3-5592eca2345305bd.yaml0000664000175000017500000000012213643576737031524 0ustar zuulzuul00000000000000--- fixes: - | Fix the "job binary download" command when Python 3 is used. python-saharaclient-3.1.0/releasenotes/notes/cli-deprecation-da0e7b6dfe77af52.yaml0000664000175000017500000000011613643576737027745 0ustar zuulzuul00000000000000--- deprecations: - > Old CLI is deprecated and will not be maintained. python-saharaclient-3.1.0/releasenotes/notes/autogenerated-cli-docs-c1e89ec6ea66c4a9.yaml0000664000175000017500000000013613643576737031156 0ustar zuulzuul00000000000000--- features: - > Automatically generated documentation for saharaclient CLI was added. python-saharaclient-3.1.0/releasenotes/notes/volume-mount-prefix-b6ef396a357cddd0.yaml0000664000175000017500000000031113643576737030562 0ustar zuulzuul00000000000000--- fixes: - > [`bug 1499697 `_] Now node group templates can be created and updated with ``volume_mount_prefix`` parameter. python-saharaclient-3.1.0/releasenotes/notes/remove-py26-dad75fc8d602b3c5.yaml0000664000175000017500000000022313643576737026710 0ustar zuulzuul00000000000000--- deprecations: - > [`bug 1519510 `_] Support of python 2.6 was dropped. python-saharaclient-3.1.0/releasenotes/notes/remove-functional-tests-c4b9d43c2c32d121.yaml0000664000175000017500000000023313643576737031236 0ustar zuulzuul00000000000000--- prelude: > Functional tests were moved to the sahara-tests repository. Please refer to the README of sahara-tests for how to run these tests now. python-saharaclient-3.1.0/releasenotes/notes/remove-py33-8364cb4805391750.yaml0000664000175000017500000000022313643576737026253 0ustar zuulzuul00000000000000--- deprecations: - > [`bug 1526170 `_] Support of python 3.3 was dropped. python-saharaclient-3.1.0/releasenotes/notes/multiple-clusters-change-69a15f00597739d7.yaml0000664000175000017500000000022713643576737031201 0ustar zuulzuul00000000000000--- other: - When using APIv2, the creation of multiple clusters simultaneously now only supports the API behavior of Sahara 9.0.0.0b2 or later. python-saharaclient-3.1.0/releasenotes/notes/rework-auth-c3e13a68a935671e.yaml0000664000175000017500000000035613643576737026660 0ustar zuulzuul00000000000000--- upgrade: - | The Sahara client library now only supports authentication with a Keystone session object. Consequently the arguments which `saharaclient.api.Client` accepts, and the order of those arguments, have changed. python-saharaclient-3.1.0/releasenotes/notes/shares-update-d6f7e28acd27aa7f.yaml0000664000175000017500000000011113643576737027445 0ustar zuulzuul00000000000000--- features: - > Now shares can be edited on an existing cluster. python-saharaclient-3.1.0/releasenotes/notes/rename_version_to_plugin-version-20cfe17530446391.yaml0000664000175000017500000000067713643576737033015 0ustar zuulzuul00000000000000--- upgrade: - The 'version' option is replaced by the 'plugin-version' option. fixes: - Option 'version' is a global option, which is used for getting the client version.
This caused problems with the OpenStack client: when the plugin's 'version' was specified, OSC treated it as a request for the current client version. Hence, to fix this problem, 'version' has been replaced by 'plugin-version'. Related bug 1565775. python-saharaclient-3.1.0/releasenotes/notes/update-image-optional-f83c5746d88507cd.yaml0000664000175000017500000000024113643576737030604 0ustar zuulzuul00000000000000--- fixes: - > [`bug 1510470 `_] Now ``desc`` parameter of ``update_image`` is optional. python-saharaclient-3.1.0/releasenotes/notes/designate-integration-16c59a6b57dbcfa4.yaml0000664000175000017500000000014113643576737031102 0ustar zuulzuul00000000000000--- features: - Added integration of Designate for hostname resolution through DNS servers python-saharaclient-3.1.0/releasenotes/notes/fields-unset-068db4c3e680c37d.yaml0000664000175000017500000000024113643576737027062 0ustar zuulzuul00000000000000--- fixes: - > [`bug 1534050 `_] Now object's fields can be unset with ``update`` calls. python-saharaclient-3.1.0/releasenotes/notes/tags-update-c794416bcc035cb8.yaml0000664000175000017500000000024613643576737026700 0ustar zuulzuul00000000000000--- fixes: - > [`bug 1500790 `_] Now tags can be added and removed simultaneously in one call. python-saharaclient-3.1.0/releasenotes/notes/fix-osc-520-regression-a92dff38f04e6a57.yaml0000664000175000017500000000031713643576737030605 0ustar zuulzuul00000000000000--- fixes: - | A change in python-openstackclient 5.2.0 broke the image register command. The incompatibility is now solved, but the python-openstackclient requirement has been bumped to 5.2.0. python-saharaclient-3.1.0/releasenotes/notes/api-v2-features-650eb8cc0f50a729.yaml0000664000175000017500000000116013643576737027367 0ustar zuulzuul00000000000000--- features: - | The basic saharaclient and the OSC plugin now include support for the enhanced boot from volume mechanism introduced in the Stein release of Sahara, and support for the keypair replacement mechanism introduced in the Rocky release of Sahara. The OSC plugin also now includes support for the force deletion of clusters feature introduced in the Queens release of Sahara, and support for the decommission of a specific instance feature (albeit only via the --json flag) introduced in the Queens release of Sahara. (All of these features are exclusive to Sahara's APIv2.) python-saharaclient-3.1.0/releasenotes/notes/new-cli-6119bf8a4fb24ab6.yaml0000664000175000017500000000012013643576737026067 0ustar zuulzuul00000000000000--- features: - > A new CLI was implemented as part of the openstackclient. python-saharaclient-3.1.0/releasenotes/notes/job-binary-create-optional-bc0f9ee6426c5659.yaml0000664000175000017500000000030213643576737031610 0ustar zuulzuul00000000000000--- fixes: - > [`bug 1508406 `_] Now ``description`` and ``extra`` parameters of jobs ``create`` method are optional. python-saharaclient-3.1.0/releasenotes/notes/implement-pagination-2ba52769d240a3ce.yaml0000664000175000017500000000010713643576737030570 0ustar zuulzuul00000000000000--- features: - > Pagination for list operations is implemented. python-saharaclient-3.1.0/releasenotes/notes/experimental-v2-support-67ccf699e056ed78.yaml0000664000175000017500000000011613643576737031244 0ustar zuulzuul00000000000000--- features: - Initial support for Sahara's experimental APIv2 is present.
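Since several of the notes above concern APIv2, a hedged sketch of opting into it from Python may help; the ``'2'`` version string is an assumption mirroring the ``'1.1'`` usage shown in the pythonclient documentation, and the session setup is unchanged:

.. sourcecode:: python

    from keystoneauth1.identity import v2
    from keystoneauth1 import session
    from saharaclient import client

    auth = v2.Password(auth_url=AUTH_URL, username=USERNAME,
                       password=PASSWORD, tenant_name=PROJECT_ID)
    ses = session.Session(auth=auth)
    # '2' is assumed to select the experimental APIv2 managers
    # (e.g. ClusterManagerV2) instead of the v1.1 ones.
    sahara = client.Client('2', session=ses)
..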
python-saharaclient-3.1.0/releasenotes/notes/event-logs-c6d286e25dc7d9b1.yaml0000664000175000017500000000021213643576737026626 0ustar zuulzuul00000000000000--- features: - Provides the ability to dump event logs for clusters. A shortened version of the event logs can also be displayed via an option. python-saharaclient-3.1.0/releasenotes/notes/job-job-template-apiv2-change-93ffbf2b1360cddc.yaml0000664000175000017500000000031513643576737032271 0ustar zuulzuul00000000000000other: - When using APIv2, the viewing (GET) of specific job templates and jobs and the creation (POST) of job templates and jobs now only supports the API behavior of Sahara 9.0.0.0b3 or later. python-saharaclient-3.1.0/releasenotes/notes/job-execution-create-optional-1014a403e5ffa7ac.yaml0000664000175000017500000000030713643576737032360 0ustar zuulzuul00000000000000--- fixes: - > [`bug 1507966 `_] Now input_id, output_id, configs parameters of job executions create method are optional. python-saharaclient-3.1.0/saharaclient/0000775000175000017500000000000013643577103020161 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/saharaclient/api/0000775000175000017500000000000013643577103020732 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/saharaclient/api/clusters.py0000664000175000017500000002017713643576737023163 0ustar zuulzuul00000000000000# Copyright (c) 2013 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from six.moves.urllib import parse from saharaclient.api import base class Cluster(base.Resource): resource_name = 'Cluster' class ClusterManagerV1(base.ResourceManager): resource_class = Cluster NotUpdated = base.NotUpdated() def create(self, name, plugin_name, hadoop_version, cluster_template_id=None, default_image_id=None, is_transient=None, description=None, cluster_configs=None, node_groups=None, user_keypair_id=None, anti_affinity=None, net_id=None, count=None, use_autoconfig=None, shares=None, is_public=None, is_protected=None): """Launch a Cluster.""" data = { 'name': name, 'plugin_name': plugin_name, 'hadoop_version': hadoop_version, } return self._do_create(data, cluster_template_id, default_image_id, is_transient, description, cluster_configs, node_groups, user_keypair_id, anti_affinity, net_id, count, use_autoconfig, shares, is_public, is_protected, api_ver=1.1) def _do_create(self, data, cluster_template_id, default_image_id, is_transient, description, cluster_configs, node_groups, user_keypair_id, anti_affinity, net_id, count, use_autoconfig, shares, is_public, is_protected, api_ver): # Checking if count is greater than 1, otherwise we set it to None # so the created dict in the _copy_if_defined method does not contain # the count parameter.
if count and count <= 1: count = None self._copy_if_defined(data, cluster_template_id=cluster_template_id, is_transient=is_transient, default_image_id=default_image_id, description=description, cluster_configs=cluster_configs, node_groups=node_groups, user_keypair_id=user_keypair_id, anti_affinity=anti_affinity, neutron_management_network=net_id, count=count, use_autoconfig=use_autoconfig, shares=shares, is_public=is_public, is_protected=is_protected) if count: if api_ver >= 2: return self._create('/clusters', data) else: return self._create('/clusters/multiple', data) return self._create('/clusters', data, 'cluster') def scale(self, cluster_id, scale_object): """Scale an existing Cluster. :param scale_object: dict that describes scaling operation :Example: The following `scale_object` can be used to change the number of instances in a node group and add instances of a new node group to an existing cluster: .. sourcecode:: json { "add_node_groups": [ { "count": 3, "name": "new_ng", "node_group_template_id": "ngt_id" } ], "resize_node_groups": [ { "count": 2, "name": "old_ng" } ] } """ return self._update('/clusters/%s' % cluster_id, scale_object) def list(self, search_opts=None, limit=None, marker=None, sort_by=None, reverse=None): """Get a list of Clusters.""" query = base.get_query_string(search_opts, limit=limit, marker=marker, sort_by=sort_by, reverse=reverse) url = "/clusters%s" % query return self._page(url, 'clusters', limit) def get(self, cluster_id, show_progress=False): """Get information about a Cluster.""" url = ('/clusters/%(cluster_id)s?%(params)s' % {"cluster_id": cluster_id, "params": parse.urlencode({"show_progress": show_progress})}) return self._get(url, 'cluster') def delete(self, cluster_id): """Delete a Cluster.""" self._delete('/clusters/%s' % cluster_id) def update(self, cluster_id, name=NotUpdated, description=NotUpdated, is_public=NotUpdated, is_protected=NotUpdated, shares=NotUpdated): """Update a Cluster.""" data = {} self._copy_if_updated(data, name=name, description=description, is_public=is_public, is_protected=is_protected, shares=shares) return self._patch('/clusters/%s' % cluster_id, data) def verification_update(self, cluster_id, status): """Start a verification for a Cluster.""" data = {'verification': {'status': status}} return self._patch("/clusters/%s" % cluster_id, data) class ClusterManagerV2(ClusterManagerV1): def create(self, name, plugin_name, plugin_version, cluster_template_id=None, default_image_id=None, is_transient=None, description=None, cluster_configs=None, node_groups=None, user_keypair_id=None, anti_affinity=None, net_id=None, count=None, use_autoconfig=None, shares=None, is_public=None, is_protected=None): """Launch a Cluster.""" data = { 'name': name, 'plugin_name': plugin_name, 'plugin_version': plugin_version, } return self._do_create(data, cluster_template_id, default_image_id, is_transient, description, cluster_configs, node_groups, user_keypair_id, anti_affinity, net_id, count, use_autoconfig, shares, is_public, is_protected, api_ver=2) def scale(self, cluster_id, scale_object): """Scale an existing Cluster. :param scale_object: dict that describes scaling operation :Example: The following `scale_object` can be used to change the number of instances in a node group (optionally specifying which instances to delete) or add instances of a new node group to an existing cluster: ..
sourcecode:: json { "add_node_groups": [ { "count": 3, "name": "new_ng", "node_group_template_id": "ngt_id" } ], "resize_node_groups": [ { "count": 2, "name": "old_ng", "instances": ["instance_id1", "instance_id2"] } ] } """ return self._update('/clusters/%s' % cluster_id, scale_object) def force_delete(self, cluster_id): """Force Delete a Cluster.""" data = {'force': True} return self._delete('/clusters/%s' % cluster_id, data) def update_keypair(self, cluster_id): """Reflect an updated keypair on the cluster.""" data = {'update_keypair': True} return self._patch("/clusters/%s" % cluster_id, data) # NOTE(jfreud): keep this around for backwards compatibility ClusterManager = ClusterManagerV1 python-saharaclient-3.1.0/saharaclient/api/job_types.py0000664000175000017500000000170713643576737023323 0ustar zuulzuul00000000000000# Copyright (c) 2015 Red Hat Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from saharaclient.api import base class JobType(base.Resource): resource_name = 'JobType' class JobTypesManager(base.ResourceManager): resource_class = JobType def list(self, search_opts=None): """Get a list of job types supported by plugins.""" query = base.get_query_string(search_opts) return self._list('/job-types%s' % query, 'job_types') python-saharaclient-3.1.0/saharaclient/api/jobs.py0000664000175000017500000000472213643576737022262 0ustar zuulzuul00000000000000# Copyright (c) 2013 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
from saharaclient.api import base class Job(base.Resource): resource_name = 'Job' class JobsManagerV1(base.ResourceManager): resource_class = Job NotUpdated = base.NotUpdated() def create(self, name, type, mains=None, libs=None, description=None, interface=None, is_public=None, is_protected=None): """Create a Job.""" data = { 'name': name, 'type': type } self._copy_if_defined(data, description=description, mains=mains, libs=libs, interface=interface, is_public=is_public, is_protected=is_protected) return self._create('/jobs', data, 'job') def list(self, search_opts=None, limit=None, marker=None, sort_by=None, reverse=None): """Get a list of Jobs.""" query = base.get_query_string(search_opts, limit=limit, marker=marker, sort_by=sort_by, reverse=reverse) url = "/jobs%s" % query return self._page(url, 'jobs', limit) def get(self, job_id): """Get information about a Job""" return self._get('/jobs/%s' % job_id, 'job') def get_configs(self, job_type): """Get config hints for a specified Job type.""" return self._get('/jobs/config-hints/%s' % job_type) def delete(self, job_id): """Delete a Job""" self._delete('/jobs/%s' % job_id) def update(self, job_id, name=NotUpdated, description=NotUpdated, is_public=NotUpdated, is_protected=NotUpdated): """Update a Job.""" data = {} self._copy_if_updated(data, name=name, description=description, is_public=is_public, is_protected=is_protected) return self._patch('/jobs/%s' % job_id, data) # NOTE(jfreud): keep this around for backwards compatibility JobsManager = JobsManagerV1 python-saharaclient-3.1.0/saharaclient/api/job_binary_internals.py0000664000175000017500000000446613643576737025527 0ustar zuulzuul00000000000000# Copyright (c) 2013 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from six.moves.urllib import parse as urlparse from saharaclient.api import base class JobBinaryInternal(base.Resource): resource_name = 'JobBinaryInternal' class JobBinaryInternalsManager(base.ResourceManager): resource_class = JobBinaryInternal NotUpdated = base.NotUpdated() def create(self, name, data): """Create a Job Binary Internal. 
:param str data: raw data of script text """ return self._update('/job-binary-internals/%s' % urlparse.quote(name.encode('utf-8')), data, 'job_binary_internal', dump_json=False) def list(self, search_opts=None, limit=None, marker=None, sort_by=None, reverse=None): """Get a list of Job Binary Internals.""" query = base.get_query_string(search_opts, limit=limit, marker=marker, sort_by=sort_by, reverse=reverse) url = "/job-binary-internals%s" % query return self._page(url, 'binaries', limit) def get(self, job_binary_id): """Get information about a Job Binary Internal.""" return self._get('/job-binary-internals/%s' % job_binary_id, 'job_binary_internal') def delete(self, job_binary_id): """Delete a Job Binary Internal.""" self._delete('/job-binary-internals/%s' % job_binary_id) def update(self, job_binary_id, name=NotUpdated, is_public=NotUpdated, is_protected=NotUpdated): """Update a Job Binary Internal.""" data = {} self._copy_if_updated(data, name=name, is_public=is_public, is_protected=is_protected) return self._patch('/job-binary-internals/%s' % job_binary_id, data) python-saharaclient-3.1.0/saharaclient/api/helpers.py0000664000175000017500000000540313643576737022764 0ustar zuulzuul00000000000000# Copyright (c) 2013 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
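As a hypothetical illustration of the manager above -- uploading a local script as a Job Binary Internal, assuming `client` is a constructed v1 Client and the file name is a placeholder:

with open('wordcount.pig') as f:
    script_text = f.read()
# create() URL-quotes the name and sends the raw text as the request body.
binary = client.job_binary_internals.create('wordcount.pig', script_text)
print(binary.id)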
from saharaclient.api import parameters as params class Helpers(object): def __init__(self, sahara_client): self.sahara = sahara_client self.plugins = self.sahara.plugins def _get_node_processes(self, plugin): processes = [] for proc_lst in plugin.node_processes.values(): processes += proc_lst return [(proc_name, proc_name) for proc_name in processes] def get_node_processes(self, plugin_name, hadoop_version): plugin = self.plugins.get_version_details(plugin_name, hadoop_version) return self._get_node_processes(plugin) def _extract_parameters(self, configs, scope, applicable_target): parameters = [] for config in configs: if (config['scope'] == scope and config['applicable_target'] == applicable_target): parameters.append(params.Parameter(config)) return parameters def get_cluster_general_configs(self, plugin_name, hadoop_version): plugin = self.plugins.get_version_details(plugin_name, hadoop_version) return self._extract_parameters(plugin.configs, 'cluster', "general") def get_general_node_group_configs(self, plugin_name, hadoop_version): plugin = self.plugins.get_version_details(plugin_name, hadoop_version) return self._extract_parameters(plugin.configs, 'node', 'general') def get_targeted_node_group_configs(self, plugin_name, hadoop_version): plugin = self.plugins.get_version_details(plugin_name, hadoop_version) parameters = dict() for service in plugin.node_processes.keys(): parameters[service] = self._extract_parameters(plugin.configs, 'node', service) return parameters def get_targeted_cluster_configs(self, plugin_name, hadoop_version): plugin = self.plugins.get_version_details(plugin_name, hadoop_version) parameters = dict() for service in plugin.node_processes.keys(): parameters[service] = self._extract_parameters(plugin.configs, 'cluster', service) return parameters python-saharaclient-3.1.0/saharaclient/api/v2/0000775000175000017500000000000013643577103021261 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/saharaclient/api/v2/jobs.py0000664000175000017500000000461613643576737022613 0ustar zuulzuul00000000000000# Copyright (c) 2018 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
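The Helpers class is consumed mainly by Horizon, but nothing prevents direct use; a sketch under the assumption that `client` is a constructed Client and the plugin/version pair is registered with Sahara:

from saharaclient.api import helpers

h = helpers.Helpers(client)
# (name, name) pairs for every node process the plugin exposes
processes = h.get_node_processes('vanilla', '2.7.1')
# Parameter objects for cluster-scoped configs with the 'general' target
general_configs = h.get_cluster_general_configs('vanilla', '2.7.1')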
from saharaclient.api import base class Job(base.Resource): resource_name = 'Job' class JobsManagerV2(base.ResourceManager): resource_class = Job NotUpdated = base.NotUpdated() def list(self, search_opts=None, marker=None, limit=None, sort_by=None, reverse=None): """Get a list of Jobs.""" query = base.get_query_string(search_opts, limit=limit, marker=marker, sort_by=sort_by, reverse=reverse) url = "/jobs%s" % query return self._page(url, 'jobs', limit) def get(self, obj_id): """Get information about a Job.""" return self._get('/jobs/%s' % obj_id, 'job') def delete(self, obj_id): """Delete a Job.""" self._delete('/jobs/%s' % obj_id) def create(self, job_template_id, cluster_id, input_id=None, output_id=None, configs=None, interface=None, is_public=None, is_protected=None): """Launch a Job.""" data = { "cluster_id": cluster_id, "job_template_id": job_template_id } self._copy_if_defined(data, input_id=input_id, output_id=output_id, job_configs=configs, interface=interface, is_public=is_public, is_protected=is_protected) return self._create('/jobs', data, 'job') def refresh_status(self, obj_id): """Refresh Job Status.""" return self._get( '/jobs/%s?refresh_status=True' % obj_id, 'job' ) def update(self, obj_id, is_public=NotUpdated, is_protected=NotUpdated): """Update a Job.""" data = {} self._copy_if_updated(data, is_public=is_public, is_protected=is_protected) return self._patch('/jobs/%s' % obj_id, data) python-saharaclient-3.1.0/saharaclient/api/v2/job_templates.py0000664000175000017500000000513013643576737024476 0ustar zuulzuul00000000000000# Copyright (c) 2018 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
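A hedged sketch of launching a job and polling it via refresh_status(), assuming `client` is a ClientV2, the IDs are placeholders, and the terminal states listed in the loop match what your Sahara deployment reports:

import time

job = client.jobs.create(job_template_id='jt_id', cluster_id='cluster_id',
                         configs={'configs': {}, 'args': []})
while True:
    job = client.jobs.refresh_status(job.id)
    if job.info['status'] in ('SUCCEEDED', 'DONEWITHERROR', 'KILLED'):
        break
    time.sleep(10)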
from saharaclient.api import base class JobTemplate(base.Resource): resource_name = 'Job Template' class JobTemplatesManagerV2(base.ResourceManager): resource_class = JobTemplate NotUpdated = base.NotUpdated() def create(self, name, type, mains=None, libs=None, description=None, interface=None, is_public=None, is_protected=None): """Create a Job Template.""" data = { 'name': name, 'type': type } self._copy_if_defined(data, description=description, mains=mains, libs=libs, interface=interface, is_public=is_public, is_protected=is_protected) return self._create('/%s' % 'job-templates', data, 'job_template') def list(self, search_opts=None, limit=None, marker=None, sort_by=None, reverse=None): """Get a list of Job Templates.""" query = base.get_query_string(search_opts, limit=limit, marker=marker, sort_by=sort_by, reverse=reverse) url = "/%s%s" % ('job-templates', query) return self._page(url, 'job_templates', limit) def get(self, job_id): """Get information about a Job Template.""" return self._get('/%s/%s' % ('job-templates', job_id), 'job_template') def get_configs(self, job_type): """Get config hints for a specified Job Template type.""" return self._get('/%s/config-hints/%s' % ('job-templates', job_type)) def delete(self, job_id): """Delete a Job Template.""" self._delete('/%s/%s' % ('job-templates', job_id)) def update(self, job_id, name=NotUpdated, description=NotUpdated, is_public=NotUpdated, is_protected=NotUpdated): """Update a Job Template.""" data = {} self._copy_if_updated(data, name=name, description=description, is_public=is_public, is_protected=is_protected) return self._patch('/%s/%s' % ('job-templates', job_id), data) python-saharaclient-3.1.0/saharaclient/api/v2/__init__.py0000664000175000017500000000000013643576737023374 0ustar zuulzuul00000000000000python-saharaclient-3.1.0/saharaclient/api/base.py0000664000175000017500000002043413643576737022235 0ustar zuulzuul00000000000000# Copyright (c) 2013 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
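For illustration only -- creating a Pig job template with the manager above; the binary IDs are placeholders and `client` is an assumed ClientV2:

template = client.job_templates.create(
    name='wordcount', type='Pig',
    mains=['main_binary_id'], libs=['lib_binary_id'],
    description='example job template')
print(template.id)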
import copy from oslo_serialization import jsonutils from six.moves.urllib import parse from saharaclient._i18n import _ class Resource(object): resource_name = 'Something' defaults = {} def __init__(self, manager, info): self.manager = manager info = info.copy() self._info = info self._set_defaults(info) self._add_details(info) def _set_defaults(self, info): for name, value in self.defaults.items(): if name not in info: info[name] = value def _add_details(self, info): for (k, v) in info.items(): try: setattr(self, k, v) self._info[k] = v except AttributeError: # In this case we already defined the attribute on the class pass def to_dict(self): return copy.deepcopy(self._info) def __str__(self): return '%s %s' % (self.resource_name, str(self._info)) def _check_items(obj, searches): try: return all(getattr(obj, attr) == value for (attr, value) in searches) except AttributeError: return False class NotUpdated(object): """A sentinel class to signal that parameter should not be updated.""" def __repr__(self): return 'NotUpdated' class ResourceManager(object): resource_class = None def __init__(self, api): self.api = api def find(self, **kwargs): return [i for i in self.list() if _check_items(i, kwargs.items())] def find_unique(self, **kwargs): found = self.find(**kwargs) if not found: raise APIException(error_code=404, error_message=_("No matches found.")) if len(found) > 1: raise APIException(error_code=409, error_message=_("Multiple matches found.")) return found[0] def _copy_if_defined(self, data, **kwargs): for var_name, var_value in kwargs.items(): if var_value is not None: data[var_name] = var_value def _copy_if_updated(self, data, **kwargs): for var_name, var_value in kwargs.items(): if not isinstance(var_value, NotUpdated): data[var_name] = var_value def _create(self, url, data, response_key=None, dump_json=True): if dump_json: kwargs = {'json': data} else: kwargs = {'data': data} resp = self.api.post(url, **kwargs) if resp.status_code != 202: self._raise_api_exception(resp) if response_key is not None: data = get_json(resp)[response_key] else: data = get_json(resp) return self.resource_class(self, data) def _update(self, url, data, response_key=None, dump_json=True): if dump_json: kwargs = {'json': data} else: kwargs = {'data': data} resp = self.api.put(url, **kwargs) if resp.status_code not in [200, 202]: self._raise_api_exception(resp) if response_key is not None: data = get_json(resp)[response_key] else: data = get_json(resp) return self.resource_class(self, data) def _patch(self, url, data, response_key=None, dump_json=True): if dump_json: kwargs = {'json': data} else: kwargs = {'data': data} resp = self.api.patch(url, **kwargs) if resp.status_code != 202: self._raise_api_exception(resp) if response_key is not None: data = get_json(resp)[response_key] else: data = get_json(resp) return self.resource_class(self, data) def _post(self, url, data, response_key=None, dump_json=True): if dump_json: kwargs = {'json': data} else: kwargs = {'data': data} resp = self.api.post(url, **kwargs) if resp.status_code != 202: self._raise_api_exception(resp) if response_key is not None: data = get_json(resp)[response_key] else: data = get_json(resp) return self.resource_class(self, data) def _list(self, url, response_key): resp = self.api.get(url) if resp.status_code == 200: data = get_json(resp)[response_key] return [self.resource_class(self, res) for res in data] else: self._raise_api_exception(resp) def _page(self, url, response_key, limit=None): resp = self.api.get(url) if resp.status_code == 
200: result = get_json(resp) data = result[response_key] meta = result.get('markers') next, prev = None, None if meta: prev = meta.get('prev') next = meta.get('next') li = [self.resource_class(self, res) for res in data] return Page(li, prev, next, limit) else: self._raise_api_exception(resp) def _get(self, url, response_key=None): resp = self.api.get(url) if resp.status_code == 200: if response_key is not None: data = get_json(resp)[response_key] else: data = get_json(resp) return self.resource_class(self, data) else: self._raise_api_exception(resp) def _delete(self, url, data=None): if data is not None: kwargs = {'json': data} resp = self.api.delete(url, **kwargs) else: resp = self.api.delete(url) if resp.status_code not in [200, 204]: self._raise_api_exception(resp) if resp.status_code == 200: return get_json(resp) def _plurify_resource_name(self): return self.resource_class.resource_name + 's' def _raise_api_exception(self, resp): try: error_data = get_json(resp) except Exception: msg = _("Failed to parse response from Sahara: %s") % resp.reason raise APIException( error_code=resp.status_code, error_message=msg) raise APIException(error_code=error_data.get("error_code"), error_name=error_data.get("error_name"), error_message=error_data.get("error_message")) def get_json(response): """Provide backward compatibility with old versions of requests library.""" json_field_or_function = getattr(response, 'json', None) if callable(json_field_or_function): return response.json() else: return jsonutils.loads(response.content) class APIException(Exception): def __init__(self, error_code=None, error_name=None, error_message=None): super(APIException, self).__init__(error_message) self.error_code = error_code self.error_name = error_name self.error_message = error_message def get_query_string(search_opts, limit=None, marker=None, sort_by=None, reverse=None): opts = {} if marker is not None: opts['marker'] = marker if limit is not None: opts['limit'] = limit if sort_by is not None: if reverse: opts['sort_by'] = "-%s" % sort_by else: opts['sort_by'] = sort_by if search_opts is not None: opts.update(search_opts) if opts: qparams = sorted(opts.items(), key=lambda x: x[0]) query_string = "?%s" % parse.urlencode(qparams, doseq=True) else: query_string = "" return query_string class Page(list): def __init__(self, l, prev, next, limit): super(Page, self).__init__(l) self.prev = prev self.next = next self.limit = limit python-saharaclient-3.1.0/saharaclient/api/plugins.py0000664000175000017500000000615113643576737023004 0ustar zuulzuul00000000000000# Copyright (c) 2013 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
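The _page()/Page pair above is what makes marker-based pagination work; a sketch of draining every page through any list() call that accepts limit and marker, with `client` assumed to be a constructed Client:

page = client.clusters.list(limit=20)
clusters = list(page)
while page.next:
    # 'next' is the marker Sahara returned in the response's 'markers' block
    page = client.clusters.list(limit=20, marker=page.next)
    clusters.extend(page)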
from six.moves.urllib import parse as urlparse from saharaclient.api import base class Plugin(base.Resource): resource_name = 'Plugin' def __init__(self, manager, info): base.Resource.__init__(self, manager, info) # Horizon requires each object in table to have an id self.id = self.name class _PluginManager(base.ResourceManager): resource_class = Plugin def list(self, search_opts=None): """Get a list of Plugins.""" query = base.get_query_string(search_opts) return self._list('/plugins%s' % query, 'plugins') def get(self, plugin_name): """Get information about a Plugin.""" return self._get('/plugins/%s' % plugin_name, 'plugin') def get_version_details(self, plugin_name, hadoop_version): """Get version details Get the list of Services and Service Parameters for a specified Plugin and Plugin Version. """ return self._get('/plugins/%s/%s' % (plugin_name, hadoop_version), 'plugin') def update(self, plugin_name, values): """Update a plugin and return the updated result to the user. """ return self._patch("/plugins/%s" % plugin_name, values, 'plugin') class PluginManagerV1(_PluginManager): def convert_to_cluster_template(self, plugin_name, hadoop_version, template_name, filecontent): """Convert to cluster template Create a Cluster Template directly, avoiding the Cluster Template mechanism. """ resp = self.api.post('/plugins/%s/%s/convert-config/%s' % (plugin_name, hadoop_version, urlparse.quote(template_name)), data=filecontent) if resp.status_code != 202: raise RuntimeError('Failed to upload template file for plugin "%s"' ' and version "%s"' % (plugin_name, hadoop_version)) else: return base.get_json(resp)['cluster_template'] class PluginManagerV2(_PluginManager): def get_version_details(self, plugin_name, plugin_version): """Get version details Get the list of Services and Service Parameters for a specified Plugin and Plugin Version. """ return self._get('/plugins/%s/%s' % (plugin_name, plugin_version), 'plugin') # NOTE(jfreud): keep this around for backwards compatibility PluginManager = PluginManagerV1 python-saharaclient-3.1.0/saharaclient/api/job_binaries.py0000664000175000017500000000641113643576737023750 0ustar zuulzuul00000000000000# Copyright (c) 2013 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from saharaclient.api import base class JobBinaries(base.Resource): resource_name = 'Job Binary' class JobBinariesManagerV1(base.ResourceManager): resource_class = JobBinaries version = 1.1 def create(self, name, url, description=None, extra=None, is_public=None, is_protected=None): """Create a Job Binary.
:param dict extra: authentication info needed for some job binaries, containing the keys `user` and `password` for job binary in Swift or the keys `accesskey`, `secretkey`, and `endpoint` for job binary in S3 """ data = { "name": name, "url": url } self._copy_if_defined(data, description=description, extra=extra, is_public=is_public, is_protected=is_protected) return self._create('/job-binaries', data, 'job_binary') def list(self, search_opts=None, limit=None, marker=None, sort_by=None, reverse=None): """Get a list of Job Binaries.""" query = base.get_query_string(search_opts, limit=limit, marker=marker, sort_by=sort_by, reverse=reverse) url = "/job-binaries%s" % query return self._page(url, 'binaries', limit) def get(self, job_binary_id): """Get information about a Job Binary.""" return self._get('/job-binaries/%s' % job_binary_id, 'job_binary') def delete(self, job_binary_id): """Delete a Job Binary.""" self._delete('/job-binaries/%s' % job_binary_id) def get_file(self, job_binary_id): """Download a Job Binary.""" resp = self.api.get('/job-binaries/%s/data' % job_binary_id) if resp.status_code != 200: self._raise_api_exception(resp) return resp.content def update(self, job_binary_id, data): """Update Job Binary. :param dict data: dict that contains fields that should be updated with new values. Fields that can be updated: * name * description * url * is_public * is_protected * extra - dict with the keys `user` and `password` for job binary in Swift, or with the keys `accesskey`, `secretkey`, and `endpoint` for job binary in S3 """ if self.version >= 2: UPDATE_FUNC = self._patch else: UPDATE_FUNC = self._update return UPDATE_FUNC( '/job-binaries/%s' % job_binary_id, data, 'job_binary') class JobBinariesManagerV2(JobBinariesManagerV1): version = 2 # NOTE(jfreud): keep this around for backwards compatibility JobBinariesManager = JobBinariesManagerV1 python-saharaclient-3.1.0/saharaclient/api/client.py0000664000175000017500000001047713643576737022607 0ustar zuulzuul00000000000000# Copyright (c) 2013 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from keystoneauth1 import adapter from saharaclient.api import cluster_templates from saharaclient.api import clusters from saharaclient.api import data_sources from saharaclient.api import images from saharaclient.api import job_binaries from saharaclient.api import job_binary_internals from saharaclient.api import job_executions from saharaclient.api import job_types from saharaclient.api import jobs from saharaclient.api import node_group_templates from saharaclient.api import plugins from saharaclient.api.v2 import job_templates from saharaclient.api.v2 import jobs as jobs_v2 USER_AGENT = 'python-saharaclient' class HTTPClient(adapter.Adapter): def request(self, *args, **kwargs): kwargs.setdefault('raise_exc', False) kwargs.setdefault('allow', {'allow_experimental': True}) return super(HTTPClient, self).request(*args, **kwargs) class Client(object): _api_version = '1.1' """Client for the OpenStack Data Processing API. 
:param session: Keystone session object. Required. :param string sahara_url: Endpoint override. :param string endpoint_type: Desired Sahara endpoint type. :param string service_type: Sahara service name in Keystone catalog. :param string region_name: Name of a region to select when choosing an endpoint from the service catalog. """ def __init__(self, session=None, sahara_url=None, endpoint_type='publicURL', service_type='data-processing', region_name=None, **kwargs): if not session: raise RuntimeError("Must provide session") auth = session.auth kwargs['user_agent'] = USER_AGENT kwargs.setdefault('interface', endpoint_type) kwargs.setdefault('endpoint_override', sahara_url) client = HTTPClient(session=session, auth=auth, service_type=service_type, region_name=region_name, version=self._api_version, **kwargs) self._register_managers(client) def _register_managers(self, client): self.clusters = clusters.ClusterManagerV1(client) self.cluster_templates = ( cluster_templates.ClusterTemplateManagerV1(client) ) self.node_group_templates = ( node_group_templates.NodeGroupTemplateManagerV1(client) ) self.plugins = plugins.PluginManagerV1(client) self.images = images.ImageManagerV1(client) self.data_sources = data_sources.DataSourceManagerV1(client) self.jobs = jobs.JobsManagerV1(client) self.job_executions = job_executions.JobExecutionsManager(client) self.job_binaries = job_binaries.JobBinariesManagerV1(client) self.job_binary_internals = ( job_binary_internals.JobBinaryInternalsManager(client) ) self.job_types = job_types.JobTypesManager(client) class ClientV2(Client): _api_version = '2' def _register_managers(self, client): self.clusters = clusters.ClusterManagerV2(client) self.cluster_templates = ( cluster_templates.ClusterTemplateManagerV2(client) ) self.node_group_templates = ( node_group_templates.NodeGroupTemplateManagerV2(client) ) self.plugins = plugins.PluginManagerV2(client) self.images = images.ImageManagerV2(client) self.data_sources = data_sources.DataSourceManagerV2(client) self.job_templates = job_templates.JobTemplatesManagerV2(client) self.jobs = jobs_v2.JobsManagerV2(client) self.job_binaries = job_binaries.JobBinariesManagerV2(client) self.job_types = job_types.JobTypesManager(client) python-saharaclient-3.1.0/saharaclient/api/node_group_templates.py0000664000175000017500000002707713643576737025554 0ustar zuulzuul00000000000000# Copyright (c) 2013 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
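A minimal construction sketch for the Client/ClientV2 classes above; the Keystone URL and credentials are placeholders:

from keystoneauth1.identity import v3
from keystoneauth1 import session as ks_session
from saharaclient.api import client as sahara_client

auth = v3.Password(auth_url='http://keystone:5000/v3',
                   username='demo', password='secret', project_name='demo',
                   user_domain_id='default', project_domain_id='default')
sess = ks_session.Session(auth=auth)
client = sahara_client.ClientV2(session=sess)  # or Client(...) for API v1.1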
from saharaclient.api import base class NodeGroupTemplate(base.Resource): resource_name = 'Node Group Template' class NodeGroupTemplateManagerV1(base.ResourceManager): resource_class = NodeGroupTemplate NotUpdated = base.NotUpdated() def create(self, name, plugin_name, hadoop_version, flavor_id, description=None, volumes_per_node=None, volumes_size=None, node_processes=None, node_configs=None, floating_ip_pool=None, security_groups=None, auto_security_group=None, availability_zone=None, volumes_availability_zone=None, volume_type=None, image_id=None, is_proxy_gateway=None, volume_local_to_instance=None, use_autoconfig=None, shares=None, is_public=None, is_protected=None, volume_mount_prefix=None): """Create a Node Group Template.""" data = { 'name': name, 'plugin_name': plugin_name, 'hadoop_version': hadoop_version, 'flavor_id': flavor_id, 'node_processes': node_processes } return self._do_create(data, description, volumes_per_node, volumes_size, node_configs, floating_ip_pool, security_groups, auto_security_group, availability_zone, volumes_availability_zone, volume_type, image_id, is_proxy_gateway, volume_local_to_instance, use_autoconfig, shares, is_public, is_protected, volume_mount_prefix) def _do_create(self, data, description, volumes_per_node, volumes_size, node_configs, floating_ip_pool, security_groups, auto_security_group, availability_zone, volumes_availability_zone, volume_type, image_id, is_proxy_gateway, volume_local_to_instance, use_autoconfig, shares, is_public, is_protected, volume_mount_prefix, boot_from_volume=None, boot_volume_type=None, boot_volume_az=None, boot_volume_local=None): self._copy_if_defined(data, description=description, node_configs=node_configs, floating_ip_pool=floating_ip_pool, security_groups=security_groups, auto_security_group=auto_security_group, availability_zone=availability_zone, image_id=image_id, is_proxy_gateway=is_proxy_gateway, use_autoconfig=use_autoconfig, shares=shares, is_public=is_public, is_protected=is_protected, boot_from_volume=boot_from_volume, boot_volume_type=boot_volume_type, boot_volume_availability_zone=boot_volume_az, boot_volume_local_to_instance=boot_volume_local ) if volumes_per_node: data.update({"volumes_per_node": volumes_per_node, "volumes_size": volumes_size}) if volumes_availability_zone: data.update({"volumes_availability_zone": volumes_availability_zone}) if volume_type: data.update({"volume_type": volume_type}) if volume_local_to_instance: data.update( {"volume_local_to_instance": volume_local_to_instance}) if volume_mount_prefix: data.update({"volume_mount_prefix": volume_mount_prefix}) return self._create('/node-group-templates', data, 'node_group_template') def update(self, ng_template_id, name=NotUpdated, plugin_name=NotUpdated, hadoop_version=NotUpdated, flavor_id=NotUpdated, description=NotUpdated, volumes_per_node=NotUpdated, volumes_size=NotUpdated, node_processes=NotUpdated, node_configs=NotUpdated, floating_ip_pool=NotUpdated, security_groups=NotUpdated, auto_security_group=NotUpdated, availability_zone=NotUpdated, volumes_availability_zone=NotUpdated, volume_type=NotUpdated, image_id=NotUpdated, is_proxy_gateway=NotUpdated, volume_local_to_instance=NotUpdated, use_autoconfig=NotUpdated, shares=NotUpdated, is_public=NotUpdated, is_protected=NotUpdated, volume_mount_prefix=NotUpdated): """Update a Node Group Template.""" data = {} self._copy_if_updated( data, name=name, plugin_name=plugin_name, hadoop_version=hadoop_version, flavor_id=flavor_id, description=description, 
volumes_per_node=volumes_per_node, volumes_size=volumes_size, node_processes=node_processes, node_configs=node_configs, floating_ip_pool=floating_ip_pool, security_groups=security_groups, auto_security_group=auto_security_group, availability_zone=availability_zone, volumes_availability_zone=volumes_availability_zone, volume_type=volume_type, image_id=image_id, is_proxy_gateway=is_proxy_gateway, volume_local_to_instance=volume_local_to_instance, use_autoconfig=use_autoconfig, shares=shares, is_public=is_public, is_protected=is_protected, volume_mount_prefix=volume_mount_prefix ) return self._update('/node-group-templates/%s' % ng_template_id, data, 'node_group_template') def list(self, search_opts=None, marker=None, limit=None, sort_by=None, reverse=None): """Get a list of Node Group Templates.""" query = base.get_query_string(search_opts, limit=limit, marker=marker, sort_by=sort_by, reverse=reverse) url = "/node-group-templates%s" % query return self._page(url, 'node_group_templates', limit) def get(self, ng_template_id): """Get information about a Node Group Template.""" return self._get('/node-group-templates/%s' % ng_template_id, 'node_group_template') def delete(self, ng_template_id): """Delete a Node Group Template.""" self._delete('/node-group-templates/%s' % ng_template_id) def export(self, ng_template_id): """Export a Node Group Template.""" return self._get('/node-group-templates/%s/export' % ng_template_id) class NodeGroupTemplateManagerV2(NodeGroupTemplateManagerV1): NotUpdated = base.NotUpdated() def create(self, name, plugin_name, plugin_version, flavor_id, description=None, volumes_per_node=None, volumes_size=None, node_processes=None, node_configs=None, floating_ip_pool=None, security_groups=None, auto_security_group=None, availability_zone=None, volumes_availability_zone=None, volume_type=None, image_id=None, is_proxy_gateway=None, volume_local_to_instance=None, use_autoconfig=None, shares=None, is_public=None, is_protected=None, volume_mount_prefix=None, boot_from_volume=None, boot_volume_type=None, boot_volume_availability_zone=None, boot_volume_local_to_instance=None): """Create a Node Group Template.""" data = { 'name': name, 'plugin_name': plugin_name, 'plugin_version': plugin_version, 'flavor_id': flavor_id, 'node_processes': node_processes } return self._do_create(data, description, volumes_per_node, volumes_size, node_configs, floating_ip_pool, security_groups, auto_security_group, availability_zone, volumes_availability_zone, volume_type, image_id, is_proxy_gateway, volume_local_to_instance, use_autoconfig, shares, is_public, is_protected, volume_mount_prefix, boot_from_volume, boot_volume_type, boot_volume_availability_zone, boot_volume_local_to_instance) def update(self, ng_template_id, name=NotUpdated, plugin_name=NotUpdated, plugin_version=NotUpdated, flavor_id=NotUpdated, description=NotUpdated, volumes_per_node=NotUpdated, volumes_size=NotUpdated, node_processes=NotUpdated, node_configs=NotUpdated, floating_ip_pool=NotUpdated, security_groups=NotUpdated, auto_security_group=NotUpdated, availability_zone=NotUpdated, volumes_availability_zone=NotUpdated, volume_type=NotUpdated, image_id=NotUpdated, is_proxy_gateway=NotUpdated, volume_local_to_instance=NotUpdated, use_autoconfig=NotUpdated, shares=NotUpdated, is_public=NotUpdated, is_protected=NotUpdated, volume_mount_prefix=NotUpdated, boot_from_volume=NotUpdated, boot_volume_type=NotUpdated, boot_volume_availability_zone=NotUpdated, boot_volume_local_to_instance=NotUpdated): """Update a Node Group Template.""" 
data = {} self._copy_if_updated( data, name=name, plugin_name=plugin_name, plugin_version=plugin_version, flavor_id=flavor_id, description=description, volumes_per_node=volumes_per_node, volumes_size=volumes_size, node_processes=node_processes, node_configs=node_configs, floating_ip_pool=floating_ip_pool, security_groups=security_groups, auto_security_group=auto_security_group, availability_zone=availability_zone, volumes_availability_zone=volumes_availability_zone, volume_type=volume_type, image_id=image_id, is_proxy_gateway=is_proxy_gateway, volume_local_to_instance=volume_local_to_instance, use_autoconfig=use_autoconfig, shares=shares, is_public=is_public, is_protected=is_protected, volume_mount_prefix=volume_mount_prefix, boot_from_volume=boot_from_volume, boot_volume_type=boot_volume_type, boot_volume_availability_zone=boot_volume_availability_zone, boot_volume_local_to_instance=boot_volume_local_to_instance ) return self._patch('/node-group-templates/%s' % ng_template_id, data, 'node_group_template') # NOTE(jfreud): keep this around for backwards compatibility NodeGroupTemplateManager = NodeGroupTemplateManagerV1 python-saharaclient-3.1.0/saharaclient/api/__init__.py0000664000175000017500000000000013643576737023045 0ustar zuulzuul00000000000000python-saharaclient-3.1.0/saharaclient/api/cluster_templates.py0000664000175000017500000001637113643576737025067 0ustar zuulzuul00000000000000# Copyright (c) 2013 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
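For illustration (all values are placeholders, `client` an assumed ClientV2) -- a worker node group template that attaches data volumes and boots from a volume, exercising the v2-only arguments above:

ngt = client.node_group_templates.create(
    name='worker', plugin_name='vanilla', plugin_version='2.7.1',
    flavor_id='2', node_processes=['datanode', 'nodemanager'],
    volumes_per_node=2, volumes_size=10,
    boot_from_volume=True)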
from saharaclient.api import base class ClusterTemplate(base.Resource): resource_name = 'Cluster Template' class ClusterTemplateManagerV1(base.ResourceManager): resource_class = ClusterTemplate NotUpdated = base.NotUpdated() def create(self, name, plugin_name, hadoop_version, description=None, cluster_configs=None, node_groups=None, anti_affinity=None, net_id=None, default_image_id=None, use_autoconfig=None, shares=None, is_public=None, is_protected=None, domain_name=None): """Create a Cluster Template.""" data = { 'name': name, 'plugin_name': plugin_name, 'hadoop_version': hadoop_version, } return self._do_create(data, description, cluster_configs, node_groups, anti_affinity, net_id, default_image_id, use_autoconfig, shares, is_public, is_protected, domain_name) def _do_create(self, data, description, cluster_configs, node_groups, anti_affinity, net_id, default_image_id, use_autoconfig, shares, is_public, is_protected, domain_name): self._copy_if_defined(data, description=description, cluster_configs=cluster_configs, node_groups=node_groups, anti_affinity=anti_affinity, neutron_management_network=net_id, default_image_id=default_image_id, use_autoconfig=use_autoconfig, shares=shares, is_public=is_public, is_protected=is_protected, domain_name=domain_name) return self._create('/cluster-templates', data, 'cluster_template') def update(self, cluster_template_id, name=NotUpdated, plugin_name=NotUpdated, hadoop_version=NotUpdated, description=NotUpdated, cluster_configs=NotUpdated, node_groups=NotUpdated, anti_affinity=NotUpdated, net_id=NotUpdated, default_image_id=NotUpdated, use_autoconfig=NotUpdated, shares=NotUpdated, is_public=NotUpdated, is_protected=NotUpdated, domain_name=NotUpdated): """Update a Cluster Template.""" data = {} self._copy_if_updated(data, name=name, plugin_name=plugin_name, hadoop_version=hadoop_version, description=description, cluster_configs=cluster_configs, node_groups=node_groups, anti_affinity=anti_affinity, neutron_management_network=net_id, default_image_id=default_image_id, use_autoconfig=use_autoconfig, shares=shares, is_public=is_public, is_protected=is_protected, domain_name=domain_name) return self._update('/cluster-templates/%s' % cluster_template_id, data, 'cluster_template') def list(self, search_opts=None, marker=None, limit=None, sort_by=None, reverse=None): """Get list of Cluster Templates.""" query = base.get_query_string(search_opts, marker=marker, limit=limit, sort_by=sort_by, reverse=reverse) url = "/cluster-templates%s" % query return self._page(url, 'cluster_templates', limit) def get(self, cluster_template_id): """Get information about a Cluster Template.""" return self._get('/cluster-templates/%s' % cluster_template_id, 'cluster_template') def delete(self, cluster_template_id): """Delete a Cluster Template.""" self._delete('/cluster-templates/%s' % cluster_template_id) def export(self, cluster_template_id): """Export a Cluster Template.""" return self._get('/cluster-templates/%s/export' % cluster_template_id) class ClusterTemplateManagerV2(ClusterTemplateManagerV1): NotUpdated = base.NotUpdated() def create(self, name, plugin_name, plugin_version, description=None, cluster_configs=None, node_groups=None, anti_affinity=None, net_id=None, default_image_id=None, use_autoconfig=None, shares=None, is_public=None, is_protected=None, domain_name=None): """Create a Cluster Template.""" data = { 'name': name, 'plugin_name': plugin_name, 'plugin_version': plugin_version } return self._do_create(data, description, cluster_configs, node_groups, 
anti_affinity, net_id, default_image_id, use_autoconfig, shares, is_public, is_protected, domain_name) def update(self, cluster_template_id, name=NotUpdated, plugin_name=NotUpdated, plugin_version=NotUpdated, description=NotUpdated, cluster_configs=NotUpdated, node_groups=NotUpdated, anti_affinity=NotUpdated, net_id=NotUpdated, default_image_id=NotUpdated, use_autoconfig=NotUpdated, shares=NotUpdated, is_public=NotUpdated, is_protected=NotUpdated, domain_name=NotUpdated): """Update a Cluster Template.""" data = {} self._copy_if_updated(data, name=name, plugin_name=plugin_name, plugin_version=plugin_version, description=description, cluster_configs=cluster_configs, node_groups=node_groups, anti_affinity=anti_affinity, neutron_management_network=net_id, default_image_id=default_image_id, use_autoconfig=use_autoconfig, shares=shares, is_public=is_public, is_protected=is_protected, domain_name=domain_name) return self._patch('/cluster-templates/%s' % cluster_template_id, data, 'cluster_template') # NOTE(jfreud): keep this around for backwards compatibility ClusterTemplateManager = ClusterTemplateManagerV1 python-saharaclient-3.1.0/saharaclient/api/parameters.py0000664000175000017500000000205413643576737023464 0ustar zuulzuul00000000000000# Copyright (c) 2013 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. class Parameter(object): """This bean is used for building config entries.""" def __init__(self, config): self.name = config['name'] self.description = config.get('description', "No description") self.required = not config['is_optional'] self.default_value = config.get('default_value', None) self.initial_value = self.default_value self.param_type = config['config_type'] self.priority = int(config.get('priority', 2)) python-saharaclient-3.1.0/saharaclient/api/images.py0000664000175000017500000000574013643576737022573 0ustar zuulzuul00000000000000# Copyright (c) 2013 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
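Illustrative only -- composing a cluster template from existing node group templates and exporting it; the IDs are placeholders, `client` is an assumed ClientV2, and the node_groups dicts follow the shape Sahara expects:

ct = client.cluster_templates.create(
    name='vanilla-cluster', plugin_name='vanilla', plugin_version='2.7.1',
    node_groups=[
        {'name': 'master', 'count': 1,
         'node_group_template_id': 'master_ngt_id'},
        {'name': 'worker', 'count': 3,
         'node_group_template_id': 'worker_ngt_id'},
    ])
exported = client.cluster_templates.export(ct.id)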
from saharaclient.api import base class Image(base.Resource): resource_name = 'Image' defaults = {'description': ''} class _ImageManager(base.ResourceManager): resource_class = Image def list(self, search_opts=None): """Get a list of registered images.""" query = base.get_query_string(search_opts) return self._list('/images%s' % query, 'images') def get(self, id): """Get information about an image""" return self._get('/images/%s' % id, 'image') def unregister_image(self, image_id): """Remove an Image from Sahara Image Registry.""" self._delete('/images/%s' % image_id) def update_image(self, image_id, user_name, desc=None): """Create or update an Image in Image Registry.""" desc = desc if desc else '' data = {"username": user_name, "description": desc} return self._post('/images/%s' % image_id, data) class ImageManagerV1(_ImageManager): def update_tags(self, image_id, new_tags): """Update an Image tags. :param new_tags: list of tags that will replace currently assigned tags """ # Do not add :param list in the docstring above until this is solved: # https://github.com/sphinx-doc/sphinx/issues/2549 old_image = self.get(image_id) old_tags = frozenset(old_image.tags) new_tags = frozenset(new_tags) to_add = list(new_tags - old_tags) to_remove = list(old_tags - new_tags) add_response, remove_response = None, None if to_add: add_response = self._post('/images/%s/tag' % image_id, {'tags': to_add}, 'image') if to_remove: remove_response = self._post('/images/%s/untag' % image_id, {'tags': to_remove}, 'image') return remove_response or add_response or self.get(image_id) class ImageManagerV2(_ImageManager): def get_tags(self, image_id): return self._get('/images/%s/tags' % image_id) def update_tags(self, image_id, new_tags): return self._update('/images/%s/tags' % image_id, {'tags': new_tags}) def delete_tags(self, image_id): return self._delete('/images/%s/tags' % image_id) # NOTE(jfreud): keep this around for backwards compatibility ImageManager = ImageManagerV1 python-saharaclient-3.1.0/saharaclient/api/job_executions.py0000664000175000017500000000452113643576737024342 0ustar zuulzuul00000000000000# Copyright (c) 2013 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
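A quick sketch of the tag-handling difference between the two image managers above (the image ID and tags are placeholders): V1 diffs the new tags against the registry and issues tag/untag POSTs, while V2 replaces the whole set in one call:

client.images.update_image('image_id', 'ubuntu')  # register with a username
client.images.update_tags('image_id', ['vanilla', '2.7.1'])  # v1: diff, then tag/untag
# With a v2 client the same call overwrites the tag list wholesale:
# client.images.update_tags('image_id', ['vanilla', '2.7.1'])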
from saharaclient.api import base class JobExecution(base.Resource): resource_name = 'JobExecution' class JobExecutionsManager(base.ResourceManager): resource_class = JobExecution NotUpdated = base.NotUpdated() def list(self, search_opts=None, marker=None, limit=None, sort_by=None, reverse=None): """Get a list of Job Executions.""" query = base.get_query_string(search_opts, limit=limit, marker=marker, sort_by=sort_by, reverse=reverse) url = "/job-executions%s" % query return self._page(url, 'job_executions', limit) def get(self, obj_id): """Get information about a Job Execution.""" return self._get('/job-executions/%s' % obj_id, 'job_execution') def delete(self, obj_id): """Delete a Job Execution.""" self._delete('/job-executions/%s' % obj_id) def create(self, job_id, cluster_id, input_id=None, output_id=None, configs=None, interface=None, is_public=None, is_protected=None): """Launch a Job.""" url = "/jobs/%s/execute" % job_id data = { "cluster_id": cluster_id, } self._copy_if_defined(data, input_id=input_id, output_id=output_id, job_configs=configs, interface=interface, is_public=is_public, is_protected=is_protected) return self._create(url, data, 'job_execution') def update(self, obj_id, is_public=NotUpdated, is_protected=NotUpdated): """Update a Job Execution.""" data = {} self._copy_if_updated(data, is_public=is_public, is_protected=is_protected) return self._patch('/job-executions/%s' % obj_id, data) python-saharaclient-3.1.0/saharaclient/api/data_sources.py0000664000175000017500000000636613643576737024007 0ustar zuulzuul00000000000000# Copyright (c) 2013 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from saharaclient.api import base class DataSources(base.Resource): resource_name = 'Data Source' class DataSourceManagerV1(base.ResourceManager): resource_class = DataSources version = 1.1 def create(self, name, description, data_source_type, url, credential_user=None, credential_pass=None, is_public=None, is_protected=None, s3_credentials=None): """Create a Data Source.""" data = { 'name': name, 'description': description, 'type': data_source_type, 'url': url, } credentials = {} self._copy_if_defined(credentials, user=credential_user, password=credential_pass) credentials = credentials or s3_credentials self._copy_if_defined(data, is_public=is_public, is_protected=is_protected, credentials=credentials) return self._create('/data-sources', data, 'data_source') def list(self, search_opts=None, limit=None, marker=None, sort_by=None, reverse=None): """Get a list of Data Sources.""" query = base.get_query_string(search_opts, limit=limit, marker=marker, sort_by=sort_by, reverse=reverse) url = "/data-sources%s" % query return self._page(url, 'data_sources', limit) def get(self, data_source_id): """Get information about a Data Source.""" return self._get('/data-sources/%s' % data_source_id, 'data_source') def delete(self, data_source_id): """Delete a Data Source.""" self._delete('/data-sources/%s' % data_source_id) def update(self, data_source_id, update_data): """Update a Data Source. 
:param dict update_data: dict that contains fields that should be updated with new values. Fields that can be updated: * name * description * type * url * is_public * is_protected * credentials - dict with the keys `user` and `password` for data source in Swift, or with the keys `accesskey`, `secretkey`, `endpoint`, `ssl`, and `bucket_in_path` for data source in S3 """ if self.version >= 2: UPDATE_FUNC = self._patch else: UPDATE_FUNC = self._update return UPDATE_FUNC('/data-sources/%s' % data_source_id, update_data) class DataSourceManagerV2(DataSourceManagerV1): version = 2 # NOTE(jfreud): keep this around for backwards compatibility DataSourceManager = DataSourceManagerV1 python-saharaclient-3.1.0/saharaclient/osc/0000775000175000017500000000000013643577103020745 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/saharaclient/osc/v2/0000775000175000017500000000000013643577103021274 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/saharaclient/osc/v2/clusters.py0000664000175000017500000001647213643576737023540 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import sys from osc_lib.command import command from osc_lib import utils as osc_utils from oslo_log import log as logging from saharaclient.osc import utils from saharaclient.osc.v1 import clusters as c_v1 def _format_cluster_output(app, data): data['image'] = data.pop('default_image_id') data['node_groups'] = c_v1._format_node_groups_list(data['node_groups']) data['anti_affinity'] = osc_utils.format_list(data['anti_affinity']) class CreateCluster(c_v1.CreateCluster): """Creates cluster""" log = logging.getLogger(__name__ + ".CreateCluster") def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = self._take_action(client, parsed_args) if parsed_args.count and parsed_args.count > 1: clusters = [] for cluster in data['clusters']: clusters.append( utils.get_resource(client.clusters, cluster['cluster']['id'])) if parsed_args.wait: for cluster in clusters: if not osc_utils.wait_for_status( client.clusters.get, cluster.id): self.log.error( 'Error occurred during cluster creation: %s', data['id']) data = {} for cluster in clusters: data[cluster.name] = cluster.id else: if parsed_args.wait: if not osc_utils.wait_for_status( client.clusters.get, data['id']): self.log.error( 'Error occurred during cluster creation: %s', data['id']) data = client.clusters.get(data['id']).to_dict() _format_cluster_output(self.app, data) data = utils.prepare_data(data, c_v1.CLUSTER_FIELDS) return self.dict2columns(data) class ListClusters(c_v1.ListClusters): """Lists clusters""" log = logging.getLogger(__name__ + ".ListClusters") def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing search_opts = {} if parsed_args.plugin: search_opts['plugin_name'] = parsed_args.plugin if parsed_args.plugin_version: search_opts['plugin_version'] = 
parsed_args.plugin_version data = client.clusters.list(search_opts=search_opts) if parsed_args.name: data = utils.get_by_name_substring(data, parsed_args.name) if parsed_args.long: columns = ('name', 'id', 'plugin_name', 'plugin_version', 'status', 'description', 'default_image_id') column_headers = utils.prepare_column_headers( columns, {'default_image_id': 'image'}) else: columns = ('name', 'id', 'plugin_name', 'plugin_version', 'status') column_headers = utils.prepare_column_headers( columns, {'default_image_id': 'image'}) return ( column_headers, (osc_utils.get_item_properties( s, columns ) for s in data) ) class ShowCluster(c_v1.ShowCluster): """Display cluster details""" log = logging.getLogger(__name__ + ".ShowCluster") def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data, provision_steps = self._take_action(client, parsed_args) _format_cluster_output(self.app, data) data = self._show_cluster_info(data, provision_steps, parsed_args) return data class DeleteCluster(c_v1.DeleteCluster): """Deletes cluster""" log = logging.getLogger(__name__ + ".DeleteCluster") def get_parser(self, prog_name): parser = super(DeleteCluster, self).get_parser(prog_name) parser.add_argument( '--force', action='store_true', default=False, help='Force the deletion of the cluster', ) return parser def _choose_delete_mode(self, parsed_args): if parsed_args.force: return "force_delete" else: return "delete" class UpdateCluster(c_v1.UpdateCluster): """Updates cluster""" log = logging.getLogger(__name__ + ".UpdateCluster") def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = self._take_action(client, parsed_args) _format_cluster_output(self.app, data) data = utils.prepare_data(data, c_v1.CLUSTER_FIELDS) return self.dict2columns(data) class ScaleCluster(c_v1.ScaleCluster): """Scales cluster""" log = logging.getLogger(__name__ + ".ScaleCluster") def _get_json_arg_helptext(self): return ''' JSON representation of the cluster scale object. Other arguments (except for --wait) will not be taken into account if this one is provided. Specifying a JSON object is also the only way to indicate specific instances to decommission.
''' def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = self._take_action(client, parsed_args) _format_cluster_output(self.app, data) data = utils.prepare_data(data, c_v1.CLUSTER_FIELDS) return self.dict2columns(data) class VerificationUpdateCluster(c_v1.VerificationUpdateCluster): """Updates cluster verifications""" log = logging.getLogger(__name__ + ".VerificationUpdateCluster") class UpdateKeypairCluster(command.ShowOne): """Reflects an updated keypair on the cluster""" log = logging.getLogger(__name__ + ".UpdateKeypairCluster") def get_parser(self, prog_name): parser = super(UpdateKeypairCluster, self).get_parser(prog_name) parser.add_argument( 'cluster', metavar="", help="Name or ID of the cluster", ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing cluster_id = utils.get_resource_id( client.clusters, parsed_args.cluster) client.clusters.update_keypair(cluster_id) sys.stdout.write( 'Cluster "{cluster}" keypair has been updated.\n' .format(cluster=parsed_args.cluster)) return {}, {} python-saharaclient-3.1.0/saharaclient/osc/v2/job_types.py0000664000175000017500000000351013643576737023657 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from os import path import sys from oslo_log import log as logging from oslo_serialization import jsonutils from saharaclient.osc.v1 import job_types as jt_v1 class ListJobTypes(jt_v1.ListJobTypes): """Lists job types supported by plugins""" log = logging.getLogger(__name__ + ".ListJobTypes") class GetJobTypeConfigs(jt_v1.GetJobTypeConfigs): """Get job type configs""" log = logging.getLogger(__name__ + ".GetJobTypeConfigs") def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing if not parsed_args.file: parsed_args.file = parsed_args.job_type data = client.job_templates.get_configs(parsed_args.job_type).to_dict() if path.exists(parsed_args.file): self.log.error('File "%s" already exists. Choose another one with ' '--file argument.' % parsed_args.file) else: with open(parsed_args.file, 'w') as f: jsonutils.dump(data, f, indent=4) sys.stdout.write( '"%(type)s" job configs were saved in "%(file)s"' 'file' % {'type': parsed_args.job_type, 'file': parsed_args.file}) python-saharaclient-3.1.0/saharaclient/osc/v2/jobs.py0000664000175000017500000001034313643576737022620 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import sys from osc_lib import utils as osc_utils from oslo_log import log as logging from saharaclient.osc import utils from saharaclient.osc.v1 import jobs as jobs_v1 def _format_job_output(app, data): data['status'] = data['info']['status'] del data['info'] class ExecuteJob(jobs_v1.ExecuteJob): """Executes job""" log = logging.getLogger(__name__ + ".ExecuteJob") def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = self._take_action(client, parsed_args) _format_job_output(self.app, data) data = utils.prepare_data(data, jobs_v1.JOB_FIELDS) return self.dict2columns(data) class ListJobs(jobs_v1.ListJobs): """Lists jobs""" log = logging.getLogger(__name__ + ".ListJobs") def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = client.jobs.list() for job in data: job.status = job.info['status'] if parsed_args.status: data = [job for job in data if job.info['status'] == parsed_args.status.replace( '-', '').upper()] if parsed_args.long: columns = ('id', 'cluster id', 'job template id', 'status', 'start time', 'end time') column_headers = utils.prepare_column_headers(columns) else: columns = ('id', 'cluster id', 'job template id', 'status') column_headers = utils.prepare_column_headers(columns) return ( column_headers, (osc_utils.get_item_properties( s, columns ) for s in data) ) class ShowJob(jobs_v1.ShowJob): """Display job details""" log = logging.getLogger(__name__ + ".ShowJob") def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = client.jobs.get(parsed_args.job).to_dict() _format_job_output(self.app, data) data = utils.prepare_data(data, jobs_v1.JOB_FIELDS) return self.dict2columns(data) class DeleteJob(jobs_v1.DeleteJob): """Deletes job""" log = logging.getLogger(__name__ + ".DeleteJob") def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing for job_id in parsed_args.job: client.jobs.delete(job_id) sys.stdout.write( 'Job "{job}" deletion has been started.\n'.format(job=job_id)) if parsed_args.wait: for job_id in parsed_args.job: wait_for_delete = utils.wait_for_delete(client.jobs, job_id) if not wait_for_delete: self.log.error( 'Error occurred during job deleting: %s' % job_id) else: sys.stdout.write( 'Job "{job}" has been removed successfully.\n'.format( job=job_id)) class UpdateJob(jobs_v1.UpdateJob): """Updates job""" log = logging.getLogger(__name__ + ".UpdateJob") def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = self._take_action(client, parsed_args) _format_job_output(self.app, data) data = utils.prepare_data(data, jobs_v1.JOB_FIELDS) return self.dict2columns(data) python-saharaclient-3.1.0/saharaclient/osc/v2/job_templates.py0000664000175000017500000000256613643576737024523 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from oslo_log import log as logging from saharaclient.osc.v1 import job_templates as jt_v1 class CreateJobTemplate(jt_v1.CreateJobTemplate): """Creates job template""" log = logging.getLogger(__name__ + ".CreateJobTemplate") class ListJobTemplates(jt_v1.ListJobTemplates): """Lists job templates""" log = logging.getLogger(__name__ + ".ListJobTemplates") class ShowJobTemplate(jt_v1.ShowJobTemplate): """Display job template details""" log = logging.getLogger(__name__ + ".ShowJobTemplate") class DeleteJobTemplate(jt_v1.DeleteJobTemplate): """Deletes job template""" log = logging.getLogger(__name__ + ".DeleteJobTemplate") class UpdateJobTemplate(jt_v1.UpdateJobTemplate): """Updates job template""" log = logging.getLogger(__name__ + ".UpdateJobTemplate") python-saharaclient-3.1.0/saharaclient/osc/v2/plugins.py0000664000175000017500000000217413643576737023347 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from oslo_log import log as logging from saharaclient.osc.v1 import plugins as p_v1 class ListPlugins(p_v1.ListPlugins): """Lists plugins""" log = logging.getLogger(__name__ + ".ListPlugins") class ShowPlugin(p_v1.ShowPlugin): """Display plugin details""" log = logging.getLogger(__name__ + ".ShowPlugin") class GetPluginConfigs(p_v1.GetPluginConfigs): """Get plugin configs""" log = logging.getLogger(__name__ + ".GetPluginConfigs") class UpdatePlugin(p_v1.UpdatePlugin): log = logging.getLogger(__name__ + ".UpdatePlugin") python-saharaclient-3.1.0/saharaclient/osc/v2/job_binaries.py0000664000175000017500000001644713643576737024324 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
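# Example invocations for the CreateJobBinary command defined below (a
# sketch: the "openstack dataprocessing job binary create" command name is
# assumed from this package's OSC entry points, and every name, URL and
# credential is a placeholder):
#
#   openstack dataprocessing job binary create --name wordcount.jar \
#       --url swift://container/wordcount.jar --username demo \
#       --password-prompt
#
#   openstack dataprocessing job binary create --name wordcount.jar \
#       --url s3://bucket/wordcount.jar --access-key <access-key> \
#       --secret-key <secret-key> --s3-endpoint https://s3.example.org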
from osc_lib.command import command from osc_lib import exceptions from osc_lib import utils as osc_utils from oslo_log import log as logging from oslo_serialization import jsonutils from saharaclient.osc import utils from saharaclient.osc.v1 import job_binaries as jb_v1 class CreateJobBinary(command.ShowOne): """Creates job binary""" log = logging.getLogger(__name__ + ".CreateJobBinary") def get_parser(self, prog_name): parser = super(CreateJobBinary, self).get_parser(prog_name) parser.add_argument( '--name', metavar="", help="Name of the job binary [REQUIRED if JSON is not provided]", ) creation_type = parser.add_mutually_exclusive_group() creation_type.add_argument( '--url', metavar='', help='URL for the job binary [REQUIRED if JSON and file are ' 'not provided]' ) parser.add_argument( '--description', metavar="", help="Description of the job binary" ) username = parser.add_mutually_exclusive_group() username.add_argument( '--username', metavar='', help='Username for accessing the job binary URL', ) username.add_argument( '--access-key', metavar='', help='S3 access key for accessing the job binary URL', ) password = parser.add_mutually_exclusive_group() password.add_argument( '--password', metavar='', help='Password for accessing the job binary URL', ) password.add_argument( '--secret-key', metavar='', help='S3 secret key for accessing the job binary URL', ) password.add_argument( '--password-prompt', dest="password_prompt", action="store_true", help='Prompt interactively for password', ) password.add_argument( '--secret-key-prompt', dest="secret_key_prompt", action="store_true", help='Prompt interactively for S3 secret key', ) parser.add_argument( '--s3-endpoint', metavar='', help='S3 endpoint for accessing the job binary URL (ignored if ' 'binary is not in S3)', ) parser.add_argument( '--public', action='store_true', default=False, help='Make the job binary public', ) parser.add_argument( '--protected', action='store_true', default=False, help='Make the job binary protected', ) parser.add_argument( '--json', metavar='', help='JSON representation of the job binary.
Other ' 'arguments will not be taken into account if this one is ' 'provided' ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing if parsed_args.json: blob = osc_utils.read_blob_file_contents(parsed_args.json) try: template = jsonutils.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'template from file %s: %s' % (parsed_args.json, e)) data = client.job_binaries.create(**template).to_dict() else: if parsed_args.password_prompt: parsed_args.password = osc_utils.get_password( self.app.stdin, confirm=False) if parsed_args.secret_key_prompt: parsed_args.secret_key = osc_utils.get_password( self.app.stdin, confirm=False) if not parsed_args.password: parsed_args.password = parsed_args.secret_key if not parsed_args.username: parsed_args.username = parsed_args.access_key if parsed_args.password and not parsed_args.username: raise exceptions.CommandError( 'Username via --username, or S3 access key via ' '--access-key should be provided with password') if parsed_args.username and not parsed_args.password: raise exceptions.CommandError( 'Password should be provided via --password or ' '--secret-key, or entered interactively with ' '--password-prompt or --secret-key-prompt') if parsed_args.password and parsed_args.username: if not parsed_args.url: raise exceptions.CommandError( 'URL must be provided via --url') if parsed_args.url.startswith('s3'): if not parsed_args.s3_endpoint: raise exceptions.CommandError( 'S3 job binaries need an endpoint provided via ' '--s3-endpoint') extra = { 'accesskey': parsed_args.username, 'secretkey': parsed_args.password, 'endpoint': parsed_args.s3_endpoint, } else: extra = { 'user': parsed_args.username, 'password': parsed_args.password } else: extra = None data = client.job_binaries.create( name=parsed_args.name, url=parsed_args.url, description=parsed_args.description, extra=extra, is_public=parsed_args.public, is_protected=parsed_args.protected).to_dict() data = utils.prepare_data(data, jb_v1.JOB_BINARY_FIELDS) return self.dict2columns(data) class ListJobBinaries(jb_v1.ListJobBinaries): """Lists job binaries""" log = logging.getLogger(__name__ + ".ListJobBinaries") class ShowJobBinary(jb_v1.ShowJobBinary): """Display job binary details""" log = logging.getLogger(__name__ + ".ShowJobBinary") class DeleteJobBinary(jb_v1.DeleteJobBinary): """Deletes job binary""" log = logging.getLogger(__name__ + ".DeleteJobBinary") class UpdateJobBinary(jb_v1.UpdateJobBinary): """Updates job binary""" log = logging.getLogger(__name__ + ".UpdateJobBinary") class DownloadJobBinary(jb_v1.DownloadJobBinary): """Downloads job binary""" log = logging.getLogger(__name__ + ".DownloadJobBinary") python-saharaclient-3.1.0/saharaclient/osc/v2/node_group_templates.py0000664000175000017500000002125513643576737026106 0ustar zuulzuul00000000000000# Copyright (c) 2018 Red Hat Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
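# Example (a sketch with hypothetical plugin, flavor and zone names; the
# "openstack dataprocessing node group template create" command name is
# assumed from this package's OSC entry points) of creating an APIv2 node
# group template that boots from a volume, using the extra arguments this
# module adds on top of the v1 command:
#
#   openstack dataprocessing node group template create \
#       --name workers --plugin vanilla --plugin-version 2.8.2 \
#       --processes datanode nodemanager --flavor m1.large \
#       --boot-from-volume --boot-volume-type lvm \
#       --boot-volume-availability-zone nova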
import sys from osc_lib import utils as osc_utils from saharaclient.osc import utils from saharaclient.osc.v1 import node_group_templates as ngt_v1 NGT_FIELDS = ['id', 'name', 'plugin_name', 'plugin_version', 'node_processes', 'description', 'auto_security_group', 'security_groups', 'availability_zone', 'flavor_id', 'floating_ip_pool', 'volumes_per_node', 'volumes_size', 'volume_type', 'volume_local_to_instance', 'volume_mount_prefix', 'volumes_availability_zone', 'use_autoconfig', 'is_proxy_gateway', 'is_default', 'is_protected', 'is_public', 'boot_from_volume', 'boot_volume_type', 'boot_volume_availability_zone', 'boot_volume_local_to_instance'] def _format_ngt_output(data): data['node_processes'] = osc_utils.format_list(data['node_processes']) if data['volumes_per_node'] == 0: del data['volume_local_to_instance'] del data['volume_mount_prefix'] del data['volume_type'] del data['volumes_availability_zone'] del data['volumes_size'] if not data['boot_from_volume']: del data['boot_volume_type'] del data['boot_volume_availability_zone'] del data['boot_volume_local_to_instance'] class CreateNodeGroupTemplate(ngt_v1.CreateNodeGroupTemplate, utils.NodeGroupTemplatesUtils): """Creates node group template""" def get_parser(self, prog_name): parser = super(CreateNodeGroupTemplate, self).get_parser(prog_name) parser.add_argument( '--boot-from-volume', action='store_true', default=False, help="Make the node group bootable from volume", ) parser.add_argument( '--boot-volume-type', metavar="", help='Type of the boot volume. ' 'This parameter will be taken into account only ' 'if booting from volume.' ) parser.add_argument( '--boot-volume-availability-zone', metavar="", help='Name of the availability zone to create boot volume in.' ' This parameter will be taken into account only ' 'if booting from volume.' ) parser.add_argument( '--boot-volume-local-to-instance', action='store_true', default=False, help='Instance and volume are guaranteed to be on the same host. ' 'This parameter will be taken into account only ' 'if booting from volume.'
) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = self._create_take_action(client, self.app, parsed_args) _format_ngt_output(data) data = utils.prepare_data(data, NGT_FIELDS) return self.dict2columns(data) class ListNodeGroupTemplates(ngt_v1.ListNodeGroupTemplates, utils.NodeGroupTemplatesUtils): """Lists node group templates""" def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing return self._list_take_action(client, self.app, parsed_args) class ShowNodeGroupTemplate(ngt_v1.ShowNodeGroupTemplate, utils.NodeGroupTemplatesUtils): """Display node group template details""" def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = utils.get_resource( client.node_group_templates, parsed_args.node_group_template).to_dict() _format_ngt_output(data) data = utils.prepare_data(data, NGT_FIELDS) return self.dict2columns(data) class DeleteNodeGroupTemplate(ngt_v1.DeleteNodeGroupTemplate, utils.NodeGroupTemplatesUtils): """Deletes node group template""" def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing for ngt in parsed_args.node_group_template: ngt_id = utils.get_resource_id( client.node_group_templates, ngt) client.node_group_templates.delete(ngt_id) sys.stdout.write( 'Node group template "{ngt}" has been removed ' 'successfully.\n'.format(ngt=ngt)) class UpdateNodeGroupTemplate(ngt_v1.UpdateNodeGroupTemplate, utils.NodeGroupTemplatesUtils): """Updates node group template""" def get_parser(self, prog_name): parser = super(UpdateNodeGroupTemplate, self).get_parser(prog_name) bootfromvolume = parser.add_mutually_exclusive_group() bootfromvolume.add_argument( '--boot-from-volume-enable', action='store_true', help='Makes node group bootable from volume.', dest='boot_from_volume' ) bootfromvolume.add_argument( '--boot-from-volume-disable', action='store_false', help='Makes node group not bootable from volume.', dest='boot_from_volume' ) parser.add_argument( '--boot-volume-type', metavar="", help='Type of the boot volume. ' 'This parameter will be taken into account only ' 'if booting from volume.' ) parser.add_argument( '--boot-volume-availability-zone', metavar="", help='Name of the availability zone to create boot volume in.' ' This parameter will be taken into account only ' 'if booting from volume.' 
) bfv_locality = parser.add_mutually_exclusive_group() bfv_locality.add_argument( '--boot-volume-local-to-instance-enable', action='store_true', help='Makes boot volume explicitly local to instance.', dest='boot_volume_local_to_instance' ) bfv_locality.add_argument( '--boot-volume-local-to-instance-disable', action='store_false', help='Removes explicit instruction of boot volume locality.', dest='boot_volume_local_to_instance' ) parser.set_defaults(is_public=None, is_protected=None, is_proxy_gateway=None, volume_locality=None, use_auto_security_group=None, use_autoconfig=None, boot_from_volume=None, boot_volume_type=None, boot_volume_availability_zone=None, boot_volume_local_to_instance=None) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = self._update_take_action(client, self.app, parsed_args) _format_ngt_output(data) data = utils.prepare_data(data, NGT_FIELDS) return self.dict2columns(data) class ImportNodeGroupTemplate(ngt_v1.ImportNodeGroupTemplate, utils.NodeGroupTemplatesUtils): """Imports node group template""" def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = self._import_take_action(client, parsed_args) _format_ngt_output(data) data = utils.prepare_data(data, NGT_FIELDS) return self.dict2columns(data) class ExportNodeGroupTemplate(ngt_v1.ExportNodeGroupTemplate, utils.NodeGroupTemplatesUtils): """Export node group template to JSON""" def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing self._export_take_action(client, parsed_args) python-saharaclient-3.1.0/saharaclient/osc/v2/__init__.py0000664000175000017500000000000013643576737023407 0ustar zuulzuul00000000000000python-saharaclient-3.1.0/saharaclient/osc/v2/cluster_templates.py0000664000175000017500000001135713643576737025430 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
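# Example (a sketch with hypothetical names; the command name and the
# --node-groups flag are assumed from this package's OSC entry points and
# the v1 parser) of creating an APIv2 cluster template from existing node
# group templates with the "<name>:<count>" syntax parsed in
# saharaclient.osc.utils:
#
#   openstack dataprocessing cluster template create --name my-template \
#       --node-groups masters:1 workers:3 --description "demo template"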
from osc_lib import utils as osc_utils from oslo_log import log as logging from saharaclient.osc import utils from saharaclient.osc.v1 import cluster_templates as ct_v1 def _format_ct_output(app, data): data['node_groups'] = ct_v1._format_node_groups_list(data['node_groups']) data['anti_affinity'] = osc_utils.format_list(data['anti_affinity']) class CreateClusterTemplate(ct_v1.CreateClusterTemplate): """Creates cluster template""" log = logging.getLogger(__name__ + ".CreateClusterTemplate") def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = self._take_action(client, parsed_args) _format_ct_output(self.app, data) data = utils.prepare_data(data, ct_v1.CT_FIELDS) return self.dict2columns(data) class ListClusterTemplates(ct_v1.ListClusterTemplates): """Lists cluster templates""" log = logging.getLogger(__name__ + ".ListClusterTemplates") def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing search_opts = {} if parsed_args.plugin: search_opts['plugin_name'] = parsed_args.plugin if parsed_args.plugin_version: search_opts['plugin_version'] = parsed_args.plugin_version data = client.cluster_templates.list(search_opts=search_opts) if parsed_args.name: data = utils.get_by_name_substring(data, parsed_args.name) if parsed_args.long: columns = ('name', 'id', 'plugin_name', 'plugin_version', 'node_groups', 'description') column_headers = utils.prepare_column_headers(columns) else: columns = ('name', 'id', 'plugin_name', 'plugin_version') column_headers = utils.prepare_column_headers(columns) return ( column_headers, (osc_utils.get_item_properties( s, columns, formatters={ 'node_groups': ct_v1._format_node_groups_list } ) for s in data) ) class ShowClusterTemplate(ct_v1.ShowClusterTemplate): """Display cluster template details""" log = logging.getLogger(__name__ + ".ShowClusterTemplate") def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = utils.get_resource( client.cluster_templates, parsed_args.cluster_template).to_dict() _format_ct_output(self.app, data) data = utils.prepare_data(data, ct_v1.CT_FIELDS) return self.dict2columns(data) class DeleteClusterTemplate(ct_v1.DeleteClusterTemplate): """Deletes cluster template""" log = logging.getLogger(__name__ + ".DeleteClusterTemplate") class UpdateClusterTemplate(ct_v1.UpdateClusterTemplate): """Updates cluster template""" log = logging.getLogger(__name__ + ".UpdateClusterTemplate") def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing ct_id = utils.get_resource_id( client.cluster_templates, parsed_args.cluster_template) data = self._take_action(client, parsed_args, ct_id) _format_ct_output(self.app, data) data = utils.prepare_data(data, ct_v1.CT_FIELDS) return self.dict2columns(data) class ImportClusterTemplate(ct_v1.ImportClusterTemplate): """Imports cluster template""" log = logging.getLogger(__name__ + ".ImportClusterTemplate") def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = self._take_action(client, parsed_args) _format_ct_output(self.app, data) data = utils.prepare_data(data, ct_v1.CT_FIELDS) return self.dict2columns(data) class ExportClusterTemplate(ct_v1.ExportClusterTemplate): """Export cluster template to JSON""" log = 
logging.getLogger(__name__ + ".ExportClusterTemplate") python-saharaclient-3.1.0/saharaclient/osc/v2/images.py0000664000175000017500000000327313643576737023124 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from oslo_log import log as logging from saharaclient.osc.v1 import images as images_v1 IMAGE_FIELDS = ['name', 'id', 'username', 'tags', 'status', 'description'] class ListImages(images_v1.ListImages): """Lists registered images""" log = logging.getLogger(__name__ + ".ListImages") class ShowImage(images_v1.ShowImage): """Display image details""" log = logging.getLogger(__name__ + ".ShowImage") class RegisterImage(images_v1.RegisterImage): """Register an image""" log = logging.getLogger(__name__ + ".RegisterImage") class UnregisterImage(images_v1.UnregisterImage): """Unregister image(s)""" log = logging.getLogger(__name__ + ".UnregisterImage") class SetImageTags(images_v1.SetImageTags): """Set image tags (Replace current image tags with provided ones)""" log = logging.getLogger(__name__ + ".SetImageTags") class AddImageTags(images_v1.AddImageTags): """Add image tags""" log = logging.getLogger(__name__ + ".AddImageTags") class RemoveImageTags(images_v1.RemoveImageTags): """Remove image tags""" log = logging.getLogger(__name__ + ".RemoveImageTags") python-saharaclient-3.1.0/saharaclient/osc/v2/data_sources.py0000664000175000017500000000253713643576737024335 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from oslo_log import log as logging from saharaclient.osc.v1 import data_sources as ds_v1 class CreateDataSource(ds_v1.CreateDataSource): """Creates data source""" log = logging.getLogger(__name__ + ".CreateDataSource") class ListDataSources(ds_v1.ListDataSources): """Lists data sources""" log = logging.getLogger(__name__ + ".ListDataSources") class ShowDataSource(ds_v1.ShowDataSource): """Display data source details""" log = logging.getLogger(__name__ + ".ShowDataSource") class DeleteDataSource(ds_v1.DeleteDataSource): """Delete data source""" log = logging.getLogger(__name__ + ".DeleteDataSource") class UpdateDataSource(ds_v1.UpdateDataSource): """Update data source""" log = logging.getLogger(__name__ + ".UpdateDataSource") python-saharaclient-3.1.0/saharaclient/osc/utils.py0000664000175000017500000005434013643576737022475 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc.
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import time from osc_lib import exceptions from osc_lib import utils as osc_utils from oslo_serialization import jsonutils as json from oslo_utils import timeutils from oslo_utils import uuidutils from saharaclient.api import base def get_resource(manager, name_or_id, **kwargs): if uuidutils.is_uuid_like(name_or_id): return manager.get(name_or_id, **kwargs) else: resource = manager.find_unique(name=name_or_id) if kwargs: # we really need additional call to apply kwargs resource = manager.get(resource.id, **kwargs) return resource def created_at_sorted(objs, reverse=False): return sorted(objs, key=created_at_key, reverse=reverse) def random_name(prefix=None): return "%s-%s" % (prefix, uuidutils.generate_uuid()[:8]) def created_at_key(obj): return timeutils.parse_isotime(obj["created_at"]) def get_resource_id(manager, name_or_id): if uuidutils.is_uuid_like(name_or_id): return name_or_id else: return manager.find_unique(name=name_or_id).id def create_dict_from_kwargs(**kwargs): return {k: v for (k, v) in kwargs.items() if v is not None} def prepare_data(data, fields): new_data = {} for f in fields: if f in data: new_data[f.replace('_', ' ').capitalize()] = data[f] return new_data def unzip(data): return zip(*data) def extend_columns(columns, items): return unzip(list(unzip(columns)) + [('', '')] + items) def prepare_column_headers(columns, remap=None): remap = remap if remap else {} new_columns = [] for c in columns: for old, new in remap.items(): c = c.replace(old, new) new_columns.append(c.replace('_', ' ').capitalize()) return new_columns def get_by_name_substring(data, name): return [obj for obj in data if name in obj.name] def wait_for_delete(manager, obj_id, sleep_time=5, timeout=3000): s_time = timeutils.utcnow() while timeutils.delta_seconds(s_time, timeutils.utcnow()) < timeout: try: manager.get(obj_id) except base.APIException as ex: if ex.error_code == 404: return True raise time.sleep(sleep_time) return False def get_api_version(app): return app.api_version['data_processing'] def is_api_v2(app): if get_api_version(app) == '2': return True return False def _cluster_templates_configure_ng(app, node_groups, client): node_groups_list = dict( map(lambda x: x.split(':', 1), node_groups)) node_groups = [] plugins_versions = set() for name, count in node_groups_list.items(): ng = get_resource(client.node_group_templates, name) node_groups.append({'name': ng.name, 'count': int(count), 'node_group_template_id': ng.id}) if is_api_v2(app): plugins_versions.add((ng.plugin_name, ng.plugin_version)) else: plugins_versions.add((ng.plugin_name, ng.hadoop_version)) if len(plugins_versions) != 1: raise exceptions.CommandError('Node groups with the same plugins ' 'and versions must be specified') plugin, plugin_version = plugins_versions.pop() return plugin, plugin_version, node_groups def _get_plugin_version(app, cluster_template, client): ct = get_resource(client.cluster_templates, cluster_template) if is_api_v2(app): return ct.plugin_name, 
ct.plugin_version, ct.id else: return ct.plugin_name, ct.hadoop_version, ct.id def create_job_templates(app, client, mains_ids, libs_ids, parsed_args): args_dict = dict(name=parsed_args.name, type=parsed_args.type, mains=mains_ids, libs=libs_ids, description=parsed_args.description, interface=parsed_args.interface, is_public=parsed_args.public, is_protected=parsed_args.protected) if is_api_v2(app): data = client.job_templates.create(**args_dict).to_dict() else: data = client.jobs.create(**args_dict).to_dict() return data def create_job_template_json(app, client, **template): if is_api_v2(app): data = client.job_templates.create(**template).to_dict() else: data = client.jobs.create(**template).to_dict() return data def list_job_templates(app, client, search_opts): if is_api_v2(app): data = client.job_templates.list(search_opts=search_opts) else: data = client.jobs.list(search_opts=search_opts) return data def get_job_templates_resources(app, client, parsed_args): if is_api_v2(app): data = get_resource( client.job_templates, parsed_args.job_template).to_dict() else: data = get_resource( client.jobs, parsed_args.job_template).to_dict() return data def delete_job_templates(app, client, jt): if is_api_v2(app): jt_id = get_resource_id(client.job_templates, jt) client.job_templates.delete(jt_id) else: jt_id = get_resource_id(client.jobs, jt) client.jobs.delete(jt_id) def get_job_template_id(app, client, parsed_args): if is_api_v2(app): jt_id = get_resource_id( client.job_templates, parsed_args.job_template) else: jt_id = get_resource_id( client.jobs, parsed_args.job_template) return jt_id def update_job_templates(app, client, jt_id, update_data): if is_api_v2(app): data = client.job_templates.update(jt_id, **update_data).job_template else: data = client.jobs.update(jt_id, **update_data).job return data def create_cluster_template(app, client, plugin, plugin_version, parsed_args, configs, shares, node_groups): args_dict = dict( name=parsed_args.name, plugin_name=plugin, description=parsed_args.description, node_groups=node_groups, use_autoconfig=parsed_args.autoconfig, cluster_configs=configs, shares=shares, is_public=parsed_args.public, is_protected=parsed_args.protected, domain_name=parsed_args.domain_name) if is_api_v2(app): args_dict['plugin_version'] = plugin_version else: args_dict['hadoop_version'] = plugin_version data = client.cluster_templates.create(**args_dict).to_dict() return data def update_cluster_template(app, client, plugin, plugin_version, parsed_args, configs, shares, node_groups, ct_id): args_dict = dict( name=parsed_args.name, plugin_name=plugin, description=parsed_args.description, node_groups=node_groups, use_autoconfig=parsed_args.use_autoconfig, cluster_configs=configs, shares=shares, is_public=parsed_args.is_public, is_protected=parsed_args.is_protected, domain_name=parsed_args.domain_name ) if is_api_v2(app): args_dict['plugin_version'] = plugin_version else: args_dict['hadoop_version'] = plugin_version update_dict = create_dict_from_kwargs(**args_dict) data = client.cluster_templates.update( ct_id, **update_dict).to_dict() return data def create_cluster(client, app, parsed_args, plugin, plugin_version, template_id, image_id, net_id): args = dict( name=parsed_args.name, plugin_name=plugin, cluster_template_id=template_id, default_image_id=image_id, description=parsed_args.description, is_transient=parsed_args.transient, user_keypair_id=parsed_args.user_keypair, net_id=net_id, count=parsed_args.count, is_public=parsed_args.public, is_protected=parsed_args.protected) if 
is_api_v2(app): args['plugin_version'] = plugin_version else: args['hadoop_version'] = plugin_version data = client.clusters.create(**args).to_dict() return data def create_job(client, app, jt_id, cluster_id, input_id, output_id, job_configs, parsed_args): args_dict = dict(cluster_id=cluster_id, input_id=input_id, output_id=output_id, interface=parsed_args.interface, configs=job_configs, is_public=parsed_args.public, is_protected=parsed_args.protected) if is_api_v2(app): args_dict['job_template_id'] = jt_id data = client.jobs.create(**args_dict).to_dict() else: args_dict['job_id'] = jt_id data = client.job_executions.create(**args_dict).to_dict() return data def create_job_json(client, app, **template): if is_api_v2(app): data = client.jobs.create(**template).to_dict() else: data = client.job_executions.create(**template).to_dict() return data def update_job(client, app, parsed_args, update_dict): if is_api_v2(app): data = client.jobs.update( parsed_args.job, **update_dict).job else: data = client.job_executions.update( parsed_args.job, **update_dict).job_execution return data def create_node_group_templates(client, app, parsed_args, flavor_id, configs, shares): if app.api_version['data_processing'] == '2': data = client.node_group_templates.create( name=parsed_args.name, plugin_name=parsed_args.plugin, plugin_version=parsed_args.plugin_version, flavor_id=flavor_id, description=parsed_args.description, volumes_per_node=parsed_args.volumes_per_node, volumes_size=parsed_args.volumes_size, node_processes=parsed_args.processes, floating_ip_pool=parsed_args.floating_ip_pool, security_groups=parsed_args.security_groups, auto_security_group=parsed_args.auto_security_group, availability_zone=parsed_args.availability_zone, volume_type=parsed_args.volumes_type, is_proxy_gateway=parsed_args.proxy_gateway, volume_local_to_instance=parsed_args.volumes_locality, use_autoconfig=parsed_args.autoconfig, is_public=parsed_args.public, is_protected=parsed_args.protected, node_configs=configs, shares=shares, volumes_availability_zone=( parsed_args.volumes_availability_zone), volume_mount_prefix=parsed_args.volumes_mount_prefix, boot_from_volume=parsed_args.boot_from_volume, boot_volume_type=parsed_args.boot_volume_type, boot_volume_availability_zone=( parsed_args.boot_volume_availability_zone), boot_volume_local_to_instance=( parsed_args.boot_volume_local_to_instance) ).to_dict() else: data = client.node_group_templates.create( name=parsed_args.name, plugin_name=parsed_args.plugin, hadoop_version=parsed_args.plugin_version, flavor_id=flavor_id, description=parsed_args.description, volumes_per_node=parsed_args.volumes_per_node, volumes_size=parsed_args.volumes_size, node_processes=parsed_args.processes, floating_ip_pool=parsed_args.floating_ip_pool, security_groups=parsed_args.security_groups, auto_security_group=parsed_args.auto_security_group, availability_zone=parsed_args.availability_zone, volume_type=parsed_args.volumes_type, is_proxy_gateway=parsed_args.proxy_gateway, volume_local_to_instance=parsed_args.volumes_locality, use_autoconfig=parsed_args.autoconfig, is_public=parsed_args.public, is_protected=parsed_args.protected, node_configs=configs, shares=shares, volumes_availability_zone=( parsed_args.volumes_availability_zone), volume_mount_prefix=parsed_args.volumes_mount_prefix).to_dict() return data class NodeGroupTemplatesUtils(object): def _create_take_action(self, client, app, parsed_args): if parsed_args.json: blob = osc_utils.read_blob_file_contents(parsed_args.json) try: template = 
json.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'template from file %s: %s' % (parsed_args.json, e)) data = client.node_group_templates.create(**template).to_dict() else: if (not parsed_args.name or not parsed_args.plugin or not parsed_args.plugin_version or not parsed_args.flavor or not parsed_args.processes): raise exceptions.CommandError( 'At least --name, --plugin, --plugin-version, --processes,' ' --flavor arguments should be specified or json template ' 'should be provided with --json argument') configs = None if parsed_args.configs: blob = osc_utils.read_blob_file_contents(parsed_args.configs) try: configs = json.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'configs from file %s: %s' % (parsed_args.configs, e)) shares = None if parsed_args.shares: blob = osc_utils.read_blob_file_contents(parsed_args.shares) try: shares = json.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'shares from file %s: %s' % (parsed_args.shares, e)) compute_client = app.client_manager.compute flavor_id = osc_utils.find_resource( compute_client.flavors, parsed_args.flavor).id data = create_node_group_templates(client, app, parsed_args, flavor_id, configs, shares) return data def _list_take_action(self, client, app, parsed_args): search_opts = {} if parsed_args.plugin: search_opts['plugin_name'] = parsed_args.plugin if parsed_args.plugin_version: search_opts['hadoop_version'] = parsed_args.plugin_version data = client.node_group_templates.list(search_opts=search_opts) if parsed_args.name: data = get_by_name_substring(data, parsed_args.name) if app.api_version['data_processing'] == '2': if parsed_args.long: columns = ('name', 'id', 'plugin_name', 'plugin_version', 'node_processes', 'description') column_headers = prepare_column_headers(columns) else: columns = ('name', 'id', 'plugin_name', 'plugin_version') column_headers = prepare_column_headers(columns) else: if parsed_args.long: columns = ('name', 'id', 'plugin_name', 'hadoop_version', 'node_processes', 'description') column_headers = prepare_column_headers( columns, {'hadoop_version': 'plugin_version'}) else: columns = ('name', 'id', 'plugin_name', 'hadoop_version') column_headers = prepare_column_headers( columns, {'hadoop_version': 'plugin_version'}) return ( column_headers, (osc_utils.get_item_properties( s, columns, formatters={ 'node_processes': osc_utils.format_list } ) for s in data) ) def _update_take_action(self, client, app, parsed_args): ngt_id = get_resource_id( client.node_group_templates, parsed_args.node_group_template) if parsed_args.json: blob = osc_utils.read_blob_file_contents(parsed_args.json) try: template = json.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'template from file %s: %s' % (parsed_args.json, e)) data = client.node_group_templates.update( ngt_id, **template).to_dict() else: configs = None if parsed_args.configs: blob = osc_utils.read_blob_file_contents(parsed_args.configs) try: configs = json.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'configs from file %s: %s' % (parsed_args.configs, e)) shares = None if parsed_args.shares: blob = osc_utils.read_blob_file_contents(parsed_args.shares) try: shares = json.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'shares from file %s: %s' % 
(parsed_args.shares, e)) flavor_id = None if parsed_args.flavor: compute_client = self.app.client_manager.compute flavor_id = osc_utils.find_resource( compute_client.flavors, parsed_args.flavor).id update_dict = create_dict_from_kwargs( name=parsed_args.name, plugin_name=parsed_args.plugin, hadoop_version=parsed_args.plugin_version, flavor_id=flavor_id, description=parsed_args.description, volumes_per_node=parsed_args.volumes_per_node, volumes_size=parsed_args.volumes_size, node_processes=parsed_args.processes, floating_ip_pool=parsed_args.floating_ip_pool, security_groups=parsed_args.security_groups, auto_security_group=parsed_args.use_auto_security_group, availability_zone=parsed_args.availability_zone, volume_type=parsed_args.volumes_type, is_proxy_gateway=parsed_args.is_proxy_gateway, volume_local_to_instance=parsed_args.volume_locality, use_autoconfig=parsed_args.use_autoconfig, is_public=parsed_args.is_public, is_protected=parsed_args.is_protected, node_configs=configs, shares=shares, volumes_availability_zone=( parsed_args.volumes_availability_zone), volume_mount_prefix=parsed_args.volumes_mount_prefix ) if app.api_version['data_processing'] == '2': if 'hadoop_version' in update_dict: update_dict.pop('hadoop_version') update_dict['plugin_version'] = parsed_args.plugin_version if parsed_args.boot_from_volume is not None: update_dict['boot_from_volume'] = ( parsed_args.boot_from_volume) if parsed_args.boot_volume_type is not None: update_dict['boot_volume_type'] = ( parsed_args.boot_volume_type) if parsed_args.boot_volume_availability_zone is not None: update_dict['boot_volume_availability_zone'] = ( parsed_args.boot_volume_availability_zone) if parsed_args.boot_volume_local_to_instance is not None: update_dict['boot_volume_local_to_instance'] = ( parsed_args.boot_volume_local_to_instance) data = client.node_group_templates.update( ngt_id, **update_dict).to_dict() return data def _import_take_action(self, client, parsed_args): if (not parsed_args.image_id or not parsed_args.flavor_id): raise exceptions.CommandError( 'At least --image_id and --flavor_id should be specified') blob = osc_utils.read_blob_file_contents(parsed_args.json) try: template = json.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'template from file %s: %s' % (parsed_args.json, e)) template['node_group_template']['floating_ip_pool'] = ( parsed_args.floating_ip_pool) template['node_group_template']['image_id'] = ( parsed_args.image_id) template['node_group_template']['flavor_id'] = ( parsed_args.flavor_id) template['node_group_template']['security_groups'] = ( parsed_args.security_groups) if parsed_args.name: template['node_group_template']['name'] = parsed_args.name data = client.node_group_templates.create( **template['node_group_template']).to_dict() return data def _export_take_action(self, client, parsed_args): ngt_id = get_resource_id( client.node_group_templates, parsed_args.node_group_template) response = client.node_group_templates.export(ngt_id) result = json.dumps(response._info, indent=4)+"\n" if parsed_args.file: with open(parsed_args.file, "w+") as file: file.write(result) else: sys.stdout.write(result) python-saharaclient-3.1.0/saharaclient/osc/plugin.py0000664000175000017500000000432213643576737022632 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """OpenStackClient plugin for Data Processing service.""" from osc_lib import utils from oslo_log import log as logging LOG = logging.getLogger(__name__) DEFAULT_DATA_PROCESSING_API_VERSION = "1.1" API_VERSION_OPTION = "os_data_processing_api_version" API_NAME = "data_processing" API_VERSIONS = { "1.1": "saharaclient.api.client.Client", "2": "saharaclient.api.client.ClientV2" } def make_client(instance): data_processing_client = utils.get_client_class( API_NAME, instance._api_version[API_NAME], API_VERSIONS) LOG.debug('Instantiating data-processing client: %s', data_processing_client) kwargs = utils.build_kwargs_dict('endpoint_type', instance._interface) client = data_processing_client( session=instance.session, region_name=instance._region_name, sahara_url=instance._cli_options.data_processing_url, **kwargs ) return client def build_option_parser(parser): """Hook to add global options.""" parser.add_argument( "--os-data-processing-api-version", metavar="", default=utils.env( 'OS_DATA_PROCESSING_API_VERSION', default=DEFAULT_DATA_PROCESSING_API_VERSION), help=("Data processing API version, default=" + DEFAULT_DATA_PROCESSING_API_VERSION + ' (Env: OS_DATA_PROCESSING_API_VERSION)')) parser.add_argument( "--os-data-processing-url", default=utils.env( "OS_DATA_PROCESSING_URL"), help=("Data processing API URL " "(Env: OS_DATA_PROCESSING_URL)")) return parser python-saharaclient-3.1.0/saharaclient/osc/v1/0000775000175000017500000000000013643577103021273 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/saharaclient/osc/v1/clusters.py0000664000175000017500000005665413643576737023535 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License.
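# Example (a sketch with hypothetical resource names; the "openstack
# dataprocessing cluster create" command name is assumed from this
# package's OSC entry points) of the creation flow implemented below;
# explicit arguments and a --json template are alternative paths:
#
#   openstack dataprocessing cluster create --name my-cluster \
#       --cluster-template my-template --image my-sahara-image \
#       --user-keypair my-key --neutron-network private --count 1 --wait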
import sys from osc_lib.command import command from osc_lib import exceptions from osc_lib import utils as osc_utils from oslo_log import log as logging from oslo_serialization import jsonutils from saharaclient.osc import utils CLUSTER_FIELDS = ["cluster_template_id", "use_autoconfig", "user_keypair_id", "status", "image", "node_groups", "id", "info", "anti_affinity", "plugin_version", "name", "is_transient", "is_protected", "description", "is_public", "neutron_management_network", "plugin_name"] def _format_node_groups_list(node_groups): return ', '.join( ['%s:%s' % (ng['name'], ng['count']) for ng in node_groups]) def _format_cluster_output(app, data): data['plugin_version'] = data.pop('hadoop_version') data['image'] = data.pop('default_image_id') data['node_groups'] = _format_node_groups_list(data['node_groups']) data['anti_affinity'] = osc_utils.format_list(data['anti_affinity']) def _prepare_health_checks(data): additional_data = {} ver = data.get('verification', {}) additional_fields = ['verification_status'] additional_data['verification_status'] = ver.get('status', 'UNKNOWN') for check in ver.get('checks', []): row_name = "Health check (%s)" % check['name'] additional_data[row_name] = check['status'] additional_fields.append(row_name) return additional_data, additional_fields class CreateCluster(command.ShowOne): """Creates cluster""" log = logging.getLogger(__name__ + ".CreateCluster") def get_parser(self, prog_name): parser = super(CreateCluster, self).get_parser(prog_name) parser.add_argument( '--name', metavar="", help="Name of the cluster [REQUIRED if JSON is not provided]", ) parser.add_argument( '--cluster-template', metavar="", help="Cluster template name or ID [REQUIRED if JSON is not " "provided]" ) parser.add_argument( '--image', metavar="", help='Image that will be used for cluster deployment (Name or ID) ' '[REQUIRED if JSON is not provided]' ) parser.add_argument( '--description', metavar="", help='Description of the cluster' ) parser.add_argument( '--user-keypair', metavar="", help='User keypair to get access to VMs after cluster creation' ) parser.add_argument( '--neutron-network', metavar="", help='Instances of the cluster will get fixed IP addresses in ' 'this network. (Name or ID should be provided)' ) parser.add_argument( '--count', metavar="", type=int, help='Number of clusters to be created' ) parser.add_argument( '--public', action='store_true', default=False, help='Make the cluster public (Visible from other projects)', ) parser.add_argument( '--protected', action='store_true', default=False, help='Make the cluster protected', ) parser.add_argument( '--transient', action='store_true', default=False, help='Create transient cluster', ) parser.add_argument( '--json', metavar='', help='JSON representation of the cluster.
Other ' 'arguments (except for --wait) will not be taken into ' 'account if this one is provided' ) parser.add_argument( '--wait', action='store_true', default=False, help='Wait for the cluster creation to complete', ) return parser def _take_action(self, client, parsed_args): network_client = self.app.client_manager.network if parsed_args.json: blob = osc_utils.read_blob_file_contents(parsed_args.json) try: template = jsonutils.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'template from file %s: %s' % (parsed_args.json, e)) if 'neutron_management_network' in template: template['net_id'] = template.pop('neutron_management_network') if 'count' in template: parsed_args.count = template['count'] data = client.clusters.create(**template).to_dict() else: if not parsed_args.name or not parsed_args.cluster_template \ or not parsed_args.image: raise exceptions.CommandError( 'At least --name , --cluster-template, --image arguments ' 'should be specified or json template should be provided ' 'with --json argument') plugin, plugin_version, template_id = utils._get_plugin_version( self.app, parsed_args.cluster_template, client) image_id = utils.get_resource_id(client.images, parsed_args.image) net_id = (network_client.find_network( parsed_args.neutron_network, ignore_missing=False).id if parsed_args.neutron_network else None) data = utils.create_cluster(client, self.app, parsed_args, plugin, plugin_version, template_id, image_id, net_id) return data def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = self._take_action(client, parsed_args) if parsed_args.count and parsed_args.count > 1: clusters = [ utils.get_resource(client.clusters, id) for id in data['clusters']] if parsed_args.wait: for cluster in clusters: if not osc_utils.wait_for_status( client.clusters.get, cluster.id): self.log.error( 'Error occurred during cluster creation: %s', data['id']) data = {} for cluster in clusters: data[cluster.name] = cluster.id else: if parsed_args.wait: if not osc_utils.wait_for_status( client.clusters.get, data['id']): self.log.error( 'Error occurred during cluster creation: %s', data['id']) data = client.clusters.get(data['id']).to_dict() _format_cluster_output(self.app, data) data = utils.prepare_data(data, CLUSTER_FIELDS) return self.dict2columns(data) class ListClusters(command.Lister): """Lists clusters""" log = logging.getLogger(__name__ + ".ListClusters") def get_parser(self, prog_name): parser = super(ListClusters, self).get_parser(prog_name) parser.add_argument( '--long', action='store_true', default=False, help='List additional fields in output', ) parser.add_argument( '--plugin', metavar="", help="List clusters with specific plugin" ) parser.add_argument( '--plugin-version', metavar="", help="List clusters with specific version of the " "plugin" ) parser.add_argument( '--name', metavar="", help="List clusters with specific substring in the name" ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing search_opts = {} if parsed_args.plugin: search_opts['plugin_name'] = parsed_args.plugin if parsed_args.plugin_version: search_opts['hadoop_version'] = parsed_args.plugin_version data = client.clusters.list(search_opts=search_opts) if parsed_args.name: data = utils.get_by_name_substring(data, parsed_args.name) if parsed_args.long: columns = ('name', 'id', 'plugin_name', 
'hadoop_version', 'status', 'description', 'default_image_id') column_headers = utils.prepare_column_headers( columns, {'hadoop_version': 'plugin_version', 'default_image_id': 'image'}) else: columns = ('name', 'id', 'plugin_name', 'hadoop_version', 'status') column_headers = utils.prepare_column_headers( columns, {'hadoop_version': 'plugin_version', 'default_image_id': 'image'}) return ( column_headers, (osc_utils.get_item_properties( s, columns ) for s in data) ) class ShowCluster(command.ShowOne): """Display cluster details""" log = logging.getLogger(__name__ + ".ShowCluster") def get_parser(self, prog_name): parser = super(ShowCluster, self).get_parser(prog_name) parser.add_argument( "cluster", metavar="", help="Name or id of the cluster to display", ) parser.add_argument( '--verification', action='store_true', default=False, help='List additional fields for verifications', ) parser.add_argument( '--show-progress', action='store_true', default=False, help='Provides ability to show brief details of event logs.' ) parser.add_argument( '--full-dump-events', action='store_true', default=False, help='Provides ability to make full dump with event log details.' ) return parser def _take_action(self, client, parsed_args): kwargs = {} if parsed_args.show_progress or parsed_args.full_dump_events: kwargs['show_progress'] = True data = utils.get_resource( client.clusters, parsed_args.cluster, **kwargs).to_dict() provision_steps = data.get('provision_progress', []) provision_steps = utils.created_at_sorted(provision_steps) if parsed_args.full_dump_events: file_name = utils.random_name('event-logs') # making full dump with open(file_name, 'w') as file: jsonutils.dump(provision_steps, file, indent=4) sys.stdout.write('Event log dump saved to file: %s\n' % file_name) return data, provision_steps def _show_cluster_info(self, data, provision_steps, parsed_args): fields = [] if parsed_args.verification: ver_data, fields = _prepare_health_checks(data) data.update(ver_data) fields.extend(CLUSTER_FIELDS) data = self.dict2columns(utils.prepare_data(data, fields)) if parsed_args.show_progress: output_steps = [] for step in provision_steps: st_name, st_type = step['step_name'], step['step_type'] description = "%s: %s" % (st_type, st_name) if step['successful'] is None: progress = "Step in progress" elif step['successful']: progress = "Step completed successfully" else: progress = 'Step has failed events' output_steps += [(description, progress)] data = utils.extend_columns(data, output_steps) return data def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data, provision_steps = self._take_action(client, parsed_args) _format_cluster_output(self.app, data) data = self._show_cluster_info(data, provision_steps, parsed_args) return data class DeleteCluster(command.Command): """Deletes cluster""" log = logging.getLogger(__name__ + ".DeleteCluster") def get_parser(self, prog_name): parser = super(DeleteCluster, self).get_parser(prog_name) parser.add_argument( "cluster", metavar="", nargs="+", help="Name(s) or id(s) of the cluster(s) to delete", ) parser.add_argument( '--wait', action='store_true', default=False, help='Wait for the cluster(s) delete to complete', ) return parser def _choose_delete_mode(self, parsed_args): return "delete" def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing delete_function_attr = self._choose_delete_mode(parsed_args) clusters = 
[] for cluster in parsed_args.cluster: cluster_id = utils.get_resource_id( client.clusters, cluster) getattr(client.clusters, delete_function_attr)(cluster_id) clusters.append((cluster_id, cluster)) sys.stdout.write( 'Cluster "{cluster}" deletion has been started.\n'.format( cluster=cluster)) if parsed_args.wait: for cluster_id, cluster_arg in clusters: if not utils.wait_for_delete(client.clusters, cluster_id): self.log.error( 'Error occurred during cluster deleting: %s' % cluster_id) else: sys.stdout.write( 'Cluster "{cluster}" has been removed ' 'successfully.\n'.format(cluster=cluster_arg)) class UpdateCluster(command.ShowOne): """Updates cluster""" log = logging.getLogger(__name__ + ".UpdateCluster") def get_parser(self, prog_name): parser = super(UpdateCluster, self).get_parser(prog_name) parser.add_argument( 'cluster', metavar="", help="Name or ID of the cluster", ) parser.add_argument( '--name', metavar="", help="New name of the cluster", ) parser.add_argument( '--description', metavar="", help='Description of the cluster' ) parser.add_argument( '--shares', metavar="", help='JSON representation of the manila shares' ) public = parser.add_mutually_exclusive_group() public.add_argument( '--public', action='store_true', help='Make the cluster public ' '(Visible from other projects)', dest='is_public' ) public.add_argument( '--private', action='store_false', help='Make the cluster private ' '(Visible only from this tenant)', dest='is_public' ) protected = parser.add_mutually_exclusive_group() protected.add_argument( '--protected', action='store_true', help='Make the cluster protected', dest='is_protected' ) protected.add_argument( '--unprotected', action='store_false', help='Make the cluster unprotected', dest='is_protected' ) parser.set_defaults(is_public=None, is_protected=None) return parser def _take_action(self, client, parsed_args): cluster_id = utils.get_resource_id( client.clusters, parsed_args.cluster) shares = None if parsed_args.shares: blob = osc_utils.read_blob_file_contents(parsed_args.shares) try: shares = jsonutils.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'shares from file %s: %s' % (parsed_args.shares, e)) update_dict = utils.create_dict_from_kwargs( name=parsed_args.name, description=parsed_args.description, is_public=parsed_args.is_public, is_protected=parsed_args.is_protected, shares=shares ) data = client.clusters.update(cluster_id, **update_dict).cluster return data def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = self._take_action(client, parsed_args) _format_cluster_output(self.app, data) data = utils.prepare_data(data, CLUSTER_FIELDS) return self.dict2columns(data) class ScaleCluster(command.ShowOne): """Scales cluster""" log = logging.getLogger(__name__ + ".ScaleCluster") def _get_json_arg_helptext(self): return ''' JSON representation of the cluster scale object. 
Other arguments (except for --wait) will not be taken into account if this one is provided ''' def get_parser(self, prog_name): parser = super(ScaleCluster, self).get_parser(prog_name) parser.add_argument( 'cluster', metavar="", help="Name or ID of the cluster", ) parser.add_argument( '--instances', nargs='+', metavar='', help='Node group templates and the number of instances each should ' 'be scaled to [REQUIRED if JSON is not provided]' ) parser.add_argument( '--json', metavar='', help=self._get_json_arg_helptext() ) parser.add_argument( '--wait', action='store_true', default=False, help='Wait for the cluster scale to complete', ) return parser def _take_action(self, client, parsed_args): cluster = utils.get_resource( client.clusters, parsed_args.cluster) if parsed_args.json: blob = osc_utils.read_blob_file_contents(parsed_args.json) try: template = jsonutils.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'template from file %s: %s' % (parsed_args.json, e)) data = client.clusters.scale(cluster.id, template).cluster else: scale_object = { "add_node_groups": [], "resize_node_groups": [] } scale_node_groups = dict( map(lambda x: x.split(':', 1), parsed_args.instances)) cluster_ng_map = { ng['node_group_template_id']: ng['name'] for ng in cluster.node_groups} for name, count in scale_node_groups.items(): ngt = utils.get_resource(client.node_group_templates, name) if ngt.id in cluster_ng_map: scale_object["resize_node_groups"].append({ "name": cluster_ng_map[ngt.id], "count": int(count) }) else: scale_object["add_node_groups"].append({ "node_group_template_id": ngt.id, "name": ngt.name, "count": int(count) }) if not scale_object['add_node_groups']: del scale_object['add_node_groups'] if not scale_object['resize_node_groups']: del scale_object['resize_node_groups'] data = client.clusters.scale(cluster.id, scale_object).cluster sys.stdout.write( 'Cluster "{cluster}" scaling has been started.\n'.format( cluster=parsed_args.cluster)) if parsed_args.wait: if not osc_utils.wait_for_status( client.clusters.get, data['id']): self.log.error( 'Error occurred during cluster scaling: %s' % cluster.id) data = client.clusters.get(cluster.id).cluster return data def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = self._take_action(client, parsed_args) _format_cluster_output(self.app, data) data = utils.prepare_data(data, CLUSTER_FIELDS) return self.dict2columns(data) class VerificationUpdateCluster(command.ShowOne): """Updates cluster verifications""" log = logging.getLogger(__name__ + ".VerificationUpdateCluster") def get_parser(self, prog_name): parser = super(VerificationUpdateCluster, self).get_parser(prog_name) parser.add_argument( 'cluster', metavar="", help="Name or ID of the cluster", ) status = parser.add_mutually_exclusive_group(required=True) status.add_argument( '--start', action='store_const', const='START', help='Start health verification for the cluster', dest='status' ) status.add_argument( '--show', help='Show health of the cluster', action='store_true' ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing if parsed_args.show: data = utils.get_resource( client.clusters, parsed_args.cluster).to_dict() ver_data, ver_fields = _prepare_health_checks(data) data = utils.prepare_data(ver_data, ver_fields) return self.dict2columns(data) else: cluster_id =
utils.get_resource_id( client.clusters, parsed_args.cluster) client.clusters.verification_update( cluster_id, parsed_args.status) if parsed_args.status == 'START': print_status = 'started' sys.stdout.write( 'Cluster "{cluster}" health verification has been ' '{status}.\n'.format(cluster=parsed_args.cluster, status=print_status)) return {}, {} python-saharaclient-3.1.0/saharaclient/osc/v1/job_types.py0000664000175000017500000001076613643576737023671 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from os import path import sys from osc_lib.command import command from osc_lib import exceptions from osc_lib import utils as osc_utils from oslo_log import log as logging from oslo_serialization import jsonutils from saharaclient.osc import utils from saharaclient.osc.v1.job_templates import JOB_TYPES_CHOICES class ListJobTypes(command.Lister): """Lists job types supported by plugins""" log = logging.getLogger(__name__ + ".ListJobTypes") def get_parser(self, prog_name): parser = super(ListJobTypes, self).get_parser(prog_name) parser.add_argument( '--type', metavar="", choices=JOB_TYPES_CHOICES, help="Get information about specific job type" ) parser.add_argument( '--plugin', metavar="", help="Get only job types supported by this plugin" ) parser.add_argument( '--plugin-version', metavar="", help="Get only job types supported by specific version of the " "plugin. 
This parameter will be taken into account only if " "--plugin is provided" ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing search_opts = {} if parsed_args.type: search_opts['type'] = parsed_args.type if parsed_args.plugin: search_opts['plugin'] = parsed_args.plugin if parsed_args.plugin_version: search_opts['plugin_version'] = parsed_args.plugin_version elif parsed_args.plugin_version: raise exceptions.CommandError( '--plugin-version argument should be specified with --plugin ' 'argument') data = client.job_types.list(search_opts=search_opts) for job in data: plugins = [] for plugin in job.plugins: versions = ", ".join(sorted(plugin["versions"].keys())) if versions: versions = "(" + versions + ")" plugins.append(plugin["name"] + versions) job.plugins = ', '.join(plugins) columns = ('name', 'plugins') column_headers = utils.prepare_column_headers(columns) return ( column_headers, (osc_utils.get_item_properties( s, columns ) for s in data) ) class GetJobTypeConfigs(command.Command): """Get job type configs""" log = logging.getLogger(__name__ + ".GetJobTypeConfigs") def get_parser(self, prog_name): parser = super(GetJobTypeConfigs, self).get_parser(prog_name) parser.add_argument( "job_type", metavar="", choices=JOB_TYPES_CHOICES, help="Type of the job to provide config information about", ) parser.add_argument( '--file', metavar="", help='Destination file (defaults to job type)', ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing if not parsed_args.file: parsed_args.file = parsed_args.job_type data = client.jobs.get_configs(parsed_args.job_type).to_dict() if path.exists(parsed_args.file): self.log.error('File "%s" already exists. Choose another one with ' '--file argument.' % parsed_args.file) else: with open(parsed_args.file, 'w') as f: jsonutils.dump(data, f, indent=4) sys.stdout.write( '"%(type)s" job configs were saved in "%(file)s" ' 'file\n' % {'type': parsed_args.job_type, 'file': parsed_args.file}) python-saharaclient-3.1.0/saharaclient/osc/v1/jobs.py0000664000175000017500000003116413643576737022613 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License.
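# Illustrative sketch, not part of the upstream module: ExecuteJob below
# builds its ``job_configs`` payload from repeated "key:value" command-line
# tokens with ``dict(map(lambda x: x.split(':', 1), ...))``. The helper name
# and the sample values here are assumptions added for demonstration only.
def _example_build_job_configs(configs=None, params=None, args=None):
    job_configs = {}
    if configs:
        # Split on the first ':' only, so values may themselves contain
        # colons (e.g. URLs).
        job_configs['configs'] = dict(
            map(lambda x: x.split(':', 1), configs))
    if params:
        job_configs['params'] = dict(
            map(lambda x: x.split(':', 1), params))
    if args:
        job_configs['args'] = list(args)
    return job_configs

# Expected result for a typical invocation:
assert _example_build_job_configs(
    configs=['mapred.reduce.tasks:1'],
    args=['swift://demo-container/input']) == {
        'configs': {'mapred.reduce.tasks': '1'},
        'args': ['swift://demo-container/input']}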
import sys from osc_lib.command import command from osc_lib import exceptions from osc_lib import utils as osc_utils from oslo_log import log as logging from oslo_serialization import jsonutils from saharaclient.osc import utils JOB_FIELDS = ['id', 'job_template_id', 'cluster_id', 'input_id', 'output_id', 'start_time', 'end_time', 'status', 'is_public', 'is_protected', 'engine_job_id'] JOB_STATUS_CHOICES = ['done-with-error', 'failed', 'killed', 'pending', 'running', 'succeeded', 'to-be-killed'] def _format_job_output(app, data): data['status'] = data['info']['status'] del data['info'] data['job_template_id'] = data.pop('job_id') class ExecuteJob(command.ShowOne): """Executes job""" log = logging.getLogger(__name__ + ".ExecuteJob") def get_parser(self, prog_name): parser = super(ExecuteJob, self).get_parser(prog_name) parser.add_argument( '--job-template', metavar="", help="Name or ID of the job template " "[REQUIRED if JSON is not provided]", ) parser.add_argument( '--cluster', metavar="", help="Name or ID of the cluster " "[REQUIRED if JSON is not provided]", ) parser.add_argument( '--input', metavar="", help="Name or ID of the input data source", ) parser.add_argument( '--output', metavar="", help="Name or ID of the output data source", ) parser.add_argument( '--params', metavar="", nargs='+', help="Parameters to add to the job" ) parser.add_argument( '--args', metavar="", nargs='+', help="Arguments to add to the job" ) parser.add_argument( '--public', action='store_true', default=False, help='Make the job public', ) parser.add_argument( '--protected', action='store_true', default=False, help='Make the job protected', ) configs = parser.add_mutually_exclusive_group() configs.add_argument( '--config-json', metavar='', help='JSON representation of the job configs' ) configs.add_argument( '--configs', metavar="", nargs='+', help="Configs to add to the job" ) parser.add_argument( '--interface', metavar='', help='JSON representation of the interface' ) parser.add_argument( '--json', metavar='', help='JSON representation of the job. 
Other arguments will not be ' 'taken into account if this one is provided' ) return parser def _take_action(self, client, parsed_args): if parsed_args.json: blob = osc_utils.read_blob_file_contents(parsed_args.json) try: template = jsonutils.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'template from file %s: %s' % (parsed_args.json, e)) if 'job_configs' in template: template['configs'] = template.pop('job_configs') data = utils.create_job_json(client, self.app, template) else: if not parsed_args.cluster or not parsed_args.job_template: raise exceptions.CommandError( 'At least --cluster and --job-template arguments should be ' 'specified, or a JSON template should be provided with the ' '--json argument') job_configs = {} if parsed_args.interface: blob = osc_utils.read_blob_file_contents(parsed_args.interface) try: parsed_args.interface = jsonutils.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'interface from file %s: %s' % (parsed_args.interface, e)) if parsed_args.config_json: blob = osc_utils.read_blob_file_contents(parsed_args.config_json) try: job_configs['configs'] = jsonutils.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'configs from file %s: %s' % (parsed_args.config_json, e)) elif parsed_args.configs: job_configs['configs'] = dict( map(lambda x: x.split(':', 1), parsed_args.configs)) if parsed_args.args: job_configs['args'] = parsed_args.args if parsed_args.params: job_configs['params'] = dict( map(lambda x: x.split(':', 1), parsed_args.params)) jt_id = utils.get_job_template_id(self.app, client, parsed_args) cluster_id = utils.get_resource_id( client.clusters, parsed_args.cluster) if parsed_args.input not in [None, "", "None"]: input_id = utils.get_resource_id( client.data_sources, parsed_args.input) else: input_id = None if parsed_args.output not in [None, "", "None"]: output_id = utils.get_resource_id( client.data_sources, parsed_args.output) else: output_id = None data = utils.create_job(client, self.app, jt_id, cluster_id, input_id, output_id, job_configs, parsed_args) sys.stdout.write( 'Job "{job}" has been started successfully.\n'.format( job=data['id'])) return data def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = self._take_action(client, parsed_args) _format_job_output(self.app, data) data = utils.prepare_data(data, JOB_FIELDS) return self.dict2columns(data) class ListJobs(command.Lister): """Lists jobs""" log = logging.getLogger(__name__ + ".ListJobs") def get_parser(self, prog_name): parser = super(ListJobs, self).get_parser(prog_name) parser.add_argument( '--long', action='store_true', default=False, help='List additional fields in output', ) parser.add_argument( '--status', metavar="", choices=JOB_STATUS_CHOICES, help="List jobs with specific status" ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = client.job_executions.list() for job in data: job.status = job.info['status'] if parsed_args.status: data = [job for job in data if job.info['status'] == parsed_args.status.replace( '-', '').upper()] if parsed_args.long: columns = ('id', 'cluster id', 'job id', 'status', 'start time', 'end time') column_headers = utils.prepare_column_headers(columns) else: columns = ('id', 'cluster id', 'job id', 'status') column_headers =
utils.prepare_column_headers(columns) return ( column_headers, (osc_utils.get_item_properties( s, columns ) for s in data) ) class ShowJob(command.ShowOne): """Display job details""" log = logging.getLogger(__name__ + ".ShowJob") def get_parser(self, prog_name): parser = super(ShowJob, self).get_parser(prog_name) parser.add_argument( "job", metavar="", help="ID of the job to display", ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = client.job_executions.get(parsed_args.job).to_dict() _format_job_output(self.app, data) data = utils.prepare_data(data, JOB_FIELDS) return self.dict2columns(data) class DeleteJob(command.Command): """Deletes job""" log = logging.getLogger(__name__ + ".DeleteJob") def get_parser(self, prog_name): parser = super(DeleteJob, self).get_parser(prog_name) parser.add_argument( "job", metavar="", nargs="+", help="ID(s) of the job(s) to delete", ) parser.add_argument( '--wait', action='store_true', default=False, help='Wait for the job(s) delete to complete', ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing for job_id in parsed_args.job: client.job_executions.delete(job_id) sys.stdout.write( 'Job "{job}" deletion has been started.\n'.format(job=job_id)) if parsed_args.wait: for job_id in parsed_args.job: wait_for_delete = utils.wait_for_delete( client.job_executions, job_id) if not wait_for_delete: self.log.error( 'Error occurred during job deleting: %s' % job_id) else: sys.stdout.write( 'Job "{job}" has been removed successfully.\n'.format( job=job_id)) class UpdateJob(command.ShowOne): """Updates job""" log = logging.getLogger(__name__ + ".UpdateJob") def get_parser(self, prog_name): parser = super(UpdateJob, self).get_parser(prog_name) parser.add_argument( 'job', metavar="", help="ID of the job to update", ) public = parser.add_mutually_exclusive_group() public.add_argument( '--public', action='store_true', help='Make the job public (Visible from other projects)', dest='is_public' ) public.add_argument( '--private', action='store_false', help='Make the job private (Visible only from this project)', dest='is_public' ) protected = parser.add_mutually_exclusive_group() protected.add_argument( '--protected', action='store_true', help='Make the job protected', dest='is_protected' ) protected.add_argument( '--unprotected', action='store_false', help='Make the job unprotected', dest='is_protected' ) parser.set_defaults(is_public=None, is_protected=None) return parser def _take_action(self, client, parsed_args): update_dict = utils.create_dict_from_kwargs( is_public=parsed_args.is_public, is_protected=parsed_args.is_protected) data = utils.update_job(client, self.app, parsed_args, update_dict) return data def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = self._take_action(client, parsed_args) _format_job_output(self.app, data) data = utils.prepare_data(data, JOB_FIELDS) return self.dict2columns(data) python-saharaclient-3.1.0/saharaclient/osc/v1/job_templates.py0000664000175000017500000002500413643576737024512 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import sys from osc_lib.command import command from osc_lib import exceptions from osc_lib import utils as osc_utils from oslo_log import log as logging from oslo_serialization import jsonutils from saharaclient.osc import utils JOB_TEMPLATE_FIELDS = ['name', 'id', 'type', 'mains', 'libs', 'description', 'is_public', 'is_protected'] JOB_TYPES_CHOICES = ['Hive', 'Java', 'MapReduce', 'Storm', 'Storm.Pyleus', 'Pig', 'Shell', 'MapReduce.Streaming', 'Spark'] def _format_job_template_output(data): data['mains'] = osc_utils.format_list( ['%s:%s' % (m['name'], m['id']) for m in data['mains']]) data['libs'] = osc_utils.format_list( ['%s:%s' % (l['name'], l['id']) for l in data['libs']]) class CreateJobTemplate(command.ShowOne): """Creates job template""" log = logging.getLogger(__name__ + ".CreateJobTemplate") def get_parser(self, prog_name): parser = super(CreateJobTemplate, self).get_parser(prog_name) parser.add_argument( '--name', metavar="", help="Name of the job template [REQUIRED if JSON is not provided]", ) parser.add_argument( '--type', metavar="", choices=JOB_TYPES_CHOICES, help="Type of the job (%s) " "[REQUIRED if JSON is not provided]" % ', '.join( JOB_TYPES_CHOICES) ) parser.add_argument( '--mains', metavar="
", nargs='+', help="Name(s) or ID(s) for job's main job binary(s)", ) parser.add_argument( '--libs', metavar="", nargs='+', help="Name(s) or ID(s) for job's lib job binary(s)", ) parser.add_argument( '--description', metavar="", help="Description of the job template" ) parser.add_argument( '--public', action='store_true', default=False, help='Make the job template public', ) parser.add_argument( '--protected', action='store_true', default=False, help='Make the job template protected', ) parser.add_argument( '--interface', metavar='', help='JSON representation of the interface' ) parser.add_argument( '--json', metavar='', help='JSON representation of the job template' ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing if parsed_args.json: blob = osc_utils.read_blob_file_contents(parsed_args.json) try: template = jsonutils.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'template from file %s: %s' % (parsed_args.json, e)) data = utils.create_job_template_json(self.app, client, **template) else: if parsed_args.interface: blob = osc_utils.read_blob_file_contents(parsed_args.json) try: parsed_args.interface = jsonutils.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'interface from file %s: %s' % (parsed_args.json, e)) mains_ids = [utils.get_resource_id(client.job_binaries, m) for m in parsed_args.mains] if parsed_args.mains else None libs_ids = [utils.get_resource_id(client.job_binaries, m) for m in parsed_args.libs] if parsed_args.libs else None data = utils.create_job_templates(self.app, client, mains_ids, libs_ids, parsed_args) _format_job_template_output(data) data = utils.prepare_data(data, JOB_TEMPLATE_FIELDS) return self.dict2columns(data) class ListJobTemplates(command.Lister): """Lists job templates""" log = logging.getLogger(__name__ + ".ListJobTemplates") def get_parser(self, prog_name): parser = super(ListJobTemplates, self).get_parser(prog_name) parser.add_argument( '--long', action='store_true', default=False, help='List additional fields in output', ) parser.add_argument( '--type', metavar="", choices=JOB_TYPES_CHOICES, help="List job templates of specific type" ) parser.add_argument( '--name', metavar="", help="List job templates with specific substring in the " "name" ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing search_opts = {'type': parsed_args.type} if parsed_args.type else {} data = utils.list_job_templates(self.app, client, search_opts) if parsed_args.name: data = utils.get_by_name_substring(data, parsed_args.name) if parsed_args.long: columns = ('name', 'id', 'type', 'description', 'is_public', 'is_protected') column_headers = utils.prepare_column_headers(columns) else: columns = ('name', 'id', 'type') column_headers = utils.prepare_column_headers(columns) return ( column_headers, (osc_utils.get_item_properties( s, columns ) for s in data) ) class ShowJobTemplate(command.ShowOne): """Display job template details""" log = logging.getLogger(__name__ + ".ShowJobTemplate") def get_parser(self, prog_name): parser = super(ShowJobTemplate, self).get_parser(prog_name) parser.add_argument( "job_template", metavar="", help="Name or ID of the job template to display", ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = 
self.app.client_manager.data_processing data = utils.get_job_templates_resources(self.app, client, parsed_args) _format_job_template_output(data) data = utils.prepare_data(data, JOB_TEMPLATE_FIELDS) return self.dict2columns(data) class DeleteJobTemplate(command.Command): """Deletes job template""" log = logging.getLogger(__name__ + ".DeleteJobTemplate") def get_parser(self, prog_name): parser = super(DeleteJobTemplate, self).get_parser(prog_name) parser.add_argument( "job_template", metavar="", nargs="+", help="Name(s) or id(s) of the job template(s) to delete", ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing for jt in parsed_args.job_template: utils.delete_job_templates(self.app, client, jt) sys.stdout.write( 'Job template "{jt}" has been removed ' 'successfully.\n'.format(jt=jt)) class UpdateJobTemplate(command.ShowOne): """Updates job template""" log = logging.getLogger(__name__ + ".UpdateJobTemplate") def get_parser(self, prog_name): parser = super(UpdateJobTemplate, self).get_parser(prog_name) parser.add_argument( 'job_template', metavar="", help="Name or ID of the job template", ) parser.add_argument( '--name', metavar="", help="New name of the job template", ) parser.add_argument( '--description', metavar="", help='Description of the job template' ) public = parser.add_mutually_exclusive_group() public.add_argument( '--public', action='store_true', help='Make the job template public ' '(Visible from other projects)', dest='is_public' ) public.add_argument( '--private', action='store_false', help='Make the job_template private ' '(Visible only from this tenant)', dest='is_public' ) protected = parser.add_mutually_exclusive_group() protected.add_argument( '--protected', action='store_true', help='Make the job template protected', dest='is_protected' ) protected.add_argument( '--unprotected', action='store_false', help='Make the job template unprotected', dest='is_protected' ) parser.set_defaults(is_public=None, is_protected=None) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing jt_id = utils.get_job_template_id(self.app, client, parsed_args) update_data = utils.create_dict_from_kwargs( name=parsed_args.name, description=parsed_args.description, is_public=parsed_args.is_public, is_protected=parsed_args.is_protected ) data = utils.update_job_templates(self.app, client, jt_id, update_data) _format_job_template_output(data) data = utils.prepare_data(data, JOB_TEMPLATE_FIELDS) return self.dict2columns(data) python-saharaclient-3.1.0/saharaclient/osc/v1/plugins.py0000664000175000017500000001732013643576737023345 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
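# Illustrative sketch, not part of the upstream module: the shape of the
# label flattening that _serialize_label_items() below performs on the
# dictionaries returned by client.plugins.get(...).to_dict(). This simplified
# mirror skips the utils.prepare_data() key formatting step; the plugin
# payload (label names, version numbers) is an assumption for demonstration.
def _example_flatten_labels(plugin):
    labels = {}
    for label, data in plugin.get('plugin_labels', {}).items():
        labels['plugin: %s' % label] = data['status']
    for version, version_data in plugin.get('version_labels', {}).items():
        for label, data in version_data.items():
            labels['plugin version %s: %s' % (version, label)] = data['status']
    # Sorted so the show/update commands print labels deterministically.
    return sorted(labels.items())

assert _example_flatten_labels({
    'plugin_labels': {'enabled': {'status': True}},
    'version_labels': {'2.7.1': {'deprecated': {'status': True}}},
}) == [('plugin version 2.7.1: deprecated', True), ('plugin: enabled', True)]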
from os import path import sys from osc_lib.command import command from osc_lib import exceptions from osc_lib import utils as osc_utils from oslo_log import log as logging from oslo_serialization import jsonutils from saharaclient.osc import utils def _serialize_label_items(plugin): labels = {} pl_labels = plugin.get('plugin_labels', {}) for label, data in pl_labels.items(): labels['plugin: %s' % label] = data['status'] vr_labels = plugin.get('version_labels', {}) for version, version_data in vr_labels.items(): for label, data in version_data.items(): labels[ 'plugin version %s: %s' % (version, label)] = data['status'] labels = utils.prepare_data(labels, list(labels.keys())) return sorted(labels.items()) class ListPlugins(command.Lister): """Lists plugins""" log = logging.getLogger(__name__ + ".ListPlugins") def get_parser(self, prog_name): parser = super(ListPlugins, self).get_parser(prog_name) parser.add_argument( '--long', action='store_true', default=False, help='List additional fields in output', ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = client.plugins.list() if parsed_args.long: columns = ('name', 'title', 'versions', 'description') column_headers = utils.prepare_column_headers(columns) else: columns = ('name', 'versions') column_headers = utils.prepare_column_headers(columns) return ( column_headers, (osc_utils.get_item_properties( s, columns, formatters={ 'versions': osc_utils.format_list }, ) for s in data) ) class ShowPlugin(command.ShowOne): """Display plugin details""" log = logging.getLogger(__name__ + ".ShowPlugin") def get_parser(self, prog_name): parser = super(ShowPlugin, self).get_parser(prog_name) parser.add_argument( "plugin", metavar="", help="Name of the plugin to display", ) parser.add_argument( "--plugin-version", metavar="", help='Version of the plugin to display' ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing if parsed_args.plugin_version: data = client.plugins.get_version_details( parsed_args.plugin, parsed_args.plugin_version).to_dict() processes = data.pop('node_processes') for k, v in processes.items(): processes[k] = osc_utils.format_list(v) data['required_image_tags'] = osc_utils.format_list( data['required_image_tags']) label_items = _serialize_label_items(data) data = utils.prepare_data( data, ['required_image_tags', 'name', 'description', 'title']) data = self.dict2columns(data) data = utils.extend_columns(data, label_items) data = utils.extend_columns( data, [('Service:', 'Available processes:')]) data = utils.extend_columns( data, sorted(processes.items())) else: data = client.plugins.get(parsed_args.plugin).to_dict() data['versions'] = osc_utils.format_list(data['versions']) items = _serialize_label_items(data) data = utils.prepare_data( data, ['versions', 'name', 'description', 'title']) data = utils.extend_columns(self.dict2columns(data), items) return data class GetPluginConfigs(command.Command): """Get plugin configs""" log = logging.getLogger(__name__ + ".GetPluginConfigs") def get_parser(self, prog_name): parser = super(GetPluginConfigs, self).get_parser(prog_name) parser.add_argument( "plugin", metavar="", help="Name of the plugin to provide config information about", ) parser.add_argument( "plugin_version", metavar="", help="Version of the plugin to provide config information about", ) parser.add_argument( '--file', metavar="", 
help="Destination file (defaults to a combination of " "plugin name and plugin version)", ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing if not parsed_args.file: parsed_args.file = (parsed_args.plugin + '-' + parsed_args.plugin_version) if path.exists(parsed_args.file): msg = ('File "%s" already exists. Choose another one with ' '--file argument.' % parsed_args.file) raise exceptions.CommandError(msg) else: data = client.plugins.get_version_details( parsed_args.plugin, parsed_args.plugin_version).to_dict() with open(parsed_args.file, 'w') as f: jsonutils.dump(data, f, indent=4) sys.stdout.write( '"%(plugin)s" plugin "%(version)s" version configs ' 'was saved in "%(file)s" file\n' % { 'plugin': parsed_args.plugin, 'version': parsed_args.plugin_version, 'file': parsed_args.file}) class UpdatePlugin(command.ShowOne): log = logging.getLogger(__name__ + ".UpdatePlugin") def get_parser(self, prog_name): parser = super(UpdatePlugin, self).get_parser(prog_name) parser.add_argument( "plugin", metavar="", help="Name of the plugin to provide config information about", ) parser.add_argument( 'json', metavar="", help='JSON representation of the plugin update dictionary', ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing blob = osc_utils.read_blob_file_contents(parsed_args.json) try: update_dict = jsonutils.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'update dict from file %s: %s' % (parsed_args.json, e)) plugin = client.plugins.update(parsed_args.plugin, update_dict) data = plugin.to_dict() data['versions'] = osc_utils.format_list(data['versions']) items = _serialize_label_items(data) data = utils.prepare_data( data, ['versions', 'name', 'description', 'title']) data = utils.extend_columns(self.dict2columns(data), items) return data python-saharaclient-3.1.0/saharaclient/osc/v1/job_binaries.py0000664000175000017500000004305513643576737024316 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
from os import path import sys from osc_lib.command import command from osc_lib import exceptions from osc_lib import utils as osc_utils from oslo_log import log as logging from oslo_serialization import jsonutils from saharaclient.api import base from saharaclient.osc import utils JOB_BINARY_FIELDS = ['name', 'id', 'url', 'description', 'is_public', 'is_protected'] class CreateJobBinary(command.ShowOne): """Creates job binary""" log = logging.getLogger(__name__ + ".CreateJobBinary") def get_parser(self, prog_name): parser = super(CreateJobBinary, self).get_parser(prog_name) parser.add_argument( '--name', metavar="", help="Name of the job binary [REQUIRED if JSON is not provided]", ) creation_type = parser.add_mutually_exclusive_group() creation_type.add_argument( '--data', metavar='', help='File that will be stored in the internal DB [REQUIRED if ' 'JSON and URL are not provided]' ) creation_type.add_argument( '--url', metavar='', help='URL for the job binary [REQUIRED if JSON and file are ' 'not provided]' ) parser.add_argument( '--description', metavar="", help="Description of the job binary" ) username = parser.add_mutually_exclusive_group() username.add_argument( '--username', metavar='', help='Username for accessing the job binary URL', ) username.add_argument( '--access-key', metavar='', help='S3 access key for accessing the job binary URL', ) password = parser.add_mutually_exclusive_group() password.add_argument( '--password', metavar='', help='Password for accessing the job binary URL', ) password.add_argument( '--secret-key', metavar='', help='S3 secret key for accessing the job binary URL', ) password.add_argument( '--password-prompt', dest="password_prompt", action="store_true", help='Prompt interactively for password', ) password.add_argument( '--secret-key-prompt', dest="secret_key_prompt", action="store_true", help='Prompt interactively for S3 secret key', ) parser.add_argument( '--s3-endpoint', metavar='', help='S3 endpoint for accessing the job binary URL (ignored if ' 'binary not in S3', ) parser.add_argument( '--public', action='store_true', default=False, help='Make the job binary public', ) parser.add_argument( '--protected', action='store_true', default=False, help='Make the job binary protected', ) parser.add_argument( '--json', metavar='', help='JSON representation of the job binary. 
Other ' 'arguments will not be taken into account if this one is ' 'provided' ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing if parsed_args.json: blob = osc_utils.read_blob_file_contents(parsed_args.json) try: template = jsonutils.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'template from file %s: %s' % (parsed_args.json, e)) data = client.job_binaries.create(**template).to_dict() else: if parsed_args.data: data = open(parsed_args.data).read() jbi_id = client.job_binary_internals.create( parsed_args.name, data).id parsed_args.url = 'internal-db://' + jbi_id if parsed_args.password_prompt: parsed_args.password = osc_utils.get_password( self.app.stdin, confirm=False) if parsed_args.secret_key_prompt: parsed_args.secret_key = osc_utils.get_password( self.app.stdin, confirm=False) if not parsed_args.password: parsed_args.password = parsed_args.secret_key if not parsed_args.username: parsed_args.username = parsed_args.access_key if parsed_args.password and not parsed_args.username: raise exceptions.CommandError( 'Username via --username, or S3 access key via ' '--access-key should be provided with password') if parsed_args.username and not parsed_args.password: raise exceptions.CommandError( 'Password should be provided via --password or ' '--secret-key, or entered interactively with ' '--password-prompt or --secret-key-prompt') if parsed_args.password and parsed_args.username: if not parsed_args.url: raise exceptions.CommandError( 'URL must be provided via --url') if parsed_args.url.startswith('s3'): if not parsed_args.s3_endpoint: raise exceptions.CommandError( 'S3 job binaries need an endpoint provided via ' '--s3-endpoint') extra = { 'accesskey': parsed_args.username, 'secretkey': parsed_args.password, 'endpoint': parsed_args.s3_endpoint, } else: extra = { 'user': parsed_args.username, 'password': parsed_args.password } else: extra = None data = client.job_binaries.create( name=parsed_args.name, url=parsed_args.url, description=parsed_args.description, extra=extra, is_public=parsed_args.public, is_protected=parsed_args.protected).to_dict() data = utils.prepare_data(data, JOB_BINARY_FIELDS) return self.dict2columns(data) class ListJobBinaries(command.Lister): """Lists job binaries""" log = logging.getLogger(__name__ + ".ListJobBinaries") def get_parser(self, prog_name): parser = super(ListJobBinaries, self).get_parser(prog_name) parser.add_argument( '--long', action='store_true', default=False, help='List additional fields in output', ) parser.add_argument( '--name', metavar="", help="List job binaries with specific substring in the " "name" ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = client.job_binaries.list() if parsed_args.name: data = utils.get_by_name_substring(data, parsed_args.name) if parsed_args.long: columns = ('name', 'id', 'url', 'description', 'is_public', 'is_protected') column_headers = utils.prepare_column_headers(columns) else: columns = ('name', 'id', 'url') column_headers = utils.prepare_column_headers(columns) return ( column_headers, (osc_utils.get_item_properties( s, columns ) for s in data) ) class ShowJobBinary(command.ShowOne): """Display job binary details""" log = logging.getLogger(__name__ + ".ShowJobBinary") def get_parser(self, prog_name): parser = super(ShowJobBinary, self).get_parser(prog_name) 
parser.add_argument( "job_binary", metavar="", help="Name or ID of the job binary to display", ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = utils.get_resource( client.job_binaries, parsed_args.job_binary).to_dict() data = utils.prepare_data(data, JOB_BINARY_FIELDS) return self.dict2columns(data) class DeleteJobBinary(command.Command): """Deletes job binary""" log = logging.getLogger(__name__ + ".DeleteJobBinary") def get_parser(self, prog_name): parser = super(DeleteJobBinary, self).get_parser(prog_name) parser.add_argument( "job_binary", metavar="", nargs="+", help="Name(s) or id(s) of the job binary(ies) to delete", ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing for jb in parsed_args.job_binary: jb = utils.get_resource(client.job_binaries, jb) if jb.url.startswith("internal-db"): jbi_id = jb.url.replace('internal-db://', '') try: client.job_binary_internals.delete(jbi_id) except base.APIException as ex: # check if job binary internal was already deleted for # some reasons if not ex.error_code == '404': raise client.job_binaries.delete(jb.id) sys.stdout.write( 'Job binary "{jb}" has been removed ' 'successfully.\n'.format(jb=jb)) class UpdateJobBinary(command.ShowOne): """Updates job binary""" log = logging.getLogger(__name__ + ".UpdateJobBinary") def get_parser(self, prog_name): parser = super(UpdateJobBinary, self).get_parser(prog_name) parser.add_argument( 'job_binary', metavar="", help="Name or ID of the job binary", ) parser.add_argument( '--name', metavar="", help="New name of the job binary", ) parser.add_argument( '--url', metavar='', help='URL for the job binary [Internal DB URL can not be updated]' ) parser.add_argument( '--description', metavar="", help='Description of the job binary' ) username = parser.add_mutually_exclusive_group() username.add_argument( '--username', metavar='', help='Username for accessing the job binary URL', ) username.add_argument( '--access-key', metavar='', help='S3 access key for accessing the job binary URL', ) password = parser.add_mutually_exclusive_group() password.add_argument( '--password', metavar='', help='Password for accessing the job binary URL', ) password.add_argument( '--secret-key', metavar='', help='S3 secret key for accessing the job binary URL', ) password.add_argument( '--password-prompt', dest="password_prompt", action="store_true", help='Prompt interactively for password', ) password.add_argument( '--secret-key-prompt', dest="secret_key_prompt", action="store_true", help='Prompt interactively for S3 secret key', ) parser.add_argument( '--s3-endpoint', metavar='', help='S3 endpoint for accessing the job binary URL (ignored if ' 'binary not in S3', ) public = parser.add_mutually_exclusive_group() public.add_argument( '--public', action='store_true', help='Make the job binary public (Visible from other projects)', dest='is_public' ) public.add_argument( '--private', action='store_false', help='Make the job binary private (Visible only from' ' this project)', dest='is_public' ) protected = parser.add_mutually_exclusive_group() protected.add_argument( '--protected', action='store_true', help='Make the job binary protected', dest='is_protected' ) protected.add_argument( '--unprotected', action='store_false', help='Make the job binary unprotected', dest='is_protected' ) parser.add_argument( '--json', metavar='', help='JSON 
representation of the update object. Other ' 'arguments will not be taken into account if this one is ' 'provided' ) parser.set_defaults(is_public=None, is_protected=None) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing jb_id = utils.get_resource_id( client.job_binaries, parsed_args.job_binary) if parsed_args.json: blob = osc_utils.read_blob_file_contents(parsed_args.json) try: template = jsonutils.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'template from file %s: %s' % (parsed_args.json, e)) data = client.job_binaries.update(jb_id, template).to_dict() else: if parsed_args.password_prompt: parsed_args.password = osc_utils.get_password( self.app.stdin, confirm=False) if parsed_args.secret_key_prompt: parsed_args.secret_key = osc_utils.get_password( self.app.stdin, confirm=False) extra = {} if parsed_args.password: extra['password'] = parsed_args.password if parsed_args.username: extra['user'] = parsed_args.username if parsed_args.access_key: extra['accesskey'] = parsed_args.access_key if parsed_args.secret_key: extra['secretkey'] = parsed_args.secret_key if parsed_args.s3_endpoint: extra['endpoint'] = parsed_args.s3_endpoint if not extra: extra = None update_fields = utils.create_dict_from_kwargs( name=parsed_args.name, url=parsed_args.url, description=parsed_args.description, extra=extra, is_public=parsed_args.is_public, is_protected=parsed_args.is_protected ) data = client.job_binaries.update( jb_id, update_fields).to_dict() data = utils.prepare_data(data, JOB_BINARY_FIELDS) return self.dict2columns(data) class DownloadJobBinary(command.Command): """Downloads job binary""" log = logging.getLogger(__name__ + ".DownloadJobBinary") def get_parser(self, prog_name): parser = super(DownloadJobBinary, self).get_parser(prog_name) parser.add_argument( "job_binary", metavar="", help="Name or ID of the job binary to download", ) parser.add_argument( '--file', metavar="", help='Destination file (defaults to job binary name)', ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing if not parsed_args.file: parsed_args.file = parsed_args.job_binary if path.exists(parsed_args.file): msg = ('File "%s" already exists. Chose another one with ' '--file argument.' % parsed_args.file) raise exceptions.CommandError(msg) else: jb_id = utils.get_resource_id( client.job_binaries, parsed_args.job_binary) data = client.job_binaries.get_file(jb_id) with open(parsed_args.file, 'wb') as f: f.write(data) sys.stdout.write( 'Job binary "{jb}" has been downloaded ' 'successfully.\n'.format(jb=parsed_args.job_binary)) python-saharaclient-3.1.0/saharaclient/osc/v1/node_group_templates.py0000664000175000017500000005240713643576737026110 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
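# Illustrative sketch, not part of the upstream module: _format_ngt_output()
# below drops every volume-related field when a template has no attached
# volumes, so the show/create output only lists them when they are
# meaningful. The helper name and the sample template are assumptions
# added for demonstration only.
def _example_strip_volume_fields(template):
    if template.get('volumes_per_node') == 0:
        for field in ('volume_local_to_instance', 'volume_mount_prefix',
                      'volume_type', 'volumes_availability_zone',
                      'volumes_size'):
            template.pop(field, None)
    return template

assert 'volumes_size' not in _example_strip_volume_fields(
    {'volumes_per_node': 0, 'volumes_size': 2, 'flavor_id': '2'})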
import sys from osc_lib.command import command from osc_lib import utils as osc_utils from oslo_log import log as logging from saharaclient.osc import utils NGT_FIELDS = ['id', 'name', 'plugin_name', 'plugin_version', 'node_processes', 'description', 'auto_security_group', 'security_groups', 'availability_zone', 'flavor_id', 'floating_ip_pool', 'volumes_per_node', 'volumes_size', 'volume_type', 'volume_local_to_instance', 'volume_mount_prefix', 'volumes_availability_zone', 'use_autoconfig', 'is_proxy_gateway', 'is_default', 'is_protected', 'is_public'] def _format_ngt_output(data): data['node_processes'] = osc_utils.format_list(data['node_processes']) data['plugin_version'] = data.pop('hadoop_version') if data['volumes_per_node'] == 0: del data['volume_local_to_instance'] del data['volume_mount_prefix'] del data['volume_type'], del data['volumes_availability_zone'] del data['volumes_size'] class CreateNodeGroupTemplate(command.ShowOne, utils.NodeGroupTemplatesUtils): """Creates node group template""" log = logging.getLogger(__name__ + ".CreateNodeGroupTemplate") def get_parser(self, prog_name): parser = super(CreateNodeGroupTemplate, self).get_parser(prog_name) parser.add_argument( '--name', metavar="", help="Name of the node group template [REQUIRED if JSON is not " "provided]", ) parser.add_argument( '--plugin', metavar="", help="Name of the plugin [REQUIRED if JSON is not provided]" ) parser.add_argument( '--plugin-version', metavar="", help="Version of the plugin [REQUIRED if JSON is not provided]" ) parser.add_argument( '--processes', metavar="", nargs="+", help="List of the processes that will be launched on each " "instance [REQUIRED if JSON is not provided]" ) parser.add_argument( '--flavor', metavar="", help="Name or ID of the flavor [REQUIRED if JSON is not provided]" ) parser.add_argument( '--security-groups', metavar="", nargs="+", help="List of the security groups for the instances in this node " "group" ) parser.add_argument( '--auto-security-group', action='store_true', default=False, help='Indicates if an additional security group should be created ' 'for the node group', ) parser.add_argument( '--availability-zone', metavar="", help="Name of the availability zone where instances " "will be created" ) parser.add_argument( '--floating-ip-pool', metavar="", help="ID of the floating IP pool" ) parser.add_argument( '--volumes-per-node', type=int, metavar="", help="Number of volumes attached to every node" ) parser.add_argument( '--volumes-size', type=int, metavar="", help='Size of volumes attached to node (GB). ' 'This parameter will be taken into account only ' 'if volumes-per-node is set and non-zero' ) parser.add_argument( '--volumes-type', metavar="", help='Type of the volumes. ' 'This parameter will be taken into account only ' 'if volumes-per-node is set and non-zero' ) parser.add_argument( '--volumes-availability-zone', metavar="", help='Name of the availability zone where volumes will be created.' ' This parameter will be taken into account only ' 'if volumes-per-node is set and non-zero' ) parser.add_argument( '--volumes-mount-prefix', metavar="", help='Prefix for mount point directory. ' 'This parameter will be taken into account only ' 'if volumes-per-node is set and non-zero' ) parser.add_argument( '--volumes-locality', action='store_true', default=False, help='If enabled, instance and attached volumes will be created on' ' the same physical host. 
This parameter will be taken into ' 'account only if volumes-per-node is set and non-zero', ) parser.add_argument( '--description', metavar="", help='Description of the node group template' ) parser.add_argument( '--autoconfig', action='store_true', default=False, help='If enabled, instances of the node group will be ' 'automatically configured', ) parser.add_argument( '--proxy-gateway', action='store_true', default=False, help='If enabled, instances of the node group will be used to ' 'access other instances in the cluster', ) parser.add_argument( '--public', action='store_true', default=False, help='Make the node group template public (Visible from other ' 'projects)', ) parser.add_argument( '--protected', action='store_true', default=False, help='Make the node group template protected', ) parser.add_argument( '--json', metavar='', help='JSON representation of the node group template. Other ' 'arguments will not be taken into account if this one is ' 'provided' ) parser.add_argument( '--shares', metavar='', help='JSON representation of the manila shares' ) parser.add_argument( '--configs', metavar='', help='JSON representation of the node group template configs' ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = self._create_take_action(client, self.app, parsed_args) _format_ngt_output(data) data = utils.prepare_data(data, NGT_FIELDS) return self.dict2columns(data) class ListNodeGroupTemplates(command.Lister, utils.NodeGroupTemplatesUtils): """Lists node group templates""" log = logging.getLogger(__name__ + ".ListNodeGroupTemplates") def get_parser(self, prog_name): parser = super(ListNodeGroupTemplates, self).get_parser(prog_name) parser.add_argument( '--long', action='store_true', default=False, help='List additional fields in output', ) parser.add_argument( '--plugin', metavar="", help="List node group templates for specific plugin" ) parser.add_argument( '--plugin-version', metavar="", help="List node group templates with specific version of the " "plugin" ) parser.add_argument( '--name', metavar="", help="List node group templates with specific substring in the " "name" ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing return self._list_take_action(client, self.app, parsed_args) class ShowNodeGroupTemplate(command.ShowOne, utils.NodeGroupTemplatesUtils): """Display node group template details""" log = logging.getLogger(__name__ + ".ShowNodeGroupTemplate") def get_parser(self, prog_name): parser = super(ShowNodeGroupTemplate, self).get_parser(prog_name) parser.add_argument( "node_group_template", metavar="", help="Name or id of the node group template to display", ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = utils.get_resource( client.node_group_templates, parsed_args.node_group_template).to_dict() _format_ngt_output(data) data = utils.prepare_data(data, NGT_FIELDS) return self.dict2columns(data) class DeleteNodeGroupTemplate(command.Command, utils.NodeGroupTemplatesUtils): """Deletes node group template""" log = logging.getLogger(__name__ + ".DeleteNodeGroupTemplate") def get_parser(self, prog_name): parser = super(DeleteNodeGroupTemplate, self).get_parser(prog_name) parser.add_argument( "node_group_template", metavar="", nargs="+", help="Name(s) or id(s) of the node group 
template(s) to delete", ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing for ngt in parsed_args.node_group_template: ngt_id = utils.get_resource_id( client.node_group_templates, ngt) client.node_group_templates.delete(ngt_id) sys.stdout.write( 'Node group template "{ngt}" has been removed ' 'successfully.\n'.format(ngt=ngt)) class UpdateNodeGroupTemplate(command.ShowOne, utils.NodeGroupTemplatesUtils): """Updates node group template""" log = logging.getLogger(__name__ + ".UpdateNodeGroupTemplate") def get_parser(self, prog_name): parser = super(UpdateNodeGroupTemplate, self).get_parser(prog_name) parser.add_argument( 'node_group_template', metavar="", help="Name or ID of the node group template", ) parser.add_argument( '--name', metavar="", help="New name of the node group template", ) parser.add_argument( '--plugin', metavar="", help="Name of the plugin" ) parser.add_argument( '--plugin-version', metavar="", help="Version of the plugin" ) parser.add_argument( '--processes', metavar="", nargs="+", help="List of the processes that will be launched on each " "instance" ) parser.add_argument( '--security-groups', metavar="", nargs="+", help="List of the security groups for the instances in this node " "group" ) autosecurity = parser.add_mutually_exclusive_group() autosecurity.add_argument( '--auto-security-group-enable', action='store_true', help='Additional security group should be created ' 'for the node group', dest='use_auto_security_group' ) autosecurity.add_argument( '--auto-security-group-disable', action='store_false', help='Additional security group should not be created ' 'for the node group', dest='use_auto_security_group' ) parser.add_argument( '--availability-zone', metavar="", help="Name of the availability zone where instances " "will be created" ) parser.add_argument( '--flavor', metavar="", help="Name or ID of the flavor" ) parser.add_argument( '--floating-ip-pool', metavar="", help="ID of the floating IP pool" ) parser.add_argument( '--volumes-per-node', type=int, metavar="", help="Number of volumes attached to every node" ) parser.add_argument( '--volumes-size', type=int, metavar="", help='Size of volumes attached to node (GB). ' 'This parameter will be taken into account only ' 'if volumes-per-node is set and non-zero' ) parser.add_argument( '--volumes-type', metavar="", help='Type of the volumes. ' 'This parameter will be taken into account only ' 'if volumes-per-node is set and non-zero' ) parser.add_argument( '--volumes-availability-zone', metavar="", help='Name of the availability zone where volumes will be created.' ' This parameter will be taken into account only ' 'if volumes-per-node is set and non-zero' ) parser.add_argument( '--volumes-mount-prefix', metavar="", help='Prefix for mount point directory. ' 'This parameter will be taken into account only ' 'if volumes-per-node is set and non-zero' ) volumelocality = parser.add_mutually_exclusive_group() volumelocality.add_argument( '--volumes-locality-enable', action='store_true', help='Instance and attached volumes will be created on ' 'the same physical host. This parameter will be taken into ' 'account only if volumes-per-node is set and non-zero', dest='volume_locality' ) volumelocality.add_argument( '--volumes-locality-disable', action='store_false', help='Instance and attached volumes creation on the same physical ' 'host will not be regulated. 
This parameter will be taken' 'into account only if volumes-per-node is set and non-zero', dest='volume_locality' ) parser.add_argument( '--description', metavar="", help='Description of the node group template' ) autoconfig = parser.add_mutually_exclusive_group() autoconfig.add_argument( '--autoconfig-enable', action='store_true', help='Instances of the node group will be ' 'automatically configured', dest='use_autoconfig' ) autoconfig.add_argument( '--autoconfig-disable', action='store_false', help='Instances of the node group will not be ' 'automatically configured', dest='use_autoconfig' ) proxy = parser.add_mutually_exclusive_group() proxy.add_argument( '--proxy-gateway-enable', action='store_true', help='Instances of the node group will be used to ' 'access other instances in the cluster', dest='is_proxy_gateway' ) proxy.add_argument( '--proxy-gateway-disable', action='store_false', help='Instances of the node group will not be used to ' 'access other instances in the cluster', dest='is_proxy_gateway' ) public = parser.add_mutually_exclusive_group() public.add_argument( '--public', action='store_true', help='Make the node group template public ' '(Visible from other projects)', dest='is_public' ) public.add_argument( '--private', action='store_false', help='Make the node group template private ' '(Visible only from this project)', dest='is_public' ) protected = parser.add_mutually_exclusive_group() protected.add_argument( '--protected', action='store_true', help='Make the node group template protected', dest='is_protected' ) protected.add_argument( '--unprotected', action='store_false', help='Make the node group template unprotected', dest='is_protected' ) parser.add_argument( '--json', metavar='', help='JSON representation of the node group template update ' 'fields. 
Other arguments will not be taken into account if ' 'this one is provided' ) parser.add_argument( '--shares', metavar='', help='JSON representation of the manila shares' ) parser.add_argument( '--configs', metavar='', help='JSON representation of the node group template configs' ) parser.set_defaults(is_public=None, is_protected=None, is_proxy_gateway=None, volume_locality=None, use_auto_security_group=None, use_autoconfig=None) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = self._update_take_action(client, self.app, parsed_args) _format_ngt_output(data) data = utils.prepare_data(data, NGT_FIELDS) return self.dict2columns(data) class ImportNodeGroupTemplate(command.ShowOne, utils.NodeGroupTemplatesUtils): """Imports node group template""" log = logging.getLogger(__name__ + ".ImportNodeGroupTemplate") def get_parser(self, prog_name): parser = super(ImportNodeGroupTemplate, self).get_parser(prog_name) parser.add_argument( 'json', metavar="", help="JSON containing node group template", ) parser.add_argument( '--name', metavar="", help="Name of the node group template", ) parser.add_argument( '--security_groups', metavar="", help="Security groups of the node group template" ) parser.add_argument( '--floating_ip_pool', metavar="", help="Floating IP pool of the node group template" ) parser.add_argument( '--image_id', metavar="", required=True, help="Image ID of the node group template", ) parser.add_argument( '--flavor_id', metavar="", required=True, help="Flavor ID of the node group template", ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = self._import_take_action(client, parsed_args) _format_ngt_output(data) data = utils.prepare_data(data, NGT_FIELDS) return self.dict2columns(data) class ExportNodeGroupTemplate(command.Command, utils.NodeGroupTemplatesUtils): """Export node group template to JSON""" log = logging.getLogger(__name__ + ".ExportNodeGroupTemplate") def get_parser(self, prog_name): parser = super(ExportNodeGroupTemplate, self).get_parser(prog_name) parser.add_argument( "node_group_template", metavar="", help="Name or id of the node group template to export", ) parser.add_argument( "--file", metavar="", help="Name of the file the node group template should be exported to. " "If not provided, print to stdout", ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing self._export_take_action(client, parsed_args) python-saharaclient-3.1.0/saharaclient/osc/v1/__init__.py0000664000175000017500000000000013643576737023406 0ustar zuulzuul00000000000000python-saharaclient-3.1.0/saharaclient/osc/v1/cluster_templates.py0000664000175000017500000005415413643576737025421 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License.
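The cluster-template commands in the module that follows accept node groups as colon-separated "name:count" pairs and resolve each name through the node group template manager. A minimal standalone sketch of just the parsing step, assuming nothing beyond the standard library (the helper name is illustrative, not part of the module):

def parse_node_group_specs(specs):
    # Split each "name:count" pair on the first ':' only, mirroring the
    # split(':', 1) call used by _configure_node_groups below, so template
    # names that themselves contain ':' are still handled.
    pairs = dict(s.split(':', 1) for s in specs)
    return {name: int(count) for name, count in pairs.items()}

# parse_node_group_specs(['master:1', 'worker:3']) -> {'master': 1, 'worker': 3}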
import sys from osc_lib.command import command from osc_lib import exceptions from osc_lib import utils as osc_utils from oslo_log import log as logging from oslo_serialization import jsonutils as json from saharaclient.osc import utils CT_FIELDS = ['id', 'name', 'plugin_name', 'plugin_version', 'description', 'node_groups', 'anti_affinity', 'use_autoconfig', 'is_default', 'is_protected', 'is_public', 'domain_name'] def _format_node_groups_list(node_groups): return ', '.join( ['%s:%s' % (ng['name'], ng['count']) for ng in node_groups]) def _format_ct_output(app, data): data['plugin_version'] = data.pop('hadoop_version') data['node_groups'] = _format_node_groups_list(data['node_groups']) data['anti_affinity'] = osc_utils.format_list(data['anti_affinity']) def _configure_node_groups(app, node_groups, client): node_groups_list = dict( map(lambda x: x.split(':', 1), node_groups)) node_groups = [] plugins_versions = set() for name, count in node_groups_list.items(): ng = utils.get_resource(client.node_group_templates, name) node_groups.append({'name': ng.name, 'count': int(count), 'node_group_template_id': ng.id}) plugins_versions.add((ng.plugin_name, ng.hadoop_version)) if len(plugins_versions) != 1: raise exceptions.CommandError('Node groups with the same plugins ' 'and versions must be specified') plugin, plugin_version = plugins_versions.pop() return plugin, plugin_version, node_groups class CreateClusterTemplate(command.ShowOne): """Creates cluster template""" log = logging.getLogger(__name__ + ".CreateClusterTemplate") def get_parser(self, prog_name): parser = super(CreateClusterTemplate, self).get_parser(prog_name) parser.add_argument( '--name', metavar="", help="Name of the cluster template [REQUIRED if JSON is not " "provided]", ) parser.add_argument( '--node-groups', metavar="", nargs="+", help="List of the node groups (names or IDs) and numbers of " "instances for each one of them [REQUIRED if JSON is not " "provided]" ) parser.add_argument( '--anti-affinity', metavar="", nargs="+", help="List of processes that should be added to an anti-affinity " "group" ) parser.add_argument( '--description', metavar="", help='Description of the cluster template' ) parser.add_argument( '--autoconfig', action='store_true', default=False, help='If enabled, instances of the cluster will be ' 'automatically configured', ) parser.add_argument( '--public', action='store_true', default=False, help='Make the cluster template public (Visible from other ' 'projects)', ) parser.add_argument( '--protected', action='store_true', default=False, help='Make the cluster template protected', ) parser.add_argument( '--json', metavar='', help='JSON representation of the cluster template. Other ' 'arguments will not be taken into account if this one is ' 'provided' ) parser.add_argument( '--shares', metavar='', help='JSON representation of the manila shares' ) parser.add_argument( '--configs', metavar='', help='JSON representation of the cluster template configs' ) parser.add_argument( '--domain-name', metavar='', help='Domain name for instances of this cluster template.
This ' 'option is available if \'use_designate\' config is True' ) return parser def _take_action(self, client, parsed_args): if parsed_args.json: blob = osc_utils.read_blob_file_contents(parsed_args.json) try: template = json.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'template from file %s: %s' % (parsed_args.json, e)) if 'neutron_management_network' in template: template['net_id'] = template.pop('neutron_management_network') data = client.cluster_templates.create(**template).to_dict() else: if not parsed_args.name or not parsed_args.node_groups: raise exceptions.CommandError( 'At least the --name and --node-groups arguments should be ' 'specified, or a JSON template should be provided with the ' '--json argument') configs = None if parsed_args.configs: blob = osc_utils.read_blob_file_contents(parsed_args.configs) try: configs = json.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'configs from file %s: %s' % (parsed_args.configs, e)) shares = None if parsed_args.shares: blob = osc_utils.read_blob_file_contents(parsed_args.shares) try: shares = json.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'shares from file %s: %s' % (parsed_args.shares, e)) plugin, plugin_version, node_groups = ( utils._cluster_templates_configure_ng(self.app, parsed_args.node_groups, client)) data = utils.create_cluster_template(self.app, client, plugin, plugin_version, parsed_args, configs, shares, node_groups) return data def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = self._take_action(client, parsed_args) _format_ct_output(self.app, data) data = utils.prepare_data(data, CT_FIELDS) return self.dict2columns(data) class ListClusterTemplates(command.Lister): """Lists cluster templates""" log = logging.getLogger(__name__ + ".ListClusterTemplates") def get_parser(self, prog_name): parser = super(ListClusterTemplates, self).get_parser(prog_name) parser.add_argument( '--long', action='store_true', default=False, help='List additional fields in output', ) parser.add_argument( '--plugin', metavar="", help="List cluster templates for specific plugin" ) parser.add_argument( '--plugin-version', metavar="", help="List cluster templates with specific version of the " "plugin" ) parser.add_argument( '--name', metavar="", help="List cluster templates with specific substring in the " "name" ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing search_opts = {} if parsed_args.plugin: search_opts['plugin_name'] = parsed_args.plugin if parsed_args.plugin_version: if utils.is_api_v2(self.app): search_opts['plugin_version'] = parsed_args.plugin_version else: search_opts['hadoop_version'] = parsed_args.plugin_version data = client.cluster_templates.list(search_opts=search_opts) if parsed_args.name: data = utils.get_by_name_substring(data, parsed_args.name) if parsed_args.long: columns = ('name', 'id', 'plugin_name', 'hadoop_version', 'node_groups', 'description') column_headers = utils.prepare_column_headers( columns, {'hadoop_version': 'plugin_version'}) else: columns = ('name', 'id', 'plugin_name', 'hadoop_version') column_headers = utils.prepare_column_headers( columns, {'hadoop_version': 'plugin_version'}) return ( column_headers, (osc_utils.get_item_properties( s, columns, formatters={
'node_groups': _format_node_groups_list } ) for s in data) ) class ShowClusterTemplate(command.ShowOne): """Display cluster template details""" log = logging.getLogger(__name__ + ".ShowClusterTemplate") def get_parser(self, prog_name): parser = super(ShowClusterTemplate, self).get_parser(prog_name) parser.add_argument( "cluster_template", metavar="", help="Name or id of the cluster template to display", ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = utils.get_resource( client.cluster_templates, parsed_args.cluster_template).to_dict() _format_ct_output(self.app, data) data = utils.prepare_data(data, CT_FIELDS) return self.dict2columns(data) class DeleteClusterTemplate(command.Command): """Deletes cluster template""" log = logging.getLogger(__name__ + ".DeleteClusterTemplate") def get_parser(self, prog_name): parser = super(DeleteClusterTemplate, self).get_parser(prog_name) parser.add_argument( "cluster_template", metavar="", nargs="+", help="Name(s) or id(s) of the cluster template(s) to delete", ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing for ct in parsed_args.cluster_template: ct_id = utils.get_resource_id(client.cluster_templates, ct) client.cluster_templates.delete(ct_id) sys.stdout.write( 'Cluster template "{ct}" has been removed ' 'successfully.\n'.format(ct=ct)) class UpdateClusterTemplate(command.ShowOne): """Updates cluster template""" log = logging.getLogger(__name__ + ".UpdateClusterTemplate") def get_parser(self, prog_name): parser = super(UpdateClusterTemplate, self).get_parser(prog_name) parser.add_argument( 'cluster_template', metavar="", help="Name or ID of the cluster template [REQUIRED]", ) parser.add_argument( '--name', metavar="", help="New name of the cluster template", ) parser.add_argument( '--node-groups', metavar="", nargs="+", help="List of the node groups (names or IDs) and numbers of " "instances for each one of them" ) parser.add_argument( '--anti-affinity', metavar="", nargs="+", help="List of processes that should be added to an anti-affinity " "group" ) parser.add_argument( '--description', metavar="", help='Description of the cluster template' ) autoconfig = parser.add_mutually_exclusive_group() autoconfig.add_argument( '--autoconfig-enable', action='store_true', help='Instances of the cluster will be ' 'automatically configured', dest='use_autoconfig' ) autoconfig.add_argument( '--autoconfig-disable', action='store_false', help='Instances of the cluster will not be ' 'automatically configured', dest='use_autoconfig' ) public = parser.add_mutually_exclusive_group() public.add_argument( '--public', action='store_true', help='Make the cluster template public ' '(Visible from other projects)', dest='is_public' ) public.add_argument( '--private', action='store_false', help='Make the cluster template private ' '(Visible only from this tenant)', dest='is_public' ) protected = parser.add_mutually_exclusive_group() protected.add_argument( '--protected', action='store_true', help='Make the cluster template protected', dest='is_protected' ) protected.add_argument( '--unprotected', action='store_false', help='Make the cluster template unprotected', dest='is_protected' ) parser.add_argument( '--json', metavar='', help='JSON representation of the cluster template.
Other ' 'arguments will not be taken into account if this one is ' 'provided' ) parser.add_argument( '--shares', metavar='', help='JSON representation of the manila shares' ) parser.add_argument( '--configs', metavar='', help='JSON representation of the cluster template configs' ) parser.add_argument( '--domain-name', metavar='', default=None, help='Domain name for instances of this cluster template. This ' 'option is available if \'use_designate\' config is True' ) parser.set_defaults(is_public=None, is_protected=None, use_autoconfig=None) return parser def _take_action(self, client, parsed_args, ct_id): if parsed_args.json: blob = osc_utils.read_blob_file_contents(parsed_args.json) try: template = json.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'template from file %s: %s' % (parsed_args.json, e)) data = client.cluster_templates.update( ct_id, **template).to_dict() else: plugin, plugin_version, node_groups = None, None, None if parsed_args.node_groups: plugin, plugin_version, node_groups = ( utils._cluster_templates_configure_ng( self.app, parsed_args.node_groups, client)) configs = None if parsed_args.configs: blob = osc_utils.read_blob_file_contents(parsed_args.configs) try: configs = json.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'configs from file %s: %s' % (parsed_args.configs, e)) shares = None if parsed_args.shares: blob = osc_utils.read_blob_file_contents(parsed_args.shares) try: shares = json.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'shares from file %s: %s' % (parsed_args.shares, e)) data = utils.update_cluster_template(self.app, client, plugin, plugin_version, parsed_args, configs, shares, node_groups, ct_id) return data def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing ct_id = utils.get_resource_id( client.cluster_templates, parsed_args.cluster_template) data = self._take_action(client, parsed_args, ct_id) _format_ct_output(self.app, data) data = utils.prepare_data(data, CT_FIELDS) return self.dict2columns(data) class ImportClusterTemplate(command.ShowOne): """Imports cluster template""" log = logging.getLogger(__name__ + ".ImportClusterTemplate") def get_parser(self, prog_name): parser = super(ImportClusterTemplate, self).get_parser(prog_name) parser.add_argument( 'json', metavar="", help="JSON containing cluster template", ) parser.add_argument( '--name', metavar="", help="Name of the cluster template", ) parser.add_argument( '--default-image-id', metavar="", help="Default image ID to be used", ) parser.add_argument( '--node-groups', metavar="", nargs="+", required=True, help="List of the node groups (names or IDs) and numbers of " "instances for each one of them" ) return parser def _take_action(self, client, parsed_args): if (not parsed_args.node_groups): raise exceptions.CommandError('--node-groups should be specified') blob = osc_utils.read_blob_file_contents(parsed_args.json) try: template = json.loads(blob) except ValueError as e: raise exceptions.CommandError( 'An error occurred when reading ' 'template from file %s: %s' % (parsed_args.json, e)) if parsed_args.default_image_id: template['cluster_template']['default_image_id'] = ( parsed_args.default_image_id) else: template['cluster_template']['default_image_id'] = None if parsed_args.name: template['cluster_template']['name'] = parsed_args.name if
'neutron_management_network' in template['cluster_template']: template['cluster_template']['net_id'] = ( template['cluster_template'].pop('neutron_management_network')) plugin, plugin_version, node_groups = ( utils._cluster_templates_configure_ng( self.app, parsed_args.node_groups, client)) if (('plugin_version' in template['cluster_template'] and template['cluster_template']['plugin_version'] != plugin_version) or ('plugin' in template['cluster_template'] and template['cluster_template']['plugin'] != plugin)): raise exceptions.CommandError( 'Plugin or plugin version do not match between template ' 'and given node group templates') template['cluster_template']['node_groups'] = node_groups data = client.cluster_templates.create( **template['cluster_template']).to_dict() return data def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = self._take_action(client, parsed_args) _format_ct_output(self.app, data) data = utils.prepare_data(data, CT_FIELDS) return self.dict2columns(data) class ExportClusterTemplate(command.Command): """Export cluster template to JSON""" log = logging.getLogger(__name__ + ".ExportClusterTemplate") def get_parser(self, prog_name): parser = super(ExportClusterTemplate, self).get_parser(prog_name) parser.add_argument( "cluster_template", metavar="", help="Name or id of the cluster template to export", ) parser.add_argument( "--file", metavar="", help="Name of the file the cluster template should be exported to. " "If not provided, print to stdout" ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing ct_id = utils.get_resource_id( client.cluster_templates, parsed_args.cluster_template) response = client.cluster_templates.export(ct_id) result = json.dumps(response._info, indent=4) + "\n" if parsed_args.file: with open(parsed_args.file, "w+") as file: file.write(result) else: sys.stdout.write(result) python-saharaclient-3.1.0/saharaclient/osc/v1/images.py0000664000175000017500000002246013643576737023122 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License.
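Both export commands above share the same write-or-print tail: serialize the response once, then either persist it to the named file or stream it to stdout. A minimal sketch of that pattern in isolation, with an illustrative payload (the helper name is not part of the modules):

import json
import sys

def write_or_print(payload, path=None):
    # Serialize with the same indent=4 formatting the export commands use,
    # then write to the requested file or fall back to stdout.
    result = json.dumps(payload, indent=4) + "\n"
    if path:
        with open(path, "w+") as out:
            out.write(result)
    else:
        sys.stdout.write(result)

# write_or_print({'cluster_template': {'name': 'demo'}})  # prints formatted JSON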
import sys from osc_lib.command import command from osc_lib import utils as osc_utils from oslo_log import log as logging from saharaclient.osc import utils IMAGE_FIELDS = ['name', 'id', 'username', 'tags', 'status', 'description'] class ListImages(command.Lister): """Lists registered images""" log = logging.getLogger(__name__ + ".ListImages") def get_parser(self, prog_name): parser = super(ListImages, self).get_parser(prog_name) parser.add_argument( '--long', action='store_true', default=False, help='List additional fields in output', ) parser.add_argument( '--name', metavar="", help="Substring to match in the image name" ) parser.add_argument( '--tags', metavar="", nargs="+", help="List images with specific tag(s)" ) parser.add_argument( '--username', metavar="", help="List images with specific username" ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing search_opts = {'tags': parsed_args.tags} if parsed_args.tags else {} data = client.images.list(search_opts=search_opts) if parsed_args.name: data = utils.get_by_name_substring(data, parsed_args.name) if parsed_args.username: data = [i for i in data if parsed_args.username in i.username] if parsed_args.long: columns = IMAGE_FIELDS column_headers = [c.capitalize() for c in columns] else: columns = ('name', 'id', 'username', 'tags') column_headers = [c.capitalize() for c in columns] return ( column_headers, (osc_utils.get_item_properties( s, columns, formatters={ 'tags': osc_utils.format_list }, ) for s in data) ) class ShowImage(command.ShowOne): """Display image details""" log = logging.getLogger(__name__ + ".ShowImage") def get_parser(self, prog_name): parser = super(ShowImage, self).get_parser(prog_name) parser.add_argument( "image", metavar="", help="Name or id of the image to display", ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = utils.get_resource( client.images, parsed_args.image).to_dict() data['tags'] = osc_utils.format_list(data['tags']) data = utils.prepare_data(data, IMAGE_FIELDS) return self.dict2columns(data) class RegisterImage(command.ShowOne): """Register an image""" log = logging.getLogger(__name__ + ".RegisterImage") def get_parser(self, prog_name): parser = super(RegisterImage, self).get_parser(prog_name) parser.add_argument( "image", metavar="", help="Name or ID of the image to register", ) parser.add_argument( "--username", metavar="", help="Username of privileged user in the image [REQUIRED]", required=True ) parser.add_argument( "--description", metavar="", help="Description of the image.
If not provided, description of " "the image will be reset to empty", ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing image_client = self.app.client_manager.image image_id = image_client.find_image(parsed_args.image, ignore_missing=False).id data = client.images.update_image( image_id, user_name=parsed_args.username, desc=parsed_args.description).image data['tags'] = osc_utils.format_list(data['tags']) data = utils.prepare_data(data, IMAGE_FIELDS) return self.dict2columns(data) class UnregisterImage(command.Command): """Unregister image(s)""" log = logging.getLogger(__name__ + ".UnregisterImage") def get_parser(self, prog_name): parser = super(UnregisterImage, self).get_parser(prog_name) parser.add_argument( "image", metavar="", nargs="+", help="Name(s) or id(s) of the image(s) to unregister", ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing for image in parsed_args.image: image_id = utils.get_resource_id(client.images, image) client.images.unregister_image(image_id) sys.stdout.write( 'Image "{image}" has been unregistered ' 'successfully.\n'.format(image=image)) class SetImageTags(command.ShowOne): """Set image tags (Replace current image tags with provided ones)""" log = logging.getLogger(__name__ + ".SetImageTags") def get_parser(self, prog_name): parser = super(SetImageTags, self).get_parser(prog_name) parser.add_argument( "image", metavar="", help="Name or id of the image", ) parser.add_argument( '--tags', metavar="", nargs="+", required=True, help="Tag(s) to set [REQUIRED]" ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing image_id = utils.get_resource_id(client.images, parsed_args.image) data = client.images.update_tags(image_id, parsed_args.tags).to_dict() data['tags'] = osc_utils.format_list(data['tags']) data = utils.prepare_data(data, IMAGE_FIELDS) return self.dict2columns(data) class AddImageTags(command.ShowOne): """Add image tags""" log = logging.getLogger(__name__ + ".AddImageTags") def get_parser(self, prog_name): parser = super(AddImageTags, self).get_parser(prog_name) parser.add_argument( "image", metavar="", help="Name or id of the image", ) parser.add_argument( '--tags', metavar="", nargs="+", required=True, help="Tag(s) to add [REQUIRED]" ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing image = utils.get_resource(client.images, parsed_args.image) parsed_args.tags.extend(image.tags) data = client.images.update_tags( image.id, list(set(parsed_args.tags))).to_dict() data['tags'] = osc_utils.format_list(data['tags']) data = utils.prepare_data(data, IMAGE_FIELDS) return self.dict2columns(data) class RemoveImageTags(command.ShowOne): """Remove image tags""" log = logging.getLogger(__name__ + ".RemoveImageTags") def get_parser(self, prog_name): parser = super(RemoveImageTags, self).get_parser(prog_name) parser.add_argument( "image", metavar="", help="Name or id of the image", ) group = parser.add_mutually_exclusive_group() group.add_argument( '--tags', metavar="", nargs="+", help="Tag(s) to remove" ) group.add_argument( '--all', action='store_true', default=False, help='Remove all tags from image', ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)",
parsed_args) client = self.app.client_manager.data_processing image = utils.get_resource(client.images, parsed_args.image) if parsed_args.all: data = client.images.update_tags(image.id, []).to_dict() else: parsed_args.tags = parsed_args.tags or [] new_tags = list(set(image.tags) - set(parsed_args.tags)) data = client.images.update_tags(image.id, new_tags).to_dict() data['tags'] = osc_utils.format_list(data['tags']) data = utils.prepare_data(data, IMAGE_FIELDS) return self.dict2columns(data) python-saharaclient-3.1.0/saharaclient/osc/v1/data_sources.py0000664000175000017500000003500113643576737024334 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import sys from osc_lib.command import command from osc_lib import utils as osc_utils from oslo_log import log as logging from saharaclient.osc import utils DATA_SOURCE_FIELDS = ['name', 'id', 'type', 'url', 'description', 'is_public', 'is_protected'] DATA_SOURCE_CHOICES = ["swift", "hdfs", "maprfs", "manila", "s3"] class CreateDataSource(command.ShowOne): """Creates data source""" log = logging.getLogger(__name__ + ".CreateDataSource") def get_parser(self, prog_name): parser = super(CreateDataSource, self).get_parser(prog_name) parser.add_argument( 'name', metavar="", help="Name of the data source", ) parser.add_argument( '--type', metavar="", choices=DATA_SOURCE_CHOICES, help="Type of the data source (%s) " "[REQUIRED]" % ', '.join(DATA_SOURCE_CHOICES), required=True ) parser.add_argument( '--url', metavar="", help="URL for the data source [REQUIRED]", required=True ) username = parser.add_mutually_exclusive_group() username.add_argument( '--username', metavar="", help="Username for accessing the data source URL" ) username.add_argument( '--access-key', metavar='', help='S3 access key for accessing the data source URL', ) password = parser.add_mutually_exclusive_group() password.add_argument( '--password', metavar="", help="Password for accessing the data source URL" ) password.add_argument( '--secret-key', metavar='', help='S3 secret key for accessing the data source URL', ) parser.add_argument( '--s3-endpoint', metavar='', help='S3 endpoint for accessing the data source URL (ignored if ' 'data source not in S3)', ) enable_s3_ssl = parser.add_mutually_exclusive_group() enable_s3_ssl.add_argument( '--enable-s3-ssl', action='store_true', help='Enable access to S3 endpoint using SSL (ignored if data ' 'source not in S3)' ) enable_s3_ssl.add_argument( '--disable-s3-ssl', action='store_false', help='Disable access to S3 endpoint using SSL (ignored if data ' 'source not in S3)' ) s3_bucket_in_path = parser.add_mutually_exclusive_group() s3_bucket_in_path.add_argument( '--enable-s3-bucket-in-path', action='store_true', help='Access S3 endpoint using bucket name in path ' '(ignored if data source not in S3)' ) s3_bucket_in_path.add_argument( '--disable-s3-bucket-in-path', action='store_false', help='Access S3 endpoint using bucket name in path ' '(ignored if data source not in S3)' ) 
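# Note on the enable/disable pairs above: each mutually exclusive group
# deliberately stores into two *different* dests ('enable_s3_ssl' defaults
# to False via store_true, 'disable_s3_ssl' defaults to True via
# store_false, and likewise for the bucket-in-path pair), so take_action()
# below can tell "flag explicitly passed" apart from "left at defaults" by
# comparing the two attributes: they are only equal when one of the two
# options was actually given on the command line.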
parser.add_argument( '--description', metavar="", help="Description of the data source" ) parser.add_argument( '--public', action='store_true', default=False, help='Make the data source public', ) parser.add_argument( '--protected', action='store_true', default=False, help='Make the data source protected', ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing s3_credentials = {} if parsed_args.access_key: s3_credentials['accesskey'] = parsed_args.access_key if parsed_args.secret_key: s3_credentials['secretkey'] = parsed_args.secret_key if parsed_args.s3_endpoint: s3_credentials['endpoint'] = parsed_args.s3_endpoint if parsed_args.enable_s3_ssl == parsed_args.disable_s3_ssl: s3_credentials['ssl'] = parsed_args.enable_s3_ssl if (parsed_args.enable_s3_bucket_in_path == parsed_args.disable_s3_bucket_in_path): s3_credentials['bucket_in_path'] = ( parsed_args.enable_s3_bucket_in_path) s3_credentials = s3_credentials or None description = parsed_args.description or '' data = client.data_sources.create( name=parsed_args.name, description=description, data_source_type=parsed_args.type, url=parsed_args.url, credential_user=parsed_args.username, credential_pass=parsed_args.password, is_public=parsed_args.public, is_protected=parsed_args.protected, s3_credentials=s3_credentials ).to_dict() data = utils.prepare_data(data, DATA_SOURCE_FIELDS) return self.dict2columns(data) class ListDataSources(command.Lister): """Lists data sources""" log = logging.getLogger(__name__ + ".ListDataSources") def get_parser(self, prog_name): parser = super(ListDataSources, self).get_parser(prog_name) parser.add_argument( '--long', action='store_true', default=False, help='List additional fields in output', ) parser.add_argument( '--type', metavar="", choices=DATA_SOURCE_CHOICES, help="List data sources of specific type " "(%s)" % ', '.join(DATA_SOURCE_CHOICES) ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing search_opts = {'type': parsed_args.type} if parsed_args.type else {} data = client.data_sources.list(search_opts=search_opts) if parsed_args.long: columns = DATA_SOURCE_FIELDS column_headers = utils.prepare_column_headers(columns) else: columns = ('name', 'id', 'type') column_headers = utils.prepare_column_headers(columns) return ( column_headers, (osc_utils.get_item_properties( s, columns ) for s in data) ) class ShowDataSource(command.ShowOne): """Display data source details""" log = logging.getLogger(__name__ + ".ShowDataSource") def get_parser(self, prog_name): parser = super(ShowDataSource, self).get_parser(prog_name) parser.add_argument( "data_source", metavar="", help="Name or id of the data source to display", ) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing data = utils.get_resource( client.data_sources, parsed_args.data_source).to_dict() data = utils.prepare_data(data, DATA_SOURCE_FIELDS) return self.dict2columns(data) class DeleteDataSource(command.Command): """Delete data source""" log = logging.getLogger(__name__ + ".DeleteDataSource") def get_parser(self, prog_name): parser = super(DeleteDataSource, self).get_parser(prog_name) parser.add_argument( "data_source", metavar="", nargs="+", help="Name(s) or id(s) of the data source(s) to delete", ) return parser def take_action(self, parsed_args): 
self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing for ds in parsed_args.data_source: data_source_id = utils.get_resource_id( client.data_sources, ds) client.data_sources.delete(data_source_id) sys.stdout.write( 'Data Source "{ds}" has been removed ' 'successfully.\n'.format(ds=ds)) class UpdateDataSource(command.ShowOne): """Update data source""" log = logging.getLogger(__name__ + ".UpdateDataSource") def get_parser(self, prog_name): parser = super(UpdateDataSource, self).get_parser(prog_name) parser.add_argument( 'data_source', metavar="", help="Name or id of the data source", ) parser.add_argument( '--name', metavar="", help="New name of the data source", ) parser.add_argument( '--type', metavar="", choices=DATA_SOURCE_CHOICES, help="Type of the data source " "(%s)" % ', '.join(DATA_SOURCE_CHOICES) ) parser.add_argument( '--url', metavar="", help="URL for the data source" ) username = parser.add_mutually_exclusive_group() username.add_argument( '--username', metavar="", help="Username for accessing the data source URL" ) username.add_argument( '--access-key', metavar='', help='S3 access key for accessing the data source URL', ) password = parser.add_mutually_exclusive_group() password.add_argument( '--password', metavar="", help="Password for accessing the data source URL" ) password.add_argument( '--secret-key', metavar='', help='S3 secret key for accessing the data source URL', ) parser.add_argument( '--s3-endpoint', metavar='', help='S3 endpoint for accessing the data source URL (ignored if ' 'data source not in S3)', ) enable_s3_ssl = parser.add_mutually_exclusive_group() enable_s3_ssl.add_argument( '--enable-s3-ssl', action='store_true', help='Enable access to S3 endpoint using SSL (ignored if data ' 'source not in S3)' ) enable_s3_ssl.add_argument( '--disable-s3-ssl', action='store_false', help='Disable access to S3 endpoint using SSL (ignored if data ' 'source not in S3)' ) s3_bucket_in_path = parser.add_mutually_exclusive_group() s3_bucket_in_path.add_argument( '--enable-s3-bucket-in-path', action='store_true', help='Access S3 endpoint using bucket name in path ' '(ignored if data source not in S3)' ) s3_bucket_in_path.add_argument( '--disable-s3-bucket-in-path', action='store_false', help='Access S3 endpoint using bucket name in path ' '(ignored if data source not in S3)' ) parser.add_argument( '--description', metavar="", help="Description of the data source" ) public = parser.add_mutually_exclusive_group() public.add_argument( '--public', action='store_true', dest='is_public', help='Make the data source public (Visible from other projects)', ) public.add_argument( '--private', action='store_false', dest='is_public', help='Make the data source private (Visible only from this ' 'tenant)', ) protected = parser.add_mutually_exclusive_group() protected.add_argument( '--protected', action='store_true', dest='is_protected', help='Make the data source protected', ) protected.add_argument( '--unprotected', action='store_false', dest='is_protected', help='Make the data source unprotected', ) parser.set_defaults(is_public=None, is_protected=None) return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)", parsed_args) client = self.app.client_manager.data_processing credentials = {} if parsed_args.type == 'swift': if parsed_args.username: credentials['user'] = parsed_args.username if parsed_args.password: credentials['password'] = parsed_args.password elif parsed_args.type == 's3': if parsed_args.access_key: 
credentials['accesskey'] = parsed_args.access_key if parsed_args.secret_key: credentials['secretkey'] = parsed_args.secret_key if parsed_args.s3_endpoint: credentials['endpoint'] = parsed_args.s3_endpoint if parsed_args.enable_s3_ssl == parsed_args.disable_s3_ssl: credentials['ssl'] = parsed_args.enable_s3_ssl if (parsed_args.enable_s3_bucket_in_path == parsed_args.disable_s3_bucket_in_path): credentials['bucket_in_path'] = ( parsed_args.enable_s3_bucket_in_path) if not credentials: credentials = None update_fields = utils.create_dict_from_kwargs( name=parsed_args.name, description=parsed_args.description, type=parsed_args.type, url=parsed_args.url, credentials=credentials, is_public=parsed_args.is_public, is_protected=parsed_args.is_protected) ds_id = utils.get_resource_id( client.data_sources, parsed_args.data_source) data = client.data_sources.update(ds_id, update_fields).data_source data = utils.prepare_data(data, DATA_SOURCE_FIELDS) return self.dict2columns(data) python-saharaclient-3.1.0/saharaclient/osc/__init__.py0000664000175000017500000000000013643576737023060 0ustar zuulzuul00000000000000python-saharaclient-3.1.0/saharaclient/client.py0000664000175000017500000000306113643576737022025 0ustar zuulzuul00000000000000# Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from oslo_utils import importutils class UnsupportedVersion(Exception): """Indication for using an unsupported version of the API. Indicates that the user is trying to use an unsupported version of the API. """ pass def get_client_class(version): version_map = { '1.0': 'saharaclient.api.client.Client', '1.1': 'saharaclient.api.client.Client', '2': 'saharaclient.api.client.ClientV2', } try: client_path = version_map[str(version)] except (KeyError, ValueError): supported_versions = ', '.join(version_map.keys()) msg = ("Invalid client version '%(version)s'; must be one of: " "%(versions)s") % {'version': version, 'versions': supported_versions} raise UnsupportedVersion(msg) return importutils.import_class(client_path) def Client(version, *args, **kwargs): client_class = get_client_class(version) return client_class(*args, **kwargs) python-saharaclient-3.1.0/saharaclient/version.py0000664000175000017500000000123213643576737022232 0ustar zuulzuul00000000000000# Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
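The version-dispatch factory in client.py above can be exercised directly. A minimal sketch, assuming a reachable Keystone endpoint and placeholder credentials; keyword arguments such as the keystoneauth1 session are passed through unchanged to the selected client class:

from keystoneauth1.identity import v3
from keystoneauth1 import session as ks_session
from saharaclient import client as sahara_client

# Credentials and endpoint below are placeholders for illustration only.
auth = v3.Password(auth_url='http://keystone:5000/v3',
                   username='demo', password='secret', project_name='demo',
                   user_domain_id='default', project_domain_id='default')
sahara = sahara_client.Client('2', session=ks_session.Session(auth=auth))

# Unknown versions raise the UnsupportedVersion error defined above:
try:
    sahara_client.Client('9.9')
except sahara_client.UnsupportedVersion as exc:
    print(exc)  # Invalid client version '9.9'; must be one of: 1.0, 1.1, 2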
from pbr import version version_info = version.VersionInfo('python-saharaclient') python-saharaclient-3.1.0/saharaclient/_i18n.py0000664000175000017500000000136513643576737021472 0ustar zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """oslo.i18n integration module. See https://docs.openstack.org/oslo.i18n/latest/user/usage.html """ import oslo_i18n _ = oslo_i18n.TranslatorFactory(domain='saharaclient').primary python-saharaclient-3.1.0/saharaclient/__init__.py0000664000175000017500000000130413643576737022304 0ustar zuulzuul00000000000000# Copyright 2017 Huawei, Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # from saharaclient import version __version__ = version.version_info.version_string() python-saharaclient-3.1.0/saharaclient/tests/0000775000175000017500000000000013643577103021323 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/saharaclient/tests/hacking/0000775000175000017500000000000013643577103022727 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/saharaclient/tests/hacking/checks.py0000664000175000017500000000743213643576737024563 0ustar zuulzuul00000000000000# Copyright (c) 2013 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import re import tokenize from hacking import core import pycodestyle RE_OSLO_IMPORTS = (re.compile(r"(((from)|(import))\s+oslo\.)"), re.compile(r"(from\s+oslo\s+import)")) RE_DICT_CONSTRUCTOR_WITH_LIST_COPY = re.compile(r".*\bdict\((\[)?(\(|\[)") RE_USE_JSONUTILS_INVALID_LINE = re.compile(r"(import\s+json)") RE_USE_JSONUTILS_VALID_LINE = re.compile(r"(import\s+jsonschema)") RE_MUTABLE_DEFAULT_ARGS = re.compile(r"^\s*def .+\((.+=\{\}|.+=\[\])") def _starts_with_any(line, *prefixes): for prefix in prefixes: if line.startswith(prefix): return True return False def _any_in(line, *sublines): for subline in sublines: if subline in line: return True return False @core.flake8ext def import_db_only_in_conductor(logical_line, filename): """Check that db calls are only in conductor module and in tests. 
S361 """ if _any_in(filename, "sahara/conductor", "sahara/tests", "sahara/db"): return if _starts_with_any(logical_line, "from sahara import db", "from sahara.db", "import sahara.db"): yield (0, "S361: sahara.db import only allowed in " "sahara/conductor/*") @core.flake8ext def hacking_no_author_attr(logical_line, tokens): """__author__ should not be used. S362: __author__ = slukjanov """ for token_type, text, start_index, _, _ in tokens: if token_type == tokenize.NAME and text == "__author__": yield (start_index[1], "S362: __author__ should not be used") @core.flake8ext def check_oslo_namespace_imports(logical_line): """Check to prevent old oslo namespace usage. S363 """ if re.match(RE_OSLO_IMPORTS[0], logical_line): yield(0, "S363: '%s' must be used instead of '%s'." % ( logical_line.replace('oslo.', 'oslo_'), logical_line)) if re.match(RE_OSLO_IMPORTS[1], logical_line): yield(0, "S363: '%s' must be used instead of '%s'" % ( 'import oslo_%s' % logical_line.split()[-1], logical_line)) @core.flake8ext def dict_constructor_with_list_copy(logical_line): """Check to prevent dict constructor with a sequence of key-value pairs. S368 """ if RE_DICT_CONSTRUCTOR_WITH_LIST_COPY.match(logical_line): yield (0, 'S368: Must use a dict comprehension instead of a dict ' 'constructor with a sequence of key-value pairs.') @core.flake8ext def use_jsonutils(logical_line, filename): """Check to prevent importing json in sahara code. S375 """ if pycodestyle.noqa(logical_line): return if (RE_USE_JSONUTILS_INVALID_LINE.match(logical_line) and not RE_USE_JSONUTILS_VALID_LINE.match(logical_line)): yield(0, "S375: Use jsonutils from oslo_serialization instead" " of json") @core.flake8ext def no_mutable_default_args(logical_line): """Check to prevent mutable default argument in sahara code. S360 """ msg = "S360: Method's default argument shouldn't be mutable!" if RE_MUTABLE_DEFAULT_ARGS.match(logical_line): yield (0, msg) python-saharaclient-3.1.0/saharaclient/tests/hacking/logging_checks.py0000664000175000017500000000427713643576737026275 0ustar zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import re from hacking import core ALL_LOG_LEVELS = "info|exception|warning|critical|error|debug" RE_ACCEPTED_LOG_LEVELS = re.compile( r"(.)*LOG\.(%(levels)s)\(" % {'levels': ALL_LOG_LEVELS}) # Since _Lx() have been removed, we just need to check _() RE_TRANSLATED_LOG = re.compile( r"(.)*LOG\.(%(levels)s)\(\s*_\(" % {'levels': ALL_LOG_LEVELS}) @core.flake8ext def no_translate_logs(logical_line, filename): """Check for 'LOG.*(_(' Translators don't provide translations for log messages, and operators asked not to translate them. * This check assumes that 'LOG' is a logger. * Use filename so we can start enforcing this in specific folders instead of needing to do so all at once. S373 """ msg = "S373 Don't translate logs" if RE_TRANSLATED_LOG.match(logical_line): yield (0, msg) @core.flake8ext def accepted_log_levels(logical_line, filename): """In Sahara we use only 5 log levels. 
This check is needed because we don't want new contributors to use deprecated log levels. S374 """ # NOTE(Kezar): sahara/tests included because we don't require translations # in tests. sahara/db/templates provide separate cli interface so we don't # want to translate it. ignore_dirs = ["sahara/db/templates", "sahara/tests"] for directory in ignore_dirs: if directory in filename: return msg = ("S374 You used deprecated log level. Accepted log levels are " "%(levels)s" % {'levels': ALL_LOG_LEVELS}) if logical_line.startswith("LOG."): if not RE_ACCEPTED_LOG_LEVELS.search(logical_line): yield(0, msg) python-saharaclient-3.1.0/saharaclient/tests/hacking/__init__.py0000664000175000017500000000000013643576737025042 0ustar zuulzuul00000000000000python-saharaclient-3.1.0/saharaclient/tests/hacking/commit_message.py0000664000175000017500000000615413643576737026317 0ustar zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import re import subprocess # nosec from hacking import core class GitCheck(core.GlobalCheck): """Base-class for Git related checks.""" def _get_commit_title(self): # Check if we're inside a git checkout try: subp = subprocess.Popen( # nosec ['git', 'rev-parse', '--show-toplevel'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) gitdir = subp.communicate()[0].rstrip() except OSError: # "git" was not found return None if not os.path.exists(gitdir): return None # Get title of most recent commit subp = subprocess.Popen( # nosec ['git', 'log', '--no-merges', '--pretty=%s', '-1'], stdout=subprocess.PIPE) title = subp.communicate()[0] if subp.returncode: raise Exception("git log failed with code %s" % subp.returncode) return title.decode('utf-8') class OnceGitCheckCommitTitleBug(GitCheck): """Check git commit messages for bugs. OpenStack HACKING recommends not referencing a bug or blueprint in first line. It should provide an accurate description of the change S364 """ name = "GitCheckCommitTitleBug" # From https://github.com/openstack/openstack-ci-puppet # /blob/master/modules/gerrit/manifests/init.pp#L74 # Changeid|bug|blueprint GIT_REGEX = re.compile( r'(I[0-9a-f]{8,40})|' r'([Bb]ug|[Ll][Pp])[\s\#:]*(\d+)|' r'([Bb]lue[Pp]rint|[Bb][Pp])[\s\#:]*([A-Za-z0-9\\-]+)') def run_once(self): title = self._get_commit_title() # NOTE(jogo) if match regex but over 3 words, acceptable title if (title and self.GIT_REGEX.search(title) is not None and len(title.split()) <= 3): return (1, 0, "S364: git commit title ('%s') should provide an accurate " "description of the change, not just a reference to a bug " "or blueprint" % title.strip(), self.name) class OnceGitCheckCommitTitleLength(GitCheck): """Check git commit message length. 
HACKING recommends commit titles 50 chars or less, but enforces a 72 character limit S365 Title limited to 72 chars """ name = "GitCheckCommitTitleLength" def run_once(self): title = self._get_commit_title() if title and len(title) > 72: return ( 1, 0, "S365: git commit title ('%s') should be under 72 chars" % title.strip(), self.name) python-saharaclient-3.1.0/saharaclient/tests/unit/0000775000175000017500000000000013643577103022302 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/saharaclient/tests/unit/test_images.py0000664000175000017500000000564113643576737025172 0ustar zuulzuul00000000000000# Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from saharaclient.api import images from saharaclient.tests.unit import base from oslo_serialization import jsonutils as json class ImageTest(base.BaseTestCase): body = { 'username': 'name', 'description': 'descr' } def test_images_list(self): url = self.URL + '/images' self.responses.get(url, json={'images': [self.body]}) resp = self.client.images.list() self.assertEqual(url, self.responses.last_request.url) self.assertIsInstance(resp[0], images.Image) self.assertFields(self.body, resp[0]) def test_images_get(self): url = self.URL + '/images/id' self.responses.get(url, json={'image': self.body}) resp = self.client.images.get('id') self.assertEqual(url, self.responses.last_request.url) self.assertIsInstance(resp, images.Image) self.assertFields(self.body, resp) def test_unregister_image(self): url = self.URL + '/images/id' self.responses.delete(url, status_code=204) self.client.images.unregister_image('id') self.assertEqual(url, self.responses.last_request.url) def test_update_image(self): url = self.URL + '/images/id' self.responses.post(url, json={'image': self.body}, status_code=202) self.client.images.update_image('id', 'name', 'descr') self.assertEqual(url, self.responses.last_request.url) self.assertEqual(self.body, json.loads(self.responses.last_request.body)) def test_update_tags(self): url = self.URL + '/images/id' tag_url = self.URL + '/images/id/tag' untag_url = self.URL + '/images/id/untag' body = self.body.copy() body['tags'] = ['fake', '0.1'] self.responses.post(tag_url, json={'image': body}, status_code=202) self.responses.post(untag_url, json={'image': body}, status_code=202) self.responses.get(url, json={'image': body}) resp = self.client.images.update_tags('id', ['username', 'tag']) self.assertIsInstance(resp, images.Image) self.assertFields(self.body, resp) resp = self.client.images.update_tags('id', ['username']) self.assertIsInstance(resp, images.Image) self.assertFields(self.body, resp) python-saharaclient-3.1.0/saharaclient/tests/unit/test_job_executions.py0000664000175000017500000001104613643576737026751 0ustar zuulzuul00000000000000# Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License.
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from saharaclient.api import job_executions as je from saharaclient.tests.unit import base from oslo_serialization import jsonutils as json class JobExecutionTest(base.BaseTestCase): body = { 'job_id': 'job_id', 'cluster_id': 'cluster_id', 'configs': {}, 'interface': {}, 'input_id': None, 'output_id': None } response = { 'cluster_id': 'cluster_id', 'interface': {}, 'job_configs': {} } update_json = { 'is_public': True, 'is_protected': True, } def test_create_job_execution_with_io(self): url = self.URL + '/jobs/job_id/execute' body = self.body.copy() body.update({'input_id': 'input_id', 'output_id': 'output_id'}) response = self.response.copy() response.update({'input_id': 'input_id', 'output_id': 'output_id'}) self.responses.post(url, status_code=202, json={'job_execution': response}) resp = self.client.job_executions.create(**body) self.assertEqual(url, self.responses.last_request.url) self.assertEqual(response, json.loads(self.responses.last_request.body)) self.assertIsInstance(resp, je.JobExecution) self.assertFields(response, resp) def test_create_job_execution_without_io(self): url = self.URL + '/jobs/job_id/execute' self.responses.post(url, status_code=202, json={'job_execution': self.response}) resp = self.client.job_executions.create(**self.body) self.assertEqual(url, self.responses.last_request.url) self.assertEqual(self.response, json.loads(self.responses.last_request.body)) self.assertIsInstance(resp, je.JobExecution) self.assertFields(self.response, resp) def test_job_executions_list(self): url = self.URL + '/job-executions' self.responses.get(url, json={'job_executions': [self.response]}) resp = self.client.job_executions.list() self.assertEqual(url, self.responses.last_request.url) self.assertIsInstance(resp[0], je.JobExecution) self.assertFields(self.response, resp[0]) def test_job_executions_get(self): url = self.URL + '/job-executions/id' self.responses.get(url, json={'job_execution': self.response}) resp = self.client.job_executions.get('id') self.assertEqual(url, self.responses.last_request.url) self.assertIsInstance(resp, je.JobExecution) self.assertFields(self.response, resp) def test_job_executions_delete(self): url = self.URL + '/job-executions/id' self.responses.delete(url, status_code=204) self.client.job_executions.delete('id') self.assertEqual(url, self.responses.last_request.url) def test_job_executions_update(self): url = self.URL + '/job-executions/id' self.responses.patch(url, status_code=202, json=self.update_json) # check that all parameters will be updated resp = self.client.job_executions.update("id", **self.update_json) self.assertEqual(url, self.responses.last_request.url) self.assertIsInstance(resp, je.JobExecution) self.assertEqual(self.update_json, json.loads(self.responses.last_request.body)) # check that parameters will not be updated self.client.job_executions.update("id") self.assertEqual(url, self.responses.last_request.url) self.assertEqual({}, json.loads(self.responses.last_request.body)) # check that all parameters will be unset unset_json = { "is_public": None, "is_protected": None } self.client.job_executions.update("id", **unset_json) 
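# Explicit None values serialize to JSON nulls in the PATCH body; that is
# how the API is told to unset fields that were previously set.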
self.assertEqual(url, self.responses.last_request.url) self.assertEqual(unset_json, json.loads(self.responses.last_request.body)) python-saharaclient-3.1.0/saharaclient/tests/unit/test_node_group_templates.py0000664000175000017500000002647213643576737030161 0ustar zuulzuul00000000000000# Copyright (c) 2014 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from saharaclient.api import node_group_templates as ng from saharaclient.tests.unit import base from oslo_serialization import jsonutils as json class NodeGroupTemplateTest(base.BaseTestCase): body = { "name": "name", "plugin_name": "plugin", "hadoop_version": "1", "flavor_id": "2", "description": "description", "volumes_per_node": "3", "volumes_size": "4", "node_processes": ["datanode"], "use_autoconfig": True, "volume_mount_prefix": '/volumes/disk', } update_json = { "node_group_template": { "name": "UpdatedName", "plugin_name": "new_plugin", "hadoop_version": "2", "flavor_id": "7", "description": "description", "volumes_per_node": "3", "volumes_size": "4", "node_processes": ["datanode", "namenode"], "use_autoconfig": False, "volume_mount_prefix": '/volumes/newdisk', } } def test_create_node_group_template(self): url = self.URL + '/node-group-templates' self.responses.post(url, status_code=202, json={'node_group_template': self.body}) resp = self.client.node_group_templates.create(**self.body) self.assertEqual(url, self.responses.last_request.url) self.assertEqual(self.body, json.loads(self.responses.last_request.body)) self.assertIsInstance(resp, ng.NodeGroupTemplate) self.assertFields(self.body, resp) def test_node_group_template_list(self): url = self.URL + '/node-group-templates' self.responses.get(url, json={'node_group_templates': [self.body]}) resp = self.client.node_group_templates.list() self.assertEqual(url, self.responses.last_request.url) self.assertIsInstance(resp[0], ng.NodeGroupTemplate) self.assertFields(self.body, resp[0]) def test_node_group_template_get(self): url = self.URL + '/node-group-templates/id' self.responses.get(url, json={'node_group_template': self.body}) resp = self.client.node_group_templates.get('id') self.assertEqual(url, self.responses.last_request.url) self.assertIsInstance(resp, ng.NodeGroupTemplate) self.assertFields(self.body, resp) def test_node_group_template_delete(self): url = self.URL + '/node-group-templates/id' self.responses.delete(url, status_code=204) self.client.node_group_templates.delete('id') self.assertEqual(url, self.responses.last_request.url) def test_update_node_group_template(self): url = self.URL + '/node-group-templates' self.responses.post(url, status_code=202, json={'node_group_template': self.body}) resp = self.client.node_group_templates.create(**self.body) update_url = self.URL + '/node-group-templates/id' self.responses.put(update_url, status_code=202, json=self.update_json) # check that all parameters will be updated updated = self.client.node_group_templates.update( "id", resp.name, resp.plugin_name, resp.hadoop_version, resp.flavor_id, description=getattr(resp, 
"description", None), volumes_per_node=getattr(resp, "volumes_per_node", None), node_configs=getattr(resp, "node_configs", None), floating_ip_pool=getattr(resp, "floating_ip_pool", None), security_groups=getattr(resp, "security_groups", None), auto_security_group=getattr(resp, "auto_security_group", None), availability_zone=getattr(resp, "availability_zone", None), volumes_availability_zone=getattr(resp, "volumes_availability_zone", None), volume_type=getattr(resp, "volume_type", None), image_id=getattr(resp, "image_id", None), is_proxy_gateway=getattr(resp, "is_proxy_gateway", None), volume_local_to_instance=getattr(resp, "volume_local_to_instance", None), use_autoconfig=False) self.assertIsInstance(updated, ng.NodeGroupTemplate) self.assertFields(self.update_json["node_group_template"], updated) # check that parameters will not be updated self.client.node_group_templates.update("id") self.assertEqual(update_url, self.responses.last_request.url) self.assertEqual({}, json.loads(self.responses.last_request.body)) # check that all parameters will be unset unset_json = { 'auto_security_group': None, 'availability_zone': None, 'description': None, 'flavor_id': None, 'floating_ip_pool': None, 'hadoop_version': None, 'image_id': None, 'is_protected': None, 'is_proxy_gateway': None, 'is_public': None, 'name': None, 'node_configs': None, 'node_processes': None, 'plugin_name': None, 'security_groups': None, 'shares': None, 'use_autoconfig': None, 'volume_local_to_instance': None, 'volume_mount_prefix': None, 'volume_type': None, 'volumes_availability_zone': None, 'volumes_per_node': None, 'volumes_size': None} self.client.node_group_templates.update("id", **unset_json) self.assertEqual(update_url, self.responses.last_request.url) self.assertEqual(unset_json, json.loads(self.responses.last_request.body)) def test_node_group_template_export(self): url = self.URL + '/node-group-templates/id/export' self.responses.get(url, json={'node_group_template': self.body}) resp = self.client.node_group_templates.export('id') self.assertEqual(url, self.responses.last_request.url) self.assertIsInstance(resp, ng.NodeGroupTemplate) self.assertDictsEqual(self.body, resp.__dict__[u'node_group_template']) class NodeGroupTemplateTestV2(base.BaseTestCase): body = { "name": "name", "plugin_name": "plugin", "plugin_version": "1", "flavor_id": "2", "description": "description", "volumes_per_node": "3", "volumes_size": "4", "node_processes": ["datanode"], "use_autoconfig": True, "volume_mount_prefix": '/volumes/disk', "boot_from_volume": False } update_json = { "node_group_template": { "name": "UpdatedName", "plugin_name": "new_plugin", "plugin_version": "2", "flavor_id": "7", "description": "description", "volumes_per_node": "3", "volumes_size": "4", "node_processes": ["datanode", "namenode"], "use_autoconfig": False, "volume_mount_prefix": '/volumes/newdisk', "boot_from_volume": True } } def test_create_node_group_template_v2(self): url = self.URL + '/node-group-templates' self.responses.post(url, status_code=202, json={'node_group_template': self.body}) resp = self.client_v2.node_group_templates.create(**self.body) self.assertEqual(url, self.responses.last_request.url) self.assertEqual(self.body, json.loads(self.responses.last_request.body)) self.assertIsInstance(resp, ng.NodeGroupTemplate) self.assertFields(self.body, resp) def test_update_node_group_template_v2(self): url = self.URL + '/node-group-templates' self.responses.post(url, status_code=202, json={'node_group_template': self.body}) resp = 
self.client_v2.node_group_templates.create(**self.body) update_url = self.URL + '/node-group-templates/id' self.responses.patch(update_url, status_code=202, json=self.update_json) # check that all parameters will be updated updated = self.client_v2.node_group_templates.update( "id", resp.name, resp.plugin_name, resp.plugin_version, resp.flavor_id, description=getattr(resp, "description", None), volumes_per_node=getattr(resp, "volumes_per_node", None), node_configs=getattr(resp, "node_configs", None), floating_ip_pool=getattr(resp, "floating_ip_pool", None), security_groups=getattr(resp, "security_groups", None), auto_security_group=getattr(resp, "auto_security_group", None), availability_zone=getattr(resp, "availability_zone", None), volumes_availability_zone=getattr(resp, "volumes_availability_zone", None), volume_type=getattr(resp, "volume_type", None), image_id=getattr(resp, "image_id", None), is_proxy_gateway=getattr(resp, "is_proxy_gateway", None), volume_local_to_instance=getattr(resp, "volume_local_to_instance", None), use_autoconfig=False, boot_from_volume=getattr(resp, "boot_from_volume", None) ) self.assertIsInstance(updated, ng.NodeGroupTemplate) self.assertFields(self.update_json["node_group_template"], updated) # check that parameters will not be updated self.client_v2.node_group_templates.update("id") self.assertEqual(update_url, self.responses.last_request.url) self.assertEqual({}, json.loads(self.responses.last_request.body)) # check that all parameters will be unset unset_json = { 'auto_security_group': None, 'availability_zone': None, 'description': None, 'flavor_id': None, 'floating_ip_pool': None, 'plugin_version': None, 'image_id': None, 'is_protected': None, 'is_proxy_gateway': None, 'is_public': None, 'name': None, 'node_configs': None, 'node_processes': None, 'plugin_name': None, 'security_groups': None, 'shares': None, 'use_autoconfig': None, 'volume_local_to_instance': None, 'volume_mount_prefix': None, 'volume_type': None, 'volumes_availability_zone': None, 'volumes_per_node': None, 'volumes_size': None, 'boot_from_volume': None} self.client_v2.node_group_templates.update("id", **unset_json) self.assertEqual(update_url, self.responses.last_request.url) self.assertEqual(unset_json, json.loads(self.responses.last_request.body)) python-saharaclient-3.1.0/saharaclient/tests/unit/osc/0000775000175000017500000000000013643577103023066 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/saharaclient/tests/unit/osc/test_plugin.py0000664000175000017500000000371513643576737026017 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
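# The tests below pin down the OSC plugin's client-construction contract.
# As a hedged illustration (not part of the test suite), the equivalent
# direct calls would look roughly like the following sketch; the keyword
# names mirror the assertions below, while ``keystone_session`` and the
# region/url values are illustrative placeholders:
#
#     from saharaclient.api import client as sahara_client
#
#     # chosen when the negotiated data_processing api_version is '1.1'
#     v1 = sahara_client.Client(session=keystone_session,
#                               region_name='RegionOne',
#                               sahara_url=None,
#                               endpoint_type='public')
#     # chosen when the negotiated data_processing api_version is '2'
#     v2 = sahara_client.ClientV2(session=keystone_session,
#                                 region_name='RegionOne',
#                                 sahara_url=None,
#                                 endpoint_type='public')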
import mock from saharaclient.osc import plugin from saharaclient.tests.unit import base class TestDataProcessingPlugin(base.BaseTestCase): @mock.patch("saharaclient.api.client.Client") def test_make_client(self, p_client): instance = mock.Mock() instance._api_version = {"data_processing": '1.1'} instance.session = 'session' instance._region_name = 'region_name' instance._cli_options.data_processing_url = 'url' instance._interface = 'public' plugin.make_client(instance) p_client.assert_called_with(session='session', region_name='region_name', sahara_url='url', endpoint_type='public') @mock.patch("saharaclient.api.client.ClientV2") def test_make_client_v2(self, p_client): instance = mock.Mock() instance._api_version = {"data_processing": '2'} instance.session = 'session' instance._region_name = 'region_name' instance._cli_options.data_processing_url = 'url' instance._interface = 'public' plugin.make_client(instance) p_client.assert_called_with(session='session', region_name='region_name', sahara_url='url', endpoint_type='public') python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v2/0000775000175000017500000000000013643577103023415 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v2/test_images.py0000664000175000017500000003131713643576737026314 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
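# The commands tested below wrap the client's image manager, whose HTTP
# behaviour is covered in saharaclient/tests/unit/test_images.py. A hedged
# sketch of the underlying calls (``client`` stands for an already
# constructed saharaclient client; 'image-id' is a placeholder):
#
#     client.images.update_image('image-id', user_name='ubuntu',
#                                desc='descr')                   # register
#     client.images.update_tags('image-id', ['fake', '0.1'])     # retag
#     client.images.unregister_image('image-id')                 # unregister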
import mock from osc_lib.tests import utils as osc_utils from saharaclient.api import images as api_images from saharaclient.osc.v1 import images as osc_images from saharaclient.tests.unit.osc.v1 import test_images as images_v1 IMAGE_INFO = {'id': 'id', 'name': 'image', 'username': 'ubuntu', 'status': "Active", 'tags': ['fake', '0.1'], 'description': 'Image for tests'} class TestImages(images_v1.TestImages): def setUp(self): super(TestImages, self).setUp() self.app.api_version['data_processing'] = '2' self.image_mock = ( self.app.client_manager.data_processing.images) self.image_mock.reset_mock() class TestListImages(TestImages): def setUp(self): super(TestListImages, self).setUp() self.image_mock.list.return_value = [api_images.Image( None, IMAGE_INFO)] # Command to test self.cmd = osc_images.ListImages(self.app, None) def test_images_list_no_options(self): arglist = [] verifylist = [] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Id', 'Username', 'Tags'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('image', 'id', 'ubuntu', '0.1, fake')] self.assertEqual(expected_data, list(data)) def test_images_list_long(self): arglist = ['--long'] verifylist = [('long', True)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Id', 'Username', 'Tags', 'Status', 'Description'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('image', 'id', 'ubuntu', '0.1, fake', 'Active', 'Image for tests')] self.assertEqual(expected_data, list(data)) def test_images_list_successful_selection(self): arglist = ['--name', 'image', '--tags', 'fake', '--username', 'ubuntu'] verifylist = [('name', 'image'), ('tags', ['fake']), ('username', 'ubuntu')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.image_mock.list.assert_called_once_with( search_opts={'tags': ['fake']}) # Check that columns are correct expected_columns = ['Name', 'Id', 'Username', 'Tags'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('image', 'id', 'ubuntu', '0.1, fake')] self.assertEqual(expected_data, list(data)) def test_images_list_with_name_nothing_selected(self): arglist = ['--name', 'img'] verifylist = [('name', 'img')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Id', 'Username', 'Tags'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [] self.assertEqual(expected_data, list(data)) def test_images_list_with_username_nothing_selected(self): arglist = ['--username', 'fedora'] verifylist = [('username', 'fedora')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Id', 'Username', 'Tags'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [] self.assertEqual(expected_data, list(data)) class TestShowImage(TestImages): def setUp(self): super(TestShowImage, self).setUp() self.image_mock.find_unique.return_value = api_images.Image( None, 
IMAGE_INFO) # Command to test self.cmd = osc_images.ShowImage(self.app, None) def test_image_show_no_options(self): arglist = [] verifylist = [] self.assertRaises(osc_utils.ParserException, self.check_parser, self.cmd, arglist, verifylist) def test_image_show(self): arglist = ['image'] verifylist = [('image', 'image')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.image_mock.find_unique.assert_called_once_with(name='image') # Check that columns are correct expected_columns = ('Description', 'Id', 'Name', 'Status', 'Tags', 'Username') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ['Image for tests', 'id', 'image', 'Active', '0.1, fake', 'ubuntu'] self.assertEqual(expected_data, list(data)) class TestRegisterImage(TestImages): def setUp(self): super(TestRegisterImage, self).setUp() self.image_mock.update_image.return_value = mock.Mock( image=IMAGE_INFO.copy()) self.app.client_manager.image = mock.Mock() self.image_client = self.app.client_manager.image self.image_client.find_image.return_value = mock.Mock(id='id') # Command to test self.cmd = osc_images.RegisterImage(self.app, None) def test_image_register_without_username(self): arglist = ['id'] verifylist = [('image', 'id')] self.assertRaises(osc_utils.ParserException, self.check_parser, self.cmd, arglist, verifylist) def test_image_register_required_options(self): arglist = ['id', '--username', 'ubuntu'] verifylist = [('image', 'id'), ('username', 'ubuntu')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.image_mock.update_image.assert_called_once_with( 'id', desc=None, user_name='ubuntu') # Check that columns are correct expected_columns = ('Description', 'Id', 'Name', 'Status', 'Tags', 'Username') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ['Image for tests', 'id', 'image', 'Active', '0.1, fake', 'ubuntu'] self.assertEqual(expected_data, list(data)) def test_image_register_all_options(self): arglist = ['id', '--username', 'ubuntu', '--description', 'descr'] verifylist = [('image', 'id'), ('username', 'ubuntu'), ('description', 'descr')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.image_mock.update_image.assert_called_once_with( 'id', desc='descr', user_name='ubuntu') class TestUnregisterImage(TestImages): def setUp(self): super(TestUnregisterImage, self).setUp() self.image_mock.find_unique.return_value = api_images.Image( None, IMAGE_INFO) # Command to test self.cmd = osc_images.UnregisterImage(self.app, None) def test_image_unregister_no_options(self): arglist = [] verifylist = [] self.assertRaises(osc_utils.ParserException, self.check_parser, self.cmd, arglist, verifylist) def test_image_unregister(self): arglist = ['image'] verifylist = [('image', ['image'])] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.image_mock.find_unique.assert_called_once_with(name='image') self.image_mock.unregister_image.assert_called_once_with('id') class TestSetImageTags(TestImages): def setUp(self): super(TestSetImageTags, self).setUp() image_info = IMAGE_INFO.copy() image_info['tags'] = [] self.image_mock.find_unique.return_value = 
api_images.Image( None, image_info) self.image_mock.update_tags.return_value = api_images.Image( None, image_info) # Command to test self.cmd = osc_images.SetImageTags(self.app, None) def test_image_tags_set_without_tags(self): arglist = ['id'] verifylist = [('image', 'id')] self.assertRaises(osc_utils.ParserException, self.check_parser, self.cmd, arglist, verifylist) def test_image_tags_set(self): arglist = ['image', '--tags', 'fake', '0.1'] verifylist = [('image', 'image'), ('tags', ['fake', '0.1'])] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.image_mock.find_unique.assert_called_with(name='image') self.image_mock.update_tags.assert_called_once_with( 'id', ['fake', '0.1']) class TestAddImageTags(TestImages): def setUp(self): super(TestAddImageTags, self).setUp() image_info = IMAGE_INFO.copy() image_info['tags'] = [] self.image_mock.update_tags.return_value = api_images.Image( None, image_info) self.image_mock.find_unique.return_value = api_images.Image( None, image_info) # Command to test self.cmd = osc_images.AddImageTags(self.app, None) def test_image_tags_add_without_tags(self): arglist = ['id'] verifylist = [('image', 'id')] self.assertRaises(osc_utils.ParserException, self.check_parser, self.cmd, arglist, verifylist) def test_image_tags_add(self): arglist = ['image', '--tags', 'fake'] verifylist = [('image', 'image'), ('tags', ['fake'])] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.image_mock.find_unique.assert_called_with(name='image') self.image_mock.update_tags.assert_called_once_with( 'id', ['fake']) class TestRemoveImageTags(TestImages): def setUp(self): super(TestRemoveImageTags, self).setUp() self.image_mock.update_tags.return_value = api_images.Image( None, IMAGE_INFO) self.image_mock.find_unique.return_value = api_images.Image( None, IMAGE_INFO) # Command to test self.cmd = osc_images.RemoveImageTags(self.app, None) def test_image_tags_remove_both_options(self): arglist = ['id', '--all', '--tags', 'fake'] verifylist = [('image', 'id'), ('all', True), ('tags', ['fake'])] self.assertRaises(osc_utils.ParserException, self.check_parser, self.cmd, arglist, verifylist) def test_image_tags_remove(self): arglist = ['image', '--tags', 'fake'] verifylist = [('image', 'image'), ('tags', ['fake'])] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.image_mock.find_unique.assert_called_with(name='image') self.image_mock.update_tags.assert_called_once_with( 'id', ['0.1']) def test_image_tags_remove_all(self): arglist = ['image', '--all'] verifylist = [('image', 'image'), ('all', True)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.image_mock.find_unique.assert_called_with(name='image') self.image_mock.update_tags.assert_called_once_with( 'id', []) python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v2/test_node_group_templates.py0000664000175000017500000004423513643576737031271 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import mock from osc_lib.tests import utils as osc_utils from saharaclient.api import node_group_templates as api_ngt from saharaclient.osc.v2 import node_group_templates as osc_ngt from saharaclient.tests.unit.osc.v1 import fakes NGT_INFO = { "node_processes": [ "namenode", "tasktracker" ], "name": "template", "tenant_id": "tenant_id", "availability_zone": 'av_zone', "use_autoconfig": True, "plugin_version": "0.1", "shares": None, "is_default": False, "description": 'description', "node_configs": {}, "is_proxy_gateway": False, "auto_security_group": True, "volume_type": None, "volumes_size": 2, "volume_mount_prefix": "/volumes/disk", "plugin_name": "fake", "is_protected": False, "security_groups": None, "floating_ip_pool": "floating_pool", "is_public": True, "id": "ng_id", "flavor_id": "flavor_id", "volumes_availability_zone": None, "volumes_per_node": 2, "volume_local_to_instance": False, "boot_from_volume": False, "boot_volume_type": None, "boot_volume_availability_zone": None, "boot_volume_local_to_instance": False } class TestNodeGroupTemplates(fakes.TestDataProcessing): def setUp(self): super(TestNodeGroupTemplates, self).setUp() self.app.api_version['data_processing'] = '2' self.ngt_mock = ( self.app.client_manager.data_processing.node_group_templates) self.ngt_mock.reset_mock() class TestCreateNodeGroupTemplate(TestNodeGroupTemplates): # TODO(apavlov): check for creation with --json def setUp(self): super(TestCreateNodeGroupTemplate, self).setUp() self.ngt_mock.create.return_value = api_ngt.NodeGroupTemplate( None, NGT_INFO) self.fl_mock = self.app.client_manager.compute.flavors self.fl_mock.get.return_value = mock.Mock(id='flavor_id') self.fl_mock.reset_mock() # Command to test self.cmd = osc_ngt.CreateNodeGroupTemplate(self.app, None) def test_ngt_create_minimum_options(self): arglist = ['--name', 'template', '--plugin', 'fake', '--plugin-version', '0.1', '--processes', 'namenode', 'tasktracker', '--flavor', 'flavor_id'] verifylist = [('name', 'template'), ('plugin', 'fake'), ('plugin_version', '0.1'), ('flavor', 'flavor_id'), ('processes', ['namenode', 'tasktracker'])] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.ngt_mock.create.assert_called_once_with( auto_security_group=False, availability_zone=None, description=None, flavor_id='flavor_id', floating_ip_pool=None, plugin_version='0.1', is_protected=False, is_proxy_gateway=False, is_public=False, name='template', node_processes=['namenode', 'tasktracker'], plugin_name='fake', security_groups=None, use_autoconfig=False, volume_local_to_instance=False, volume_type=None, volumes_availability_zone=None, volumes_per_node=None, volumes_size=None, shares=None, node_configs=None, volume_mount_prefix=None, boot_from_volume=False, boot_volume_type=None, boot_volume_availability_zone=None, boot_volume_local_to_instance=False) def test_ngt_create_all_options(self): arglist = ['--name', 'template', '--plugin', 'fake', '--plugin-version', '0.1', '--processes', 'namenode', 'tasktracker', '--security-groups', 'secgr', '--auto-security-group', 
'--availability-zone', 'av_zone', '--flavor', 'flavor_id', '--floating-ip-pool', 'floating_pool', '--volumes-per-node', '2', '--volumes-size', '2', '--volumes-type', 'type', '--volumes-availability-zone', 'vavzone', '--volumes-mount-prefix', '/volume/asd', '--volumes-locality', '--description', 'descr', '--autoconfig', '--proxy-gateway', '--public', '--protected', '--boot-from-volume', '--boot-volume-type', 'volume2', '--boot-volume-availability-zone', 'ceph', '--boot-volume-local-to-instance'] verifylist = [('name', 'template'), ('plugin', 'fake'), ('plugin_version', '0.1'), ('processes', ['namenode', 'tasktracker']), ('security_groups', ['secgr']), ('auto_security_group', True), ('availability_zone', 'av_zone'), ('flavor', 'flavor_id'), ('floating_ip_pool', 'floating_pool'), ('volumes_per_node', 2), ('volumes_size', 2), ('volumes_type', 'type'), ('volumes_availability_zone', 'vavzone'), ('volumes_mount_prefix', '/volume/asd'), ('volumes_locality', True), ('description', 'descr'), ('autoconfig', True), ('proxy_gateway', True), ('public', True), ('protected', True), ('boot_from_volume', True)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.ngt_mock.create.assert_called_once_with( auto_security_group=True, availability_zone='av_zone', description='descr', flavor_id='flavor_id', floating_ip_pool='floating_pool', plugin_version='0.1', is_protected=True, is_proxy_gateway=True, is_public=True, name='template', node_processes=['namenode', 'tasktracker'], plugin_name='fake', security_groups=['secgr'], use_autoconfig=True, volume_local_to_instance=True, volume_type='type', volumes_availability_zone='vavzone', volumes_per_node=2, volumes_size=2, shares=None, node_configs=None, volume_mount_prefix='/volume/asd', boot_from_volume=True, boot_volume_type='volume2', boot_volume_availability_zone='ceph', boot_volume_local_to_instance=True) # Check that columns are correct expected_columns = ( 'Auto security group', 'Availability zone', 'Boot from volume', 'Description', 'Flavor id', 'Floating ip pool', 'Id', 'Is default', 'Is protected', 'Is proxy gateway', 'Is public', 'Name', 'Node processes', 'Plugin name', 'Plugin version', 'Security groups', 'Use autoconfig', 'Volume local to instance', 'Volume mount prefix', 'Volume type', 'Volumes availability zone', 'Volumes per node', 'Volumes size') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ( True, 'av_zone', False, 'description', 'flavor_id', 'floating_pool', 'ng_id', False, False, False, True, 'template', 'namenode, tasktracker', 'fake', '0.1', None, True, False, '/volumes/disk', None, None, 2, 2) self.assertEqual(expected_data, data) class TestListNodeGroupTemplates(TestNodeGroupTemplates): def setUp(self): super(TestListNodeGroupTemplates, self).setUp() self.ngt_mock.list.return_value = [api_ngt.NodeGroupTemplate( None, NGT_INFO)] # Command to test self.cmd = osc_ngt.ListNodeGroupTemplates(self.app, None) def test_ngt_list_no_options(self): arglist = [] verifylist = [] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('template', 'ng_id', 'fake', '0.1')] self.assertEqual(expected_data, list(data)) def test_ngt_list_long(self): arglist = ['--long'] 
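        # The recurring pattern in these command tests: ``arglist`` is the
        # raw argv, ``verifylist`` the expected (dest, value) pairs, and
        # check_parser() asserts that argparse yields exactly that namespace
        # before take_action() is exercised against the mocked client.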
verifylist = [('long', True)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version', 'Node processes', 'Description'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('template', 'ng_id', 'fake', '0.1', 'namenode, tasktracker', 'description')] self.assertEqual(expected_data, list(data)) def test_ngt_list_extra_search_opts(self): arglist = ['--plugin', 'fake', '--plugin-version', '0.1', '--name', 'templ'] verifylist = [('plugin', 'fake'), ('plugin_version', '0.1'), ('name', 'templ')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('template', 'ng_id', 'fake', '0.1')] self.assertEqual(expected_data, list(data)) class TestShowNodeGroupTemplate(TestNodeGroupTemplates): def setUp(self): super(TestShowNodeGroupTemplate, self).setUp() self.ngt_mock.find_unique.return_value = api_ngt.NodeGroupTemplate( None, NGT_INFO) # Command to test self.cmd = osc_ngt.ShowNodeGroupTemplate(self.app, None) def test_ngt_show(self): arglist = ['template'] verifylist = [('node_group_template', 'template')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.ngt_mock.find_unique.assert_called_once_with(name='template') # Check that columns are correct expected_columns = ( 'Auto security group', 'Availability zone', 'Boot from volume', 'Description', 'Flavor id', 'Floating ip pool', 'Id', 'Is default', 'Is protected', 'Is proxy gateway', 'Is public', 'Name', 'Node processes', 'Plugin name', 'Plugin version', 'Security groups', 'Use autoconfig', 'Volume local to instance', 'Volume mount prefix', 'Volume type', 'Volumes availability zone', 'Volumes per node', 'Volumes size') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ( True, 'av_zone', False, 'description', 'flavor_id', 'floating_pool', 'ng_id', False, False, False, True, 'template', 'namenode, tasktracker', 'fake', '0.1', None, True, False, '/volumes/disk', None, None, 2, 2) self.assertEqual(expected_data, data) class TestDeleteNodeGroupTemplate(TestNodeGroupTemplates): def setUp(self): super(TestDeleteNodeGroupTemplate, self).setUp() self.ngt_mock.find_unique.return_value = api_ngt.NodeGroupTemplate( None, NGT_INFO) # Command to test self.cmd = osc_ngt.DeleteNodeGroupTemplate(self.app, None) def test_ngt_delete(self): arglist = ['template'] verifylist = [('node_group_template', ['template'])] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.ngt_mock.delete.assert_called_once_with('ng_id') class TestUpdateNodeGroupTemplate(TestNodeGroupTemplates): # TODO(apavlov): check for update with --json def setUp(self): super(TestUpdateNodeGroupTemplate, self).setUp() self.ngt_mock.find_unique.return_value = api_ngt.NodeGroupTemplate( None, NGT_INFO) self.ngt_mock.update.return_value = api_ngt.NodeGroupTemplate( None, NGT_INFO) self.fl_mock = self.app.client_manager.compute.flavors self.fl_mock.get.return_value = mock.Mock(id='flavor_id') self.fl_mock.reset_mock() # Command to test 
self.cmd = osc_ngt.UpdateNodeGroupTemplate(self.app, None) def test_ngt_update_no_options(self): arglist = [] verifylist = [] self.assertRaises(osc_utils.ParserException, self.check_parser, self.cmd, arglist, verifylist) def test_ngt_update_nothing_updated(self): arglist = ['template'] verifylist = [('node_group_template', 'template')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.ngt_mock.update.assert_called_once_with('ng_id') def test_ngt_update_all_options(self): arglist = ['template', '--name', 'template', '--plugin', 'fake', '--plugin-version', '0.1', '--processes', 'namenode', 'tasktracker', '--security-groups', 'secgr', '--auto-security-group-enable', '--availability-zone', 'av_zone', '--flavor', 'flavor_id', '--floating-ip-pool', 'floating_pool', '--volumes-per-node', '2', '--volumes-size', '2', '--volumes-type', 'type', '--volumes-availability-zone', 'vavzone', '--volumes-mount-prefix', '/volume/asd', '--volumes-locality-enable', '--description', 'descr', '--autoconfig-enable', '--proxy-gateway-enable', '--public', '--protected', '--boot-from-volume-enable', '--boot-volume-type', 'volume2', '--boot-volume-availability-zone', 'ceph', '--boot-volume-local-to-instance-enable'] verifylist = [('node_group_template', 'template'), ('name', 'template'), ('plugin', 'fake'), ('plugin_version', '0.1'), ('processes', ['namenode', 'tasktracker']), ('security_groups', ['secgr']), ('use_auto_security_group', True), ('availability_zone', 'av_zone'), ('flavor', 'flavor_id'), ('floating_ip_pool', 'floating_pool'), ('volumes_per_node', 2), ('volumes_size', 2), ('volumes_type', 'type'), ('volumes_availability_zone', 'vavzone'), ('volumes_mount_prefix', '/volume/asd'), ('volume_locality', True), ('description', 'descr'), ('use_autoconfig', True), ('is_proxy_gateway', True), ('is_public', True), ('is_protected', True), ('boot_from_volume', True)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.ngt_mock.update.assert_called_once_with( 'ng_id', auto_security_group=True, availability_zone='av_zone', description='descr', flavor_id='flavor_id', floating_ip_pool='floating_pool', plugin_version='0.1', is_protected=True, is_proxy_gateway=True, is_public=True, name='template', node_processes=['namenode', 'tasktracker'], plugin_name='fake', security_groups=['secgr'], use_autoconfig=True, volume_local_to_instance=True, volume_type='type', volumes_availability_zone='vavzone', volumes_per_node=2, volumes_size=2, volume_mount_prefix='/volume/asd', boot_from_volume=True, boot_volume_type='volume2', boot_volume_availability_zone='ceph', boot_volume_local_to_instance=True) # Check that columns are correct expected_columns = ( 'Auto security group', 'Availability zone', 'Boot from volume', 'Description', 'Flavor id', 'Floating ip pool', 'Id', 'Is default', 'Is protected', 'Is proxy gateway', 'Is public', 'Name', 'Node processes', 'Plugin name', 'Plugin version', 'Security groups', 'Use autoconfig', 'Volume local to instance', 'Volume mount prefix', 'Volume type', 'Volumes availability zone', 'Volumes per node', 'Volumes size') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ( True, 'av_zone', False, 'description', 'flavor_id', 'floating_pool', 'ng_id', False, False, False, True, 'template', 'namenode, tasktracker', 'fake', '0.1', None, True, False, '/volumes/disk', None, 
None, 2, 2) self.assertEqual(expected_data, data) def test_ngt_update_private_unprotected(self): arglist = ['template', '--private', '--unprotected'] verifylist = [('node_group_template', 'template'), ('is_public', False), ('is_protected', False)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.ngt_mock.update.assert_called_once_with( 'ng_id', is_protected=False, is_public=False) python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v2/test_data_sources.py0000664000175000017500000003002513643576737027516 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import mock from osc_lib.tests import utils as osc_utils from saharaclient.api import data_sources as api_ds from saharaclient.osc.v1 import data_sources as osc_ds from saharaclient.tests.unit.osc.v1 import test_data_sources as tds_v1 DS_INFO = {'id': 'id', 'name': 'source', 'type': 'swift', 'url': 'swift://container.sahara/object', 'description': 'Data Source for tests', 'is_public': True, 'is_protected': True} class TestDataSources(tds_v1.TestDataSources): def setUp(self): super(TestDataSources, self).setUp() self.app.api_version['data_processing'] = '2' self.ds_mock = ( self.app.client_manager.data_processing.data_sources) self.ds_mock.reset_mock() class TestCreateDataSource(TestDataSources): def setUp(self): super(TestCreateDataSource, self).setUp() self.ds_mock.create.return_value = api_ds.DataSources( None, DS_INFO) # Command to test self.cmd = osc_ds.CreateDataSource(self.app, None) def test_data_sources_create_no_options(self): arglist = [] verifylist = [] self.assertRaises(osc_utils.ParserException, self.check_parser, self.cmd, arglist, verifylist) def test_data_sources_create_required_options(self): arglist = ['source', '--type', 'swift', '--url', 'swift://container.sahara/object'] verifylist = [('name', 'source'), ('type', 'swift'), ('url', 'swift://container.sahara/object')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that data source was created with correct arguments called_args = {'credential_pass': None, 'credential_user': None, 'data_source_type': 'swift', 'name': 'source', 'description': '', 'url': 'swift://container.sahara/object', 'is_public': False, 'is_protected': False, 's3_credentials': None} self.ds_mock.create.assert_called_once_with(**called_args) # Check that columns are correct expected_columns = ('Description', 'Id', 'Is protected', 'Is public', 'Name', 'Type', 'Url') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ('Data Source for tests', 'id', True, True, 'source', 'swift', 'swift://container.sahara/object') self.assertEqual(expected_data, data) def test_data_sources_create_all_options(self): arglist = ['source', '--type', 'swift', '--url', 'swift://container.sahara/object', '--username', 'user', '--password', 'pass', '--description', 'Data Source 
for tests', '--public', '--protected'] verifylist = [('name', 'source'), ('type', 'swift'), ('url', 'swift://container.sahara/object'), ('username', 'user'), ('password', 'pass'), ('description', 'Data Source for tests'), ('public', True), ('protected', True)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that data source was created with correct arguments called_args = {'credential_pass': 'pass', 'credential_user': 'user', 'data_source_type': 'swift', 'name': 'source', 'description': 'Data Source for tests', 'url': 'swift://container.sahara/object', 'is_protected': True, 'is_public': True, 's3_credentials': None} self.ds_mock.create.assert_called_once_with(**called_args) # Check that columns are correct expected_columns = ('Description', 'Id', 'Is protected', 'Is public', 'Name', 'Type', 'Url') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ('Data Source for tests', 'id', True, True, 'source', 'swift', 'swift://container.sahara/object') self.assertEqual(expected_data, data) class TestListDataSources(TestDataSources): def setUp(self): super(TestListDataSources, self).setUp() self.ds_mock.list.return_value = [api_ds.DataSources( None, DS_INFO)] # Command to test self.cmd = osc_ds.ListDataSources(self.app, None) def test_data_sources_list_no_options(self): arglist = [] verifylist = [] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Id', 'Type'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('source', 'id', 'swift')] self.assertEqual(expected_data, list(data)) def test_data_sources_list_long(self): arglist = ['--long'] verifylist = [('long', True)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Id', 'Type', 'Url', 'Description', 'Is public', 'Is protected'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('source', 'id', 'swift', 'swift://container.sahara/object', 'Data Source for tests', True, True)] self.assertEqual(expected_data, list(data)) class TestShowDataSource(TestDataSources): def setUp(self): super(TestShowDataSource, self).setUp() self.ds_mock.find_unique.return_value = api_ds.DataSources( None, DS_INFO) # Command to test self.cmd = osc_ds.ShowDataSource(self.app, None) def test_data_sources_show(self): arglist = ['source'] verifylist = [('data_source', 'source')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.ds_mock.find_unique.assert_called_once_with(name='source') # Check that columns are correct expected_columns = ('Description', 'Id', 'Is protected', 'Is public', 'Name', 'Type', 'Url') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ['Data Source for tests', 'id', True, True, 'source', 'swift', 'swift://container.sahara/object'] self.assertEqual(expected_data, list(data)) class TestDeleteDataSource(TestDataSources): def setUp(self): super(TestDeleteDataSource, self).setUp() self.ds_mock.find_unique.return_value = api_ds.DataSources( None, DS_INFO) # Command to test self.cmd = osc_ds.DeleteDataSource(self.app, None) def test_data_sources_delete(self): arglist = ['source']
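        # Deletion is by name on the command line; the command resolves the
        # name to an id through find_unique() (mocked in setUp) and passes
        # that id to delete(), which is what the assertion below verifies.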
verifylist = [('data_source', ['source'])] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.ds_mock.delete.assert_called_once_with('id') class TestUpdateDataSource(TestDataSources): def setUp(self): super(TestUpdateDataSource, self).setUp() self.ds_mock.find_unique.return_value = api_ds.DataSources( None, DS_INFO) self.ds_mock.update.return_value = mock.Mock( data_source=DS_INFO) # Command to test self.cmd = osc_ds.UpdateDataSource(self.app, None) def test_data_sources_update_no_options(self): arglist = [] verifylist = [] self.assertRaises(osc_utils.ParserException, self.check_parser, self.cmd, arglist, verifylist) def test_data_sources_update_nothing_updated(self): arglist = ['source'] verifylist = [('data_source', 'source')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) self.ds_mock.update.assert_called_once_with('id', {}) def test_data_sources_update_required_options(self): arglist = ['source'] verifylist = [('data_source', 'source')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that data source was updated with correct arguments self.ds_mock.update.assert_called_once_with('id', {}) # Check that columns are correct expected_columns = ('Description', 'Id', 'Is protected', 'Is public', 'Name', 'Type', 'Url') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ('Data Source for tests', 'id', True, True, 'source', 'swift', 'swift://container.sahara/object') self.assertEqual(expected_data, data) def test_data_sources_update_all_options(self): arglist = ['source', '--name', 'source', '--type', 'swift', '--url', 'swift://container.sahara/object', '--username', 'user', '--password', 'pass', '--description', 'Data Source for tests', '--public', '--protected'] verifylist = [('data_source', 'source'), ('name', 'source'), ('type', 'swift'), ('url', 'swift://container.sahara/object'), ('username', 'user'), ('password', 'pass'), ('description', 'Data Source for tests'), ('is_public', True), ('is_protected', True)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that data source was updated with correct arguments self.ds_mock.update.assert_called_once_with( 'id', {'name': 'source', 'url': 'swift://container.sahara/object', 'is_protected': True, 'credentials': {'password': 'pass', 'user': 'user'}, 'is_public': True, 'type': 'swift', 'description': 'Data Source for tests'}) # Check that columns are correct expected_columns = ('Description', 'Id', 'Is protected', 'Is public', 'Name', 'Type', 'Url') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ('Data Source for tests', 'id', True, True, 'source', 'swift', 'swift://container.sahara/object') self.assertEqual(expected_data, data) def test_data_sources_update_private_unprotected(self): arglist = ['source', '--private', '--unprotected'] verifylist = [('data_source', 'source'), ('is_public', False), ('is_protected', False)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that data source was updated with correct arguments self.ds_mock.update.assert_called_once_with( 'id', {'is_public': False, 'is_protected': False})
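# End of the v2 data-source command tests. A hedged usage sketch of the
# manager they drive (``client`` is an already constructed saharaclient
# client; the values are placeholders, and the keyword names mirror the
# assertions above):
#
#     ds = client.data_sources.create(
#         name='source', description='', data_source_type='swift',
#         url='swift://container.sahara/object',
#         credential_user='user', credential_pass='pass',
#         is_public=False, is_protected=False, s3_credentials=None)
#     client.data_sources.update(ds.id, {'description': 'updated'})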
python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v2/test_job_templates.py0000664000175000017500000002435413643576737027702 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import mock from osc_lib.tests import utils as osc_utils from saharaclient.api.v2 import job_templates as api_j from saharaclient.osc.v2 import job_templates as osc_j from saharaclient.tests.unit.osc.v1 import test_job_templates as tjt_v1 JOB_TEMPLATE_INFO = { "is_public": False, "id": "job_id", "name": "pig-job", "description": "Job for test", "interface": [], "libs": [ { "id": "lib_id", "name": "lib" } ], "type": "Pig", "is_protected": False, "mains": [ { "id": "main_id", "name": "main" } ] } class TestJobTemplates(tjt_v1.TestJobTemplates): def setUp(self): super(TestJobTemplates, self).setUp() self.app.api_version['data_processing'] = '2' self.job_mock = self.app.client_manager.data_processing.job_templates self.job_mock.reset_mock() class TestCreateJobTemplate(TestJobTemplates): # TODO(apavlov): check for creation with --interface def setUp(self): super(TestCreateJobTemplate, self).setUp() self.job_mock.create.return_value = api_j.JobTemplate( None, JOB_TEMPLATE_INFO) self.jb_mock = self.app.client_manager.data_processing.job_binaries self.jb_mock.find_unique.return_value = mock.Mock(id='jb_id') self.jb_mock.reset_mock() # Command to test self.cmd = osc_j.CreateJobTemplate(self.app, None) def test_job_template_create_minimum_options(self): arglist = ['--name', 'pig-job', '--type', 'Pig'] verifylist = [('name', 'pig-job'), ('type', 'Pig')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.job_mock.create.assert_called_once_with( description=None, interface=None, is_protected=False, is_public=False, libs=None, mains=None, name='pig-job', type='Pig') def test_job_template_create_all_options(self): arglist = ['--name', 'pig-job', '--type', 'Pig', '--mains', 'main', '--libs', 'lib', '--description', 'descr', '--public', '--protected'] verifylist = [('name', 'pig-job'), ('type', 'Pig'), ('mains', ['main']), ('libs', ['lib']), ('description', 'descr'), ('public', True), ('protected', True)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.job_mock.create.assert_called_once_with( description='descr', interface=None, is_protected=True, is_public=True, libs=['jb_id'], mains=['jb_id'], name='pig-job', type='Pig') # Check that columns are correct expected_columns = ('Description', 'Id', 'Is protected', 'Is public', 'Libs', 'Mains', 'Name', 'Type') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ('Job for test', 'job_id', False, False, 'lib:lib_id', 'main:main_id', 'pig-job', 'Pig') self.assertEqual(expected_data, data) class TestListJobTemplates(TestJobTemplates): def setUp(self): super(TestListJobTemplates, self).setUp() 
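        # list() is stubbed with a single canned JobTemplate built from
        # JOB_TEMPLATE_INFO; the tests below therefore only verify how the
        # command formats that resource into columns and rows.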
self.job_mock.list.return_value = [api_j.JobTemplate( None, JOB_TEMPLATE_INFO)] # Command to test self.cmd = osc_j.ListJobTemplates(self.app, None) def test_job_templates_list_no_options(self): arglist = [] verifylist = [] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Id', 'Type'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('pig-job', 'job_id', 'Pig')] self.assertEqual(expected_data, list(data)) def test_job_template_list_long(self): arglist = ['--long'] verifylist = [('long', True)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Id', 'Type', 'Description', 'Is public', 'Is protected'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('pig-job', 'job_id', 'Pig', 'Job for test', False, False)] self.assertEqual(expected_data, list(data)) def test_job_template_list_extra_search_opts(self): arglist = ['--type', 'Pig', '--name', 'pig'] verifylist = [('type', 'Pig'), ('name', 'pig')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Id', 'Type'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('pig-job', 'job_id', 'Pig')] self.assertEqual(expected_data, list(data)) class TestShowJobTemplate(TestJobTemplates): def setUp(self): super(TestShowJobTemplate, self).setUp() self.job_mock.find_unique.return_value = api_j.JobTemplate( None, JOB_TEMPLATE_INFO) # Command to test self.cmd = osc_j.ShowJobTemplate(self.app, None) def test_job_template_show(self): arglist = ['pig-job'] verifylist = [('job_template', 'pig-job')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.job_mock.find_unique.assert_called_once_with(name='pig-job') # Check that columns are correct expected_columns = ('Description', 'Id', 'Is protected', 'Is public', 'Libs', 'Mains', 'Name', 'Type') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ('Job for test', 'job_id', False, False, 'lib:lib_id', 'main:main_id', 'pig-job', 'Pig') self.assertEqual(expected_data, data) class TestDeleteJobTemplate(TestJobTemplates): def setUp(self): super(TestDeleteJobTemplate, self).setUp() self.job_mock.find_unique.return_value = api_j.JobTemplate( None, JOB_TEMPLATE_INFO) # Command to test self.cmd = osc_j.DeleteJobTemplate(self.app, None) def test_job_template_delete(self): arglist = ['pig-job'] verifylist = [('job_template', ['pig-job'])] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.job_mock.delete.assert_called_once_with('job_id') class TestUpdateJobTemplate(TestJobTemplates): def setUp(self): super(TestUpdateJobTemplate, self).setUp() self.job_mock.find_unique.return_value = api_j.JobTemplate( None, JOB_TEMPLATE_INFO) self.job_mock.update.return_value = mock.Mock( job_template=JOB_TEMPLATE_INFO.copy()) # Command to test self.cmd = osc_j.UpdateJobTemplate(self.app, None) def test_job_template_update_no_options(self): arglist = [] verifylist = [] self.assertRaises(osc_utils.ParserException, 
self.check_parser, self.cmd, arglist, verifylist) def test_job_template_update_nothing_updated(self): arglist = ['pig-job'] verifylist = [('job_template', 'pig-job')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.job_mock.update.assert_called_once_with('job_id') def test_job_template_update_all_options(self): arglist = ['pig-job', '--name', 'pig-job', '--description', 'descr', '--public', '--protected'] verifylist = [('job_template', 'pig-job'), ('name', 'pig-job'), ('description', 'descr'), ('is_public', True), ('is_protected', True)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.job_mock.update.assert_called_once_with( 'job_id', description='descr', is_protected=True, is_public=True, name='pig-job') # Check that columns are correct expected_columns = ('Description', 'Id', 'Is protected', 'Is public', 'Libs', 'Mains', 'Name', 'Type') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ('Job for test', 'job_id', False, False, 'lib:lib_id', 'main:main_id', 'pig-job', 'Pig') self.assertEqual(expected_data, data) def test_job_template_update_private_unprotected(self): arglist = ['pig-job', '--private', '--unprotected'] verifylist = [('job_template', 'pig-job'), ('is_public', False), ('is_protected', False)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.job_mock.update.assert_called_once_with( 'job_id', is_protected=False, is_public=False) python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v2/test_cluster_templates.py0000664000175000017500000003206213643576737030604 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from osc_lib.tests import utils as osc_utils from saharaclient.api import cluster_templates as api_ct from saharaclient.api import node_group_templates as api_ngt from saharaclient.osc.v2 import cluster_templates as osc_ct from saharaclient.tests.unit.osc.v1 import test_cluster_templates as tct_v1 CT_INFO = { "description": "Cluster template for tests", "use_autoconfig": True, "is_default": False, "node_groups": [ { "count": 2, "id": "d29631fc-0fad-434b-80aa-7a3e9526f57c", "name": "fakeng", "plugin_name": 'fake', "plugin_version": '0.1' } ], "plugin_version": "0.1", "is_public": False, "plugin_name": "fake", "id": "0647061f-ab98-4c89-84e0-30738ea55750", "anti_affinity": [], "name": "template", "is_protected": False, "domain_name": 'domain.org.' 
} class TestClusterTemplates(tct_v1.TestClusterTemplates): def setUp(self): super(TestClusterTemplates, self).setUp() self.app.api_version['data_processing'] = '2' self.ct_mock = ( self.app.client_manager.data_processing.cluster_templates) self.ngt_mock = ( self.app.client_manager.data_processing.node_group_templates) self.ct_mock.reset_mock() self.ngt_mock.reset_mock() class TestCreateClusterTemplate(TestClusterTemplates): # TODO(apavlov): check for creation with --json def setUp(self): super(TestCreateClusterTemplate, self).setUp() self.ct_mock.create.return_value = api_ct.ClusterTemplate( None, CT_INFO) self.ngt_mock.find_unique.return_value = api_ngt.NodeGroupTemplate( None, CT_INFO['node_groups'][0]) # Command to test self.cmd = osc_ct.CreateClusterTemplate(self.app, None) def test_ct_create_minimum_options(self): arglist = ['--name', 'template', '--node-groups', 'fakeng:2'] verifylist = [('name', 'template'), ('node_groups', ['fakeng:2'])] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.ct_mock.create.assert_called_once_with( description=None, plugin_version='0.1', is_protected=False, is_public=False, name='template', node_groups=[ {'count': 2, 'name': 'fakeng', 'node_group_template_id': 'd29631fc-0fad-434b-80aa-7a3e9526f57c'}], plugin_name='fake', use_autoconfig=False, shares=None, cluster_configs=None, domain_name=None) def test_ct_create_all_options(self): arglist = ['--name', 'template', '--node-groups', 'fakeng:2', '--anti-affinity', 'datanode', '--description', 'descr', '--autoconfig', '--public', '--protected', '--domain-name', 'domain.org.'] verifylist = [('name', 'template'), ('node_groups', ['fakeng:2']), ('description', 'descr'), ('autoconfig', True), ('public', True), ('protected', True), ('domain_name', 'domain.org.')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.ct_mock.create.assert_called_once_with( description='descr', plugin_version='0.1', is_protected=True, is_public=True, name='template', node_groups=[ {'count': 2, 'name': 'fakeng', 'node_group_template_id': 'd29631fc-0fad-434b-80aa-7a3e9526f57c'}], plugin_name='fake', use_autoconfig=True, shares=None, cluster_configs=None, domain_name='domain.org.') # Check that columns are correct expected_columns = ('Anti affinity', 'Description', 'Domain name', 'Id', 'Is default', 'Is protected', 'Is public', 'Name', 'Node groups', 'Plugin name', 'Plugin version', 'Use autoconfig') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ('', 'Cluster template for tests', 'domain.org.', '0647061f-ab98-4c89-84e0-30738ea55750', False, False, False, 'template', 'fakeng:2', 'fake', '0.1', True) self.assertEqual(expected_data, data) class TestListClusterTemplates(TestClusterTemplates): def setUp(self): super(TestListClusterTemplates, self).setUp() self.ct_mock.list.return_value = [api_ct.ClusterTemplate( None, CT_INFO)] # Command to test self.cmd = osc_ct.ListClusterTemplates(self.app, None) def test_ct_list_no_options(self): arglist = [] verifylist = [] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('template', 
'0647061f-ab98-4c89-84e0-30738ea55750', 'fake', '0.1')] self.assertEqual(expected_data, list(data)) def test_ct_list_long(self): arglist = ['--long'] verifylist = [('long', True)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version', 'Node groups', 'Description'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('template', '0647061f-ab98-4c89-84e0-30738ea55750', 'fake', '0.1', 'fakeng:2', 'Cluster template for tests')] self.assertEqual(expected_data, list(data)) def test_ct_list_extra_search_opts(self): arglist = ['--plugin', 'fake', '--plugin-version', '0.1', '--name', 'templ'] verifylist = [('plugin', 'fake'), ('plugin_version', '0.1'), ('name', 'templ')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('template', '0647061f-ab98-4c89-84e0-30738ea55750', 'fake', '0.1')] self.assertEqual(expected_data, list(data)) class TestShowClusterTemplate(TestClusterTemplates): def setUp(self): super(TestShowClusterTemplate, self).setUp() self.ct_mock.find_unique.return_value = api_ct.ClusterTemplate( None, CT_INFO) # Command to test self.cmd = osc_ct.ShowClusterTemplate(self.app, None) def test_ct_show(self): arglist = ['template'] verifylist = [('cluster_template', 'template')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.ct_mock.find_unique.assert_called_once_with(name='template') # Check that columns are correct expected_columns = ('Anti affinity', 'Description', 'Domain name', 'Id', 'Is default', 'Is protected', 'Is public', 'Name', 'Node groups', 'Plugin name', 'Plugin version', 'Use autoconfig') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ( '', 'Cluster template for tests', 'domain.org.', '0647061f-ab98-4c89-84e0-30738ea55750', False, False, False, 'template', 'fakeng:2', 'fake', '0.1', True) self.assertEqual(expected_data, data) class TestDeleteClusterTemplate(TestClusterTemplates): def setUp(self): super(TestDeleteClusterTemplate, self).setUp() self.ct_mock.find_unique.return_value = api_ct.ClusterTemplate( None, CT_INFO) # Command to test self.cmd = osc_ct.DeleteClusterTemplate(self.app, None) def test_ct_delete(self): arglist = ['template'] verifylist = [('cluster_template', ['template'])] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.ct_mock.delete.assert_called_once_with( '0647061f-ab98-4c89-84e0-30738ea55750') class TestUpdateClusterTemplate(TestClusterTemplates): # TODO(apavlov): check for update with --json def setUp(self): super(TestUpdateClusterTemplate, self).setUp() self.ct_mock.update.return_value = api_ct.ClusterTemplate( None, CT_INFO) self.ct_mock.find_unique.return_value = api_ct.ClusterTemplate( None, CT_INFO) self.ngt_mock.find_unique.return_value = api_ngt.NodeGroupTemplate( None, CT_INFO['node_groups'][0]) # Command to test self.cmd = osc_ct.UpdateClusterTemplate(self.app, None) def test_ct_update_no_options(self): arglist = [] verifylist = [] 
self.assertRaises(osc_utils.ParserException, self.check_parser, self.cmd, arglist, verifylist) def test_ct_update_nothing_updated(self): arglist = ['template'] verifylist = [('cluster_template', 'template')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) self.ct_mock.update.assert_called_once_with( '0647061f-ab98-4c89-84e0-30738ea55750') def test_ct_update_all_options(self): arglist = ['template', '--name', 'template', '--node-groups', 'fakeng:2', '--anti-affinity', 'datanode', '--description', 'descr', '--autoconfig-enable', '--public', '--protected', '--domain-name', 'domain.org.'] verifylist = [('cluster_template', 'template'), ('name', 'template'), ('node_groups', ['fakeng:2']), ('description', 'descr'), ('use_autoconfig', True), ('is_public', True), ('is_protected', True), ('domain_name', 'domain.org.')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.ct_mock.update.assert_called_once_with( '0647061f-ab98-4c89-84e0-30738ea55750', description='descr', plugin_version='0.1', is_protected=True, is_public=True, name='template', node_groups=[ {'count': 2, 'name': 'fakeng', 'node_group_template_id': 'd29631fc-0fad-434b-80aa-7a3e9526f57c'}], plugin_name='fake', use_autoconfig=True, domain_name='domain.org.') # Check that columns are correct expected_columns = ('Anti affinity', 'Description', 'Domain name', 'Id', 'Is default', 'Is protected', 'Is public', 'Name', 'Node groups', 'Plugin name', 'Plugin version', 'Use autoconfig') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ('', 'Cluster template for tests', 'domain.org.', '0647061f-ab98-4c89-84e0-30738ea55750', False, False, False, 'template', 'fakeng:2', 'fake', '0.1', True) self.assertEqual(expected_data, data) def test_ct_update_private_unprotected(self): arglist = ['template', '--private', '--unprotected'] verifylist = [('cluster_template', 'template'), ('is_protected', False), ('is_public', False)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) self.ct_mock.update.assert_called_once_with( '0647061f-ab98-4c89-84e0-30738ea55750', is_protected=False, is_public=False) python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v2/test_job_types.py0000664000175000017500000001201513643576737027037 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import mock from saharaclient.api import job_types as api_jt from saharaclient.api.v2 import job_templates as api_job_templates from saharaclient.osc.v2 import job_types as osc_jt from saharaclient.tests.unit.osc.v1 import test_job_types as tjt_v1 JOB_TYPE_INFO = { "name": 'Pig', "plugins": [ { 'versions': { '0.1': {}, '0.2': {} }, 'name': 'fake' }, { 'versions': { '6.2.2': {} }, 'name': 'wod' } ] } class TestJobTypes(tjt_v1.TestJobTypes): def setUp(self): super(TestJobTypes, self).setUp() self.app.api_version['data_processing'] = '2' self.job_template_mock = ( self.app.client_manager.data_processing.job_templates) self.jt_mock = self.app.client_manager.data_processing.job_types self.jt_mock.reset_mock() self.job_template_mock.reset_mock() class TestListJobTemplates(TestJobTypes): def setUp(self): super(TestListJobTemplates, self).setUp() self.jt_mock.list.return_value = [api_jt.JobType(None, JOB_TYPE_INFO)] # Command to test self.cmd = osc_jt.ListJobTypes(self.app, None) def test_job_types_list_no_options(self): arglist = [] verifylist = [] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Plugins'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('Pig', 'fake(0.1, 0.2), wod(6.2.2)')] self.assertEqual(expected_data, list(data)) def test_job_types_list_extra_search_opts(self): arglist = ['--type', 'Pig', '--plugin', 'fake', '--plugin-version', '0.1'] verifylist = [('type', 'Pig'), ('plugin', 'fake'), ('plugin_version', '0.1')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Plugins'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('Pig', 'fake(0.1, 0.2), wod(6.2.2)')] self.assertEqual(expected_data, list(data)) class TestGetJobTypeConfigs(TestJobTypes): def setUp(self): super(TestGetJobTypeConfigs, self).setUp() self.job_template_mock.get_configs.return_value = ( api_job_templates.JobTemplate(None, JOB_TYPE_INFO)) # Command to test self.cmd = osc_jt.GetJobTypeConfigs(self.app, None) @mock.patch('oslo_serialization.jsonutils.dump') def test_get_job_type_configs_default_file(self, p_dump): m_open = mock.mock_open() with mock.patch('six.moves.builtins.open', m_open, create=True): arglist = ['Pig'] verifylist = [('job_type', 'Pig')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments was passed self.job_template_mock.get_configs.assert_called_once_with( 'Pig') args_to_dump = p_dump.call_args[0] # Check that the right data will be saved self.assertEqual(JOB_TYPE_INFO, args_to_dump[0]) # Check that data will be saved to the right file self.assertEqual('Pig', m_open.call_args[0][0]) @mock.patch('oslo_serialization.jsonutils.dump') def test_get_job_type_configs_specified_file(self, p_dump): m_open = mock.mock_open() with mock.patch('six.moves.builtins.open', m_open): arglist = ['Pig', '--file', 'testfile'] verifylist = [('job_type', 'Pig'), ('file', 'testfile')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments was passed self.job_template_mock.get_configs.assert_called_once_with( 'Pig') args_to_dump = p_dump.call_args[0] # Check that the right data will be saved self.assertEqual(JOB_TYPE_INFO, args_to_dump[0]) # 
Check that data will be saved to the right file self.assertEqual('testfile', m_open.call_args[0][0]) python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v2/test_jobs.py0000664000175000017500000003237513643576737026011 0ustar zuulzuul00000000000000# Copyright (c) 2018 Red Hat Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import mock from osc_lib.tests import utils as osc_utils from saharaclient.api.v2 import jobs as api_j from saharaclient.osc.v2 import jobs as osc_j from saharaclient.tests.unit.osc.v1 import test_jobs as tj_v1 JOB_INFO = { "is_public": False, "id": "j_id", "interface": [], "is_protected": False, "input_id": 'input_id', "output_id": 'output_id', "job_template_id": "job_template_id", "cluster_id": 'cluster_id', "start_time": "start", "end_time": "end", "engine_job_id": "engine_job_id", "info": { "status": 'SUCCEEDED' }, "job_configs": { "configs": { "config1": "1", "config2": "2" }, "args": [ "arg1", "arg2" ], "params": { "param2": "value2", "param1": "value1" } } } class TestJobs(tj_v1.TestJobs): def setUp(self): super(TestJobs, self).setUp() self.app.api_version['data_processing'] = '2' self.j_mock = self.app.client_manager.data_processing.jobs self.j_mock.reset_mock() class TestExecuteJob(TestJobs): # TODO(apavlov): check for execution with --interface, --configs, --json def setUp(self): super(TestExecuteJob, self).setUp() self.j_mock.create.return_value = api_j.Job(None, JOB_INFO) self.ds_mock = self.app.client_manager.data_processing.data_sources self.ds_mock.find_unique.return_value = mock.Mock(id='ds_id') self.c_mock = self.app.client_manager.data_processing.clusters self.c_mock.find_unique.return_value = mock.Mock(id='cluster_id') self.jt_mock = self.app.client_manager.data_processing.job_templates self.jt_mock.find_unique.return_value = mock.Mock(id='job_template_id') self.ds_mock.reset_mock() self.c_mock.reset_mock() self.jt_mock.reset_mock() # Command to test self.cmd = osc_j.ExecuteJob(self.app, None) def test_job_execute_minimum_options(self): arglist = ['--job-template', 'job-template', '--cluster', 'cluster'] verifylist = [('job_template', 'job-template'), ('cluster', 'cluster')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.j_mock.create.assert_called_once_with( cluster_id='cluster_id', configs={}, input_id=None, interface=None, is_protected=False, is_public=False, job_template_id='job_template_id', output_id=None) def test_job_execute_with_input_output_option(self): arglist = ['--job-template', 'job-template', '--cluster', 'cluster', '--input', 'input', '--output', 'output'] verifylist = [('job_template', 'job-template'), ('cluster', 'cluster'), ('input', 'input'), ('output', 'output')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) self.j_mock.create.assert_called_once_with( cluster_id='cluster_id', configs={}, input_id='ds_id', interface=None, is_protected=False, is_public=False, job_template_id='job_template_id', 
output_id='ds_id') # without option --output arglist = ['--job-template', 'job-template', '--cluster', 'cluster', '--input', 'input'] verifylist = [('job_template', 'job-template'), ('cluster', 'cluster'), ('input', 'input')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) self.j_mock.create.assert_called_with( cluster_id='cluster_id', configs={}, input_id='ds_id', interface=None, is_protected=False, is_public=False, job_template_id='job_template_id', output_id=None) # without options --output and --input arglist = ['--job-template', 'job-template', '--cluster', 'cluster'] verifylist = [('job_template', 'job-template'), ('cluster', 'cluster')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) self.j_mock.create.assert_called_with( cluster_id='cluster_id', configs={}, input_id=None, interface=None, is_protected=False, is_public=False, job_template_id='job_template_id', output_id=None) def test_job_execute_all_options(self): arglist = ['--job-template', 'job-template', '--cluster', 'cluster', '--input', 'input', '--output', 'output', '--params', 'param1:value1', 'param2:value2', '--args', 'arg1', 'arg2', '--configs', 'config1:1', 'config2:2', '--public', '--protected'] verifylist = [('job_template', 'job-template'), ('cluster', 'cluster'), ('input', 'input'), ('output', 'output'), ('params', ['param1:value1', 'param2:value2']), ('args', ['arg1', 'arg2']), ('configs', ['config1:1', 'config2:2']), ('public', True), ('protected', True)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.j_mock.create.assert_called_once_with( cluster_id='cluster_id', configs={'configs': {'config1': '1', 'config2': '2'}, 'args': ['arg1', 'arg2'], 'params': {'param2': 'value2', 'param1': 'value1'}}, input_id='ds_id', interface=None, is_protected=True, is_public=True, job_template_id='job_template_id', output_id='ds_id') # Check that columns are correct expected_columns = ('Cluster id', 'End time', 'Engine job id', 'Id', 'Input id', 'Is protected', 'Is public', 'Job template id', 'Output id', 'Start time', 'Status') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ('cluster_id', 'end', 'engine_job_id', 'j_id', 'input_id', False, False, 'job_template_id', 'output_id', 'start', 'SUCCEEDED') self.assertEqual(expected_data, data) class TestListJobs(TestJobs): def setUp(self): super(TestListJobs, self).setUp() self.j_mock.list.return_value = [api_j.Job(None, JOB_INFO)] # Command to test self.cmd = osc_j.ListJobs(self.app, None) def test_jobs_list_no_options(self): arglist = [] verifylist = [] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Id', 'Cluster id', 'Job template id', 'Status'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('j_id', 'cluster_id', 'job_template_id', 'SUCCEEDED')] self.assertEqual(expected_data, list(data)) def test_jobs_list_long(self): arglist = ['--long'] verifylist = [('long', True)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Id', 'Cluster id', 'Job template id', 'Status', 'Start time', 'End time'] self.assertEqual(expected_columns, columns) # Check that data is correct 
expected_data = [('j_id', 'cluster_id', 'job_template_id', 'SUCCEEDED', 'start', 'end')] self.assertEqual(expected_data, list(data)) def test_jobs_list_extra_search_opts(self): arglist = ['--status', 'succeeded'] verifylist = [('status', 'succeeded')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Id', 'Cluster id', 'Job template id', 'Status'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('j_id', 'cluster_id', 'job_template_id', 'SUCCEEDED')] self.assertEqual(expected_data, list(data)) class TestShowJob(TestJobs): def setUp(self): super(TestShowJob, self).setUp() self.j_mock.get.return_value = api_j.Job(None, JOB_INFO) # Command to test self.cmd = osc_j.ShowJob(self.app, None) def test_job_show(self): arglist = ['job_id'] verifylist = [('job', 'job_id')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.j_mock.get.assert_called_once_with('job_id') # Check that columns are correct expected_columns = ('Cluster id', 'End time', 'Engine job id', 'Id', 'Input id', 'Is protected', 'Is public', 'Job template id', 'Output id', 'Start time', 'Status') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ('cluster_id', 'end', 'engine_job_id', 'j_id', 'input_id', False, False, 'job_template_id', 'output_id', 'start', 'SUCCEEDED') self.assertEqual(expected_data, data) class TestDeleteJob(TestJobs): def setUp(self): super(TestDeleteJob, self).setUp() self.j_mock.get.return_value = api_j.Job(None, JOB_INFO) # Command to test self.cmd = osc_j.DeleteJob(self.app, None) def test_job_delete(self): arglist = ['job_id'] verifylist = [('job', ['job_id'])] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.j_mock.delete.assert_called_once_with('job_id') class TestUpdateJob(TestJobs): def setUp(self): super(TestUpdateJob, self).setUp() self.j_mock.get.return_value = api_j.Job(None, JOB_INFO) self.j_mock.update.return_value = mock.Mock(job=JOB_INFO.copy()) # Command to test self.cmd = osc_j.UpdateJob(self.app, None) def test_job_update_no_options(self): arglist = [] verifylist = [] self.assertRaises(osc_utils.ParserException, self.check_parser, self.cmd, arglist, verifylist) def test_job_update_nothing_updated(self): arglist = ['job_id'] verifylist = [('job', 'job_id')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.j_mock.update.assert_called_once_with('job_id') def test_job_update_public_protected(self): arglist = ['job_id', '--public', '--protected'] verifylist = [('job', 'job_id'), ('is_public', True), ('is_protected', True)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.j_mock.update.assert_called_once_with( 'job_id', is_protected=True, is_public=True) # Check that columns are correct expected_columns = ('Cluster id', 'End time', 'Engine job id', 'Id', 'Input id', 'Is protected', 'Is public', 'Job template id', 'Output id', 'Start time', 'Status') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ('cluster_id', 'end', 'engine_job_id', 'j_id', 'input_id', False, 
False, 'job_template_id', 'output_id', 'start', 'SUCCEEDED') self.assertEqual(expected_data, data) def test_job_update_private_unprotected(self): arglist = ['job_id', '--private', '--unprotected'] verifylist = [('job', 'job_id'), ('is_public', False), ('is_protected', False)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.j_mock.update.assert_called_once_with( 'job_id', is_protected=False, is_public=False) python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v2/test_job_binaries.py0000664000175000017500000002743613643576737027504 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import mock from osc_lib.tests import utils as osc_u import testtools from saharaclient.api import job_binaries as api_jb from saharaclient.osc.v1 import job_binaries as osc_jb from saharaclient.tests.unit.osc.v1 import test_job_binaries as tjb_v1 JOB_BINARY_INFO = { "name": 'job-binary', "description": 'descr', "id": 'jb_id', "is_protected": False, "is_public": False, "url": 'swift://cont/test' } class TestJobBinaries(tjb_v1.TestJobBinaries): def setUp(self): super(TestJobBinaries, self).setUp() self.app.api_version['data_processing'] = '2' self.jb_mock = self.app.client_manager.data_processing.job_binaries self.jb_mock.reset_mock() class TestCreateJobBinary(TestJobBinaries): # TODO(apavlov): check for creation with --json def setUp(self): super(TestCreateJobBinary, self).setUp() self.jb_mock.create.return_value = api_jb.JobBinaries( None, JOB_BINARY_INFO) self.jbi_mock = (self.app.client_manager. 
data_processing.job_binary_internals) self.jbi_mock.create.return_value = mock.Mock(id='jbi_id') self.jbi_mock.reset_mock() # Command to test self.cmd = osc_jb.CreateJobBinary(self.app, None) def test_job_binary_create_swift_public_protected(self): arglist = ['--name', 'job-binary', '--url', 'swift://cont/test', '--username', 'user', '--password', 'pass', '--public', '--protected'] verifylist = [('name', 'job-binary'), ('url', 'swift://cont/test'), ('username', 'user'), ('password', 'pass'), ('public', True), ('protected', True)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.jb_mock.create.assert_called_once_with( description=None, extra={'password': 'pass', 'user': 'user'}, is_protected=True, is_public=True, name='job-binary', url='swift://cont/test') # Check that columns are correct expected_columns = ('Description', 'Id', 'Is protected', 'Is public', 'Name', 'Url') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ('descr', 'jb_id', False, False, 'job-binary', 'swift://cont/test') self.assertEqual(expected_data, data) def test_job_binary_create_mutual_exclusion(self): arglist = ['job-binary', '--name', 'job-binary', '--access-key', 'ak', '--secret-key', 'sk', '--url', 's3://abc/def', '--password', 'pw'] with testtools.ExpectedException(osc_u.ParserException): self.check_parser(self.cmd, arglist, mock.Mock()) class TestListJobBinaries(TestJobBinaries): def setUp(self): super(TestListJobBinaries, self).setUp() self.jb_mock.list.return_value = [api_jb.JobBinaries( None, JOB_BINARY_INFO)] # Command to test self.cmd = osc_jb.ListJobBinaries(self.app, None) def test_job_binary_list_no_options(self): arglist = [] verifylist = [] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Id', 'Url'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('job-binary', 'jb_id', 'swift://cont/test')] self.assertEqual(expected_data, list(data)) def test_job_binary_list_long(self): arglist = ['--long'] verifylist = [('long', True)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Id', 'Url', 'Description', 'Is public', 'Is protected'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('job-binary', 'jb_id', 'swift://cont/test', 'descr', False, False)] self.assertEqual(expected_data, list(data)) def test_job_binary_list_extra_search_opts(self): arglist = ['--name', 'bin'] verifylist = [('name', 'bin')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Id', 'Url'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('job-binary', 'jb_id', 'swift://cont/test')] self.assertEqual(expected_data, list(data)) class TestShowJobBinary(TestJobBinaries): def setUp(self): super(TestShowJobBinary, self).setUp() self.jb_mock.find_unique.return_value = api_jb.JobBinaries( None, JOB_BINARY_INFO) # Command to test self.cmd = osc_jb.ShowJobBinary(self.app, None) def test_job_binary_show(self): arglist = ['job-binary'] verifylist = [('job_binary', 'job-binary')] parsed_args = 
self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.jb_mock.find_unique.assert_called_once_with(name='job-binary') # Check that columns are correct expected_columns = ('Description', 'Id', 'Is protected', 'Is public', 'Name', 'Url') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ('descr', 'jb_id', False, False, 'job-binary', 'swift://cont/test') self.assertEqual(expected_data, data) class TestDeleteJobBinary(TestJobBinaries): def setUp(self): super(TestDeleteJobBinary, self).setUp() self.jb_mock.find_unique.return_value = api_jb.JobBinaries( None, JOB_BINARY_INFO) # Command to test self.cmd = osc_jb.DeleteJobBinary(self.app, None) def test_job_binary_delete(self): arglist = ['job-binary'] verifylist = [('job_binary', ['job-binary'])] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.jb_mock.delete.assert_called_once_with('jb_id') class TestUpdateJobBinary(TestJobBinaries): def setUp(self): super(TestUpdateJobBinary, self).setUp() self.jb_mock.find_unique.return_value = api_jb.JobBinaries( None, JOB_BINARY_INFO) self.jb_mock.update.return_value = api_jb.JobBinaries( None, JOB_BINARY_INFO) # Command to test self.cmd = osc_jb.UpdateJobBinary(self.app, None) def test_job_binary_update_all_options(self): arglist = ['job-binary', '--name', 'job-binary', '--description', 'descr', '--username', 'user', '--password', 'pass', '--public', '--protected'] verifylist = [('job_binary', 'job-binary'), ('name', 'job-binary'), ('description', 'descr'), ('username', 'user'), ('password', 'pass'), ('is_public', True), ('is_protected', True)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.jb_mock.update.assert_called_once_with( 'jb_id', {'is_public': True, 'description': 'descr', 'is_protected': True, 'name': 'job-binary', 'extra': {'password': 'pass', 'user': 'user'}}) # Check that columns are correct expected_columns = ('Description', 'Id', 'Is protected', 'Is public', 'Name', 'Url') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ('descr', 'jb_id', False, False, 'job-binary', 'swift://cont/test') self.assertEqual(expected_data, data) def test_job_binary_update_private_unprotected(self): arglist = ['job-binary', '--private', '--unprotected'] verifylist = [('job_binary', 'job-binary'), ('is_public', False), ('is_protected', False)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.jb_mock.update.assert_called_once_with( 'jb_id', {'is_public': False, 'is_protected': False}) def test_job_binary_update_nothing_updated(self): arglist = ['job-binary'] verifylist = [('job_binary', 'job-binary')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.jb_mock.update.assert_called_once_with( 'jb_id', {}) def test_job_binary_update_mutual_exclusion(self): arglist = ['job-binary', '--name', 'job-binary', '--access-key', 'ak', '--secret-key', 'sk', '--url', 's3://abc/def', '--password', 'pw'] with testtools.ExpectedException(osc_u.ParserException): self.check_parser(self.cmd, arglist, mock.Mock()) class 
TestDownloadJobBinary(TestJobBinaries): def setUp(self): super(TestDownloadJobBinary, self).setUp() self.jb_mock.get_file.return_value = 'data' self.jb_mock.find_unique.return_value = api_jb.JobBinaries( None, JOB_BINARY_INFO) # Command to test self.cmd = osc_jb.DownloadJobBinary(self.app, None) def test_download_job_binary_default_file(self): m_open = mock.mock_open() with mock.patch('six.moves.builtins.open', m_open, create=True): arglist = ['job-binary'] verifylist = [('job_binary', 'job-binary')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments was passed self.jb_mock.get_file.assert_called_once_with( 'jb_id') # Check that data will be saved to the right file self.assertEqual('job-binary', m_open.call_args[0][0]) def test_download_job_binary_specified_file(self): m_open = mock.mock_open() with mock.patch('six.moves.builtins.open', m_open, create=True): arglist = ['job-binary', '--file', 'test'] verifylist = [('job_binary', 'job-binary'), ('file', 'test')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments was passed self.jb_mock.get_file.assert_called_once_with( 'jb_id') # Check that data will be saved to the right file self.assertEqual('test', m_open.call_args[0][0]) python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v2/__init__.py0000664000175000017500000000000013643576737025530 0ustar zuulzuul00000000000000python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v2/test_plugins.py0000664000175000017500000002130213643576737026521 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import mock from oslo_serialization import jsonutils as json from saharaclient.api import plugins as api_plugins from saharaclient.osc.v1 import plugins as osc_plugins from saharaclient.tests.unit.osc.v1 import fakes PLUGIN_INFO = {'name': 'fake', 'title': 'Fake Plugin', 'versions': ['0.1', '0.2'], 'description': 'Plugin for tests', 'required_image_tags': ['fake', '0.1'], 'node_processes': { 'HDFS': ['datanode', 'namenode'], 'MapReduce': ['jobtracker', 'tasktracker'] }, 'plugin_labels': {'enabled': {'status': True}}, 'version_labels': {'0.1': {'enabled': {'status': True}}}} class TestPlugins(fakes.TestDataProcessing): def setUp(self): super(TestPlugins, self).setUp() self.app.api_version['data_processing'] = '2' self.plugins_mock = self.app.client_manager.data_processing.plugins self.plugins_mock.reset_mock() class TestListPlugins(TestPlugins): def setUp(self): super(TestListPlugins, self).setUp() self.plugins_mock.list.return_value = [api_plugins.Plugin( None, PLUGIN_INFO)] # Command to test self.cmd = osc_plugins.ListPlugins(self.app, None) def test_plugins_list_no_options(self): arglist = [] verifylist = [] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Versions'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('fake', '0.1, 0.2')] self.assertEqual(expected_data, list(data)) def test_plugins_list_long(self): arglist = ['--long'] verifylist = [('long', True)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Title', 'Versions', 'Description'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('fake', 'Fake Plugin', '0.1, 0.2', 'Plugin for tests')] self.assertEqual(expected_data, list(data)) class TestShowPlugin(TestPlugins): def setUp(self): super(TestShowPlugin, self).setUp() self.plugins_mock.get.return_value = api_plugins.Plugin( None, PLUGIN_INFO) self.plugins_mock.get_version_details.return_value = ( api_plugins.Plugin(None, PLUGIN_INFO)) # Command to test self.cmd = osc_plugins.ShowPlugin(self.app, None) def test_plugin_show(self): arglist = ['fake'] verifylist = [('plugin', 'fake')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.plugins_mock.get.assert_called_once_with('fake') # Check that columns are correct expected_columns = ('Description', 'Name', 'Title', 'Versions', '', 'Plugin version 0.1: enabled', 'Plugin: enabled') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ('Plugin for tests', 'fake', 'Fake Plugin', '0.1, 0.2', '', True, True) self.assertEqual(expected_data, data) def test_plugin_version_show(self): arglist = ['fake', '--plugin-version', '0.1'] verifylist = [('plugin', 'fake'), ('plugin_version', '0.1')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.plugins_mock.get_version_details.assert_called_once_with( 'fake', '0.1') # Check that columns are correct expected_columns = ('Description', 'Name', 'Required image tags', 'Title', '', 'Plugin version 0.1: enabled', 'Plugin: enabled', '', 'Service:', '', 'HDFS', 'MapReduce') self.assertEqual(expected_columns, columns) # Check 
that data is correct expected_data = ('Plugin for tests', 'fake', '0.1, fake', 'Fake Plugin', '', True, True, '', 'Available processes:', '', 'datanode, namenode', 'jobtracker, tasktracker') self.assertEqual(expected_data, data) class TestGetPluginConfigs(TestPlugins): def setUp(self): super(TestGetPluginConfigs, self).setUp() self.plugins_mock.get_version_details.return_value = ( api_plugins.Plugin(None, PLUGIN_INFO)) # Command to test self.cmd = osc_plugins.GetPluginConfigs(self.app, None) @mock.patch('oslo_serialization.jsonutils.dump') def test_get_plugin_configs_default_file(self, p_dump): m_open = mock.mock_open() with mock.patch('six.moves.builtins.open', m_open, create=True): arglist = ['fake', '0.1'] verifylist = [('plugin', 'fake'), ('plugin_version', '0.1')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.plugins_mock.get_version_details.assert_called_once_with( 'fake', '0.1') args_to_dump = p_dump.call_args[0] # Check that the right data will be saved self.assertEqual(PLUGIN_INFO, args_to_dump[0]) # Check that data will be saved to the right file self.assertEqual('fake-0.1', m_open.call_args[0][0]) @mock.patch('oslo_serialization.jsonutils.dump') def test_get_plugin_configs_specified_file(self, p_dump): m_open = mock.mock_open() with mock.patch('six.moves.builtins.open', m_open): arglist = ['fake', '0.1', '--file', 'testfile'] verifylist = [('plugin', 'fake'), ('plugin_version', '0.1'), ('file', 'testfile')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.plugins_mock.get_version_details.assert_called_once_with( 'fake', '0.1') args_to_dump = p_dump.call_args[0] # Check that the right data will be saved self.assertEqual(PLUGIN_INFO, args_to_dump[0]) # Check that data will be saved to the right file self.assertEqual('testfile', m_open.call_args[0][0]) class TestUpdatePlugin(TestPlugins): def setUp(self): super(TestUpdatePlugin, self).setUp() self.plugins_mock.update.return_value = api_plugins.Plugin( None, PLUGIN_INFO) # Command to test self.cmd = osc_plugins.UpdatePlugin(self.app, None) @mock.patch('osc_lib.utils.read_blob_file_contents') def test_plugin_update(self, read): arglist = ['fake', 'update.json'] verifylist = [('plugin', 'fake'), ('json', 'update.json')] value = {'plugin_labels': {'enabled': {'status': True}}} value = json.dumps(value) read.return_value = value parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.plugins_mock.update.assert_called_once_with( 'fake', {'plugin_labels': {'enabled': {'status': True}}}) # Check that columns are correct expected_columns = ('Description', 'Name', 'Title', 'Versions', '', 'Plugin version 0.1: enabled', 'Plugin: enabled') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ('Plugin for tests', 'fake', 'Fake Plugin', '0.1, 0.2', '', True, True) self.assertEqual(expected_data, data) python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v2/test_clusters.py0000664000175000017500000005050413643576737026712 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import mock from osc_lib.tests import utils as osc_utils from saharaclient.api import cluster_templates as api_ct from saharaclient.api import clusters as api_cl from saharaclient.api import images as api_img from saharaclient.api import node_group_templates as api_ngt from saharaclient.osc.v2 import clusters as osc_cl from saharaclient.tests.unit.osc.v1 import test_clusters as tc_v1 CLUSTER_INFO = { "description": "Cluster template for tests", "use_autoconfig": True, "is_default": False, "node_groups": [ { "count": 2, "id": "ng_id", "name": "fakeng", "plugin_name": 'fake', "plugin_version": '0.1', "node_group_template_id": 'ngt_id' } ], "plugin_version": "0.1", "is_public": False, "plugin_name": "fake", "id": "cluster_id", "anti_affinity": [], "name": "fake", "is_protected": False, "cluster_template_id": "ct_id", "neutron_management_network": "net_id", "user_keypair_id": "test", "status": 'Active', "default_image_id": "img_id", 'verification': { 'status': 'GREEN', 'id': 'ver_id', 'cluster_id': 'cluster_id', 'checks': [ { 'status': 'GREEN', 'name': 'Some check' } ] } } CT_INFO = { "plugin_name": "fake", "plugin_version": "0.1", "name": '"template', "id": "ct_id" } NGT_INFO = { 'id': 'ngt_id', 'name': 'fakeng' } class TestClusters(tc_v1.TestClusters): def setUp(self): super(TestClusters, self).setUp() self.app.api_version['data_processing'] = '2' self.cl_mock = ( self.app.client_manager.data_processing.clusters) self.ngt_mock = ( self.app.client_manager.data_processing.node_group_templates) self.ct_mock = ( self.app.client_manager.data_processing.cluster_templates) self.img_mock = ( self.app.client_manager.data_processing.images) self.cl_mock.reset_mock() self.ngt_mock.reset_mock() self.ct_mock.reset_mock() self.img_mock.reset_mock() class TestCreateCluster(TestClusters): # TODO(apavlov): check for creation with --json def setUp(self): super(TestCreateCluster, self).setUp() self.cl_mock.create.return_value = api_cl.Cluster( None, CLUSTER_INFO) self.cl_mock.find_unique.return_value = api_cl.Cluster( None, CLUSTER_INFO) self.ct_mock.find_unique.return_value = api_ct.ClusterTemplate( None, CT_INFO) self.img_mock.find_unique.return_value = api_img.Image( None, {'id': 'img_id'}) self.net_mock = self.app.client_manager.network self.net_mock.find_network.return_value = mock.Mock(id='net_id') self.net_mock.reset_mock() # Command to test self.cmd = osc_cl.CreateCluster(self.app, None) def test_cluster_create_minimum_options(self): arglist = ['--name', 'fake', '--cluster-template', 'template', '--image', 'ubuntu'] verifylist = [('name', 'fake'), ('cluster_template', 'template'), ('image', 'ubuntu')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.cl_mock.create.assert_called_once_with( cluster_template_id='ct_id', count=None, default_image_id='img_id', description=None, plugin_version='0.1', is_protected=False, is_public=False, is_transient=False, name='fake', net_id=None, plugin_name='fake', user_keypair_id=None) def test_cluster_create_all_options(self): arglist = ['--name', 'fake', 
'--cluster-template', 'template', '--image', 'ubuntu', '--user-keypair', 'test', '--neutron-network', 'net', '--description', 'descr', '--transient', '--public', '--protected'] verifylist = [('name', 'fake'), ('cluster_template', 'template'), ('image', 'ubuntu'), ('user_keypair', 'test'), ('neutron_network', 'net'), ('description', 'descr'), ('transient', True), ('public', True), ('protected', True)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.cl_mock.create.assert_called_once_with( cluster_template_id='ct_id', count=None, default_image_id='img_id', description='descr', plugin_version='0.1', is_protected=True, is_public=True, is_transient=True, name='fake', net_id='net_id', plugin_name='fake', user_keypair_id='test') # Check that columns are correct expected_columns = ('Anti affinity', 'Cluster template id', 'Description', 'Id', 'Image', 'Is protected', 'Is public', 'Name', 'Neutron management network', 'Node groups', 'Plugin name', 'Plugin version', 'Status', 'Use autoconfig', 'User keypair id') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ('', 'ct_id', 'Cluster template for tests', 'cluster_id', 'img_id', False, False, 'fake', 'net_id', 'fakeng:2', 'fake', '0.1', 'Active', True, 'test') self.assertEqual(expected_data, data) def test_cluster_create_with_count(self): clusters_mock = mock.Mock() clusters_mock.to_dict.return_value = { 'clusters': [{'cluster': {'id': 'cluster1_id'}}, {'cluster': {'id': 'cluster2_id'}}] } self.cl_mock.create.return_value = clusters_mock arglist = ['--name', 'fake', '--cluster-template', 'template', '--image', 'ubuntu', '--count', '2'] verifylist = [('name', 'fake'), ('cluster_template', 'template'), ('image', 'ubuntu'), ('count', 2)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.cl_mock.create.assert_called_once_with( cluster_template_id='ct_id', count=2, default_image_id='img_id', description=None, plugin_version='0.1', is_protected=False, is_public=False, is_transient=False, name='fake', net_id=None, plugin_name='fake', user_keypair_id=None) # Check that columns are correct expected_columns = ('fake',) self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ('cluster_id',) self.assertEqual(expected_data, data) class TestListClusters(TestClusters): def setUp(self): super(TestListClusters, self).setUp() self.cl_mock.list.return_value = [api_cl.Cluster( None, CLUSTER_INFO)] # Command to test self.cmd = osc_cl.ListClusters(self.app, None) def test_clusters_list_no_options(self): arglist = [] verifylist = [] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version', 'Status'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('fake', 'cluster_id', 'fake', '0.1', 'Active')] self.assertEqual(expected_data, list(data)) def test_clusters_list_long(self): arglist = ['--long'] verifylist = [('long', True)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version', 'Status', 'Description', 'Image'] 
self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('fake', 'cluster_id', 'fake', '0.1', 'Active', 'Cluster template for tests', 'img_id')] self.assertEqual(expected_data, list(data)) def test_clusters_list_extra_search_opts(self): arglist = ['--plugin', 'fake', '--plugin-version', '0.1', '--name', 'fake'] verifylist = [('plugin', 'fake'), ('plugin_version', '0.1'), ('name', 'fake')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version', 'Status'] self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = [('fake', 'cluster_id', 'fake', '0.1', 'Active')] self.assertEqual(expected_data, list(data)) class TestShowCluster(TestClusters): def setUp(self): super(TestShowCluster, self).setUp() self.cl_mock.find_unique.return_value = api_cl.Cluster( None, CLUSTER_INFO) # Command to test self.cmd = osc_cl.ShowCluster(self.app, None) def test_cluster_show(self): arglist = ['fake'] verifylist = [('cluster', 'fake')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.cl_mock.find_unique.assert_called_once_with(name='fake') # Check that columns are correct expected_columns = ('Anti affinity', 'Cluster template id', 'Description', 'Id', 'Image', 'Is protected', 'Is public', 'Name', 'Neutron management network', 'Node groups', 'Plugin name', 'Plugin version', 'Status', 'Use autoconfig', 'User keypair id') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ('', 'ct_id', 'Cluster template for tests', 'cluster_id', 'img_id', False, False, 'fake', 'net_id', 'fakeng:2', 'fake', '0.1', 'Active', True, 'test') self.assertEqual(expected_data, data) def test_cluster_show_verification(self): arglist = ['fake', '--verification'] verifylist = [('cluster', 'fake'), ('verification', True)] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.cl_mock.find_unique.assert_called_once_with(name='fake') # Check that columns are correct expected_columns = ('Anti affinity', 'Cluster template id', 'Description', 'Health check (some check)', 'Id', 'Image', 'Is protected', 'Is public', 'Name', 'Neutron management network', 'Node groups', 'Plugin name', 'Plugin version', 'Status', 'Use autoconfig', 'User keypair id', 'Verification status') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ('', 'ct_id', 'Cluster template for tests', 'GREEN', 'cluster_id', 'img_id', False, False, 'fake', 'net_id', 'fakeng:2', 'fake', '0.1', 'Active', True, 'test', 'GREEN') self.assertEqual(expected_data, data) class TestDeleteCluster(TestClusters): def setUp(self): super(TestDeleteCluster, self).setUp() self.cl_mock.find_unique.return_value = api_cl.Cluster( None, CLUSTER_INFO) # Command to test self.cmd = osc_cl.DeleteCluster(self.app, None) def test_cluster_delete(self): arglist = ['fake'] verifylist = [('cluster', ['fake'])] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.cl_mock.delete.assert_called_once_with('cluster_id') class TestUpdateCluster(TestClusters): def setUp(self): super(TestUpdateCluster, self).setUp() 
class TestUpdateCluster(TestClusters):
    def setUp(self):
        super(TestUpdateCluster, self).setUp()
        self.cl_mock.update.return_value = mock.Mock(
            cluster=CLUSTER_INFO.copy())
        self.cl_mock.find_unique.return_value = api_cl.Cluster(
            None, CLUSTER_INFO)
        # Command to test
        self.cmd = osc_cl.UpdateCluster(self.app, None)

    def test_cluster_update_no_options(self):
        arglist = []
        verifylist = []
        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_cluster_update_nothing_updated(self):
        arglist = ['fake']
        verifylist = [('cluster', 'fake')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.cl_mock.update.assert_called_once_with('cluster_id')

    def test_cluster_update_all_options(self):
        arglist = ['fake', '--name', 'fake', '--description', 'descr',
                   '--public', '--protected']
        verifylist = [('cluster', 'fake'), ('name', 'fake'),
                      ('description', 'descr'), ('is_public', True),
                      ('is_protected', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.cl_mock.update.assert_called_once_with(
            'cluster_id', description='descr', is_protected=True,
            is_public=True, name='fake')
        # Check that columns are correct
        expected_columns = ('Anti affinity', 'Cluster template id',
                            'Description', 'Id', 'Image', 'Is protected',
                            'Is public', 'Name', 'Neutron management network',
                            'Node groups', 'Plugin name', 'Plugin version',
                            'Status', 'Use autoconfig', 'User keypair id')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ('', 'ct_id', 'Cluster template for tests',
                         'cluster_id', 'img_id', False, False, 'fake',
                         'net_id', 'fakeng:2', 'fake', '0.1', 'Active', True,
                         'test')
        self.assertEqual(expected_data, data)

    def test_cluster_update_private_unprotected(self):
        arglist = ['fake', '--private', '--unprotected']
        verifylist = [('cluster', 'fake'), ('is_public', False),
                      ('is_protected', False)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.cl_mock.update.assert_called_once_with(
            'cluster_id', is_protected=False, is_public=False)


class TestScaleCluster(TestClusters):
    def setUp(self):
        super(TestScaleCluster, self).setUp()
        self.cl_mock.scale.return_value = mock.Mock(
            cluster=CLUSTER_INFO.copy())
        self.cl_mock.find_unique.return_value = api_cl.Cluster(
            None, CLUSTER_INFO)
        # Command to test
        self.cmd = osc_cl.ScaleCluster(self.app, None)

    def test_cluster_scale_no_options(self):
        arglist = []
        verifylist = []
        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_cluster_scale_resize(self):
        self.ngt_mock.find_unique.return_value = api_ngt.NodeGroupTemplate(
            None, NGT_INFO)
        arglist = ['fake', '--instances', 'fakeng:1']
        verifylist = [('cluster', 'fake'), ('instances', ['fakeng:1'])]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.cl_mock.scale.assert_called_once_with(
            'cluster_id',
            {'resize_node_groups': [{'count': 1, 'name': 'fakeng'}]})
        # Check that columns are correct
        expected_columns = ('Anti affinity', 'Cluster template id',
                            'Description', 'Id', 'Image', 'Is protected',
                            'Is public', 'Name', 'Neutron management network',
                            'Node groups', 'Plugin name', 'Plugin version',
                            'Status', 'Use autoconfig', 'User keypair id')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ('', 'ct_id', 'Cluster template for tests',
                         'cluster_id', 'img_id', False, False, 'fake',
                         'net_id', 'fakeng:2', 'fake', '0.1', 'Active', True,
                         'test')
        self.assertEqual(expected_data, data)

    def test_cluster_scale_add_ng(self):
        new_ng = {'name': 'new', 'id': 'new_id'}
        self.ngt_mock.find_unique.return_value = api_ngt.NodeGroupTemplate(
            None, new_ng)
        arglist = ['fake', '--instances', 'new:1']
        verifylist = [('cluster', 'fake'), ('instances', ['new:1'])]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.cl_mock.scale.assert_called_once_with(
            'cluster_id',
            {'add_node_groups': [{'count': 1,
                                  'node_group_template_id': 'new_id',
                                  'name': 'new'}]})


class TestVerificationUpdateCluster(TestClusters):
    def setUp(self):
        super(TestVerificationUpdateCluster, self).setUp()
        self.cl_mock.find_unique.return_value = api_cl.Cluster(
            None, CLUSTER_INFO)
        self.cl_mock.verification_update.return_value = api_cl.Cluster(
            None, CLUSTER_INFO)
        # Command to test
        self.cmd = osc_cl.VerificationUpdateCluster(self.app, None)

    def test_verification_show(self):
        arglist = ['fake', '--show']
        verifylist = [('cluster', 'fake'), ('show', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.cl_mock.find_unique.assert_called_once_with(name='fake')
        # Check that columns are correct
        expected_columns = ('Health check (some check)',
                            'Verification status')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ('GREEN', 'GREEN')
        self.assertEqual(expected_data, data)

    def test_verification_start(self):
        arglist = ['fake', '--start']
        verifylist = [('cluster', 'fake'), ('status', 'START')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.cl_mock.verification_update.assert_called_once_with(
            'cluster_id', 'START')
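
# The scale tests above fix the payload shape that ``--instances`` specs of
# the form ``<node-group>:<count>`` must be turned into: existing groups go
# under 'resize_node_groups', new ones under 'add_node_groups' together with
# their template id.  A hedged sketch of that transformation (illustrative
# only, not the plugin's actual implementation):
def _build_scale_payload_sketch(instances, current_groups, template_ids):
    # instances: ['name:count', ...]; current_groups: node group names that
    # already exist in the cluster; template_ids: name -> template id map.
    payload = {}
    for entry in instances:
        name, count = entry.rsplit(':', 1)
        if name in current_groups:
            payload.setdefault('resize_node_groups', []).append(
                {'name': name, 'count': int(count)})
        else:
            payload.setdefault('add_node_groups', []).append(
                {'name': name, 'count': int(count),
                 'node_group_template_id': template_ids[name]})
    return payload
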
python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v1/0000775000175000017500000000000013643577103023414 5ustar zuulzuul00000000000000python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v1/test_images.py0000664000175000017500000003130013643576737026303 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import mock
from osc_lib.tests import utils as osc_utils

from saharaclient.api import images as api_images
from saharaclient.osc.v1 import images as osc_images
from saharaclient.tests.unit.osc.v1 import fakes


IMAGE_INFO = {'id': 'id', 'name': 'image', 'username': 'ubuntu',
              'status': "Active", 'tags': ['fake', '0.1'],
              'description': 'Image for tests'}


class TestImages(fakes.TestDataProcessing):
    def setUp(self):
        super(TestImages, self).setUp()
        self.image_mock = (
            self.app.client_manager.data_processing.images)
        self.image_mock.reset_mock()
        self.app.api_version['data_processing'] = '1'


class TestListImages(TestImages):
    def setUp(self):
        super(TestListImages, self).setUp()
        self.image_mock.list.return_value = [api_images.Image(
            None, IMAGE_INFO)]
        # Command to test
        self.cmd = osc_images.ListImages(self.app, None)

    def test_images_list_no_options(self):
        arglist = []
        verifylist = []
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Username', 'Tags']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = [('image', 'id', 'ubuntu', '0.1, fake')]
        self.assertEqual(expected_data, list(data))

    def test_images_list_long(self):
        arglist = ['--long']
        verifylist = [('long', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Username', 'Tags', 'Status',
                            'Description']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = [('image', 'id', 'ubuntu', '0.1, fake', 'Active',
                          'Image for tests')]
        self.assertEqual(expected_data, list(data))

    def test_images_list_successful_selection(self):
        arglist = ['--name', 'image', '--tags', 'fake', '--username',
                   'ubuntu']
        verifylist = [('name', 'image'), ('tags', ['fake']),
                      ('username', 'ubuntu')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.image_mock.list.assert_called_once_with(
            search_opts={'tags': ['fake']})
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Username', 'Tags']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = [('image', 'id', 'ubuntu', '0.1, fake')]
        self.assertEqual(expected_data, list(data))

    def test_images_list_with_name_nothing_selected(self):
        arglist = ['--name', 'img']
        verifylist = [('name', 'img')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Username', 'Tags']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = []
        self.assertEqual(expected_data, list(data))

    def test_images_list_with_username_nothing_selected(self):
        arglist = ['--username', 'fedora']
        verifylist = [('username', 'fedora')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Username', 'Tags']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = []
        self.assertEqual(expected_data, list(data))


class TestShowImage(TestImages):
    def setUp(self):
        super(TestShowImage, self).setUp()
        self.image_mock.find_unique.return_value = api_images.Image(
            None, IMAGE_INFO)
        # Command to test
        self.cmd = osc_images.ShowImage(self.app, None)

    def test_image_show_no_options(self):
        arglist = []
        verifylist = []
        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_image_show(self):
        arglist = ['image']
        verifylist = [('image', 'image')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.image_mock.find_unique.assert_called_once_with(name='image')
        # Check that columns are correct
        expected_columns = ('Description', 'Id', 'Name', 'Status', 'Tags',
                            'Username')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ['Image for tests', 'id', 'image', 'Active',
                         '0.1, fake', 'ubuntu']
        self.assertEqual(expected_data, list(data))


class TestRegisterImage(TestImages):
    def setUp(self):
        super(TestRegisterImage, self).setUp()
        self.image_mock.update_image.return_value = mock.Mock(
            image=IMAGE_INFO.copy())
        self.app.client_manager.image = mock.Mock()
        self.image_client = self.app.client_manager.image
        self.image_client.find_image.return_value = mock.Mock(id='id')
        # Command to test
        self.cmd = osc_images.RegisterImage(self.app, None)

    def test_image_register_without_username(self):
        arglist = ['id']
        verifylist = [('image', 'id')]
        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_image_register_required_options(self):
        arglist = ['id', '--username', 'ubuntu']
        verifylist = [('image', 'id'), ('username', 'ubuntu')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.image_mock.update_image.assert_called_once_with(
            'id', desc=None, user_name='ubuntu')
        # Check that columns are correct
        expected_columns = ('Description', 'Id', 'Name', 'Status', 'Tags',
                            'Username')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ['Image for tests', 'id', 'image', 'Active',
                         '0.1, fake', 'ubuntu']
        self.assertEqual(expected_data, list(data))

    def test_image_register_all_options(self):
        arglist = ['id', '--username', 'ubuntu', '--description', 'descr']
        verifylist = [('image', 'id'), ('username', 'ubuntu'),
                      ('description', 'descr')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.image_mock.update_image.assert_called_once_with(
            'id', desc='descr', user_name='ubuntu')


class TestUnregisterImage(TestImages):
    def setUp(self):
        super(TestUnregisterImage, self).setUp()
        self.image_mock.find_unique.return_value = api_images.Image(
            None, IMAGE_INFO)
        # Command to test
        self.cmd = osc_images.UnregisterImage(self.app, None)

    def test_image_unregister_no_options(self):
        arglist = []
        verifylist = []
        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_image_unregister(self):
        arglist = ['image']
        verifylist = [('image', ['image'])]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.image_mock.find_unique.assert_called_once_with(name='image')
        self.image_mock.unregister_image.assert_called_once_with('id')


class TestSetImageTags(TestImages):
    def setUp(self):
        super(TestSetImageTags, self).setUp()
        image_info = IMAGE_INFO.copy()
        image_info['tags'] = []
        self.image_mock.find_unique.return_value = api_images.Image(
            None, image_info)
        self.image_mock.update_tags.return_value = api_images.Image(
            None, image_info)
        # Command to test
        self.cmd = osc_images.SetImageTags(self.app, None)

    def test_image_tags_set_without_tags(self):
        arglist = ['id']
        verifylist = [('image', 'id')]
        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_image_tags_set(self):
        arglist = ['image', '--tags', 'fake', '0.1']
        verifylist = [('image', 'image'), ('tags', ['fake', '0.1'])]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.image_mock.find_unique.assert_called_with(name='image')
        self.image_mock.update_tags.assert_called_once_with(
            'id', ['fake', '0.1'])


class TestAddImageTags(TestImages):
    def setUp(self):
        super(TestAddImageTags, self).setUp()
        image_info = IMAGE_INFO.copy()
        image_info['tags'] = []
        self.image_mock.update_tags.return_value = api_images.Image(
            None, image_info)
        self.image_mock.find_unique.return_value = api_images.Image(
            None, image_info)
        # Command to test
        self.cmd = osc_images.AddImageTags(self.app, None)

    def test_image_tags_add_without_tags(self):
        arglist = ['id']
        verifylist = [('image', 'id')]
        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_image_tags_add(self):
        arglist = ['image', '--tags', 'fake']
        verifylist = [('image', 'image'), ('tags', ['fake'])]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.image_mock.find_unique.assert_called_with(name='image')
        self.image_mock.update_tags.assert_called_once_with(
            'id', ['fake'])


class TestRemoveImageTags(TestImages):
    def setUp(self):
        super(TestRemoveImageTags, self).setUp()
        self.image_mock.update_tags.return_value = api_images.Image(
            None, IMAGE_INFO)
        self.image_mock.find_unique.return_value = api_images.Image(
            None, IMAGE_INFO)
        # Command to test
        self.cmd = osc_images.RemoveImageTags(self.app, None)

    def test_image_tags_remove_both_options(self):
        arglist = ['id', '--all', '--tags', 'fake']
        verifylist = [('image', 'id'), ('all', True), ('tags', ['fake'])]
        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_image_tags_remove(self):
        arglist = ['image', '--tags', 'fake']
        verifylist = [('image', 'image'), ('tags', ['fake'])]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.image_mock.find_unique.assert_called_with(name='image')
        self.image_mock.update_tags.assert_called_once_with(
            'id', ['0.1'])

    def test_image_tags_remove_all(self):
        arglist = ['image', '--all']
        verifylist = [('image', 'image'), ('all', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.image_mock.find_unique.assert_called_with(name='image')
        self.image_mock.update_tags.assert_called_once_with(
            'id', [])
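
# TestRemoveImageTags above encodes the tag-removal contract: removing
# ['fake'] from an image tagged ['fake', '0.1'] must call update_tags with
# the remaining ['0.1'], and --all must call it with [].  A tiny sketch of
# that computation (an illustrative assumption, not the plugin source):
def _remaining_tags_sketch(current_tags, tags_to_remove=None,
                           remove_all=False):
    if remove_all:
        return []
    tags_to_remove = tags_to_remove or []
    return [tag for tag in current_tags if tag not in tags_to_remove]
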
python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v1/fakes.py0000664000175000017500000000162413643576737025076 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import mock
from osc_lib.tests import utils


class TestDataProcessing(utils.TestCommand):
    def setUp(self):
        super(TestDataProcessing, self).setUp()
        self.app.client_manager.data_processing = mock.Mock()
        self.app.client_manager.network = mock.Mock()
        self.app.client_manager.compute = mock.Mock()
python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v1/test_node_group_templates.py0000664000175000017500000004172413643576737031260 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import mock
from osc_lib.tests import utils as osc_utils

from saharaclient.api import node_group_templates as api_ngt
from saharaclient.osc.v1 import node_group_templates as osc_ngt
from saharaclient.tests.unit.osc.v1 import fakes


NGT_INFO = {
    "node_processes": [
        "namenode",
        "tasktracker"
    ],
    "name": "template",
    "tenant_id": "tenant_id",
    "availability_zone": 'av_zone',
    "use_autoconfig": True,
    "hadoop_version": "0.1",
    "shares": None,
    "is_default": False,
    "description": 'description',
    "node_configs": {},
    "is_proxy_gateway": False,
    "auto_security_group": True,
    "volume_type": None,
    "volumes_size": 2,
    "volume_mount_prefix": "/volumes/disk",
    "plugin_name": "fake",
    "is_protected": False,
    "security_groups": None,
    "floating_ip_pool": "floating_pool",
    "is_public": True,
    "id": "ng_id",
    "flavor_id": "flavor_id",
    "volumes_availability_zone": None,
    "volumes_per_node": 2,
    "volume_local_to_instance": False
}


class TestNodeGroupTemplates(fakes.TestDataProcessing):
    def setUp(self):
        super(TestNodeGroupTemplates, self).setUp()
        self.ngt_mock = (
            self.app.client_manager.data_processing.node_group_templates)
        self.ngt_mock.reset_mock()
        self.app.api_version['data_processing'] = '1'


class TestCreateNodeGroupTemplate(TestNodeGroupTemplates):
    # TODO(apavlov): check for creation with --json
    def setUp(self):
        super(TestCreateNodeGroupTemplate, self).setUp()
        self.ngt_mock.create.return_value = api_ngt.NodeGroupTemplate(
            None, NGT_INFO)
        self.fl_mock = self.app.client_manager.compute.flavors
        self.fl_mock.get.return_value = mock.Mock(id='flavor_id')
        self.fl_mock.reset_mock()
        # Command to test
        self.cmd = osc_ngt.CreateNodeGroupTemplate(self.app, None)

    def test_ngt_create_minimum_options(self):
        arglist = ['--name', 'template', '--plugin', 'fake',
                   '--plugin-version', '0.1', '--processes', 'namenode',
                   'tasktracker', '--flavor', 'flavor_id']
        verifylist = [('name', 'template'), ('plugin', 'fake'),
                      ('plugin_version', '0.1'), ('flavor', 'flavor_id'),
                      ('processes', ['namenode', 'tasktracker'])]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.ngt_mock.create.assert_called_once_with(
            auto_security_group=False, availability_zone=None,
            description=None, flavor_id='flavor_id', floating_ip_pool=None,
            hadoop_version='0.1', is_protected=False, is_proxy_gateway=False,
            is_public=False, name='template',
            node_processes=['namenode', 'tasktracker'], plugin_name='fake',
            security_groups=None, use_autoconfig=False,
            volume_local_to_instance=False, volume_type=None,
            volumes_availability_zone=None, volumes_per_node=None,
            volumes_size=None, shares=None, node_configs=None,
            volume_mount_prefix=None)

    def test_ngt_create_all_options(self):
        arglist = ['--name', 'template', '--plugin', 'fake',
                   '--plugin-version', '0.1', '--processes', 'namenode',
                   'tasktracker', '--security-groups', 'secgr',
                   '--auto-security-group', '--availability-zone', 'av_zone',
                   '--flavor', 'flavor_id', '--floating-ip-pool',
                   'floating_pool', '--volumes-per-node', '2',
                   '--volumes-size', '2', '--volumes-type', 'type',
                   '--volumes-availability-zone', 'vavzone',
                   '--volumes-mount-prefix', '/volume/asd',
                   '--volumes-locality', '--description', 'descr',
                   '--autoconfig', '--proxy-gateway', '--public',
                   '--protected']
        verifylist = [('name', 'template'), ('plugin', 'fake'),
                      ('plugin_version', '0.1'),
                      ('processes', ['namenode', 'tasktracker']),
                      ('security_groups', ['secgr']),
                      ('auto_security_group', True),
                      ('availability_zone', 'av_zone'),
                      ('flavor', 'flavor_id'),
                      ('floating_ip_pool', 'floating_pool'),
                      ('volumes_per_node', 2), ('volumes_size', 2),
                      ('volumes_type', 'type'),
                      ('volumes_availability_zone', 'vavzone'),
                      ('volumes_mount_prefix', '/volume/asd'),
                      ('volumes_locality', True), ('description', 'descr'),
                      ('autoconfig', True), ('proxy_gateway', True),
                      ('public', True), ('protected', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.ngt_mock.create.assert_called_once_with(
            auto_security_group=True, availability_zone='av_zone',
            description='descr', flavor_id='flavor_id',
            floating_ip_pool='floating_pool', hadoop_version='0.1',
            is_protected=True, is_proxy_gateway=True, is_public=True,
            name='template', node_processes=['namenode', 'tasktracker'],
            plugin_name='fake', security_groups=['secgr'],
            use_autoconfig=True, volume_local_to_instance=True,
            volume_type='type', volumes_availability_zone='vavzone',
            volumes_per_node=2, volumes_size=2, shares=None,
            node_configs=None, volume_mount_prefix='/volume/asd')
        # Check that columns are correct
        expected_columns = (
            'Auto security group', 'Availability zone', 'Description',
            'Flavor id', 'Floating ip pool', 'Id', 'Is default',
            'Is protected', 'Is proxy gateway', 'Is public', 'Name',
            'Node processes', 'Plugin name', 'Plugin version',
            'Security groups', 'Use autoconfig', 'Volume local to instance',
            'Volume mount prefix', 'Volume type',
            'Volumes availability zone', 'Volumes per node', 'Volumes size')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = (
            True, 'av_zone', 'description', 'flavor_id', 'floating_pool',
            'ng_id', False, False, False, True, 'template',
            'namenode, tasktracker', 'fake', '0.1', None, True, False,
            '/volumes/disk', None, None, 2, 2)
        self.assertEqual(expected_data, data)


class TestListNodeGroupTemplates(TestNodeGroupTemplates):
    def setUp(self):
        super(TestListNodeGroupTemplates, self).setUp()
        self.ngt_mock.list.return_value = [api_ngt.NodeGroupTemplate(
            None, NGT_INFO)]
        # Command to test
        self.cmd = osc_ngt.ListNodeGroupTemplates(self.app, None)

    def test_ngt_list_no_options(self):
        arglist = []
        verifylist = []
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = [('template', 'ng_id', 'fake', '0.1')]
        self.assertEqual(expected_data, list(data))

    def test_ngt_list_long(self):
        arglist = ['--long']
        verifylist = [('long', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version',
                            'Node processes', 'Description']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = [('template', 'ng_id', 'fake', '0.1',
                          'namenode, tasktracker', 'description')]
        self.assertEqual(expected_data, list(data))

    def test_ngt_list_extra_search_opts(self):
        arglist = ['--plugin', 'fake', '--plugin-version', '0.1', '--name',
                   'templ']
        verifylist = [('plugin', 'fake'), ('plugin_version', '0.1'),
                      ('name', 'templ')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = [('template', 'ng_id', 'fake', '0.1')]
        self.assertEqual(expected_data, list(data))


class TestShowNodeGroupTemplate(TestNodeGroupTemplates):
    def setUp(self):
        super(TestShowNodeGroupTemplate, self).setUp()
        self.ngt_mock.find_unique.return_value = api_ngt.NodeGroupTemplate(
            None, NGT_INFO)
        # Command to test
        self.cmd = osc_ngt.ShowNodeGroupTemplate(self.app, None)

    def test_ngt_show(self):
        arglist = ['template']
        verifylist = [('node_group_template', 'template')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.ngt_mock.find_unique.assert_called_once_with(name='template')
        # Check that columns are correct
        expected_columns = (
            'Auto security group', 'Availability zone', 'Description',
            'Flavor id', 'Floating ip pool', 'Id', 'Is default',
            'Is protected', 'Is proxy gateway', 'Is public', 'Name',
            'Node processes', 'Plugin name', 'Plugin version',
            'Security groups', 'Use autoconfig', 'Volume local to instance',
            'Volume mount prefix', 'Volume type',
            'Volumes availability zone', 'Volumes per node', 'Volumes size')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = (
            True, 'av_zone', 'description', 'flavor_id', 'floating_pool',
            'ng_id', False, False, False, True, 'template',
            'namenode, tasktracker', 'fake', '0.1', None, True, False,
            '/volumes/disk', None, None, 2, 2)
        self.assertEqual(expected_data, data)


class TestDeleteNodeGroupTemplate(TestNodeGroupTemplates):
    def setUp(self):
        super(TestDeleteNodeGroupTemplate, self).setUp()
        self.ngt_mock.find_unique.return_value = api_ngt.NodeGroupTemplate(
            None, NGT_INFO)
        # Command to test
        self.cmd = osc_ngt.DeleteNodeGroupTemplate(self.app, None)

    def test_ngt_delete(self):
        arglist = ['template']
        verifylist = [('node_group_template', ['template'])]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.ngt_mock.delete.assert_called_once_with('ng_id')
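
# The update tests below (and TestUtils further down this archive) rely on
# one convention: only options the user actually supplied are forwarded, so
# None means "leave unchanged" while False is a real value.  A sketch of
# that filtering, mirroring the behaviour asserted for
# utils.create_dict_from_kwargs (illustrative, not the shipped helper):
def _filtered_kwargs_sketch(**kwargs):
    return {key: value for key, value in kwargs.items() if value is not None}
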
class TestUpdateNodeGroupTemplate(TestNodeGroupTemplates):
    # TODO(apavlov): check for update with --json
    def setUp(self):
        super(TestUpdateNodeGroupTemplate, self).setUp()
        self.ngt_mock.find_unique.return_value = api_ngt.NodeGroupTemplate(
            None, NGT_INFO)
        self.ngt_mock.update.return_value = api_ngt.NodeGroupTemplate(
            None, NGT_INFO)
        self.fl_mock = self.app.client_manager.compute.flavors
        self.fl_mock.get.return_value = mock.Mock(id='flavor_id')
        self.fl_mock.reset_mock()
        # Command to test
        self.cmd = osc_ngt.UpdateNodeGroupTemplate(self.app, None)

    def test_ngt_update_no_options(self):
        arglist = []
        verifylist = []
        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_ngt_update_nothing_updated(self):
        arglist = ['template']
        verifylist = [('node_group_template', 'template')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.ngt_mock.update.assert_called_once_with('ng_id')

    def test_ngt_update_all_options(self):
        arglist = ['template', '--name', 'template', '--plugin', 'fake',
                   '--plugin-version', '0.1', '--processes', 'namenode',
                   'tasktracker', '--security-groups', 'secgr',
                   '--auto-security-group-enable', '--availability-zone',
                   'av_zone', '--flavor', 'flavor_id', '--floating-ip-pool',
                   'floating_pool', '--volumes-per-node', '2',
                   '--volumes-size', '2', '--volumes-type', 'type',
                   '--volumes-availability-zone', 'vavzone',
                   '--volumes-mount-prefix', '/volume/asd',
                   '--volumes-locality-enable', '--description', 'descr',
                   '--autoconfig-enable', '--proxy-gateway-enable',
                   '--public', '--protected']
        verifylist = [('node_group_template', 'template'),
                      ('name', 'template'), ('plugin', 'fake'),
                      ('plugin_version', '0.1'),
                      ('processes', ['namenode', 'tasktracker']),
                      ('security_groups', ['secgr']),
                      ('use_auto_security_group', True),
                      ('availability_zone', 'av_zone'),
                      ('flavor', 'flavor_id'),
                      ('floating_ip_pool', 'floating_pool'),
                      ('volumes_per_node', 2), ('volumes_size', 2),
                      ('volumes_type', 'type'),
                      ('volumes_availability_zone', 'vavzone'),
                      ('volumes_mount_prefix', '/volume/asd'),
                      ('volume_locality', True), ('description', 'descr'),
                      ('use_autoconfig', True), ('is_proxy_gateway', True),
                      ('is_public', True), ('is_protected', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.ngt_mock.update.assert_called_once_with(
            'ng_id',
            auto_security_group=True, availability_zone='av_zone',
            description='descr', flavor_id='flavor_id',
            floating_ip_pool='floating_pool', hadoop_version='0.1',
            is_protected=True, is_proxy_gateway=True, is_public=True,
            name='template', node_processes=['namenode', 'tasktracker'],
            plugin_name='fake', security_groups=['secgr'],
            use_autoconfig=True, volume_local_to_instance=True,
            volume_type='type', volumes_availability_zone='vavzone',
            volumes_per_node=2, volumes_size=2,
            volume_mount_prefix='/volume/asd')
        # Check that columns are correct
        expected_columns = (
            'Auto security group', 'Availability zone', 'Description',
            'Flavor id', 'Floating ip pool', 'Id', 'Is default',
            'Is protected', 'Is proxy gateway', 'Is public', 'Name',
            'Node processes', 'Plugin name', 'Plugin version',
            'Security groups', 'Use autoconfig', 'Volume local to instance',
            'Volume mount prefix', 'Volume type',
            'Volumes availability zone', 'Volumes per node', 'Volumes size')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = (
            True, 'av_zone', 'description', 'flavor_id', 'floating_pool',
            'ng_id', False, False, False, True, 'template',
            'namenode, tasktracker', 'fake', '0.1', None, True, False,
            '/volumes/disk', None, None, 2, 2)
        self.assertEqual(expected_data, data)

    def test_ngt_update_private_unprotected(self):
        arglist = ['template', '--private', '--unprotected']
        verifylist = [('node_group_template', 'template'),
                      ('is_public', False), ('is_protected', False)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.ngt_mock.update.assert_called_once_with(
            'ng_id', is_protected=False, is_public=False)
python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v1/test_utils.py0000664000175000017500000000650513643576737026203 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import mock

from saharaclient.osc import utils
from saharaclient.tests.unit import base


class TestUtils(base.BaseTestCase):
    def test_prepare_data(self):
        data = {'id': '123', 'name_of_res': 'name', 'description': 'descr'}
        fields = ['id', 'name_of_res', 'description']
        expected_data = {'Description': 'descr', 'Id': '123',
                         'Name of res': 'name'}
        self.assertEqual(expected_data, utils.prepare_data(data, fields))
        fields = ['id', 'name_of_res']
        expected_data = {'Id': '123', 'Name of res': 'name'}
        self.assertEqual(expected_data, utils.prepare_data(data, fields))
        fields = ['name_of_res']
        expected_data = {'Name of res': 'name'}
        self.assertEqual(expected_data, utils.prepare_data(data, fields))

    def test_get_resource(self):
        manager = mock.Mock()
        # check case when resource id is passed
        uuid = '82065b4d-2c79-420d-adc3-310de275e922'
        utils.get_resource(manager, uuid)
        manager.get.assert_called_once_with(uuid)
        # check case when resource name is passed
        utils.get_resource(manager, 'name')
        manager.find_unique.assert_called_once_with(name='name')

    def test_get_resource_id(self):
        manager = mock.Mock()
        uuid = '82065b4d-2c79-420d-adc3-310de275e922'
        manager.find_unique.return_value = mock.Mock(id=uuid)
        # check case when resource id is passed
        res = utils.get_resource_id(manager, uuid)
        self.assertEqual(uuid, res)
        manager.get.assert_not_called()
        manager.find_unique.assert_not_called()
        # check case when resource name is passed
        res = utils.get_resource_id(manager, 'name')
        manager.find_unique.assert_called_once_with(name='name')
        self.assertEqual(uuid, res)

    def test_create_dict_from_kwargs(self):
        dict1 = utils.create_dict_from_kwargs(first='1', second=2)
        self.assertEqual({'first': '1', 'second': 2}, dict1)
        dict2 = utils.create_dict_from_kwargs(first='1', second=None)
        self.assertEqual({'first': '1'}, dict2)
        dict3 = utils.create_dict_from_kwargs(first='1', second=False)
        self.assertEqual({'first': '1', 'second': False}, dict3)

    def test_prepare_column_headers(self):
        columns1 = ['first', 'second_column']
        self.assertEqual(
            ['First', 'Second column'],
            utils.prepare_column_headers(columns1))
        columns2 = ['First', 'Second column']
        self.assertEqual(
            ['First', 'Second column'],
            utils.prepare_column_headers(columns2))
        columns3 = ['first', 'second_column']
        self.assertEqual(
            ['First', 'Second'],
            utils.prepare_column_headers(
                columns3, remap={'second_column': 'second'}))
python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v1/test_data_sources.py0000664000175000017500000003140113643576737027514 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import mock
from osc_lib.tests import utils as osc_utils
import testtools

from saharaclient.api import data_sources as api_ds
from saharaclient.osc.v1 import data_sources as osc_ds
from saharaclient.tests.unit.osc.v1 import fakes


DS_INFO = {'id': 'id', 'name': 'source', 'type': 'swift',
           'url': 'swift://container.sahara/object',
           'description': 'Data Source for tests',
           'is_public': True, 'is_protected': True}


class TestDataSources(fakes.TestDataProcessing):
    def setUp(self):
        super(TestDataSources, self).setUp()
        self.ds_mock = (
            self.app.client_manager.data_processing.data_sources)
        self.ds_mock.reset_mock()
        self.app.api_version['data_processing'] = '1'


class TestCreateDataSource(TestDataSources):
    def setUp(self):
        super(TestCreateDataSource, self).setUp()
        self.ds_mock.create.return_value = api_ds.DataSources(
            None, DS_INFO)
        # Command to test
        self.cmd = osc_ds.CreateDataSource(self.app, None)

    def test_data_sources_create_no_options(self):
        arglist = []
        verifylist = []
        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_data_sources_create_required_options(self):
        arglist = ['source', '--type', 'swift', '--url',
                   'swift://container.sahara/object']
        verifylist = [('name', 'source'), ('type', 'swift'),
                      ('url', 'swift://container.sahara/object')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that the data source was created with correct arguments
        called_args = {'credential_pass': None, 'credential_user': None,
                       'data_source_type': 'swift', 'name': 'source',
                       'description': '',
                       'url': 'swift://container.sahara/object',
                       'is_public': False, 'is_protected': False,
                       's3_credentials': None}
        self.ds_mock.create.assert_called_once_with(**called_args)
        # Check that columns are correct
        expected_columns = ('Description', 'Id', 'Is protected',
                            'Is public', 'Name', 'Type', 'Url')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ('Data Source for tests', 'id', True, True,
                         'source', 'swift',
                         'swift://container.sahara/object')
        self.assertEqual(expected_data, data)

    def test_data_sources_create_all_options(self):
        arglist = ['source', '--type', 'swift', '--url',
                   'swift://container.sahara/object', '--username', 'user',
                   '--password', 'pass', '--description',
                   'Data Source for tests', '--public', '--protected']
        verifylist = [('name', 'source'), ('type', 'swift'),
                      ('url', 'swift://container.sahara/object'),
                      ('username', 'user'), ('password', 'pass'),
                      ('description', 'Data Source for tests'),
                      ('public', True), ('protected', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that the data source was created with correct arguments
        called_args = {'credential_pass': 'pass', 'credential_user': 'user',
                       'data_source_type': 'swift', 'name': 'source',
                       'description': 'Data Source for tests',
                       'url': 'swift://container.sahara/object',
                       'is_protected': True, 'is_public': True,
                       's3_credentials': None}
        self.ds_mock.create.assert_called_once_with(**called_args)
        # Check that columns are correct
        expected_columns = ('Description', 'Id', 'Is protected',
                            'Is public', 'Name', 'Type', 'Url')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ('Data Source for tests', 'id', True, True,
                         'source', 'swift',
                         'swift://container.sahara/object')
        self.assertEqual(expected_data, data)

    def test_data_source_create_mutual_exclusion(self):
        arglist = ['data-source', '--name', 'data-source',
                   '--access-key', 'ak', '--secret-key', 'sk',
                   '--url', 's3a://abc/def', '--password', 'pw']
        with testtools.ExpectedException(osc_utils.ParserException):
            self.check_parser(self.cmd, arglist, mock.Mock())


class TestListDataSources(TestDataSources):
    def setUp(self):
        super(TestListDataSources, self).setUp()
        self.ds_mock.list.return_value = [api_ds.DataSources(
            None, DS_INFO)]
        # Command to test
        self.cmd = osc_ds.ListDataSources(self.app, None)

    def test_data_sources_list_no_options(self):
        arglist = []
        verifylist = []
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Type']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = [('source', 'id', 'swift')]
        self.assertEqual(expected_data, list(data))

    def test_data_sources_list_long(self):
        arglist = ['--long']
        verifylist = [('long', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Type', 'Url', 'Description',
                            'Is public', 'Is protected']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = [('source', 'id', 'swift',
                          'swift://container.sahara/object',
                          'Data Source for tests', True, True)]
        self.assertEqual(expected_data, list(data))


class TestShowDataSource(TestDataSources):
    def setUp(self):
        super(TestShowDataSource, self).setUp()
        self.ds_mock.find_unique.return_value = api_ds.DataSources(
            None, DS_INFO)
        # Command to test
        self.cmd = osc_ds.ShowDataSource(self.app, None)

    def test_data_sources_show(self):
        arglist = ['source']
        verifylist = [('data_source', 'source')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.ds_mock.find_unique.assert_called_once_with(name='source')
        # Check that columns are correct
        expected_columns = ('Description', 'Id', 'Is protected',
                            'Is public', 'Name', 'Type', 'Url')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ['Data Source for tests', 'id', True, True,
                         'source', 'swift',
                         'swift://container.sahara/object']
        self.assertEqual(expected_data, list(data))


class TestDeleteDataSource(TestDataSources):
    def setUp(self):
        super(TestDeleteDataSource, self).setUp()
        self.ds_mock.find_unique.return_value = api_ds.DataSources(
            None, DS_INFO)
        # Command to test
        self.cmd = osc_ds.DeleteDataSource(self.app, None)
    def test_data_sources_delete(self):
        arglist = ['source']
        verifylist = [('data_source', ['source'])]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.ds_mock.delete.assert_called_once_with('id')


class TestUpdateDataSource(TestDataSources):
    def setUp(self):
        super(TestUpdateDataSource, self).setUp()
        self.ds_mock.find_unique.return_value = api_ds.DataSources(
            None, DS_INFO)
        self.ds_mock.update.return_value = mock.Mock(
            data_source=DS_INFO)
        # Command to test
        self.cmd = osc_ds.UpdateDataSource(self.app, None)

    def test_data_sources_update_no_options(self):
        arglist = []
        verifylist = []
        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_data_sources_update_nothing_updated(self):
        arglist = ['source']
        verifylist = [('data_source', 'source')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        self.ds_mock.update.assert_called_once_with('id', {})

    def test_data_sources_update_required_options(self):
        arglist = ['source']
        verifylist = [('data_source', 'source')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that the data source was updated with correct arguments
        self.ds_mock.update.assert_called_once_with('id', {})
        # Check that columns are correct
        expected_columns = ('Description', 'Id', 'Is protected',
                            'Is public', 'Name', 'Type', 'Url')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ('Data Source for tests', 'id', True, True,
                         'source', 'swift',
                         'swift://container.sahara/object')
        self.assertEqual(expected_data, data)

    def test_data_sources_update_all_options(self):
        arglist = ['source', '--name', 'source', '--type', 'swift',
                   '--url', 'swift://container.sahara/object', '--username',
                   'user', '--password', 'pass', '--description',
                   'Data Source for tests', '--public', '--protected']
        verifylist = [('data_source', 'source'), ('name', 'source'),
                      ('type', 'swift'),
                      ('url', 'swift://container.sahara/object'),
                      ('username', 'user'), ('password', 'pass'),
                      ('description', 'Data Source for tests'),
                      ('is_public', True), ('is_protected', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that the data source was updated with correct arguments
        self.ds_mock.update.assert_called_once_with(
            'id', {'name': 'source',
                   'url': 'swift://container.sahara/object',
                   'is_protected': True,
                   'credentials': {'password': 'pass', 'user': 'user'},
                   'is_public': True, 'type': 'swift',
                   'description': 'Data Source for tests'})
        # Check that columns are correct
        expected_columns = ('Description', 'Id', 'Is protected',
                            'Is public', 'Name', 'Type', 'Url')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ('Data Source for tests', 'id', True, True,
                         'source', 'swift',
                         'swift://container.sahara/object')
        self.assertEqual(expected_data, data)

    def test_data_sources_update_private_unprotected(self):
        arglist = ['source', '--private', '--unprotected']
        verifylist = [('data_source', 'source'), ('is_public', False),
                      ('is_protected', False)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that the data source was updated with correct arguments
        self.ds_mock.update.assert_called_once_with(
            'id', {'is_public': False, 'is_protected': False})

    def test_data_source_update_mutual_exclusion(self):
        arglist = ['data-source', '--name', 'data-source',
                   '--access-key', 'ak', '--secret-key', 'sk',
                   '--url', 's3a://abc/def', '--password', 'pw']
        with testtools.ExpectedException(osc_utils.ParserException):
            self.check_parser(self.cmd, arglist, mock.Mock())
python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v1/test_job_templates.py0000664000175000017500000002406713643576737027675 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import mock
from osc_lib.tests import utils as osc_utils

from saharaclient.api import jobs as api_j
from saharaclient.osc.v1 import job_templates as osc_j
from saharaclient.tests.unit.osc.v1 import fakes


JOB_INFO = {
    "is_public": False,
    "id": "job_id",
    "name": "pig-job",
    "description": "Job for test",
    "interface": [],
    "libs": [
        {
            "id": "lib_id",
            "name": "lib"
        }
    ],
    "type": "Pig",
    "is_protected": False,
    "mains": [
        {
            "id": "main_id",
            "name": "main"
        }
    ]
}


class TestJobTemplates(fakes.TestDataProcessing):
    def setUp(self):
        super(TestJobTemplates, self).setUp()
        self.job_mock = self.app.client_manager.data_processing.jobs
        self.job_mock.reset_mock()
        self.app.api_version['data_processing'] = '1'


class TestCreateJobTemplate(TestJobTemplates):
    # TODO(apavlov): check for creation with --interface
    def setUp(self):
        super(TestCreateJobTemplate, self).setUp()
        self.job_mock.create.return_value = api_j.Job(
            None, JOB_INFO)
        self.jb_mock = self.app.client_manager.data_processing.job_binaries
        self.jb_mock.find_unique.return_value = mock.Mock(id='jb_id')
        self.jb_mock.reset_mock()
        # Command to test
        self.cmd = osc_j.CreateJobTemplate(self.app, None)

    def test_job_template_create_minimum_options(self):
        arglist = ['--name', 'pig-job', '--type', 'Pig']
        verifylist = [('name', 'pig-job'), ('type', 'Pig')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.job_mock.create.assert_called_once_with(
            description=None, interface=None, is_protected=False,
            is_public=False, libs=None, mains=None, name='pig-job',
            type='Pig')

    def test_job_template_create_all_options(self):
        arglist = ['--name', 'pig-job', '--type', 'Pig', '--mains', 'main',
                   '--libs', 'lib', '--description', 'descr', '--public',
                   '--protected']
        verifylist = [('name', 'pig-job'), ('type', 'Pig'),
                      ('mains', ['main']), ('libs', ['lib']),
                      ('description', 'descr'), ('public', True),
                      ('protected', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.job_mock.create.assert_called_once_with(
            description='descr', interface=None, is_protected=True,
            is_public=True, libs=['jb_id'], mains=['jb_id'],
            name='pig-job', type='Pig')
        # Check that columns are correct
        expected_columns = ('Description', 'Id', 'Is protected',
                            'Is public', 'Libs', 'Mains', 'Name', 'Type')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ('Job for test', 'job_id', False, False,
                         'lib:lib_id', 'main:main_id', 'pig-job', 'Pig')
        self.assertEqual(expected_data, data)


class TestListJobTemplates(TestJobTemplates):
    def setUp(self):
        super(TestListJobTemplates, self).setUp()
        self.job_mock.list.return_value = [api_j.Job(
            None, JOB_INFO)]
        # Command to test
        self.cmd = osc_j.ListJobTemplates(self.app, None)

    def test_job_templates_list_no_options(self):
        arglist = []
        verifylist = []
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Type']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = [('pig-job', 'job_id', 'Pig')]
        self.assertEqual(expected_data, list(data))

    def test_job_template_list_long(self):
        arglist = ['--long']
        verifylist = [('long', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Type', 'Description',
                            'Is public', 'Is protected']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = [('pig-job', 'job_id', 'Pig', 'Job for test',
                          False, False)]
        self.assertEqual(expected_data, list(data))

    def test_job_template_list_extra_search_opts(self):
        arglist = ['--type', 'Pig', '--name', 'pig']
        verifylist = [('type', 'Pig'), ('name', 'pig')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Type']
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = [('pig-job', 'job_id', 'Pig')]
        self.assertEqual(expected_data, list(data))


class TestShowJobTemplate(TestJobTemplates):
    def setUp(self):
        super(TestShowJobTemplate, self).setUp()
        self.job_mock.find_unique.return_value = api_j.Job(
            None, JOB_INFO)
        # Command to test
        self.cmd = osc_j.ShowJobTemplate(self.app, None)

    def test_job_template_show(self):
        arglist = ['pig-job']
        verifylist = [('job_template', 'pig-job')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.job_mock.find_unique.assert_called_once_with(name='pig-job')
        # Check that columns are correct
        expected_columns = ('Description', 'Id', 'Is protected',
                            'Is public', 'Libs', 'Mains', 'Name', 'Type')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ('Job for test', 'job_id', False, False,
                         'lib:lib_id', 'main:main_id', 'pig-job', 'Pig')
        self.assertEqual(expected_data, data)


class TestDeleteJobTemplate(TestJobTemplates):
    def setUp(self):
        super(TestDeleteJobTemplate, self).setUp()
        self.job_mock.find_unique.return_value = api_j.Job(
            None, JOB_INFO)
        # Command to test
        self.cmd = osc_j.DeleteJobTemplate(self.app, None)

    def test_job_template_delete(self):
        arglist = ['pig-job']
        verifylist = [('job_template', ['pig-job'])]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.job_mock.delete.assert_called_once_with('job_id')


class TestUpdateJobTemplate(TestJobTemplates):
    def setUp(self):
        super(TestUpdateJobTemplate, self).setUp()
        self.job_mock.find_unique.return_value = api_j.Job(None, JOB_INFO)
        self.job_mock.update.return_value = mock.Mock(job=JOB_INFO.copy())
        # Command to test
        self.cmd = osc_j.UpdateJobTemplate(self.app, None)

    def test_job_template_update_no_options(self):
        arglist = []
        verifylist = []
        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_job_template_update_nothing_updated(self):
        arglist = ['pig-job']
        verifylist = [('job_template', 'pig-job')]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.job_mock.update.assert_called_once_with('job_id')

    def test_job_template_update_all_options(self):
        arglist = ['pig-job', '--name', 'pig-job', '--description', 'descr',
                   '--public', '--protected']
        verifylist = [('job_template', 'pig-job'), ('name', 'pig-job'),
                      ('description', 'descr'), ('is_public', True),
                      ('is_protected', True)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.job_mock.update.assert_called_once_with(
            'job_id', description='descr', is_protected=True,
            is_public=True, name='pig-job')
        # Check that columns are correct
        expected_columns = ('Description', 'Id', 'Is protected',
                            'Is public', 'Libs', 'Mains', 'Name', 'Type')
        self.assertEqual(expected_columns, columns)
        # Check that data is correct
        expected_data = ('Job for test', 'job_id', False, False,
                         'lib:lib_id', 'main:main_id', 'pig-job', 'Pig')
        self.assertEqual(expected_data, data)

    def test_job_template_update_private_unprotected(self):
        arglist = ['pig-job', '--private', '--unprotected']
        verifylist = [('job_template', 'pig-job'), ('is_public', False),
                      ('is_protected', False)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.cmd.take_action(parsed_args)
        # Check that correct arguments were passed
        self.job_mock.update.assert_called_once_with(
            'job_id', is_protected=False, is_public=False)
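
# TestCreateJobTemplate above asserts that ``--mains main --libs lib`` are
# resolved to job binary ids before the create call (both become 'jb_id'
# through the mocked find_unique).  A sketch of that resolution step,
# assuming a job-binaries manager exposing ``find_unique`` (illustrative
# only, not the plugin's shipped code):
def _resolve_job_binaries_sketch(jb_manager, names):
    if not names:
        return None
    return [jb_manager.find_unique(name=name).id for name in names]
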
} class TestClusterTemplates(fakes.TestDataProcessing): def setUp(self): super(TestClusterTemplates, self).setUp() self.ct_mock = ( self.app.client_manager.data_processing.cluster_templates) self.ngt_mock = ( self.app.client_manager.data_processing.node_group_templates) self.ct_mock.reset_mock() self.ngt_mock.reset_mock() self.app.api_version['data_processing'] = '1' class TestCreateClusterTemplate(TestClusterTemplates): # TODO(apavlov): check for creation with --json def setUp(self): super(TestCreateClusterTemplate, self).setUp() self.ct_mock.create.return_value = api_ct.ClusterTemplate( None, CT_INFO) self.ngt_mock.find_unique.return_value = api_ngt.NodeGroupTemplate( None, CT_INFO['node_groups'][0]) self.app.api_version['data_processing'] = '1.1' # Command to test self.cmd = osc_ct.CreateClusterTemplate(self.app, None) def test_ct_create_minimum_options(self): arglist = ['--name', 'template', '--node-groups', 'fakeng:2'] verifylist = [('name', 'template'), ('node_groups', ['fakeng:2'])] parsed_args = self.check_parser(self.cmd, arglist, verifylist) self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.ct_mock.create.assert_called_once_with( description=None, hadoop_version='0.1', is_protected=False, is_public=False, name='template', node_groups=[ {'count': 2, 'name': 'fakeng', 'node_group_template_id': 'd29631fc-0fad-434b-80aa-7a3e9526f57c'}], plugin_name='fake', use_autoconfig=False, shares=None, cluster_configs=None, domain_name=None) def test_ct_create_all_options(self): arglist = ['--name', 'template', '--node-groups', 'fakeng:2', '--anti-affinity', 'datanode', '--description', 'descr', '--autoconfig', '--public', '--protected', '--domain-name', 'domain.org.'] verifylist = [('name', 'template'), ('node_groups', ['fakeng:2']), ('description', 'descr'), ('autoconfig', True), ('public', True), ('protected', True), ('domain_name', 'domain.org.')] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that correct arguments were passed self.ct_mock.create.assert_called_once_with( description='descr', hadoop_version='0.1', is_protected=True, is_public=True, name='template', node_groups=[ {'count': 2, 'name': 'fakeng', 'node_group_template_id': 'd29631fc-0fad-434b-80aa-7a3e9526f57c'}], plugin_name='fake', use_autoconfig=True, shares=None, cluster_configs=None, domain_name='domain.org.') # Check that columns are correct expected_columns = ('Anti affinity', 'Description', 'Domain name', 'Id', 'Is default', 'Is protected', 'Is public', 'Name', 'Node groups', 'Plugin name', 'Plugin version', 'Use autoconfig') self.assertEqual(expected_columns, columns) # Check that data is correct expected_data = ('', 'Cluster template for tests', 'domain.org.', '0647061f-ab98-4c89-84e0-30738ea55750', False, False, False, 'template', 'fakeng:2', 'fake', '0.1', True) self.assertEqual(expected_data, data) class TestListClusterTemplates(TestClusterTemplates): def setUp(self): super(TestListClusterTemplates, self).setUp() self.ct_mock.list.return_value = [api_ct.ClusterTemplate( None, CT_INFO)] # Command to test self.cmd = osc_ct.ListClusterTemplates(self.app, None) def test_ct_list_no_options(self): arglist = [] verifylist = [] parsed_args = self.check_parser(self.cmd, arglist, verifylist) columns, data = self.cmd.take_action(parsed_args) # Check that columns are correct expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version'] self.assertEqual(expected_columns, columns) # Check that data is correct 
        expected_data = [('template', '0647061f-ab98-4c89-84e0-30738ea55750',
                          'fake', '0.1')]
        self.assertEqual(expected_data, list(data))

    def test_ct_list_long(self):
        arglist = ['--long']
        verifylist = [('long', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version',
                            'Node groups', 'Description']
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = [('template', '0647061f-ab98-4c89-84e0-30738ea55750',
                          'fake', '0.1', 'fakeng:2',
                          'Cluster template for tests')]
        self.assertEqual(expected_data, list(data))

    def test_ct_list_extra_search_opts(self):
        arglist = ['--plugin', 'fake', '--plugin-version', '0.1', '--name',
                   'templ']
        verifylist = [('plugin', 'fake'), ('plugin_version', '0.1'),
                      ('name', 'templ')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version']
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = [('template', '0647061f-ab98-4c89-84e0-30738ea55750',
                          'fake', '0.1')]
        self.assertEqual(expected_data, list(data))


class TestShowClusterTemplate(TestClusterTemplates):
    def setUp(self):
        super(TestShowClusterTemplate, self).setUp()
        self.ct_mock.find_unique.return_value = api_ct.ClusterTemplate(
            None, CT_INFO)

        # Command to test
        self.cmd = osc_ct.ShowClusterTemplate(self.app, None)

    def test_ct_show(self):
        arglist = ['template']
        verifylist = [('cluster_template', 'template')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.ct_mock.find_unique.assert_called_once_with(name='template')

        # Check that columns are correct
        expected_columns = ('Anti affinity', 'Description', 'Domain name',
                            'Id', 'Is default', 'Is protected', 'Is public',
                            'Name', 'Node groups', 'Plugin name',
                            'Plugin version', 'Use autoconfig')
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = (
            '', 'Cluster template for tests', 'domain.org.',
            '0647061f-ab98-4c89-84e0-30738ea55750', False, False, False,
            'template', 'fakeng:2', 'fake', '0.1', True)
        self.assertEqual(expected_data, data)


class TestDeleteClusterTemplate(TestClusterTemplates):
    def setUp(self):
        super(TestDeleteClusterTemplate, self).setUp()
        self.ct_mock.find_unique.return_value = api_ct.ClusterTemplate(
            None, CT_INFO)

        # Command to test
        self.cmd = osc_ct.DeleteClusterTemplate(self.app, None)

    def test_ct_delete(self):
        arglist = ['template']
        verifylist = [('cluster_template', ['template'])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.ct_mock.delete.assert_called_once_with(
            '0647061f-ab98-4c89-84e0-30738ea55750')


class TestUpdateClusterTemplate(TestClusterTemplates):
    # TODO(apavlov): check for update with --json
    def setUp(self):
        super(TestUpdateClusterTemplate, self).setUp()
        self.ct_mock.update.return_value = api_ct.ClusterTemplate(
            None, CT_INFO)
        self.ct_mock.find_unique.return_value = api_ct.ClusterTemplate(
            None, CT_INFO)
        self.ngt_mock.find_unique.return_value = api_ngt.NodeGroupTemplate(
            None, CT_INFO['node_groups'][0])

        # Command to test
        self.cmd = osc_ct.UpdateClusterTemplate(self.app, None)

    def test_ct_update_no_options(self):
        arglist = []
        verifylist = []

        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_ct_update_nothing_updated(self):
        arglist = ['template']
        verifylist = [('cluster_template', 'template')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)

        self.ct_mock.update.assert_called_once_with(
            '0647061f-ab98-4c89-84e0-30738ea55750')

    def test_ct_update_all_options(self):
        arglist = ['template', '--name', 'template', '--node-groups',
                   'fakeng:2', '--anti-affinity', 'datanode',
                   '--description', 'descr', '--autoconfig-enable',
                   '--public', '--protected', '--domain-name', 'domain.org.']

        verifylist = [('cluster_template', 'template'), ('name', 'template'),
                      ('node_groups', ['fakeng:2']),
                      ('description', 'descr'), ('use_autoconfig', True),
                      ('is_public', True), ('is_protected', True),
                      ('domain_name', 'domain.org.')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.ct_mock.update.assert_called_once_with(
            '0647061f-ab98-4c89-84e0-30738ea55750', description='descr',
            hadoop_version='0.1', is_protected=True, is_public=True,
            name='template', node_groups=[
                {'count': 2, 'name': 'fakeng',
                 'node_group_template_id':
                     'd29631fc-0fad-434b-80aa-7a3e9526f57c'}],
            plugin_name='fake', use_autoconfig=True,
            domain_name='domain.org.')

        # Check that columns are correct
        expected_columns = ('Anti affinity', 'Description', 'Domain name',
                            'Id', 'Is default', 'Is protected', 'Is public',
                            'Name', 'Node groups', 'Plugin name',
                            'Plugin version', 'Use autoconfig')
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = ('', 'Cluster template for tests', 'domain.org.',
                         '0647061f-ab98-4c89-84e0-30738ea55750', False,
                         False, False, 'template', 'fakeng:2', 'fake', '0.1',
                         True)
        self.assertEqual(expected_data, data)

    def test_ct_update_private_unprotected(self):
        arglist = ['template', '--private', '--unprotected']
        verifylist = [('cluster_template', 'template'),
                      ('is_protected', False), ('is_public', False)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)

        self.ct_mock.update.assert_called_once_with(
            '0647061f-ab98-4c89-84e0-30738ea55750', is_protected=False,
            is_public=False)
python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v1/test_job_types.py0000664000175000017500000001161613643576737027044 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
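
# Note: the OSC command tests below all follow the same pattern: build the
# command against the mocked client_manager from fakes.TestDataProcessing,
# run check_parser() on a sample arglist, call take_action(), and assert on
# both the mocked sahara manager calls and the (columns, data) output.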

import mock

from saharaclient.api import job_types as api_jt
from saharaclient.api import jobs as api_j
from saharaclient.osc.v1 import job_types as osc_jt
from saharaclient.tests.unit.osc.v1 import fakes


JOB_TYPE_INFO = {
    "name": 'Pig',
    "plugins": [
        {
            'versions': {
                '0.1': {},
                '0.2': {}
            },
            'name': 'fake'
        },
        {
            'versions': {
                '6.2.2': {}
            },
            'name': 'wod'
        }
    ]
}


class TestJobTypes(fakes.TestDataProcessing):
    def setUp(self):
        super(TestJobTypes, self).setUp()
        self.job_mock = self.app.client_manager.data_processing.jobs
        self.jt_mock = self.app.client_manager.data_processing.job_types
        self.jt_mock.reset_mock()
        self.job_mock.reset_mock()
        self.app.api_version['data_processing'] = '1'


class TestListJobTemplates(TestJobTypes):
    def setUp(self):
        super(TestListJobTemplates, self).setUp()
        self.jt_mock.list.return_value = [api_jt.JobType(None, JOB_TYPE_INFO)]

        # Command to test
        self.cmd = osc_jt.ListJobTypes(self.app, None)

    def test_job_types_list_no_options(self):
        arglist = []
        verifylist = []

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that columns are correct
        expected_columns = ['Name', 'Plugins']
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = [('Pig', 'fake(0.1, 0.2), wod(6.2.2)')]
        self.assertEqual(expected_data, list(data))

    def test_job_types_list_extra_search_opts(self):
        arglist = ['--type', 'Pig', '--plugin', 'fake', '--plugin-version',
                   '0.1']
        verifylist = [('type', 'Pig'), ('plugin', 'fake'),
                      ('plugin_version', '0.1')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that columns are correct
        expected_columns = ['Name', 'Plugins']
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = [('Pig', 'fake(0.1, 0.2), wod(6.2.2)')]
        self.assertEqual(expected_data, list(data))


class TestGetJobTypeConfigs(TestJobTypes):
    def setUp(self):
        super(TestGetJobTypeConfigs, self).setUp()
        self.job_mock.get_configs.return_value = (
            api_j.Job(None, JOB_TYPE_INFO))

        # Command to test
        self.cmd = osc_jt.GetJobTypeConfigs(self.app, None)

    @mock.patch('oslo_serialization.jsonutils.dump')
    def test_get_job_type_configs_default_file(self, p_dump):
        m_open = mock.mock_open()
        with mock.patch('six.moves.builtins.open', m_open, create=True):
            arglist = ['Pig']
            verifylist = [('job_type', 'Pig')]

            parsed_args = self.check_parser(self.cmd, arglist, verifylist)

            self.cmd.take_action(parsed_args)

            # Check that correct arguments were passed
            self.job_mock.get_configs.assert_called_once_with(
                'Pig')

            args_to_dump = p_dump.call_args[0]
            # Check that the right data will be saved
            self.assertEqual(JOB_TYPE_INFO, args_to_dump[0])
            # Check that data will be saved to the right file
            self.assertEqual('Pig', m_open.call_args[0][0])

    @mock.patch('oslo_serialization.jsonutils.dump')
    def test_get_job_type_configs_specified_file(self, p_dump):
        m_open = mock.mock_open()
        with mock.patch('six.moves.builtins.open', m_open):
            arglist = ['Pig', '--file', 'testfile']
            verifylist = [('job_type', 'Pig'), ('file', 'testfile')]

            parsed_args = self.check_parser(self.cmd, arglist, verifylist)

            self.cmd.take_action(parsed_args)

            # Check that correct arguments were passed
            self.job_mock.get_configs.assert_called_once_with(
                'Pig')

            args_to_dump = p_dump.call_args[0]
            # Check that the right data will be saved
            self.assertEqual(JOB_TYPE_INFO, args_to_dump[0])
            # Check that data will be saved to the right file
            self.assertEqual('testfile', m_open.call_args[0][0])
python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v1/test_jobs.py0000664000175000017500000003235513643576737026004 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import mock
from osc_lib.tests import utils as osc_utils

from saharaclient.api import job_executions as api_je
from saharaclient.osc.v1 import jobs as osc_je
from saharaclient.tests.unit.osc.v1 import fakes


JOB_EXECUTION_INFO = {
    "is_public": False,
    "id": "je_id",
    "interface": [],
    "is_protected": False,
    "input_id": 'input_id',
    "output_id": 'output_id',
    "job_id": "job_id",
    "cluster_id": 'cluster_id',
    "start_time": "start",
    "end_time": "end",
    "engine_job_id": "engine_job_id",
    "info": {
        "status": 'SUCCEEDED'
    },
    "job_configs": {
        "configs": {
            "config1": "1",
            "config2": "2"
        },
        "args": [
            "arg1",
            "arg2"
        ],
        "params": {
            "param2": "value2",
            "param1": "value1"
        }
    }
}


class TestJobs(fakes.TestDataProcessing):
    def setUp(self):
        super(TestJobs, self).setUp()
        self.je_mock = self.app.client_manager.data_processing.job_executions
        self.je_mock.reset_mock()
        self.app.api_version['data_processing'] = '1'


class TestExecuteJob(TestJobs):
    # TODO(apavlov): check for execution with --interface, --configs, --json
    def setUp(self):
        super(TestExecuteJob, self).setUp()
        self.je_mock.create.return_value = api_je.JobExecution(
            None, JOB_EXECUTION_INFO)
        self.ds_mock = self.app.client_manager.data_processing.data_sources
        self.ds_mock.find_unique.return_value = mock.Mock(id='ds_id')
        self.c_mock = self.app.client_manager.data_processing.clusters
        self.c_mock.find_unique.return_value = mock.Mock(id='cluster_id')
        self.jt_mock = self.app.client_manager.data_processing.jobs
        self.jt_mock.find_unique.return_value = mock.Mock(id='job_id')
        self.ds_mock.reset_mock()
        self.c_mock.reset_mock()
        self.jt_mock.reset_mock()

        # Command to test
        self.cmd = osc_je.ExecuteJob(self.app, None)

    def test_job_execute_minimum_options(self):
        arglist = ['--job-template', 'job-template', '--cluster', 'cluster']
        verifylist = [('job_template', 'job-template'),
                      ('cluster', 'cluster')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.je_mock.create.assert_called_once_with(
            cluster_id='cluster_id', configs={}, input_id=None,
            interface=None, is_protected=False, is_public=False,
            job_id='job_id', output_id=None)

    def test_job_execute_with_input_output_option(self):
        arglist = ['--job-template', 'job-template', '--cluster', 'cluster',
                   '--input', 'input', '--output', 'output']
        verifylist = [('job_template', 'job-template'),
                      ('cluster', 'cluster'), ('input', 'input'),
                      ('output', 'output')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)

        self.je_mock.create.assert_called_once_with(
            cluster_id='cluster_id', configs={}, input_id='ds_id',
            interface=None, is_protected=False, is_public=False,
            job_id='job_id', output_id='ds_id')

        # without option --output
        arglist = ['--job-template', 'job-template', '--cluster', 'cluster',
                   '--input', 'input']
        verifylist = [('job_template', 'job-template'),
                      ('cluster', 'cluster'), ('input', 'input')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)

        self.je_mock.create.assert_called_with(
            cluster_id='cluster_id', configs={}, input_id='ds_id',
            interface=None, is_protected=False, is_public=False,
            job_id='job_id', output_id=None)

        # without options --output and --input
        arglist = ['--job-template', 'job-template', '--cluster', 'cluster']
        verifylist = [('job_template', 'job-template'),
                      ('cluster', 'cluster')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)

        self.je_mock.create.assert_called_with(
            cluster_id='cluster_id', configs={}, input_id=None,
            interface=None, is_protected=False, is_public=False,
            job_id='job_id', output_id=None)

    def test_job_execute_all_options(self):
        arglist = ['--job-template', 'job-template', '--cluster', 'cluster',
                   '--input', 'input', '--output', 'output',
                   '--params', 'param1:value1', 'param2:value2',
                   '--args', 'arg1', 'arg2',
                   '--configs', 'config1:1', 'config2:2',
                   '--public', '--protected']

        verifylist = [('job_template', 'job-template'),
                      ('cluster', 'cluster'), ('input', 'input'),
                      ('output', 'output'),
                      ('params', ['param1:value1', 'param2:value2']),
                      ('args', ['arg1', 'arg2']),
                      ('configs', ['config1:1', 'config2:2']),
                      ('public', True),
                      ('protected', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.je_mock.create.assert_called_once_with(
            cluster_id='cluster_id',
            configs={'configs': {'config1': '1', 'config2': '2'},
                     'args': ['arg1', 'arg2'],
                     'params': {'param2': 'value2', 'param1': 'value1'}},
            input_id='ds_id', interface=None, is_protected=True,
            is_public=True, job_id='job_id', output_id='ds_id')

        # Check that columns are correct
        expected_columns = ('Cluster id', 'End time', 'Engine job id', 'Id',
                            'Input id', 'Is protected', 'Is public',
                            'Job template id', 'Output id', 'Start time',
                            'Status')
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = ('cluster_id', 'end', 'engine_job_id', 'je_id',
                         'input_id', False, False, 'job_id', 'output_id',
                         'start', 'SUCCEEDED')
        self.assertEqual(expected_data, data)


class TestListJobs(TestJobs):
    def setUp(self):
        super(TestListJobs, self).setUp()
        self.je_mock.list.return_value = [api_je.JobExecution(
            None, JOB_EXECUTION_INFO)]

        # Command to test
        self.cmd = osc_je.ListJobs(self.app, None)

    def test_jobs_list_no_options(self):
        arglist = []
        verifylist = []

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that columns are correct
        expected_columns = ['Id', 'Cluster id', 'Job id', 'Status']
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = [('je_id', 'cluster_id', 'job_id', 'SUCCEEDED')]
        self.assertEqual(expected_data, list(data))

    def test_jobs_list_long(self):
        arglist = ['--long']
        verifylist = [('long', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that columns are correct
        expected_columns = ['Id', 'Cluster id', 'Job id', 'Status',
                            'Start time', 'End time']
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = [('je_id', 'cluster_id', 'job_id', 'SUCCEEDED',
                          'start', 'end')]
        self.assertEqual(expected_data, list(data))

    def test_jobs_list_extra_search_opts(self):
        arglist = ['--status', 'succeeded']
        verifylist = [('status', 'succeeded')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that columns are correct
        expected_columns = ['Id', 'Cluster id', 'Job id', 'Status']
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = [('je_id', 'cluster_id', 'job_id', 'SUCCEEDED')]
        self.assertEqual(expected_data, list(data))


class TestShowJob(TestJobs):
    def setUp(self):
        super(TestShowJob, self).setUp()
        self.je_mock.get.return_value = api_je.JobExecution(
            None, JOB_EXECUTION_INFO)

        # Command to test
        self.cmd = osc_je.ShowJob(self.app, None)

    def test_job_show(self):
        arglist = ['job_id']
        verifylist = [('job', 'job_id')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.je_mock.get.assert_called_once_with('job_id')

        # Check that columns are correct
        expected_columns = ('Cluster id', 'End time', 'Engine job id', 'Id',
                            'Input id', 'Is protected', 'Is public',
                            'Job template id', 'Output id', 'Start time',
                            'Status')
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = ('cluster_id', 'end', 'engine_job_id', 'je_id',
                         'input_id', False, False, 'job_id', 'output_id',
                         'start', 'SUCCEEDED')
        self.assertEqual(expected_data, data)


class TestDeleteJob(TestJobs):
    def setUp(self):
        super(TestDeleteJob, self).setUp()
        self.je_mock.get.return_value = api_je.JobExecution(
            None, JOB_EXECUTION_INFO)

        # Command to test
        self.cmd = osc_je.DeleteJob(self.app, None)

    def test_job_delete(self):
        arglist = ['job_id']
        verifylist = [('job', ['job_id'])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.je_mock.delete.assert_called_once_with('job_id')


class TestUpdateJob(TestJobs):
    def setUp(self):
        super(TestUpdateJob, self).setUp()
        self.je_mock.get.return_value = api_je.JobExecution(
            None, JOB_EXECUTION_INFO)
        self.je_mock.update.return_value = mock.Mock(
            job_execution=JOB_EXECUTION_INFO.copy())

        # Command to test
        self.cmd = osc_je.UpdateJob(self.app, None)

    def test_job_update_no_options(self):
        arglist = []
        verifylist = []

        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_job_update_nothing_updated(self):
        arglist = ['job_id']
        verifylist = [('job', 'job_id')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.je_mock.update.assert_called_once_with('job_id')

    def test_job_update_public_protected(self):
        arglist = ['job_id', '--public', '--protected']
        verifylist = [('job', 'job_id'), ('is_public', True),
                      ('is_protected', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.je_mock.update.assert_called_once_with(
            'job_id', is_protected=True, is_public=True)

        # Check that columns are correct
        expected_columns = ('Cluster id', 'End time', 'Engine job id', 'Id',
                            'Input id', 'Is protected', 'Is public',
                            'Job template id', 'Output id', 'Start time',
                            'Status')
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = ('cluster_id', 'end', 'engine_job_id', 'je_id',
                         'input_id', False, False, 'job_id', 'output_id',
                         'start', 'SUCCEEDED')
        self.assertEqual(expected_data, data)

    def test_job_update_private_unprotected(self):
        arglist = ['job_id', '--private', '--unprotected']
        verifylist = [('job', 'job_id'), ('is_public', False),
                      ('is_protected', False)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.je_mock.update.assert_called_once_with(
            'job_id', is_protected=False, is_public=False)
python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v1/test_job_binaries.py0000664000175000017500000003077413643576737027476 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import mock
from osc_lib.tests import utils as osc_u
import testtools

from saharaclient.api import job_binaries as api_jb
from saharaclient.osc.v1 import job_binaries as osc_jb
from saharaclient.tests.unit.osc.v1 import fakes


JOB_BINARY_INFO = {
    "name": 'job-binary',
    "description": 'descr',
    "id": 'jb_id',
    "is_protected": False,
    "is_public": False,
    "url": 'swift://cont/test'
}


class TestJobBinaries(fakes.TestDataProcessing):
    def setUp(self):
        super(TestJobBinaries, self).setUp()
        self.jb_mock = self.app.client_manager.data_processing.job_binaries
        self.jb_mock.reset_mock()
        self.app.api_version['data_processing'] = '1'


class TestCreateJobBinary(TestJobBinaries):
    # TODO(apavlov): check for creation with --json
    def setUp(self):
        super(TestCreateJobBinary, self).setUp()
        self.jb_mock.create.return_value = api_jb.JobBinaries(
            None, JOB_BINARY_INFO)
        self.jbi_mock = (self.app.client_manager.
                         data_processing.job_binary_internals)
        self.jbi_mock.create.return_value = mock.Mock(id='jbi_id')
        self.jbi_mock.reset_mock()

        # Command to test
        self.cmd = osc_jb.CreateJobBinary(self.app, None)

    def test_job_binary_create_swift_public_protected(self):
        arglist = ['--name', 'job-binary', '--url', 'swift://cont/test',
                   '--username', 'user', '--password', 'pass', '--public',
                   '--protected']
        verifylist = [('name', 'job-binary'), ('url', 'swift://cont/test'),
                      ('username', 'user'), ('password', 'pass'),
                      ('public', True), ('protected', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.jb_mock.create.assert_called_once_with(
            description=None, extra={'password': 'pass', 'user': 'user'},
            is_protected=True, is_public=True, name='job-binary',
            url='swift://cont/test')

        # Check that columns are correct
        expected_columns = ('Description', 'Id', 'Is protected', 'Is public',
                            'Name', 'Url')
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = ('descr', 'jb_id', False, False, 'job-binary',
                         'swift://cont/test')
        self.assertEqual(expected_data, data)

    def test_job_binary_create_internal(self):
        m_open = mock.mock_open()
        with mock.patch('six.moves.builtins.open', m_open, create=True):
            arglist = ['--name', 'job-binary', '--data', 'filepath']
            verifylist = [('name', 'job-binary'), ('data', 'filepath')]

            parsed_args = self.check_parser(self.cmd, arglist, verifylist)

            self.cmd.take_action(parsed_args)

            # Check that correct arguments were passed
            self.jb_mock.create.assert_called_once_with(
                description=None, extra=None, is_protected=False,
                is_public=False, name='job-binary',
                url='internal-db://jbi_id')
            self.jbi_mock.create.assert_called_once_with('job-binary', '')

    def test_job_binary_create_mutual_exclusion(self):
        arglist = ['job-binary', '--name', 'job-binary', '--access-key',
                   'ak', '--secret-key', 'sk', '--url', 's3://abc/def',
                   '--password', 'pw']

        with testtools.ExpectedException(osc_u.ParserException):
            self.check_parser(self.cmd, arglist, mock.Mock())


class TestListJobBinaries(TestJobBinaries):
    def setUp(self):
        super(TestListJobBinaries, self).setUp()
        self.jb_mock.list.return_value = [api_jb.JobBinaries(
            None, JOB_BINARY_INFO)]

        # Command to test
        self.cmd = osc_jb.ListJobBinaries(self.app, None)

    def test_job_binary_list_no_options(self):
        arglist = []
        verifylist = []

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Url']
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = [('job-binary', 'jb_id', 'swift://cont/test')]
        self.assertEqual(expected_data, list(data))

    def test_job_binary_list_long(self):
        arglist = ['--long']
        verifylist = [('long', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Url', 'Description', 'Is public',
                            'Is protected']
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = [('job-binary', 'jb_id', 'swift://cont/test',
                          'descr', False, False)]
        self.assertEqual(expected_data, list(data))

    def test_job_binary_list_extra_search_opts(self):
        arglist = ['--name', 'bin']
        verifylist = [('name', 'bin')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Url']
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = [('job-binary', 'jb_id', 'swift://cont/test')]
        self.assertEqual(expected_data, list(data))


class TestShowJobBinary(TestJobBinaries):
    def setUp(self):
        super(TestShowJobBinary, self).setUp()
        self.jb_mock.find_unique.return_value = api_jb.JobBinaries(
            None, JOB_BINARY_INFO)

        # Command to test
        self.cmd = osc_jb.ShowJobBinary(self.app, None)

    def test_job_binary_show(self):
        arglist = ['job-binary']
        verifylist = [('job_binary', 'job-binary')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.jb_mock.find_unique.assert_called_once_with(name='job-binary')

        # Check that columns are correct
        expected_columns = ('Description', 'Id', 'Is protected', 'Is public',
                            'Name', 'Url')
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = ('descr', 'jb_id', False, False, 'job-binary',
                         'swift://cont/test')
        self.assertEqual(expected_data, data)


class TestDeleteJobBinary(TestJobBinaries):
    def setUp(self):
        super(TestDeleteJobBinary, self).setUp()
        self.jb_mock.find_unique.return_value = api_jb.JobBinaries(
            None, JOB_BINARY_INFO)

        # Command to test
        self.cmd = osc_jb.DeleteJobBinary(self.app, None)

    def test_job_binary_delete(self):
        arglist = ['job-binary']
        verifylist = [('job_binary', ['job-binary'])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.jb_mock.delete.assert_called_once_with('jb_id')


class TestUpdateJobBinary(TestJobBinaries):
    def setUp(self):
        super(TestUpdateJobBinary, self).setUp()
        self.jb_mock.find_unique.return_value = api_jb.JobBinaries(
            None, JOB_BINARY_INFO)
        self.jb_mock.update.return_value = api_jb.JobBinaries(
            None, JOB_BINARY_INFO)

        # Command to test
        self.cmd = osc_jb.UpdateJobBinary(self.app, None)

    def test_job_binary_update_all_options(self):
        arglist = ['job-binary', '--name', 'job-binary', '--description',
                   'descr', '--username', 'user', '--password', 'pass',
                   '--public', '--protected']

        verifylist = [('job_binary', 'job-binary'), ('name', 'job-binary'),
                      ('description', 'descr'), ('username', 'user'),
                      ('password', 'pass'), ('is_public', True),
                      ('is_protected', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.jb_mock.update.assert_called_once_with(
            'jb_id',
            {'is_public': True, 'description': 'descr',
             'is_protected': True, 'name': 'job-binary',
             'extra': {'password': 'pass', 'user': 'user'}})

        # Check that columns are correct
        expected_columns = ('Description', 'Id', 'Is protected', 'Is public',
                            'Name', 'Url')
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = ('descr', 'jb_id', False, False, 'job-binary',
                         'swift://cont/test')
        self.assertEqual(expected_data, data)

    def test_job_binary_update_private_unprotected(self):
        arglist = ['job-binary', '--private', '--unprotected']
        verifylist = [('job_binary', 'job-binary'), ('is_public', False),
                      ('is_protected', False)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.jb_mock.update.assert_called_once_with(
            'jb_id', {'is_public': False, 'is_protected': False})

    def test_job_binary_update_nothing_updated(self):
        arglist = ['job-binary']
        verifylist = [('job_binary', 'job-binary')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.jb_mock.update.assert_called_once_with(
            'jb_id', {})

    def test_job_binary_update_mutual_exclusion(self):
        arglist = ['job-binary', '--name', 'job-binary', '--access-key',
                   'ak', '--secret-key', 'sk', '--url', 's3://abc/def',
                   '--password', 'pw']

        with testtools.ExpectedException(osc_u.ParserException):
            self.check_parser(self.cmd, arglist, mock.Mock())


class TestDownloadJobBinary(TestJobBinaries):
    def setUp(self):
        super(TestDownloadJobBinary, self).setUp()
        self.jb_mock.get_file.return_value = 'data'
        self.jb_mock.find_unique.return_value = api_jb.JobBinaries(
            None, JOB_BINARY_INFO)

        # Command to test
        self.cmd = osc_jb.DownloadJobBinary(self.app, None)

    def test_download_job_binary_default_file(self):
        m_open = mock.mock_open()
        with mock.patch('six.moves.builtins.open', m_open, create=True):
            arglist = ['job-binary']
            verifylist = [('job_binary', 'job-binary')]

            parsed_args = self.check_parser(self.cmd, arglist, verifylist)

            self.cmd.take_action(parsed_args)

            # Check that correct arguments were passed
            self.jb_mock.get_file.assert_called_once_with(
                'jb_id')

            # Check that data will be saved to the right file
            self.assertEqual('job-binary', m_open.call_args[0][0])

    def test_download_job_binary_specified_file(self):
        m_open = mock.mock_open()
        with mock.patch('six.moves.builtins.open', m_open, create=True):
            arglist = ['job-binary', '--file', 'test']
            verifylist = [('job_binary', 'job-binary'), ('file', 'test')]

            parsed_args = self.check_parser(self.cmd, arglist, verifylist)

            self.cmd.take_action(parsed_args)

            # Check that correct arguments were passed
            self.jb_mock.get_file.assert_called_once_with(
                'jb_id')

            # Check that data will be saved to the right file
            self.assertEqual('test', m_open.call_args[0][0])
python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v1/__init__.py0000664000175000017500000000000013643576737025527 0ustar zuulzuul00000000000000python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v1/test_plugins.py0000664000175000017500000002130213643576737026520 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
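
# Note: the "get configs" tests below patch six.moves.builtins.open with
# mock.mock_open() and patch oslo_serialization.jsonutils.dump, so the
# command's JSON output is asserted on without touching the filesystem.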

import mock
from oslo_serialization import jsonutils as json

from saharaclient.api import plugins as api_plugins
from saharaclient.osc.v1 import plugins as osc_plugins
from saharaclient.tests.unit.osc.v1 import fakes


PLUGIN_INFO = {'name': 'fake',
               'title': 'Fake Plugin',
               'versions': ['0.1', '0.2'],
               'description': 'Plugin for tests',
               'required_image_tags': ['fake', '0.1'],
               'node_processes': {
                   'HDFS': ['datanode', 'namenode'],
                   'MapReduce': ['jobtracker', 'tasktracker']
               },
               'plugin_labels': {'enabled': {'status': True}},
               'version_labels': {'0.1': {'enabled': {'status': True}}}}


class TestPlugins(fakes.TestDataProcessing):
    def setUp(self):
        super(TestPlugins, self).setUp()
        self.plugins_mock = self.app.client_manager.data_processing.plugins
        self.plugins_mock.reset_mock()
        self.app.api_version['data_processing'] = '1'


class TestListPlugins(TestPlugins):
    def setUp(self):
        super(TestListPlugins, self).setUp()
        self.plugins_mock.list.return_value = [api_plugins.Plugin(
            None, PLUGIN_INFO)]

        # Command to test
        self.cmd = osc_plugins.ListPlugins(self.app, None)

    def test_plugins_list_no_options(self):
        arglist = []
        verifylist = []

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that columns are correct
        expected_columns = ['Name', 'Versions']
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = [('fake', '0.1, 0.2')]
        self.assertEqual(expected_data, list(data))

    def test_plugins_list_long(self):
        arglist = ['--long']
        verifylist = [('long', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that columns are correct
        expected_columns = ['Name', 'Title', 'Versions', 'Description']
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = [('fake', 'Fake Plugin', '0.1, 0.2',
                          'Plugin for tests')]
        self.assertEqual(expected_data, list(data))


class TestShowPlugin(TestPlugins):
    def setUp(self):
        super(TestShowPlugin, self).setUp()
        self.plugins_mock.get.return_value = api_plugins.Plugin(
            None, PLUGIN_INFO)
        self.plugins_mock.get_version_details.return_value = (
            api_plugins.Plugin(None, PLUGIN_INFO))

        # Command to test
        self.cmd = osc_plugins.ShowPlugin(self.app, None)

    def test_plugin_show(self):
        arglist = ['fake']
        verifylist = [('plugin', 'fake')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.plugins_mock.get.assert_called_once_with('fake')

        # Check that columns are correct
        expected_columns = ('Description', 'Name', 'Title', 'Versions', '',
                            'Plugin version 0.1: enabled', 'Plugin: enabled')
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = ('Plugin for tests', 'fake', 'Fake Plugin',
                         '0.1, 0.2', '', True, True)
        self.assertEqual(expected_data, data)

    def test_plugin_version_show(self):
        arglist = ['fake', '--plugin-version', '0.1']
        verifylist = [('plugin', 'fake'), ('plugin_version', '0.1')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.plugins_mock.get_version_details.assert_called_once_with(
            'fake', '0.1')

        # Check that columns are correct
        expected_columns = ('Description', 'Name', 'Required image tags',
                            'Title', '', 'Plugin version 0.1: enabled',
                            'Plugin: enabled', '', 'Service:', '', 'HDFS',
                            'MapReduce')
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = ('Plugin for tests', 'fake', '0.1, fake',
                         'Fake Plugin', '', True, True, '',
                         'Available processes:', '', 'datanode, namenode',
                         'jobtracker, tasktracker')
        self.assertEqual(expected_data, data)


class TestGetPluginConfigs(TestPlugins):
    def setUp(self):
        super(TestGetPluginConfigs, self).setUp()
        self.plugins_mock.get_version_details.return_value = (
            api_plugins.Plugin(None, PLUGIN_INFO))

        # Command to test
        self.cmd = osc_plugins.GetPluginConfigs(self.app, None)

    @mock.patch('oslo_serialization.jsonutils.dump')
    def test_get_plugin_configs_default_file(self, p_dump):
        m_open = mock.mock_open()
        with mock.patch('six.moves.builtins.open', m_open, create=True):
            arglist = ['fake', '0.1']
            verifylist = [('plugin', 'fake'), ('plugin_version', '0.1')]

            parsed_args = self.check_parser(self.cmd, arglist, verifylist)

            self.cmd.take_action(parsed_args)

            # Check that correct arguments were passed
            self.plugins_mock.get_version_details.assert_called_once_with(
                'fake', '0.1')

            args_to_dump = p_dump.call_args[0]
            # Check that the right data will be saved
            self.assertEqual(PLUGIN_INFO, args_to_dump[0])
            # Check that data will be saved to the right file
            self.assertEqual('fake-0.1', m_open.call_args[0][0])

    @mock.patch('oslo_serialization.jsonutils.dump')
    def test_get_plugin_configs_specified_file(self, p_dump):
        m_open = mock.mock_open()
        with mock.patch('six.moves.builtins.open', m_open):
            arglist = ['fake', '0.1', '--file', 'testfile']
            verifylist = [('plugin', 'fake'), ('plugin_version', '0.1'),
                          ('file', 'testfile')]

            parsed_args = self.check_parser(self.cmd, arglist, verifylist)

            self.cmd.take_action(parsed_args)

            # Check that correct arguments were passed
            self.plugins_mock.get_version_details.assert_called_once_with(
                'fake', '0.1')

            args_to_dump = p_dump.call_args[0]
            # Check that the right data will be saved
            self.assertEqual(PLUGIN_INFO, args_to_dump[0])
            # Check that data will be saved to the right file
            self.assertEqual('testfile', m_open.call_args[0][0])


class TestUpdatePlugin(TestPlugins):
    def setUp(self):
        super(TestUpdatePlugin, self).setUp()
        self.plugins_mock.update.return_value = api_plugins.Plugin(
            None, PLUGIN_INFO)

        # Command to test
        self.cmd = osc_plugins.UpdatePlugin(self.app, None)

    @mock.patch('osc_lib.utils.read_blob_file_contents')
    def test_plugin_update(self, read):
        arglist = ['fake', 'update.json']
        verifylist = [('plugin', 'fake'), ('json', 'update.json')]
        value = {'plugin_labels': {'enabled': {'status': True}}}
        value = json.dumps(value)
        read.return_value = value

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.plugins_mock.update.assert_called_once_with(
            'fake', {'plugin_labels': {'enabled': {'status': True}}})

        # Check that columns are correct
        expected_columns = ('Description', 'Name', 'Title', 'Versions', '',
                            'Plugin version 0.1: enabled', 'Plugin: enabled')
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = ('Plugin for tests', 'fake', 'Fake Plugin',
                         '0.1, 0.2', '', True, True)
        self.assertEqual(expected_data, data)
python-saharaclient-3.1.0/saharaclient/tests/unit/osc/v1/test_clusters.py0000664000175000017500000005036613643576737026703 0ustar zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import mock
from osc_lib.tests import utils as osc_utils

from saharaclient.api import cluster_templates as api_ct
from saharaclient.api import clusters as api_cl
from saharaclient.api import images as api_img
from saharaclient.api import node_group_templates as api_ngt
from saharaclient.osc.v1 import clusters as osc_cl
from saharaclient.tests.unit.osc.v1 import fakes


CLUSTER_INFO = {
    "description": "Cluster template for tests",
    "use_autoconfig": True,
    "is_default": False,
    "node_groups": [
        {
            "count": 2,
            "id": "ng_id",
            "name": "fakeng",
            "plugin_name": 'fake',
            "hadoop_version": '0.1',
            "node_group_template_id": 'ngt_id'
        }
    ],
    "hadoop_version": "0.1",
    "is_public": False,
    "plugin_name": "fake",
    "id": "cluster_id",
    "anti_affinity": [],
    "name": "fake",
    "is_protected": False,
    "cluster_template_id": "ct_id",
    "neutron_management_network": "net_id",
    "user_keypair_id": "test",
    "status": 'Active',
    "default_image_id": "img_id",
    'verification': {
        'status': 'GREEN',
        'id': 'ver_id',
        'cluster_id': 'cluster_id',
        'checks': [
            {
                'status': 'GREEN',
                'name': 'Some check'
            }
        ]
    }
}

CT_INFO = {
    "plugin_name": "fake",
    "hadoop_version": "0.1",
    "name": '"template',
    "id": "ct_id"
}

NGT_INFO = {
    'id': 'ngt_id',
    'name': 'fakeng'
}


class TestClusters(fakes.TestDataProcessing):
    def setUp(self):
        super(TestClusters, self).setUp()
        self.cl_mock = (
            self.app.client_manager.data_processing.clusters)
        self.ngt_mock = (
            self.app.client_manager.data_processing.node_group_templates)
        self.ct_mock = (
            self.app.client_manager.data_processing.cluster_templates)
        self.img_mock = (
            self.app.client_manager.data_processing.images)
        self.cl_mock.reset_mock()
        self.ngt_mock.reset_mock()
        self.ct_mock.reset_mock()
        self.img_mock.reset_mock()
        self.app.api_version['data_processing'] = '1'


class TestCreateCluster(TestClusters):
    # TODO(apavlov): check for creation with --json
    def setUp(self):
        super(TestCreateCluster, self).setUp()
        self.cl_mock.create.return_value = api_cl.Cluster(
            None, CLUSTER_INFO)
        self.cl_mock.find_unique.return_value = api_cl.Cluster(
            None, CLUSTER_INFO)
        self.ct_mock.find_unique.return_value = api_ct.ClusterTemplate(
            None, CT_INFO)
        self.img_mock.find_unique.return_value = api_img.Image(
            None, {'id': 'img_id'})
        self.net_mock = self.app.client_manager.network
        self.net_mock.find_network.return_value = mock.Mock(id='net_id')
        self.net_mock.reset_mock()

        # Command to test
        self.cmd = osc_cl.CreateCluster(self.app, None)

    def test_cluster_create_minimum_options(self):
        arglist = ['--name', 'fake', '--cluster-template', 'template',
                   '--image', 'ubuntu']
        verifylist = [('name', 'fake'), ('cluster_template', 'template'),
                      ('image', 'ubuntu')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.cl_mock.create.assert_called_once_with(
            cluster_template_id='ct_id', count=None,
            default_image_id='img_id', description=None,
            hadoop_version='0.1', is_protected=False, is_public=False,
            is_transient=False, name='fake', net_id=None,
            plugin_name='fake', user_keypair_id=None)

    def test_cluster_create_all_options(self):
        arglist = ['--name', 'fake', '--cluster-template', 'template',
                   '--image', 'ubuntu', '--user-keypair', 'test',
                   '--neutron-network', 'net', '--description', 'descr',
                   '--transient', '--public', '--protected']

        verifylist = [('name', 'fake'), ('cluster_template', 'template'),
                      ('image', 'ubuntu'), ('user_keypair', 'test'),
                      ('neutron_network', 'net'), ('description', 'descr'),
                      ('transient', True), ('public', True),
                      ('protected', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.cl_mock.create.assert_called_once_with(
            cluster_template_id='ct_id', count=None,
            default_image_id='img_id', description='descr',
            hadoop_version='0.1', is_protected=True, is_public=True,
            is_transient=True, name='fake', net_id='net_id',
            plugin_name='fake', user_keypair_id='test')

        # Check that columns are correct
        expected_columns = ('Anti affinity', 'Cluster template id',
                            'Description', 'Id', 'Image', 'Is protected',
                            'Is public', 'Name',
                            'Neutron management network', 'Node groups',
                            'Plugin name', 'Plugin version', 'Status',
                            'Use autoconfig', 'User keypair id')
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = ('', 'ct_id', 'Cluster template for tests',
                         'cluster_id', 'img_id', False, False, 'fake',
                         'net_id', 'fakeng:2', 'fake', '0.1', 'Active',
                         True, 'test')
        self.assertEqual(expected_data, data)

    def test_cluster_create_with_count(self):
        clusters_mock = mock.Mock()
        clusters_mock.to_dict.return_value = {
            'clusters': ['cluster1_id', 'cluster2_id']
        }
        self.cl_mock.create.return_value = clusters_mock

        arglist = ['--name', 'fake', '--cluster-template', 'template',
                   '--image', 'ubuntu', '--count', '2']
        verifylist = [('name', 'fake'), ('cluster_template', 'template'),
                      ('image', 'ubuntu'), ('count', 2)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.cl_mock.create.assert_called_once_with(
            cluster_template_id='ct_id', count=2,
            default_image_id='img_id', description=None,
            hadoop_version='0.1', is_protected=False, is_public=False,
            is_transient=False, name='fake', net_id=None,
            plugin_name='fake', user_keypair_id=None)

        # Check that columns are correct
        expected_columns = ('fake',)
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = ('cluster_id',)
        self.assertEqual(expected_data, data)


class TestListClusters(TestClusters):
    def setUp(self):
        super(TestListClusters, self).setUp()
        self.cl_mock.list.return_value = [api_cl.Cluster(
            None, CLUSTER_INFO)]

        # Command to test
        self.cmd = osc_cl.ListClusters(self.app, None)

    def test_clusters_list_no_options(self):
        arglist = []
        verifylist = []

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version',
                            'Status']
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = [('fake', 'cluster_id', 'fake', '0.1', 'Active')]
        self.assertEqual(expected_data, list(data))

    def test_clusters_list_long(self):
        arglist = ['--long']
        verifylist = [('long', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version',
                            'Status', 'Description', 'Image']
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = [('fake', 'cluster_id', 'fake', '0.1', 'Active',
                          'Cluster template for tests', 'img_id')]
        self.assertEqual(expected_data, list(data))

    def test_clusters_list_extra_search_opts(self):
        arglist = ['--plugin', 'fake', '--plugin-version', '0.1', '--name',
                   'fake']
        verifylist = [('plugin', 'fake'), ('plugin_version', '0.1'),
                      ('name', 'fake')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that columns are correct
        expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version',
                            'Status']
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = [('fake', 'cluster_id', 'fake', '0.1', 'Active')]
        self.assertEqual(expected_data, list(data))


class TestShowCluster(TestClusters):
    def setUp(self):
        super(TestShowCluster, self).setUp()
        self.cl_mock.find_unique.return_value = api_cl.Cluster(
            None, CLUSTER_INFO)

        # Command to test
        self.cmd = osc_cl.ShowCluster(self.app, None)

    def test_cluster_show(self):
        arglist = ['fake']
        verifylist = [('cluster', 'fake')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.cl_mock.find_unique.assert_called_once_with(name='fake')

        # Check that columns are correct
        expected_columns = ('Anti affinity', 'Cluster template id',
                            'Description', 'Id', 'Image', 'Is protected',
                            'Is public', 'Name',
                            'Neutron management network', 'Node groups',
                            'Plugin name', 'Plugin version', 'Status',
                            'Use autoconfig', 'User keypair id')
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = ('', 'ct_id', 'Cluster template for tests',
                         'cluster_id', 'img_id', False, False, 'fake',
                         'net_id', 'fakeng:2', 'fake', '0.1', 'Active',
                         True, 'test')
        self.assertEqual(expected_data, data)

    def test_cluster_show_verification(self):
        arglist = ['fake', '--verification']
        verifylist = [('cluster', 'fake'), ('verification', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.cl_mock.find_unique.assert_called_once_with(name='fake')

        # Check that columns are correct
        expected_columns = ('Anti affinity', 'Cluster template id',
                            'Description', 'Health check (some check)',
                            'Id', 'Image', 'Is protected', 'Is public',
                            'Name', 'Neutron management network',
                            'Node groups', 'Plugin name', 'Plugin version',
                            'Status', 'Use autoconfig', 'User keypair id',
                            'Verification status')
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = ('', 'ct_id', 'Cluster template for tests', 'GREEN',
                         'cluster_id', 'img_id', False, False, 'fake',
                         'net_id', 'fakeng:2', 'fake', '0.1', 'Active',
                         True, 'test', 'GREEN')
        self.assertEqual(expected_data, data)


class TestDeleteCluster(TestClusters):
    def setUp(self):
        super(TestDeleteCluster, self).setUp()
        self.cl_mock.find_unique.return_value = api_cl.Cluster(
            None, CLUSTER_INFO)

        # Command to test
        self.cmd = osc_cl.DeleteCluster(self.app, None)

    def test_cluster_delete(self):
        arglist = ['fake']
        verifylist = [('cluster', ['fake'])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.cl_mock.delete.assert_called_once_with('cluster_id')


class TestUpdateCluster(TestClusters):
    def setUp(self):
        super(TestUpdateCluster, self).setUp()
        self.cl_mock.update.return_value = mock.Mock(
            cluster=CLUSTER_INFO.copy())
        self.cl_mock.find_unique.return_value = api_cl.Cluster(
            None, CLUSTER_INFO)

        # Command to test
        self.cmd = osc_cl.UpdateCluster(self.app, None)

    def test_cluster_update_no_options(self):
        arglist = []
        verifylist = []

        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_cluster_update_nothing_updated(self):
        arglist = ['fake']
        verifylist = [('cluster', 'fake')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.cl_mock.update.assert_called_once_with('cluster_id')

    def test_cluster_update_all_options(self):
        arglist = ['fake', '--name', 'fake', '--description', 'descr',
                   '--public', '--protected']

        verifylist = [('cluster', 'fake'), ('name', 'fake'),
                      ('description', 'descr'), ('is_public', True),
                      ('is_protected', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.cl_mock.update.assert_called_once_with(
            'cluster_id', description='descr', is_protected=True,
            is_public=True, name='fake')

        # Check that columns are correct
        expected_columns = ('Anti affinity', 'Cluster template id',
                            'Description', 'Id', 'Image', 'Is protected',
                            'Is public', 'Name',
                            'Neutron management network', 'Node groups',
                            'Plugin name', 'Plugin version', 'Status',
                            'Use autoconfig', 'User keypair id')
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = ('', 'ct_id', 'Cluster template for tests',
                         'cluster_id', 'img_id', False, False, 'fake',
                         'net_id', 'fakeng:2', 'fake', '0.1', 'Active',
                         True, 'test')
        self.assertEqual(expected_data, data)

    def test_cluster_update_private_unprotected(self):
        arglist = ['fake', '--private', '--unprotected']
        verifylist = [('cluster', 'fake'), ('is_public', False),
                      ('is_protected', False)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.cl_mock.update.assert_called_once_with(
            'cluster_id', is_protected=False, is_public=False)


class TestScaleCluster(TestClusters):
    def setUp(self):
        super(TestScaleCluster, self).setUp()
        self.cl_mock.scale.return_value = mock.Mock(
            cluster=CLUSTER_INFO.copy())
        self.cl_mock.find_unique.return_value = api_cl.Cluster(
            None, CLUSTER_INFO)

        # Command to test
        self.cmd = osc_cl.ScaleCluster(self.app, None)

    def test_cluster_scale_no_options(self):
        arglist = []
        verifylist = []

        self.assertRaises(osc_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)

    def test_cluster_scale_resize(self):
        self.ngt_mock.find_unique.return_value = api_ngt.NodeGroupTemplate(
            None, NGT_INFO)
        arglist = ['fake', '--instances', 'fakeng:1']
        verifylist = [('cluster', 'fake'), ('instances', ['fakeng:1'])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.cl_mock.scale.assert_called_once_with(
            'cluster_id',
            {'resize_node_groups': [
                {'count': 1, 'name': 'fakeng'}]}
        )

        # Check that columns are correct
        expected_columns = ('Anti affinity', 'Cluster template id',
                            'Description', 'Id', 'Image', 'Is protected',
                            'Is public', 'Name',
                            'Neutron management network', 'Node groups',
                            'Plugin name', 'Plugin version', 'Status',
                            'Use autoconfig', 'User keypair id')
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = ('', 'ct_id', 'Cluster template for tests',
                         'cluster_id', 'img_id', False, False, 'fake',
                         'net_id', 'fakeng:2', 'fake', '0.1', 'Active',
                         True, 'test')
        self.assertEqual(expected_data, data)

    def test_cluster_scale_add_ng(self):
        new_ng = {'name': 'new', 'id': 'new_id'}
        self.ngt_mock.find_unique.return_value = api_ngt.NodeGroupTemplate(
            None, new_ng)
        arglist = ['fake', '--instances', 'new:1']
        verifylist = [('cluster', 'fake'), ('instances', ['new:1'])]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.cl_mock.scale.assert_called_once_with(
            'cluster_id',
            {'add_node_groups': [
                {'count': 1, 'node_group_template_id': 'new_id',
                 'name': 'new'}
            ]})


class TestVerificationUpdateCluster(TestClusters):
    def setUp(self):
        super(TestVerificationUpdateCluster, self).setUp()
        self.cl_mock.find_unique.return_value = api_cl.Cluster(
            None, CLUSTER_INFO)
        self.cl_mock.verification_update.return_value = api_cl.Cluster(
            None, CLUSTER_INFO)

        # Command to test
        self.cmd = osc_cl.VerificationUpdateCluster(self.app, None)

    def test_verification_show(self):
        arglist = ['fake', '--show']
        verifylist = [('cluster', 'fake'), ('show', True)]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        columns, data = self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.cl_mock.find_unique.assert_called_once_with(name='fake')

        # Check that columns are correct
        expected_columns = ('Health check (some check)',
                            'Verification status')
        self.assertEqual(expected_columns, columns)

        # Check that data is correct
        expected_data = ('GREEN', 'GREEN')
        self.assertEqual(expected_data, data)

    def test_verification_start(self):
        arglist = ['fake', '--start']
        verifylist = [('cluster', 'fake'), ('status', 'START')]

        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        self.cmd.take_action(parsed_args)

        # Check that correct arguments were passed
        self.cl_mock.verification_update.assert_called_once_with(
            'cluster_id', 'START')
python-saharaclient-3.1.0/saharaclient/tests/unit/osc/__init__.py0000664000175000017500000000000013643576737025201 0ustar zuulzuul00000000000000python-saharaclient-3.1.0/saharaclient/tests/unit/test_data_sources.py0000664000175000017500000001076313643576737026402 0ustar zuulzuul00000000000000# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
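
# Note: unlike the OSC tests above, the API-level tests below go through the
# real REST layer; requests_mock (wired up in tests.unit.base.BaseTestCase)
# intercepts the HTTP calls so each test can assert on the exact request
# URL and JSON body.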

from saharaclient.api import data_sources as ds
from saharaclient.tests.unit import base

import mock
from oslo_serialization import jsonutils as json


class DataSourceTest(base.BaseTestCase):
    body = {
        'name': 'name',
        'url': 'url',
        'description': 'descr',
        'data_source_type': 'hdfs',
        'credential_user': 'user',
        'credential_pass': '123'
    }

    response = {
        'name': 'name',
        'url': 'url',
        'description': 'descr',
        'type': 'hdfs',
        'credentials': {
            'user': 'user',
            'password': '123'
        }
    }

    update_json = {
        'name': 'UpdatedName',
        'url': 'hdfs://myfakeserver/fakepath'
    }

    def test_create_data_sources(self):
        url = self.URL + '/data-sources'
        self.responses.post(url, status_code=202,
                            json={'data_source': self.response})

        resp = self.client.data_sources.create(**self.body)

        self.assertEqual(url, self.responses.last_request.url)
        self.assertEqual(self.response,
                         json.loads(self.responses.last_request.body))
        self.assertIsInstance(resp, ds.DataSources)
        self.assertFields(self.response, resp)

    def test_data_sources_list(self):
        url = self.URL + '/data-sources'
        self.responses.get(url, json={'data_sources': [self.response]})

        resp = self.client.data_sources.list()

        self.assertEqual(url, self.responses.last_request.url)
        self.assertIsInstance(resp[0], ds.DataSources)
        self.assertFields(self.response, resp[0])

    def test_data_sources_get(self):
        url = self.URL + '/data-sources/id'
        self.responses.get(url, json={'data_source': self.response})

        resp = self.client.data_sources.get('id')

        self.assertEqual(url, self.responses.last_request.url)
        self.assertIsInstance(resp, ds.DataSources)
        self.assertFields(self.response, resp)

    def test_data_sources_delete(self):
        url = self.URL + '/data-sources/id'
        self.responses.delete(url, status_code=204)

        self.client.data_sources.delete('id')

        self.assertEqual(url, self.responses.last_request.url)

    def test_update_data_sources(self):
        update_url = self.URL + '/data-sources/id'
        self.responses.put(update_url, status_code=202,
                           json=self.update_json)

        updated = self.client.data_sources.update("id", self.update_json)
        self.assertEqual(self.update_json["name"], updated.name)
        self.assertEqual(self.update_json["url"], updated.url)

    @mock.patch('saharaclient.api.base.ResourceManager._create')
    def test_create_data_source_s3_or_swift_credentials(self, create):
        # Data source without any credential arguments
        self.client.data_sources.create('ds', '', 'swift', 'swift://path')
        self.assertNotIn('credentials', create.call_args[0][1])

        # Data source with Swift credential arguments
        self.client.data_sources.create('ds', '', 'swift', 'swift://path',
                                        credential_user='user')
        self.assertIn('credentials', create.call_args[0][1])

        # Data source with S3 credential arguments
        self.client.data_sources.create('ds', '', 'swift', 'swift://path',
                                        s3_credentials={'accesskey': 'a'})
        self.assertIn('credentials', create.call_args[0][1])
        self.assertIn('accesskey', create.call_args[0][1]['credentials'])

        # Data source with both S3 and swift credential arguments
        self.client.data_sources.create('ds', '', 's3', 's3://path',
                                        credential_user='swift_user',
                                        s3_credentials={'accesskey': 's3_a'})
        self.assertIn('user', create.call_args[0][1]['credentials'])
        self.assertNotIn('accesskey', create.call_args[0][1]['credentials'])
python-saharaclient-3.1.0/saharaclient/tests/unit/base.py0000664000175000017500000000333513643576737023602 0ustar zuulzuul00000000000000# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import testtools

from saharaclient.api import base
from saharaclient.api import client

from keystoneauth1 import session
from requests_mock.contrib import fixture


class BaseTestCase(testtools.TestCase):
    URL = 'http://localhost:8386'
    SESSION = session.Session()

    def setUp(self):
        super(BaseTestCase, self).setUp()
        self.responses = self.useFixture(fixture.Fixture())
        self.client = client.Client(session=self.SESSION,
                                    sahara_url=self.URL)
        self.client_v2 = client.ClientV2(session=self.SESSION,
                                         sahara_url=self.URL)

    def assertFields(self, body, obj):
        for key, value in body.items():
            self.assertEqual(value, getattr(obj, key))

    def assertDictsEqual(self, dict1, dict2):
        self.assertEqual(len(dict1), len(dict2))
        for key in dict1:
            self.assertEqual(dict1[key], dict2[key])


class TestResource(base.Resource):
    resource_name = 'Test Resource'
    defaults = {'description': 'Test Description',
                'extra': "extra"}


class TestManager(base.ResourceManager):
    resource_class = TestResource
python-saharaclient-3.1.0/saharaclient/tests/unit/test_cluster_templates.py0000664000175000017500000001270413643576737027462 0ustar zuulzuul00000000000000# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from saharaclient.api import cluster_templates as ct
from saharaclient.tests.unit import base

from oslo_serialization import jsonutils as json


class ClusterTemplateTest(base.BaseTestCase):
    body = {
        'name': 'name',
        'description': 'description',
        'plugin_name': 'plugin',
        'hadoop_version': '1',
        'node_groups': {
            'name': 'master-node',
            'flavor_id': '2',
            'node_processes': ['namenode'],
            'count': 1
        },
        "use_autoconfig": False,
        "domain_name": 'domain.org.'
    }

    update_json = {
        "cluster_template": {
            'name': 'UpdatedName',
            'description': 'Updated description',
            'plugin_name': 'plugin',
            'hadoop_version': '1',
            'node_groups': {
                'name': 'master-node',
                'flavor_id': '3',
                'node_processes': ['namenode', 'datanode'],
                'count': 1
            },
            "use_autoconfig": True,
            "domain_name": 'domain.org.'
python-saharaclient-3.1.0/saharaclient/tests/unit/test_cluster_templates.py0000664000175000017500000001270413643576737027472 0ustar  zuulzuul00000000000000# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from saharaclient.api import cluster_templates as ct
from saharaclient.tests.unit import base

from oslo_serialization import jsonutils as json


class ClusterTemplateTest(base.BaseTestCase):
    body = {
        'name': 'name',
        'description': 'description',
        'plugin_name': 'plugin',
        'hadoop_version': '1',
        'node_groups': {
            'name': 'master-node',
            'flavor_id': '2',
            'node_processes': ['namenode'],
            'count': 1
        },
        "use_autoconfig": False,
        "domain_name": 'domain.org.'
    }

    update_json = {
        "cluster_template": {
            'name': 'UpdatedName',
            'description': 'Updated description',
            'plugin_name': 'plugin',
            'hadoop_version': '1',
            'node_groups': {
                'name': 'master-node',
                'flavor_id': '3',
                'node_processes': ['namenode', 'datanode'],
                'count': 1
            },
            "use_autoconfig": True,
            "domain_name": 'domain.org.'
        }
    }

    def test_create_cluster_template(self):
        url = self.URL + '/cluster-templates'
        self.responses.post(url, status_code=202,
                            json={'cluster_template': self.body})

        resp = self.client.cluster_templates.create(**self.body)

        self.assertEqual(url, self.responses.last_request.url)
        self.assertEqual(self.body,
                         json.loads(self.responses.last_request.body))
        self.assertIsInstance(resp, ct.ClusterTemplate)
        self.assertFields(self.body, resp)

    def test_cluster_template_list(self):
        url = self.URL + '/cluster-templates'
        self.responses.get(url, json={'cluster_templates': [self.body]})

        resp = self.client.cluster_templates.list()

        self.assertEqual(url, self.responses.last_request.url)
        self.assertIsInstance(resp[0], ct.ClusterTemplate)
        self.assertFields(self.body, resp[0])

    def test_cluster_template_get(self):
        url = self.URL + '/cluster-templates/id'
        self.responses.get(url, json={'cluster_template': self.body})

        resp = self.client.cluster_templates.get('id')

        self.assertEqual(url, self.responses.last_request.url)
        self.assertIsInstance(resp, ct.ClusterTemplate)
        self.assertFields(self.body, resp)

    def test_cluster_template_delete(self):
        url = self.URL + '/cluster-templates/id'
        self.responses.delete(url, status_code=204)

        self.client.cluster_templates.delete('id')

        self.assertEqual(url, self.responses.last_request.url)

    def test_cluster_template_update(self):
        url = self.URL + '/cluster-templates'
        self.responses.post(url, status_code=202,
                            json={'cluster_template': self.body})
        resp = self.client.cluster_templates.create(**self.body)

        update_url = self.URL + '/cluster-templates/id'
        self.responses.put(update_url, status_code=202,
                           json=self.update_json)

        # check that all parameters will be updated
        updated = self.client.cluster_templates.update(
            "id",
            resp.name,
            resp.plugin_name,
            resp.hadoop_version,
            description=getattr(resp, "description", None),
            cluster_configs=getattr(resp, "cluster_configs", None),
            node_groups=getattr(resp, "node_groups", None),
            anti_affinity=getattr(resp, "anti_affinity", None),
            net_id=getattr(resp, "neutron_management_network", None),
            default_image_id=getattr(resp, "default_image_id", None),
            use_autoconfig=True,
            domain_name=getattr(resp, "domain_name", None)
        )
        self.assertIsInstance(updated, ct.ClusterTemplate)
        self.assertFields(self.update_json["cluster_template"], updated)

        # check that parameters will not be updated
        self.client.cluster_templates.update("id")
        self.assertEqual(update_url, self.responses.last_request.url)
        self.assertEqual({},
                         json.loads(self.responses.last_request.body))

        # check that all parameters will be unset
        unset_json = {
            'anti_affinity': None, 'cluster_configs': None,
            'default_image_id': None, 'description': None,
            'hadoop_version': None, 'is_protected': None,
            'is_public': None, 'name': None, 'net_id': None,
            'node_groups': None, 'plugin_name': None, 'shares': None,
            'use_autoconfig': None, 'domain_name': None}

        req_json = unset_json.copy()
        req_json['neutron_management_network'] = req_json.pop('net_id')

        self.client.cluster_templates.update("id", **unset_json)
        self.assertEqual(update_url, self.responses.last_request.url)
        self.assertEqual(req_json,
                         json.loads(self.responses.last_request.body))
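# Editor's note -- illustrative sketch, not part of the package. The update
# test above encodes two API behaviours worth calling out: an explicit None
# value is serialized into the request body to *unset* a field (arguments
# that are not passed are simply not sent), and the client-side 'net_id'
# argument is renamed to 'neutron_management_network' on the wire. A
# standalone model of that payload-building rule (function name is
# hypothetical):
def _sketch_build_update_body(**kwargs):
    # only explicitly passed arguments reach the request body; passing
    # None is how a field gets unset server-side
    body = dict(kwargs)
    if 'net_id' in body:
        body['neutron_management_network'] = body.pop('net_id')
    return body


assert _sketch_build_update_body() == {}
assert _sketch_build_update_body(name=None) == {'name': None}  # unset
assert _sketch_build_update_body(net_id='n1') == {
    'neutron_management_network': 'n1'}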
python-saharaclient-3.1.0/saharaclient/tests/unit/test_job_types.py0000664000175000017500000000324513643576737025721 0ustar  zuulzuul00000000000000# Copyright (c) 2015 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from saharaclient.api import job_types as jt
from saharaclient.tests.unit import base


class JobTypesTest(base.BaseTestCase):
    body = {
        "name": "Hive",
        "plugins": [
            {
                "description": "The Apache Vanilla plugin.",
                "name": "vanilla",
                "title": "Vanilla Apache Hadoop",
                "versions": {
                    "1.2.1": {}
                }
            },
            {
                "description": "The Hortonworks Sahara plugin.",
                "name": "hdp",
                "title": "Hortonworks Data Platform",
                "versions": {
                    "1.3.2": {},
                    "2.0.6": {}
                }
            }
        ]
    }

    def test_job_types_list(self):
        url = self.URL + '/job-types'
        self.responses.get(url, json={'job_types': [self.body]})

        resp = self.client.job_types.list()

        self.assertEqual(url, self.responses.last_request.url)
        self.assertIsInstance(resp[0], jt.JobType)
        self.assertFields(self.body, resp[0])
python-saharaclient-3.1.0/saharaclient/tests/unit/test_base.py0000664000175000017500000000231713643576737024634 0ustar  zuulzuul00000000000000# Copyright (c) 2016 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import saharaclient
from saharaclient.api import base as api_base
from saharaclient.tests.unit import base


class BaseTest(base.BaseTestCase):
    def test_get_query_string(self):
        res = api_base.get_query_string(None, limit=None, marker=None)
        self.assertEqual("", res)

        res = api_base.get_query_string(None, limit=4, marker=None)
        self.assertEqual("?limit=4", res)

        res = api_base.get_query_string({'opt1': 2}, limit=None, marker=3)
        self.assertEqual("?marker=3&opt1=2", res)

    def test_module_version(self):
        self.assertTrue(hasattr(saharaclient, '__version__'))
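# Editor's note -- illustrative sketch, not part of the package. A compact
# reimplementation of the behaviour that test_get_query_string above pins
# down: merge limit/marker into the option dict, return '' when empty, and
# emit parameters in sorted order ('?marker=3&opt1=2'). This is a
# hypothetical stand-in for saharaclient.api.base.get_query_string:
def _sketch_get_query_string(opts, limit=None, marker=None):
    params = dict(opts or {})
    if limit is not None:
        params['limit'] = limit
    if marker is not None:
        params['marker'] = marker
    if not params:
        return ""
    return "?" + "&".join("%s=%s" % (k, params[k]) for k in sorted(params))


assert _sketch_get_query_string(None, limit=None, marker=None) == ""
assert _sketch_get_query_string(None, limit=4) == "?limit=4"
assert _sketch_get_query_string({'opt1': 2}, marker=3) == "?marker=3&opt1=2"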
python-saharaclient-3.1.0/saharaclient/tests/unit/test_jobs.py0000664000175000017500000001001413643576737024660 0ustar  zuulzuul00000000000000# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from saharaclient.api import jobs
from saharaclient.tests.unit import base

from oslo_serialization import jsonutils as json


class JobTest(base.BaseTestCase):
    body = {
        'name': 'name',
        'type': 'pig',
        'mains': ['job_binary_id'],
        'libs': [],
        'description': 'descr',
        'is_public': True,
        'is_protected': False
    }

    def test_create_job(self):
        url = self.URL + '/jobs'
        self.responses.post(url, status_code=202, json={'job': self.body})

        resp = self.client.jobs.create(**self.body)

        self.assertEqual(url, self.responses.last_request.url)
        self.assertEqual(self.body,
                         json.loads(self.responses.last_request.body))
        self.assertIsInstance(resp, jobs.Job)
        self.assertFields(self.body, resp)

    def test_jobs_list(self):
        url = self.URL + '/jobs'
        self.responses.get(url, json={'jobs': [self.body]})

        resp = self.client.jobs.list()

        self.assertEqual(url, self.responses.last_request.url)
        self.assertIsInstance(resp[0], jobs.Job)
        self.assertFields(self.body, resp[0])

    def test_jobs_get(self):
        url = self.URL + '/jobs/id'
        self.responses.get(url, json={'job': self.body})

        resp = self.client.jobs.get('id')

        self.assertEqual(url, self.responses.last_request.url)
        self.assertIsInstance(resp, jobs.Job)
        self.assertFields(self.body, resp)

    def test_jobs_get_configs(self):
        url = self.URL + '/jobs/config-hints/Pig'
        response = {
            "job_config": {
                "args": [],
                "configs": []
            },
            "interface": []
        }
        self.responses.get(url, json=response)

        resp = self.client.jobs.get_configs('Pig')

        self.assertEqual(url, self.responses.last_request.url)
        self.assertIsInstance(resp, jobs.Job)
        self.assertFields(response, resp)

    def test_jobs_delete(self):
        url = self.URL + '/jobs/id'
        self.responses.delete(url, status_code=204)

        self.client.jobs.delete('id')

        self.assertEqual(url, self.responses.last_request.url)

    def test_jobs_update(self):
        url = self.URL + '/jobs/id'
        update_body = {
            'name': 'new_name',
            'description': 'description'
        }

        self.responses.patch(url, status_code=202, json=update_body)

        # check that all parameters will be updated
        resp = self.client.jobs.update('id', name='new_name',
                                       description='description')

        self.assertEqual(url, self.responses.last_request.url)
        self.assertIsInstance(resp, jobs.Job)
        self.assertEqual(update_body,
                         json.loads(self.responses.last_request.body))

        # check that parameters will not be updated
        self.client.jobs.update("id")
        self.assertEqual(url, self.responses.last_request.url)
        self.assertEqual({},
                         json.loads(self.responses.last_request.body))

        # check that all parameters will be unset
        unset_json = {
            "name": None, "description": None, "is_public": None,
            "is_protected": None
        }

        self.client.jobs.update("id", **unset_json)
        self.assertEqual(url, self.responses.last_request.url)
        self.assertEqual(unset_json,
                         json.loads(self.responses.last_request.body))
python-saharaclient-3.1.0/saharaclient/tests/unit/test_resource.py0000664000175000017500000000454713643576737025560 0ustar  zuulzuul00000000000000# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import testtools

from saharaclient.tests.unit import base as test_base


class ResourceTest(testtools.TestCase):
    def test_create_resource(self):
        dict = {"name": "test"}
        resource = test_base.TestResource(None, dict)
        self.assertEqual("test", resource.name)
        self.assertEqual("Test Description", resource.description)

    def test_overwrite_default(self):
        dict = {"name": "test", "description": "Changed Description"}
        resource = test_base.TestResource(None, dict)
        self.assertEqual("test", resource.name)
        self.assertEqual("Changed Description", resource.description)
        self.assertEqual("extra", resource.extra)

    def test_create_dont_modify_info_dict(self):
        dict = {"name": "test", "description": "Changed Description"}
        dict_copy = dict.copy()
        resource = test_base.TestResource(None, dict)
        self.assertIsNotNone(resource)
        self.assertEqual(dict_copy, dict)

    def test_to_dict(self):
        dict = {"name": "test"}
        resource = test_base.TestResource(None, dict)
        self.assertEqual({'description': 'Test Description',
                          'extra': 'extra', 'name': 'test'},
                         resource.to_dict())

    def test_resource_str(self):
        dict = {"name": "test", "description": "Changed Description"}
        resource = test_base.TestResource(None, dict)
        rstr = str(resource)
        self.assertIn(resource.resource_name, rstr)
        self.assertIn("name", rstr)
        self.assertIn("description", rstr)
        self.assertIn("Changed Description", rstr)
        self.assertNotIn("Test Description", rstr)
        self.assertIn("extra", rstr)
        self.assertNotIn("manager", rstr)
python-saharaclient-3.1.0/saharaclient/tests/unit/test_hacking.py0000664000175000017500000000613613643576737025331 0ustar  zuulzuul00000000000000# Copyright 2015 EasyStack Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import testtools

from saharaclient.tests.hacking import checks


class HackingTestCase(testtools.TestCase):
    def test_dict_constructor_with_list_copy(self):
        # Following checks for code-lines with pep8 error
        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
            "    dict([(i, connect_info[i])"))))

        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
            "    attrs = dict([(k, _from_json(v))"))))

        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
            "        type_names = dict((value, key) for key, value in"))))

        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
            "   dict((value, key) for key, value in"))))

        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
            "foo(param=dict((k, v) for k, v in bar.items()))"))))

        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
            " dict([[i,i] for i in range(3)])"))))

        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
            "  dd = dict([i,i] for i in range(3))"))))

        # Following checks for ok code-lines
        self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy(
            "        dict()"))))

        self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy(
            "            create_kwargs = dict(snapshot=snapshot,"))))

        self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy(
            "      self._render_dict(xml, data_el, data.__dict__)"))))

    def test_use_jsonutils(self):
        self.assertEqual(0, len(list(checks.use_jsonutils(
            "import json  # noqa", "path"))))
        self.assertEqual(0, len(list(checks.use_jsonutils(
            "from oslo_serialization import jsonutils as json", "path"))))
        self.assertEqual(0, len(list(checks.use_jsonutils(
            "import jsonschema", "path"))))
        self.assertEqual(1, len(list(checks.use_jsonutils(
            "import json", "path"))))
        self.assertEqual(1, len(list(checks.use_jsonutils(
            "import json as jsonutils", "path"))))

    def test_no_mutable_default_args(self):
        self.assertEqual(0, len(list(checks.no_mutable_default_args(
            "def foo (bar):"))))
        self.assertEqual(1, len(list(checks.no_mutable_default_args(
            "def foo (bar=[]):"))))
        self.assertEqual(1, len(list(checks.no_mutable_default_args(
            "def foo (bar={}):"))))
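# Editor's note -- illustrative sketch, not part of the package. The
# compliant counterparts to the three patterns the hacking checks above
# reject, runnable as-is:
from oslo_serialization import jsonutils as json  # instead of 'import json'


def _sketch_compliant_patterns(bar=None):
    # S360: default to None, never a mutable [] or {} default argument
    bar = bar if bar is not None else []
    # S368: build dicts with a comprehension, not dict([...]) list copies
    squares = {i: i * i for i in range(3)}
    # S375: serialize through jsonutils rather than the stdlib json module
    return json.dumps({'bar': bar, 'squares': sorted(squares)})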
python-saharaclient-3.1.0/saharaclient/tests/unit/test_job_binary_internals.py0000664000175000017500000000717213643576737030123 0ustar  zuulzuul00000000000000# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo_serialization import jsonutils as json

from saharaclient.api import job_binary_internals as jbi
from saharaclient.tests.unit import base


class JobBinaryInternalTest(base.BaseTestCase):
    body = {
        'name': 'name',
        'datasize': '123',
        'id': 'id'
    }

    def test_create_job_binary_internal(self):
        url = self.URL + '/job-binary-internals/name'
        self.responses.put(url, status_code=202,
                           json={'job_binary_internal': self.body})

        resp = self.client.job_binary_internals.create('name', 'data')

        self.assertEqual(url, self.responses.last_request.url)
        self.assertEqual('data', self.responses.last_request.body)
        self.assertIsInstance(resp, jbi.JobBinaryInternal)
        self.assertFields(self.body, resp)

    def test_job_binary_internal_list(self):
        url = self.URL + '/job-binary-internals'
        self.responses.get(url, json={'binaries': [self.body]})

        resp = self.client.job_binary_internals.list()

        self.assertEqual(url, self.responses.last_request.url)
        self.assertIsInstance(resp[0], jbi.JobBinaryInternal)
        self.assertFields(self.body, resp[0])

    def test_job_binary_get(self):
        url = self.URL + '/job-binary-internals/id'
        self.responses.get(url, json={'job_binary_internal': self.body})

        resp = self.client.job_binary_internals.get('id')

        self.assertEqual(url, self.responses.last_request.url)
        self.assertIsInstance(resp, jbi.JobBinaryInternal)
        self.assertFields(self.body, resp)

    def test_job_binary_delete(self):
        url = self.URL + '/job-binary-internals/id'
        self.responses.delete(url, status_code=204)

        self.client.job_binary_internals.delete('id')

        self.assertEqual(url, self.responses.last_request.url)

    def test_job_binary_update(self):
        url = self.URL + '/job-binary-internals/id'
        update_body = {
            'name': 'new_name'
        }

        self.responses.patch(url, status_code=202, json=update_body)

        # check that all parameters will be updated
        resp = self.client.job_binary_internals.update('id', name='new_name')

        self.assertEqual(url, self.responses.last_request.url)
        self.assertIsInstance(resp, jbi.JobBinaryInternal)
        self.assertEqual(update_body,
                         json.loads(self.responses.last_request.body))

        # check that parameters will not be updated
        self.client.job_binary_internals.update("id")
        self.assertEqual(url, self.responses.last_request.url)
        self.assertEqual({},
                         json.loads(self.responses.last_request.body))

        # check that all parameters will be unset
        unset_json = {
            'name': None, "is_public": None, "is_protected": None
        }

        self.client.job_binary_internals.update("id", **unset_json)
        self.assertEqual(url, self.responses.last_request.url)
        self.assertEqual(unset_json,
                         json.loads(self.responses.last_request.body))
python-saharaclient-3.1.0/saharaclient/tests/unit/test_job_binaries.py0000664000175000017500000000637113643576737026354 0ustar  zuulzuul00000000000000# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from saharaclient.api import job_binaries as jb
from saharaclient.tests.unit import base

from oslo_serialization import jsonutils as json


class JobBinaryTest(base.BaseTestCase):
    body = {
        'name': 'name',
        'url': 'url',
        'description': 'descr',
        'extra': {
            'user': 'user',
            'password': '123'
        }
    }

    update_body = {
        'name': 'Updatedname',
        'url': 'Updatedurl',
        'description': 'Updateddescr',
        'extra': {
            'user': 'user',
            'password': 'Updated123'
        }
    }

    def test_create_job_binary(self):
        url = self.URL + '/job-binaries'
        self.responses.post(url, status_code=202,
                            json={'job_binary': self.body})

        resp = self.client.job_binaries.create(**self.body)

        self.assertEqual(url, self.responses.last_request.url)
        self.assertEqual(self.body,
                         json.loads(self.responses.last_request.body))
        self.assertIsInstance(resp, jb.JobBinaries)
        self.assertFields(self.body, resp)

    def test_job_binary_list(self):
        url = self.URL + '/job-binaries'
        self.responses.get(url, json={'binaries': [self.body]})

        resp = self.client.job_binaries.list()

        self.assertEqual(url, self.responses.last_request.url)
        self.assertIsInstance(resp[0], jb.JobBinaries)
        self.assertFields(self.body, resp[0])

    def test_job_binary_get(self):
        url = self.URL + '/job-binaries/id'
        self.responses.get(url, json={'job_binary': self.body})

        resp = self.client.job_binaries.get('id')

        self.assertEqual(url, self.responses.last_request.url)
        self.assertIsInstance(resp, jb.JobBinaries)
        self.assertFields(self.body, resp)

    def test_job_binary_delete(self):
        url = self.URL + '/job-binaries/id'
        self.responses.delete(url, status_code=204)

        self.client.job_binaries.delete('id')

        self.assertEqual(url, self.responses.last_request.url)

    def test_job_binary_get_file(self):
        url = self.URL + '/job-binaries/id/data'
        self.responses.get(url, text='data')

        resp = self.client.job_binaries.get_file('id')

        self.assertEqual(url, self.responses.last_request.url)
        self.assertEqual(b'data', resp)

    def test_job_binary_update(self):
        url = self.URL + '/job-binaries/id'
        self.responses.put(url, status_code=202,
                           json={'job_binary': self.update_body})

        resp = self.client.job_binaries.update("id", self.update_body)
        self.assertEqual(self.update_body["name"], resp.name)
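# Editor's note -- illustrative sketch, not part of the package.
# test_job_binary_get_file above shows that get_file() returns the raw
# payload as bytes (b'data') even when the mocked response was registered
# as text, so callers should treat the result as binary data. A hypothetical
# download helper, assuming 'client' is a configured saharaclient Client:
def _sketch_save_job_binary(client, binary_id, path):
    data = client.job_binaries.get_file(binary_id)  # bytes, not str
    with open(path, 'wb') as f:  # binary mode avoids decoding issues
        f.write(data)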
python-saharaclient-3.1.0/saharaclient/tests/unit/__init__.py0000664000175000017500000000000013643576737024415 0ustar  zuulzuul00000000000000python-saharaclient-3.1.0/saharaclient/tests/unit/test_manager.py0000664000175000017500000000332313643576737025342 0ustar  zuulzuul00000000000000# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import mock

from saharaclient.api import base
from saharaclient.tests.unit import base as test_base


class ManagerTest(test_base.BaseTestCase):
    def setUp(self):
        super(ManagerTest, self).setUp()
        self.man = test_base.TestManager(self.client)

    def test_find(self):
        self.man.list = mock.MagicMock(
            return_value=[mock.Mock(test='foo'), mock.Mock(test='bar')]
        )

        self.assertEqual(2, len(self.man.find()))
        self.assertEqual(1, len(self.man.find(test='foo')))
        self.assertEqual(0, len(self.man.find(test='baz')))

    def test_find_unique(self):
        expected = mock.Mock(test='foo')
        self.man.list = mock.MagicMock(
            return_value=[expected, mock.Mock(test='bar')]
        )

        ex = self.assertRaises(base.APIException,
                               self.man.find_unique, test='baz')
        self.assertEqual(404, ex.error_code)
        ex = self.assertRaises(base.APIException, self.man.find_unique)
        self.assertEqual(409, ex.error_code)
        self.assertEqual(expected, self.man.find_unique(test='foo'))
python-saharaclient-3.1.0/saharaclient/tests/unit/test_plugins.py0000664000175000017500000000472013643576737025403 0ustar  zuulzuul00000000000000# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from saharaclient.api import plugins
from saharaclient.tests.unit import base


class PluginTest(base.BaseTestCase):
    body = {
        'description': 'description',
        'name': 'name',
        'version': '1'
    }

    def test_plugins_list(self):
        url = self.URL + '/plugins'
        self.responses.get(url, json={'plugins': [self.body]})

        resp = self.client.plugins.list()

        self.assertEqual(url, self.responses.last_request.url)
        self.assertIsInstance(resp[0], plugins.Plugin)
        self.assertFields(self.body, resp[0])

    def test_plugins_get(self):
        url = self.URL + '/plugins/name'
        self.responses.get(url, json={'plugin': self.body})

        resp = self.client.plugins.get('name')

        self.assertEqual(url, self.responses.last_request.url)
        self.assertIsInstance(resp, plugins.Plugin)
        self.assertFields(self.body, resp)

    def test_plugins_get_version_details(self):
        url = self.URL + '/plugins/name/1'
        self.responses.get(url, json={'plugin': self.body})

        resp = self.client.plugins.get_version_details('name', '1')

        self.assertEqual(url, self.responses.last_request.url)
        self.assertIsInstance(resp, plugins.Plugin)
        self.assertFields(self.body, resp)

    def test_convert_to_cluster_template(self):
        url = self.URL + '/plugins/plugin/1/convert-config/template'
        response = {
            'name': 'name',
            'description': 'description',
            'plugin_name': 'plugin',
            'hadoop_version': '1',
        }
        self.responses.post(url, status_code=202,
                            json={'cluster_template': response})

        resp = self.client.plugins.convert_to_cluster_template(
            'plugin', 1, 'template', 'file')

        self.assertEqual(url, self.responses.last_request.url)
        self.assertEqual(response, resp)
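# Editor's note -- illustrative sketch, not part of the package. A minimal
# model of the find()/find_unique() semantics that ManagerTest above relies
# on: find() filters list() results by attribute equality; find_unique()
# turns "no match" into a 404-style error and "several matches" into a
# 409-style conflict. The error classes are plain stand-ins for
# saharaclient.api.base.APIException:
class _SketchNotFound(Exception):
    error_code = 404


class _SketchConflict(Exception):
    error_code = 409


def _sketch_find(resources, **kwargs):
    return [r for r in resources
            if all(getattr(r, k, None) == v for k, v in kwargs.items())]


def _sketch_find_unique(resources, **kwargs):
    found = _sketch_find(resources, **kwargs)
    if not found:
        raise _SketchNotFound()
    if len(found) > 1:
        raise _SketchConflict()  # no kwargs: everything matches -> conflict
    return found[0]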
python-saharaclient-3.1.0/saharaclient/tests/unit/test_clusters.py0000664000175000017500000001636613643576737025573 0ustar  zuulzuul00000000000000# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from saharaclient.api import clusters as cl
from saharaclient.tests.unit import base

from oslo_serialization import jsonutils as json


class ClusterTest(base.BaseTestCase):
    body = {
        'name': 'name',
        'plugin_name': 'fake',
        'hadoop_version': '0.1',
        'cluster_template_id': 'id',
    }
    body_with_count = {
        'name': 'name',
        'plugin_name': 'fake',
        'hadoop_version': '0.1',
        'cluster_template_id': 'id',
        'count': 2
    }
    body_with_progress = {
        'name': 'name',
        'plugin_name': 'fake',
        'hadoop_version': '0.1',
        'cluster_template_id': 'id',
        "provision_progress": []
    }
    test_shares = [
        {
            "id": "bd71d2d5-60a0-4ed9-a3d2-ad312c368880",
            "path": "/mnt/manila",
            "access_level": "rw"
        }
    ]

    def test_create_cluster_with_template(self):
        url = self.URL + '/clusters'
        self.responses.post(url, status_code=202, json={'cluster': self.body})

        resp = self.client.clusters.create(**self.body)

        self.assertEqual(url, self.responses.last_request.url)
        self.assertEqual(self.body,
                         json.loads(self.responses.last_request.body))
        self.assertIsInstance(resp, cl.Cluster)
        self.assertFields(self.body, resp)

    def test_create_cluster_without_template(self):
        body = self.body.copy()
        del body['cluster_template_id']
        body.update({'default_image_id': 'image_id', 'cluster_configs': {},
                     'node_groups': ['ng1', 'ng2']})

        url = self.URL + '/clusters'
        self.responses.post(url, status_code=202, json={'cluster': body})

        resp = self.client.clusters.create(**body)

        self.assertEqual(url, self.responses.last_request.url)
        self.assertEqual(body, json.loads(self.responses.last_request.body))
        self.assertIsInstance(resp, cl.Cluster)
        self.assertFields(body, resp)

    def test_create_multiple_clusters(self):
        url = self.URL + '/clusters/multiple'
        self.responses.post(url, status_code=202,
                            json={'clusters': ['id1', 'id2']})

        resp = self.client.clusters.create(**self.body_with_count)

        self.assertEqual(url, self.responses.last_request.url)
        self.assertEqual(self.body_with_count,
                         json.loads(self.responses.last_request.body))
        self.assertIsInstance(resp, cl.Cluster)
        self.assertFields({'clusters': ['id1', 'id2']}, resp)

    def test_clusters_list(self):
        url = self.URL + '/clusters'
        self.responses.get(url, json={'clusters': [self.body]})

        resp = self.client.clusters.list()

        self.assertEqual(url, self.responses.last_request.url)
        self.assertIsInstance(resp[0], cl.Cluster)
        self.assertFields(self.body, resp[0])

    def test_clusters_get(self):
        url = self.URL + '/clusters/id?show_progress=False'
        self.responses.get(url, json={'cluster': self.body})

        resp = self.client.clusters.get('id')

        self.assertEqual(url, self.responses.last_request.url)
        self.assertIsInstance(resp, cl.Cluster)
        self.assertFields(self.body, resp)

    def test_clusters_get_with_progress(self):
        url = self.URL + '/clusters/id?show_progress=True'
        self.responses.get(url, json={'cluster': self.body_with_progress})

        resp = self.client.clusters.get('id', show_progress=True)

        self.assertEqual(url, self.responses.last_request.url)
        self.assertIsInstance(resp, cl.Cluster)
        self.assertFields(self.body, resp)
    def test_clusters_scale(self):
        url = self.URL + '/clusters/id'
        self.responses.put(url, status_code=202, json=self.body)

        scale_body = {
            'resize_node_groups': [
                {
                    'count': 2,
                    'name': 'name1'
                },
            ],
            'add_node_groups': [
                {
                    'count': 1,
                    'name': 'name2',
                    'node_group_template_id': 'id'
                }
            ]
        }

        resp = self.client.clusters.scale('id', scale_body)

        self.assertEqual(url, self.responses.last_request.url)
        self.assertEqual(scale_body,
                         json.loads(self.responses.last_request.body))
        self.assertIsInstance(resp, cl.Cluster)
        self.assertFields(self.body, resp)

    def test_clusters_delete(self):
        url = self.URL + '/clusters/id'
        self.responses.delete(url, status_code=204)

        self.client.clusters.delete('id')

        self.assertEqual(url, self.responses.last_request.url)

    def test_clusters_update(self):
        url = self.URL + '/clusters/id'
        update_body = {
            'name': 'new_name',
            'description': 'descr'
        }

        self.responses.patch(url, status_code=202, json=update_body)

        # check that all parameters will be updated
        resp = self.client.clusters.update('id', name='new_name',
                                           description='descr')

        self.assertEqual(url, self.responses.last_request.url)
        self.assertIsInstance(resp, cl.Cluster)
        self.assertEqual(update_body,
                         json.loads(self.responses.last_request.body))

        # check that parameters will not be updated
        self.client.clusters.update("id")
        self.assertEqual(url, self.responses.last_request.url)
        self.assertEqual({},
                         json.loads(self.responses.last_request.body))

        # check that all parameters will be unset
        unset_json = {
            "name": None, "description": None, "is_public": None,
            "is_protected": None, "shares": None
        }

        self.client.clusters.update("id", **unset_json)
        self.assertEqual(url, self.responses.last_request.url)
        self.assertEqual(unset_json,
                         json.loads(self.responses.last_request.body))

    def test_clusters_update_share(self):
        url = self.URL + '/clusters/id'
        update_body = {
            'name': 'new_name',
            'description': 'descr',
            'shares': self.test_shares
        }

        self.responses.patch(url, status_code=202, json=update_body)

        resp = self.client.clusters.update('id', name='new_name',
                                           description='descr',
                                           shares=self.test_shares)

        self.assertEqual(url, self.responses.last_request.url)
        self.assertIsInstance(resp, cl.Cluster)
        self.assertEqual(update_body,
                         json.loads(self.responses.last_request.body))
python-saharaclient-3.1.0/saharaclient/tests/__init__.py0000664000175000017500000000000013643576737023436 0ustar  zuulzuul00000000000000python-saharaclient-3.1.0/README.rst0000664000175000017500000000333013643576737017225 0ustar  zuulzuul00000000000000========================
Team and repository tags
========================

.. image:: https://governance.openstack.org/tc/badges/python-saharaclient.svg
    :target: https://governance.openstack.org/tc/reference/tags/index.html

.. Change things from this point on

Python bindings to the OpenStack Sahara API
===========================================

.. image:: https://img.shields.io/pypi/v/python-saharaclient.svg
    :target: https://pypi.org/project/python-saharaclient/
    :alt: Latest Version

This is a client for the OpenStack Sahara API. There's a Python API (the
``saharaclient`` module), and a command-line script (``sahara``). Each
implements the OpenStack Sahara API. You can find documentation for both
Python bindings and CLI in `Docs`_.

Development takes place via the usual OpenStack processes as outlined in
the `developer guide <https://docs.openstack.org/infra/manual/developers.html>`_.

.. _Docs: https://docs.openstack.org/python-saharaclient/latest/

* License: Apache License, Version 2.0
* `PyPi`_ - package installation
* `Online Documentation`_
* `Blueprints`_ - feature specifications
* `Bugs`_ - stories and issue tracking
* `Source`_
* `Specs`_
* `Release Notes`_
* `How to Contribute`_

.. _PyPi: https://pypi.org/project/python-saharaclient
.. _Online Documentation: https://docs.openstack.org/python-saharaclient/latest/
.. _Blueprints: https://specs.openstack.org/openstack/sahara-specs/
.. _Bugs: https://storyboard.openstack.org/#!/project/934
.. _Source: https://opendev.org/openstack/python-saharaclient
.. _How to Contribute: https://docs.openstack.org/infra/manual/developers.html
.. _Specs: https://specs.openstack.org/openstack/sahara-specs/
.. _Release Notes: https://docs.openstack.org/releasenotes/python-saharaclient
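A minimal usage sketch of the Python API described above -- not part of the
packaged README; the endpoint, credentials, and project values are
placeholders, and the client construction mirrors the unit tests earlier in
this archive::

    from keystoneauth1 import identity
    from keystoneauth1 import session

    from saharaclient.api import client

    auth = identity.Password(auth_url='http://keystone:5000/v3',
                             username='demo', password='secret',
                             project_name='demo',
                             user_domain_id='default',
                             project_domain_id='default')
    sahara = client.Client(session=session.Session(auth=auth),
                           sahara_url='http://sahara:8386/v1.1')
    for plugin in sahara.plugins.list():
        print(plugin.name)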
python-saharaclient-3.1.0/.stestr.conf0000664000175000017500000000006413643576737020010 0ustar  zuulzuul00000000000000[DEFAULT]
test_path=./saharaclient/tests
top_dir=./
python-saharaclient-3.1.0/test-requirements.txt0000664000175000017500000000060213643576737021776 0ustar  zuulzuul00000000000000# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.

hacking>=3.0,<3.1.0 # Apache-2.0

coverage!=4.4,>=4.0 # Apache-2.0
mock>=2.0.0 # BSD
oslotest>=3.2.0 # Apache-2.0
stestr>=1.0.0 # Apache-2.0
requests-mock>=1.2.0 # Apache-2.0
python-saharaclient-3.1.0/tox.ini0000664000175000017500000000605413643576737017055 0ustar  zuulzuul00000000000000[tox]
envlist = py37,pypy,pep8,releasenotes
minversion = 3.1.1
skipsdist = True

# this allows tox to infer the base python from the environment name
# and override any basepython configured in this file
ignore_basepython_conflict = true

[testenv]
basepython = python3
usedevelop = True
install_command = pip install {opts} {packages}
setenv =
    VIRTUAL_ENV={envdir}
    DISCOVER_DIRECTORY=saharaclient/tests/unit
deps =
    -c{env:UPPER_CONSTRAINTS_FILE:https://opendev.org/openstack/requirements/raw/branch/master/upper-constraints.txt}
    -r{toxinidir}/requirements.txt
    -r{toxinidir}/test-requirements.txt
commands =
    find . -type f -name "*.pyc" -delete
    stestr run {posargs}
whitelist_externals =
    find
    rm
passenv = http_proxy HTTP_PROXY https_proxy HTTPS_PROXY no_proxy NO_PROXY

[testenv:debug]
basepython = python3
commands = oslo_debug_helper -t saharaclient/tests/unit {posargs}

[testenv:debug-py36]
basepython = python3.6
commands = oslo_debug_helper -t saharaclient/tests/unit {posargs}
[testenv:cover]
setenv =
    {[testenv]setenv}
    PYTHON=coverage run --source saharaclient --parallel-mode
commands =
    coverage erase
    find . -type f -name "*.pyc" -delete
    stestr run {posargs}
    coverage combine
    coverage html -d cover
    coverage xml -o cover/coverage.xml
    coverage report

[testenv:pep8]
sitepackages = False
commands = flake8

[testenv:doc8]
deps =
    -c{env:UPPER_CONSTRAINTS_FILE:https://opendev.org/openstack/requirements/raw/branch/master/upper-constraints.txt}
    -r{toxinidir}/requirements.txt
    -r{toxinidir}/test-requirements.txt
    doc8
commands = doc8 doc/source

[testenv:venv]
commands = {posargs}

[testenv:docs]
deps =
    -c{env:UPPER_CONSTRAINTS_FILE:https://opendev.org/openstack/requirements/raw/branch/master/upper-constraints.txt}
    -r{toxinidir}/requirements.txt
    -r{toxinidir}/doc/requirements.txt
commands =
    rm -rf doc/build
    sphinx-build -W -b html doc/source doc/build/html

[testenv:releasenotes]
deps =
    -c{env:UPPER_CONSTRAINTS_FILE:https://opendev.org/openstack/requirements/raw/branch/master/upper-constraints.txt}
    -r{toxinidir}/requirements.txt
    -r{toxinidir}/doc/requirements.txt
commands =
    rm -rf releasenotes/build
    sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html

[flake8]
show-source = true
builtins = _
exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,tools

[flake8:local-plugins]
extension =
    S361 = checks:import_db_only_in_conductor
    S362 = checks:hacking_no_author_attr
    S363 = checks:check_oslo_namespace_imports
    S364 = commit_message:OnceGitCheckCommitTitleBug
    S365 = commit_message:OnceGitCheckCommitTitleLength
    S368 = checks:dict_constructor_with_list_copy
    S373 = logging_checks:no_translate_logs
    S374 = logging_checks:accepted_log_levels
    S375 = checks:use_jsonutils
    S360 = checks:no_mutable_default_args
paths = ./saharaclient/tests/hacking

[testenv:lower-constraints]
deps =
    -c{toxinidir}/lower-constraints.txt
    -r{toxinidir}/test-requirements.txt
    -r{toxinidir}/requirements.txt
python-saharaclient-3.1.0/.coveragerc0000664000175000017500000000021513643576737017656 0ustar  zuulzuul00000000000000[run]
branch = True
source = saharaclient
omit =
    .tox/*
    saharaclient/tests/*

[paths]
source = saharaclient

[report]
ignore_errors = True
python-saharaclient-3.1.0/ChangeLog0000664000175000017500000006010413643577103017276 0ustar  zuulzuul00000000000000CHANGES
=======

3.1.0
-----

* Cleanup py27 support
* Update hacking for Python3
* Fix the "image register" command (OSC 5.2.0 regression)

3.0.0
-----

* fix: typo in tox minversion option
* [ussuri][goal] Drop python 2.7 support and testing
* Switch to Ussuri jobs
* Update master for stable/train

2.3.0
-----

* py3: use the train test template and other updates
* Update sphinx from current requirements
* Py3: fix the OSC download job binary command
* Replace git.openstack.org URLs with opendev.org URLs
* OpenDev Migration Patch
* Update master for stable/stein
* Add py37 and API v2 jobs (scenario, tempest)

2.2.0
-----

* Add missing APIv2 features to client, OSC

2.1.0
-----

* Preparing OSC for APIv2
* Add Python 3.6 classifier to setup.cfg
* add python 3.6 unit test job
* Update devel info: mailing list
* Use templates for cover and lower-constraints
* add python 3.6 unit test job
* switch documentation job to new PTI
* import zuul job settings from project-config
* Update reno for stable/rocky

2.0.0
-----

* Clean S3 data source create/update
* Support of S3 data sources in OSC
* Adding boot from volume to osc
* Adding boot from volume
* Correct a missed job\_execution->job
* Fix the cover tox target (switch to stestr)
* Revert "Fix the cover tox target (switch to stestr)"
* Allow S3 credentials in data source create
* Rework saharaclient authentication
authentication * Reflect response fixes for jobs/jobs templates * Fix the cover tox target (switch to stestr) * Switch to using stestr * Add release note link in README * Reflect change to multiple clusters creation * Support of the improved force-delete in client * Remove PyPI downloads * fix tox python3 overrides * Fix build with Sphinx 1.7.x * Trivial: Update pypi url to new url 1.6.0 ----- * Tox: cleanup usage of py34, remove a now-useless section * Updated from global requirements * add lower-constraints job * Remove outdated tools and files * Updated from global requirements * Follow the new PTI for document build * Migration to Storyboard (2) * Migration to Storyboard * Updated from global requirements * Updated from global requirements * Updated from global requirements * Updated from global requirements * Native Zuul v3 jobs * Update reno for stable/queens 1.5.0 ----- * Auto-document members properly * APIv2 support in client * Updated from global requirements * Modify home page in setup.cfg * Updated from global requirements * Updated from global requirements * Support of S3 binaries in OSC * Fix misleading docstring * Properly document job binary "extra" * Updated from global requirements * Updated from global requirements * Updated from global requirements * Remove -U from pip install * Avoid tox\_install.sh for constraints support * Remove setting of version/release from releasenotes 1.4.0 ----- * Updated from global requirements * Updated from global requirements * Add export of node group templates to CLI * Add export of cluster templates * Fix KeyError in "cluster scale" command * Updated from global requirements * [ut] replace .testr.conf with .stestr.conf * Updated from global requirements * Reorganize the documentation following the new structure * Updated from global requirements * Add import of node group templates * Add import of Cluster Templates * Updated from global requirements * Updated from global requirements * Updated from global requirements * Adding volume\_mount\_prefix parameter * Add export of node group templates * Update reno for stable/pike 1.3.0 ----- * Updated from global requirements * Updated from global requirements * Updated from global requirements * Clone sahara hacking checks to saharaclient * Update and optimize documentation links * Updated from global requirements * doc: migrate to openstackdocstheme * doc: enable warning-is-error and fix warnings * Remove log translations * Use 'project' instead of 'tenant' * Updated from global requirements * Updated from global requirements * Updated from global requirements * Fix html\_last\_updated\_fmt for Python3 * Updated from global requirements * Updated from global requirements * Updated from global requirements * Updated from global requirements * Updated from global requirements * Updated from global requirements 1.2.0 ----- * Updated from global requirements * Replace six.iteritems() with .items() * Updated from global requirements * Fix a bug in jobs.py * PYthon 3.5 is added * Updated from global requirements * [Fix gate]Update test requirement * Updated from global requirements * Spelling replaced from "ot" to "of" * Fixed log messages * Updated from global requirements * Updated from global requirements * Set client module \_\_version\_\_ * Update reno for stable/ocata 1.1.0 ----- * Updated from global requirements * [trivial] Fix spelling * Updated from global requirements * Enable coverage report in console output * Add Constraints support * Replace logging with oslo\_log * Updated from 
global requirements * Updated from global requirements * Show team and repo badges on README * Updated from global requirements 1.0.0 ----- * Updated from global requirements * Updated from global requirements * Provide the cluster info about WebUI * Updated from global requirements * Updated from global requirements * Updated from global requirements * Updated from global requirements * Add plug-in summary for osc doc * Enable release notes translation * Optimize command job-binary-download * Raise exception in command "plugin configs get" * remove old cli commands * Return error\_code in command "job binary download" * Updated from global requirements * Updated from global requirements * Updated from global requirements * Add newline to strings in stdout/stderr.write() * Updated from global requirements * Updated from global requirements * standardize release note page ordering * Update reno for stable/newton * Fix doc build if git is absent 0.18.0 ------ * Updated from global requirements * Updated from global requirements 0.17.0 ------ * Add sorting ability to Python-saharaclient * Designate integration * Updated from global requirements 0.16.0 ------ * Updated from global requirements * Add pagination ability to Python-saharaclient * Remove discover from test-requirements * Updated from global requirements * include storm.pyleus in job types * Updated from global requirements * support event logs for CLI * plugin's api update support * Updated from global requirements * Remove unused functional tests * Updated from global requirements * Updated from global requirements * Add openstackclient dependency back * Use osc\_lib instead of cliff * Updated from global requirements * Updated from global requirements * Use osc-lib instead of openstackclient * Updated from global requirements * Remove incorrectly used "# flake8: noqa" * print statment has become a function in py3 * Updated from global requirements * avoid additional requirement for building docs * Migrate functional tests to openstack-client 0.15.0 ------ * Change plugin version 'version' parameter to 'plugin-version' * Updated from global requirements * Updated from global requirements * Updated from global requirements * Replace tempest\_lib with tempest.lib * Updated from global requirements * Updated from global requirements * Updated from global requirements * Updated from global requirements * Updated from global requirements * Remove integration tests from saharaclient * Update reno for stable/mitaka * Updated from global requirements 0.13.0 ------ * Remove unused pngmath Sphinx extension * Fixing updates via CLI * Fixing cluster creation with neutron network via CLI * Use ostestr instead of the custom pretty\_tox.sh * Adding "health verification --show" CLI call * Updated from global requirements * Updated from global requirements * Updated from global requirements * Add debug testenv in tox 0.12.0 ------ * Keystoneclient to keystoneauth migration * Updated from global requirements * verifications impl for saharaclient * Adding release notes for saharaclient * Updated from global requirements * Adding ability to unset fields with update calls * Updated from global requirements * Replace deprecated library function os.popen() with subprocess * Updated from global requirements * Remove argparse dependency from requirements.txt file * Adding release notes for saharaclient * Fixing cluster scaling via CLI * Updated from global requirements * Updated from global requirements * Drop py33 support * Updated from global 
requirements * Deprecated tox -downloadcache option removed * Updated from global requirements * Add releasenotes to sahara client * Updated from global requirements * Remove py26 support * remove netaddr from requirements * Delete python bytecode before every test run * Updated from global requirements * Updated from global requirements * Adding indications of results after delete operations * Adding ability to get plugin processes via CLI * Updated from global requirements * Replacing hard coded cluster status using utils in sahara-client * Updated from global requirements * Fixing data source update in CLI * Adding ability to provide name or ID of the flavor in CLI * Updated from global requirements * Enabling DeprecationWarning if session is not provided * Adding sphinx extension for CLI docs autogeneration * Last sync from oslo-incubator * Adding autogenerated saharaclient API docs * Adding Sahara CLI overview to docs and fixing warnings * Images CLI improvement * Making desc parameter of update\_image optional * Deprecation of the Sahara CLI * Allowing for shares to be edited on an existing cluster * Fix bypass-url for CLI * Fixing updates for CLI * Add \*.log to gitignore * Adding get\_resource\_id method to CLI utils * Adding Job Binaries support to CLI * Updated from global requirements * Adding Jobs support to CLI * Updated from global requirements * Making parameters of job binaries create optional * Adding Job Types support to CLI * Adding Job Templates support to CLI * Making parameters of job executions create optional * Making parameters of job create optional * Updated from global requirements * Adding Clusters support to CLI * Adding Cluster Templates support to CLI * Adding Node Group Templates support to CLI * Changing public/protected options handling * Improve readme contents * Updated from global requirements * Updated from global requirements * Add the mising requriements * Updated from global requirements * Updated from global requirements * Adding Images support to CLI * Fix functional tests in gate * Fixed problem with tags update * Added volume\_mount\_prefix for node group templates * Adding update and minor fixes to Data Sources CLI * print usage when no argument is specified for python3 * Fixing cluster and node group template updates * Adding sahara\_url and endpoint\_type to OpenstackClient CLI * Adding return of updated image for update\_image, update\_tags * Updated from global requirements * Fix functional tests in saharaclient * Remove duplicate .coverage in .gitignore file * Shows reason of failure in base.get * Remove \`data source update\` from setup.cfg * Change ignore-errors to ignore\_errors * Adding missed public/protected support for jobs creation * Updating saharaclient api docs 0.11.0 ------ * Updated from global requirements * Support updates and ACL for objects * Updated from global requirements * Updated from global requirements * Improve help strings * Fixing query string for multiple opts * Updated from global requirements * Adding Data Sources support to CLI 0.10.1 ------ * Fix backward compat issue for job interfaces * Add initial commit for integration with Openstackclient 0.10.0 ------ * Updated from global requirements * API to mount manila shares to clusters * Add support OS\_ENDPOINT\_TYPE env variable to CLI * Add support of use\_autoconfig field * Add to\_dict() method to Resource class * Updated from global requirements * Add cluster-scale command to the CLI * Adding interface argument for job template and job * Fix 
\_get\_by\_id\_or\_name method * Updated from global requirements * Updated from global requirements * Allow multiple clusters creation * Sorting 'job-list' output by start\_time * Including 'start\_time' column at 'sahara job-list' command * Updated from global requirements * pass environment variables of proxy to tox * Updated from global requirements * Updated from global requirements 0.9.1 ----- * Adding job binary editing to library and CLI * Pass OpenStack environment variables to client tests * Adding data source editing to library and CLI * Updated from global requirements * Adding node group template editing to CLI * Adding cluster template editing to CLI * Updated from global requirements * Drop use of 'oslo' namespace package * Updated from global requirements * Try getting old service type if session used * Add .coveragerc to saharaclient * Update README to work with release tools 0.9.0 ----- * Uncap library requirements for liberty * Add regions support to saharaclient * Provide user-agent in saharaclient * Mark saharaclient as being a universal wheel * Add CONTRIBUTING.rst * Port to Python 3 * add --name option to assign name to job-binary-internal * Rework authentication * Add support for job-types-list * Add post\_test\_hook for functional tests * Copy functional tests from tempest CLI * Updated from global requirements * Add support for show\_events parameter * Added support of instance locality 0.8.0 ----- * Removed cluster validation on client side * Add how to participate doc * Added --bypass-url support for keystone 3 * Updated from global requirements * Remove strutils from openstack/common * Remove importutils from openstack-common.conf * Pass service type argument in Sahara CLI * Fixed work with 'data\_processing' service spelling * Added support of is\_proxy\_gateway field for node group * Pass endpoint\_type to Sahara client constructor * Use pretty-tox for better test output * Update apiclient.exceptions oslo-incubator module * Update apiclient.auth oslo-incubator module * Update \_i18n oslo-incubator module * Remove unused uuidutils dep * Remove obsolete gettextutils * Remove unused apiclient oslo files * apiclient.auth is required since d02f0e1d (21 sep 2014) * Add find\_unique base manager method * Add ability to get events from saharaclient * Using oslo\_\* instead of oslo.\* * Updated from global requirements * Added SSL-related parameters to the client * Added ability to pass image\_id for node group * Fix output of supported api versions * Use requests-mock for mocking * Remove cyclic dependency * Fix for sahara CLI * Enable W292 and E123 * Move to hacking 0.10 * Updated from global requirements * Updated from global requirements * Updated from global requirements * Updated from global requirements * Workflow documentation is now in infra-manual * Update oslo-incubator importutils * Update oslo-incubator apiclient.exceptions * Update oslo-incubator cliutils * Updating oslo-incubator * Update oslo-incubator strutils * Set default service\_type to "data-processing" in client * Added unit tests for python bindings 0.7.6 ----- * Replacing data\_processing with data-processing * Adding support for query filtering to list() calls * Fixed old style class declaration * Add Support for Keystone V3 CLI 0.7.5 ----- * Add volume\_type support to node group templates * Support Cinder availability zones * Updated from global requirements * Updated from global requirements * Updated from global requirements * Support Nova availability zones * Fix arguments order in 
assertEqual * Use base utils from oslo-incubator instead copy-pasted from nova 0.7.4 ----- * Updated from global requirements 0.7.3 ----- * Fixed backward compatibility of sahara client 0.7.2 ----- * Sync oslo cliutils module * Sync oslo apiclient.exceptions module * Updated from global requirements * Sync oslo strutils module * [client] Fixed pep8 and py27 tasks on tox 1.7.2 * Add warn re sorting requirements * Add doc8 tox env * Work toward Python 3.4 support and testing * Fix duplicate help strings 0.7.1 ----- * Updated from global requirements * Added hadling of non-json response from Sahara * Updated from global requirements * Updated from global requirements * Added support of security groups management * Updated from global requirements * Updated from global requirements * updating link to Sahara docs * Add oslo.i18n lib to requirements * Update oslo-incubator cliutils module * Update oslo-incubator apiclient module * Update oslo-incubator strutils module * Update oslo-incubator gettextutils module * Update oslo-incubator apiclient.exceptions module * Remove docutils pin * Jar files for pig and mapreduce EDP tests * Update code for new hacking/pep8/flake8 global requirements * Make Cluster Template accept default\_image\_id field * Fixes ambiguous cli output between "None" and NoneType * Updated from global requirements * Remove vim editor configuration from comments * Updated from global requirements * Updated from global requirements * Don't set json content type for non-json data * Updated from global requirements * Added new hacking version to requirements * Updated from global requirements * Remove excutils from openstack-common.conf * Updated from global requirements * Fix network passing for cluster-template-create * removed unused methods from nova.utils * Update oslo-incubator cliutils * Update oslo-incubator apiclient.exceptions * Update oslo-incubator importutils * Update oslo-incubator strutils (and gettextutils) * Remove timeutils from openstack-common.conf * Updated from global requirements * use strutils.to\_slug() instead of utils.slugify() * replace string format arguments with function parameters * Add help string to job-create * Updated from global requirements * Add EDP jobs to cli integration tests * Add cli cluster creation and teardown test * Updated from global requirements * Updated from global requirements * Stub integration tests for vanilla, vanilla2 and hdp plugins * Restructure tests directory in preparation for cli integration tests * Updated from global requirements * Updated from global requirements * Updated from global requirements 0.7.0 ----- * Remove savanna aliases * Add README file 0.6.0 ----- * Implement "sahara --version" * Change internal variables and classes to 'sahara' * Change packaging references to python-saharaclient * Swap the saharaclient and savannaclient directories * Change client doc references to sahara * Change the enduser facing apects of the shell * Update .gitreview to point on updated repo * Make savanna able to be executed/used as sahara * Add excutils and timeutils to openstack/common * Update oslo-incubator cliutils module * Update oslo-incubator apiclient.exceptions modules * Add missed PrettyTable requirement * Updated from global requirements 0.5.0 ----- * Args should be passed as a list on job creation * Check for presence of 'credentials' before filtering * Check for presence of 'extra' before filtering * Updated from global requirements * Improve help strings * Api Exception improvement * Update oslo-incubator 
cliutils module
* Update oslo-incubator apiclient.exceptions module
* Update oslo-incubator strutils module
* Update oslo-incubator gettextutils module
* Update oslo-incubator importutils module
* Add importutils module
* Fix typo in doc string
* Move client docs to the client
* Add savannaclient/version.py to store version info
* Make the single entrypoint for client
* Remove dependent module py3kcompat
* Removed copy\_if\_defined function from ClusterManager
* print\_list(): do not call decode() on a text string
* Some improvements in tox.ini
* Sync with global requirements
* Changed base Resource class to prevent changing of passed arguments
* Add --name option to image unregister & tag cmds
* Remove job\_exec\_data argument
* Add --name option to appropriate delete commands
* Add --name option to appropriate show commands
* Add space to name for consistency
* Map neutron\_management\_network to net\_id in CLI
* Add job-create to CLI
* Add job-template-create to CLI
* Fixed issue when savanna url is not provided
* Update oslo-incubator cliutils module
* Update oslo-incubator strutils module
* Make --bypass-url default to env(BYPASS\_URL)
* Refactor display of a job
* Updated from global requirements
* Refactor display of job template
* Add job-binary-create to CLI
* Refactor display of a job binary
* Send credentials only when they are provided
* Add data-source-create to CLI
* Refactor display of a single data-source
* Add job-binary-data-create to CLI
* Refactor display of job-binary-data elements
* Add job-delete to CLI
* Add job-show to CLI
* Add job-list to CLI
* Add job-template-delete to CLI
* Add job-template-show to CLI
* Add job-template-list to CLI
* Add job-binary-delete to CLI
* Add job-binary-show to CLI
* Add job-binary-list to CLI
* Add job-binary-data-delete to CLI
* Add job-binary-data-list to CLI
* Add data-source-delete to CLI
* Add data-source-show to CLI
* Add data-source-list to CLI
* Fix default service-type name in help
* Add cluster-create to CLI
* Add cluster-template-create to CLI
* Add node-group-template-create to CLI
* Add --json option to cluster-show
* Add --json option to cluster-template-show
* Add --json option to node-group-template-show
* Refactor display of cluster
* Refactor display of cluster template
* Refactor display of node group template
* Add cluster-template-show command to CLI
* Add node-group-template-show command to CLI
* Make plugin commands singular
* Force wrap line for cluster-show
* Remove version from setup.cfg

0.4.1
-----

* JobExecutionsManager.create() should handle input\_id/output\_id == None
* Clean up versions display in plugins-show
* Clean up printing of image details
* Allow passing extra args to JobExecutionsManager.create()
* Add pretty printing of node group fields
* Add some pretty printing for list fields
* Add initial cluster-template-\* commands to CLI
* Add initial node-group-template-\* commands to CLI
* Add initial cluster-\* commands to CLI
* Add initial image-\* commands to CLI
* Update oslo-incubator apiclient module
* Update oslo-incubator py3kcompat module
* Update oslo-incubator gettextutils module
* Add run\_test.sh for running tests
* Python 3: use six.iteritems() instead of iteritems()
* Python3: use six.moves.urllib.parse instead of urlparse
* Add basic tests for savanna CLI
* Add initial Savanna CLI
* Adding the ability to modify node group templates
* Adding the ability to modify cluster templates
* Removal of AUTHORS file from repo
* Removing vim headers from savannaclient
* Fix call to catalog.get during endpoint discovery
* Client creation refactoring

0.4.0
-----

* Enforce keystone service endpoint URL for auth
* Run tests using testrepository
* Bump client version to 0.4.0
* Revert "Support building wheels (PEP-427)"
* Add unregister\_image(id) call
* Bump version to 2014.1
* Support building wheels (PEP-427)
* Add HACKING.rst with link to the style guidelines
* Hacking contains all needed requirements
* Updated from global requirements
* Adding support for downloading a job binary
* py33 and pypy added to the envlist
* Unused pylint requirement removed
* Sync with global requirements

0.3
---

* Add lower bound for the six dep

0.3.rc4
-------

* Fixed UI bug with broken plugins page
* Added transient field for cluster

0.3.rc3
-------

* Set upper limit for requests library

0.3.rc2
-------

* Adding Jeremy Stanley to AUTHORS
* Remove timeout argument
* Update .gitreview file following repository move
* Sync with global requirements

0.3.rc1
-------

* Support for keystone v3
* Removed oslo.config and oslo-incubator

0.3a3
-----

* Fix config handling for Node Group Templates

0.3a2
-----

* Bump oslo.config to >=1.2.0
* Fix Node Groups handling on creation
* Fix job\_binary\_internals creation
* Fix job\_binary\_internals creation
* Get job configs call for sync dashboard

0.3a1
-----

* Sync with dashboard
* Remove version pbr pins from setup\_requires
* Sync requirements with global requirements
* Adjusting json resource names
* Fix to requirements.txt
* Adjust client to account for API changes for EDP
* Added conversion of project name to project id
* Added authentication by tenant name
* Create methods return an object upon object creation
* Added support for Job Binaries and Job Executions API
* Now create methods return a new object
* Added API for job, data\_source and job\_origin
* Update pbr and sync requirements
* Python Savanna Client initial implementation
* Added .gitreview
python-saharaclient-3.1.0/lower-constraints.txt0000664000175000017500000000242713643576737022002 0ustar zuulzuul00000000000000appdirs==1.3.0
asn1crypto==0.23.0
Babel==2.3.4
cffi==1.7.0
cliff==2.8.0
cmd2==0.8.0
coverage==4.0
cryptography==2.1
debtcollector==1.2.0
decorator==4.4.0
deprecation==1.0
dogpile.cache==0.6.2
extras==1.0.0
fixtures==3.0.0
future==0.16.0
idna==2.6
iso8601==0.1.11
jmespath==0.9.0
jsonpatch==1.16
jsonpointer==1.13
jsonschema==2.6.0
keystoneauth1==3.4.0
linecache2==1.0.0
mccabe==0.2.1
mock==2.0.0
monotonic==0.6
mox3==0.20.0
msgpack-python==0.4.0
munch==2.1.0
netaddr==0.7.18
netifaces==0.10.4
openstacksdk==0.36.0
os-client-config==1.28.0
os-service-types==1.2.0
osc-lib==2.0.0
oslo.config==5.2.0
oslo.context==2.19.2
oslo.i18n==3.15.3
oslo.log==3.36.0
oslo.serialization==2.18.0
oslo.utils==3.33.0
oslotest==3.2.0
pbr==2.0.0
positional==1.2.1
prettytable==0.7.2
pycparser==2.18
pyinotify==0.9.6
pyOpenSSL==17.1.0
pyparsing==2.1.0
pyperclip==1.5.27
python-cinderclient==3.3.0
python-dateutil==2.5.3
python-glanceclient==2.8.0
python-keystoneclient==3.8.0
python-mimeparse==1.6.0
python-novaclient==9.1.0
python-openstackclient==5.2.0
python-subunit==1.0.0
pytz==2013.6
PyYAML==3.12
requests==2.14.2
requests-mock==1.2.0
requestsexceptions==1.2.0
rfc3986==0.3.1
simplejson==3.5.1
six==1.10.0
stestr==1.0.0
stevedore==1.20.0
testtools==2.2.0
traceback2==1.4.0
unittest2==1.1.0
warlock==1.2.0
wrapt==1.7.0
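The pins above are exact lower bounds; OpenStack projects typically exercise
them in a dedicated tox environment that installs with
``pip install -c lower-constraints.txt``. As an illustration only, a
hypothetical helper (not shipped in this repository; the function name is
invented for the sketch) could compare the active environment against the
file::

    # Hypothetical helper: report installed packages that fall below the
    # minimums pinned in lower-constraints.txt ("name==version" per line).
    import pkg_resources

    def check_lower_constraints(path='lower-constraints.txt'):
        for line in open(path):
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            name, _, minimum = line.partition('==')
            try:
                installed = pkg_resources.get_distribution(name).version
            except pkg_resources.DistributionNotFound:
                continue  # pin covers a dependency that is not installed
            if (pkg_resources.parse_version(installed)
                    < pkg_resources.parse_version(minimum)):
                print('%s: installed %s < pinned minimum %s'
                      % (name, installed, minimum))

    if __name__ == '__main__':
        check_lower_constraints()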
"If you're a developer" section of this page: https://wiki.openstack.org/wiki/How_To_Contribute You can find more Sahara-specific info in our How To Participate guide: https://docs.openstack.org/python-saharaclient/latest/how_to_participate.html Once those steps have been completed, changes to OpenStack should be submitted for review via the Gerrit tool, following the workflow documented at: https://docs.openstack.org/infra/manual/developers.html#development-workflow Pull requests submitted through GitHub will be ignored. Bugs should be filed on Storyboard, not GitHub: https://storyboard.openstack.org/#!/project/934