python-swiftclient-3.9.0/0000775000175000017500000000000013621304050015411 5ustar zuulzuul00000000000000python-swiftclient-3.9.0/.stestr.conf0000664000175000017500000000007513621303750017672 0ustar zuulzuul00000000000000[DEFAULT] test_path=${OS_TEST_PATH:-./test/unit} top_dir=./ python-swiftclient-3.9.0/.zuul.yaml0000664000175000017500000000355213621303750017365 0ustar zuulzuul00000000000000- job: name: swiftclient-swift-functional parent: swift-dsvm-functional description: | Run swift's functional tests with python-swiftclient installed from source instead as package from PyPI. # Ensure that we install python-swiftclient from git and # do not install from pypi. This is needed since the parent # job sets zuul_work_dir to the swift directory and uses tox # for installation. required-projects: - opendev.org/openstack/python-swiftclient - job: name: swiftclient-functional parent: swift-dsvm-functional description: | Run functional tests of python-swiftclient with python-swiftclient installed from source instead as package from PyPI. 
required-projects: - opendev.org/openstack/python-swiftclient vars: # Override value from parent job to use swiftclient tests zuul_work_dir: "{{ zuul.projects['opendev.org/openstack/python-swiftclient'].src_dir }}" - job: name: swiftclient-functional-py2 parent: swiftclient-functional description: | Run functional tests of python-swiftclient under Python 2 vars: tox_envlist: py2func - project: templates: - check-requirements - lib-forward-testing - openstack-lower-constraints-jobs - openstack-pypy-jobs-nonvoting - openstack-python-jobs - openstack-python3-ussuri-jobs - publish-openstack-docs-pti - release-notes-jobs-python3 check: jobs: - swiftclient-swift-functional - swiftclient-functional - swiftclient-functional-py2 - openstack-tox-py38: voting: true gate: jobs: - swiftclient-swift-functional - swiftclient-functional - swiftclient-functional-py2 - openstack-tox-py38: voting: true post: jobs: - openstack-tox-cover python-swiftclient-3.9.0/examples/0000775000175000017500000000000013621304050017227 5ustar zuulzuul00000000000000python-swiftclient-3.9.0/examples/upload.py0000664000175000017500000000501213621303750021071 0ustar zuulzuul00000000000000import logging from os import walk from os.path import join from swiftclient.multithreading import OutputManager from swiftclient.service import SwiftError, SwiftService, SwiftUploadObject from sys import argv logging.basicConfig(level=logging.ERROR) logging.getLogger("requests").setLevel(logging.CRITICAL) logging.getLogger("swiftclient").setLevel(logging.CRITICAL) logger = logging.getLogger(__name__) _opts = {'object_uu_threads': 20} dir = argv[1] container = argv[2] with SwiftService(options=_opts) as swift, OutputManager() as out_manager: try: # Collect all the files and folders in the given directory objs = [] dir_markers = [] for (_dir, _ds, _fs) in walk(dir): if not (_ds + _fs): dir_markers.append(_dir) else: objs.extend([join(_dir, _f) for _f in _fs]) # Now that we've collected all the required files and dir 
markers # build the ``SwiftUploadObject``s for the call to upload objs = [ SwiftUploadObject( o, object_name=o.replace( dir, 'my-%s-objects' % dir, 1 ) ) for o in objs ] dir_markers = [ SwiftUploadObject( None, object_name=d.replace( dir, 'my-%s-objects' % dir, 1 ), options={'dir_marker': True} ) for d in dir_markers ] # Schedule uploads on the SwiftService thread pool and iterate # over the results for r in swift.upload(container, objs + dir_markers): if r['success']: if 'object' in r: print(r['object']) elif 'for_object' in r: print( '%s segment %s' % (r['for_object'], r['segment_index']) ) else: error = r['error'] if r['action'] == "create_container": logger.warning( 'Warning: failed to create container ' "'%s'%s", container, error ) elif r['action'] == "upload_object": logger.error( "Failed to upload object %s to container %s: %s" % (container, r['object'], error) ) else: logger.error("%s" % error) except SwiftError as e: logger.error(e.value) python-swiftclient-3.9.0/examples/list.py0000664000175000017500000000176513621303750020573 0ustar zuulzuul00000000000000import logging from swiftclient.service import SwiftService, SwiftError from sys import argv logging.basicConfig(level=logging.ERROR) logging.getLogger("requests").setLevel(logging.CRITICAL) logging.getLogger("swiftclient").setLevel(logging.CRITICAL) logger = logging.getLogger(__name__) container = argv[1] minimum_size = 10*1024**2 with SwiftService() as swift: try: list_parts_gen = swift.list(container=container) for page in list_parts_gen: if page["success"]: for item in page["listing"]: i_size = int(item["bytes"]) if i_size > minimum_size: i_name = item["name"] i_etag = item["hash"] print( "%s [size: %s] [etag: %s]" % (i_name, i_size, i_etag) ) else: raise page["error"] except SwiftError as e: logger.error(e.value) python-swiftclient-3.9.0/examples/capabilities.py0000664000175000017500000000121113621303750022233 0ustar zuulzuul00000000000000import logging from swiftclient.exceptions import 
ClientException from swiftclient.service import SwiftService logging.basicConfig(level=logging.ERROR) logging.getLogger("requests").setLevel(logging.CRITICAL) logging.getLogger("swiftclient").setLevel(logging.CRITICAL) logger = logging.getLogger(__name__) with SwiftService() as swift: try: capabilities_result = swift.capabilities() capabilities = capabilities_result['capabilities'] if 'slo' in capabilities: print('SLO is supported') else: print('SLO is not supported') except ClientException as e: logger.error(e.value) python-swiftclient-3.9.0/examples/stat.py0000664000175000017500000000142213621303750020561 0ustar zuulzuul00000000000000import logging import pprint from swiftclient.service import SwiftService from sys import argv logging.basicConfig(level=logging.ERROR) logging.getLogger("requests").setLevel(logging.CRITICAL) logging.getLogger("swiftclient").setLevel(logging.CRITICAL) logger = logging.getLogger(__name__) _opts = {'object_dd_threads': 20} with SwiftService(options=_opts) as swift: container = argv[1] objects = argv[2:] header_data = {} stats_it = swift.stat(container=container, objects=objects) for stat_res in stats_it: if stat_res['success']: header_data[stat_res['object']] = stat_res['headers'] else: logger.error( 'Failed to retrieve stats for %s' % stat_res['object'] ) pprint.pprint(header_data) python-swiftclient-3.9.0/examples/download.py0000664000175000017500000000233213621303750021416 0ustar zuulzuul00000000000000import logging from swiftclient.service import SwiftService, SwiftError from sys import argv logging.basicConfig(level=logging.ERROR) logging.getLogger("requests").setLevel(logging.CRITICAL) logging.getLogger("swiftclient").setLevel(logging.CRITICAL) logger = logging.getLogger(__name__) def is_png(obj): return ( obj["name"].lower().endswith('.png') or obj["content_type"] == 'image/png' ) container = argv[1] with SwiftService() as swift: try: list_options = {"prefix": "archive_2016-01-01/"} list_parts_gen = 
swift.list(container=container) for page in list_parts_gen: if page["success"]: objects = [ obj["name"] for obj in page["listing"] if is_png(obj) ] for down_res in swift.download( container=container, objects=objects): if down_res['success']: print("'%s' downloaded" % down_res['object']) else: print("'%s' download failed" % down_res['object']) else: raise page["error"] except SwiftError as e: logger.error(e.value) python-swiftclient-3.9.0/examples/delete.py0000664000175000017500000000226013621303750021051 0ustar zuulzuul00000000000000import logging from swiftclient.service import SwiftService from sys import argv logging.basicConfig(level=logging.ERROR) logging.getLogger("requests").setLevel(logging.CRITICAL) logging.getLogger("swiftclient").setLevel(logging.CRITICAL) logger = logging.getLogger(__name__) _opts = {'object_dd_threads': 20} container = argv[1] objects = argv[2:] with SwiftService(options=_opts) as swift: del_iter = swift.delete(container=container, objects=objects) for del_res in del_iter: c = del_res.get('container', '') o = del_res.get('object', '') a = del_res.get('attempts') if del_res['success'] and not del_res['action'] == 'bulk_delete': rd = del_res.get('response_dict') if rd is not None: t = dict(rd.get('headers', {})) if t: print( 'Successfully deleted {0}/{1} in {2} attempts ' '(transaction id: {3})'.format(c, o, a, t) ) else: print( 'Successfully deleted {0}/{1} in {2} ' 'attempts'.format(c, o, a) ) python-swiftclient-3.9.0/examples/post.py0000664000175000017500000000223513621303750020576 0ustar zuulzuul00000000000000import logging from swiftclient.service import SwiftService, SwiftError from sys import argv logging.basicConfig(level=logging.ERROR) logging.getLogger("requests").setLevel(logging.CRITICAL) logging.getLogger("swiftclient").setLevel(logging.CRITICAL) logger = logging.getLogger(__name__) container = argv[1] with SwiftService() as swift: try: list_options = {"prefix": "archive_2016-01-01/"} list_parts_gen = 
swift.list(container=container) for page in list_parts_gen: if page["success"]: objects = [obj["name"] for obj in page["listing"]] post_options = {"header": "X-Delete-After:86400"} for post_res in swift.post( container=container, objects=objects, options=post_options): if post_res['success']: print("Object '%s' POST success" % post_res['object']) else: print("Object '%s' POST failed" % post_res['object']) else: raise page["error"] except SwiftError as e: logger.error(e.value) python-swiftclient-3.9.0/examples/copy.py0000664000175000017500000000215013621303750020557 0ustar zuulzuul00000000000000import logging from swiftclient.service import SwiftService, SwiftCopyObject, SwiftError logging.basicConfig(level=logging.ERROR) logging.getLogger("requests").setLevel(logging.CRITICAL) logging.getLogger("swiftclient").setLevel(logging.CRITICAL) logger = logging.getLogger(__name__) with SwiftService() as swift: try: obj = SwiftCopyObject("c", {"Destination": "/cont/d"}) for i in swift.copy( "cont", ["a", "b", obj], {"meta": ["foo:bar"], "Destination": "/cc"}): if i["success"]: if i["action"] == "copy_object": print( "object %s copied from /%s/%s" % (i["destination"], i["container"], i["object"]) ) if i["action"] == "create_container": print( "container %s created" % i["container"] ) else: if "error" in i and isinstance(i["error"], Exception): raise i["error"] except SwiftError as e: logger.error(e.value) python-swiftclient-3.9.0/bin/0000775000175000017500000000000013621304050016161 5ustar zuulzuul00000000000000python-swiftclient-3.9.0/bin/swift0000775000175000017500000000134013621303750017247 0ustar zuulzuul00000000000000#!/usr/bin/python # Copyright (c) 2014 Christian Schwede # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import sys from swiftclient.shell import main if __name__ == "__main__": sys.exit(main()) python-swiftclient-3.9.0/.manpages0000775000175000017500000000054513621303750017222 0ustar zuulzuul00000000000000#!/bin/sh RET=0 for MAN in doc/manpages/* ; do OUTPUT=$(LC_ALL=en_US.UTF-8 MANROFFSEQ='' MANWIDTH=80 man --warnings -E UTF-8 -l \ -Tutf8 -Z "$MAN" 2>&1 >/dev/null) if [ -n "$OUTPUT" ] ; then RET=1 echo "$MAN:" echo "$OUTPUT" fi done if [ "$RET" -eq "0" ] ; then echo "All manpages are fine" fi exit "$RET" python-swiftclient-3.9.0/LICENSE0000664000175000017500000002363613621303750016436 0ustar zuulzuul00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. 
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." 
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. 
Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 
python-swiftclient-3.9.0/lower-constraints.txt0000664000175000017500000000137713621303750021665 0ustar zuulzuul00000000000000alabaster==0.7.10 Babel==2.3.4 certifi==2018.1.18 chardet==3.0.4 coverage==4.0 docutils==0.11 dulwich==0.15.0 extras==1.0.0 fixtures==3.0.0 flake8==2.2.4 futures==3.0.0 hacking==0.10.0 idna==2.6 imagesize==0.7.1 iso8601==0.1.8 Jinja2==2.10 keystoneauth1==3.4.0 linecache2==1.0.0 MarkupSafe==1.0 mccabe==0.2.1 mock==1.2.0 netaddr==0.7.10 openstackdocstheme==1.20.0 openstacksdk==0.11.0 oslo.config==1.2.0 pbr==2.0.0 pep8==1.5.7 PrettyTable==0.7.1 pyflakes==0.8.1 Pygments==2.2.0 python-keystoneclient==0.7.0 python-mimeparse==1.6.0 python-subunit==1.0.0 pytz==2013.6 PyYAML==3.12 reno==2.5.0 requests==1.1.0 six==1.9.0 snowballstemmer==1.2.1 sphinx==1.6.2 sphinxcontrib-websupport==1.0.1 stestr==2.0.0 testtools==2.2.0 traceback2==1.4.0 unittest2==1.1.0 urllib3==1.22 python-swiftclient-3.9.0/python_swiftclient.egg-info/0000775000175000017500000000000013621304050023037 5ustar zuulzuul00000000000000python-swiftclient-3.9.0/python_swiftclient.egg-info/dependency_links.txt0000664000175000017500000000000113621304050027105 0ustar zuulzuul00000000000000 python-swiftclient-3.9.0/python_swiftclient.egg-info/SOURCES.txt0000664000175000017500000000462213621304050024727 0ustar zuulzuul00000000000000.coveragerc .functests .mailmap .manpages .stestr.conf .unittests .zuul.yaml AUTHORS CONTRIBUTING.rst ChangeLog LICENSE MANIFEST.in README.rst bindep.txt lower-constraints.txt requirements.txt run_tests.sh setup.cfg setup.py test-requirements.txt tox.ini bin/swift doc/Makefile doc/requirements.txt doc/manpages/swift.1 doc/source/client-api.rst doc/source/conf.py doc/source/index.rst doc/source/introduction.rst doc/source/service-api.rst doc/source/swiftclient.rst doc/source/_static/.gitignore doc/source/_templates/.empty doc/source/cli/index.rst examples/capabilities.py examples/copy.py examples/delete.py examples/download.py examples/list.py examples/post.py 
examples/stat.py examples/upload.py python_swiftclient.egg-info/PKG-INFO python_swiftclient.egg-info/SOURCES.txt python_swiftclient.egg-info/dependency_links.txt python_swiftclient.egg-info/entry_points.txt python_swiftclient.egg-info/not-zip-safe python_swiftclient.egg-info/pbr.json python_swiftclient.egg-info/requires.txt python_swiftclient.egg-info/top_level.txt releasenotes/notes/310-notes-03040158a8683dd8.yaml releasenotes/notes/320_notes-bb367dba1053d34c.yaml releasenotes/notes/340_notes-1777780bbfdb4d96.yaml releasenotes/notes/350_notes-ad0ae19704b2eb88.yaml releasenotes/notes/360_notes-1ec385df13a3a735.yaml releasenotes/notes/361_notes-59e020e68bcdd709.yaml releasenotes/notes/3_8_0_release-bd867fbdb8c895d3.yaml releasenotes/notes/3_8_1_release-cb5648c3ae69bde1.yaml releasenotes/notes/3_9_0_release-3c293d277f14ec22.yaml releasenotes/source/conf.py releasenotes/source/current.rst releasenotes/source/index.rst releasenotes/source/newton.rst releasenotes/source/ocata.rst releasenotes/source/pike.rst releasenotes/source/queens.rst releasenotes/source/rocky.rst releasenotes/source/stein.rst releasenotes/source/train.rst swiftclient/__init__.py swiftclient/authv1.py swiftclient/client.py swiftclient/command_helpers.py swiftclient/exceptions.py swiftclient/multithreading.py swiftclient/service.py swiftclient/shell.py swiftclient/utils.py swiftclient/version.py test/__init__.py test/sample.conf test/functional/__init__.py test/functional/test_openstacksdk.py test/functional/test_swiftclient.py test/unit/__init__.py test/unit/test_authv1.py test/unit/test_command_helpers.py test/unit/test_multithreading.py test/unit/test_service.py test/unit/test_shell.py test/unit/test_swiftclient.py test/unit/test_utils.py test/unit/utils.py tools/swift.bash_completionpython-swiftclient-3.9.0/python_swiftclient.egg-info/PKG-INFO0000664000175000017500000000636113621304050024142 0ustar zuulzuul00000000000000Metadata-Version: 2.1 Name: python-swiftclient Version: 3.9.0 Summary: 
OpenStack Object Storage API Client Library Home-page: https://docs.openstack.org/python-swiftclient/latest/ Author: OpenStack Author-email: openstack-discuss@lists.openstack.org License: UNKNOWN Description: ======================== Team and repository tags ======================== .. image:: https://governance.openstack.org/tc/badges/python-swiftclient.svg :target: https://governance.openstack.org/tc/reference/tags/index.html .. Change things from this point on Python bindings to the OpenStack Object Storage API =================================================== .. image:: https://img.shields.io/pypi/v/python-swiftclient.svg :target: https://pypi.org/project/python-swiftclient/ :alt: Latest Version This is a python client for the Swift API. There's a Python API (the ``swiftclient`` module), and a command-line script (``swift``). Development takes place via the usual OpenStack processes as outlined in the `OpenStack wiki`__. __ https://docs.openstack.org/infra/manual/developers.html This code is based on the original client previously included with `OpenStack's Swift`__ The python-swiftclient is licensed under the Apache License like the rest of OpenStack. __ https://github.com/openstack/swift * Free software: Apache license * `PyPI`_ - package installation * `Online Documentation`_ * `Launchpad project`_ - release management * `Blueprints`_ - feature specifications * `Bugs`_ - issue tracking * `Source`_ * `Specs`_ * `How to Contribute`_ * `Release Notes`_ .. _PyPI: https://pypi.org/project/python-swiftclient .. _Online Documentation: https://docs.openstack.org/python-swiftclient/latest/ .. _Launchpad project: https://launchpad.net/python-swiftclient .. _Blueprints: https://blueprints.launchpad.net/python-swiftclient .. _Bugs: https://bugs.launchpad.net/python-swiftclient .. _Source: https://opendev.org/openstack/python-swiftclient .. _How to Contribute: https://docs.openstack.org/infra/manual/developers.html .. 
_Specs: https://specs.openstack.org/openstack/swift-specs/ .. _Release Notes: https://docs.openstack.org/releasenotes/python-swiftclient .. contents:: Contents: :local: Platform: UNKNOWN Classifier: Environment :: OpenStack Classifier: Intended Audience :: Information Technology Classifier: Intended Audience :: System Administrators Classifier: License :: OSI Approved :: Apache Software License Classifier: Operating System :: POSIX :: Linux Classifier: Operating System :: Microsoft :: Windows Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Provides-Extra: keystone Provides-Extra: test python-swiftclient-3.9.0/python_swiftclient.egg-info/not-zip-safe0000664000175000017500000000000113621304050025265 0ustar zuulzuul00000000000000 python-swiftclient-3.9.0/python_swiftclient.egg-info/requires.txt0000664000175000017500000000037613621304050025445 0ustar zuulzuul00000000000000requests>=1.1.0 six>=1.9.0 [:(python_version=='2.7' or python_version=='2.6')] futures>=3.0.0 [keystone] python-keystoneclient>=0.7.0 [test] hacking<1.2.0,>=1.1.0 coverage!=4.4,>=4.0 keystoneauth1>=3.4.0 mock>=1.2.0 stestr>=2.0.0 openstacksdk>=0.11.0 python-swiftclient-3.9.0/python_swiftclient.egg-info/pbr.json0000664000175000017500000000005613621304050024516 0ustar zuulzuul00000000000000{"git_version": "259b98f", "is_release": true}python-swiftclient-3.9.0/python_swiftclient.egg-info/top_level.txt0000664000175000017500000000001413621304050025564 0ustar zuulzuul00000000000000swiftclient python-swiftclient-3.9.0/python_swiftclient.egg-info/entry_points.txt0000664000175000017500000000017113621304050026334 0ustar zuulzuul00000000000000[console_scripts] swift = swiftclient.shell:main [keystoneauth1.plugin] v1password = swiftclient.authv1:PasswordLoader 
python-swiftclient-3.9.0/MANIFEST.in0000664000175000017500000000025613621303750017160 0ustar zuulzuul00000000000000include AUTHORS include ChangeLog include LICENSE include README.rst include run_tests.sh tox.ini recursive-include doc * recursive-include tests * recursive-include tools * python-swiftclient-3.9.0/tools/0000775000175000017500000000000013621304050016551 5ustar zuulzuul00000000000000python-swiftclient-3.9.0/tools/swift.bash_completion0000664000175000017500000000140613621303750023004 0ustar zuulzuul00000000000000declare -a _swift_opts # lazy init _swift_get_current_opt() { local opt for opt in ${_swift_opts[@]} ; do if [[ $(echo ${COMP_WORDS[*]} |grep -c " $opt\$") > 0 ]] || [[ $(echo ${COMP_WORDS[*]} |grep -c " $opt ") > 0 ]] ; then echo $opt return 0 fi done echo "" return 0 } _swift() { local opt cur prev sflags COMPREPLY=() cur="${COMP_WORDS[COMP_CWORD]}" prev="${COMP_WORDS[COMP_CWORD-1]}" if [ "x$_swift_opts" == "x" ] ; then _swift_opts=(`swift bash_completion "$sbc" | sed -e "s/-[-A-Za-z0-9_]*//g" -e "s/ */ /g"`) fi opt="$(_swift_get_current_opt)" COMPREPLY=($(compgen -W "$(swift bash_completion $opt)" -- ${cur})) return 0 } complete -F _swift swift python-swiftclient-3.9.0/bindep.txt0000664000175000017500000000040713621303750017422 0ustar zuulzuul00000000000000# This is a cross-platform list tracking distribution packages needed by tests; # see https://docs.openstack.org/infra/bindep/ for additional information. 
pypy [test !platform:fedora] pypy-dev [test platform:dpkg] pypy-devel [test platform:rpm !platform:fedora] python-swiftclient-3.9.0/requirements.txt0000664000175000017500000000013713621303750020704 0ustar zuulzuul00000000000000futures>=3.0.0;python_version=='2.7' or python_version=='2.6' # BSD requests>=1.1.0 six>=1.9.0 python-swiftclient-3.9.0/swiftclient/0000775000175000017500000000000013621304050017744 5ustar zuulzuul00000000000000python-swiftclient-3.9.0/swiftclient/__init__.py0000664000175000017500000000205413621303750022064 0ustar zuulzuul00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2012 Rackspace # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ OpenStack Swift Python client binding. """ from .client import * # noqa # At setup.py time, we haven't installed anything yet, so there # is nothing that is able to set this version property. Squelching # that exception here should be fine- if there are problems with # pkg_resources in a real install, that will manifest itself as # an error still try: from swiftclient import version __version__ = version.version_string except Exception: pass python-swiftclient-3.9.0/swiftclient/utils.py0000664000175000017500000003272613621303750021476 0ustar zuulzuul00000000000000# Copyright (c) 2010-2012 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """Miscellaneous utility functions for use with Swift.""" from calendar import timegm import collections import gzip import hashlib import hmac import json import logging import six import time import traceback TRUE_VALUES = set(('true', '1', 'yes', 'on', 't', 'y')) EMPTY_ETAG = 'd41d8cd98f00b204e9800998ecf8427e' EXPIRES_ISO8601_FORMAT = '%Y-%m-%dT%H:%M:%SZ' SHORT_EXPIRES_ISO8601_FORMAT = '%Y-%m-%d' TIME_ERRMSG = ('time must either be a whole number or in specific ' 'ISO 8601 format.') def config_true_value(value): """ Returns True if the value is either True or a string in TRUE_VALUES. Returns False otherwise. This function comes from swift.common.utils.config_true_value() """ return value is True or \ (isinstance(value, six.string_types) and value.lower() in TRUE_VALUES) def prt_bytes(num_bytes, human_flag): """ convert a number > 1024 to printable format, either in 4 char -h format as with ls -lh or return as 12 char right justified string """ if not human_flag: return '%12s' % num_bytes num = float(num_bytes) suffixes = [None] + list('KMGTPEZY') for suffix in suffixes[:-1]: if num <= 1023: break num /= 1024.0 else: suffix = suffixes[-1] if not suffix: # num_bytes must be < 1024 return '%4s' % num_bytes elif num >= 10: return '%3d%s' % (num, suffix) else: return '%.1f%s' % (num, suffix) def generate_temp_url(path, seconds, key, method, absolute=False, prefix=False, iso8601=False, ip_range=None): """Generates a temporary URL that gives unauthenticated access to the Swift object. 
:param path: The full path to the Swift object or prefix if a prefix-based temporary URL should be generated. Example: /v1/AUTH_account/c/o or /v1/AUTH_account/c/prefix. :param seconds: time in seconds or ISO 8601 timestamp. If absolute is False and this is the string representation of an integer, then this specifies the amount of time in seconds for which the temporary URL will be valid. If absolute is True then this specifies an absolute time at which the temporary URL will expire. :param key: The secret temporary URL key set on the Swift cluster. To set a key, run 'swift post -m "Temp-URL-Key: "' :param method: A HTTP method, typically either GET or PUT, to allow for this temporary URL. :param absolute: if True then the seconds parameter is interpreted as a Unix timestamp, if seconds represents an integer. :param prefix: if True then a prefix-based temporary URL will be generated. :param iso8601: if True, a URL containing an ISO 8601 UTC timestamp instead of a UNIX timestamp will be created. :param ip_range: if a valid ip range, restricts the temporary URL to the range of ips. :raises ValueError: if timestamp or path is not in valid format. :return: the path portion of a temporary URL """ try: try: timestamp = float(seconds) except ValueError: formats = ( EXPIRES_ISO8601_FORMAT, EXPIRES_ISO8601_FORMAT[:-1], SHORT_EXPIRES_ISO8601_FORMAT) for f in formats: try: t = time.strptime(seconds, f) except ValueError: t = None else: if f == EXPIRES_ISO8601_FORMAT: timestamp = timegm(t) else: # Use local time if UTC designator is missing. 
timestamp = int(time.mktime(t)) absolute = True break if t is None: raise ValueError() else: if not timestamp.is_integer(): raise ValueError() timestamp = int(timestamp) if timestamp < 0: raise ValueError() except ValueError: raise ValueError(TIME_ERRMSG) if isinstance(path, six.binary_type): try: path_for_body = path.decode('utf-8') except UnicodeDecodeError: raise ValueError('path must be representable as UTF-8') else: path_for_body = path parts = path_for_body.split('/', 4) if len(parts) != 5 or parts[0] or not all(parts[1:(4 if prefix else 5)]): if prefix: raise ValueError('path must at least contain /v1/a/c/') else: raise ValueError('path must be full path to an object' ' e.g. /v1/a/c/o') standard_methods = ['GET', 'PUT', 'HEAD', 'POST', 'DELETE'] if method.upper() not in standard_methods: logger = logging.getLogger("swiftclient") logger.warning('Non default HTTP method %s for tempurl specified, ' 'possibly an error', method.upper()) if not absolute: expiration = int(time.time() + timestamp) else: expiration = timestamp hmac_parts = [method.upper(), str(expiration), ('prefix:' if prefix else '') + path_for_body] if ip_range: if isinstance(ip_range, six.binary_type): try: ip_range = ip_range.decode('utf-8') except UnicodeDecodeError: raise ValueError( 'ip_range must be representable as UTF-8' ) hmac_parts.insert(0, "ip=%s" % ip_range) hmac_body = u'\n'.join(hmac_parts) # Encode to UTF-8 for py3 compatibility if not isinstance(key, six.binary_type): key = key.encode('utf-8') sig = hmac.new(key, hmac_body.encode('utf-8'), hashlib.sha1).hexdigest() if iso8601: expiration = time.strftime( EXPIRES_ISO8601_FORMAT, time.gmtime(expiration)) temp_url = u'{path}?temp_url_sig={sig}&temp_url_expires={exp}'.format( path=path_for_body, sig=sig, exp=expiration) if ip_range: temp_url += u'&temp_url_ip_range={}'.format(ip_range) if prefix: temp_url += u'&temp_url_prefix={}'.format(parts[4]) # Have return type match path from caller if isinstance(path, six.binary_type): return 
temp_url.encode('utf-8') else: return temp_url def get_body(headers, body): if headers.get('content-encoding') == 'gzip': with gzip.GzipFile(fileobj=six.BytesIO(body), mode='r') as gz: nbody = gz.read() return nbody return body def parse_api_response(headers, body): body = get_body(headers, body) charset = 'utf-8' # Swift *should* be speaking UTF-8, but check content-type just in case content_type = headers.get('content-type', '') if '; charset=' in content_type: charset = content_type.split('; charset=', 1)[1].split(';', 1)[0] return json.loads(body.decode(charset)) def split_request_headers(options, prefix=''): headers = {} if isinstance(options, collections.Mapping): options = options.items() for item in options: if isinstance(item, six.string_types): if ':' not in item: raise ValueError( "Metadata parameter %s must contain a ':'.\n" "Example: 'Color:Blue' or 'Size:Large'" % item ) item = item.split(':', 1) if len(item) != 2: raise ValueError( "Metadata parameter %r must have exactly two items.\n" "Example: ('Color', 'Blue') or ['Size', 'Large']" % (item, ) ) headers[(prefix + item[0]).title()] = item[1].strip() return headers def report_traceback(): """ Reports a timestamp and full traceback for a given exception. :return: Full traceback and timestamp. """ try: formatted_lines = traceback.format_exc() now = time.time() return formatted_lines, now except AttributeError: return None, None class NoopMD5(object): def __init__(self, *a, **kw): pass def update(self, *a, **kw): pass def hexdigest(self, *a, **kw): return '' class ReadableToIterable(object): """ Wrap a filelike object and act as an iterator. It is recommended to use this class only on files opened in binary mode. Due to the Unicode changes in Python 3, files are now opened using an encoding not suitable for use with the md5 class and because of this hit the exception on every call to next. This could cause problems, especially with large files and small chunk sizes. 
""" def __init__(self, content, chunk_size=65536, md5=False): """ :param content: The filelike object that is yielded from. :param chunk_size: The max size of each yielded item. :param md5: Flag to enable calculating the MD5 of the content as it is yielded. """ self.md5sum = hashlib.md5() if md5 else NoopMD5() self.content = content self.chunk_size = chunk_size def get_md5sum(self): return self.md5sum.hexdigest() def __next__(self): """ Both ``__next__`` and ``next`` are provided to allow compatibility with python 2 and python 3 and their use of ``iterable.next()`` and ``next(iterable)`` respectively. """ chunk = self.content.read(self.chunk_size) if not chunk: raise StopIteration try: self.md5sum.update(chunk) except TypeError: self.md5sum.update(chunk.encode()) return chunk def next(self): return self.__next__() def __iter__(self): return self class LengthWrapper(object): """ Wrap a filelike object with a maximum length. Fix for https://github.com/kennethreitz/requests/issues/1648. It is recommended to use this class only on files opened in binary mode. """ def __init__(self, readable, length, md5=False): """ :param readable: The filelike object to read from. :param length: The maximum amount of content that can be read from the filelike object before it is simulated to be empty. :param md5: Flag to enable calculating the MD5 of the content as it is read. 
""" self._md5 = md5 self._reset_md5() self._length = self._remaining = length self._readable = readable self._can_reset = all(hasattr(readable, attr) for attr in ('seek', 'tell')) if self._can_reset: self._start = readable.tell() def __len__(self): return self._length def _reset_md5(self): self.md5sum = hashlib.md5() if self._md5 else NoopMD5() def get_md5sum(self): return self.md5sum.hexdigest() def read(self, size=-1): if self._remaining <= 0: return '' to_read = self._remaining if size < 0 else min(size, self._remaining) chunk = self._readable.read(to_read) self._remaining -= len(chunk) try: self.md5sum.update(chunk) except TypeError: self.md5sum.update(chunk.encode()) return chunk @property def reset(self): if self._can_reset: return self._reset raise AttributeError("%r object has no attribute 'reset'" % type(self).__name__) def _reset(self, *args, **kwargs): if not self._can_reset: raise TypeError('%r object cannot be reset; needs both seek and ' 'tell methods' % type(self._readable).__name__) self._readable.seek(self._start) self._reset_md5() self._remaining = self._length def iter_wrapper(iterable): for chunk in iterable: if len(chunk) == 0: # If we emit an empty chunk, requests will go ahead and send it, # causing the server to close the connection continue yield chunk def n_at_a_time(seq, n): for i in range(0, len(seq), n): yield seq[i:i + n] def n_groups(seq, n): items_per_group = ((len(seq) - 1) // n) + 1 return n_at_a_time(seq, items_per_group) def normalize_manifest_path(path): if six.PY2 and isinstance(path, six.text_type): path = path.encode('utf-8') if path.startswith('/'): return path[1:] return path class JSONableIterable(list): def __init__(self, iterable): self._iterable = iter(iterable) try: self._peeked = next(self._iterable) self._has_items = True except StopIteration: self._peeked = None self._has_items = False def __bool__(self): return self._has_items __nonzero__ = __bool__ def __iter__(self): if self._has_items: yield self._peeked for 
item in self._iterable: yield item python-swiftclient-3.9.0/swiftclient/multithreading.py0000664000175000017500000001767513621303750023364 0ustar zuulzuul00000000000000# Copyright (c) 2010-2012 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function import six import sys from concurrent.futures import ThreadPoolExecutor from six.moves.queue import PriorityQueue class OutputManager(object): """ One object to manage and provide helper functions for output. This object is a context manager and returns itself into the context. When entering the context, two printing threads are created (see below) and they are waited on and cleaned up when exiting the context. Also, thread-safe printing to two streams is provided. The :meth:`print_msg` method will print to the supplied ``print_stream`` (defaults to ``sys.stdout``) and the :meth:`error` method will print to the supplied ``error_stream`` (defaults to ``sys.stderr``). Both of these printing methods will format the given string with any supplied ``*args`` (a la printf). On Python 2, Unicode messages are encoded to utf8. The attribute :attr:`self.error_count` is incremented once per error message printed, so an application can tell if any worker threads encountered exceptions or otherwise called :meth:`error` on this instance. The swift command-line tool uses this to exit non-zero if any error strings were printed. 
""" DEFAULT_OFFSET = 14 def __init__(self, print_stream=None, error_stream=None): """ :param print_stream: The stream to which :meth:`print_msg` sends formatted messages. :param error_stream: The stream to which :meth:`error` sends formatted messages. On Python 2, Unicode messages are encoded to utf8. """ self.print_stream = print_stream or sys.stdout self.print_pool = ThreadPoolExecutor(max_workers=1) self.error_stream = error_stream or sys.stderr self.error_print_pool = ThreadPoolExecutor(max_workers=1) self.error_count = 0 def __enter__(self): return self def __exit__(self, exc_type, exc_value, traceback): self.error_print_pool.__exit__(exc_type, exc_value, traceback) self.print_pool.__exit__(exc_type, exc_value, traceback) def print_raw(self, data): self.print_pool.submit(self._write, data, self.print_stream) def _write(self, data, stream): if six.PY3: stream.buffer.write(data) stream.flush() if six.PY2: stream.write(data) stream.flush() def print_msg(self, msg, *fmt_args): if fmt_args: msg = msg % fmt_args self.print_pool.submit(self._print, msg) def print_items(self, items, offset=DEFAULT_OFFSET, skip_missing=False): template = '%%%ds: %%s' % offset for k, v in items: if skip_missing and not v: continue self.print_msg((template % (k, v)).rstrip()) def error(self, msg, *fmt_args): if fmt_args: msg = msg % fmt_args self.error_print_pool.submit(self._print_error, msg) def get_error_count(self): return self.error_count def _print(self, item, stream=None): if stream is None: stream = self.print_stream if six.PY2 and isinstance(item, six.text_type): item = item.encode('utf8') print(item, file=stream) def _print_error(self, item, count=1): self.error_count += count return self._print(item, stream=self.error_stream) def warning(self, msg, *fmt_args): # print to error stream but do not increment error count if fmt_args: msg = msg % fmt_args self.error_print_pool.submit(self._print_error, msg, count=0) class MultiThreadingManager(object): """ One object to manage 
context for multi-threading. This should make bin/swift less error-prone and allow us to test this code. """ def __init__(self, create_connection, segment_threads=10, object_dd_threads=10, object_uu_threads=10, container_threads=10): """ :param segment_threads: The number of threads allocated to segment uploads :param object_dd_threads: The number of threads allocated to object download/delete jobs :param object_uu_threads: The number of threads allocated to object upload/update based jobs :param container_threads: The number of threads allocated to container/account level jobs """ self.segment_pool = ConnectionThreadPoolExecutor( create_connection, max_workers=segment_threads) self.object_dd_pool = ConnectionThreadPoolExecutor( create_connection, max_workers=object_dd_threads) self.object_uu_pool = ConnectionThreadPoolExecutor( create_connection, max_workers=object_uu_threads) self.container_pool = ConnectionThreadPoolExecutor( create_connection, max_workers=container_threads) def __enter__(self): return self def __exit__(self, exc_type, exc_value, traceback): self.segment_pool.__exit__(exc_type, exc_value, traceback) self.object_dd_pool.__exit__(exc_type, exc_value, traceback) self.object_uu_pool.__exit__(exc_type, exc_value, traceback) self.container_pool.__exit__(exc_type, exc_value, traceback) class ConnectionThreadPoolExecutor(ThreadPoolExecutor): """ A wrapper class to maintain a pool of connections alongside the thread pool. We start by creating a priority queue of connections, and each job submitted takes one of those connections (initialising if necessary) and passes it as the first arg to the executed function. At the end of execution that connection is returned to the queue. By using a PriorityQueue we avoid creating more connections than required. We will only create as many connections as are required concurrently. """ def __init__(self, create_connection, max_workers): """ Initializes a new ThreadPoolExecutor instance. 
:param create_connection: callable to use to create new connections :param max_workers: the maximum number of threads that can be used """ self._connections = PriorityQueue() self._create_connection = create_connection for p in range(0, max_workers): self._connections.put((p, None)) super(ConnectionThreadPoolExecutor, self).__init__(max_workers) def submit(self, fn, *args, **kwargs): """ Schedules the callable, `fn`, to be executed :param fn: the callable to be invoked :param args: the positional arguments for the callable :param kwargs: the keyword arguments for the callable :returns: a Future object representing the execution of the callable """ def conn_fn(): priority = None conn = None try: # If we get a connection we must put it back later (priority, conn) = self._connections.get() if conn is None: conn = self._create_connection() conn_args = (conn,) + args return fn(*conn_args, **kwargs) finally: if priority is not None: self._connections.put((priority, conn)) return super(ConnectionThreadPoolExecutor, self).submit(conn_fn) python-swiftclient-3.9.0/swiftclient/command_helpers.py0000664000175000017500000001536713621303750023500 0ustar zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
from swiftclient.utils import prt_bytes, split_request_headers POLICY_HEADER_PREFIX = 'x-account-storage-policy-' def stat_account(conn, options): items = [] req_headers = split_request_headers(options.get('header', [])) headers = conn.head_account(headers=req_headers) if options['verbose'] > 1: items.extend([ ('StorageURL', conn.url), ('Auth Token', conn.token), ]) container_count = int(headers.get('x-account-container-count', 0)) object_count = prt_bytes(headers.get('x-account-object-count', 0), options['human']).lstrip() bytes_used = prt_bytes(headers.get('x-account-bytes-used', 0), options['human']).lstrip() items.extend([ ('Account', conn.url.rsplit('/', 1)[-1]), ('Containers', container_count), ('Objects', object_count), ('Bytes', bytes_used), ]) policies = set() for header_key, header_value in headers.items(): if header_key.lower().startswith(POLICY_HEADER_PREFIX): policy_name = header_key.rsplit('-', 2)[0].split('-', 4)[-1] policies.add(policy_name) for policy in policies: container_count_header = (POLICY_HEADER_PREFIX + policy + '-container-count') if container_count_header in headers: items.append( ('Containers in policy "' + policy + '"', prt_bytes(headers[container_count_header], options['human']).lstrip()) ) items.extend(( ('Objects in policy "' + policy + '"', prt_bytes( headers.get( POLICY_HEADER_PREFIX + policy + '-object-count', 0), options['human'] ).lstrip()), ('Bytes in policy "' + policy + '"', prt_bytes( headers.get( POLICY_HEADER_PREFIX + policy + '-bytes-used', 0), options['human'] ).lstrip()), )) return items, headers def print_account_stats(items, headers, output_manager): exclude_policy_headers = [] for header_key, header_value in headers.items(): if header_key.lower().startswith(POLICY_HEADER_PREFIX): exclude_policy_headers.append(header_key) items.extend(headers_to_items( headers, meta_prefix='x-account-meta-', exclude_headers=([ 'content-length', 'date', 'x-account-container-count', 'x-account-object-count', 'x-account-bytes-used'] + 
exclude_policy_headers))) # line up the items nicely offset = max(len(item) for item, value in items) output_manager.print_items(items, offset=offset) def stat_container(conn, options, container): req_headers = split_request_headers(options.get('header', [])) headers = conn.head_container(container, headers=req_headers) items = [] if options['verbose'] > 1: path = '%s/%s' % (conn.url, container) items.extend([ ('URL', path), ('Auth Token', conn.token) ]) object_count = prt_bytes( headers.get('x-container-object-count', 0), options['human']).lstrip() bytes_used = prt_bytes(headers.get('x-container-bytes-used', 0), options['human']).lstrip() items.extend([ ('Account', conn.url.rsplit('/', 1)[-1]), ('Container', container), ('Objects', object_count), ('Bytes', bytes_used), ('Read ACL', headers.get('x-container-read', '')), ('Write ACL', headers.get('x-container-write', '')), ('Sync To', headers.get('x-container-sync-to', '')), ('Sync Key', headers.get('x-container-sync-key', '')) ]) return items, headers def print_container_stats(items, headers, output_manager): items.extend(headers_to_items( headers, meta_prefix='x-container-meta-', exclude_headers=( 'content-length', 'date', 'x-container-object-count', 'x-container-bytes-used', 'x-container-read', 'x-container-write', 'x-container-sync-to', 'x-container-sync-key' ) )) # line up the items nicely offset = max(len(item) for item, value in items) output_manager.print_items(items, offset=offset) def stat_object(conn, options, container, obj): req_headers = split_request_headers(options.get('header', [])) headers = conn.head_object(container, obj, headers=req_headers) items = [] if options['verbose'] > 1: path = '%s/%s/%s' % (conn.url, container, obj) items.extend([ ('URL', path), ('Auth Token', conn.token) ]) content_length = prt_bytes(headers.get('content-length', 0), options['human']).lstrip() items.extend([ ('Account', conn.url.rsplit('/', 1)[-1]), ('Container', container), ('Object', obj), ('Content Type', 
headers.get('content-type')), ('Content Length', content_length), ('Last Modified', headers.get('last-modified')), ('ETag', headers.get('etag')), ('Manifest', headers.get('x-object-manifest')) ]) return items, headers def print_object_stats(items, headers, output_manager): items.extend(headers_to_items( headers, meta_prefix='x-object-meta-', exclude_headers=( 'content-type', 'content-length', 'last-modified', 'etag', 'date', 'x-object-manifest') )) # line up the items nicely offset = max(len(item) for item, value in items) output_manager.print_items(items, offset=offset, skip_missing=True) def headers_to_items(headers, meta_prefix='', exclude_headers=None): exclude_headers = exclude_headers or [] other_items = [] meta_items = [] for key, value in headers.items(): if key not in exclude_headers: if key.startswith(meta_prefix): meta_key = 'Meta %s' % key[len(meta_prefix):].title() meta_items.append((meta_key, value)) else: other_items.append((key.title(), value)) return meta_items + other_items python-swiftclient-3.9.0/swiftclient/client.py0000664000175000017500000023335613621303750021616 0ustar zuulzuul00000000000000# Copyright (c) 2010-2012 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
""" OpenStack Swift client library used internally """ import socket import re import requests import logging import warnings from distutils.version import StrictVersion from requests.exceptions import RequestException, SSLError from six.moves import http_client from six.moves.urllib.parse import quote as _quote, unquote from six.moves.urllib.parse import urljoin, urlparse, urlunparse from time import sleep, time import six from swiftclient import version as swiftclient_version from swiftclient.exceptions import ClientException from swiftclient.utils import ( iter_wrapper, LengthWrapper, ReadableToIterable, parse_api_response, get_body) # Default is 100, increase to 256 http_client._MAXHEADERS = 256 VERSIONFUL_AUTH_PATH = re.compile(r'v[2-3](?:\.0)?$') AUTH_VERSIONS_V1 = ('1.0', '1', 1) AUTH_VERSIONS_V2 = ('2.0', '2', 2) AUTH_VERSIONS_V3 = ('3.0', '3', 3) USER_METADATA_TYPE = tuple('x-%s-meta-' % type_ for type_ in ('container', 'account', 'object')) try: from logging import NullHandler except ImportError: # Added in Python 2.7 class NullHandler(logging.Handler): def handle(self, record): pass def emit(self, record): pass def createLock(self): self.lock = None ksexceptions = ksclient_v2 = ksclient_v3 = None try: from keystoneclient import exceptions as ksexceptions # prevent keystoneclient warning us that it has no log handlers logging.getLogger('keystoneclient').addHandler(NullHandler()) from keystoneclient.v2_0 import client as ksclient_v2 except ImportError: pass try: from keystoneclient.v3 import client as ksclient_v3 except ImportError: pass # requests version 1.2.3 try to encode headers in ascii, preventing # utf-8 encoded header to be 'prepared'. 
This also affects all # (or at least most) versions of requests on py3 if StrictVersion(requests.__version__) < StrictVersion('2.0.0') \ or not six.PY2: from requests.structures import CaseInsensitiveDict def prepare_unicode_headers(self, headers): if headers: self.headers = CaseInsensitiveDict(headers) else: self.headers = CaseInsensitiveDict() requests.models.PreparedRequest.prepare_headers = prepare_unicode_headers logger = logging.getLogger("swiftclient") logger.addHandler(NullHandler()) #: Default behaviour is to redact header values known to contain secrets, #: such as ``X-Auth-Key`` and ``X-Auth-Token``. Up to the first 16 chars #: may be revealed. #: #: To disable, set the value of ``redact_sensitive_headers`` to ``False``. #: #: When header redaction is enabled, ``reveal_sensitive_prefix`` configures the #: maximum length of any sensitive header data sent to the logs. If the header #: is less than twice this length, only ``int(len(value)/2)`` chars will be #: logged; if it is less than 15 chars long, even less will be logged. logger_settings = { 'redact_sensitive_headers': True, 'reveal_sensitive_prefix': 16 } #: A list of sensitive headers to redact in logs. Note that when extending this #: list, the header names must be added in all lower case. LOGGER_SENSITIVE_HEADERS = [ 'x-auth-token', 'x-auth-key', 'x-service-token', 'x-storage-token', 'x-account-meta-temp-url-key', 'x-account-meta-temp-url-key-2', 'x-container-meta-temp-url-key', 'x-container-meta-temp-url-key-2', 'set-cookie' ] def safe_value(name, value): """ Only show up to logger_settings['reveal_sensitive_prefix'] characters from a sensitive header. :param name: Header name :param value: Header value :return: Safe header value """ if name.lower() in LOGGER_SENSITIVE_HEADERS: prefix_length = logger_settings.get('reveal_sensitive_prefix', 16) prefix_length = int( min(prefix_length, (len(value) ** 2) / 32, len(value) / 2) ) redacted_value = value[0:prefix_length] return redacted_value + '...' 
return value def scrub_headers(headers): """ Redact header values that can contain sensitive information that should not be logged. :param headers: Either a dict or an iterable of two-element tuples :return: Safe dictionary of headers with sensitive information removed """ if isinstance(headers, dict): headers = headers.items() headers = [ (parse_header_string(key), parse_header_string(val)) for (key, val) in headers ] if not logger_settings.get('redact_sensitive_headers', True): return dict(headers) if logger_settings.get('reveal_sensitive_prefix', 16) < 0: logger_settings['reveal_sensitive_prefix'] = 16 return {key: safe_value(key, val) for (key, val) in headers} def http_log(args, kwargs, resp, body): if not logger.isEnabledFor(logging.INFO): return # create and log equivalent curl command string_parts = ['curl -i'] for element in args: if element == 'HEAD': string_parts.append(' -I') elif element in ('GET', 'POST', 'PUT'): string_parts.append(' -X %s' % element) else: string_parts.append(' %s' % parse_header_string(element)) if 'headers' in kwargs: headers = scrub_headers(kwargs['headers']) for element in headers: header = ' -H "%s: %s"' % (element, headers[element]) string_parts.append(header) # log response as debug if good, or info if error if resp.status < 300: log_method = logger.debug else: log_method = logger.info log_method("REQ: %s", "".join(string_parts)) log_method("RESP STATUS: %s %s", resp.status, resp.reason) log_method("RESP HEADERS: %s", scrub_headers(resp.getheaders())) if body: resp_headers = resp_header_dict(resp) nbody = get_body(resp_headers, body) log_method("RESP BODY: %s", nbody) def parse_header_string(data): if not isinstance(data, (six.text_type, six.binary_type)): data = str(data) if six.PY2: if isinstance(data, six.text_type): # Under Python2 requests only returns binary_type, but if we get # some stray text_type input, this should prevent unquote from # interpreting %-encoded data as raw code-points. 
data = data.encode('utf8') try: unquoted = unquote(data).decode('utf8') except UnicodeDecodeError: try: return data.decode('utf8') except UnicodeDecodeError: return quote(data).decode('utf8') else: if isinstance(data, six.binary_type): # Under Python3 requests only returns text_type and tosses (!) the # rest of the headers. If that ever changes, this should be a sane # approach. try: data = data.decode('ascii') except UnicodeDecodeError: data = quote(data) try: unquoted = unquote(data, errors='strict') except UnicodeDecodeError: return data return unquoted def quote(value, safe='/'): """ Patched version of urllib.quote that encodes utf8 strings before quoting. On Python 3, call directly urllib.parse.quote(). """ if six.PY3: return _quote(value, safe=safe) return _quote(encode_utf8(value), safe) def encode_utf8(value): if type(value) in six.integer_types + (float, bool): # As of requests 2.11.0, headers must be byte- or unicode-strings. # Convert some known-good types as a convenience for developers. # Note that we *don't* convert subclasses, as they may have overriddden # __str__ or __repr__. # See https://github.com/kennethreitz/requests/pull/3366 for more info value = str(value) if isinstance(value, six.text_type): value = value.encode('utf8') return value def encode_meta_headers(headers): """Only encode metadata headers keys""" ret = {} for header, value in headers.items(): value = encode_utf8(value) header = header.lower() if (isinstance(header, six.string_types) and header.startswith(USER_METADATA_TYPE)): header = encode_utf8(header) ret[header] = value return ret class _ObjectBody(object): """ Readable and iterable object body response wrapper. 
""" def __init__(self, resp, chunk_size): """ Wrap the underlying response :param resp: the response to wrap :param chunk_size: number of bytes to return each iteration/next call """ self.resp = resp self.chunk_size = chunk_size def read(self, length=None): return self.resp.read(length) def __iter__(self): return self def next(self): buf = self.read(self.chunk_size) if not buf: raise StopIteration() return buf def __next__(self): return self.next() def close(self): self.resp.close() class _RetryBody(_ObjectBody): """ Wrapper for object body response which triggers a retry (from offset) if the connection is dropped after partially downloading the object. """ def __init__(self, resp, connection, container, obj, resp_chunk_size=None, query_string=None, response_dict=None, headers=None): """ Wrap the underlying response :param resp: the response to wrap :param connection: Connection class instance :param container: the name of the container the object is in :param obj: the name of object we are downloading :param resp_chunk_size: if defined, chunk size of data to read :param query_string: if set will be appended with '?' 
class HTTPConnection(object):
    def __init__(self, url, proxy=None, cacert=None, insecure=False,
                 cert=None, cert_key=None, ssl_compression=False,
                 default_user_agent=None, timeout=None):
        """
        Make an HTTPConnection or HTTPSConnection

        :param url: url to connect to
        :param proxy: proxy to connect through, if any; None by default; str
                      of the format 'http://127.0.0.1:8888' to set one
        :param cacert: A CA bundle file to use in verifying a TLS server
                      certificate.
        :param insecure: Allow to access servers without checking SSL certs.
                         The server's certificate will not be verified.
        :param cert: Client certificate file to connect on SSL server
                     requiring SSL client certificate.
        :param cert_key: Client certificate private key file.
        :param ssl_compression: SSL compression should be disabled by default
                                and this setting is not usable as of now. The
                                parameter is kept for backward compatibility.
        :param default_user_agent: Set the User-Agent header on every request.
                                   If set to None (default), the user agent
                                   will be "python-swiftclient-<version>".
                                   This may be overridden on a per-request
                                   basis by explicitly setting the user-agent
                                   header on a call to request().
        :param timeout: socket read timeout value, passed directly to
                        the requests library.
        :raises ClientException: Unable to handle protocol scheme
        """
        self.url = url
        self.parsed_url = urlparse(url)
        self.host = self.parsed_url.netloc
        self.port = self.parsed_url.port
        self.requests_args = {}
        self.request_session = requests.Session()
        # Don't use requests's default headers
        self.request_session.headers = None
        self.resp = None
        if self.parsed_url.scheme not in ('http', 'https'):
            raise ClientException('Unsupported scheme "%s" in url "%s"'
                                  % (self.parsed_url.scheme, url))
        self.requests_args['verify'] = not insecure
        if cacert and not insecure:
            # verify requests parameter is used to pass the CA_BUNDLE file
            # see: http://docs.python-requests.org/en/latest/user/advanced/
            self.requests_args['verify'] = cacert
        if cert:
            # NOTE(cbrandily): cert requests parameter is used to pass client
            # cert path or a tuple with client certificate/key paths.
            if cert_key:
                self.requests_args['cert'] = cert, cert_key
            else:
                self.requests_args['cert'] = cert

        if proxy:
            proxy_parsed = urlparse(proxy)
            if not proxy_parsed.scheme:
                raise ClientException("Proxy's missing scheme")
            self.requests_args['proxies'] = {
                proxy_parsed.scheme: '%s://%s' % (
                    proxy_parsed.scheme,
                    proxy_parsed.netloc
                )
            }
        # Stream responses so large object bodies are not buffered in memory.
        self.requests_args['stream'] = True
        if default_user_agent is None:
            default_user_agent = \
                'python-swiftclient-%s' % swiftclient_version.version_string
        self.default_user_agent = default_user_agent
        if timeout:
            self.requests_args['timeout'] = timeout

    def __del__(self):
        """Cleanup resources other than memory"""
        if self.request_session:
            # The session we create must be closed to free up file descriptors
            try:
                self.request_session.close()
            finally:
                self.request_session = None

    def _request(self, *arg, **kwarg):
        """Final wrapper before requests call, to be patched in tests"""
        return self.request_session.request(*arg, **kwarg)

    def request(self, method, full_path, data=None, headers=None, files=None):
        """Encode url and header, then call requests.request"""
        if headers is None:
            headers = {}
        else:
            headers = encode_meta_headers(headers)

        # set a default User-Agent header if it wasn't passed in
        if 'user-agent' not in headers:
            headers['user-agent'] = self.default_user_agent
        url = "%s://%s%s" % (
            self.parsed_url.scheme,
            self.parsed_url.netloc,
            full_path)
        self.resp = self._request(method, url, headers=headers, data=data,
                                  files=files, **self.requests_args)
        return self.resp

    def putrequest(self, full_path, data=None, headers=None, files=None):
        """
        Use python-requests files upload

        :param data: Use data generator for chunked-transfer
        :param files: Use files for default transfer
        """
        return self.request('PUT', full_path, data, headers, files)

    def getresponse(self):
        """Adapt requests response to httplib interface"""
        self.resp.status = self.resp.status_code
        old_getheader = self.resp.raw.getheader

        # requests/urllib3 expose header values latin-1 decoded on Python 3;
        # round-trip them so callers see proper UTF-8 text.
        def _decode_header(string):
            if string is None or six.PY2:
                return string
            return string.encode('iso-8859-1').decode('utf-8')

        def _encode_header(string):
            if string is None or six.PY2:
                return string
            return string.encode('utf-8').decode('iso-8859-1')

        def getheaders():
            return [(_decode_header(k), _decode_header(v))
                    for k, v in self.resp.headers.items()]

        def getheader(k, v=None):
            return _decode_header(old_getheader(
                _encode_header(k.lower()), _encode_header(v)))

        def releasing_read(*args, **kwargs):
            chunk = self.resp.raw.read(*args, **kwargs)
            if not chunk:
                # NOTE(sigmavirus24): Release the connection back to the
                # urllib3's connection pool. This will reduce the number of
                # log messages seen in bug #1341777. This does not actually
                # close a socket. It will also prevent people from being
                # mislead as to the cause of a bug as in bug #1424732.
                self.resp.close()
            return chunk

        self.resp.getheaders = getheaders
        self.resp.getheader = getheader
        self.resp.read = releasing_read

        return self.resp

    def close(self):
        if self.resp:
            self.resp.close()
        self.request_session.close()


def http_connection(*arg, **kwarg):
    """:returns: tuple of (parsed url, connection object)"""
    conn = HTTPConnection(*arg, **kwarg)
    return conn.parsed_url, conn


def get_auth_1_0(url, user, key, snet, **kwargs):
    """Authenticate against a v1.0 (TempAuth/Rackspace-style) auth endpoint.

    :returns: a tuple of (storage_url, token)
    :raises ClientException: HTTP GET request to auth URL failed
    """
    cacert = kwargs.get('cacert', None)
    insecure = kwargs.get('insecure', False)
    cert = kwargs.get('cert')
    cert_key = kwargs.get('cert_key')
    timeout = kwargs.get('timeout', None)
    parsed, conn = http_connection(url, cacert=cacert,
                                   insecure=insecure,
                                   cert=cert,
                                   cert_key=cert_key,
                                   timeout=timeout)
    method = 'GET'
    headers = {'X-Auth-User': user, 'X-Auth-Key': key}
    conn.request(method, parsed.path, '', headers)
    resp = conn.getresponse()
    body = resp.read()
    resp.close()
    conn.close()
    http_log((url, method,), headers, resp, body)
    url = resp.getheader('x-storage-url')

    # There is a side-effect on current Rackspace 1.0 server where a
    # bad URL would get you that document page and a 200. We error out
    # if we don't have a x-storage-url header and if we get a body.
    if resp.status < 200 or resp.status >= 300 or (body and not url):
        raise ClientException.from_response(resp, 'Auth GET failed', body)
    if snet:
        parsed = list(urlparse(url))
        # Second item in the list is the netloc
        netloc = parsed[1]
        parsed[1] = 'snet-' + netloc
        url = urlunparse(parsed)

    token = resp.getheader('x-storage-token', resp.getheader('x-auth-token'))
    return url, token
def get_keystoneclient_2_0(auth_url, user, key, os_options, **kwargs):
    # this function is only here to preserve the historic 'public'
    # interface of this module
    kwargs.update({'auth_version': '2.0'})
    return get_auth_keystone(auth_url, user, key, os_options, **kwargs)


def get_auth_keystone(auth_url, user, key, os_options, **kwargs):
    """
    Authenticate against a keystone server.

    We are using the keystoneclient library for authentication.

    :returns: a tuple of (storage_endpoint, auth_token)
    :raises ClientException: missing keystoneclient, bad credentials,
                             or no matching endpoint in the catalog
    """

    insecure = kwargs.get('insecure', False)
    timeout = kwargs.get('timeout', None)
    auth_version = kwargs.get('auth_version', None)
    debug = logger.isEnabledFor(logging.DEBUG)

    # Add the version suffix in case of versionless Keystone endpoints. If
    # auth_version is also unset it is likely that it is v3
    if not VERSIONFUL_AUTH_PATH.match(
            urlparse(auth_url).path.rstrip('/').rsplit('/', 1)[-1]):
        # Normalize auth_url to end in a slash because urljoin
        auth_url = auth_url.rstrip('/') + '/'
        if auth_version and auth_version in AUTH_VERSIONS_V2:
            auth_url = urljoin(auth_url, "v2.0")
        else:
            auth_url = urljoin(auth_url, "v3")
            auth_version = '3'
        logger.debug("Versionless auth_url - using %s as endpoint" % auth_url)

    # Legacy default if not set
    if auth_version is None:
        auth_version = '2'

    # Pick the keystoneclient module matching the requested auth version;
    # either may be None when the library (or that version) is missing.
    ksclient = None
    if auth_version in AUTH_VERSIONS_V3:
        if ksclient_v3 is not None:
            ksclient = ksclient_v3
    else:
        if ksclient_v2 is not None:
            ksclient = ksclient_v2

    if ksclient is None:
        raise ClientException('''
Auth versions 2.0 and 3 require python-keystoneclient, install it or use Auth
version 1.0 which requires ST_AUTH, ST_USER, and ST_KEY environment
variables to be set or overridden with -A, -U, or -K.''')

    try:
        _ksclient = ksclient.Client(
            username=user,
            password=key,
            token=os_options.get('auth_token'),
            tenant_name=os_options.get('tenant_name'),
            tenant_id=os_options.get('tenant_id'),
            user_id=os_options.get('user_id'),
            user_domain_name=os_options.get('user_domain_name'),
            user_domain_id=os_options.get('user_domain_id'),
            project_name=os_options.get('project_name'),
            project_id=os_options.get('project_id'),
            project_domain_name=os_options.get('project_domain_name'),
            project_domain_id=os_options.get('project_domain_id'),
            debug=debug,
            cacert=kwargs.get('cacert'),
            cert=kwargs.get('cert'),
            key=kwargs.get('cert_key'),
            auth_url=auth_url, insecure=insecure, timeout=timeout)
    except ksexceptions.Unauthorized:
        msg = 'Unauthorized. Check username, password and tenant name/id.'
        if auth_version in AUTH_VERSIONS_V3:
            msg = ('Unauthorized. Check username/id, password, '
                   'tenant name/id and user/tenant domain name/id.')
        raise ClientException(msg)
    except ksexceptions.AuthorizationFailure as err:
        raise ClientException('Authorization Failure. %s' % err)
    service_type = os_options.get('service_type') or 'object-store'
    endpoint_type = os_options.get('endpoint_type') or 'publicURL'
    try:
        # Optionally narrow the catalog lookup to a region.
        filter_kwargs = {}
        if os_options.get('region_name'):
            filter_kwargs['attr'] = 'region'
            filter_kwargs['filter_value'] = os_options['region_name']
        endpoint = _ksclient.service_catalog.url_for(
            service_type=service_type,
            endpoint_type=endpoint_type,
            **filter_kwargs)
    except ksexceptions.EndpointNotFound:
        raise ClientException('Endpoint for %s not found - '
                              'have you specified a region?' % service_type)
    return endpoint, _ksclient.auth_token
def get_auth(auth_url, user, key, **kwargs):
    """
    Get authentication/authorization credentials.

    :kwarg auth_version: the api version of the supplied auth params
    :kwarg os_options: a dict, the openstack identity service options

    :returns: a tuple, (storage_url, token)

    N.B. if the optional os_options parameter includes a non-empty
    'object_storage_url' key it will override the default storage url returned
    by the auth service.

    The snet parameter is used for Rackspace's ServiceNet internal network
    implementation. In this function, it simply adds *snet-* to the beginning
    of the host name for the returned storage URL. With Rackspace Cloud Files,
    use of this network path causes no bandwidth charges but requires the
    client to be running on Rackspace's ServiceNet network.
    """
    session = kwargs.get('session', None)
    auth_version = kwargs.get('auth_version', '1')
    # Work on a shallow copy so the caller-supplied dict is never mutated
    # (the v2/v3 branch below writes 'tenant_name' into os_options).
    os_options = dict(kwargs.get('os_options', {}))

    cacert = kwargs.get('cacert', None)
    insecure = kwargs.get('insecure', False)
    cert = kwargs.get('cert')
    cert_key = kwargs.get('cert_key')
    timeout = kwargs.get('timeout', None)

    if session:
        # A keystoneauth session already carries credentials; just look up
        # the endpoint and token.
        service_type = os_options.get('service_type', 'object-store')
        interface = os_options.get('endpoint_type', 'public')
        region_name = os_options.get('region_name')
        storage_url = session.get_endpoint(service_type=service_type,
                                           interface=interface,
                                           region_name=region_name)
        token = session.get_token()
    elif auth_version in AUTH_VERSIONS_V1:
        storage_url, token = get_auth_1_0(auth_url,
                                          user,
                                          key,
                                          kwargs.get('snet'),
                                          cacert=cacert,
                                          insecure=insecure,
                                          cert=cert,
                                          cert_key=cert_key,
                                          timeout=timeout)
    elif auth_version in AUTH_VERSIONS_V2 + AUTH_VERSIONS_V3:
        # We are handling a special use case here where the user argument
        # specifies both the user name and tenant name in the form tenant:user
        if user and not kwargs.get('tenant_name') and ':' in user:
            os_options['tenant_name'], user = user.split(':')

        # We are allowing to have a tenant_name argument in get_auth
        # directly without having os_options
        if kwargs.get('tenant_name'):
            os_options['tenant_name'] = kwargs['tenant_name']

        if not (os_options.get('tenant_name') or os_options.get('tenant_id')
                or os_options.get('project_name')
                or os_options.get('project_id')):
            if auth_version in AUTH_VERSIONS_V2:
                raise ClientException('No tenant specified')
            raise ClientException('No project name or project id specified.')

        storage_url, token = get_auth_keystone(auth_url, user,
                                               key, os_options,
                                               cacert=cacert,
                                               insecure=insecure,
                                               cert=cert,
                                               cert_key=cert_key,
                                               timeout=timeout,
                                               auth_version=auth_version)
    else:
        raise ClientException('Unknown auth_version %s specified and no '
                              'session found.' % auth_version)

    # Override storage url, if necessary
    if os_options.get('object_storage_url'):
        return os_options['object_storage_url'], token
    else:
        return storage_url, token
def resp_header_dict(resp):
    """Return the response's headers as a dict with lower-cased, parsed
    header names and values."""
    resp_headers = {}
    for header, value in resp.getheaders():
        header = parse_header_string(header).lower()
        resp_headers[header] = parse_header_string(value)
    return resp_headers


def store_response(resp, response_dict):
    """
    store information about an operation into a dict

    :param resp: an http response object containing the response
                 headers
    :param response_dict: a dict into which are placed the
       status, reason and a dict of lower-cased headers
    """
    if response_dict is not None:
        response_dict['status'] = resp.status
        response_dict['reason'] = resp.reason
        response_dict['headers'] = resp_header_dict(resp)


def get_account(url, token, marker=None, limit=None, prefix=None,
                end_marker=None, http_conn=None, full_listing=False,
                service_token=None, headers=None, delimiter=None):
    """
    Get a listing of containers for the account.

    :param url: storage URL
    :param token: auth token
    :param marker: marker query
    :param limit: limit query
    :param prefix: prefix query
    :param end_marker: end_marker query
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param full_listing: if True, return a full listing, else returns a max
                         of 10000 listings
    :param service_token: service auth token
    :param headers: additional headers to include in the request
    :param delimiter: delimiter query
    :returns: a tuple of (response headers, a list of containers) The response
              headers will be a dict and all header names will be lowercase.
    :raises ClientException: HTTP GET request failed
    """
    req_headers = {'X-Auth-Token': token, 'Accept-Encoding': 'gzip'}
    if service_token:
        req_headers['X-Service-Token'] = service_token
    if headers:
        req_headers.update(headers)

    if not http_conn:
        http_conn = http_connection(url)
    if full_listing:
        rv = get_account(url, token, marker, limit, prefix, end_marker,
                         http_conn, headers=req_headers, delimiter=delimiter)
        listing = rv[1]
        while listing:
            # When a delimiter is used, the last entry may be a 'subdir'
            # item with no 'name' key; fall back to it for the next marker
            # (mirrors get_container's pagination).
            if not delimiter:
                marker = listing[-1]['name']
            else:
                marker = listing[-1].get('name', listing[-1].get('subdir'))
            listing = get_account(url, token, marker, limit, prefix,
                                  end_marker, http_conn, headers=req_headers,
                                  delimiter=delimiter)[1]
            if listing:
                rv[1].extend(listing)
        return rv
    parsed, conn = http_conn
    qs = 'format=json'
    if marker:
        qs += '&marker=%s' % quote(marker)
    if limit:
        qs += '&limit=%d' % limit
    if prefix:
        qs += '&prefix=%s' % quote(prefix)
    if delimiter:
        qs += '&delimiter=%s' % quote(delimiter)
    if end_marker:
        qs += '&end_marker=%s' % quote(end_marker)
    full_path = '%s?%s' % (parsed.path, qs)
    method = 'GET'
    conn.request(method, full_path, '', req_headers)
    resp = conn.getresponse()
    body = resp.read()
    http_log(("%s?%s" % (url, qs), method,), {'headers': req_headers},
             resp, body)

    resp_headers = resp_header_dict(resp)
    if resp.status < 200 or resp.status >= 300:
        raise ClientException.from_response(resp, 'Account GET failed', body)
    if resp.status == 204:
        return resp_headers, []
    return resp_headers, parse_api_response(resp_headers, body)


def head_account(url, token, http_conn=None, headers=None,
                 service_token=None):
    """
    Get account stats.

    :param url: storage URL
    :param token: auth token
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param headers: additional headers to include in the request
    :param service_token: service auth token
    :returns: a dict containing the response's headers (all header names will
              be lowercase)
    :raises ClientException: HTTP HEAD request failed
    """
    if http_conn:
        parsed, conn = http_conn
    else:
        parsed, conn = http_connection(url)
    method = "HEAD"
    req_headers = {'X-Auth-Token': token}
    if service_token:
        req_headers['X-Service-Token'] = service_token
    if headers:
        req_headers.update(headers)
    conn.request(method, parsed.path, '', req_headers)
    resp = conn.getresponse()
    body = resp.read()
    http_log((url, method,), {'headers': req_headers}, resp, body)
    if resp.status < 200 or resp.status >= 300:
        raise ClientException.from_response(resp, 'Account HEAD failed', body)
    resp_headers = resp_header_dict(resp)
    return resp_headers
def post_account(url, token, headers, http_conn=None, response_dict=None,
                 service_token=None, query_string=None, data=None):
    """
    Update an account's metadata.

    :param url: storage URL
    :param token: auth token
    :param headers: additional headers to include in the request
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param response_dict: an optional dictionary into which to place
                          the response - status, reason and headers
    :param service_token: service auth token
    :param query_string: if set will be appended with '?' to generated path
    :param data: an optional message body for the request
    :raises ClientException: HTTP POST request failed
    :returns: resp_headers, body
    """
    if http_conn:
        parsed, conn = http_conn
    else:
        parsed, conn = http_connection(url)
    method = 'POST'
    path = parsed.path
    if query_string:
        path += '?' + query_string
    req_headers = {'X-Auth-Token': token}
    if service_token:
        req_headers['X-Service-Token'] = service_token
    if headers:
        req_headers.update(headers)
    conn.request(method, path, data, req_headers)
    resp = conn.getresponse()
    body = resp.read()
    http_log((url, method,), {'headers': req_headers}, resp, body)

    store_response(resp, response_dict)
    if resp.status < 200 or resp.status >= 300:
        raise ClientException.from_response(resp, 'Account POST failed', body)
    # NOTE(review): unlike the other calls this lower-cases the keys itself
    # instead of using resp_header_dict(), so values are not run through
    # parse_header_string() here.
    resp_headers = {}
    for header, value in resp.getheaders():
        resp_headers[header.lower()] = value
    return resp_headers, body


def get_container(url, token, container, marker=None, limit=None,
                  prefix=None, delimiter=None, end_marker=None,
                  path=None, http_conn=None, full_listing=False,
                  service_token=None, headers=None, query_string=None):
    """
    Get a listing of objects for the container.

    :param url: storage URL
    :param token: auth token
    :param container: container name to get a listing for
    :param marker: marker query
    :param limit: limit query
    :param prefix: prefix query
    :param delimiter: string to delimit the queries on
    :param end_marker: marker query
    :param path: path query (equivalent: "delimiter=/" and "prefix=path/")
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param full_listing: if True, return a full listing, else returns a max
                         of 10000 listings
    :param service_token: service auth token
    :param headers: additional headers to include in the request
    :param query_string: if set will be appended with '?' to generated path
    :returns: a tuple of (response headers, a list of objects) The response
              headers will be a dict and all header names will be lowercase.
    :raises ClientException: HTTP GET request failed
    """
    if not http_conn:
        http_conn = http_connection(url)
    if full_listing:
        # Page through the listing, re-issuing the request with the last
        # returned name (or subdir, when a delimiter is used) as marker.
        rv = get_container(url, token, container, marker, limit, prefix,
                           delimiter, end_marker, path, http_conn,
                           service_token=service_token, headers=headers)
        listing = rv[1]
        while listing:
            if not delimiter:
                marker = listing[-1]['name']
            else:
                marker = listing[-1].get('name', listing[-1].get('subdir'))
            listing = get_container(url, token, container, marker, limit,
                                    prefix, delimiter, end_marker, path,
                                    http_conn, service_token=service_token,
                                    headers=headers)[1]
            if listing:
                rv[1].extend(listing)
        return rv
    parsed, conn = http_conn
    cont_path = '%s/%s' % (parsed.path, quote(container))
    qs = 'format=json'
    if marker:
        qs += '&marker=%s' % quote(marker)
    if limit:
        qs += '&limit=%d' % limit
    if prefix:
        qs += '&prefix=%s' % quote(prefix)
    if delimiter:
        qs += '&delimiter=%s' % quote(delimiter)
    if end_marker:
        qs += '&end_marker=%s' % quote(end_marker)
    if path:
        qs += '&path=%s' % quote(path)
    if query_string:
        qs += '&%s' % query_string.lstrip('?')
    req_headers = {'X-Auth-Token': token, 'Accept-Encoding': 'gzip'}
    if service_token:
        req_headers['X-Service-Token'] = service_token
    if headers:
        req_headers.update(headers)
    method = 'GET'
    conn.request(method, '%s?%s' % (cont_path, qs), '', req_headers)
    resp = conn.getresponse()
    body = resp.read()
    http_log(('%(url)s%(cont_path)s?%(qs)s' %
              {'url': url.replace(parsed.path, ''),
               'cont_path': cont_path,
               'qs': qs}, method,),
             {'headers': req_headers}, resp, body)

    if resp.status < 200 or resp.status >= 300:
        raise ClientException.from_response(resp, 'Container GET failed',
                                            body)
    resp_headers = resp_header_dict(resp)
    if resp.status == 204:
        return resp_headers, []
    return resp_headers, parse_api_response(resp_headers, body)
def head_container(url, token, container, http_conn=None, headers=None,
                   service_token=None):
    """
    Get container stats.

    :param url: storage URL
    :param token: auth token
    :param container: container name to get stats for
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param headers: additional headers to include in the request
    :param service_token: service auth token
    :returns: a dict containing the response's headers (all header names will
              be lowercase)
    :raises ClientException: HTTP HEAD request failed
    """
    if http_conn:
        parsed, conn = http_conn
    else:
        parsed, conn = http_connection(url)
    path = '%s/%s' % (parsed.path, quote(container))
    method = 'HEAD'
    req_headers = {'X-Auth-Token': token}
    if service_token:
        req_headers['X-Service-Token'] = service_token
    if headers:
        req_headers.update(headers)
    conn.request(method, path, '', req_headers)
    resp = conn.getresponse()
    body = resp.read()
    http_log(('%s%s' % (url.replace(parsed.path, ''), path), method,),
             {'headers': req_headers}, resp, body)

    if resp.status < 200 or resp.status >= 300:
        raise ClientException.from_response(
            resp, 'Container HEAD failed', body)
    resp_headers = resp_header_dict(resp)
    return resp_headers


def put_container(url, token, container, headers=None, http_conn=None,
                  response_dict=None, service_token=None, query_string=None):
    """
    Create a container

    :param url: storage URL
    :param token: auth token
    :param container: container name to create
    :param headers: additional headers to include in the request
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param response_dict: an optional dictionary into which to place
                          the response - status, reason and headers
    :param service_token: service auth token
    :param query_string: if set will be appended with '?' to generated path
    :raises ClientException: HTTP PUT request failed
    """
    if http_conn:
        parsed, conn = http_conn
    else:
        parsed, conn = http_connection(url)
    path = '%s/%s' % (parsed.path, quote(container))
    method = 'PUT'
    req_headers = {'X-Auth-Token': token}
    if service_token:
        req_headers['X-Service-Token'] = service_token
    if headers:
        req_headers.update(headers)
    # A zero-byte body needs an explicit Content-Length unless the caller
    # already supplied one.
    if 'content-length' not in (k.lower() for k in req_headers):
        req_headers['Content-Length'] = '0'
    if query_string:
        path += '?' + query_string.lstrip('?')
    conn.request(method, path, '', req_headers)
    resp = conn.getresponse()
    body = resp.read()

    store_response(resp, response_dict)

    http_log(('%s%s' % (url.replace(parsed.path, ''), path), method,),
             {'headers': req_headers}, resp, body)
    if resp.status < 200 or resp.status >= 300:
        raise ClientException.from_response(resp, 'Container PUT failed',
                                            body)


def post_container(url, token, container, headers, http_conn=None,
                   response_dict=None, service_token=None):
    """
    Update a container's metadata.

    :param url: storage URL
    :param token: auth token
    :param container: container name to update
    :param headers: additional headers to include in the request
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param response_dict: an optional dictionary into which to place
                          the response - status, reason and headers
    :param service_token: service auth token
    :raises ClientException: HTTP POST request failed
    """
    if http_conn:
        parsed, conn = http_conn
    else:
        parsed, conn = http_connection(url)
    path = '%s/%s' % (parsed.path, quote(container))
    method = 'POST'
    req_headers = {'X-Auth-Token': token}
    if service_token:
        req_headers['X-Service-Token'] = service_token
    if headers:
        req_headers.update(headers)
    # Check the merged req_headers (not the raw headers argument, which may
    # be None) for an existing Content-Length -- consistent with
    # put_container and avoids a TypeError when headers is None.
    if 'content-length' not in (k.lower() for k in req_headers):
        req_headers['Content-Length'] = '0'
    conn.request(method, path, '', req_headers)
    resp = conn.getresponse()
    body = resp.read()
    http_log(('%s%s' % (url.replace(parsed.path, ''), path), method,),
             {'headers': req_headers}, resp, body)

    store_response(resp, response_dict)
    if resp.status < 200 or resp.status >= 300:
        raise ClientException.from_response(
            resp, 'Container POST failed', body)
def delete_container(url, token, container, http_conn=None,
                     response_dict=None, service_token=None,
                     query_string=None, headers=None):
    """
    Delete a container

    :param url: storage URL
    :param token: auth token
    :param container: container name to delete
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param response_dict: an optional dictionary into which to place
                          the response - status, reason and headers
    :param service_token: service auth token
    :param query_string: if set will be appended with '?' to generated path
    :param headers: additional headers to include in the request
    :raises ClientException: HTTP DELETE request failed
    """
    parsed, conn = http_conn if http_conn else http_connection(url)

    # Copy the caller's headers (if any) so they are never mutated.
    req_headers = dict(headers) if headers else {}
    req_headers['X-Auth-Token'] = token
    if service_token:
        req_headers['X-Service-Token'] = service_token

    path = '%s/%s' % (parsed.path, quote(container))
    if query_string:
        path += '?' + query_string.lstrip('?')

    method = 'DELETE'
    conn.request(method, path, '', req_headers)
    resp = conn.getresponse()
    body = resp.read()
    http_log(('%s%s' % (url.replace(parsed.path, ''), path), method,),
             {'headers': req_headers}, resp, body)

    store_response(resp, response_dict)
    if not 200 <= resp.status < 300:
        raise ClientException.from_response(
            resp, 'Container DELETE failed', body)


def get_object(url, token, container, name, http_conn=None,
               resp_chunk_size=None, query_string=None,
               response_dict=None, headers=None, service_token=None):
    """
    Get an object

    :param url: storage URL
    :param token: auth token
    :param container: container name that the object is in
    :param name: object name to get
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param resp_chunk_size: if defined, chunk size of data to read. NOTE: If
                            you specify a resp_chunk_size you must fully read
                            the object's contents before making another
                            request.
    :param query_string: if set will be appended with '?' to generated path
    :param response_dict: an optional dictionary into which to place
                          the response - status, reason and headers
    :param headers: an optional dictionary with additional headers to include
                    in the request
    :param service_token: service auth token
    :returns: a tuple of (response headers, the object's contents) The
              response headers will be a dict and all header names will be
              lowercase.
    :raises ClientException: HTTP GET request failed
    """
    parsed, conn = http_conn if http_conn else http_connection(url)

    path = '%s/%s/%s' % (parsed.path, quote(container), quote(name))
    if query_string:
        path = path + '?' + query_string

    req_headers = headers.copy() if headers else {}
    req_headers['X-Auth-Token'] = token
    if service_token:
        req_headers['X-Service-Token'] = service_token

    method = 'GET'
    conn.request(method, path, '', req_headers)
    resp = conn.getresponse()

    # Record status/reason/headers before any body is consumed, so the
    # caller's response_dict is populated even on error.
    parsed_response = {}
    store_response(resp, parsed_response)
    if response_dict is not None:
        response_dict.update(parsed_response)

    log_url = '%s%s' % (url.replace(parsed.path, ''), path)
    if not 200 <= resp.status < 300:
        body = resp.read()
        http_log((log_url, method,), {'headers': req_headers}, resp, body)
        raise ClientException.from_response(resp, 'Object GET failed', body)

    # Either hand back a lazily-read wrapper or the fully-read body.
    if resp_chunk_size:
        object_body = _ObjectBody(resp, resp_chunk_size)
    else:
        object_body = resp.read()

    http_log((log_url, method,), {'headers': req_headers}, resp, None)

    return parsed_response['headers'], object_body


def head_object(url, token, container, name, http_conn=None,
                service_token=None, headers=None, query_string=None):
    """
    Get object info

    :param url: storage URL
    :param token: auth token
    :param container: container name that the object is in
    :param name: object name to get info for
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param service_token: service auth token
    :param headers: additional headers to include in the request
    :returns: a dict containing the response's headers (all header names will
              be lowercase)
    :raises ClientException: HTTP HEAD request failed
    """
    parsed, conn = http_conn if http_conn else http_connection(url)

    path = '%s/%s/%s' % (parsed.path, quote(container), quote(name))
    if query_string:
        path = path + '?' + query_string

    req_headers = dict(headers) if headers else {}
    req_headers['X-Auth-Token'] = token
    if service_token:
        req_headers['X-Service-Token'] = service_token

    method = 'HEAD'
    conn.request(method, path, '', req_headers)
    resp = conn.getresponse()
    body = resp.read()
    http_log(('%s%s' % (url.replace(parsed.path, ''), path), method,),
             {'headers': req_headers}, resp, body)

    if not 200 <= resp.status < 300:
        raise ClientException.from_response(resp, 'Object HEAD failed', body)
    return resp_header_dict(resp)
def put_object(url, token=None, container=None, name=None, contents=None,
               content_length=None, etag=None, chunk_size=None,
               content_type=None, headers=None, http_conn=None, proxy=None,
               query_string=None, response_dict=None, service_token=None):
    """
    Put an object

    :param url: storage URL
    :param token: auth token; if None, no token will be sent
    :param container: container name that the object is in; if None, the
                      container name is expected to be part of the url
    :param name: object name to put; if None, the object name is expected to
                 be part of the url
    :param contents: a string, a file-like object or an iterable
                     to read object data from;
                     if None, a zero-byte put will be done
    :param content_length: value to send as content-length header; also limits
                           the amount read from contents; if None, it will be
                           computed via the contents or chunked transfer
                           encoding will be used
    :param etag: etag of contents; if None, no etag will be sent
    :param chunk_size: chunk size of data to write; it defaults to 65536;
                       used only if the contents object has a 'read'
                       method, e.g. file-like objects, ignored otherwise
    :param content_type: value to send as content-type header, overriding any
                         value included in the headers param; if None and no
                         value is found in the headers param, an empty string
                         value will be sent
    :param headers: additional headers to include in the request, if any
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param proxy: proxy to connect through, if any; None by default; str of
                  the format 'http://127.0.0.1:8888' to set one
    :param query_string: if set will be appended with '?' to generated path
    :param response_dict: an optional dictionary into which to place
                          the response - status, reason and headers
    :param service_token: service auth token
    :returns: etag
    :raises ClientException: HTTP PUT request failed
    """
    if http_conn:
        parsed, conn = http_conn
    else:
        parsed, conn = http_connection(url, proxy=proxy)
    path = parsed.path
    if container:
        path = '%s/%s' % (path.rstrip('/'), quote(container))
    if name:
        path = '%s/%s' % (path.rstrip('/'), quote(name))
    if query_string:
        path += '?' + query_string
    if headers:
        headers = dict(headers)
    else:
        headers = {}
    if token:
        headers['X-Auth-Token'] = token
    if service_token:
        headers['X-Service-Token'] = service_token
    if etag:
        headers['ETag'] = etag.strip('"')
    if content_length is not None:
        headers['Content-Length'] = str(content_length)
    else:
        # Honor a caller-supplied Content-Length header so the wrappers
        # below can limit how much is read from contents.
        for n, v in headers.items():
            if n.lower() == 'content-length':
                content_length = int(v)
    if content_type is not None:
        headers['Content-Type'] = content_type
    elif 'Content-Type' not in headers:
        if StrictVersion(requests.__version__) < StrictVersion('2.4.0'):
            # python-requests sets application/x-www-form-urlencoded otherwise
            # if using python3.
            headers['Content-Type'] = ''
    if not contents:
        headers['Content-Length'] = '0'

    if isinstance(contents, (ReadableToIterable, LengthWrapper)):
        # Already wrapped; send as a (possibly chunked) data generator.
        conn.putrequest(path, headers=headers, data=contents)
    elif hasattr(contents, 'read'):
        if chunk_size is None:
            chunk_size = 65536

        # No known length -> chunked transfer; otherwise cap the read at
        # content_length.
        if content_length is None:
            data = ReadableToIterable(contents, chunk_size, md5=False)
        else:
            data = LengthWrapper(contents, content_length, md5=False)

        conn.putrequest(path, headers=headers, data=data)
    else:
        if chunk_size is not None:
            warn_msg = ('%s object has no "read" method, ignoring chunk_size'
                        % type(contents).__name__)
            warnings.warn(warn_msg, stacklevel=2)
        # Match requests's is_stream test
        if hasattr(contents, '__iter__') and not isinstance(contents, (
                six.text_type, six.binary_type, list, tuple, dict)):
            contents = iter_wrapper(contents)
        conn.request('PUT', path, contents, headers)

    resp = conn.getresponse()
    body = resp.read()
    http_log(('%s%s' % (url.replace(parsed.path, ''), path), 'PUT',),
             {'headers': headers}, resp, body)
    store_response(resp, response_dict)
    if resp.status < 200 or resp.status >= 300:
        raise ClientException.from_response(resp, 'Object PUT failed', body)

    etag = resp.getheader('etag', '').strip('"')
    return etag


# NOTE(review): post_object() continues past the end of this chunk; only its
# head is visible here.
def post_object(url, token, container, name, headers, http_conn=None,
                response_dict=None, service_token=None):
    """
    Update object metadata

    :param url: storage URL
    :param token: auth token
    :param container: container name that the object is in
    :param name: name of the object to update
    :param headers: additional headers to include in the request
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param response_dict: an optional dictionary into which to place
                          the response - status, reason and headers
    :param service_token: service auth token
    :raises ClientException: HTTP POST request failed
    """
    if http_conn:
        parsed, conn = http_conn
    else:
        parsed, conn = http_connection(url)
    path = '%s/%s/%s' % (parsed.path,
quote(container), quote(name)) req_headers = {'X-Auth-Token': token} if service_token: req_headers['X-Service-Token'] = service_token if headers: req_headers.update(headers) conn.request('POST', path, '', req_headers) resp = conn.getresponse() body = resp.read() http_log(('%s%s' % (url.replace(parsed.path, ''), path), 'POST',), {'headers': req_headers}, resp, body) store_response(resp, response_dict) if resp.status < 200 or resp.status >= 300: raise ClientException.from_response(resp, 'Object POST failed', body) def copy_object(url, token, container, name, destination=None, headers=None, fresh_metadata=None, http_conn=None, response_dict=None, service_token=None): """ Copy object :param url: storage URL :param token: auth token; if None, no token will be sent :param container: container name that the source object is in :param name: source object name :param destination: The container and object name of the destination object in the form of /container/object; if None, the copy will use the source as the destination. 
:param headers: additional headers to include in the request :param fresh_metadata: Enables object creation that omits existing user metadata, default None :param http_conn: HTTP connection object (If None, it will create the conn object) :param response_dict: an optional dictionary into which to place the response - status, reason and headers :param service_token: service auth token :raises ClientException: HTTP COPY request failed """ if http_conn: parsed, conn = http_conn else: parsed, conn = http_connection(url) path = parsed.path container = quote(container) name = quote(name) path = '%s/%s/%s' % (path.rstrip('/'), container, name) headers = dict(headers) if headers else {} if destination is not None: headers['Destination'] = quote(destination) elif container and name: headers['Destination'] = '/%s/%s' % (container, name) if token is not None: headers['X-Auth-Token'] = token if service_token is not None: headers['X-Service-Token'] = service_token if fresh_metadata is not None: # remove potential fresh metadata headers for fresh_hdr in [hdr for hdr in headers.keys() if hdr.lower() == 'x-fresh-metadata']: headers.pop(fresh_hdr) headers['X-Fresh-Metadata'] = 'true' if fresh_metadata else 'false' conn.request('COPY', path, '', headers) resp = conn.getresponse() body = resp.read() http_log(('%s%s' % (url.replace(parsed.path, ''), path), 'COPY',), {'headers': headers}, resp, body) store_response(resp, response_dict) if resp.status < 200 or resp.status >= 300: raise ClientException.from_response(resp, 'Object COPY failed', body) def delete_object(url, token=None, container=None, name=None, http_conn=None, headers=None, proxy=None, query_string=None, response_dict=None, service_token=None): """ Delete object :param url: storage URL :param token: auth token; if None, no token will be sent :param container: container name that the object is in; if None, the container name is expected to be part of the url :param name: object name to delete; if None, the object name is 
expected to be part of the url :param http_conn: a tuple of (parsed url, HTTPConnection object), (If None, it will create the conn object) :param headers: additional headers to include in the request :param proxy: proxy to connect through, if any; None by default; str of the format 'http://127.0.0.1:8888' to set one :param query_string: if set will be appended with '?' to generated path :param response_dict: an optional dictionary into which to place the response - status, reason and headers :param service_token: service auth token :raises ClientException: HTTP DELETE request failed """ if http_conn: parsed, conn = http_conn else: parsed, conn = http_connection(url, proxy=proxy) path = parsed.path if container: path = '%s/%s' % (path.rstrip('/'), quote(container)) if name: path = '%s/%s' % (path.rstrip('/'), quote(name)) if query_string: path += '?' + query_string if headers: headers = dict(headers) else: headers = {} if token: headers['X-Auth-Token'] = token if service_token: headers['X-Service-Token'] = service_token conn.request('DELETE', path, '', headers) resp = conn.getresponse() body = resp.read() http_log(('%s%s' % (url.replace(parsed.path, ''), path), 'DELETE',), {'headers': headers}, resp, body) store_response(resp, response_dict) if resp.status < 200 or resp.status >= 300: raise ClientException.from_response(resp, 'Object DELETE failed', body) def get_capabilities(http_conn): """ Get cluster capability infos. 
:param http_conn: a tuple of (parsed url, HTTPConnection object) :returns: a dict containing the cluster capabilities :raises ClientException: HTTP Capabilities GET failed """ parsed, conn = http_conn headers = {'Accept-Encoding': 'gzip'} conn.request('GET', parsed.path, '', headers) resp = conn.getresponse() body = resp.read() http_log((parsed.geturl(), 'GET',), {'headers': headers}, resp, body) if resp.status < 200 or resp.status >= 300: raise ClientException.from_response( resp, 'Capabilities GET failed', body) resp_headers = resp_header_dict(resp) return parse_api_response(resp_headers, body) class Connection(object): """ Convenience class to make requests that will also retry the request Requests will have an X-Auth-Token header whose value is either the preauthtoken or a token obtained from the auth service using the user credentials provided as args to the constructor. If os_options includes a service_username then requests will also have an X-Service-Token header whose value is a token obtained from the auth service using the service credentials. In this case the request url will be set to the storage_url obtained from the auth service for the service user, unless this is overridden by a preauthurl. 
""" def __init__(self, authurl=None, user=None, key=None, retries=5, preauthurl=None, preauthtoken=None, snet=False, starting_backoff=1, max_backoff=64, tenant_name=None, os_options=None, auth_version="1", cacert=None, insecure=False, cert=None, cert_key=None, ssl_compression=True, retry_on_ratelimit=False, timeout=None, session=None, force_auth_retry=False): """ :param authurl: authentication URL :param user: user name to authenticate as :param key: key/password to authenticate with :param retries: Number of times to retry the request before failing :param preauthurl: storage URL (if you have already authenticated) :param preauthtoken: authentication token (if you have already authenticated) note authurl/user/key/tenant_name are not required when specifying preauthtoken :param snet: use SERVICENET internal network default is False :param starting_backoff: initial delay between retries (seconds) :param max_backoff: maximum delay between retries (seconds) :param auth_version: OpenStack auth version, default is 1.0 :param tenant_name: The tenant/account name, required when connecting to an auth 2.0 system. :param os_options: The OpenStack options which can have tenant_id, auth_token, service_type, endpoint_type, tenant_name, object_storage_url, region_name, service_username, service_project_name, service_key :param insecure: Allow to access servers without checking SSL certs. The server's certificate will not be verified. :param cert: Client certificate file to connect on SSL server requiring SSL client certificate. :param cert_key: Client certificate private key file. :param ssl_compression: Whether to enable compression at the SSL layer. If set to 'False' and the pyOpenSSL library is present an attempt to disable SSL compression will be made. This may provide a performance increase for https upload/download operations. :param retry_on_ratelimit: by default, a ratelimited connection will raise an exception to the caller. 
Setting this parameter to True will cause a retry after a backoff. :param timeout: The connect timeout for the HTTP connection. :param session: A keystoneauth session object. :param force_auth_retry: reset auth info even if client got unexpected error except 401 Unauthorized. """ self.session = session self.authurl = authurl self.user = user self.key = key self.retries = retries self.http_conn = None self.attempts = 0 self.snet = snet self.starting_backoff = starting_backoff self.max_backoff = max_backoff self.auth_version = auth_version self.os_options = dict(os_options or {}) if tenant_name: self.os_options['tenant_name'] = tenant_name if preauthurl: self.os_options['object_storage_url'] = preauthurl self.url = preauthurl or self.os_options.get('object_storage_url') self.token = preauthtoken or self.os_options.get('auth_token') if self.os_options.get('service_username', None): self.service_auth = True else: self.service_auth = False self.service_token = None self.cacert = cacert self.insecure = insecure self.cert = cert self.cert_key = cert_key self.ssl_compression = ssl_compression self.auth_end_time = 0 self.retry_on_ratelimit = retry_on_ratelimit self.timeout = timeout self.force_auth_retry = force_auth_retry def close(self): if (self.http_conn and isinstance(self.http_conn, tuple) and len(self.http_conn) > 1): conn = self.http_conn[1] conn.close() self.http_conn = None def get_auth(self): self.url, self.token = get_auth(self.authurl, self.user, self.key, session=self.session, snet=self.snet, auth_version=self.auth_version, os_options=self.os_options, cacert=self.cacert, insecure=self.insecure, cert=self.cert, cert_key=self.cert_key, timeout=self.timeout) return self.url, self.token def get_service_auth(self): opts = self.os_options service_options = {} service_options['tenant_name'] = opts.get('service_project_name', None) service_options['region_name'] = opts.get('region_name', None) service_options['object_storage_url'] = opts.get('object_storage_url', 
None) service_user = opts.get('service_username', None) service_key = opts.get('service_key', None) return get_auth(self.authurl, service_user, service_key, session=self.session, snet=self.snet, auth_version=self.auth_version, os_options=service_options, cacert=self.cacert, insecure=self.insecure, timeout=self.timeout) def http_connection(self, url=None): return http_connection(url if url else self.url, cacert=self.cacert, insecure=self.insecure, cert=self.cert, cert_key=self.cert_key, ssl_compression=self.ssl_compression, timeout=self.timeout) def _add_response_dict(self, target_dict, kwargs): if target_dict is not None and 'response_dict' in kwargs: response_dict = kwargs['response_dict'] if 'response_dicts' in target_dict: target_dict['response_dicts'].append(response_dict) else: target_dict['response_dicts'] = [response_dict] target_dict.update(response_dict) def _retry(self, reset_func, func, *args, **kwargs): retried_auth = False backoff = self.starting_backoff caller_response_dict = kwargs.pop('response_dict', None) self.attempts = kwargs.pop('attempts', 0) while self.attempts <= self.retries or retried_auth: self.attempts += 1 try: if not self.url or not self.token: self.url, self.token = self.get_auth() self.close() if self.service_auth and not self.service_token: self.url, self.service_token = self.get_service_auth() self.close() self.auth_end_time = time() if not self.http_conn: self.http_conn = self.http_connection() kwargs['http_conn'] = self.http_conn if caller_response_dict is not None: kwargs['response_dict'] = {} rv = func(self.url, self.token, *args, service_token=self.service_token, **kwargs) self._add_response_dict(caller_response_dict, kwargs) return rv except SSLError: raise except (socket.error, RequestException): self._add_response_dict(caller_response_dict, kwargs) if self.attempts > self.retries: raise self.http_conn = None except ClientException as err: self._add_response_dict(caller_response_dict, kwargs) if err.http_status == 401: if 
self.session: should_retry = self.session.invalidate() else: # Without a proper session, just check for auth creds should_retry = all((self.authurl, self.user, self.key)) self.url = self.token = self.service_token = None if retried_auth or not should_retry: raise retried_auth = True elif self.attempts > self.retries or err.http_status is None: raise elif err.http_status == 408: self.http_conn = None elif 500 <= err.http_status <= 599: pass elif self.retry_on_ratelimit and err.http_status == 498: pass else: raise if self.force_auth_retry: self.url = self.token = self.service_token = None sleep(backoff) backoff = min(backoff * 2, self.max_backoff) if reset_func: reset_func(func, *args, **kwargs) def head_account(self, headers=None): """Wrapper for :func:`head_account`""" return self._retry(None, head_account, headers=headers) def get_account(self, marker=None, limit=None, prefix=None, end_marker=None, full_listing=False, headers=None, delimiter=None): """Wrapper for :func:`get_account`""" # TODO(unknown): With full_listing=True this will restart the entire # listing with each retry. Need to make a better version that just # retries where it left off. 
return self._retry(None, get_account, marker=marker, limit=limit, prefix=prefix, end_marker=end_marker, full_listing=full_listing, headers=headers, delimiter=delimiter) def post_account(self, headers, response_dict=None, query_string=None, data=None): """Wrapper for :func:`post_account`""" return self._retry(None, post_account, headers, query_string=query_string, data=data, response_dict=response_dict) def head_container(self, container, headers=None): """Wrapper for :func:`head_container`""" return self._retry(None, head_container, container, headers=headers) def get_container(self, container, marker=None, limit=None, prefix=None, delimiter=None, end_marker=None, path=None, full_listing=False, headers=None, query_string=None): """Wrapper for :func:`get_container`""" # TODO(unknown): With full_listing=True this will restart the entire # listing with each retry. Need to make a better version that just # retries where it left off. return self._retry(None, get_container, container, marker=marker, limit=limit, prefix=prefix, delimiter=delimiter, end_marker=end_marker, path=path, full_listing=full_listing, headers=headers, query_string=query_string) def put_container(self, container, headers=None, response_dict=None, query_string=None): """Wrapper for :func:`put_container`""" return self._retry(None, put_container, container, headers=headers, response_dict=response_dict, query_string=query_string) def post_container(self, container, headers, response_dict=None): """Wrapper for :func:`post_container`""" return self._retry(None, post_container, container, headers, response_dict=response_dict) def delete_container(self, container, response_dict=None, query_string=None, headers={}): """Wrapper for :func:`delete_container`""" return self._retry(None, delete_container, container, response_dict=response_dict, query_string=query_string, headers=headers) def head_object(self, container, obj, headers=None, query_string=None): """Wrapper for :func:`head_object`""" return 
self._retry(None, head_object, container, obj, headers=headers, query_string=query_string) def get_object(self, container, obj, resp_chunk_size=None, query_string=None, response_dict=None, headers=None): """Wrapper for :func:`get_object`""" rheaders, body = self._retry(None, get_object, container, obj, resp_chunk_size=resp_chunk_size, query_string=query_string, response_dict=response_dict, headers=headers) is_not_range_request = ( not headers or 'range' not in (k.lower() for k in headers)) retry_is_possible = ( is_not_range_request and resp_chunk_size and self.attempts <= self.retries and rheaders.get('transfer-encoding') is None) if retry_is_possible: body = _RetryBody(body.resp, self, container, obj, resp_chunk_size=resp_chunk_size, query_string=query_string, response_dict=response_dict, headers=headers) return rheaders, body def put_object(self, container, obj, contents, content_length=None, etag=None, chunk_size=None, content_type=None, headers=None, query_string=None, response_dict=None): """Wrapper for :func:`put_object`""" def _default_reset(*args, **kwargs): raise ClientException('put_object(%r, %r, ...) failure and no ' 'ability to reset contents for reupload.' 
% (container, obj)) if isinstance(contents, str) or not contents: # if its a str or None then you can retry as much as you want reset_func = None else: reset_func = _default_reset if self.retries > 0: tell = getattr(contents, 'tell', None) seek = getattr(contents, 'seek', None) reset = getattr(contents, 'reset', None) if tell and seek: orig_pos = tell() def reset_func(*a, **kw): seek(orig_pos) elif reset: reset_func = reset return self._retry(reset_func, put_object, container, obj, contents, content_length=content_length, etag=etag, chunk_size=chunk_size, content_type=content_type, headers=headers, query_string=query_string, response_dict=response_dict) def post_object(self, container, obj, headers, response_dict=None): """Wrapper for :func:`post_object`""" return self._retry(None, post_object, container, obj, headers, response_dict=response_dict) def copy_object(self, container, obj, destination=None, headers=None, fresh_metadata=None, response_dict=None): """Wrapper for :func:`copy_object`""" return self._retry(None, copy_object, container, obj, destination, headers, fresh_metadata, response_dict=response_dict) def delete_object(self, container, obj, query_string=None, response_dict=None, headers=None): """Wrapper for :func:`delete_object`""" return self._retry(None, delete_object, container, obj, query_string=query_string, response_dict=response_dict, headers=headers) def get_capabilities(self, url=None): url = url or self.url if not url: url, _ = self.get_auth() parsed = urlparse(urljoin(url, '/info')) if not self.http_conn: self.http_conn = self.http_connection(url) return get_capabilities((parsed, self.http_conn[1])) python-swiftclient-3.9.0/swiftclient/shell.py0000775000175000017500000025035613621303750021451 0ustar zuulzuul00000000000000#!/usr/bin/python -u # Copyright (c) 2010-2012 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function, unicode_literals import argparse import getpass import io import json import logging import signal import socket import warnings from os import environ, walk, _exit as os_exit from os.path import isfile, isdir, join from six import text_type, PY2 from six.moves.urllib.parse import unquote, urlparse from sys import argv as sys_argv, exit, stderr, stdin from time import gmtime, strftime from swiftclient import RequestException from swiftclient.utils import config_true_value, generate_temp_url, \ prt_bytes, JSONableIterable from swiftclient.multithreading import OutputManager from swiftclient.exceptions import ClientException from swiftclient import __version__ as client_version from swiftclient.client import logger_settings as client_logger_settings, \ parse_header_string from swiftclient.service import SwiftService, SwiftError, \ SwiftUploadObject, get_conn, process_options from swiftclient.command_helpers import print_account_stats, \ print_container_stats, print_object_stats try: from shlex import quote as sh_quote except ImportError: from pipes import quote as sh_quote BASENAME = 'swift' commands = ('delete', 'download', 'list', 'post', 'copy', 'stat', 'upload', 'capabilities', 'info', 'tempurl', 'auth', 'bash_completion') def immediate_exit(signum, frame): stderr.write(" Aborted\n") os_exit(2) st_delete_options = '''[--all] [--leave-segments] [--object-threads ] [--container-threads ] [--header ] [--prefix ] [ [] [...]] ''' st_delete_help = ''' Delete a container or objects within a container. 
Positional arguments: [] Name of container to delete from. [] Name of object to delete. Specify multiple times for multiple objects. Optional arguments: -a, --all Delete all containers and objects. --leave-segments Do not delete segments of manifest objects. -H, --header Adds a custom request header to use for deleting objects or an entire container . --object-threads Number of threads to use for deleting objects. Default is 10. --container-threads Number of threads to use for deleting containers. Default is 10. --prefix Only delete objects beginning with . '''.strip("\n") def st_delete(parser, args, output_manager, return_parser=False): parser.add_argument( '-a', '--all', action='store_true', dest='yes_all', default=False, help='Delete all containers and objects.') parser.add_argument( '-p', '--prefix', dest='prefix', help='Only delete items beginning with .') parser.add_argument( '-H', '--header', action='append', dest='header', default=[], help='Adds a custom request header to use for deleting objects ' 'or an entire container.') parser.add_argument( '--leave-segments', action='store_true', dest='leave_segments', default=False, help='Do not delete segments of manifest objects.') parser.add_argument( '--object-threads', type=int, default=10, help='Number of threads to use for deleting objects. ' 'Its value must be a positive integer. Default is 10.') parser.add_argument( '--container-threads', type=int, default=10, help='Number of threads to use for deleting containers. ' 'Its value must be a positive integer. Default is 10.') # We return the parser to build up the bash_completion if return_parser: return parser (options, args) = parse_args(parser, args) args = args[1:] if (not args and not options['yes_all']) or (args and options['yes_all']): output_manager.error('Usage: %s delete %s\n%s', BASENAME, st_delete_options, st_delete_help) return if options['object_threads'] <= 0: output_manager.error( 'ERROR: option --object-threads should be a positive integer.' 
'\n\nUsage: %s delete %s\n%s', BASENAME, st_delete_options, st_delete_help) return if options['container_threads'] <= 0: output_manager.error( 'ERROR: option --container-threads should be a positive integer.' '\n\nUsage: %s delete %s\n%s', BASENAME, st_delete_options, st_delete_help) return options['object_dd_threads'] = options['object_threads'] with SwiftService(options=options) as swift: try: if not args: del_iter = swift.delete() else: container = args[0] if '/' in container: output_manager.error( 'WARNING: / in container name; you ' "might have meant '%s' instead of '%s'." % (container.replace('/', ' ', 1), container) ) return objects = args[1:] if objects: del_iter = swift.delete(container=container, objects=objects) else: del_iter = swift.delete(container=container) for r in del_iter: c = r.get('container', '') o = r.get('object', '') a = (' [after {0} attempts]'.format(r.get('attempts')) if r.get('attempts', 1) > 1 else '') if r['action'] == 'bulk_delete': if r['success']: objs = r.get('objects', []) for o, err in r.get('result', {}).get('Errors', []): # o will be of the form quote("//") o = unquote(o) if PY2: # In PY3, unquote(unicode) uses utf-8 like we # want, but PY2 uses latin-1 o = o.encode('latin-1').decode('utf-8') output_manager.error('Error Deleting: {0}: {1}' .format(o[1:], err)) try: objs.remove(o[len(c) + 2:]) except ValueError: # shouldn't happen, but ignoring it won't hurt pass for o in objs: if options['yes_all']: p = '{0}/{1}'.format(c, o) else: p = o output_manager.print_msg('{0}{1}'.format(p, a)) else: for o in r.get('objects', []): output_manager.error('Error Deleting: {0}/{1}: {2}' .format(c, o, r['error'])) else: if r['success']: if options['verbose']: if r['action'] == 'delete_object': if options['yes_all']: p = '{0}/{1}'.format(c, o) else: p = o elif r['action'] == 'delete_segment': p = '{0}/{1}'.format(c, o) elif r['action'] == 'delete_container': p = c output_manager.print_msg('{0}{1}'.format(p, a)) else: p = '{0}/{1}'.format(c, o) 
if o else c output_manager.error('Error Deleting: {0}: {1}' .format(p, r['error'])) except SwiftError as err: output_manager.error(err.value) st_download_options = '''[--all] [--marker ] [--prefix ] [--output ] [--output-dir ] [--object-threads ] [--ignore-checksum] [--container-threads ] [--no-download] [--skip-identical] [--remove-prefix] [--header ] [--no-shuffle] [ [] [...]] ''' st_download_help = ''' Download objects from containers. Positional arguments: [] Name of container to download from. To download a whole account, omit this and specify --all. [] Name of object to download. Specify multiple times for multiple objects. Omit this to download all objects from the container. Optional arguments: -a, --all Indicates that you really want to download everything in the account. -m, --marker Marker to use when starting a container or account download. -p, --prefix Only download items beginning with -r, --remove-prefix An optional flag for --prefix , use this option to download items without -o, --output For a single file download, stream the output to . Specifying "-" as will redirect to stdout. -D, --output-dir An optional directory to which to store objects. By default, all objects are recreated in the current directory. --object-threads Number of threads to use for downloading objects. Default is 10. --container-threads Number of threads to use for downloading containers. Default is 10. --no-download Perform download(s), but don't actually write anything to disk. -H, --header Adds a customized request header to the query, like "Range" or "If-Match". This option may be repeated. Example: --header "content-type:text/plain" --skip-identical Skip downloading files that are identical on both sides. --ignore-checksum Turn off checksum validation for downloads. 
  --no-shuffle          By default, when downloading a complete account or
                        container, download order is randomised in order to
                        reduce the load on individual drives when multiple
                        clients are executed simultaneously to download the
                        same set of objects (e.g. a nightly automated download
                        script to multiple servers). Enable this option to
                        submit download jobs to the thread pool in the order
                        they are listed in the object store.
  --ignore-mtime        Ignore the 'X-Object-Meta-Mtime' header when
                        downloading an object. Instead, create atime and
                        mtime with fresh timestamps.
'''.strip("\n")


def st_download(parser, args, output_manager, return_parser=False):
    # Handler for ``swift download``: registers the subcommand's CLI options
    # on ``parser`` and, unless only the populated parser is wanted (for
    # bash completion), validates the options and streams the requested
    # account/container/object downloads through a SwiftService.
    parser.add_argument(
        '-a', '--all', action='store_true', dest='yes_all',
        default=False, help='Indicates that you really want to download '
        'everything in the account.')
    parser.add_argument(
        '-m', '--marker', dest='marker',
        default='', help='Marker to use when starting a container or '
        'account download.')
    parser.add_argument(
        '-p', '--prefix', dest='prefix',
        help='Only download items beginning with the <prefix>.')
    parser.add_argument(
        '-o', '--output', dest='out_file', help='For a single '
        'download, stream the output to <out_file>. '
        'Specifying "-" as <out_file> will redirect to stdout.')
    parser.add_argument(
        '-D', '--output-dir', dest='out_directory',
        help='An optional directory to which to store objects. '
        'By default, all objects are recreated in the current directory.')
    parser.add_argument(
        '-r', '--remove-prefix', action='store_true', dest='remove_prefix',
        default=False, help='An optional flag for --prefix <prefix>, '
        'use this option to download items without <prefix>.')
    parser.add_argument(
        '--object-threads', type=int,
        default=10, help='Number of threads to use for downloading objects. '
        'Its value must be a positive integer. Default is 10.')
    parser.add_argument(
        '--container-threads', type=int, default=10,
        help='Number of threads to use for downloading containers. '
        'Its value must be a positive integer. Default is 10.')
    parser.add_argument(
        '--no-download', action='store_true',
        default=False,
        help="Perform download(s), but don't actually write anything to disk.")
    parser.add_argument(
        '-H', '--header', action='append', dest='header',
        default=[],
        help='Adds a customized request header to the query, like "Range" or '
        '"If-Match". This option may be repeated. '
        'Example: --header "content-type:text/plain"')
    parser.add_argument(
        '--skip-identical', action='store_true', dest='skip_identical',
        default=False, help='Skip downloading files that are identical on '
        'both sides.')
    parser.add_argument(
        '--ignore-checksum', action='store_false', dest='checksum',
        default=True, help='Turn off checksum validation for downloads.')
    parser.add_argument(
        '--no-shuffle', action='store_false', dest='shuffle',
        default=True, help='By default, download order is randomised in order '
        'to reduce the load on individual drives when multiple clients are '
        'executed simultaneously to download the same set of objects (e.g. a '
        'nightly automated download script to multiple servers). Enable this '
        'option to submit download jobs to the thread pool in the order they '
        'are listed in the object store.')
    parser.add_argument(
        '--ignore-mtime', action='store_true', dest='ignore_mtime',
        default=False, help='By default, the object-meta-mtime header is used '
        'to store the access and modified timestamp for the downloaded file. '
        'With this option, the header is ignored and the timestamps are '
        'created freshly.')

    # We return the parser to build up the bash_completion
    if return_parser:
        return parser

    (options, args) = parse_args(parser, args)
    args = args[1:]
    # Streaming to stdout must not be polluted by verbose progress output.
    if options['out_file'] == '-':
        options['verbose'] = 0

    if options['out_file'] and len(args) != 2:
        exit('-o option only allowed for single file downloads')

    # --remove-prefix is meaningless without --prefix; silently disable it.
    if not options['prefix']:
        options['remove_prefix'] = False

    if options['out_directory'] and len(args) == 2:
        exit('Please use -o option for single file downloads and renames')

    # Either download everything (-a with no positional args) or download
    # specific container/objects (positional args without -a) -- never both.
    if (not args and not options['yes_all']) or (args and options['yes_all']):
        output_manager.error('Usage: %s download %s\n%s', BASENAME,
                             st_download_options, st_download_help)
        return

    if options['object_threads'] <= 0:
        output_manager.error(
            'ERROR: option --object-threads should be a positive integer.\n\n'
            'Usage: %s download %s\n%s', BASENAME,
            st_download_options, st_download_help)
        return

    if options['container_threads'] <= 0:
        output_manager.error(
            'ERROR: option --container-threads should be a positive integer.'
            '\n\nUsage: %s download %s\n%s', BASENAME,
            st_download_options, st_download_help)
        return

    # SwiftService expects the download thread count under this key.
    options['object_dd_threads'] = options['object_threads']
    with SwiftService(options=options) as swift:
        try:
            if not args:
                down_iter = swift.download()
            else:
                container = args[0]
                if '/' in container:
                    output_manager.error(
                        'WARNING: / in container name; you '
                        "might have meant '%s' instead of '%s'." %
                        (container.replace('/', ' ', 1), container))
                    return
                objects = args[1:]
                if not objects:
                    down_iter = swift.download(container)
                else:
                    down_iter = swift.download(container, objects)

            # Iterate per-object download results as they complete.
            for down in down_iter:
                if options['out_file'] == '-' and 'contents' in down:
                    contents = down['contents']
                    for chunk in contents:
                        output_manager.print_raw(chunk)
                else:
                    if down['success']:
                        if options['verbose']:
                            # Derive per-object timing stats from the
                            # timestamps reported by SwiftService.
                            start_time = down['start_time']
                            headers_receipt = \
                                down['headers_receipt'] - start_time
                            auth_time = down['auth_end_time'] - start_time
                            finish_time = down['finish_time']
                            read_length = down['read_length']
                            attempts = down['attempts']
                            total_time = finish_time - start_time
                            down_time = total_time - auth_time
                            _mega = 1000000
                            if down['pseudodir']:
                                time_str = (
                                    'auth %.3fs, headers %.3fs, total %.3fs, '
                                    'pseudo' % (
                                        auth_time, headers_receipt,
                                        total_time
                                    )
                                )
                            else:
                                speed = float(read_length) / down_time / _mega
                                time_str = (
                                    'auth %.3fs, headers %.3fs, total %.3fs, '
                                    '%.3f MB/s' % (
                                        auth_time, headers_receipt,
                                        total_time, speed
                                    )
                                )
                            path = down['path']
                            if attempts > 1:
                                output_manager.print_msg(
                                    '%s [%s after %d attempts]',
                                    path, time_str, attempts
                                )
                            else:
                                output_manager.print_msg(
                                    '%s [%s]', path, time_str
                                )
                    else:
                        error = down['error']
                        path = down['path']
                        container = down['container']
                        obj = down['object']
                        if isinstance(error, ClientException):
                            # 304 with --skip-identical is expected, not
                            # an error; 404 gets a friendlier message.
                            if error.http_status == 304 and \
                                    options['skip_identical']:
                                output_manager.print_msg(
                                    "Skipped identical file '%s'", path)
                                continue
                            if error.http_status == 404:
                                output_manager.error(
                                    "Object '%s/%s' not found",
                                    container, obj)
                                continue
                        output_manager.error(
                            "Error downloading object '%s/%s': %s",
                            container, obj, error)
        except SwiftError as e:
            output_manager.error(e.value)
        except Exception as e:
            output_manager.error(e)


st_list_options = '''[--long] [--lh] [--totals] [--prefix <prefix>]
                  [--delimiter <delimiter>] [--header <header:value>]
                  [<container>]
'''

st_list_help = '''
Lists the containers for the account or the objects for a container.
Positional arguments:
  [<container>]           Name of container to list object in.

Optional arguments:
  -l, --long            Long listing format, similar to ls -l.
  --lh                  Report sizes in human readable format similar to
                        ls -lh.
  -t, --totals          Used with -l or --lh, only report totals.
  -p <prefix>, --prefix <prefix>
                        Only list items beginning with the prefix.
  -d <delim>, --delimiter <delim>
                        Roll up items with the given delimiter. For containers
                        only. See OpenStack Swift API documentation for
                        what this means.
  -H, --header          Adds a custom request header to use for listing.
'''.strip('\n')


def st_list(parser, args, output_manager, return_parser=False):
    # Handler for ``swift list``: lists the account's containers, or a
    # single container's objects, in plain, long (-l/--lh) or JSON form.

    def _print_stats(options, stats, human):
        # Pretty-print one page of listing results; accumulates per-page
        # byte/object totals so a trailing summary can be emitted for
        # long-format listings.
        total_count = total_bytes = 0
        container = stats.get("container", None)
        for item in stats["listing"]:
            item_name = item.get('name')
            if not options['long'] and not human:
                output_manager.print_msg(
                    item.get('name', item.get('subdir')))
            else:
                if not container:    # listing containers
                    item_bytes = item.get('bytes')
                    byte_str = prt_bytes(item_bytes, human)
                    count = item.get('count')
                    total_count += count
                    try:
                        meta = item.get('meta')
                        utc = gmtime(float(meta.get('x-timestamp')))
                        datestamp = strftime('%Y-%m-%d %H:%M:%S', utc)
                    except TypeError:
                        # No (or malformed) x-timestamp available.
                        datestamp = '????-??-?? ??:??:??'
                    if not options['totals']:
                        output_manager.print_msg(
                            "%5s %s %s %s", count, byte_str,
                            datestamp, item_name)
                else:    # list container contents
                    subdir = item.get('subdir')
                    content_type = item.get('content_type')
                    if subdir is None:
                        item_bytes = item.get('bytes')
                        byte_str = prt_bytes(item_bytes, human)
                        date, xtime = item.get(
                            'last_modified').split('T')
                        xtime = xtime.split('.')[0]
                    else:
                        # Delimiter roll-up entry (pseudo-directory).
                        item_bytes = 0
                        byte_str = prt_bytes(item_bytes, human)
                        date = xtime = ''
                        item_name = subdir
                    if not options['totals']:
                        output_manager.print_msg(
                            "%s %10s %8s %24s %s",
                            byte_str, date, xtime, content_type,
                            item_name)
                total_bytes += item_bytes

        # report totals
        if options['long'] or human:
            if not container:
                output_manager.print_msg(
                    "%5s %s", prt_bytes(total_count, True),
                    prt_bytes(total_bytes, human))
            else:
                output_manager.print_msg(
                    prt_bytes(total_bytes, human))

    parser.add_argument(
        '-l', '--long', dest='long', action='store_true', default=False,
        help='Long listing format, similar to ls -l.')
    parser.add_argument(
        '--lh', dest='human', action='store_true',
        default=False, help='Report sizes in human readable format, '
        "similar to ls -lh.")
    parser.add_argument(
        '-t', '--totals', dest='totals',
        help='used with -l or --lh, only report totals.',
        action='store_true', default=False)
    parser.add_argument(
        '-p', '--prefix', dest='prefix',
        help='Only list items beginning with the prefix.')
    parser.add_argument(
        '-d', '--delimiter', dest='delimiter',
        help='Roll up items with the given delimiter. For containers '
             'only. See OpenStack Swift API documentation for '
             'what this means.')
    parser.add_argument('-j', '--json', action='store_true',
                        help='print listing information in json')
    parser.add_argument(
        '-H', '--header', action='append', dest='header', default=[],
        help='Adds a custom request header to use for listing.')

    # We return the parser to build up the bash_completion
    if return_parser:
        return parser

    options, args = parse_args(parser, args)
    args = args[1:]
    if options['delimiter'] and not args:
        exit('-d option only allowed for container listings')

    # --lh implies long format.
    human = options.pop('human')
    if human:
        options['long'] = True

    if options['totals'] and not options['long']:
        output_manager.error(
            "Listing totals only works with -l or --lh.")
        return

    with SwiftService(options=options) as swift:
        try:
            if not args:
                stats_parts_gen = swift.list()
            else:
                container = args[0]
                args = args[1:]
                if "/" in container or args:
                    output_manager.error(
                        'Usage: %s list %s\n%s', BASENAME,
                        st_list_options, st_list_help)
                    return
                else:
                    stats_parts_gen = swift.list(container=container)

            if options.get('json', False):
                def listing(stats_parts_gen=stats_parts_gen):
                    # Flatten the paged results into a single item stream
                    # for incremental JSON serialization.
                    for stats in stats_parts_gen:
                        if stats["success"]:
                            for item in stats['listing']:
                                yield item
                        else:
                            raise stats["error"]

                json.dump(
                    JSONableIterable(listing()),
                    output_manager.print_stream,
                    sort_keys=True, indent=2)
                output_manager.print_msg('')
                return

            for stats in stats_parts_gen:
                if stats["success"]:
                    _print_stats(options, stats, human)
                else:
                    raise stats["error"]
        except SwiftError as e:
            output_manager.error(e.value)


st_stat_options = '''[--lh] [--header <header:value>]
               [<container> [<object>]]
'''

st_stat_help = '''
Displays information for the account, container, or object.

Positional arguments:
  [<container>]           Name of container to stat from.
  [<object>]              Name of object to stat.

Optional arguments:
  --lh                  Report sizes in human readable format similar to
                        ls -lh.
  -H, --header          Adds a custom request header to use for stat.
'''.strip('\n')


def st_stat(parser, args, output_manager, return_parser=False):
    # Handler for ``swift stat``: shows stats for the account (no args),
    # a container (one arg), or a single object (two args).
    parser.add_argument(
        '--lh', dest='human', action='store_true', default=False,
        help='Report sizes in human readable format similar to ls -lh.')
    parser.add_argument(
        '-H', '--header', action='append', dest='header', default=[],
        help='Adds a custom request header to use for stat.')

    # We return the parser to build up the bash_completion
    if return_parser:
        return parser

    options, args = parse_args(parser, args)
    args = args[1:]
    with SwiftService(options=options) as swift:
        try:
            if not args:
                stat_result = swift.stat()
                if not stat_result['success']:
                    raise stat_result['error']
                items = stat_result['items']
                headers = stat_result['headers']
                print_account_stats(items, headers, output_manager)
            else:
                container = args[0]
                if '/' in container:
                    output_manager.error(
                        'WARNING: / in container name; you might have '
                        "meant '%s' instead of '%s'." %
                        (container.replace('/', ' ', 1), container))
                    return
                args = args[1:]
                if not args:
                    stat_result = swift.stat(container=container)
                    if not stat_result['success']:
                        raise stat_result['error']
                    items = stat_result['items']
                    headers = stat_result['headers']
                    print_container_stats(items, headers,
                                          output_manager)
                else:
                    if len(args) == 1:
                        objects = [args[0]]
                        stat_results = swift.stat(
                            container=container, objects=objects)
                        for stat_result in stat_results:  # only 1 result
                            if stat_result["success"]:
                                items = stat_result['items']
                                headers = stat_result['headers']
                                print_object_stats(
                                    items, headers, output_manager
                                )
                            else:
                                raise(stat_result["error"])
                    else:
                        output_manager.error(
                            'Usage: %s stat %s\n%s', BASENAME,
                            st_stat_options, st_stat_help)
        except SwiftError as e:
            output_manager.error(e.value)


st_post_options = '''[--read-acl <acl>] [--write-acl <acl>]
               [--sync-to <sync-to>] [--sync-key <sync-key>]
               [--meta <name:value>] [--header <header>]
               [<container> [<object>]]
'''

st_post_help = '''
Updates meta information for the account, container, or object.
If the container is not found, it will be created automatically.

Positional arguments:
  [<container>]           Name of container to post to.
  [<object>]              Name of object to post.

Optional arguments:
  -r, --read-acl <acl>  Read ACL for containers. Quick summary of ACL syntax:
                        .r:*, .r:-.example.com, .r:www.example.com,
                        account1 (v1.0 identity API only),
                        account1:*, account2:user2 (v2.0+ identity API).
  -w, --write-acl <acl> Write ACL for containers. Quick summary of ACL
                        syntax: account1 (v1.0 identity API only),
                        account1:*, account2:user2 (v2.0+ identity API).
  -t, --sync-to <sync-to>
                        Sync To for containers, for multi-cluster replication.
  -k, --sync-key <sync-key>
                        Sync Key for containers, for multi-cluster
                        replication.
  -m, --meta <name:value>
                        Sets a meta data item. This option may be repeated.
                        Example: -m Color:Blue -m Size:Large
  -H, --header <header> Adds a customized request header.
                        This option may be repeated.
                        Example -H "content-type:text/plain" -H
                        "Content-Length: 4000"
'''.strip('\n')


def st_post(parser, args, output_manager, return_parser=False):
    # Handler for ``swift post``: updates metadata/ACLs on the account,
    # a container, or a single object; creates missing containers.
    parser.add_argument(
        '-r', '--read-acl', dest='read_acl', help='Read ACL for '
        'containers. Quick summary of ACL syntax: .r:*, .r:-.example.com, '
        '.r:www.example.com, account1, account2:user2')
    parser.add_argument(
        '-w', '--write-acl', dest='write_acl', help='Write ACL for '
        'containers. Quick summary of ACL syntax: account1, '
        'account2:user2')
    parser.add_argument(
        '-t', '--sync-to', dest='sync_to', help='Sets the '
        'Sync To for containers, for multi-cluster replication.')
    parser.add_argument(
        '-k', '--sync-key', dest='sync_key', help='Sets the '
        'Sync Key for containers, for multi-cluster replication.')
    parser.add_argument(
        '-m', '--meta', action='append', dest='meta', default=[],
        help='Sets a meta data item. This option may be repeated. '
        'Example: -m Color:Blue -m Size:Large')
    parser.add_argument(
        '-H', '--header', action='append', dest='header',
        default=[], help='Adds a customized request header. '
        'This option may be repeated. '
        'Example: -H "content-type:text/plain" '
        '-H "Content-Length: 4000"')

    # We return the parser to build up the bash_completion
    if return_parser:
        return parser

    (options, args) = parse_args(parser, args)
    args = args[1:]
    # Container-level options make no sense for an account-level post.
    if (options['read_acl'] or options['write_acl'] or options['sync_to']
            or options['sync_key']) and not args:
        exit('-r, -w, -t, and -k options only allowed for containers')

    with SwiftService(options=options) as swift:
        try:
            if not args:
                result = swift.post()
            else:
                container = args[0]
                if '/' in container:
                    output_manager.error(
                        'WARNING: / in container name; you might have '
                        "meant '%s' instead of '%s'." %
                        (args[0].replace('/', ' ', 1), args[0]))
                    return
                args = args[1:]
                if args:
                    if len(args) == 1:
                        objects = [args[0]]
                        results_iterator = swift.post(
                            container=container, objects=objects
                        )
                        result = next(results_iterator)
                    else:
                        output_manager.error(
                            'Usage: %s post %s\n%s', BASENAME,
                            st_post_options, st_post_help)
                        return
                else:
                    result = swift.post(container=container)
            if not result["success"]:
                raise(result["error"])
        except SwiftError as e:
            output_manager.error(e.value)


st_copy_options = '''[--destination </container/object>] [--fresh-metadata]
                  [--meta <name:value>] [--header <header>] <container>
                  <object> [<object>] [...]
'''

st_copy_help = '''
Copies object to new destination, optionally updates objects metadata.
If destination is not set, will update metadata of object

Positional arguments:
  <container>             Name of container to copy from.
  <object>                Name of object to copy. Specify multiple times
                          for multiple objects

Optional arguments:
  -d, --destination </container[/object]>
                        The container and name of the destination object. Name
                        of destination object can be omitted, then will be
                        same as name of source object. Supplying multiple
                        objects and destination with object name is invalid.
  -M, --fresh-metadata  Copy the object without any existing metadata,
                        If not set, metadata will be preserved or appended
  -m, --meta <name:value>
                        Sets a meta data item. This option may be repeated.
                        Example: -m Color:Blue -m Size:Large
  -H, --header <header> Adds a customized request header.
                        This option may be repeated.
                        Example -H "content-type:text/plain" -H
                        "Content-Length: 4000"
'''.strip('\n')


def st_copy(parser, args, output_manager, return_parser=False):
    # Handler for ``swift copy``: server-side copies one or more objects,
    # optionally to a different destination container/object name.
    parser.add_argument(
        '-d', '--destination', help='The container and name of the '
        'destination object')
    parser.add_argument(
        '-M', '--fresh-metadata', action='store_true',
        help='Copy the object without any existing metadata',
        default=False)
    parser.add_argument(
        '-m', '--meta', action='append', dest='meta', default=[],
        help='Sets a meta data item. This option may be repeated. '
        'Example: -m Color:Blue -m Size:Large')
    parser.add_argument(
        '-H', '--header', action='append', dest='header', default=[],
        help='Adds a customized request header. '
        'This option may be repeated. '
        'Example: -H "content-type:text/plain" '
        '-H "Content-Length: 4000"')

    # We return the parser to build up the bash_completion
    if return_parser:
        return parser

    (options, args) = parse_args(parser, args)
    args = args[1:]

    with SwiftService(options=options) as swift:
        try:
            if len(args) >= 2:
                container = args[0]
                if '/' in container:
                    output_manager.error(
                        'WARNING: / in container name; you might have '
                        "meant '%s' instead of '%s'." %
                        (args[0].replace('/', ' ', 1), args[0]))
                    return
                objects = [arg for arg in args[1:]]

                for r in swift.copy(
                        container=container, objects=objects,
                        options=options):
                    if r['success']:
                        if options['verbose']:
                            if r['action'] == 'copy_object':
                                output_manager.print_msg(
                                    '%s/%s copied to %s' % (
                                        r['container'],
                                        r['object'],
                                        r['destination'] or '<self>'))
                            if r['action'] == 'create_container':
                                output_manager.print_msg(
                                    'created container %s' % r['container']
                                )
                    else:
                        error = r['error']
                        if 'action' in r and r['action'] == \
                                'create_container':
                            # it is not an error to be unable to create the
                            # container so print a warning and carry on
                            output_manager.warning(
                                'Warning: failed to create container '
                                "'%s': %s", container, error
                            )
                        else:
                            output_manager.error("%s" % error)
            else:
                output_manager.error(
                    'Usage: %s copy %s\n%s', BASENAME,
                    st_copy_options, st_copy_help)
                return
        except SwiftError as e:
            output_manager.error(e.value)


st_upload_options = '''[--changed] [--skip-identical] [--segment-size <size>]
                    [--segment-container <container>] [--leave-segments]
                    [--object-threads <thread>]
                    [--segment-threads <threads>] [--meta <name:value>]
                    [--header <header>] [--use-slo] [--ignore-checksum]
                    [--object-name <object-name>]
                    <container> <file_or_directory> [<file_or_directory>]
                    [...]
'''

st_upload_help = '''
Uploads specified files and directories to the given container.

Positional arguments:
  <container>             Name of container to upload to.
  <file_or_directory>     Name of file or directory to upload. Specify
                          multiple times for multiple uploads. If "-" is
                          specified, reads content from standard input
                          (--object-name is required in this case).

Optional arguments:
  -c, --changed         Only upload files that have changed since the last
                        upload.
  --skip-identical      Skip uploading files that are identical on both
                        sides.
  -S, --segment-size <size>
                        Upload files in segments no larger than <size> (in
                        Bytes) and then create a "manifest" file that will
                        download all the segments as if it were the original
                        file.
  --segment-container <container>
                        Upload the segments into the specified container. If
                        not specified, the segments will be uploaded to a
                        <container>_segments container to not pollute the
                        main <container> listings.
  --leave-segments      Indicates that you want the older segments of
                        manifest objects left alone (in the case of
                        overwrites).
  --object-threads <threads>
                        Number of threads to use for uploading full objects.
                        Default is 10.
  --segment-threads <threads>
                        Number of threads to use for uploading object
                        segments. Default is 10.
  -m, --meta <name:value>
                        Sets a meta data item. This option may be repeated.
                        Example: -m Color:Blue -m Size:Large
  -H, --header <header> Adds a customized request header. This option may be
                        repeated. Example: -H "content-type:text/plain"
                         -H "Content-Length: 4000".
  --use-slo             When used in conjunction with --segment-size it will
                        create a Static Large Object instead of the default
                        Dynamic Large Object.
  --object-name <object-name>
                        Upload file and name object to <object-name> or
                        upload dir and use <object-name> as object prefix
                        instead of folder name.
  --ignore-checksum     Turn off checksum validation for uploads.
'''.strip('\n')


def st_upload(parser, args, output_manager, return_parser=False):
    # Handler for ``swift upload``: uploads files, directory trees, or
    # stdin ("-") to a container, optionally segmenting large objects.
    # Segment size used when reading from stdin (stdin uploads are always
    # segmented SLOs because the total size is unknown up front).
    DEFAULT_STDIN_SEGMENT = 10 * 1024 * 1024

    parser.add_argument(
        '-c', '--changed', action='store_true', dest='changed',
        default=False, help='Only upload files that have changed since '
        'the last upload.')
    parser.add_argument(
        '--skip-identical', action='store_true', dest='skip_identical',
        default=False, help='Skip uploading files that are identical on '
        'both sides.')
    parser.add_argument(
        '-S', '--segment-size', dest='segment_size', help='Upload files '
        'in segments no larger than <size> (in Bytes) and then create a '
        '"manifest" file that will download all the segments as if it were '
        'the original file. Sizes may also be expressed as bytes with the '
        'B suffix, kilobytes with the K suffix, megabytes with the M suffix '
        'or gigabytes with the G suffix.')
    parser.add_argument(
        '-C', '--segment-container', dest='segment_container',
        help='Upload the segments into the specified container. '
        'If not specified, the segments will be uploaded to a '
        '<container>_segments container to not pollute the main '
        '<container> listings.')
    parser.add_argument(
        '--leave-segments', action='store_true',
        dest='leave_segments', default=False, help='Indicates that you want '
        'the older segments of manifest objects left alone (in the case of '
        'overwrites).')
    parser.add_argument(
        '--object-threads', type=int, default=10,
        help='Number of threads to use for uploading full objects. '
        'Its value must be a positive integer. Default is 10.')
    parser.add_argument(
        '--segment-threads', type=int, default=10,
        help='Number of threads to use for uploading object segments. '
        'Its value must be a positive integer. Default is 10.')
    parser.add_argument(
        '-m', '--meta', action='append', dest='meta', default=[],
        help='Sets a meta data item. This option may be repeated. '
        'Example: -m Color:Blue -m Size:Large')
    parser.add_argument(
        '-H', '--header', action='append', dest='header', default=[],
        help='Set request headers with the syntax header:value. '
        ' This option may be repeated. '
        'Example: -H "content-type:text/plain" '
        '-H "Content-Length: 4000"')
    parser.add_argument(
        '--use-slo', action='store_true', default=False,
        help='When used in conjunction with --segment-size, it will '
        'create a Static Large Object instead of the default '
        'Dynamic Large Object.')
    parser.add_argument(
        '--object-name', dest='object_name',
        help='Upload file and name object to <object-name> or upload dir and '
        'use <object-name> as object prefix instead of folder name.')
    parser.add_argument(
        '--ignore-checksum', dest='checksum', default=True,
        action='store_false',
        help='Turn off checksum validation for uploads.')

    # We return the parser to build up the bash_completion
    if return_parser:
        return parser

    options, args = parse_args(parser, args)
    args = args[1:]
    if len(args) < 2:
        output_manager.error(
            'Usage: %s upload %s\n%s', BASENAME, st_upload_options,
            st_upload_help)
        return
    else:
        container = args[0]
        files = args[1:]
        from_stdin = '-' in files
        # stdin must be the sole "file" -- it cannot be mixed with paths.
        if from_stdin and len(files) > 1:
            output_manager.error(
                'upload from stdin cannot be used along with other files')
            return

    if options['object_name'] is not None:
        if len(files) > 1:
            output_manager.error(
                'object-name only be used with 1 file or dir')
            return
        else:
            orig_path = files[0]
    elif from_stdin:
        output_manager.error(
            'object-name must be specified with uploads from stdin')
        return

    if options['segment_size']:
        try:
            # If segment size only has digits assume it is bytes
            int(options['segment_size'])
        except ValueError:
            # Otherwise expect a B/K/M/G suffix and convert to bytes.
            try:
                size_mod = "BKMG".index(
                    options['segment_size'][-1].upper())
                multiplier = int(options['segment_size'][:-1])
            except ValueError:
                output_manager.error("Invalid segment size")
                return

            options['segment_size'] = str((1024 ** size_mod) * multiplier)
        if int(options['segment_size']) <= 0:
            output_manager.error("segment-size should be positive")
            return

    if options['object_threads'] <= 0:
        output_manager.error(
            'ERROR: option --object-threads should be a positive integer.'
            '\n\nUsage: %s upload %s\n%s', BASENAME,
            st_upload_options, st_upload_help)
        return

    if options['segment_threads'] <= 0:
        output_manager.error(
            'ERROR: option --segment-threads should be a positive integer.'
            '\n\nUsage: %s upload %s\n%s', BASENAME,
            st_upload_options, st_upload_help)
        return

    if from_stdin:
        # Force SLO + a default segment size: total stdin length is
        # unknown, so the upload must be segmented.
        if not options['use_slo']:
            options['use_slo'] = True
        if not options['segment_size']:
            options['segment_size'] = DEFAULT_STDIN_SEGMENT

    # SwiftService expects the upload thread count under this key.
    options['object_uu_threads'] = options['object_threads']
    with SwiftService(options=options) as swift:
        try:
            objs = []
            dir_markers = []
            for f in files:
                if f == '-':
                    fd = io.open(stdin.fileno(), mode='rb')
                    objs.append(SwiftUploadObject(
                        fd, object_name=options['object_name']))
                    # We ensure that there is exactly one "file" to upload
                    # in this case -- stdin
                    break
                if isfile(f):
                    objs.append(f)
                elif isdir(f):
                    # Empty directories become explicit dir markers so the
                    # hierarchy is preserved in the object store.
                    for (_dir, _ds, _fs) in walk(f):
                        if not (_ds + _fs):
                            dir_markers.append(_dir)
                        else:
                            objs.extend([join(_dir, _f) for _f in _fs])
                else:
                    output_manager.error(
                        "Local file '%s' not found" % f)

            # Now that we've collected all the required files and dir
            # markers build the tuples for the call to upload
            if options['object_name'] is not None and not from_stdin:
                objs = [
                    SwiftUploadObject(
                        o, object_name=o.replace(
                            orig_path, options['object_name'], 1
                        )
                    ) for o in objs
                ]
                dir_markers = [
                    SwiftUploadObject(
                        None, object_name=d.replace(
                            orig_path, options['object_name'], 1
                        ), options={'dir_marker': True}
                    ) for d in dir_markers
                ]

            for r in swift.upload(container, objs + dir_markers):
                if r['success']:
                    if options['verbose']:
                        if 'attempts' in r and r['attempts'] > 1:
                            if 'object' in r:
                                output_manager.print_msg(
                                    '%s [after %d attempts]' %
                                    (r['object'], r['attempts'])
                                )
                        else:
                            if 'object' in r:
                                output_manager.print_msg(r['object'])
                            elif 'for_object' in r:
                                output_manager.print_msg(
                                    '%s segment %s' % (r['for_object'],
                                                       r['segment_index'])
                                )
                else:
                    error = r['error']
                    if 'action' in r and r['action'] == "create_container":
                        # it is not an error to be unable to create the
                        # container so print a warning and carry on
                        if isinstance(error, ClientException):
                            if (r['headers'] and
                                    'X-Storage-Policy' in r['headers']):
                                msg = ' with Storage Policy %s' % \
                                    r['headers']['X-Storage-Policy'].strip()
                            else:
                                msg = ' '.join(str(x) for x in (
                                    error.http_status, error.http_reason)
                                )
                                if error.http_response_content:
                                    if msg:
                                        msg += ': '
                                    msg += (error.http_response_content
                                            .decode('utf8')[:60])
                                msg = ': %s' % msg
                        else:
                            msg = ': %s' % error
                        output_manager.warning(
                            'Warning: failed to create container '
                            "'%s'%s", r['container'], msg
                        )
                    else:
                        output_manager.error("%s" % error)
                        # Suggest segmentation when the object was rejected
                        # for being too large (HTTP 413).
                        too_large = (isinstance(error, ClientException)
                                     and error.http_status == 413)
                        if too_large and options['verbose'] > 0:
                            output_manager.error(
                                "Consider using the --segment-size option "
                                "to chunk the object")
        except SwiftError as e:
            output_manager.error(e.value)


st_capabilities_options = '''[--json] [<proxy_url>]
'''
st_info_options = st_capabilities_options
st_capabilities_help = '''
Retrieve capability of the proxy.

Optional positional arguments:
  <proxy_url>           Proxy URL of the cluster to retrieve capabilities.

Optional arguments:
  --json                Print the cluster capabilities in JSON format.
'''.strip('\n')
st_info_help = st_capabilities_help


def st_capabilities(parser, args, output_manager, return_parser=False):
    # Handler for ``swift capabilities`` (aliased as ``swift info``):
    # fetches and prints the cluster's /info capabilities document.

    def _print_compo_cap(name, capabilities):
        # Print one group of capabilities ("Core" or middleware) with
        # their options, sorted for stable output.
        for feature, options in sorted(capabilities.items(),
                                       key=lambda x: x[0]):
            output_manager.print_msg("%s: %s" % (name, feature))
            if options:
                output_manager.print_msg(" Options:")
                for key, value in sorted(options.items(),
                                         key=lambda x: x[0]):
                    output_manager.print_msg("  %s: %s" % (key, value))

    parser.add_argument('--json', action='store_true',
                        help='print capability information in json')

    # We return the parser to build up the bash_completion
    if return_parser:
        return parser

    (options, args) = parse_args(parser, args)
    if args and len(args) > 2:
        output_manager.error('Usage: %s capabilities %s\n%s',
                             BASENAME,
                             st_capabilities_options, st_capabilities_help)
        return

    with SwiftService(options=options) as swift:
        try:
            if len(args) == 2:
                url = args[1]
                capabilities_result = swift.capabilities(url)
                capabilities = capabilities_result['capabilities']
            else:
                capabilities_result = swift.capabilities()
                capabilities = capabilities_result['capabilities']

            if options['json']:
                output_manager.print_msg(
                    json.dumps(capabilities, sort_keys=True, indent=2))
            else:
                # Show the core swift capabilities first, then everything
                # else as "Additional middleware".
                capabilities = dict(capabilities)
                _print_compo_cap('Core', {'swift': capabilities['swift']})
                del capabilities['swift']
                _print_compo_cap('Additional middleware', capabilities)
        except SwiftError as e:
            output_manager.error(e.value)


st_info = st_capabilities

st_auth_help = '''
Display auth related authentication variables in shell friendly format.

  Commands to run to export storage url and auth token into
  OS_STORAGE_URL and OS_AUTH_TOKEN:

      swift auth

  Commands to append to a runcom file (e.g. ~/.bashrc, /etc/profile)
  for automatic authentication:

      swift auth -v -U test:tester -K testing \
          -A http://localhost:8080/auth/v1.0
'''.strip('\n')


def st_auth(parser, args, thread_manager, return_parser=False):
    # Handler for ``swift auth``: prints shell ``export`` lines. With -v -v
    # it echoes the credential variables themselves; otherwise it actually
    # authenticates and prints the resulting storage URL and token.

    # We return the parser to build up the bash_completion
    if return_parser:
        return parser

    (options, args) = parse_args(parser, args)
    if options['verbose'] > 1:
        if options['auth_version'] in ('1', '1.0'):
            print('export ST_AUTH=%s' % sh_quote(options['auth']))
            print('export ST_USER=%s' % sh_quote(options['user']))
            print('export ST_KEY=%s' % sh_quote(options['key']))
        else:
            print('export OS_IDENTITY_API_VERSION=%s' % sh_quote(
                options['auth_version']))
            print('export OS_AUTH_VERSION=%s' % sh_quote(
                options['auth_version']))
            print('export OS_AUTH_URL=%s' % sh_quote(options['auth']))
            for k, v in sorted(options.items()):
                if v and k.startswith('os_') and \
                        k not in ('os_auth_url', 'os_options'):
                    print('export %s=%s' % (k.upper(), sh_quote(v)))
    else:
        conn = get_conn(options)
        url, token = conn.get_auth()
        print('export OS_STORAGE_URL=%s' % sh_quote(url))
        print('export OS_AUTH_TOKEN=%s' % sh_quote(token))


st_tempurl_options = '''[--absolute] [--prefix-based] [--iso8601]