python-swiftclient-2.0.3/0000775000175300017540000000000012304670716016531 5ustar jenkinsjenkins00000000000000python-swiftclient-2.0.3/tests/0000775000175300017540000000000012304670716017673 5ustar jenkinsjenkins00000000000000python-swiftclient-2.0.3/tests/test_multithreading.py0000664000175300017540000003305212304670635024327 0ustar jenkinsjenkins00000000000000# Copyright (c) 2010-2013 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import time import mock import testtools import threading from cStringIO import StringIO from Queue import Queue, Empty from swiftclient import multithreading as mt from swiftclient.exceptions import ClientException class ThreadTestCase(testtools.TestCase): def setUp(self): super(ThreadTestCase, self).setUp() self.got_args_kwargs = Queue() self.starting_thread_count = threading.active_count() def _func(self, q_item, *args, **kwargs): self.got_items.put(q_item) self.got_args_kwargs.put((args, kwargs)) if q_item == 'go boom': raise Exception('I went boom!') if q_item == 'c boom': raise ClientException( 'Client Boom', http_scheme='http', http_host='192.168.22.1', http_port=80, http_path='/booze', http_status=404, http_reason='to much', http_response_content='no sir!') return 'best result EVAR!' def assertQueueContains(self, queue, expected_contents): got_contents = [] try: while True: got_contents.append(queue.get(timeout=0.1)) except Empty: pass if isinstance(expected_contents, set): got_contents = set(got_contents) self.assertEqual(expected_contents, got_contents) class TestQueueFunctionThread(ThreadTestCase): def setUp(self): super(TestQueueFunctionThread, self).setUp() self.input_queue = Queue() self.got_items = Queue() self.stored_results = [] self.qft = mt.QueueFunctionThread(self.input_queue, self._func, 'one_arg', 'two_arg', red_fish='blue_arg', store_results=self.stored_results) self.qft.start() def tearDown(self): if self.qft.is_alive(): self.finish_up_thread() super(TestQueueFunctionThread, self).tearDown() def finish_up_thread(self): self.input_queue.put(mt.StopWorkerThreadSignal()) while self.qft.is_alive(): time.sleep(0.05) def test_plumbing_and_store_results(self): self.input_queue.put('abc') self.input_queue.put(123) self.finish_up_thread() self.assertQueueContains(self.got_items, ['abc', 123]) self.assertQueueContains(self.got_args_kwargs, [ (('one_arg', 'two_arg'), {'red_fish': 'blue_arg'}), (('one_arg', 'two_arg'), {'red_fish': 'blue_arg'})]) self.assertEqual(self.stored_results, ['best result EVAR!', 'best result EVAR!']) def test_exception_handling(self): self.input_queue.put('go boom') self.input_queue.put('ok') self.input_queue.put('go boom') self.finish_up_thread() self.assertQueueContains(self.got_items, ['go boom', 'ok', 'go boom']) self.assertEqual(len(self.qft.exc_infos), 2) self.assertEqual(Exception, self.qft.exc_infos[0][0]) self.assertEqual(Exception, self.qft.exc_infos[1][0]) self.assertEqual(('I went boom!',), self.qft.exc_infos[0][1].args) self.assertEqual(('I went boom!',), self.qft.exc_infos[1][1].args) class 
TestQueueFunctionManager(ThreadTestCase): def setUp(self): super(TestQueueFunctionManager, self).setUp() self.thread_manager = mock.create_autospec( mt.MultiThreadingManager, spec_set=True, instance=True) self.thread_count = 4 self.error_counter = [0] self.got_items = Queue() self.stored_results = [] self.qfq = mt.QueueFunctionManager( self._func, self.thread_count, self.thread_manager, thread_args=('1arg', '2arg'), thread_kwargs={'a': 'b', 'store_results': self.stored_results}, error_counter=self.error_counter, connection_maker=self.connection_maker) def connection_maker(self): return 'yup, I made a connection' def test_context_manager_without_error_counter(self): self.qfq = mt.QueueFunctionManager( self._func, self.thread_count, self.thread_manager, thread_args=('1arg', '2arg'), thread_kwargs={'a': 'b', 'store_results': self.stored_results}, connection_maker=self.connection_maker) with self.qfq as input_queue: self.assertEqual(self.starting_thread_count + self.thread_count, threading.active_count()) input_queue.put('go boom') self.assertEqual(self.starting_thread_count, threading.active_count()) error_strs = map(str, self.thread_manager.error.call_args_list) self.assertEqual(1, len(error_strs)) self.assertTrue('Exception: I went boom!' in error_strs[0]) def test_context_manager_without_conn_maker_or_error_counter(self): self.qfq = mt.QueueFunctionManager( self._func, self.thread_count, self.thread_manager, thread_args=('1arg', '2arg'), thread_kwargs={'a': 'b'}) with self.qfq as input_queue: self.assertEqual(self.starting_thread_count + self.thread_count, threading.active_count()) for i in range(20): input_queue.put('slap%d' % i) self.assertEqual(self.starting_thread_count, threading.active_count()) self.assertEqual([], self.thread_manager.error.call_args_list) self.assertEqual(0, self.error_counter[0]) self.assertQueueContains(self.got_items, set(['slap%d' % i for i in range(20)])) self.assertQueueContains( self.got_args_kwargs, [(('1arg', '2arg'), {'a': 'b'})] * 20) self.assertEqual(self.stored_results, []) def test_context_manager_with_exceptions(self): with self.qfq as input_queue: self.assertEqual(self.starting_thread_count + self.thread_count, threading.active_count()) for i in range(20): input_queue.put('item%d' % i if i % 2 == 0 else 'go boom') self.assertEqual(self.starting_thread_count, threading.active_count()) error_strs = map(str, self.thread_manager.error.call_args_list) self.assertEqual(10, len(error_strs)) self.assertTrue(all(['Exception: I went boom!' in s for s in error_strs])) self.assertEqual(10, self.error_counter[0]) expected_items = set(['go boom'] + ['item%d' % i for i in range(20) if i % 2 == 0]) self.assertQueueContains(self.got_items, expected_items) self.assertQueueContains( self.got_args_kwargs, [(('yup, I made a connection', '1arg', '2arg'), {'a': 'b'})] * 20) self.assertEqual(self.stored_results, ['best result EVAR!'] * 10) def test_context_manager_with_client_exceptions(self): with self.qfq as input_queue: self.assertEqual(self.starting_thread_count + self.thread_count, threading.active_count()) for i in range(20): input_queue.put('item%d' % i if i % 2 == 0 else 'c boom') self.assertEqual(self.starting_thread_count, threading.active_count()) error_strs = map(str, self.thread_manager.error.call_args_list) self.assertEqual(10, len(error_strs)) stringification = 'Client Boom: ' \ 'http://192.168.22.1:80/booze 404 to much no sir!' 
self.assertTrue(all([stringification in s for s in error_strs])) self.assertEqual(10, self.error_counter[0]) expected_items = set(['c boom'] + ['item%d' % i for i in range(20) if i % 2 == 0]) self.assertQueueContains(self.got_items, expected_items) self.assertQueueContains( self.got_args_kwargs, [(('yup, I made a connection', '1arg', '2arg'), {'a': 'b'})] * 20) self.assertEqual(self.stored_results, ['best result EVAR!'] * 10) def test_context_manager_with_connection_maker(self): with self.qfq as input_queue: self.assertEqual(self.starting_thread_count + self.thread_count, threading.active_count()) for i in range(20): input_queue.put('item%d' % i) self.assertEqual(self.starting_thread_count, threading.active_count()) self.assertEqual([], self.thread_manager.error.call_args_list) self.assertEqual(0, self.error_counter[0]) self.assertQueueContains(self.got_items, set(['item%d' % i for i in range(20)])) self.assertQueueContains( self.got_args_kwargs, [(('yup, I made a connection', '1arg', '2arg'), {'a': 'b'})] * 20) self.assertEqual(self.stored_results, ['best result EVAR!'] * 20) class TestMultiThreadingManager(ThreadTestCase): @mock.patch('swiftclient.multithreading.QueueFunctionManager') def test_instantiation(self, mock_qfq): thread_manager = mt.MultiThreadingManager() self.assertEqual([ mock.call(thread_manager._print, 1, thread_manager), mock.call(thread_manager._print_error, 1, thread_manager), ], mock_qfq.call_args_list) # These contexts don't get entered into until the # MultiThreadingManager's context is entered. self.assertEqual([], thread_manager.printer.__enter__.call_args_list) self.assertEqual([], thread_manager.error_printer.__enter__.call_args_list) # Test default values for the streams. self.assertEqual(sys.stdout, thread_manager.print_stream) self.assertEqual(sys.stderr, thread_manager.error_stream) @mock.patch('swiftclient.multithreading.QueueFunctionManager') def test_queue_manager_no_args(self, mock_qfq): thread_manager = mt.MultiThreadingManager() mock_qfq.reset_mock() mock_qfq.return_value = 'slap happy!' 
self.assertEqual( 'slap happy!', thread_manager.queue_manager(self._func, 88)) self.assertEqual([ mock.call(self._func, 88, thread_manager, thread_args=(), thread_kwargs={}, connection_maker=None, error_counter=None) ], mock_qfq.call_args_list) @mock.patch('swiftclient.multithreading.QueueFunctionManager') def test_queue_manager_with_args(self, mock_qfq): thread_manager = mt.MultiThreadingManager() mock_qfq.reset_mock() mock_qfq.return_value = 'do run run' self.assertEqual( 'do run run', thread_manager.queue_manager(self._func, 88, 'fun', times='are', connection_maker='abc', to='be had', error_counter='def')) self.assertEqual([ mock.call(self._func, 88, thread_manager, thread_args=('fun',), thread_kwargs={'times': 'are', 'to': 'be had'}, connection_maker='abc', error_counter='def') ], mock_qfq.call_args_list) def test_printers(self): out_stream = StringIO() err_stream = StringIO() with mt.MultiThreadingManager( print_stream=out_stream, error_stream=err_stream) as thread_manager: # Sanity-checking these gives power to the previous test which # looked at the default values of thread_manager.print/error_stream self.assertEqual(out_stream, thread_manager.print_stream) self.assertEqual(err_stream, thread_manager.error_stream) self.assertEqual(self.starting_thread_count + 2, threading.active_count()) thread_manager.print_msg('one-argument') thread_manager.print_msg('one %s, %d fish', 'fish', 88) thread_manager.error('I have %d problems, but a %s is not one', 99, u'\u062A\u062A') thread_manager.print_msg('some\n%s\nover the %r', 'where', u'\u062A\u062A') thread_manager.error('one-error-argument') thread_manager.error('Sometimes\n%.1f%% just\ndoes not\nwork!', 3.14159) self.assertEqual(self.starting_thread_count, threading.active_count()) out_stream.seek(0) self.assertEqual([ 'one-argument\n', 'one fish, 88 fish\n', 'some\n', 'where\n', "over the u'\\u062a\\u062a'\n", ], list(out_stream.readlines())) err_stream.seek(0) self.assertEqual([ u'I have 99 problems, but a \u062A\u062A is not one\n'.encode( 'utf8'), 'one-error-argument\n', 'Sometimes\n', '3.1% just\n', 'does not\n', 'work!\n', ], list(err_stream.readlines())) self.assertEqual(3, thread_manager.error_count) if __name__ == '__main__': testtools.main() python-swiftclient-2.0.3/tests/test_swiftclient.py0000664000175300017540000011306512304670635023645 0ustar jenkinsjenkins00000000000000# Copyright (c) 2010-2012 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
# TODO: More tests import mock import logging import socket import StringIO import testtools import warnings from urlparse import urlparse # TODO: mock http connection class with more control over headers from .utils import fake_http_connect, fake_get_keystoneclient_2_0 from swiftclient import client as c class TestClientException(testtools.TestCase): def test_is_exception(self): self.assertTrue(issubclass(c.ClientException, Exception)) def test_format(self): exc = c.ClientException('something failed') self.assertTrue('something failed' in str(exc)) test_kwargs = ( 'scheme', 'host', 'port', 'path', 'query', 'status', 'reason', 'device', ) for value in test_kwargs: kwargs = { 'http_%s' % value: value, } exc = c.ClientException('test', **kwargs) self.assertTrue(value in str(exc)) class TestJsonImport(testtools.TestCase): def tearDown(self): try: import json except ImportError: pass else: reload(json) try: import simplejson except ImportError: pass else: reload(simplejson) super(TestJsonImport, self).tearDown() def test_any(self): self.assertTrue(hasattr(c, 'json_loads')) def test_no_simplejson(self): # break simplejson try: import simplejson except ImportError: # not installed, so we don't have to break it for these tests pass else: delattr(simplejson, 'loads') reload(c) try: from json import loads except ImportError: # this case is stested in _no_json pass else: self.assertEqual(loads, c.json_loads) class MockHttpTest(testtools.TestCase): def setUp(self): super(MockHttpTest, self).setUp() def fake_http_connection(*args, **kwargs): _orig_http_connection = c.http_connection return_read = kwargs.get('return_read') query_string = kwargs.get('query_string') storage_url = kwargs.get('storage_url') def wrapper(url, proxy=None, cacert=None, insecure=False, ssl_compression=True): if storage_url: self.assertEqual(storage_url, url) parsed, _conn = _orig_http_connection(url, proxy=proxy) conn = fake_http_connect(*args, **kwargs)() def request(method, url, *args, **kwargs): if query_string: self.assertTrue(url.endswith('?' + query_string)) if url.endswith('invalid_cert') and not insecure: from swiftclient import client as c raise c.ClientException("invalid_certificate") return conn.request = request conn.has_been_read = False _orig_read = conn.read def read(*args, **kwargs): conn.has_been_read = True return _orig_read(*args, **kwargs) conn.read = return_read or read return parsed, conn return wrapper self.fake_http_connection = fake_http_connection def tearDown(self): super(MockHttpTest, self).tearDown() reload(c) class MockHttpResponse(): def __init__(self, status=0): self.status = status self.status_code = status self.reason = "OK" self.buffer = [] class Raw: def read(): pass self.raw = Raw() def read(self): return "" def getheader(self, name, default): return "" def getheaders(self): return {"key1": "value1", "key2": "value2"} def fake_response(self): return MockHttpResponse(self.status) def _fake_request(self, *arg, **kwarg): self.status = 200 # This simulate previous httplib implementation that would do a # putrequest() and then use putheader() to send header. 
for k, v in kwarg['headers'].iteritems(): self.buffer.append('%s: %s' % (k, v)) return self.fake_response() class TestHttpHelpers(MockHttpTest): def test_quote(self): value = 'standard string' self.assertEqual('standard%20string', c.quote(value)) value = u'\u0075nicode string' self.assertEqual('unicode%20string', c.quote(value)) def test_http_connection(self): url = 'http://www.test.com' _junk, conn = c.http_connection(url) self.assertTrue(isinstance(conn, c.HTTPConnection)) url = 'https://www.test.com' _junk, conn = c.http_connection(url) self.assertTrue(isinstance(conn, c.HTTPConnection)) url = 'ftp://www.test.com' self.assertRaises(c.ClientException, c.http_connection, url) def test_validate_headers(self): headers = {'key': 'value'} self.assertEqual(c.validate_headers(headers), None) headers = {'key': 'value1\nvalue2'} self.assertRaises(c.InvalidHeadersException, c.validate_headers, headers) headers = {'key': 'value1\rvalue2'} self.assertRaises(c.InvalidHeadersException, c.validate_headers, headers) def test_validate_headers_with_other_than_str(self): for t in (None, 1, 1.0, 1L, u"A"): self.assertEqual(c.validate_headers({'key': t}), None) # TODO: following tests are placeholders, need more tests, better coverage class TestGetAuth(MockHttpTest): def test_ok(self): c.http_connection = self.fake_http_connection(200) url, token = c.get_auth('http://www.test.com', 'asdf', 'asdf') self.assertEqual(url, None) self.assertEqual(token, None) def test_invalid_auth(self): c.http_connection = self.fake_http_connection(200) self.assertRaises(c.ClientException, c.get_auth, 'http://www.tests.com', 'asdf', 'asdf', auth_version="foo") def test_auth_v1(self): c.http_connection = self.fake_http_connection(200, auth_v1=True) url, token = c.get_auth('http://www.test.com', 'asdf', 'asdf', auth_version="1.0") self.assertEqual(url, 'storageURL') self.assertEqual(token, 'someauthtoken') def test_auth_v1_insecure(self): c.http_connection = self.fake_http_connection(200, auth_v1=True) url, token = c.get_auth('http://www.test.com/invalid_cert', 'asdf', 'asdf', auth_version='1.0', insecure=True) self.assertEqual(url, 'storageURL') self.assertEqual(token, 'someauthtoken') self.assertRaises(c.ClientException, c.get_auth, 'http://www.test.com/invalid_cert', 'asdf', 'asdf', auth_version='1.0') def test_auth_v2(self): os_options = {'tenant_name': 'asdf'} c.get_keystoneclient_2_0 = fake_get_keystoneclient_2_0(os_options) url, token = c.get_auth('http://www.test.com', 'asdf', 'asdf', os_options=os_options, auth_version="2.0") self.assertTrue(url.startswith("http")) self.assertTrue(token) def test_auth_v2_no_tenant_name(self): c.get_keystoneclient_2_0 = fake_get_keystoneclient_2_0({}) self.assertRaises(c.ClientException, c.get_auth, 'http://www.tests.com', 'asdf', 'asdf', os_options={}, auth_version='2.0') def test_auth_v2_with_tenant_user_in_user(self): tenant_option = {'tenant_name': 'foo'} c.get_keystoneclient_2_0 = fake_get_keystoneclient_2_0(tenant_option) url, token = c.get_auth('http://www.test.com', 'foo:bar', 'asdf', os_options={}, auth_version="2.0") self.assertTrue(url.startswith("http")) self.assertTrue(token) def test_auth_v2_tenant_name_no_os_options(self): tenant_option = {'tenant_name': 'asdf'} c.get_keystoneclient_2_0 = fake_get_keystoneclient_2_0(tenant_option) url, token = c.get_auth('http://www.test.com', 'asdf', 'asdf', tenant_name='asdf', os_options={}, auth_version="2.0") self.assertTrue(url.startswith("http")) self.assertTrue(token) def test_auth_v2_with_os_options(self): os_options = 
{'service_type': 'object-store', 'endpoint_type': 'internalURL', 'tenant_name': 'asdf'} c.get_keystoneclient_2_0 = fake_get_keystoneclient_2_0(os_options) url, token = c.get_auth('http://www.test.com', 'asdf', 'asdf', os_options=os_options, auth_version="2.0") self.assertTrue(url.startswith("http")) self.assertTrue(token) def test_auth_v2_with_tenant_user_in_user_no_os_options(self): tenant_option = {'tenant_name': 'foo'} c.get_keystoneclient_2_0 = fake_get_keystoneclient_2_0(tenant_option) url, token = c.get_auth('http://www.test.com', 'foo:bar', 'asdf', auth_version="2.0") self.assertTrue(url.startswith("http")) self.assertTrue(token) def test_auth_v2_with_os_region_name(self): os_options = {'region_name': 'good-region', 'tenant_name': 'asdf'} c.get_keystoneclient_2_0 = fake_get_keystoneclient_2_0(os_options) url, token = c.get_auth('http://www.test.com', 'asdf', 'asdf', os_options=os_options, auth_version="2.0") self.assertTrue(url.startswith("http")) self.assertTrue(token) def test_auth_v2_no_endpoint(self): os_options = {'region_name': 'unknown_region', 'tenant_name': 'asdf'} c.get_keystoneclient_2_0 = fake_get_keystoneclient_2_0( os_options, c.ClientException) self.assertRaises(c.ClientException, c.get_auth, 'http://www.tests.com', 'asdf', 'asdf', os_options=os_options, auth_version='2.0') def test_auth_v2_ks_exception(self): c.get_keystoneclient_2_0 = fake_get_keystoneclient_2_0( {}, c.ClientException) self.assertRaises(c.ClientException, c.get_auth, 'http://www.tests.com', 'asdf', 'asdf', os_options={}, auth_version='2.0') def test_auth_v2_cacert(self): os_options = {'tenant_name': 'foo'} c.get_keystoneclient_2_0 = fake_get_keystoneclient_2_0( os_options, None) auth_url_secure = 'https://www.tests.com' auth_url_insecure = 'https://www.tests.com/self-signed-certificate' url, token = c.get_auth(auth_url_secure, 'asdf', 'asdf', os_options=os_options, auth_version='2.0', insecure=False) self.assertTrue(url.startswith("http")) self.assertTrue(token) url, token = c.get_auth(auth_url_insecure, 'asdf', 'asdf', os_options=os_options, auth_version='2.0', cacert='ca.pem', insecure=False) self.assertTrue(url.startswith("http")) self.assertTrue(token) self.assertRaises(c.ClientException, c.get_auth, auth_url_insecure, 'asdf', 'asdf', os_options=os_options, auth_version='2.0') self.assertRaises(c.ClientException, c.get_auth, auth_url_insecure, 'asdf', 'asdf', os_options=os_options, auth_version='2.0', insecure=False) def test_auth_v2_insecure(self): os_options = {'tenant_name': 'foo'} c.get_keystoneclient_2_0 = fake_get_keystoneclient_2_0( os_options, None) auth_url_secure = 'https://www.tests.com' auth_url_insecure = 'https://www.tests.com/invalid-certificate' url, token = c.get_auth(auth_url_secure, 'asdf', 'asdf', os_options=os_options, auth_version='2.0') self.assertTrue(url.startswith("http")) self.assertTrue(token) url, token = c.get_auth(auth_url_insecure, 'asdf', 'asdf', os_options=os_options, auth_version='2.0', insecure=True) self.assertTrue(url.startswith("http")) self.assertTrue(token) self.assertRaises(c.ClientException, c.get_auth, auth_url_insecure, 'asdf', 'asdf', os_options=os_options, auth_version='2.0') self.assertRaises(c.ClientException, c.get_auth, auth_url_insecure, 'asdf', 'asdf', os_options=os_options, auth_version='2.0', insecure=False) class TestGetAccount(MockHttpTest): def test_no_content(self): c.http_connection = self.fake_http_connection(204) value = c.get_account('http://www.test.com', 'asdf')[1] self.assertEqual(value, []) def test_param_marker(self): 
c.http_connection = self.fake_http_connection( 204, query_string="format=json&marker=marker") c.get_account('http://www.test.com', 'asdf', marker='marker') def test_param_limit(self): c.http_connection = self.fake_http_connection( 204, query_string="format=json&limit=10") c.get_account('http://www.test.com', 'asdf', limit=10) def test_param_prefix(self): c.http_connection = self.fake_http_connection( 204, query_string="format=json&prefix=asdf/") c.get_account('http://www.test.com', 'asdf', prefix='asdf/') def test_param_end_marker(self): c.http_connection = self.fake_http_connection( 204, query_string="format=json&end_marker=end_marker") c.get_account('http://www.test.com', 'asdf', end_marker='end_marker') class TestHeadAccount(MockHttpTest): def test_ok(self): c.http_connection = self.fake_http_connection(200) value = c.head_account('http://www.tests.com', 'asdf') # TODO: Hmm. This doesn't really test too much as it uses a fake that # always returns the same dict. I guess it "exercises" the code, so # I'll leave it for now. self.assertEqual(type(value), dict) def test_server_error(self): body = 'c' * 65 c.http_connection = self.fake_http_connection(500, body=body) self.assertRaises(c.ClientException, c.head_account, 'http://www.tests.com', 'asdf') try: c.head_account('http://www.tests.com', 'asdf') except c.ClientException as e: new_body = "[first 60 chars of response] " + body[0:60] self.assertEqual(e.__str__()[-89:], new_body) class TestGetContainer(MockHttpTest): def test_no_content(self): c.http_connection = self.fake_http_connection(204) value = c.get_container('http://www.test.com', 'asdf', 'asdf')[1] self.assertEqual(value, []) def test_param_marker(self): c.http_connection = self.fake_http_connection( 204, query_string="format=json&marker=marker") c.get_container('http://www.test.com', 'asdf', 'asdf', marker='marker') def test_param_limit(self): c.http_connection = self.fake_http_connection( 204, query_string="format=json&limit=10") c.get_container('http://www.test.com', 'asdf', 'asdf', limit=10) def test_param_prefix(self): c.http_connection = self.fake_http_connection( 204, query_string="format=json&prefix=asdf/") c.get_container('http://www.test.com', 'asdf', 'asdf', prefix='asdf/') def test_param_delimiter(self): c.http_connection = self.fake_http_connection( 204, query_string="format=json&delimiter=/") c.get_container('http://www.test.com', 'asdf', 'asdf', delimiter='/') def test_param_end_marker(self): c.http_connection = self.fake_http_connection( 204, query_string="format=json&end_marker=end_marker") c.get_container('http://www.test.com', 'asdf', 'asdf', end_marker='end_marker') def test_param_path(self): c.http_connection = self.fake_http_connection( 204, query_string="format=json&path=asdf") c.get_container('http://www.test.com', 'asdf', 'asdf', path='asdf') class TestHeadContainer(MockHttpTest): def test_server_error(self): body = 'c' * 60 c.http_connection = self.fake_http_connection(500, body=body) self.assertRaises(c.ClientException, c.head_container, 'http://www.test.com', 'asdf', 'asdf', ) try: c.head_container('http://www.test.com', 'asdf', 'asdf') except c.ClientException as e: self.assertEqual(e.http_response_content, body) class TestPutContainer(MockHttpTest): def test_ok(self): c.http_connection = self.fake_http_connection(200) value = c.put_container('http://www.test.com', 'asdf', 'asdf') self.assertEqual(value, None) def test_server_error(self): body = 'c' * 60 c.http_connection = self.fake_http_connection(500, body=body) self.assertRaises(c.ClientException, 
c.put_container, 'http://www.test.com', 'asdf', 'asdf', ) try: c.put_container('http://www.test.com', 'asdf', 'asdf') except c.ClientException as e: self.assertEqual(e.http_response_content, body) class TestDeleteContainer(MockHttpTest): def test_ok(self): c.http_connection = self.fake_http_connection(200) value = c.delete_container('http://www.test.com', 'asdf', 'asdf') self.assertEqual(value, None) class TestGetObject(MockHttpTest): def test_server_error(self): c.http_connection = self.fake_http_connection(500) self.assertRaises(c.ClientException, c.get_object, 'http://www.test.com', 'asdf', 'asdf', 'asdf') def test_query_string(self): c.http_connection = self.fake_http_connection(200, query_string="hello=20") c.get_object('http://www.test.com', 'asdf', 'asdf', 'asdf', query_string="hello=20") def test_request_headers(self): request_args = {} def fake_request(method, url, body=None, headers=None): request_args['method'] = method request_args['url'] = url request_args['body'] = body request_args['headers'] = headers return conn = self.fake_http_connection(200)('http://www.test.com/') conn[1].request = fake_request headers = {'Range': 'bytes=1-2'} c.get_object('url_is_irrelevant', 'TOKEN', 'container', 'object', http_conn=conn, headers=headers) self.assertFalse(request_args['headers'] is None, "No headers in the request") self.assertTrue('Range' in request_args['headers'], "No Range header in the request") self.assertEqual(request_args['headers']['Range'], 'bytes=1-2') class TestHeadObject(MockHttpTest): def test_server_error(self): c.http_connection = self.fake_http_connection(500) self.assertRaises(c.ClientException, c.head_object, 'http://www.test.com', 'asdf', 'asdf', 'asdf') class TestPutObject(MockHttpTest): def test_ok(self): c.http_connection = self.fake_http_connection(200) args = ('http://www.test.com', 'asdf', 'asdf', 'asdf', 'asdf') value = c.put_object(*args) self.assertTrue(isinstance(value, basestring)) def test_unicode_ok(self): conn = c.http_connection(u'http://www.test.com/') mock_file = StringIO.StringIO(u'\u5929\u7a7a\u4e2d\u7684\u4e4c\u4e91') args = (u'\u5929\u7a7a\u4e2d\u7684\u4e4c\u4e91', '\u5929\u7a7a\u4e2d\u7684\u4e4c\u4e91', u'\u5929\u7a7a\u4e2d\u7684\u4e4c\u4e91', u'\u5929\u7a7a\u4e2d\u7684\u4e4c\u4e91', mock_file) headers = {'X-Header1': u'\u5929\u7a7a\u4e2d\u7684\u4e4c\u4e91', 'X-2': 1, 'X-3': {'a': 'b'}, 'a-b': '.x:yz mn:fg:lp'} resp = MockHttpResponse() conn[1].getresponse = resp.fake_response conn[1]._request = resp._fake_request value = c.put_object(*args, headers=headers, http_conn=conn) self.assertTrue(isinstance(value, basestring)) # Test for RFC-2616 encoded symbols self.assertTrue("a-b: .x:yz mn:fg:lp" in resp.buffer[0], "[a-b: .x:yz mn:fg:lp] header is missing") def test_chunk_warning(self): conn = c.http_connection('http://www.test.com/') mock_file = StringIO.StringIO('asdf') args = ('asdf', 'asdf', 'asdf', 'asdf', mock_file) resp = MockHttpResponse() conn[1].getresponse = resp.fake_response conn[1]._request = resp._fake_request with warnings.catch_warnings(record=True) as w: c.put_object(*args, chunk_size=20, headers={}, http_conn=conn) self.assertEqual(len(w), 0) body = 'c' * 60 c.http_connection = self.fake_http_connection(200, body=body) args = ('http://www.test.com', 'asdf', 'asdf', 'asdf', 'asdf') with warnings.catch_warnings(record=True) as w: c.put_object(*args, chunk_size=20) self.assertEqual(len(w), 1) self.assertTrue(issubclass(w[-1].category, UserWarning)) def test_server_error(self): body = 'c' * 60 c.http_connection = 
self.fake_http_connection(500, body=body) args = ('http://www.test.com', 'asdf', 'asdf', 'asdf', 'asdf') self.assertRaises(c.ClientException, c.put_object, *args) try: c.put_object(*args) except c.ClientException as e: self.assertEqual(e.http_response_content, body) def test_query_string(self): c.http_connection = self.fake_http_connection(200, query_string="hello=20") c.put_object('http://www.test.com', 'asdf', 'asdf', 'asdf', query_string="hello=20") class TestPostObject(MockHttpTest): def test_ok(self): c.http_connection = self.fake_http_connection(200) args = ('http://www.test.com', 'asdf', 'asdf', 'asdf', {}) c.post_object(*args) def test_unicode_ok(self): conn = c.http_connection(u'http://www.test.com/') args = (u'\u5929\u7a7a\u4e2d\u7684\u4e4c\u4e91', '\u5929\u7a7a\u4e2d\u7684\u4e4c\u4e91', u'\u5929\u7a7a\u4e2d\u7684\u4e4c\u4e91', u'\u5929\u7a7a\u4e2d\u7684\u4e4c\u4e91') headers = {'X-Header1': u'\u5929\u7a7a\u4e2d\u7684\u4e4c\u4e91', 'X-2': '1', 'X-3': {'a': 'b'}, 'a-b': '.x:yz mn:kl:qr'} resp = MockHttpResponse() conn[1].getresponse = resp.fake_response conn[1]._request = resp._fake_request c.post_object(*args, headers=headers, http_conn=conn) # Test for RFC-2616 encoded symbols self.assertTrue("a-b: .x:yz mn:kl:qr" in resp.buffer[0], "[a-b: .x:yz mn:kl:qr] header is missing") def test_server_error(self): body = 'c' * 60 c.http_connection = self.fake_http_connection(500, body=body) args = ('http://www.test.com', 'asdf', 'asdf', 'asdf', {}) self.assertRaises(c.ClientException, c.post_object, *args) try: c.post_object(*args) except c.ClientException as e: self.assertEqual(e.http_response_content, body) class TestDeleteObject(MockHttpTest): def test_ok(self): c.http_connection = self.fake_http_connection(200) c.delete_object('http://www.test.com', 'asdf', 'asdf', 'asdf') def test_server_error(self): c.http_connection = self.fake_http_connection(500) self.assertRaises(c.ClientException, c.delete_object, 'http://www.test.com', 'asdf', 'asdf', 'asdf') def test_query_string(self): c.http_connection = self.fake_http_connection(200, query_string="hello=20") c.delete_object('http://www.test.com', 'asdf', 'asdf', 'asdf', query_string="hello=20") class TestGetCapabilities(MockHttpTest): def test_ok(self): conn = self.fake_http_connection(200, body='{}') http_conn = conn('http://www.test.com/info') self.assertEqual(type(c.get_capabilities(http_conn)), dict) self.assertTrue(http_conn[1].has_been_read) def test_server_error(self): conn = self.fake_http_connection(500) http_conn = conn('http://www.test.com/info') self.assertRaises(c.ClientException, c.get_capabilities, http_conn) class TestConnection(MockHttpTest): def test_instance(self): conn = c.Connection('http://www.test.com', 'asdf', 'asdf') self.assertEqual(conn.retries, 5) def test_instance_kwargs(self): args = {'user': 'ausername', 'key': 'secretpass', 'authurl': 'http://www.test.com', 'tenant_name': 'atenant'} conn = c.Connection(**args) self.assertEqual(type(conn), c.Connection) def test_instance_kwargs_token(self): args = {'preauthtoken': 'atoken123', 'preauthurl': 'http://www.test.com:8080/v1/AUTH_123456'} conn = c.Connection(**args) self.assertEqual(type(conn), c.Connection) def test_storage_url_override(self): static_url = 'http://overridden.storage.url' c.http_connection = self.fake_http_connection( 200, body='[]', storage_url=static_url) conn = c.Connection('http://auth.url/', 'some_user', 'some_key', os_options={ 'object_storage_url': static_url}) method_signatures = ( (conn.head_account, []), (conn.get_account, []), 
(conn.head_container, ('asdf',)), (conn.get_container, ('asdf',)), (conn.put_container, ('asdf',)), (conn.delete_container, ('asdf',)), (conn.head_object, ('asdf', 'asdf')), (conn.get_object, ('asdf', 'asdf')), (conn.put_object, ('asdf', 'asdf', 'asdf')), (conn.post_object, ('asdf', 'asdf', {})), (conn.delete_object, ('asdf', 'asdf')), ) with mock.patch('swiftclient.client.get_auth_1_0') as mock_get_auth: mock_get_auth.return_value = ('http://auth.storage.url', 'tToken') for method, args in method_signatures: method(*args) def test_get_capabilities(self): conn = c.Connection() with mock.patch('swiftclient.client.get_capabilities') as get_cap: conn.get_capabilities('http://storage2.test.com') parsed = get_cap.call_args[0][0][0] self.assertEqual(parsed.path, '/info') self.assertEqual(parsed.netloc, 'storage2.test.com') conn.get_auth = lambda: ('http://storage.test.com/v1/AUTH_test', 'token') conn.get_capabilities() parsed = get_cap.call_args[0][0][0] self.assertEqual(parsed.path, '/info') self.assertEqual(parsed.netloc, 'storage.test.com') def test_retry(self): c.http_connection = self.fake_http_connection(500) def quick_sleep(*args): pass c.sleep = quick_sleep conn = c.Connection('http://www.test.com', 'asdf', 'asdf') self.assertRaises(c.ClientException, conn.head_account) self.assertEqual(conn.attempts, conn.retries + 1) def test_retry_on_ratelimit(self): c.http_connection = self.fake_http_connection(498) def quick_sleep(*args): pass c.sleep = quick_sleep # test retries conn = c.Connection('http://www.test.com', 'asdf', 'asdf', retry_on_ratelimit=True) self.assertRaises(c.ClientException, conn.head_account) self.assertEqual(conn.attempts, conn.retries + 1) # test default no-retry conn = c.Connection('http://www.test.com', 'asdf', 'asdf') self.assertRaises(c.ClientException, conn.head_account) self.assertEqual(conn.attempts, 1) def test_resp_read_on_server_error(self): c.http_connection = self.fake_http_connection(500) conn = c.Connection('http://www.test.com', 'asdf', 'asdf', retries=0) def get_auth(*args, **kwargs): return 'http://www.new.com', 'new' conn.get_auth = get_auth self.url, self.token = conn.get_auth() method_signatures = ( (conn.head_account, []), (conn.get_account, []), (conn.head_container, ('asdf',)), (conn.get_container, ('asdf',)), (conn.put_container, ('asdf',)), (conn.delete_container, ('asdf',)), (conn.head_object, ('asdf', 'asdf')), (conn.get_object, ('asdf', 'asdf')), (conn.put_object, ('asdf', 'asdf', 'asdf')), (conn.post_object, ('asdf', 'asdf', {})), (conn.delete_object, ('asdf', 'asdf')), ) for method, args in method_signatures: self.assertRaises(c.ClientException, method, *args) try: self.assertTrue(conn.http_conn[1].has_been_read) except AssertionError: msg = '%s did not read resp on server error' % method.__name__ self.fail(msg) except Exception as e: raise e.__class__("%s - %s" % (method.__name__, e)) def test_reauth(self): c.http_connection = self.fake_http_connection(401) def get_auth(*args, **kwargs): return 'http://www.new.com', 'new' def swap_sleep(*args): self.swap_sleep_called = True c.get_auth = get_auth c.http_connection = self.fake_http_connection(200) c.sleep = swap_sleep self.swap_sleep_called = False conn = c.Connection('http://www.test.com', 'asdf', 'asdf', preauthurl='http://www.old.com', preauthtoken='old', ) self.assertEqual(conn.attempts, 0) self.assertEqual(conn.url, 'http://www.old.com') self.assertEqual(conn.token, 'old') conn.head_account() self.assertTrue(self.swap_sleep_called) self.assertEqual(conn.attempts, 2) 
self.assertEqual(conn.url, 'http://www.new.com') self.assertEqual(conn.token, 'new') def test_reset_stream(self): class LocalContents(object): def __init__(self, tell_value=0): self.already_read = False self.seeks = [] self.tell_value = tell_value def tell(self): return self.tell_value def seek(self, position): self.seeks.append(position) self.already_read = False def read(self, size=-1): if self.already_read: return '' else: self.already_read = True return 'abcdef' class LocalConnection(object): def __init__(self, parsed_url=None): self.reason = "" if parsed_url: self.host = parsed_url.netloc self.port = parsed_url.netloc def putrequest(self, *args, **kwargs): self.send() def putheader(self, *args, **kwargs): return def endheaders(self, *args, **kwargs): return def send(self, *args, **kwargs): raise socket.error('oops') def request(self, *args, **kwargs): return def getresponse(self, *args, **kwargs): self.status = 200 return self def getheader(self, *args, **kwargs): return 'header' def getheaders(self): return {"key1": "value1", "key2": "value2"} def read(self, *args, **kwargs): return '' def local_http_connection(url, proxy=None, cacert=None, insecure=False, ssl_compression=True): parsed = urlparse(url) return parsed, LocalConnection() orig_conn = c.http_connection try: c.http_connection = local_http_connection conn = c.Connection('http://www.example.com', 'asdf', 'asdf', retries=1, starting_backoff=.0001) contents = LocalContents() exc = None try: conn.put_object('c', 'o', contents) except socket.error as err: exc = err self.assertEqual(contents.seeks, [0]) self.assertEqual(str(exc), 'oops') contents = LocalContents(tell_value=123) exc = None try: conn.put_object('c', 'o', contents) except socket.error as err: exc = err self.assertEqual(contents.seeks, [123]) self.assertEqual(str(exc), 'oops') contents = LocalContents() contents.tell = None exc = None try: conn.put_object('c', 'o', contents) except c.ClientException as err: exc = err self.assertEqual(contents.seeks, []) self.assertEqual(str(exc), "put_object('c', 'o', ...) failure " "and no ability to reset contents for reupload.") finally: c.http_connection = orig_conn class TestLogging(MockHttpTest): """ Make sure all the lines in http_log are covered. 
""" def setUp(self): super(TestLogging, self).setUp() self.swiftclient_logger = logging.getLogger("swiftclient") self.log_level = self.swiftclient_logger.getEffectiveLevel() self.swiftclient_logger.setLevel(logging.INFO) def tearDown(self): self.swiftclient_logger.setLevel(self.log_level) super(TestLogging, self).tearDown() def test_put_ok(self): c.http_connection = self.fake_http_connection(200) args = ('http://www.test.com', 'asdf', 'asdf', 'asdf', 'asdf') value = c.put_object(*args) self.assertTrue(isinstance(value, basestring)) def test_head_error(self): c.http_connection = self.fake_http_connection(500) self.assertRaises(c.ClientException, c.head_object, 'http://www.test.com', 'asdf', 'asdf', 'asdf') def test_get_error(self): body = 'c' * 65 conn = self.fake_http_connection( 404, body=body)('http://www.test.com/') request_args = {} def fake_request(method, url, body=None, headers=None): request_args['method'] = method request_args['url'] = url request_args['body'] = body request_args['headers'] = headers return conn[1].request = fake_request headers = {'Range': 'bytes=1-2'} self.assertRaises( c.ClientException, c.get_object, 'url_is_irrelevant', 'TOKEN', 'container', 'object', http_conn=conn, headers=headers) class TestCloseConnection(MockHttpTest): def test_close_none(self): c.http_connection = self.fake_http_connection(200) conn = c.Connection('http://www.test.com', 'asdf', 'asdf') self.assertEqual(conn.http_conn, None) conn.close() self.assertEqual(conn.http_conn, None) def test_close_ok(self): url = 'http://www.test.com' c.http_connection = self.fake_http_connection(200) conn = c.Connection(url, 'asdf', 'asdf') self.assertEqual(conn.http_conn, None) conn.http_conn = c.http_connection(url) self.assertEqual(type(conn.http_conn), tuple) self.assertEqual(len(conn.http_conn), 2) http_conn_obj = conn.http_conn[1] self.assertEqual(http_conn_obj.isclosed(), False) conn.close() self.assertEqual(http_conn_obj.isclosed(), True) self.assertEqual(conn.http_conn, None) if __name__ == '__main__': testtools.main() python-swiftclient-2.0.3/tests/test_command_helpers.py0000664000175300017540000001415512304670635024452 0ustar jenkinsjenkins00000000000000# Copyright (c) 2010-2013 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
from StringIO import StringIO import mock import testtools from swiftclient import command_helpers as h from swiftclient.multithreading import MultiThreadingManager class TestStatHelpers(testtools.TestCase): def setUp(self): super(TestStatHelpers, self).setUp() conn_attrs = { 'url': 'http://storage/v1/a', 'token': 'tk12345', } self.conn = mock.MagicMock(**conn_attrs) self.options = mock.MagicMock(human=False, verbose=1) self.stdout = StringIO() self.stderr = StringIO() self.thread_manager = MultiThreadingManager(self.stdout, self.stderr) def assertOut(self, expected): real = self.stdout.getvalue() # commonly if we strip of blank lines we have a match try: self.assertEqual(expected.strip('\n'), real.strip('\n')) except AssertionError: # could be anything, try to find typos line by line expected_lines = [line.lstrip() for line in expected.splitlines() if line.strip()] real_lines = [line.lstrip() for line in real.splitlines() if line.strip()] for expected, real in zip(expected_lines, real_lines): self.assertEqual(expected, real) # not a typo, might be an indent thing, hopefully you can spot it raise def test_stat_account_human(self): self.options.human = True # stub head_account stub_headers = { 'x-account-container-count': 42, 'x-account-object-count': 1000000, 'x-account-bytes-used': 2 ** 30, } self.conn.head_account.return_value = stub_headers with self.thread_manager as thread_manager: h.stat_account(self.conn, self.options, thread_manager) expected = """ Account: a Containers: 42 Objects: 976K Bytes: 1.0G """ self.assertOut(expected) def test_stat_account_verbose(self): self.options.verbose += 1 # stub head_account stub_headers = { 'x-account-container-count': 42, 'x-account-object-count': 1000000, 'x-account-bytes-used': 2 ** 30, } self.conn.head_account.return_value = stub_headers with self.thread_manager as thread_manager: h.stat_account(self.conn, self.options, thread_manager) expected = """ StorageURL: http://storage/v1/a Auth Token: tk12345 Account: a Containers: 42 Objects: 1000000 Bytes: 1073741824 """ self.assertOut(expected) def test_stat_container_human(self): self.options.human = True # stub head container request stub_headers = { 'x-container-object-count': 10 ** 6, 'x-container-bytes-used': 2 ** 30, } self.conn.head_container.return_value = stub_headers args = ('c',) with self.thread_manager as thread_manager: h.stat_container(self.conn, self.options, args, thread_manager) expected = """ Account: a Container: c Objects: 976K Bytes: 1.0G Read ACL: Write ACL: Sync To: Sync Key: """ self.assertOut(expected) def test_stat_container_verbose(self): self.options.verbose += 1 # stub head container request stub_headers = { 'x-container-object-count': 10 ** 6, 'x-container-bytes-used': 2 ** 30, } self.conn.head_container.return_value = stub_headers args = ('c',) with self.thread_manager as thread_manager: h.stat_container(self.conn, self.options, args, thread_manager) expected = """ URL: http://storage/v1/a/c Auth Token: tk12345 Account: a Container: c Objects: 1000000 Bytes: 1073741824 Read ACL: Write ACL: Sync To: Sync Key: """ self.assertOut(expected) def test_stat_object_human(self): self.options.human = True # stub head object request stub_headers = { 'content-length': 2 ** 20, 'x-object-meta-color': 'blue', 'etag': '68b329da9893e34099c7d8ad5cb9c940', 'content-encoding': 'gzip', } self.conn.head_object.return_value = stub_headers args = ('c', 'o') with self.thread_manager as thread_manager: h.stat_object(self.conn, self.options, args, thread_manager) expected = """ Account: a 
Container: c Object: o Content Length: 1.0M ETag: 68b329da9893e34099c7d8ad5cb9c940 Meta Color: blue Content-Encoding: gzip """ self.assertOut(expected) def test_stat_object_verbose(self): self.options.verbose += 1 # stub head object request stub_headers = { 'content-length': 2 ** 20, 'x-object-meta-color': 'blue', 'etag': '68b329da9893e34099c7d8ad5cb9c940', 'content-encoding': 'gzip', } self.conn.head_object.return_value = stub_headers args = ('c', 'o') with self.thread_manager as thread_manager: h.stat_object(self.conn, self.options, args, thread_manager) expected = """ URL: http://storage/v1/a/c/o Auth Token: tk12345 Account: a Container: c Object: o Content Length: 1048576 ETag: 68b329da9893e34099c7d8ad5cb9c940 Meta Color: blue Content-Encoding: gzip """ self.assertOut(expected) python-swiftclient-2.0.3/tests/test_utils.py0000664000175300017540000001270712304670635022453 0ustar jenkinsjenkins00000000000000# Copyright (c) 2010-2013 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import testtools from StringIO import StringIO import tempfile from swiftclient import utils as u class TestConfigTrueValue(testtools.TestCase): def test_TRUE_VALUES(self): for v in u.TRUE_VALUES: self.assertEqual(v, v.lower()) def test_config_true_value(self): orig_trues = u.TRUE_VALUES try: u.TRUE_VALUES = 'hello world'.split() for val in 'hello world HELLO WORLD'.split(): self.assertTrue(u.config_true_value(val) is True) self.assertTrue(u.config_true_value(True) is True) self.assertTrue(u.config_true_value('foo') is False) self.assertTrue(u.config_true_value(False) is False) finally: u.TRUE_VALUES = orig_trues class TestPrtBytes(testtools.TestCase): def test_zero_bytes(self): bytes_ = 0 raw = '0' human = '0' self.assertEqual(raw, u.prt_bytes(bytes_, False).lstrip()) self.assertEqual(human, u.prt_bytes(bytes_, True).lstrip()) def test_one_byte(self): bytes_ = 1 raw = '1' human = '1' self.assertEqual(raw, u.prt_bytes(bytes_, False).lstrip()) self.assertEqual(human, u.prt_bytes(bytes_, True).lstrip()) def test_less_than_one_k(self): bytes_ = (2 ** 10) - 1 raw = '1023' human = '1023' self.assertEqual(raw, u.prt_bytes(bytes_, False).lstrip()) self.assertEqual(human, u.prt_bytes(bytes_, True).lstrip()) def test_one_k(self): bytes_ = 2 ** 10 raw = '1024' human = '1.0K' self.assertEqual(raw, u.prt_bytes(bytes_, False).lstrip()) self.assertEqual(human, u.prt_bytes(bytes_, True).lstrip()) def test_a_decimal_k(self): bytes_ = (3 * 2 ** 10) + 512 raw = '3584' human = '3.5K' self.assertEqual(raw, u.prt_bytes(bytes_, False).lstrip()) self.assertEqual(human, u.prt_bytes(bytes_, True).lstrip()) def test_a_bit_less_than_one_meg(self): bytes_ = (2 ** 20) - (2 ** 10) raw = '1047552' human = '1023K' self.assertEqual(raw, u.prt_bytes(bytes_, False).lstrip()) self.assertEqual(human, u.prt_bytes(bytes_, True).lstrip()) def test_just_a_hair_less_than_one_meg(self): bytes_ = (2 ** 20) - (2 ** 10) + 1 raw = '1047553' human = '1.0M' self.assertEqual(raw, u.prt_bytes(bytes_, False).lstrip()) self.assertEqual(human, u.prt_bytes(bytes_, 
True).lstrip()) def test_one_meg(self): bytes_ = 2 ** 20 raw = '1048576' human = '1.0M' self.assertEqual(raw, u.prt_bytes(bytes_, False).lstrip()) self.assertEqual(human, u.prt_bytes(bytes_, True).lstrip()) def test_ten_meg(self): bytes_ = 10 * 2 ** 20 human = '10M' self.assertEqual(human, u.prt_bytes(bytes_, True).lstrip()) def test_bit_less_than_ten_meg(self): bytes_ = (10 * 2 ** 20) - (100 * 2 ** 10) human = '9.9M' self.assertEqual(human, u.prt_bytes(bytes_, True).lstrip()) def test_just_a_hair_less_than_ten_meg(self): bytes_ = (10 * 2 ** 20) - 1 human = '10.0M' self.assertEqual(human, u.prt_bytes(bytes_, True).lstrip()) def test_a_yotta(self): bytes_ = 42 * 2 ** 80 self.assertEqual('42Y', u.prt_bytes(bytes_, True).lstrip()) def test_overflow(self): bytes_ = 2 ** 90 self.assertEqual('1024Y', u.prt_bytes(bytes_, True).lstrip()) class TestLengthWrapper(testtools.TestCase): def test_stringio(self): contents = StringIO('a' * 100) data = u.LengthWrapper(contents, 42) self.assertEqual(42, len(data)) read_data = ''.join(iter(data.read, '')) self.assertEqual(42, len(read_data)) self.assertEqual('a' * 42, read_data) def test_tempfile(self): with tempfile.NamedTemporaryFile() as f: f.write('a' * 100) f.flush() contents = open(f.name) data = u.LengthWrapper(contents, 42) self.assertEqual(42, len(data)) read_data = ''.join(iter(data.read, '')) self.assertEqual(42, len(read_data)) self.assertEqual('a' * 42, read_data) def test_segmented_file(self): with tempfile.NamedTemporaryFile() as f: segment_length = 1024 segments = ('a', 'b', 'c', 'd') for c in segments: f.write(c * segment_length) f.flush() for i, c in enumerate(segments): contents = open(f.name) contents.seek(i * segment_length) data = u.LengthWrapper(contents, segment_length) self.assertEqual(segment_length, len(data)) read_data = ''.join(iter(data.read, '')) self.assertEqual(segment_length, len(read_data)) self.assertEqual(c * segment_length, read_data) python-swiftclient-2.0.3/tests/utils.py0000664000175300017540000001260712304670635021413 0ustar jenkinsjenkins00000000000000# Copyright (c) 2010-2012 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
from requests import RequestException from time import sleep def fake_get_keystoneclient_2_0(os_options, exc=None, **kwargs): def fake_get_keystoneclient_2_0(auth_url, user, key, actual_os_options, **actual_kwargs): if exc: raise exc('test') if actual_os_options != os_options: return "", None if auth_url.startswith("https") and \ auth_url.endswith("invalid-certificate") and \ not actual_kwargs['insecure']: from swiftclient import client as c raise c.ClientException("invalid-certificate") if auth_url.startswith("https") and \ auth_url.endswith("self-signed-certificate") and \ not actual_kwargs['insecure'] and \ actual_kwargs['cacert'] is None: from swiftclient import client as c raise c.ClientException("unverified-certificate") return "http://url/", "token" return fake_get_keystoneclient_2_0 def fake_http_connect(*code_iter, **kwargs): class FakeConn(object): def __init__(self, status, etag=None, body='', timestamp='1'): self.status = status self.reason = 'Fake' self.host = '1.2.3.4' self.port = '1234' self.sent = 0 self.received = 0 self.etag = etag self.body = body self.timestamp = timestamp self._is_closed = True def connect(self): self._is_closed = False def close(self): self._is_closed = True def isclosed(self): return self._is_closed def getresponse(self): if kwargs.get('raise_exc'): raise Exception('test') return self def getexpect(self): if self.status == -2: raise RequestException() if self.status == -3: return FakeConn(507) return FakeConn(100) def getheaders(self): headers = {'content-length': len(self.body), 'content-type': 'x-application/test', 'x-timestamp': self.timestamp, 'last-modified': self.timestamp, 'x-object-meta-test': 'testing', 'etag': self.etag or '"68b329da9893e34099c7d8ad5cb9c940"', 'x-works': 'yes', 'x-account-container-count': 12345} if not self.timestamp: del headers['x-timestamp'] try: if container_ts_iter.next() is False: headers['x-container-timestamp'] = '1' except StopIteration: pass if 'slow' in kwargs: headers['content-length'] = '4' if 'headers' in kwargs: headers.update(kwargs['headers']) if 'auth_v1' in kwargs: headers.update( {'x-storage-url': 'storageURL', 'x-auth-token': 'someauthtoken'}) return headers.items() def read(self, amt=None): if 'slow' in kwargs: if self.sent < 4: self.sent += 1 sleep(0.1) return ' ' rv = self.body[:amt] self.body = self.body[amt:] return rv def send(self, amt=None): if 'slow' in kwargs: if self.received < 4: self.received += 1 sleep(0.1) def getheader(self, name, default=None): return dict(self.getheaders()).get(name.lower(), default) timestamps_iter = iter(kwargs.get('timestamps') or ['1'] * len(code_iter)) etag_iter = iter(kwargs.get('etags') or [None] * len(code_iter)) x = kwargs.get('missing_container', [False] * len(code_iter)) if not isinstance(x, (tuple, list)): x = [x] * len(code_iter) container_ts_iter = iter(x) code_iter = iter(code_iter) def connect(*args, **ckwargs): if 'give_content_type' in kwargs: if len(args) >= 7 and 'Content-Type' in args[6]: kwargs['give_content_type'](args[6]['Content-Type']) else: kwargs['give_content_type']('') if 'give_connect' in kwargs: kwargs['give_connect'](*args, **ckwargs) status = code_iter.next() etag = etag_iter.next() timestamp = timestamps_iter.next() if status <= 0: raise RequestException() fake_conn = FakeConn(status, etag, body=kwargs.get('body', ''), timestamp=timestamp) fake_conn.connect() return fake_conn return connect python-swiftclient-2.0.3/tests/__init__.py0000664000175300017540000000000012304670635021772 0ustar 
python-swiftclient-2.0.3/requirements.txt
requests>=1.1
simplejson>=2.0.9

python-swiftclient-2.0.3/AUTHORS

python-swiftclient-2.0.3/python_swiftclient.egg-info/top_level.txt
swiftclient

python-swiftclient-2.0.3/python_swiftclient.egg-info/dependency_links.txt

python-swiftclient-2.0.3/python_swiftclient.egg-info/PKG-INFO
Metadata-Version: 1.1
Name: python-swiftclient
Version: 2.0.3
Summary: OpenStack Object Storage API Client Library
Home-page: http://www.openstack.org/
Author: OpenStack
Author-email: openstack-dev@lists.openstack.org
License: UNKNOWN
Description: Python bindings to the OpenStack Object Storage API
        ===================================================

        This is a Python client for the Swift API. There's a Python API (the
        ``swiftclient`` module), and a command-line script (``swift``).

        Development takes place via the usual OpenStack processes as outlined
        in the `OpenStack wiki`__. The master repository is on GitHub__.

        __ http://wiki.openstack.org/HowToContribute
        __ http://github.com/openstack/python-swiftclient

        This code is based on the original client previously included with
        `OpenStack's swift`__. The python-swiftclient is licensed under the
        Apache License like the rest of OpenStack.

        __ http://github.com/openstack/swift

        ..
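        For orientation, a minimal and purely hypothetical use of the
        module-level API might look like the following; the auth endpoint,
        credentials, container and object names are placeholders rather
        than values shipped with this package::

            from swiftclient import client

            # Authenticate against a v1 auth endpoint; returns the storage
            # URL and an auth token.
            url, token = client.get_auth('https://swift.example.com/auth/v1.0',
                                         'account:user', 'secret',
                                         auth_version='1.0')

            # Upload an object, then read it back.
            client.put_object(url, token, 'mycontainer', 'hello.txt', 'hello')
            headers, body = client.get_object(url, token, 'mycontainer',
                                              'hello.txt')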
contents:: Contents: :local: Platform: UNKNOWN Classifier: Environment :: OpenStack Classifier: Intended Audience :: Information Technology Classifier: Intended Audience :: System Administrators Classifier: License :: OSI Approved :: Apache Software License Classifier: Operating System :: POSIX :: Linux Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 2.6 python-swiftclient-2.0.3/python_swiftclient.egg-info/SOURCES.txt0000664000175300017540000000160612304670716026046 0ustar jenkinsjenkins00000000000000.coveragerc .mailmap .testr.conf .unittests AUTHORS ChangeLog LICENSE MANIFEST.in README.rst requirements.txt run_tests.sh setup.cfg setup.py test-requirements.txt tox.ini bin/swift doc/Makefile doc/manpages/swift.1 doc/source/conf.py doc/source/index.rst doc/source/swiftclient.rst doc/source/_static/.empty doc/source/_templates/.empty python_swiftclient.egg-info/PKG-INFO python_swiftclient.egg-info/SOURCES.txt python_swiftclient.egg-info/dependency_links.txt python_swiftclient.egg-info/not-zip-safe python_swiftclient.egg-info/requires.txt python_swiftclient.egg-info/top_level.txt swiftclient/__init__.py swiftclient/client.py swiftclient/command_helpers.py swiftclient/exceptions.py swiftclient/multithreading.py swiftclient/utils.py swiftclient/version.py tests/__init__.py tests/test_command_helpers.py tests/test_multithreading.py tests/test_swiftclient.py tests/test_utils.py tests/utils.pypython-swiftclient-2.0.3/python_swiftclient.egg-info/not-zip-safe0000664000175300017540000000000112304670715026404 0ustar jenkinsjenkins00000000000000 python-swiftclient-2.0.3/python_swiftclient.egg-info/requires.txt0000664000175300017540000000003712304670716026557 0ustar jenkinsjenkins00000000000000requests>=1.1 simplejson>=2.0.9python-swiftclient-2.0.3/swiftclient/0000775000175300017540000000000012304670716021064 5ustar jenkinsjenkins00000000000000python-swiftclient-2.0.3/swiftclient/version.py0000664000175300017540000000240712304670635023126 0ustar jenkinsjenkins00000000000000# Copyright 2012 OpenStack LLC # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pkg_resources try: # First, try to get our version out of PKG-INFO. If we're installed, # this'll let us find our version without pulling in pbr. After all, if # we're installed on a system, we're not in a Git-managed source tree, so # pbr doesn't really buy us anything. version_string = pkg_resources.get_provider( pkg_resources.Requirement.parse('python-swiftclient')).version except pkg_resources.DistributionNotFound: # No PKG-INFO? We're probably running from a checkout, then. Let pbr do # its thing to figure out a version number. import pbr.version version_string = str(pbr.version.VersionInfo('python-swiftclient')) python-swiftclient-2.0.3/swiftclient/exceptions.py0000664000175300017540000000472512304670635023627 0ustar jenkinsjenkins00000000000000# Copyright (c) 2010-2013 OpenStack, LLC. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. class ClientException(Exception): def __init__(self, msg, http_scheme='', http_host='', http_port='', http_path='', http_query='', http_status=0, http_reason='', http_device='', http_response_content=''): Exception.__init__(self, msg) self.msg = msg self.http_scheme = http_scheme self.http_host = http_host self.http_port = http_port self.http_path = http_path self.http_query = http_query self.http_status = http_status self.http_reason = http_reason self.http_device = http_device self.http_response_content = http_response_content def __str__(self): a = self.msg b = '' if self.http_scheme: b += '%s://' % self.http_scheme if self.http_host: b += self.http_host if self.http_port: b += ':%s' % self.http_port if self.http_path: b += self.http_path if self.http_query: b += '?%s' % self.http_query if self.http_status: if b: b = '%s %s' % (b, self.http_status) else: b = str(self.http_status) if self.http_reason: if b: b = '%s %s' % (b, self.http_reason) else: b = '- %s' % self.http_reason if self.http_device: if b: b = '%s: device %s' % (b, self.http_device) else: b = 'device %s' % self.http_device if self.http_response_content: if len(self.http_response_content) <= 60: b += ' %s' % self.http_response_content else: b += ' [first 60 chars of response] %s' \ % self.http_response_content[:60] return b and '%s: %s' % (a, b) or a class InvalidHeadersException(Exception): pass python-swiftclient-2.0.3/swiftclient/client.py0000664000175300017540000015223412304670635022723 0ustar jenkinsjenkins00000000000000# Copyright (c) 2010-2012 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
""" OpenStack Swift client library used internally """ import socket import requests import sys import logging import warnings from distutils.version import StrictVersion from requests.exceptions import RequestException, SSLError from urllib import quote as _quote from urlparse import urlparse, urlunparse from time import sleep, time from swiftclient.exceptions import ClientException, InvalidHeadersException from swiftclient.utils import LengthWrapper try: from logging import NullHandler except ImportError: class NullHandler(logging.Handler): def handle(self, record): pass def emit(self, record): pass def createLock(self): self.lock = None # requests version 1.2.3 try to encode headers in ascii, preventing # utf-8 encoded header to be 'prepared' if StrictVersion(requests.__version__) < StrictVersion('2.0.0'): from requests.structures import CaseInsensitiveDict def prepare_unicode_headers(self, headers): if headers: self.headers = CaseInsensitiveDict(headers) else: self.headers = CaseInsensitiveDict() requests.models.PreparedRequest.prepare_headers = prepare_unicode_headers logger = logging.getLogger("swiftclient") logger.addHandler(NullHandler()) def http_log(args, kwargs, resp, body): if not logger.isEnabledFor(logging.INFO): return # create and log equivalent curl command string_parts = ['curl -i'] for element in args: if element == 'HEAD': string_parts.append(' -I') elif element in ('GET', 'POST', 'PUT'): string_parts.append(' -X %s' % element) else: string_parts.append(' %s' % element) if 'headers' in kwargs: for element in kwargs['headers']: header = ' -H "%s: %s"' % (element, kwargs['headers'][element]) string_parts.append(header) # log response as debug if good, or info if error if resp.status < 300: log_method = logger.debug else: log_method = logger.info log_method("REQ: %s" % "".join(string_parts)) log_method("RESP STATUS: %s %s" % (resp.status, resp.reason)) log_method("RESP HEADERS: %s", resp.getheaders()) if body: log_method("RESP BODY: %s", body) def quote(value, safe='/'): """ Patched version of urllib.quote that encodes utf8 strings before quoting """ value = encode_utf8(value) if isinstance(value, str): return _quote(value, safe) else: return value def validate_headers(headers): if headers: for key, raw_value in headers.iteritems(): value = str(encode_utf8(raw_value)) if '\n' in value: raise InvalidHeadersException("%r header contained a " "newline" % key) if '\r' in value: raise InvalidHeadersException("%r header contained a " "carriage return" % key) def encode_utf8(value): if isinstance(value, unicode): value = value.encode('utf8') return value # look for a real json parser first try: # simplejson is popular and pretty good from simplejson import loads as json_loads except ImportError: # 2.6 will have a json module in the stdlib from json import loads as json_loads class HTTPConnection: def __init__(self, url, proxy=None, cacert=None, insecure=False, ssl_compression=False): """ Make an HTTPConnection or HTTPSConnection :param url: url to connect to :param proxy: proxy to connect through, if any; None by default; str of the format 'http://127.0.0.1:8888' to set one :param cacert: A CA bundle file to use in verifying a TLS server certificate. :param insecure: Allow to access servers without checking SSL certs. The server's certificate will not be verified. :param ssl_compression: SSL compression should be disabled by default and this setting is not usable as of now. The parameter is kept for backward compatibility. 
:raises ClientException: Unable to handle protocol scheme """ self.url = url self.parsed_url = urlparse(url) self.host = self.parsed_url.netloc self.port = self.parsed_url.port self.requests_args = {} if self.parsed_url.scheme not in ('http', 'https'): raise ClientException("Unsupported scheme") self.requests_args['verify'] = not insecure if cacert and not insecure: # verify requests parameter is used to pass the CA_BUNDLE file # see: http://docs.python-requests.org/en/latest/user/advanced/ self.requests_args['verify'] = cacert if proxy: proxy_parsed = urlparse(proxy) if not proxy_parsed.scheme: raise ClientException("Proxy's missing scheme") self.requests_args['proxies'] = { proxy_parsed.scheme: '%s://%s' % ( proxy_parsed.scheme, proxy_parsed.netloc ) } self.requests_args['stream'] = True def _request(self, *arg, **kwarg): """ Final wrapper before requests call, to be patched in tests """ return requests.request(*arg, **kwarg) def request(self, method, full_path, data=None, headers={}, files=None): """ Encode url and header, then call requests.request """ headers = dict((encode_utf8(x), encode_utf8(y)) for x, y in headers.iteritems()) url = encode_utf8("%s://%s%s" % ( self.parsed_url.scheme, self.parsed_url.netloc, full_path)) self.resp = self._request(method, url, headers=headers, data=data, files=files, **self.requests_args) return self.resp def putrequest(self, full_path, data=None, headers={}, files=None): """ Use python-requests files upload :param data: Use data generator for chunked-transfer :param files: Use files for default transfer """ return self.request('PUT', full_path, data, headers, files) def getresponse(self): """ Adapt requests response to httplib interface """ self.resp.status = self.resp.status_code old_getheader = self.resp.raw.getheader def getheaders(): return self.resp.headers.items() def getheader(k, v=None): return old_getheader(k.lower(), v) self.resp.getheaders = getheaders self.resp.getheader = getheader self.resp.read = self.resp.raw.read return self.resp def http_connection(*arg, **kwarg): """ :returns: tuple of (parsed url, connection object) """ conn = HTTPConnection(*arg, **kwarg) return conn.parsed_url, conn def get_auth_1_0(url, user, key, snet, **kwargs): insecure = kwargs.get('insecure', False) parsed, conn = http_connection(url, insecure=insecure) method = 'GET' conn.request(method, parsed.path, '', {'X-Auth-User': user, 'X-Auth-Key': key}) resp = conn.getresponse() body = resp.read() http_log((url, method,), {}, resp, body) url = resp.getheader('x-storage-url') # There is a side-effect on current Rackspace 1.0 server where a # bad URL would get you that document page and a 200. We error out # if we don't have a x-storage-url header and if we get a body. if resp.status < 200 or resp.status >= 300 or (body and not url): raise ClientException('Auth GET failed', http_scheme=parsed.scheme, http_host=conn.host, http_port=conn.port, http_path=parsed.path, http_status=resp.status, http_reason=resp.reason) if snet: parsed = list(urlparse(url)) # Second item in the list is the netloc netloc = parsed[1] parsed[1] = 'snet-' + netloc url = urlunparse(parsed) return url, resp.getheader('x-storage-token', resp.getheader('x-auth-token')) def get_keystoneclient_2_0(auth_url, user, key, os_options, **kwargs): """ Authenticate against a auth 2.0 server. We are using the keystoneclient library for our 2.0 authentication. 
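A minimal usage sketch (the endpoint URL, user name, password and option
    values below are placeholders, not defaults)::

        endpoint, token = get_keystoneclient_2_0(
            'http://keystone.example.com:5000/v2.0', 'demo', 'secretpass',
            {'tenant_name': 'demo', 'region_name': 'RegionOne'})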
""" insecure = kwargs.get('insecure', False) debug = logger.isEnabledFor(logging.DEBUG) and True or False try: from keystoneclient.v2_0 import client as ksclient from keystoneclient import exceptions except ImportError: sys.exit(''' Auth version 2.0 requires python-keystoneclient, install it or use Auth version 1.0 which requires ST_AUTH, ST_USER, and ST_KEY environment variables to be set or overridden with -A, -U, or -K.''') try: _ksclient = ksclient.Client(username=user, password=key, tenant_name=os_options.get('tenant_name'), tenant_id=os_options.get('tenant_id'), debug=debug, cacert=kwargs.get('cacert'), auth_url=auth_url, insecure=insecure) except exceptions.Unauthorized: raise ClientException('Unauthorised. Check username, password' ' and tenant name/id') except exceptions.AuthorizationFailure as err: raise ClientException('Authorization Failure. %s' % err) service_type = os_options.get('service_type') or 'object-store' endpoint_type = os_options.get('endpoint_type') or 'publicURL' try: endpoint = _ksclient.service_catalog.url_for( attr='region', filter_value=os_options.get('region_name'), service_type=service_type, endpoint_type=endpoint_type) except exceptions.EndpointNotFound: raise ClientException('Endpoint for %s not found - ' 'have you specified a region?' % service_type) return (endpoint, _ksclient.auth_token) def get_auth(auth_url, user, key, **kwargs): """ Get authentication/authorization credentials. The snet parameter is used for Rackspace's ServiceNet internal network implementation. In this function, it simply adds *snet-* to the beginning of the host name for the returned storage URL. With Rackspace Cloud Files, use of this network path causes no bandwidth charges but requires the client to be running on Rackspace's ServiceNet network. """ auth_version = kwargs.get('auth_version', '1') os_options = kwargs.get('os_options', {}) storage_url, token = None, None insecure = kwargs.get('insecure', False) if auth_version in ['1.0', '1', 1]: storage_url, token = get_auth_1_0(auth_url, user, key, kwargs.get('snet'), insecure=insecure) elif auth_version in ['2.0', '2', 2]: # We are allowing to specify a token/storage-url to re-use # without having to re-authenticate. if (os_options.get('object_storage_url') and os_options.get('auth_token')): return (os_options.get('object_storage_url'), os_options.get('auth_token')) # We are handling a special use case here when we were # allowing specifying the account/tenant_name with the -U # argument if not kwargs.get('tenant_name') and ':' in user: (os_options['tenant_name'], user) = user.split(':') # We are allowing to have an tenant_name argument in get_auth # directly without having os_options if kwargs.get('tenant_name'): os_options['tenant_name'] = kwargs['tenant_name'] if (not 'tenant_name' in os_options): raise ClientException('No tenant specified') cacert = kwargs.get('cacert', None) storage_url, token = get_keystoneclient_2_0(auth_url, user, key, os_options, cacert=cacert, insecure=insecure) else: raise ClientException('Unknown auth_version %s specified.' 
% auth_version) # Override storage url, if necessary if os_options.get('object_storage_url'): return os_options['object_storage_url'], token else: return storage_url, token def store_response(resp, response_dict): """ store information about an operation into a dict :param resp: an http response object containing the response headers :param response_dict: a dict into which are placed the status, reason and a dict of lower-cased headers """ if response_dict is not None: resp_headers = {} for header, value in resp.getheaders(): resp_headers[header.lower()] = value response_dict['status'] = resp.status response_dict['reason'] = resp.reason response_dict['headers'] = resp_headers def get_account(url, token, marker=None, limit=None, prefix=None, end_marker=None, http_conn=None, full_listing=False): """ Get a listing of containers for the account. :param url: storage URL :param token: auth token :param marker: marker query :param limit: limit query :param prefix: prefix query :param end_marker: end_marker query :param http_conn: HTTP connection object (If None, it will create the conn object) :param full_listing: if True, return a full listing, else returns a max of 10000 listings :returns: a tuple of (response headers, a list of containers) The response headers will be a dict and all header names will be lowercase. :raises ClientException: HTTP GET request failed """ if not http_conn: http_conn = http_connection(url) if full_listing: rv = get_account(url, token, marker, limit, prefix, end_marker, http_conn) listing = rv[1] while listing: marker = listing[-1]['name'] listing = \ get_account(url, token, marker, limit, prefix, end_marker, http_conn)[1] if listing: rv[1].extend(listing) return rv parsed, conn = http_conn qs = 'format=json' if marker: qs += '&marker=%s' % quote(marker) if limit: qs += '&limit=%d' % limit if prefix: qs += '&prefix=%s' % quote(prefix) if end_marker: qs += '&end_marker=%s' % quote(end_marker) full_path = '%s?%s' % (parsed.path, qs) headers = {'X-Auth-Token': token} method = 'GET' conn.request(method, full_path, '', headers) resp = conn.getresponse() body = resp.read() http_log(("%s?%s" % (url, qs), method,), {'headers': headers}, resp, body) resp_headers = {} for header, value in resp.getheaders(): resp_headers[header.lower()] = value if resp.status < 200 or resp.status >= 300: raise ClientException('Account GET failed', http_scheme=parsed.scheme, http_host=conn.host, http_port=conn.port, http_path=parsed.path, http_query=qs, http_status=resp.status, http_reason=resp.reason, http_response_content=body) if resp.status == 204: return resp_headers, [] return resp_headers, json_loads(body) def head_account(url, token, http_conn=None): """ Get account stats. 
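For example (illustrative sketch; the storage URL and ``token`` value are
    placeholders)::

        headers = head_account('https://swift.example.com/v1/AUTH_test', token)
        container_count = int(headers.get('x-account-container-count', 0))
        bytes_used = int(headers.get('x-account-bytes-used', 0))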
:param url: storage URL :param token: auth token :param http_conn: HTTP connection object (If None, it will create the conn object) :returns: a dict containing the response's headers (all header names will be lowercase) :raises ClientException: HTTP HEAD request failed """ if http_conn: parsed, conn = http_conn else: parsed, conn = http_connection(url) method = "HEAD" headers = {'X-Auth-Token': token} conn.request(method, parsed.path, '', headers) resp = conn.getresponse() body = resp.read() http_log((url, method,), {'headers': headers}, resp, body) if resp.status < 200 or resp.status >= 300: raise ClientException('Account HEAD failed', http_scheme=parsed.scheme, http_host=conn.host, http_port=conn.port, http_path=parsed.path, http_status=resp.status, http_reason=resp.reason, http_response_content=body) resp_headers = {} for header, value in resp.getheaders(): resp_headers[header.lower()] = value return resp_headers def post_account(url, token, headers, http_conn=None, response_dict=None): """ Update an account's metadata. :param url: storage URL :param token: auth token :param headers: additional headers to include in the request :param http_conn: HTTP connection object (If None, it will create the conn object) :param response_dict: an optional dictionary into which to place the response - status, reason and headers :raises ClientException: HTTP POST request failed """ if http_conn: parsed, conn = http_conn else: parsed, conn = http_connection(url) method = 'POST' headers['X-Auth-Token'] = token conn.request(method, parsed.path, '', headers) resp = conn.getresponse() body = resp.read() http_log((url, method,), {'headers': headers}, resp, body) store_response(resp, response_dict) if resp.status < 200 or resp.status >= 300: raise ClientException('Account POST failed', http_scheme=parsed.scheme, http_host=conn.host, http_port=conn.port, http_path=parsed.path, http_status=resp.status, http_reason=resp.reason, http_response_content=body) def get_container(url, token, container, marker=None, limit=None, prefix=None, delimiter=None, end_marker=None, path=None, http_conn=None, full_listing=False): """ Get a listing of objects for the container. :param url: storage URL :param token: auth token :param container: container name to get a listing for :param marker: marker query :param limit: limit query :param prefix: prefix query :param delimiter: string to delimit the queries on :param end_marker: marker query :param path: path query (equivalent: "delimiter=/" and "prefix=path/") :param http_conn: HTTP connection object (If None, it will create the conn object) :param full_listing: if True, return a full listing, else returns a max of 10000 listings :returns: a tuple of (response headers, a list of objects) The response headers will be a dict and all header names will be lowercase. 
:raises ClientException: HTTP GET request failed """ if not http_conn: http_conn = http_connection(url) if full_listing: rv = get_container(url, token, container, marker, limit, prefix, delimiter, end_marker, path, http_conn) listing = rv[1] while listing: if not delimiter: marker = listing[-1]['name'] else: marker = listing[-1].get('name', listing[-1].get('subdir')) listing = get_container(url, token, container, marker, limit, prefix, delimiter, end_marker, path, http_conn)[1] if listing: rv[1].extend(listing) return rv parsed, conn = http_conn cont_path = '%s/%s' % (parsed.path, quote(container)) qs = 'format=json' if marker: qs += '&marker=%s' % quote(marker) if limit: qs += '&limit=%d' % limit if prefix: qs += '&prefix=%s' % quote(prefix) if delimiter: qs += '&delimiter=%s' % quote(delimiter) if end_marker: qs += '&end_marker=%s' % quote(end_marker) if path: qs += '&path=%s' % quote(path) headers = {'X-Auth-Token': token} method = 'GET' conn.request(method, '%s?%s' % (cont_path, qs), '', headers) resp = conn.getresponse() body = resp.read() http_log(('%(url)s%(cont_path)s?%(qs)s' % {'url': url.replace(parsed.path, ''), 'cont_path': cont_path, 'qs': qs}, method,), {'headers': headers}, resp, body) if resp.status < 200 or resp.status >= 300: raise ClientException('Container GET failed', http_scheme=parsed.scheme, http_host=conn.host, http_port=conn.port, http_path=cont_path, http_query=qs, http_status=resp.status, http_reason=resp.reason, http_response_content=body) resp_headers = {} for header, value in resp.getheaders(): resp_headers[header.lower()] = value if resp.status == 204: return resp_headers, [] return resp_headers, json_loads(body) def head_container(url, token, container, http_conn=None, headers=None): """ Get container stats. :param url: storage URL :param token: auth token :param container: container name to get stats for :param http_conn: HTTP connection object (If None, it will create the conn object) :returns: a dict containing the response's headers (all header names will be lowercase) :raises ClientException: HTTP HEAD request failed """ if http_conn: parsed, conn = http_conn else: parsed, conn = http_connection(url) path = '%s/%s' % (parsed.path, quote(container)) method = 'HEAD' req_headers = {'X-Auth-Token': token} if headers: req_headers.update(headers) conn.request(method, path, '', req_headers) resp = conn.getresponse() body = resp.read() http_log(('%s%s' % (url.replace(parsed.path, ''), path), method,), {'headers': req_headers}, resp, body) if resp.status < 200 or resp.status >= 300: raise ClientException('Container HEAD failed', http_scheme=parsed.scheme, http_host=conn.host, http_port=conn.port, http_path=path, http_status=resp.status, http_reason=resp.reason, http_response_content=body) resp_headers = {} for header, value in resp.getheaders(): resp_headers[header.lower()] = value return resp_headers def put_container(url, token, container, headers=None, http_conn=None, response_dict=None): """ Create a container :param url: storage URL :param token: auth token :param container: container name to create :param headers: additional headers to include in the request :param http_conn: HTTP connection object (If None, it will create the conn object) :param response_dict: an optional dictionary into which to place the response - status, reason and headers :raises ClientException: HTTP PUT request failed """ if http_conn: parsed, conn = http_conn else: parsed, conn = http_connection(url) path = '%s/%s' % (parsed.path, quote(container)) method = 'PUT' if not headers: 
headers = {} headers['X-Auth-Token'] = token if not 'content-length' in (k.lower() for k in headers): headers['Content-Length'] = '0' conn.request(method, path, '', headers) resp = conn.getresponse() body = resp.read() store_response(resp, response_dict) http_log(('%s%s' % (url.replace(parsed.path, ''), path), method,), {'headers': headers}, resp, body) if resp.status < 200 or resp.status >= 300: raise ClientException('Container PUT failed', http_scheme=parsed.scheme, http_host=conn.host, http_port=conn.port, http_path=path, http_status=resp.status, http_reason=resp.reason, http_response_content=body) def post_container(url, token, container, headers, http_conn=None, response_dict=None): """ Update a container's metadata. :param url: storage URL :param token: auth token :param container: container name to update :param headers: additional headers to include in the request :param http_conn: HTTP connection object (If None, it will create the conn object) :param response_dict: an optional dictionary into which to place the response - status, reason and headers :raises ClientException: HTTP POST request failed """ if http_conn: parsed, conn = http_conn else: parsed, conn = http_connection(url) path = '%s/%s' % (parsed.path, quote(container)) method = 'POST' headers['X-Auth-Token'] = token if not 'content-length' in (k.lower() for k in headers): headers['Content-Length'] = '0' conn.request(method, path, '', headers) resp = conn.getresponse() body = resp.read() http_log(('%s%s' % (url.replace(parsed.path, ''), path), method,), {'headers': headers}, resp, body) store_response(resp, response_dict) if resp.status < 200 or resp.status >= 300: raise ClientException('Container POST failed', http_scheme=parsed.scheme, http_host=conn.host, http_port=conn.port, http_path=path, http_status=resp.status, http_reason=resp.reason, http_response_content=body) def delete_container(url, token, container, http_conn=None, response_dict=None): """ Delete a container :param url: storage URL :param token: auth token :param container: container name to delete :param http_conn: HTTP connection object (If None, it will create the conn object) :param response_dict: an optional dictionary into which to place the response - status, reason and headers :raises ClientException: HTTP DELETE request failed """ if http_conn: parsed, conn = http_conn else: parsed, conn = http_connection(url) path = '%s/%s' % (parsed.path, quote(container)) headers = {'X-Auth-Token': token} method = 'DELETE' conn.request(method, path, '', headers) resp = conn.getresponse() body = resp.read() http_log(('%s%s' % (url.replace(parsed.path, ''), path), method,), {'headers': headers}, resp, body) store_response(resp, response_dict) if resp.status < 200 or resp.status >= 300: raise ClientException('Container DELETE failed', http_scheme=parsed.scheme, http_host=conn.host, http_port=conn.port, http_path=path, http_status=resp.status, http_reason=resp.reason, http_response_content=body) def get_object(url, token, container, name, http_conn=None, resp_chunk_size=None, query_string=None, response_dict=None, headers=None): """ Get an object :param url: storage URL :param token: auth token :param container: container name that the object is in :param name: object name to get :param http_conn: HTTP connection object (If None, it will create the conn object) :param resp_chunk_size: if defined, chunk size of data to read. NOTE: If you specify a resp_chunk_size you must fully read the object's contents before making another request. 
:param query_string: if set will be appended with '?' to generated path :param response_dict: an optional dictionary into which to place the response - status, reason and headers :param headers: an optional dictionary with additional headers to include in the request :returns: a tuple of (response headers, the object's contents) The response headers will be a dict and all header names will be lowercase. :raises ClientException: HTTP GET request failed """ if http_conn: parsed, conn = http_conn else: parsed, conn = http_connection(url) path = '%s/%s/%s' % (parsed.path, quote(container), quote(name)) if query_string: path += '?' + query_string method = 'GET' headers = headers.copy() if headers else {} headers['X-Auth-Token'] = token conn.request(method, path, '', headers) resp = conn.getresponse() parsed_response = {} store_response(resp, parsed_response) if response_dict is not None: response_dict.update(parsed_response) if resp.status < 200 or resp.status >= 300: body = resp.read() http_log(('%s%s' % (url.replace(parsed.path, ''), path), method,), {'headers': headers}, resp, body) raise ClientException('Object GET failed', http_scheme=parsed.scheme, http_host=conn.host, http_port=conn.port, http_path=path, http_status=resp.status, http_reason=resp.reason, http_response_content=body) if resp_chunk_size: def _object_body(): buf = resp.read(resp_chunk_size) while buf: yield buf buf = resp.read(resp_chunk_size) object_body = _object_body() else: object_body = resp.read() http_log(('%s%s' % (url.replace(parsed.path, ''), path), method,), {'headers': headers}, resp, None) return parsed_response['headers'], object_body def head_object(url, token, container, name, http_conn=None): """ Get object info :param url: storage URL :param token: auth token :param container: container name that the object is in :param name: object name to get info for :param http_conn: HTTP connection object (If None, it will create the conn object) :returns: a dict containing the response's headers (all header names will be lowercase) :raises ClientException: HTTP HEAD request failed """ if http_conn: parsed, conn = http_conn else: parsed, conn = http_connection(url) path = '%s/%s/%s' % (parsed.path, quote(container), quote(name)) method = 'HEAD' headers = {'X-Auth-Token': token} conn.request(method, path, '', headers) resp = conn.getresponse() body = resp.read() http_log(('%s%s' % (url.replace(parsed.path, ''), path), method,), {'headers': headers}, resp, body) if resp.status < 200 or resp.status >= 300: raise ClientException('Object HEAD failed', http_scheme=parsed.scheme, http_host=conn.host, http_port=conn.port, http_path=path, http_status=resp.status, http_reason=resp.reason, http_response_content=body) resp_headers = {} for header, value in resp.getheaders(): resp_headers[header.lower()] = value return resp_headers def put_object(url, token=None, container=None, name=None, contents=None, content_length=None, etag=None, chunk_size=None, content_type=None, headers=None, http_conn=None, proxy=None, query_string=None, response_dict=None): """ Put an object :param url: storage URL :param token: auth token; if None, no token will be sent :param container: container name that the object is in; if None, the container name is expected to be part of the url :param name: object name to put; if None, the object name is expected to be part of the url :param contents: a string or a file like object to read object data from; if None, a zero-byte put will be done :param content_length: value to send as content-length header; also 
limits the amount read from contents; if None, it will be computed via the contents or chunked transfer encoding will be used :param etag: etag of contents; if None, no etag will be sent :param chunk_size: chunk size of data to write; it defaults to 65536; used only if the the contents object has a 'read' method, eg. file-like objects, ignored otherwise :param content_type: value to send as content-type header; if None, no content-type will be set (remote end will likely try to auto-detect it) :param headers: additional headers to include in the request, if any :param http_conn: HTTP connection object (If None, it will create the conn object) :param proxy: proxy to connect through, if any; None by default; str of the format 'http://127.0.0.1:8888' to set one :param query_string: if set will be appended with '?' to generated path :param response_dict: an optional dictionary into which to place the response - status, reason and headers :returns: etag :raises ClientException: HTTP PUT request failed """ if http_conn: parsed, conn = http_conn else: parsed, conn = http_connection(url, proxy=proxy) path = parsed.path if container: path = '%s/%s' % (path.rstrip('/'), quote(container)) if name: path = '%s/%s' % (path.rstrip('/'), quote(name)) if query_string: path += '?' + query_string if headers: headers = dict(headers) else: headers = {} if token: headers['X-Auth-Token'] = token if etag: headers['ETag'] = etag.strip('"') if content_length is not None: headers['Content-Length'] = str(content_length) else: for n, v in headers.iteritems(): if n.lower() == 'content-length': content_length = int(v) if content_type is not None: headers['Content-Type'] = content_type if not contents: headers['Content-Length'] = '0' if hasattr(contents, 'read'): if chunk_size is None: chunk_size = 65536 if content_length is None: def chunk_reader(): while True: data = contents.read(chunk_size) if not data: break yield data conn.putrequest(path, headers=headers, data=chunk_reader()) else: # Fixes https://github.com/kennethreitz/requests/issues/1648 data = LengthWrapper(contents, content_length) conn.putrequest(path, headers=headers, data=data) else: if chunk_size is not None: warn_msg = '%s object has no \"read\" method, ignoring chunk_size'\ % type(contents).__name__ warnings.warn(warn_msg, stacklevel=2) conn.request('PUT', path, contents, headers) resp = conn.getresponse() body = resp.read() headers = {'X-Auth-Token': token} http_log(('%s%s' % (url.replace(parsed.path, ''), path), 'PUT',), {'headers': headers}, resp, body) store_response(resp, response_dict) if resp.status < 200 or resp.status >= 300: raise ClientException('Object PUT failed', http_scheme=parsed.scheme, http_host=conn.host, http_port=conn.port, http_path=path, http_status=resp.status, http_reason=resp.reason, http_response_content=body) return resp.getheader('etag', '').strip('"') def post_object(url, token, container, name, headers, http_conn=None, response_dict=None): """ Update object metadata :param url: storage URL :param token: auth token :param container: container name that the object is in :param name: name of the object to update :param headers: additional headers to include in the request :param http_conn: HTTP connection object (If None, it will create the conn object) :param response_dict: an optional dictionary into which to place the response - status, reason and headers :raises ClientException: HTTP POST request failed """ if http_conn: parsed, conn = http_conn else: parsed, conn = http_connection(url) path = '%s/%s/%s' % (parsed.path, 
quote(container), quote(name)) headers['X-Auth-Token'] = token conn.request('POST', path, '', headers) resp = conn.getresponse() body = resp.read() http_log(('%s%s' % (url.replace(parsed.path, ''), path), 'POST',), {'headers': headers}, resp, body) store_response(resp, response_dict) if resp.status < 200 or resp.status >= 300: raise ClientException('Object POST failed', http_scheme=parsed.scheme, http_host=conn.host, http_port=conn.port, http_path=path, http_status=resp.status, http_reason=resp.reason, http_response_content=body) def delete_object(url, token=None, container=None, name=None, http_conn=None, headers=None, proxy=None, query_string=None, response_dict=None): """ Delete object :param url: storage URL :param token: auth token; if None, no token will be sent :param container: container name that the object is in; if None, the container name is expected to be part of the url :param name: object name to delete; if None, the object name is expected to be part of the url :param http_conn: HTTP connection object (If None, it will create the conn object) :param headers: additional headers to include in the request :param proxy: proxy to connect through, if any; None by default; str of the format 'http://127.0.0.1:8888' to set one :param query_string: if set will be appended with '?' to generated path :param response_dict: an optional dictionary into which to place the response - status, reason and headers :raises ClientException: HTTP DELETE request failed """ if http_conn: parsed, conn = http_conn else: parsed, conn = http_connection(url, proxy=proxy) path = parsed.path if container: path = '%s/%s' % (path.rstrip('/'), quote(container)) if name: path = '%s/%s' % (path.rstrip('/'), quote(name)) if query_string: path += '?' + query_string if headers: headers = dict(headers) else: headers = {} if token: headers['X-Auth-Token'] = token conn.request('DELETE', path, '', headers) resp = conn.getresponse() body = resp.read() http_log(('%s%s' % (url.replace(parsed.path, ''), path), 'DELETE',), {'headers': headers}, resp, body) store_response(resp, response_dict) if resp.status < 200 or resp.status >= 300: raise ClientException('Object DELETE failed', http_scheme=parsed.scheme, http_host=conn.host, http_port=conn.port, http_path=path, http_status=resp.status, http_reason=resp.reason, http_response_content=body) def get_capabilities(http_conn): """ Get cluster capability infos. 
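A typical call targets the cluster's ``/info`` endpoint, mirroring
    :meth:`Connection.get_capabilities`; the host name below is a placeholder::

        http_conn = http_connection('https://swift.example.com/info')
        capabilities = get_capabilities(http_conn)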
:param http_conn: HTTP connection :returns: a dict containing the cluster capabilities :raises ClientException: HTTP Capabilities GET failed """ parsed, conn = http_conn conn.request('GET', parsed.path, '') resp = conn.getresponse() body = resp.read() http_log((parsed.geturl(), 'GET',), {'headers': {}}, resp, body) if resp.status < 200 or resp.status >= 300: raise ClientException('Capabilities GET failed', http_scheme=parsed.scheme, http_host=conn.host, http_port=conn.port, http_path=parsed.path, http_status=resp.status, http_reason=resp.reason, http_response_content=body) return json_loads(body) class Connection(object): """Convenience class to make requests that will also retry the request""" def __init__(self, authurl=None, user=None, key=None, retries=5, preauthurl=None, preauthtoken=None, snet=False, starting_backoff=1, max_backoff=64, tenant_name=None, os_options=None, auth_version="1", cacert=None, insecure=False, ssl_compression=True, retry_on_ratelimit=False): """ :param authurl: authentication URL :param user: user name to authenticate as :param key: key/password to authenticate with :param retries: Number of times to retry the request before failing :param preauthurl: storage URL (if you have already authenticated) :param preauthtoken: authentication token (if you have already authenticated) note authurl/user/key/tenant_name are not required when specifying preauthtoken :param snet: use SERVICENET internal network default is False :param starting_backoff: initial delay between retries (seconds) :param max_backoff: maximum delay between retries (seconds) :param auth_version: OpenStack auth version, default is 1.0 :param tenant_name: The tenant/account name, required when connecting to a auth 2.0 system. :param os_options: The OpenStack options which can have tenant_id, auth_token, service_type, endpoint_type, tenant_name, object_storage_url, region_name :param insecure: Allow to access servers without checking SSL certs. The server's certificate will not be verified. :param ssl_compression: Whether to enable compression at the SSL layer. If set to 'False' and the pyOpenSSL library is present an attempt to disable SSL compression will be made. This may provide a performance increase for https upload/download operations. :param retry_on_ratelimit: by default, a ratelimited connection will raise an exception to the caller. Setting this parameter to True will cause a retry after a backoff. 
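A short usage sketch (the auth URL, user and key values are placeholders
        for a v1.0 auth system)::

            conn = Connection(authurl='https://auth.example.com/auth/v1.0',
                              user='test:tester', key='testing', retries=3)
            conn.put_container('photos')
            conn.put_object('photos', 'note.txt', contents='hello swift')
            headers, listing = conn.get_container('photos')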
""" self.authurl = authurl self.user = user self.key = key self.retries = retries self.http_conn = None self.url = preauthurl self.token = preauthtoken self.attempts = 0 self.snet = snet self.starting_backoff = starting_backoff self.max_backoff = max_backoff self.auth_version = auth_version self.os_options = os_options or {} if tenant_name: self.os_options['tenant_name'] = tenant_name self.cacert = cacert self.insecure = insecure self.ssl_compression = ssl_compression self.auth_end_time = 0 self.retry_on_ratelimit = retry_on_ratelimit def close(self): if self.http_conn and type(self.http_conn) is tuple\ and len(self.http_conn) > 1: conn = self.http_conn[1] if hasattr(conn, 'close') and callable(conn.close): conn.close() self.http_conn = None def get_auth(self): return get_auth(self.authurl, self.user, self.key, snet=self.snet, auth_version=self.auth_version, os_options=self.os_options, cacert=self.cacert, insecure=self.insecure) def http_connection(self): return http_connection(self.url, cacert=self.cacert, insecure=self.insecure, ssl_compression=self.ssl_compression) def _add_response_dict(self, target_dict, kwargs): if target_dict is not None: response_dict = kwargs['response_dict'] if 'response_dicts' in target_dict: target_dict['response_dicts'].append(response_dict) else: target_dict['response_dicts'] = [response_dict] target_dict.update(response_dict) def _retry(self, reset_func, func, *args, **kwargs): self.attempts = 0 retried_auth = False backoff = self.starting_backoff caller_response_dict = kwargs.pop('response_dict', None) while self.attempts <= self.retries: self.attempts += 1 try: if not self.url or not self.token: self.url, self.token = self.get_auth() self.http_conn = None self.auth_end_time = time() if not self.http_conn: self.http_conn = self.http_connection() kwargs['http_conn'] = self.http_conn if caller_response_dict is not None: kwargs['response_dict'] = {} rv = func(self.url, self.token, *args, **kwargs) self._add_response_dict(caller_response_dict, kwargs) return rv except SSLError: raise except (socket.error, RequestException) as e: self._add_response_dict(caller_response_dict, kwargs) if self.attempts > self.retries: logger.exception(e) raise self.http_conn = None except ClientException as err: self._add_response_dict(caller_response_dict, kwargs) if self.attempts > self.retries: logger.exception(err) raise if err.http_status == 401: self.url = self.token = None if retried_auth or not all((self.authurl, self.user, self.key)): logger.exception(err) raise retried_auth = True elif err.http_status == 408: self.http_conn = None elif 500 <= err.http_status <= 599: pass elif self.retry_on_ratelimit and err.http_status == 498: pass else: logger.exception(err) raise sleep(backoff) backoff = min(backoff * 2, self.max_backoff) if reset_func: reset_func(func, *args, **kwargs) def head_account(self): """Wrapper for :func:`head_account`""" return self._retry(None, head_account) def get_account(self, marker=None, limit=None, prefix=None, end_marker=None, full_listing=False): """Wrapper for :func:`get_account`""" # TODO(unknown): With full_listing=True this will restart the entire # listing with each retry. Need to make a better version that just # retries where it left off. 
return self._retry(None, get_account, marker=marker, limit=limit, prefix=prefix, end_marker=end_marker, full_listing=full_listing) def post_account(self, headers, response_dict=None): """Wrapper for :func:`post_account`""" return self._retry(None, post_account, headers, response_dict=response_dict) def head_container(self, container): """Wrapper for :func:`head_container`""" return self._retry(None, head_container, container) def get_container(self, container, marker=None, limit=None, prefix=None, delimiter=None, end_marker=None, path=None, full_listing=False): """Wrapper for :func:`get_container`""" # TODO(unknown): With full_listing=True this will restart the entire # listing with each retry. Need to make a better version that just # retries where it left off. return self._retry(None, get_container, container, marker=marker, limit=limit, prefix=prefix, delimiter=delimiter, end_marker=end_marker, path=path, full_listing=full_listing) def put_container(self, container, headers=None, response_dict=None): """Wrapper for :func:`put_container`""" return self._retry(None, put_container, container, headers=headers, response_dict=response_dict) def post_container(self, container, headers, response_dict=None): """Wrapper for :func:`post_container`""" return self._retry(None, post_container, container, headers, response_dict=response_dict) def delete_container(self, container, response_dict=None): """Wrapper for :func:`delete_container`""" return self._retry(None, delete_container, container, response_dict=response_dict) def head_object(self, container, obj): """Wrapper for :func:`head_object`""" return self._retry(None, head_object, container, obj) def get_object(self, container, obj, resp_chunk_size=None, query_string=None, response_dict=None, headers=None): """Wrapper for :func:`get_object`""" return self._retry(None, get_object, container, obj, resp_chunk_size=resp_chunk_size, query_string=query_string, response_dict=response_dict, headers=headers) def put_object(self, container, obj, contents, content_length=None, etag=None, chunk_size=None, content_type=None, headers=None, query_string=None, response_dict=None): """Wrapper for :func:`put_object`""" def _default_reset(*args, **kwargs): raise ClientException('put_object(%r, %r, ...) failure and no ' 'ability to reset contents for reupload.' 
% (container, obj)) if isinstance(contents, str) or not contents: # if its a str or None then you can retry as much as you want reset_func = None else: reset_func = _default_reset if self.retries > 0: tell = getattr(contents, 'tell', None) seek = getattr(contents, 'seek', None) if tell and seek: orig_pos = tell() reset_func = lambda *a, **k: seek(orig_pos) return self._retry(reset_func, put_object, container, obj, contents, content_length=content_length, etag=etag, chunk_size=chunk_size, content_type=content_type, headers=headers, query_string=query_string, response_dict=response_dict) def post_object(self, container, obj, headers, response_dict=None): """Wrapper for :func:`post_object`""" return self._retry(None, post_object, container, obj, headers, response_dict=response_dict) def delete_object(self, container, obj, query_string=None, response_dict=None): """Wrapper for :func:`delete_object`""" return self._retry(None, delete_object, container, obj, query_string=query_string, response_dict=response_dict) def get_capabilities(self, url=None): if not url: url, _ = self.get_auth() scheme = urlparse(url).scheme netloc = urlparse(url).netloc url = scheme + '://' + netloc + '/info' http_conn = http_connection(url, ssl_compression=self.ssl_compression) return get_capabilities(http_conn) python-swiftclient-2.0.3/swiftclient/command_helpers.py0000664000175300017540000001061312304670635024577 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
from swiftclient.utils import prt_bytes def stat_account(conn, options, thread_manager): headers = conn.head_account() if options.verbose > 1: thread_manager.print_items(( ('StorageURL', conn.url), ('Auth Token', conn.token), )) container_count = int(headers.get('x-account-container-count', 0)) object_count = prt_bytes(headers.get('x-account-object-count', 0), options.human).lstrip() bytes_used = prt_bytes(headers.get('x-account-bytes-used', 0), options.human).lstrip() thread_manager.print_items(( ('Account', conn.url.rsplit('/', 1)[-1]), ('Containers', container_count), ('Objects', object_count), ('Bytes', bytes_used), )) thread_manager.print_headers(headers, meta_prefix='x-account-meta-', exclude_headers=( 'content-length', 'date', 'x-account-container-count', 'x-account-object-count', 'x-account-bytes-used')) def stat_container(conn, options, args, thread_manager): headers = conn.head_container(args[0]) if options.verbose > 1: path = '%s/%s' % (conn.url, args[0]) thread_manager.print_items(( ('URL', path), ('Auth Token', conn.token), )) object_count = prt_bytes( headers.get('x-container-object-count', 0), options.human).lstrip() bytes_used = prt_bytes(headers.get('x-container-bytes-used', 0), options.human).lstrip() thread_manager.print_items(( ('Account', conn.url.rsplit('/', 1)[-1]), ('Container', args[0]), ('Objects', object_count), ('Bytes', bytes_used), ('Read ACL', headers.get('x-container-read', '')), ('Write ACL', headers.get('x-container-write', '')), ('Sync To', headers.get('x-container-sync-to', '')), ('Sync Key', headers.get('x-container-sync-key', '')), )) thread_manager.print_headers(headers, meta_prefix='x-container-meta-', exclude_headers=( 'content-length', 'date', 'x-container-object-count', 'x-container-bytes-used', 'x-container-read', 'x-container-write', 'x-container-sync-to', 'x-container-sync-key')) def stat_object(conn, options, args, thread_manager): headers = conn.head_object(args[0], args[1]) if options.verbose > 1: path = '%s/%s/%s' % (conn.url, args[0], args[1]) thread_manager.print_items(( ('URL', path), ('Auth Token', conn.token), )) content_length = prt_bytes(headers.get('content-length', 0), options.human).lstrip() thread_manager.print_items(( ('Account', conn.url.rsplit('/', 1)[-1]), ('Container', args[0]), ('Object', args[1]), ('Content Type', headers.get('content-type')), ('Content Length', content_length), ('Last Modified', headers.get('last-modified')), ('ETag', headers.get('etag')), ('Manifest', headers.get('x-object-manifest')), ), skip_missing=True) thread_manager.print_headers(headers, meta_prefix='x-object-meta-', exclude_headers=( 'content-type', 'content-length', 'last-modified', 'etag', 'date', 'x-object-manifest')) python-swiftclient-2.0.3/swiftclient/utils.py0000664000175300017540000000436712304670635022610 0ustar jenkinsjenkins00000000000000# Copyright (c) 2010-2012 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
"""Miscellaneous utility functions for use with Swift.""" TRUE_VALUES = set(('true', '1', 'yes', 'on', 't', 'y')) def config_true_value(value): """ Returns True if the value is either True or a string in TRUE_VALUES. Returns False otherwise. This function come from swift.common.utils.config_true_value() """ return value is True or \ (isinstance(value, basestring) and value.lower() in TRUE_VALUES) def prt_bytes(bytes, human_flag): """ convert a number > 1024 to printable format, either in 4 char -h format as with ls -lh or return as 12 char right justified string """ if human_flag: suffix = '' mods = list('KMGTPEZY') temp = float(bytes) if temp > 0: while (temp > 1023): try: suffix = mods.pop(0) except IndexError: break temp /= 1024.0 if suffix != '': if temp >= 10: bytes = '%3d%s' % (temp, suffix) else: bytes = '%.1f%s' % (temp, suffix) if suffix == '': # must be < 1024 bytes = '%4s' % bytes else: bytes = '%12s' % bytes return(bytes) class LengthWrapper(object): def __init__(self, readable, length): self._length = self._remaining = length self._readable = readable def __len__(self): return self._length def read(self, *args, **kwargs): if self._remaining <= 0: return '' chunk = self._readable.read( *args, **kwargs)[:self._remaining] self._remaining -= len(chunk) return chunk python-swiftclient-2.0.3/swiftclient/multithreading.py0000664000175300017540000002655412304670635024472 0ustar jenkinsjenkins00000000000000# Copyright (c) 2010-2012 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from itertools import chain import sys from time import sleep from Queue import Queue from threading import Thread from traceback import format_exception from swiftclient.exceptions import ClientException class StopWorkerThreadSignal(object): pass class QueueFunctionThread(Thread): """ Calls `func`` for each item in ``queue``; ``func`` is called with a de-queued item as the first arg followed by ``*args`` and ``**kwargs``. Any exceptions raised by ``func`` are stored in :attr:`self.exc_infos`. If the optional kwarg ``store_results`` is specified, it must be a list and each result of invoking ``func`` will be appended to that list. Putting a :class:`StopWorkerThreadSignal` instance into queue will cause this thread to exit. """ def __init__(self, queue, func, *args, **kwargs): """ :param queue: A :class:`Queue` object from which work jobs will be pulled. :param func: A callable which will be invoked with a dequeued item followed by ``*args`` and ``**kwargs``. :param \*args: Optional positional arguments for ``func``. :param \*\*kwargs: Optional kwargs for func. If the kwarg ``store_results`` is specified, its value must be a list, and every result from invoking ``func`` will be appended to the supplied list. The kwarg ``store_results`` will not be passed into ``func``. 
""" Thread.__init__(self) self.queue = queue self.func = func self.args = args self.kwargs = kwargs self.exc_infos = [] self.store_results = kwargs.pop('store_results', None) def run(self): while True: item = self.queue.get() if isinstance(item, StopWorkerThreadSignal): break try: result = self.func(item, *self.args, **self.kwargs) if self.store_results is not None: self.store_results.append(result) except Exception: self.exc_infos.append(sys.exc_info()) class QueueFunctionManager(object): """ A context manager to handle the life-cycle of a single :class:`Queue` and a list of associated :class:`QueueFunctionThread` instances. This class is not usually instantiated directly. Instead, call the :meth:`MultiThreadingManager.queue_manager` object method, which will return an instance of this class. When entering the context, ``thread_count`` :class:`QueueFunctionThread` instances are created and started. The input queue is returned. Inside the context, any work item put into the queue will get worked on by one of the :class:`QueueFunctionThread` instances. When the context is exited, all threads are sent a :class:`StopWorkerThreadSignal` instance and then all threads are waited upon. Finally, any exceptions from any of the threads are reported on via the supplied ``thread_manager``'s :meth:`error` method. If an ``error_counter`` list was supplied on instantiation, its first element is incremented once for every exception which occurred. """ def __init__(self, func, thread_count, thread_manager, thread_args=None, thread_kwargs=None, error_counter=None, connection_maker=None): """ :param func: The worker function which will be passed into each :class:`QueueFunctionThread`'s constructor. :param thread_count: The number of worker threads to run. :param thread_manager: An instance of :class:`MultiThreadingManager`. :param thread_args: Optional positional arguments to be passed into each invocation of ``func`` after the de-queued work item. :param thread_kwargs: Optional keyword arguments to be passed into each invocation of ``func``. If a list is supplied as the ``store_results`` keyword argument, it will be filled with every result of invoking ``func`` in all threads. :param error_counter: Optional list containing one integer. If supplied, the list's first element will be incremented once for each exception in any thread. This happens only when exiting the context. :param connection_maker: Optional callable. If supplied, this callable will be invoked once per created thread, and the result will be passed into func after the de-queued work item but before ``thread_args`` and ``thread_kwargs``. This is used to ensure each thread has its own connection to Swift. 
""" self.func = func self.thread_count = thread_count self.thread_manager = thread_manager self.error_counter = error_counter self.connection_maker = connection_maker self.queue = Queue(10000) self.thread_list = [] self.thread_args = thread_args if thread_args else () self.thread_kwargs = thread_kwargs if thread_kwargs else {} def __enter__(self): for _junk in range(self.thread_count): if self.connection_maker: thread_args = (self.connection_maker(),) + self.thread_args else: thread_args = self.thread_args qf_thread = QueueFunctionThread(self.queue, self.func, *thread_args, **self.thread_kwargs) qf_thread.start() self.thread_list.append(qf_thread) return self.queue def __exit__(self, exc_type, exc_value, traceback): for thread in [t for t in self.thread_list if t.isAlive()]: self.queue.put(StopWorkerThreadSignal()) while any(map(QueueFunctionThread.is_alive, self.thread_list)): sleep(0.05) for thread in self.thread_list: for info in thread.exc_infos: if self.error_counter: self.error_counter[0] += 1 if isinstance(info[1], ClientException): self.thread_manager.error(str(info[1])) else: self.thread_manager.error(''.join(format_exception(*info))) class MultiThreadingManager(object): """ One object to manage context for multi-threading. This should make bin/swift less error-prone and allow us to test this code. This object is a context manager and returns itself into the context. When entering the context, two printing threads are created (see below) and they are waited on and cleaned up when exiting the context. A convenience method, :meth:`queue_manager`, is provided to create a :class:`QueueFunctionManager` context manager (a thread-pool with an associated input queue for work items). Also, thread-safe printing to two streams is provided. The :meth:`print_msg` method will print to the supplied ``print_stream`` (defaults to ``sys.stdout``) and the :meth:`error` method will print to the supplied ``error_stream`` (defaults to ``sys.stderr``). Both of these printing methods will format the given string with any supplied ``*args`` (a la printf) and encode the result to utf8 if necessary. The attribute :attr:`self.error_count` is incremented once per error message printed, so an application can tell if any worker threads encountered exceptions or otherwise called :meth:`error` on this instance. The swift command-line tool uses this to exit non-zero if any error strings were printed. """ def __init__(self, print_stream=sys.stdout, error_stream=sys.stderr): """ :param print_stream: The stream to which :meth:`print_msg` sends formatted messages, encoded to utf8 if necessary. :param error_stream: The stream to which :meth:`error` sends formatted messages, encoded to utf8 if necessary. 
""" self.print_stream = print_stream self.printer = QueueFunctionManager(self._print, 1, self) self.error_stream = error_stream self.error_printer = QueueFunctionManager(self._print_error, 1, self) self.error_count = 0 def __enter__(self): self.printer.__enter__() self.error_printer.__enter__() return self def __exit__(self, exc_type, exc_value, traceback): self.error_printer.__exit__(exc_type, exc_value, traceback) self.printer.__exit__(exc_type, exc_value, traceback) def queue_manager(self, func, thread_count, *args, **kwargs): connection_maker = kwargs.pop('connection_maker', None) error_counter = kwargs.pop('error_counter', None) return QueueFunctionManager(func, thread_count, self, thread_args=args, thread_kwargs=kwargs, connection_maker=connection_maker, error_counter=error_counter) def print_msg(self, msg, *fmt_args): if fmt_args: msg = msg % fmt_args self.printer.queue.put(msg) def print_items(self, items, offset=14, skip_missing=False): lines = [] template = '%%%ds: %%s' % offset for k, v in items: if skip_missing and not v: continue lines.append((template % (k, v)).rstrip()) self.print_msg('\n'.join(lines)) def print_headers(self, headers, meta_prefix='', exclude_headers=None, offset=14): exclude_headers = exclude_headers or [] meta_headers = [] other_headers = [] template = '%%%ds: %%s' % offset for key, value in headers.items(): if key.startswith(meta_prefix): meta_key = 'Meta %s' % key[len(meta_prefix):].title() meta_headers.append(template % (meta_key, value)) elif key not in exclude_headers: other_headers.append(template % (key.title(), value)) self.print_msg('\n'.join(chain(meta_headers, other_headers))) def error(self, msg, *fmt_args): if fmt_args: msg = msg % fmt_args self.error_printer.queue.put(msg) def _print(self, item, stream=None): if stream is None: stream = self.print_stream if isinstance(item, unicode): item = item.encode('utf8') print >>stream, item def _print_error(self, item): self.error_count += 1 return self._print(item, stream=self.error_stream) python-swiftclient-2.0.3/swiftclient/__init__.py0000664000175300017540000000206412304670635023177 0ustar jenkinsjenkins00000000000000# -*- encoding: utf-8 -*- # Copyright (c) 2012 Rackspace # flake8: noqa # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """" OpenStack Swift Python client binding. """ from .client import * # At setup.py time, we haven't installed anything yet, so there # is nothing that is able to set this version property. 
Squelching # that exception here should be fine; if there are problems with # pkg_resources in a real install, that will manifest itself as # an error still try: from swiftclient import version __version__ = version.version_string except Exception: pass python-swiftclient-2.0.3/.testr.conf0000664000175300017540000000023512304670635020617 0ustar jenkinsjenkins00000000000000[DEFAULT] test_command=${PYTHON:-python} -m subunit.run discover -t ./ ./tests $LISTOPT $IDOPTION test_id_option=--load-list $IDFILE test_list_option=--list python-swiftclient-2.0.3/setup.cfg0000664000175300017540000000164212304670716020355 0ustar jenkinsjenkins00000000000000[metadata] name = python-swiftclient summary = OpenStack Object Storage API Client Library description-file = README.rst author = OpenStack author-email = openstack-dev@lists.openstack.org home-page = http://www.openstack.org/ classifier = Environment :: OpenStack Intended Audience :: Information Technology Intended Audience :: System Administrators License :: OSI Approved :: Apache Software License Operating System :: POSIX :: Linux Programming Language :: Python Programming Language :: Python :: 2 Programming Language :: Python :: 2.7 Programming Language :: Python :: 2.6 [global] setup-hooks = pbr.hooks.setup_hook [files] packages = swiftclient scripts = bin/swift data_files = share/man/man1 = doc/manpages/swift.1 [build_sphinx] source-dir = doc/source build-dir = doc/build all_files = 1 [upload_sphinx] upload-dir = doc/build/html [egg_info] tag_build = tag_date = 0 tag_svn_revision = 0 python-swiftclient-2.0.3/README.rst0000664000175300017540000000132312304670635020217 0ustar jenkinsjenkins00000000000000Python bindings to the OpenStack Object Storage API =================================================== This is a python client for the Swift API. There's a Python API (the ``swiftclient`` module), and a command-line script (``swift``). Development takes place via the usual OpenStack processes as outlined in the `OpenStack wiki`__. The master repository is on GitHub__. __ http://wiki.openstack.org/HowToContribute __ http://github.com/openstack/python-swiftclient This code is based on the original client previously included with `OpenStack's swift`__. The python-swiftclient is licensed under the Apache License like the rest of OpenStack. __ http://github.com/openstack/swift .. contents:: Contents: :local: python-swiftclient-2.0.3/PKG-INFO0000664000175300017540000000311012304670716017621 0ustar jenkinsjenkins00000000000000Metadata-Version: 1.1 Name: python-swiftclient Version: 2.0.3 Summary: OpenStack Object Storage API Client Library Home-page: http://www.openstack.org/ Author: OpenStack Author-email: openstack-dev@lists.openstack.org License: UNKNOWN Description: Python bindings to the OpenStack Object Storage API =================================================== This is a python client for the Swift API. There's a Python API (the ``swiftclient`` module), and a command-line script (``swift``). Development takes place via the usual OpenStack processes as outlined in the `OpenStack wiki`__. The master repository is on GitHub__. __ http://wiki.openstack.org/HowToContribute __ http://github.com/openstack/python-swiftclient This code is based on the original client previously included with `OpenStack's swift`__. The python-swiftclient is licensed under the Apache License like the rest of OpenStack. __ http://github.com/openstack/swift ..
contents:: Contents: :local: Platform: UNKNOWN Classifier: Environment :: OpenStack Classifier: Intended Audience :: Information Technology Classifier: Intended Audience :: System Administrators Classifier: License :: OSI Approved :: Apache Software License Classifier: Operating System :: POSIX :: Linux Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 2.6 python-swiftclient-2.0.3/test-requirements.txt0000664000175300017540000000021712304670635022772 0ustar jenkinsjenkins00000000000000hacking>=0.8.0,<0.9 coverage>=3.6 discover mock>=1.0 python-keystoneclient>=0.6.0 sphinx>=1.1.2,<1.2 testrepository>=0.0.18 testtools>=0.9.34 python-swiftclient-2.0.3/setup.py0000664000175300017540000000141512304670635020244 0ustar jenkinsjenkins00000000000000#!/usr/bin/env python # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT import setuptools setuptools.setup( setup_requires=['pbr'], pbr=True) python-swiftclient-2.0.3/.coveragerc0000664000175300017540000000015612304670635020654 0ustar jenkinsjenkins00000000000000[run] branch = True source = swiftclient omit = swiftclient/openstack/common/* [report] ignore-errors = True python-swiftclient-2.0.3/bin/0000775000175300017540000000000012304670716017301 5ustar jenkinsjenkins00000000000000python-swiftclient-2.0.3/bin/swift0000775000175300017540000020251512304670635020370 0ustar jenkinsjenkins00000000000000#!/usr/bin/python -u # Copyright (c) 2010-2012 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import signal import socket import logging from errno import EEXIST, ENOENT from hashlib import md5 from optparse import OptionParser, SUPPRESS_HELP from os import environ, listdir, makedirs, utime, _exit as os_exit from os.path import basename, dirname, getmtime, getsize, isdir, join, \ sep as os_path_sep from random import shuffle from sys import argv, exit, stderr, stdout from time import sleep, time, gmtime, strftime from urllib import quote, unquote try: import simplejson as json except ImportError: import json from swiftclient import Connection, RequestException from swiftclient import command_helpers from swiftclient.utils import config_true_value, prt_bytes from swiftclient.multithreading import MultiThreadingManager from swiftclient.exceptions import ClientException from swiftclient import __version__ as client_version def get_conn(options): """ Return a connection building it from the options. """ return Connection(options.auth, options.user, options.key, options.retries, auth_version=options.auth_version, os_options=options.os_options, snet=options.snet, cacert=options.os_cacert, insecure=options.insecure, ssl_compression=options.ssl_compression) def mkdirs(path): try: makedirs(path) except OSError as err: if err.errno != EEXIST: raise def immediate_exit(signum, frame): stderr.write(" Aborted\n") os_exit(2) st_delete_options = '''[-all] [--leave-segments] [--object-threads ] [--container-threads ] [object] ''' st_delete_help = ''' Delete a container or objects within a container Positional arguments: Name of container to delete from [object] Name of object to delete. Specify multiple times for multiple objects Optional arguments: --all Delete all containers and objects --leave-segments Do not delete segments of manifest objects --object-threads Number of threads to use for deleting objects. Default is 10 --container-threads Number of threads to use for deleting containers. 
Default is 10 '''.strip("\n") def st_delete(parser, args, thread_manager): parser.add_option( '-a', '--all', action='store_true', dest='yes_all', default=False, help='Indicates that you really want to delete ' 'everything in the account') parser.add_option( '', '--leave-segments', action='store_true', dest='leave_segments', default=False, help='Indicates that you want the segments of manifest' 'objects left alone') parser.add_option( '', '--object-threads', type=int, default=10, help='Number of threads to use for deleting objects') parser.add_option('', '--container-threads', type=int, default=10, help='Number of threads to use for ' 'deleting containers') (options, args) = parse_args(parser, args) args = args[1:] if (not args and not options.yes_all) or (args and options.yes_all): thread_manager.error('Usage: %s delete %s\n%s', basename(argv[0]), st_delete_options, st_delete_help) return def _delete_segment((container, obj), conn): conn.delete_object(container, obj) if options.verbose: if conn.attempts > 2: thread_manager.print_msg( '%s/%s [after %d attempts]', container, obj, conn.attempts) else: thread_manager.print_msg('%s/%s', container, obj) def _delete_object((container, obj), conn): try: old_manifest = None query_string = None if not options.leave_segments: try: headers = conn.head_object(container, obj) old_manifest = headers.get('x-object-manifest') if config_true_value( headers.get('x-static-large-object')): query_string = 'multipart-manifest=delete' except ClientException as err: if err.http_status != 404: raise conn.delete_object(container, obj, query_string=query_string) if old_manifest: segment_manager = thread_manager.queue_manager( _delete_segment, options.object_threads, connection_maker=create_connection) segment_queue = segment_manager.queue scontainer, sprefix = old_manifest.split('/', 1) scontainer = unquote(scontainer) sprefix = unquote(sprefix).rstrip('/') + '/' for delobj in conn.get_container(scontainer, prefix=sprefix)[1]: segment_queue.put((scontainer, delobj['name'])) if not segment_queue.empty(): with segment_manager: pass if options.verbose: path = options.yes_all and join(container, obj) or obj if path[:1] in ('/', '\\'): path = path[1:] if conn.attempts > 1: thread_manager.print_msg('%s [after %d attempts]', path, conn.attempts) else: thread_manager.print_msg(path) except ClientException as err: if err.http_status != 404: raise thread_manager.error("Object '%s/%s' not found", container, obj) def _delete_container(container, conn, object_queue): try: marker = '' while True: objects = [o['name'] for o in conn.get_container(container, marker=marker)[1]] if not objects: break for obj in objects: object_queue.put((container, obj)) marker = objects[-1] while not object_queue.empty(): sleep(0.05) attempts = 1 while True: try: conn.delete_container(container) break except ClientException as err: if err.http_status != 409: raise if attempts > 10: raise attempts += 1 sleep(1) except ClientException as err: if err.http_status != 404: raise thread_manager.error('Container %r not found', container) create_connection = lambda: get_conn(options) obj_manager = thread_manager.queue_manager( _delete_object, options.object_threads, connection_maker=create_connection) with obj_manager as object_queue: cont_manager = thread_manager.queue_manager( _delete_container, options.container_threads, object_queue, connection_maker=create_connection) with cont_manager as container_queue: if not args: conn = create_connection() try: marker = '' while True: containers = [ c['name'] for c 
in conn.get_account(marker=marker)[1]] if not containers: break for container in containers: container_queue.put(container) marker = containers[-1] except ClientException as err: if err.http_status != 404: raise thread_manager.error('Account not found') elif len(args) == 1: if '/' in args[0]: print >> stderr, 'WARNING: / in container name; you ' \ 'might have meant %r instead of %r.' % ( args[0].replace('/', ' ', 1), args[0]) container_queue.put(args[0]) else: for obj in args[1:]: object_queue.put((args[0], obj)) st_download_options = '''[--all] [--marker] [--prefix ] [--output ] [--object-threads ] [--container-threads ] [--no-download] [object] ''' st_download_help = ''' Download objects from containers Positional arguments: Name of container to download from. To download a whole account, omit this and specify --all. [object] Name of object to download. Specify multiple times for multiple objects. Omit this to download all objects from the container. Optional arguments: --all Indicates that you really want to download everything in the account --marker Marker to use when starting a container or account download --prefix Only download items beginning with --output For a single file download, stream the output to . Specifying "-" as will redirect to stdout --object-threads Number of threads to use for downloading objects. Default is 10 --container-threads Number of threads to use for downloading containers. Default is 10 --no-download Perform download(s), but don't actually write anything to disk --header Adds a customized request header to the query, like "Range" or "If-Match". This argument is repeatable. Example --header "content-type:text/plain" --skip-identical Skip downloading files that are identical on both sides '''.strip("\n") def st_download(parser, args, thread_manager): parser.add_option( '-a', '--all', action='store_true', dest='yes_all', default=False, help='Indicates that you really want to download ' 'everything in the account') parser.add_option( '-m', '--marker', dest='marker', default='', help='Marker to use when starting a container or ' 'account download') parser.add_option( '-p', '--prefix', dest='prefix', help='Will only download items beginning with the prefix') parser.add_option( '-o', '--output', dest='out_file', help='For a single ' 'file download, stream the output to an alternate location ') parser.add_option( '', '--object-threads', type=int, default=10, help='Number of threads to use for downloading objects') parser.add_option( '', '--container-threads', type=int, default=10, help='Number of threads to use for downloading containers') parser.add_option( '', '--no-download', action='store_true', default=False, help="Perform download(s), but don't actually write anything to disk") parser.add_option( '-H', '--header', action='append', dest='header', default=[], help='Specify a request header, as --header NAME:VALUE. ' 'Adds a customized request header to the query, like "Range" or ' '"If-Match". This argument is repeatable. 
Example' ' --header "content-type:text/plain"') parser.add_option( '--skip-identical', action='store_true', dest='skip_identical', default=False, help='Skip downloading files that are identical on ' 'both sides') (options, args) = parse_args(parser, args) args = args[1:] if options.out_file == '-': options.verbose = 0 if options.out_file and len(args) != 2: exit('-o option only allowed for single file downloads') if (not args and not options.yes_all) or (args and options.yes_all): thread_manager.error('Usage: %s download %s\n%s', basename(argv[0]), st_download_options, st_download_help) return req_headers = split_headers(options.header, '', thread_manager) def _download_object(queue_arg, conn): if len(queue_arg) == 2: container, obj = queue_arg out_file = None elif len(queue_arg) == 3: container, obj, out_file = queue_arg else: raise Exception("Invalid queue_arg length of %s" % len(queue_arg)) path = options.yes_all and join(container, obj) or obj path = path.lstrip(os_path_sep) if options.skip_identical and out_file != '-': filename = out_file if out_file else path try: fp = open(filename, 'rb') except IOError: pass else: with fp: md5sum = md5() while True: data = fp.read(65536) if not data: break md5sum.update(data) req_headers['If-None-Match'] = md5sum.hexdigest() try: start_time = time() headers, body = \ conn.get_object(container, obj, resp_chunk_size=65536, headers=req_headers) headers_receipt = time() content_type = headers.get('content-type') if 'content-length' in headers: content_length = int(headers.get('content-length')) else: content_length = None etag = headers.get('etag') md5sum = None make_dir = not options.no_download and out_file != "-" if content_type.split(';', 1)[0] == 'text/directory': if make_dir and not isdir(path): mkdirs(path) read_length = 0 if 'x-object-manifest' not in headers and \ 'x-static-large-object' not in headers: md5sum = md5() for chunk in body: read_length += len(chunk) if md5sum: md5sum.update(chunk) else: dirpath = dirname(path) if make_dir and dirpath and not isdir(dirpath): mkdirs(dirpath) if not options.no_download: if out_file == "-": fp = stdout elif out_file: fp = open(out_file, 'wb') else: fp = open(path, 'wb') read_length = 0 if 'x-object-manifest' not in headers and \ 'x-static-large-object' not in headers: md5sum = md5() for chunk in body: if not options.no_download: fp.write(chunk) read_length += len(chunk) if md5sum: md5sum.update(chunk) if not options.no_download: fp.close() if md5sum and md5sum.hexdigest() != etag: thread_manager.error('%s: md5sum != etag, %s != %s', path, md5sum.hexdigest(), etag) if content_length is not None and read_length != content_length: thread_manager.error( '%s: read_length != content_length, %d != %d', path, read_length, content_length) if 'x-object-meta-mtime' in headers and not options.out_file \ and not options.no_download: mtime = float(headers['x-object-meta-mtime']) utime(path, (mtime, mtime)) if options.verbose: finish_time = time() auth_time = conn.auth_end_time - start_time headers_receipt = headers_receipt - start_time total_time = finish_time - start_time download_time = total_time - auth_time time_str = ('auth %.3fs, headers %.3fs, total %.3fs, ' '%.3f MB/s' % ( auth_time, headers_receipt, total_time, float(read_length) / download_time / 1000000)) if conn.attempts > 1: thread_manager.print_msg('%s [%s after %d attempts]', path, time_str, conn.attempts) else: thread_manager.print_msg('%s [%s]', path, time_str) except ClientException as err: if err.http_status == 304 and options.skip_identical: 
thread_manager.print_msg("Skipped identical file '%s'", path) return if err.http_status != 404: raise thread_manager.error("Object '%s/%s' not found", container, obj) def _download_container(queue_arg, conn): if len(queue_arg) == 2: container, object_queue = queue_arg prefix = None elif len(queue_arg) == 3: container, object_queue, prefix = queue_arg else: raise Exception("Invalid queue_arg length of %s" % len(queue_arg)) try: marker = options.marker while True: objects = [ o['name'] for o in conn.get_container(container, marker=marker, prefix=prefix)[1]] if not objects: break marker = objects[-1] shuffle(objects) for obj in objects: object_queue.put((container, obj)) except ClientException as err: if err.http_status != 404: raise thread_manager.error('Container %r not found', container) create_connection = lambda: get_conn(options) obj_manager = thread_manager.queue_manager( _download_object, options.object_threads, connection_maker=create_connection) with obj_manager as object_queue: cont_manager = thread_manager.queue_manager( _download_container, options.container_threads, connection_maker=create_connection) with cont_manager as container_queue: if not args: # --all case conn = create_connection() try: marker = options.marker while True: containers = [ c['name'] for c in conn.get_account( marker=marker, prefix=options.prefix)[1]] if not containers: break marker = containers[-1] shuffle(containers) for container in containers: container_queue.put((container, object_queue)) except ClientException as err: if err.http_status != 404: raise thread_manager.error('Account not found') elif len(args) == 1: if '/' in args[0]: print >> stderr, ( 'WARNING: / in container name; you might have meant ' '%r instead of %r.' % ( args[0].replace('/', ' ', 1), args[0])) container_queue.put((args[0], object_queue, options.prefix)) else: if len(args) == 2: obj = args[1] object_queue.put((args[0], obj, options.out_file)) else: for obj in args[1:]: object_queue.put((args[0], obj)) st_list_options = '''[--long] [--lh] [--totals] [--prefix ] [--delimiter ] ''' st_list_help = ''' Lists the containers for the account or the objects for a container Positional arguments: [container] Name of container to list object in Optional arguments: --long Long listing format, similar to ls -l --lh Report sizes in human readable format similar to ls -lh --totals Used with -l or --lh, only report totals --prefix Only list items beginning with the prefix --delimiter Roll up items with the given delimiter. For containers only. See OpenStack Swift API documentation for what this means. 
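Examples (illustrative; the container and prefix names are placeholders):
  swift list                       List the containers in the account
  swift list --lh                  Long listing of containers with
                                   human-readable sizes
  swift list -l my-container       Long listing of the objects in
                                   my-container
  swift list -p photos/ my-container
                                   Only list objects whose names begin
                                   with photos/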
'''.strip('\n') def st_list(parser, args, thread_manager): parser.add_option( '-l', '--long', dest='long', help='Long listing ' 'similar to ls -l command', action='store_true', default=False) parser.add_option( '--lh', dest='human', help='report sizes as human ' "similar to ls -lh switch, but -h taken", action='store_true', default=False) parser.add_option( '-t', '--totals', dest='totals', help='used with -l or --lh, ' 'only report totals', action='store_true', default=False) parser.add_option( '-p', '--prefix', dest='prefix', help='Will only list items beginning with the prefix') parser.add_option( '-d', '--delimiter', dest='delimiter', help='Will roll up items with the given delimiter' ' (see OpenStack Swift API documentation for what this means)') (options, args) = parse_args(parser, args) args = args[1:] if options.delimiter and not args: exit('-d option only allowed for container listings') if len(args) > 1 or len(args) == 1 and args[0].find('/') >= 0: thread_manager.error('Usage: %s list %s\n%s', basename(argv[0]), st_list_options, st_list_help) return conn = get_conn(options) try: marker = '' total_count = total_bytes = 0 while True: if not args: items = \ conn.get_account(marker=marker, prefix=options.prefix)[1] else: items = conn.get_container( args[0], marker=marker, prefix=options.prefix, delimiter=options.delimiter)[1] if not items: break for item in items: item_name = item.get('name') if not options.long and not options.human: thread_manager.print_msg( item.get('name', item.get('subdir'))) else: item_bytes = item.get('bytes') total_bytes += item_bytes if len(args) == 0: # listing containers byte_str = prt_bytes(item_bytes, options.human) count = item.get('count') total_count += count try: meta = conn.head_container(item_name) utc = gmtime(float(meta.get('x-timestamp'))) datestamp = strftime('%Y-%m-%d %H:%M:%S', utc) except ClientException: datestamp = '????-??-?? ??:??:??' if not options.totals: thread_manager.print_msg("%5s %s %s %s", count, byte_str, datestamp, item_name) else: # list container contents subdir = item.get('subdir') if subdir is None: byte_str = prt_bytes(item_bytes, options.human) date, xtime = item.get('last_modified').split('T') xtime = xtime.split('.')[0] else: byte_str = prt_bytes(0, options.human) date = xtime = '' item_name = subdir if not options.totals: thread_manager.print_msg("%s %10s %8s %s", byte_str, date, xtime, item_name) marker = items[-1].get('name', items[-1].get('subdir')) # report totals if options.long or options.human: if len(args) == 0: thread_manager.print_msg( "%5s %s", prt_bytes(total_count, True), prt_bytes(total_bytes, options.human)) else: thread_manager.print_msg(prt_bytes(total_bytes, options.human)) except ClientException as err: if err.http_status != 404: raise if not args: thread_manager.error('Account not found') else: thread_manager.error('Container %r not found', args[0]) st_stat_options = '''[--lh] [container] [object] ''' st_stat_help = ''' Displays information for the account, container, or object Positional arguments: [container] Name of container to stat from [object] Name of object to stat. 
Specify multiple times for multiple objects Optional arguments: --lh Report sizes in human readable format similar to ls -lh '''.strip('\n') def st_stat(parser, args, thread_manager): parser.add_option( '--lh', dest='human', help="report totals like 'list --lh'", action='store_true', default=False) (options, args) = parse_args(parser, args) args = args[1:] conn = get_conn(options) if not args: try: command_helpers.stat_account(conn, options, thread_manager) except ClientException as err: if err.http_status != 404: raise thread_manager.error('Account not found') elif len(args) == 1: if '/' in args[0]: print >> stderr, 'WARNING: / in container name; you might have ' \ 'meant %r instead of %r.' % \ (args[0].replace('/', ' ', 1), args[0]) try: command_helpers.stat_container(conn, options, args, thread_manager) except ClientException as err: if err.http_status != 404: raise thread_manager.error('Container %r not found', args[0]) elif len(args) == 2: try: command_helpers.stat_object(conn, options, args, thread_manager) except ClientException as err: if err.http_status != 404: raise thread_manager.error("Object %s/%s not found", args[0], args[1]) else: thread_manager.error('Usage: %s stat %s\n%s', basename(argv[0]), st_stat_options, st_stat_help) st_post_options = '''[--read-acl ] [--write-acl ] [--sync-to] [--sync-key ] [--meta ] [--header
] [container] [object] ''' st_post_help = ''' Updates meta information for the account, container, or object. If the container is not found, it will be created automatically. Positional arguments: [container] Name of container to post to [object] Name of object to post. Specify multiple times for multiple objects Optional arguments: --read-acl Read ACL for containers. Quick summary of ACL syntax: .r:*, .r:-.example.com, .r:www.example.com, account1, account2:user2 --write-acl Write ACL for containers. Quick summary of ACL syntax: account1 account2:user2 --sync-to Sync To for containers, for multi-cluster replication --sync-key Sync Key for containers, for multi-cluster replication --meta Sets a meta data item. This option may be repeated. Example: -m Color:Blue -m Size:Large --header
Set request headers. This option may be repeated. Example -H "content-type:text/plain" '''.strip('\n') def st_post(parser, args, thread_manager): parser.add_option( '-r', '--read-acl', dest='read_acl', help='Sets the ' 'Read ACL for containers. Quick summary of ACL syntax: .r:*, ' '.r:-.example.com, .r:www.example.com, account1, account2:user2') parser.add_option( '-w', '--write-acl', dest='write_acl', help='Sets the ' 'Write ACL for containers. Quick summary of ACL syntax: account1, ' 'account2:user2') parser.add_option( '-t', '--sync-to', dest='sync_to', help='Sets the ' 'Sync To for containers, for multi-cluster replication.') parser.add_option( '-k', '--sync-key', dest='sync_key', help='Sets the ' 'Sync Key for containers, for multi-cluster replication.') parser.add_option( '-m', '--meta', action='append', dest='meta', default=[], help='Sets a meta data item with the syntax name:value. This option ' 'may be repeated. Example: -m Color:Blue -m Size:Large') parser.add_option( '-H', '--header', action='append', dest='header', default=[], help='Set request headers with the syntax header:value. ' ' This option may be repeated. Example -H "content-type:text/plain" ' '-H "Content-Length: 4000"') (options, args) = parse_args(parser, args) args = args[1:] if (options.read_acl or options.write_acl or options.sync_to or options.sync_key) and not args: exit('-r, -w, -t, and -k options only allowed for containers') conn = get_conn(options) if not args: headers = split_headers( options.meta, 'X-Account-Meta-', thread_manager) headers.update(split_headers(options.header, '', thread_manager)) try: conn.post_account(headers=headers) except ClientException as err: if err.http_status != 404: raise thread_manager.error('Account not found') elif len(args) == 1: if '/' in args[0]: print >> stderr, 'WARNING: / in container name; you might have ' \ 'meant %r instead of %r.' % \ (args[0].replace('/', ' ', 1), args[0]) headers = split_headers(options.meta, 'X-Container-Meta-', thread_manager) headers.update(split_headers(options.header, '', thread_manager)) if options.read_acl is not None: headers['X-Container-Read'] = options.read_acl if options.write_acl is not None: headers['X-Container-Write'] = options.write_acl if options.sync_to is not None: headers['X-Container-Sync-To'] = options.sync_to if options.sync_key is not None: headers['X-Container-Sync-Key'] = options.sync_key try: conn.post_container(args[0], headers=headers) except ClientException as err: if err.http_status != 404: raise conn.put_container(args[0], headers=headers) elif len(args) == 2: headers = split_headers(options.meta, 'X-Object-Meta-', thread_manager) # add header options to the headers object for the request. headers.update(split_headers(options.header, '', thread_manager)) try: conn.post_object(args[0], args[1], headers=headers) except ClientException as err: if err.http_status != 404: raise thread_manager.error("Object '%s/%s' not found", args[0], args[1]) else: thread_manager.error('Usage: %s post %s\n%s', basename(argv[0]), st_post_options, st_post_help) st_upload_options = '''[--changed] [--skip-identical] [--segment-size ] [--segment-container ] [--leave-segments] [--object-threads ] [--segment-threads ] [--header
] [--use-slo] [--object-name ] ''' st_upload_help = ''' Uploads specified files and directories to the given container Positional arguments: Name of container to upload to Name of file or directory to upload. Specify multiple times for multiple uploads Optional arguments: --changed Only upload files that have changed since the last upload --skip-identical Skip uploading files that are identical on both sides --segment-size Upload files in segments no larger than and then create a "manifest" file that will download all the segments as if it were the original file --segment-container Upload the segments into the specified container. If not specified, the segments will be uploaded to a _segments container so as to not pollute the main listings. --leave-segments Indicates that you want the older segments of manifest objects left alone (in the case of overwrites) --object-threads Number of threads to use for uploading full objects. Default is 10. --segment-threads Number of threads to use for uploading object segments. Default is 10. --header
Set request headers with the syntax header:value. This option may be repeated. Example -H "content-type:text/plain". --use-slo When used in conjunction with --segment-size will create a Static Large Object instead of the default Dynamic Large Object. --object-name Upload file and name object to or upload dir and use as object prefix instead of folder name '''.strip('\n') def st_upload(parser, args, thread_manager): parser.add_option( '-c', '--changed', action='store_true', dest='changed', default=False, help='Will only upload files that have changed since ' 'the last upload') parser.add_option( '--skip-identical', action='store_true', dest='skip_identical', default=False, help='Skip uploading files that are identical on ' 'both sides') parser.add_option( '-S', '--segment-size', dest='segment_size', help='Will ' 'upload files in segments no larger than and then create a ' '"manifest" file that will download all the segments as if it were ' 'the original file.') parser.add_option( '-C', '--segment-container', dest='segment_container', help='Will upload the segments into the specified container.' 'If not specified, the segments will be uploaded to ' '_segments container so as to not pollute the main ' ' listings.') parser.add_option( '', '--leave-segments', action='store_true', dest='leave_segments', default=False, help='Indicates that you want ' 'the older segments of manifest objects left alone (in the case of ' 'overwrites)') parser.add_option( '', '--object-threads', type=int, default=10, help='Number of threads to use for uploading full objects') parser.add_option( '', '--segment-threads', type=int, default=10, help='Number of threads to use for uploading object segments') parser.add_option( '-H', '--header', action='append', dest='header', default=[], help='Set request headers with the syntax header:value. ' ' This option may be repeated. 
Example -H "content-type:text/plain" ' '-H "Content-Length: 4000"') parser.add_option( '', '--use-slo', action='store_true', default=False, help='When used in conjunction with --segment-size will ' 'create a Static Large Object instead of the default ' 'Dynamic Large Object.') parser.add_option( '', '--object-name', dest='object_name', help='Upload file and name object to the name specified') (options, args) = parse_args(parser, args) args = args[1:] if len(args) < 2: thread_manager.error( 'Usage: %s upload %s\n%s', basename(argv[0]), st_upload_options, st_upload_help) return def _segment_job(job, conn): if job.get('delete', False): conn.delete_object(job['container'], job['obj']) else: fp = open(job['path'], 'rb') fp.seek(job['segment_start']) seg_container = args[0] + '_segments' if options.segment_container: seg_container = options.segment_container etag = conn.put_object(job.get('container', seg_container), job['obj'], fp, content_length=job['segment_size']) job['segment_location'] = '/%s/%s' % (seg_container, job['obj']) job['segment_etag'] = etag if options.verbose and 'log_line' in job: if conn.attempts > 1: thread_manager.print_msg('%s [after %d attempts]', job['log_line'], conn.attempts) else: thread_manager.print_msg(job['log_line']) return job def _object_job(job, conn): path = job['path'] container = job.get('container', args[0]) dir_marker = job.get('dir_marker', False) object_name = job['object_name'] try: if object_name is not None: object_name.replace("\\", "/") obj = object_name else: obj = path if obj.startswith('./') or obj.startswith('.\\'): obj = obj[2:] if obj.startswith('/'): obj = obj[1:] put_headers = {'x-object-meta-mtime': "%f" % getmtime(path)} if dir_marker: if options.changed: try: headers = conn.head_object(container, obj) ct = headers.get('content-type') cl = int(headers.get('content-length')) et = headers.get('etag') mt = headers.get('x-object-meta-mtime') if ct.split(';', 1)[0] == 'text/directory' and \ cl == 0 and \ et == 'd41d8cd98f00b204e9800998ecf8427e' and \ mt == put_headers['x-object-meta-mtime']: return except ClientException as err: if err.http_status != 404: raise conn.put_object(container, obj, '', content_length=0, content_type='text/directory', headers=put_headers) else: # We need to HEAD all objects now in case we're overwriting a # manifest object and need to delete the old segments # ourselves. 
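# Illustrative note on the two manifest flavours handled below (the sample
# values are hypothetical):
#   * A dynamic large object carries an 'x-object-manifest' header naming a
#     segment container and prefix, e.g.
#         my-container_segments/big.iso/1393944117.000000/10485760000/1073741824/
#     and every object stored under that prefix is one of its segments.
#   * A static large object carries 'x-static-large-object: true'; its
#     segment list is fetched with the query string 'multipart-manifest=get'
#     and each entry's 'name' field holds a segment path.
# The old segment names are collected first so that, unless --leave-segments
# was given, they can be queued for deletion once the replacement object
# (and any new segments) have been uploaded.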
old_manifest = None old_slo_manifest_paths = [] new_slo_manifest_paths = set() if options.changed or options.skip_identical \ or not options.leave_segments: if options.skip_identical: checksum = None try: fp = open(path, 'rb') except IOError: pass else: with fp: md5sum = md5() while True: data = fp.read(65536) if not data: break md5sum.update(data) checksum = md5sum.hexdigest() try: headers = conn.head_object(container, obj) cl = int(headers.get('content-length')) mt = headers.get('x-object-meta-mtime') if (options.skip_identical and checksum == headers.get('etag')): thread_manager.print_msg( "Skipped identical file '%s'", path) return if options.changed and cl == getsize(path) and \ mt == put_headers['x-object-meta-mtime']: return if not options.leave_segments: old_manifest = headers.get('x-object-manifest') if config_true_value( headers.get('x-static-large-object')): headers, manifest_data = conn.get_object( container, obj, query_string='multipart-manifest=get') for old_seg in json.loads(manifest_data): seg_path = old_seg['name'].lstrip('/') if isinstance(seg_path, unicode): seg_path = seg_path.encode('utf-8') old_slo_manifest_paths.append(seg_path) except ClientException as err: if err.http_status != 404: raise # Merge the command line header options to the put_headers put_headers.update(split_headers(options.header, '', thread_manager)) # Don't do segment job if object is not big enough if options.segment_size and \ getsize(path) > int(options.segment_size): seg_container = container + '_segments' if options.segment_container: seg_container = options.segment_container full_size = getsize(path) slo_segments = [] error_counter = [0] segment_manager = thread_manager.queue_manager( _segment_job, options.segment_threads, store_results=slo_segments, error_counter=error_counter, connection_maker=create_connection) with segment_manager as segment_queue: segment = 0 segment_start = 0 while segment_start < full_size: segment_size = int(options.segment_size) if segment_start + segment_size > full_size: segment_size = full_size - segment_start if options.use_slo: segment_name = '%s/slo/%s/%s/%s/%08d' % ( obj, put_headers['x-object-meta-mtime'], full_size, options.segment_size, segment) else: segment_name = '%s/%s/%s/%s/%08d' % ( obj, put_headers['x-object-meta-mtime'], full_size, options.segment_size, segment) segment_queue.put( {'path': path, 'obj': segment_name, 'segment_start': segment_start, 'segment_size': segment_size, 'segment_index': segment, 'log_line': '%s segment %s' % (obj, segment)}) segment += 1 segment_start += segment_size if error_counter[0]: raise ClientException( 'Aborting manifest creation ' 'because not all segments could be uploaded. 
%s/%s' % (container, obj)) if options.use_slo: slo_segments.sort(key=lambda d: d['segment_index']) for seg in slo_segments: seg_loc = seg['segment_location'].lstrip('/') if isinstance(seg_loc, unicode): seg_loc = seg_loc.encode('utf-8') new_slo_manifest_paths.add(seg_loc) manifest_data = json.dumps([ {'path': d['segment_location'], 'etag': d['segment_etag'], 'size_bytes': d['segment_size']} for d in slo_segments]) put_headers['x-static-large-object'] = 'true' conn.put_object(container, obj, manifest_data, headers=put_headers, query_string='multipart-manifest=put') else: new_object_manifest = '%s/%s/%s/%s/%s/' % ( quote(seg_container), quote(obj), put_headers['x-object-meta-mtime'], full_size, options.segment_size) if old_manifest and old_manifest.rstrip('/') == \ new_object_manifest.rstrip('/'): old_manifest = None put_headers['x-object-manifest'] = new_object_manifest conn.put_object(container, obj, '', content_length=0, headers=put_headers) else: conn.put_object( container, obj, open(path, 'rb'), content_length=getsize(path), headers=put_headers) if old_manifest or old_slo_manifest_paths: segment_manager = thread_manager.queue_manager( _segment_job, options.segment_threads, connection_maker=create_connection) segment_queue = segment_manager.queue if old_manifest: scontainer, sprefix = old_manifest.split('/', 1) scontainer = unquote(scontainer) sprefix = unquote(sprefix).rstrip('/') + '/' for delobj in conn.get_container(scontainer, prefix=sprefix)[1]: segment_queue.put( {'delete': True, 'container': scontainer, 'obj': delobj['name']}) if old_slo_manifest_paths: for seg_to_delete in old_slo_manifest_paths: if seg_to_delete in new_slo_manifest_paths: continue scont, sobj = \ seg_to_delete.split('/', 1) segment_queue.put( {'delete': True, 'container': scont, 'obj': sobj}) if not segment_queue.empty(): with segment_manager: pass if options.verbose: if conn.attempts > 1: thread_manager.print_msg('%s [after %d attempts]', obj, conn.attempts) else: thread_manager.print_msg(obj) except OSError as err: if err.errno != ENOENT: raise thread_manager.error('Local file %r not found', path) def _upload_dir(path, object_queue, object_name): names = listdir(path) if not names: object_queue.put({'path': path, 'object_name': object_name, 'dir_marker': True}) else: for name in listdir(path): subpath = join(path, name) subobjname = None if object_name is not None: subobjname = join(object_name, name) if isdir(subpath): _upload_dir(subpath, object_queue, subobjname) else: object_queue.put({'path': subpath, 'object_name': subobjname}) create_connection = lambda: get_conn(options) conn = create_connection() # Try to create the container, just in case it doesn't exist. If this # fails, it might just be because the user doesn't have container PUT # permissions, so we'll ignore any error. If there's really a problem, # it'll surface on the first object PUT. 
try: conn.put_container(args[0]) if options.segment_size is not None: seg_container = args[0] + '_segments' if options.segment_container: seg_container = options.segment_container conn.put_container(seg_container) except ClientException as err: msg = ' '.join(str(x) for x in (err.http_status, err.http_reason)) if err.http_response_content: if msg: msg += ': ' msg += err.http_response_content[:60] thread_manager.error( 'Error trying to create container %r: %s', args[0], msg) except Exception as err: thread_manager.error( 'Error trying to create container %r: %s', args[0], err) if options.object_name is not None: if len(args[1:]) > 1: thread_manager.error('object-name only be used with 1 file or dir') return object_name = options.object_name object_manager = thread_manager.queue_manager( _object_job, options.object_threads, connection_maker=create_connection) with object_manager as object_queue: try: for arg in args[1:]: if isdir(arg): _upload_dir(arg, object_queue, object_name) else: object_queue.put({'path': arg, 'object_name': object_name}) except ClientException as err: if err.http_status != 404: raise thread_manager.error('Account not found') st_capabilities_options = "[]" st_capabilities_help = ''' Retrieve capability of the proxy Optional positional arguments: proxy URL of the cluster to retrieve capabilities ''' def st_capabilities(parser, args, thread_manager): def _print_compo_cap(name, capabilities): for feature, options in sorted(capabilities.items(), key=lambda x: x[0]): thread_manager.print_msg("%s: %s" % (name, feature)) if options: thread_manager.print_msg(" Options:") for key, value in sorted(options.items(), key=lambda x: x[0]): thread_manager.print_msg(" %s: %s" % (key, value)) (options, args) = parse_args(parser, args) if (args and len(args) > 2): thread_manager.error('Usage: %s capabilities %s\n%s', basename(argv[0]), st_capabilities_options, st_capabilities_help) return conn = get_conn(options) url = None if len(args) == 2: url = args[1] capabilities = conn.get_capabilities(url) _print_compo_cap('Core', {'swift': capabilities['swift']}) del capabilities['swift'] _print_compo_cap('Additional middleware', capabilities) def split_headers(options, prefix='', thread_manager=None): """ Splits 'Key: Value' strings and returns them as a dictionary. :param options: An array of 'Key: Value' strings :param prefix: String to prepend to all of the keys in the dictionary. :param thread_manager: MultiThreadingManager for thread safe error reporting. 
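A minimal illustration (header names and values are arbitrary)::

    split_headers(['Color:Blue', 'Size:Large'], 'X-Object-Meta-')
    # -> {'X-Object-Meta-Color': 'Blue', 'X-Object-Meta-Size': 'Large'}

An item without a ':' is reported through the thread manager's error method when one was supplied, and aborts the process otherwise.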
""" headers = {} for item in options: split_item = item.split(':', 1) if len(split_item) == 2: headers[(prefix + split_item[0]).title()] = split_item[1] else: error_string = "Metadata parameter %s must contain a ':'.\n%s" \ % (item, st_post_help) if thread_manager: thread_manager.error(error_string) else: exit(error_string) return headers def parse_args(parser, args, enforce_requires=True): if not args: args = ['-h'] (options, args) = parser.parse_args(args) if (not (options.auth and options.user and options.key)): # Use 2.0 auth if none of the old args are present options.auth_version = '2.0' # Use new-style args if old ones not present if not options.auth and options.os_auth_url: options.auth = options.os_auth_url if not options.user and options.os_username: options.user = options.os_username if not options.key and options.os_password: options.key = options.os_password # Specific OpenStack options options.os_options = { 'tenant_id': options.os_tenant_id, 'tenant_name': options.os_tenant_name, 'service_type': options.os_service_type, 'endpoint_type': options.os_endpoint_type, 'auth_token': options.os_auth_token, 'object_storage_url': options.os_storage_url, 'region_name': options.os_region_name, } if len(args) > 1 and args[0] == "capabilities": return options, args if (options.os_options.get('object_storage_url') and options.os_options.get('auth_token') and options.auth_version == '2.0'): return options, args if enforce_requires and \ not (options.auth and options.user and options.key): exit(''' Auth version 1.0 requires ST_AUTH, ST_USER, and ST_KEY environment variables to be set or overridden with -A, -U, or -K. Auth version 2.0 requires OS_AUTH_URL, OS_USERNAME, OS_PASSWORD, and OS_TENANT_NAME OS_TENANT_ID to be set or overridden with --os-auth-url, --os-username, --os-password, --os-tenant-name or os-tenant-id. Note: adding "-V 2" is necessary for this.'''.strip('\n')) return options, args if __name__ == '__main__': version = client_version parser = OptionParser(version='%%prog %s' % version, usage=''' usage: %%prog [--version] [--help] [--snet] [--verbose] [--debug] [--info] [--quiet] [--auth ] [--auth-version ] [--user ] [--key ] [--retries ] [--os-username ] [--os-password ] [--os-tenant-id ] [--os-tenant-name ] [--os-auth-url ] [--os-auth-token ] [--os-storage-url ] [--os-region-name ] [--os-service-type ] [--os-endpoint-type ] [--os-cacert ] [--insecure] [--no-ssl-compression] ... Command-line interface to the OpenStack Swift API. 
Positional arguments: delete Delete a container or objects within a container download Download objects from containers list Lists the containers for the account or the objects for a container post Updates meta information for the account, container, or object; creates containers if not present stat Displays information for the account, container, or object upload Uploads files or directories to the given container capabilities List cluster capabilities Examples: %%prog -A https://auth.api.rackspacecloud.com/v1.0 -U user -K api_key stat -v %%prog --os-auth-url https://api.example.com/v2.0 --os-tenant-name tenant \\ --os-username user --os-password password list %%prog --os-auth-token 6ee5eb33efad4e45ab46806eac010566 \\ --os-storage-url https://10.1.5.2:8080/v1/AUTH_ced809b6a4baea7aeab61a \\ list %%prog list --lh '''.strip('\n') % globals()) parser.add_option('-s', '--snet', action='store_true', dest='snet', default=False, help='Use SERVICENET internal network') parser.add_option('-v', '--verbose', action='count', dest='verbose', default=1, help='Print more info') parser.add_option('--debug', action='store_true', dest='debug', default=False, help='Show the curl commands and results ' 'of all http queries regardless of result status.') parser.add_option('--info', action='store_true', dest='info', default=False, help='Show the curl commands and results ' ' of all http queries which return an error.') parser.add_option('-q', '--quiet', action='store_const', dest='verbose', const=0, default=1, help='Suppress status output') parser.add_option('-A', '--auth', dest='auth', default=environ.get('ST_AUTH'), help='URL for obtaining an auth token') parser.add_option('-V', '--auth-version', dest='auth_version', default=environ.get('ST_AUTH_VERSION', '1.0'), type=str, help='Specify a version for authentication. ' 'Defaults to 1.0.') parser.add_option('-U', '--user', dest='user', default=environ.get('ST_USER'), help='User name for obtaining an auth token.') parser.add_option('-K', '--key', dest='key', default=environ.get('ST_KEY'), help='Key for obtaining an auth token.') parser.add_option('-R', '--retries', type=int, default=5, dest='retries', help='The number of times to retry a failed connection.') parser.add_option('--os-username', metavar='', default=environ.get('OS_USERNAME'), help='OpenStack username. Defaults to env[OS_USERNAME].') parser.add_option('--os_username', help=SUPPRESS_HELP) parser.add_option('--os-password', metavar='', default=environ.get('OS_PASSWORD'), help='OpenStack password. Defaults to env[OS_PASSWORD].') parser.add_option('--os_password', help=SUPPRESS_HELP) parser.add_option('--os-tenant-id', metavar='', default=environ.get('OS_TENANT_ID'), help='OpenStack tenant ID. ' 'Defaults to env[OS_TENANT_ID]') parser.add_option('--os_tenant_id', help=SUPPRESS_HELP) parser.add_option('--os-tenant-name', metavar='', default=environ.get('OS_TENANT_NAME'), help='OpenStack tenant name. ' 'Defaults to env[OS_TENANT_NAME].') parser.add_option('--os_tenant_name', help=SUPPRESS_HELP) parser.add_option('--os-auth-url', metavar='', default=environ.get('OS_AUTH_URL'), help='OpenStack auth URL. Defaults to env[OS_AUTH_URL].') parser.add_option('--os_auth_url', help=SUPPRESS_HELP) parser.add_option('--os-auth-token', metavar='', default=environ.get('OS_AUTH_TOKEN'), help='OpenStack token. Defaults to env[OS_AUTH_TOKEN]. 
' 'Used with --os-storage-url to bypass the ' 'usual username/password authentication.') parser.add_option('--os_auth_token', help=SUPPRESS_HELP) parser.add_option('--os-storage-url', metavar='', default=environ.get('OS_STORAGE_URL'), help='OpenStack storage URL. ' 'Defaults to env[OS_STORAGE_URL]. ' 'Overrides the storage url returned during auth. ' 'Will bypass authentication when used with ' '--os-auth-token.') parser.add_option('--os_storage_url', help=SUPPRESS_HELP) parser.add_option('--os-region-name', metavar='', default=environ.get('OS_REGION_NAME'), help='OpenStack region name. ' 'Defaults to env[OS_REGION_NAME]') parser.add_option('--os_region_name', help=SUPPRESS_HELP) parser.add_option('--os-service-type', metavar='', default=environ.get('OS_SERVICE_TYPE'), help='OpenStack Service type. ' 'Defaults to env[OS_SERVICE_TYPE]') parser.add_option('--os_service_type', help=SUPPRESS_HELP) parser.add_option('--os-endpoint-type', metavar='', default=environ.get('OS_ENDPOINT_TYPE'), help='OpenStack Endpoint type. ' 'Defaults to env[OS_ENDPOINT_TYPE]') parser.add_option('--os-cacert', metavar='', default=environ.get('OS_CACERT'), help='Specify a CA bundle file to use in verifying a ' 'TLS (https) server certificate. ' 'Defaults to env[OS_CACERT]') default_val = config_true_value(environ.get('SWIFTCLIENT_INSECURE')) parser.add_option('--insecure', action="store_true", dest="insecure", default=default_val, help='Allow swiftclient to access servers without ' 'having to verify the SSL certificate. ' 'Defaults to env[SWIFTCLIENT_INSECURE] ' '(set to \'true\' to enable).') parser.add_option('--no-ssl-compression', action='store_false', dest='ssl_compression', default=True, help='This option is deprecated and not used anymore. ' 'SSL compression should be disabled by default ' 'by the system SSL library') parser.disable_interspersed_args() (options, args) = parse_args(parser, argv[1:], enforce_requires=False) parser.enable_interspersed_args() commands = ('delete', 'download', 'list', 'post', 'stat', 'upload', 'capabilities') if not args or args[0] not in commands: parser.print_usage() if args: exit('no such command: %s' % args[0]) exit() signal.signal(signal.SIGINT, immediate_exit) if options.debug or options.info: logger = logging.getLogger("swiftclient") if options.debug: logging.basicConfig(level=logging.DEBUG) elif options.info: logging.basicConfig(level=logging.INFO) had_error = False with MultiThreadingManager() as thread_manager: parser.usage = globals()['st_%s_help' % args[0]] try: globals()['st_%s' % args[0]](parser, argv[1:], thread_manager) except (ClientException, RequestException, socket.error) as err: thread_manager.error(str(err)) had_error = thread_manager.error_count if had_error: exit(1) python-swiftclient-2.0.3/tox.ini0000664000175300017540000000153412304670635020047 0ustar jenkinsjenkins00000000000000[tox] envlist = py26,py27,py33,pypy,pep8 minversion = 1.6 skipsdist = True [testenv] usedevelop = True install_command = pip install -U {opts} {packages} setenv = VIRTUAL_ENV={envdir} deps = -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt commands = python setup.py testr --testr-args="{posargs}" [testenv:pep8] commands = flake8 bin/swift swiftclient tests [testenv:venv] commands = {posargs} [testenv:cover] commands = python setup.py testr --coverage [tox:jenkins] downloadcache = ~/cache/pip [flake8] # it's not a bug that we aren't using all of hacking # H102 -> apache2 license exists # H103 -> license is apache # H201 -> no bare excepts # H501 -> don't use 
locals() for str formatting # H903 -> \n not \r\n ignore = H select = H102, H103, H201, H501, H903 show-source = True exclude = .venv,.tox,dist,doc,test,*egg python-swiftclient-2.0.3/.mailmap0000664000175300017540000000007612304670635020155 0ustar jenkinsjenkins00000000000000Kun Huang python-swiftclient-2.0.3/ChangeLog0000664000175300017540000001550712304670716020313 0ustar jenkinsjenkins00000000000000CHANGES ======= 2.0.3 ----- * Add LengthWrapper in put_object to honor content_length param * Updated from global requirements * Remove useless statement * swift.1 manpage fix for groff warnings 2.0.2 ----- * Remove multipart/form-data file upload 2.0.1 ----- * Fix --insecure option on auth * Only run flake8 on swiftclient code 2.0 --- 1.9.0 ----- * Remove extraneous vim configuration comments * Rename Openstack to OpenStack * Port to python-requests * Add option to skip downloading/uploading identical files * Remove tox locale overrides * Fix swiftclient help * Fix misspellings in python swiftclient * changed things because reasons * Add missing backslash * match hacking rules in swift * Updated from global requirements * Install manpage in share/man/man1 instead of man/man1 * assertEquals is deprecated, use assertEqual * Add capabilities option * Install swiftclient manpage * Replace xrange in for loop with range * Add --object-name * retry on ratelimit * Fix help of some optional arguments * Updates tox.ini to use new features * Fix Sphinx version issue * Enable usage of proxies defined in environment (http(s)_proxy) * Don't crash when header is value of None * Fix download bandwidth for swift command * Updates .gitignore * Allow custom headers when using swift download (CLI) * Replaced two references to Cloud Files with Swift * Fix a typo in help text: "downlad" * Add close to swiftclient.client.Connection * enhance swiftclient logging * Clarify main help for post subcommand * Fixes python-swiftclient debugging message 1.8.0 ----- * Make pbr only a build-time dependency * Add verbose output to all stat commands * assertEquals is deprecated, use assertEqual (H602) * Skip sniffing and reseting if retry is disabled * user defined headers added to swift post queries 1.7.0 ----- * Sync with global requirements * fix bug with replace old *LOs * Extend usage message for `swift download` 1.6.0 ----- * Added support for running the tests under PyPy with tox * Remove redundant unit suffix * Reformat help outputs * Add a NullHandler when setting up library logging * Assignment to reserved built-in symbol "file" * Added headers argument support to get_object() * Move multi-threading code to a library * fix(gitignore) : Ignore *.egg files * python3: Start of adding basic python3 support * Added log statements in swift client * Update docstring for swiftclient.Connection.__init__ * Refuse carriage return in header value * Adds max-backoff for retries in Connection * Allow setting # of retries in the binary 1.5.0 ----- * Note '-V 2' is necessary for auth 2.0 * Allow storage url override for both auth vers * Add *.swp into .gitignore * Add -p option to download command * add -t for totals to list command and --lh to stat * add optional 'response_dict' parameters to many calls into which they'll return a dictionary of the response status, reason and headers * Fixes re-auth flow with expired tokens * Remove explicit distribute depend * Add -l and --lh switches to swift 'list' command * Changed the call to set_tunnel to work in python 2.6 or python 2.7 since its name changed between versions * Add option to 
disable SSL compression * python3: Introduce py33 to tox.ini * Rename requires files to standard names * remove busy-wait so that swift client won't use up all CPU cycles * log get_auth request url instead of x-storage-url * Update the man page * Add .coveragerc file to show correct code coverage * do not warn about etag for slo * Eradicate eventlet and fix bug lp:959221 * Add end_marker and path query parameters * Switch to pbr for setup * Switch to flake8 * Improve Python 3.x compatibility * Confirm we have auth creds before clearing preauth 1.4.0 ----- * Improve auth option help * Static large object support * Fixed pep8 errors in test directory * Allow user to specify headers at the command line * Enhance put_object to inform when chunk is ignored * Allow v2 to use storage_url/storage_token directly * Add client man page swift.1 * Allow to specify segment container * Added "/" check when list containers * Print useful message when keystoneclient is not installed * Fix reporting version 1.3.0 ----- * Use testr instead of nose * Update to latest oslo version/setup * Add generated files to .gitignore * Add env[SWIFTCLIENT_INSECURE] * Fix debug feature and add --debug to swift * Use testtools as base class for test cases * Add --os-cacert * Add --insecure option to fix bug #1077869 * Don't segment objects smaller than --segment-size * Don't add trailing slash to auth URL * Adding segment size as another x-object-manifest component * Stop loss of precision when writing 'x-object-meta-mtime' * Remove unused json_request * fixed inconsistencies in parameter descriptions * tell nose to explicity test the 'tests' directory * Fixes setup compatibility issue on Windows * Force utf-8 encode of HTTPConnection params * swiftclient Connection : default optional arguments to None * Add OpenStack trove classifier for PyPI * Resolves issue with empty os_options for swift-bench & swift-dispersion-report * Catch authorization failures * Do not use dictionaries as default parameters 1.2.0 ----- * Add region_name support * Allow endpoint type to be specified * PEP8 cleanup * PEP8 issues fixed * Add ability to download without writing to disk * Fix PEP8 issues * Change '_' to '-' in options * Fix swiftclient 400 error when OS_AUTH_URL is set * Add nosehtmloutput as a test dependency * Shuffle download order (of containers and objects) * Add timing stats to verbose download output * Ensure Content-Length header when PUT/POST a container * Make python-keystoneclient optional * Fix container delete throughput and 409 retries * Consume version info from pkg_resources * Use keystoneclient for authentication * Removes the title "Swift Web" from landing page 1.1.1 ----- * Now url encodes/decodes x-object-manifest values * Configurable concurrency for swift client * Allow specify tenant:user in user * Make swift exit on ctrl-c * Add post-tag versioning * Don't suppress openstack auth options * Make swift not hang on error * Fix pep8 errors w/pep8==1.3 * Add missing test/tools files to the tarball * Add build_sphinx options * Make CLI exit nonzero on error * Add doc and version in swiftclient.__init__.py * Raise ClientException for invalid auth version * Version bump after pypi release 1.1.0 ----- * Removed now-unused .cache.bundle references * Added setup.cfg for verbose test output * Add run_tests.sh script here * Adding fake_http_connect to test.utils * Add openstack project infrastructure * Add logging * Defined version to 1.0 * Add CHANGELOG LICENSE and MANIFEST.in * Delete old test_client and add a gitignore * 
Rename client to swiftclient * Fix links * Import script from swift to run unittests * Add test_client from original swift repository * Add AUTHORS file * Make sure we get a header StorageURL with 1.0 * Allow specify the tenant in user * First commit
python-swiftclient-2.0.3/doc/0000775000175300017540000000000012304670716017276 5ustar jenkinsjenkins00000000000000python-swiftclient-2.0.3/doc/Makefile0000664000175300017540000000616012304670635020741 0ustar jenkinsjenkins00000000000000# Makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS    =
SPHINXBUILD   = sphinx-build
SPHINXSOURCE  = source
PAPER         =
BUILDDIR      = build

# Internal variables.
PAPEROPT_a4     = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) $(SPHINXSOURCE)

.PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest

help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html       to make standalone HTML files"
	@echo "  dirhtml    to make HTML files named index.html in directories"
	@echo "  pickle     to make pickle files"
	@echo "  json       to make JSON files"
	@echo "  htmlhelp   to make HTML files and a HTML help project"
	@echo "  qthelp     to make HTML files and a qthelp project"
	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  changes    to make an overview of all changed/added/deprecated items"
	@echo "  linkcheck  to check all external links for integrity"
	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"

clean:
	-rm -rf $(BUILDDIR)/*

html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."

pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."

json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."

htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	      ".hhp project file in $(BUILDDIR)/htmlhelp."

qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/python-novaclient.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/python-novaclient.qhc"

latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \
	      "run these through (pdf)latex."

changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."

linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	      "or in $(BUILDDIR)/linkcheck/output.txt."

doctest:
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	      "results in $(BUILDDIR)/doctest/output.txt."
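A minimal usage sketch for the Makefile above (not part of the packaged files; it assumes Sphinx and GNU make are installed): the documentation targets are run from the doc/ directory, for example:

    $ cd doc
    $ make html        # runs: sphinx-build -b html -d build/doctrees source build/html
    $ make linkcheck   # checks external links; report lands in build/linkcheck/output.txt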
python-swiftclient-2.0.3/doc/manpages/0000775000175300017540000000000012304670716021071 5ustar jenkinsjenkins00000000000000python-swiftclient-2.0.3/doc/manpages/swift.10000664000175300017540000001250312304670635022310 0ustar jenkinsjenkins00000000000000.\"
.\" Author: Joao Marcelo Martins or
.\" Copyright (c) 2010-2011 OpenStack Foundation.
.\"
.\" Licensed under the Apache License, Version 2.0 (the "License");
.\" you may not use this file except in compliance with the License.
.\" You may obtain a copy of the License at
.\"
.\"    http://www.apache.org/licenses/LICENSE-2.0
.\"
.\" Unless required by applicable law or agreed to in writing, software
.\" distributed under the License is distributed on an "AS IS" BASIS,
.\" WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
.\" implied.
.\" See the License for the specific language governing permissions and
.\" limitations under the License.
.\"
.TH swift 1 "8/26/2011" "Linux" "OpenStack Swift"
.SH NAME
.LP
.B swift
\- OpenStack Swift client tool
.SH SYNOPSIS
.LP
.B swift
[options] [args]
.SH DESCRIPTION
.PP
The \fBswift\fR tool is a command line utility for communicating with an
OpenStack Object Storage (Swift) environment. It allows one to perform
several types of operations.
.SH COMMANDS
.PP
\fBstat\fR [\fIcontainer\fR] [\fIobject\fR]
.RS 4
Displays information for the account, container, or object depending on
the args given (if any). In verbose mode, the Storage URL and the
authentication token are displayed as well.
.RE
\fBlist\fR [\fIcommand-options\fR] [\fIcontainer\fR]
.RS 4
Lists the containers for the account or the objects for a container. The
\-p or \-\-prefix option will only list items beginning with that prefix.
The \-d or \-\-delimiter is an option (for container listings only) that
will roll up items with the given delimiter (see the OpenStack Swift general
documentation for what this means). The \-l and \-\-lh options provide more
detail, similar to ls \-l and ls \-lh, the latter providing sizes in
human-readable format (e.g. 3K, 12M, etc.). These latter two switches use
more overhead to get those details, which is directly proportional to the
number of containers or objects being listed.
.RE
\fBupload\fR [\fIcommand-options\fR] container file_or_directory [\fIfile_or_directory\fR] [...]
.RS 4
Uploads to the given container the files and directories specified by the
remaining args. The \-c or \-\-changed is an option that will only upload
files that have changed since the last upload. The \-\-object\-name option
uploads a file and names the object with the given value, or uploads a
directory and uses the given value as the object prefix. The \-S or
\-\-segment\-size and \-\-leave\-segments are options as well (see \-\-help
for more).
.RE
\fBpost\fR [\fIcommand-options\fR] [\fIcontainer\fR] [\fIobject\fR]
.RS 4
Updates meta information for the account, container, or object depending
on the args given. If the container is not found, it will be created
automatically; but this is not true for accounts and objects. Containers
also allow the \-r (or \-\-read\-acl) and \-w (or \-\-write\-acl) options.
The \-m or \-\-meta option is allowed on all and is used to define the user
metadata items to set, in the form Name:Value. This option can be repeated.
\fBExample\fR: post \-m Color:Blue \-m Size:Large
.RE
\fBdownload\fR [\fIcommand-options\fR] [\fIcontainer\fR] [\fIobject\fR] [\fIobject\fR] [...]
.RS 4
Downloads everything in the account (with \-\-all), or everything in a
container, or a list of objects depending on the args given. For a single
object download, you may use the \-o [\-\-output] option to redirect the
output to a specific file, or to stdout if "-" is given. You can specify
optional headers with the repeatable cURL-like option \-H [\-\-header].
.RE
\fBdelete\fR [\fIcommand-options\fR] [\fIcontainer\fR] [\fIobject\fR] [\fIobject\fR] [...]
.RS 4
Deletes everything in the account (with \-\-all), or everything in a
container, or a list of objects depending on the args given. Segments of
manifest objects will be deleted as well, unless you specify the
\-\-leave\-segments option.
.RE
\fBcapabilities\fR [\fIproxy-url\fR]
.RS 4
Displays cluster capabilities. The output includes the list of activated
Swift middlewares as well as the relevant options for each one.
Additionally, the command displays the relevant options for the Swift core.
If the proxy-url argument is not provided, the storage-url retrieved after
authentication is used as the proxy-url.
.RE
.SH OPTIONS
.PD 0
.IP "--version Show program's version number and exit"
.IP "-h, --help Show this help message and exit"
.IP "-s, --snet Use SERVICENET internal network"
.IP "-v, --verbose Print more info"
.IP "-q, --quiet Suppress status output"
.IP "-A AUTH, --auth=AUTH URL for obtaining an auth token"
.IP "-U USER, --user=USER User name for obtaining an auth token"
.IP "-V 1|2 Authentication protocol version"
.IP "-K KEY, --key=KEY Key for obtaining an auth token"
.IP "--os-storage-url=URL Use this instead of URL returned from auth"
.PD
.SH EXAMPLE
.PP
swift \-A https://127.0.0.1:443/auth/v1.0 \-U swiftops:swiftops \-K swiftops stat
.RS 2
.PD 0
.IP " Account: AUTH_43b42dae-dc0b-4a4b-ac55-97de614d6e6e"
.IP "Containers: 1"
.IP " Objects: 1"
.IP " Bytes: 1124"
.IP "Accept-Ranges: bytes"
.IP "X-Trans-Id: txb21186a9eef64ed295a1e95896a0fc72"
.PD
.RE
.SH DOCUMENTATION
.LP
More in-depth documentation about OpenStack Swift as a whole can be found at
.BI http://swift.openstack.org
python-swiftclient-2.0.3/doc/source/0000775000175300017540000000000012304670716020576 5ustar jenkinsjenkins00000000000000python-swiftclient-2.0.3/doc/source/conf.py0000664000175300017540000001502012304670635022073 0ustar jenkinsjenkins00000000000000# -*- coding: utf-8 -*-
#
# Swiftclient documentation build configuration file, created by
# sphinx-quickstart on Tue Apr 17 02:17:37 2012.
#
# This file is execfile()d with the current directory set to its containing
# dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

import sys
import os

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))

BASE_DIR = os.path.dirname(os.path.abspath(__file__))
ROOT = os.path.abspath(os.path.join(BASE_DIR, "..", ".."))

sys.path.insert(0, ROOT)

# -- General configuration ----------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo',
              'sphinx.ext.coverage']

autoclass_content = 'both'
autodoc_default_flags = ['members', 'undoc-members', 'show-inheritance']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Swiftclient' copyright = u'2013 OpenStack, LLC.' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # import swiftclient.version release = swiftclient.version.version_string version = swiftclient.version.version_string # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of documents that shouldn't be included in the build. #unused_docs = [] # List of directories, relative to source directory, that shouldn't be searched # for source files. exclude_trees = [] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output -------------------------------------------------- # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. 
#html_additional_pages = {} # If false, no module index is generated. #html_use_modindex = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = '' # Output file base name for HTML help builder. htmlhelp_basename = 'SwiftClientwebdoc' # -- Options for LaTeX output ------------------------------------------------- # The paper size ('letter' or 'a4'). #latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). #latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]) latex_documents = [ ('index', 'SwiftClient.tex', u'SwiftClient Documentation', u'OpenStack, LLC.', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # Additional stuff for the LaTeX preamble. #latex_preamble = '' # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_use_modindex = True python-swiftclient-2.0.3/doc/source/_templates/0000775000175300017540000000000012304670716022733 5ustar jenkinsjenkins00000000000000python-swiftclient-2.0.3/doc/source/_templates/.empty0000664000175300017540000000000012304670635024060 0ustar jenkinsjenkins00000000000000python-swiftclient-2.0.3/doc/source/index.rst0000664000175300017540000000147512304670635022446 0ustar jenkinsjenkins00000000000000SwiftClient Web *************** Copyright 2013 OpenStack, LLC. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Code-Generated Documentation ============================ .. toctree:: :maxdepth: 2 swiftclient Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` python-swiftclient-2.0.3/doc/source/_static/0000775000175300017540000000000012304670716022224 5ustar jenkinsjenkins00000000000000python-swiftclient-2.0.3/doc/source/_static/.empty0000664000175300017540000000000012304670635023351 0ustar jenkinsjenkins00000000000000python-swiftclient-2.0.3/doc/source/swiftclient.rst0000664000175300017540000000052712304670635023667 0ustar jenkinsjenkins00000000000000.. _swiftclient_package: swiftclient ============== .. automodule:: swiftclient swiftclient.client ================== .. automodule:: swiftclient.client swiftclient.exceptions ====================== .. automodule:: swiftclient.exceptions swiftclient.multithreading ========================== .. 
automodule:: swiftclient.multithreading python-swiftclient-2.0.3/LICENSE0000664000175300017540000002363612304670635017550 0ustar jenkinsjenkins00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. 
Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. python-swiftclient-2.0.3/MANIFEST.in0000664000175300017540000000025612304670635020272 0ustar jenkinsjenkins00000000000000include AUTHORS include ChangeLog include LICENSE include README.rst include run_tests.sh tox.ini recursive-include doc * recursive-include tests * recursive-include tools * python-swiftclient-2.0.3/run_tests.sh0000775000175300017540000000211012304670635021110 0ustar jenkinsjenkins00000000000000#!/bin/bash function usage { echo "Usage: $0 [OPTION]..." echo "Run python-swiftclient's test suite(s)" echo "" echo " -p, --pep8 Just run pep8" echo " -h, --help Print this usage message" echo "" echo "This script is deprecated and currently retained for compatibility." echo 'You can run the full test suite for multiple environments by running "tox".' echo 'You can run tests for only python 2.7 by running "tox -e py27", or run only' echo 'the pep8 tests with "tox -e pep8".' 
exit } command -v tox > /dev/null 2>&1 if [ $? -ne 0 ]; then echo 'This script requires "tox" to run.' echo 'You can install it with "pip install tox".' exit 1; fi just_pep8=0 function process_option { case "$1" in -h|--help) usage;; -p|--pep8) let just_pep8=1;; esac } for arg in "$@"; do process_option $arg done if [ $just_pep8 -eq 1 ]; then tox -e pep8 exit fi tox -e py27 $toxargs 2>&1 | tee run_tests.err.log || exit if [ ${PIPESTATUS[0]} -ne 0 ]; then exit ${PIPESTATUS[0]} fi if [ -z "$toxargs" ]; then tox -e pep8 fi python-swiftclient-2.0.3/.unittests0000775000175300017540000000015112304670635020574 0ustar jenkinsjenkins00000000000000#!/bin/bash set -e python setup.py testr --coverage RET=$? coverage report -m rm -f .coverage exit $RET
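A minimal sketch of how these helper scripts are typically exercised (not part of the packaged files; it assumes tox is installed, as the usage text above suggests):

    $ pip install tox          # the script's own suggestion if tox is missing
    $ ./run_tests.sh --pep8    # style checks only (wraps "tox -e pep8")
    $ ./run_tests.sh           # runs "tox -e py27", then the pep8 environment
    $ tox                      # full test suite across all configured environments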