# httplib2-0.8/python2/httplib2test.py
#!/usr/bin/env python2.4
"""
httplib2test

A set of unit tests for httplib2.py.

Requires Python 2.4 or later
"""

__author__ = "Joe Gregorio (joe@bitworking.org)"
__copyright__ = "Copyright 2006, Joe Gregorio"
__contributors__ = []
__license__ = "MIT"
__history__ = """ """
__version__ = "0.1 ($Rev: 118 $)"

import StringIO
import base64
import httplib
import httplib2
import os
import pickle
import socket
import sys
import time
import unittest
import urlparse

try:
    import ssl
except ImportError:
    pass

# Python 2.3 support
if not hasattr(unittest.TestCase, 'assertTrue'):
    unittest.TestCase.assertTrue = unittest.TestCase.failUnless
    unittest.TestCase.assertFalse = unittest.TestCase.failIf

# The test resources base uri
base = 'http://bitworking.org/projects/httplib2/test/'
#base = 'http://localhost/projects/httplib2/test/'
cacheDirName = ".cache"


class CredentialsTest(unittest.TestCase):
    def test(self):
        c = httplib2.Credentials()
        c.add("joe", "password")
        self.assertEqual(("joe", "password"), list(c.iter("bitworking.org"))[0])
        self.assertEqual(("joe", "password"), list(c.iter(""))[0])
        c.add("fred", "password2", "wellformedweb.org")
        self.assertEqual(("joe", "password"), list(c.iter("bitworking.org"))[0])
        self.assertEqual(1, len(list(c.iter("bitworking.org"))))
        self.assertEqual(2, len(list(c.iter("wellformedweb.org"))))
        self.assertTrue(("fred", "password2") in list(c.iter("wellformedweb.org")))
        c.clear()
        self.assertEqual(0, len(list(c.iter("bitworking.org"))))
        c.add("fred", "password2", "wellformedweb.org")
        self.assertTrue(("fred", "password2") in list(c.iter("wellformedweb.org")))
        self.assertEqual(0, len(list(c.iter("bitworking.org"))))
        self.assertEqual(0, len(list(c.iter("")))) 
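
# A minimal sketch of a store with the semantics the test above pins down:
# an empty domain acts as a wildcard, while a named domain is only offered
# for that host. Illustrative only -- not necessarily httplib2's actual
# Credentials implementation; the class name below is invented.
class _ExampleCredentialStore(object):
    def __init__(self):
        self.credentials = []

    def add(self, name, password, domain=""):
        self.credentials.append((domain.lower(), name, password))

    def iter(self, domain):
        for (cdomain, name, password) in self.credentials:
            # "" matches every domain; otherwise require an exact match.
            if cdomain == "" or cdomain == domain:
                yield (name, password)

    def clear(self):
        self.credentials = []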
httplib2.urlnorm("HTTP://LOCALHOST:80")) try: httplib2.urlnorm("/") self.fail("Non-absolute URIs should raise an exception") except httplib2.RelativeURIError: pass class UrlSafenameTest(unittest.TestCase): def test(self): # Test that different URIs end up generating different safe names self.assertEqual( "example.org,fred,a=b,58489f63a7a83c3b7794a6a398ee8b1f", httplib2.safename("http://example.org/fred/?a=b")) self.assertEqual( "example.org,fred,a=b,8c5946d56fec453071f43329ff0be46b", httplib2.safename("http://example.org/fred?/a=b")) self.assertEqual( "www.example.org,fred,a=b,499c44b8d844a011b67ea2c015116968", httplib2.safename("http://www.example.org/fred?/a=b")) self.assertEqual( httplib2.safename(httplib2.urlnorm("http://www")[-1]), httplib2.safename(httplib2.urlnorm("http://WWW")[-1])) self.assertEqual( "www.example.org,fred,a=b,692e843a333484ce0095b070497ab45d", httplib2.safename("https://www.example.org/fred?/a=b")) self.assertNotEqual( httplib2.safename("http://www"), httplib2.safename("https://www")) # Test the max length limits uri = "http://" + ("w" * 200) + ".org" uri2 = "http://" + ("w" * 201) + ".org" self.assertNotEqual( httplib2.safename(uri2), httplib2.safename(uri)) # Max length should be 200 + 1 (",") + 32 self.assertEqual(233, len(httplib2.safename(uri2))) self.assertEqual(233, len(httplib2.safename(uri))) # Unicode if sys.version_info >= (2,3): self.assertEqual( "xn--http,-4y1d.org,fred,a=b,579924c35db315e5a32e3d9963388193", httplib2.safename(u"http://\u2304.org/fred/?a=b")) class _MyResponse(StringIO.StringIO): def __init__(self, body, **kwargs): StringIO.StringIO.__init__(self, body) self.headers = kwargs def iteritems(self): return self.headers.iteritems() class _MyHTTPConnection(object): "This class is just a mock of httplib.HTTPConnection used for testing" def __init__(self, host, port=None, key_file=None, cert_file=None, strict=None, timeout=None, proxy_info=None): self.host = host self.port = port self.timeout = timeout self.log = "" self.sock = None def set_debuglevel(self, level): pass def connect(self): "Connect to a host on a given port." pass def close(self): pass def request(self, method, request_uri, body, headers): pass def getresponse(self): return _MyResponse("the body", status="200") class HttpTest(unittest.TestCase): def setUp(self): if os.path.exists(cacheDirName): [os.remove(os.path.join(cacheDirName, file)) for file in os.listdir(cacheDirName)] if sys.version_info < (2, 6): disable_cert_validation = True else: disable_cert_validation = False self.http = httplib2.Http( cacheDirName, disable_ssl_certificate_validation=disable_cert_validation) self.http.clear_credentials() def testIPv6NoSSL(self): try: self.http.request("http://[::1]/") except socket.gaierror: self.fail("should get the address family right for IPv6") except socket.error: # Even if IPv6 isn't installed on a machine it should just raise socket.error pass def testIPv6SSL(self): try: self.http.request("https://[::1]/") except socket.gaierror: self.fail("should get the address family right for IPv6") except httplib2.CertificateHostnameMismatch: # We connected and verified that the certificate doesn't match # the name. Good enough. 

class _MyResponse(StringIO.StringIO):
    def __init__(self, body, **kwargs):
        StringIO.StringIO.__init__(self, body)
        self.headers = kwargs

    def iteritems(self):
        return self.headers.iteritems()


class _MyHTTPConnection(object):
    "This class is just a mock of httplib.HTTPConnection used for testing"

    def __init__(self, host, port=None, key_file=None, cert_file=None,
                 strict=None, timeout=None, proxy_info=None):
        self.host = host
        self.port = port
        self.timeout = timeout
        self.log = ""
        self.sock = None

    def set_debuglevel(self, level):
        pass

    def connect(self):
        "Connect to a host on a given port."
        pass

    def close(self):
        pass

    def request(self, method, request_uri, body, headers):
        pass

    def getresponse(self):
        return _MyResponse("the body", status="200")


class HttpTest(unittest.TestCase):
    def setUp(self):
        if os.path.exists(cacheDirName):
            [os.remove(os.path.join(cacheDirName, file)) for file in os.listdir(cacheDirName)]

        if sys.version_info < (2, 6):
            disable_cert_validation = True
        else:
            disable_cert_validation = False
        self.http = httplib2.Http(cacheDirName, disable_ssl_certificate_validation=disable_cert_validation)
        self.http.clear_credentials()

    def testIPv6NoSSL(self):
        try:
            self.http.request("http://[::1]/")
        except socket.gaierror:
            self.fail("should get the address family right for IPv6")
        except socket.error:
            # Even if IPv6 isn't installed on a machine it should just raise socket.error
            pass

    def testIPv6SSL(self):
        try:
            self.http.request("https://[::1]/")
        except socket.gaierror:
            self.fail("should get the address family right for IPv6")
        except httplib2.CertificateHostnameMismatch:
            # We connected and verified that the certificate doesn't match
            # the name. Good enough.
            pass
        except socket.error:
            # Even if IPv6 isn't installed on a machine it should just raise socket.error
            pass

    def testConnectionType(self):
        self.http.force_exception_to_status_code = False
        response, content = self.http.request("http://bitworking.org", connection_type=_MyHTTPConnection)
        self.assertEqual(response['content-location'], "http://bitworking.org")
        self.assertEqual(content, "the body")

    def testGetUnknownServer(self):
        self.http.force_exception_to_status_code = False
        try:
            self.http.request("http://fred.bitworking.org/")
            self.fail("An httplib2.ServerNotFoundError Exception must be thrown on an unresolvable server.")
        except httplib2.ServerNotFoundError:
            pass

        # Now test with exceptions turned off
        self.http.force_exception_to_status_code = True

        (response, content) = self.http.request("http://fred.bitworking.org/")
        self.assertEqual(response['content-type'], 'text/plain')
        self.assertTrue(content.startswith("Unable to find"))
        self.assertEqual(response.status, 400)

    def testGetConnectionRefused(self):
        self.http.force_exception_to_status_code = False
        try:
            self.http.request("http://localhost:7777/")
            self.fail("A socket.error exception must be thrown on Connection Refused.")
        except socket.error:
            pass

        # Now test with exceptions turned off
        self.http.force_exception_to_status_code = True

        (response, content) = self.http.request("http://localhost:7777/")
        self.assertEqual(response['content-type'], 'text/plain')
        self.assertTrue("Connection refused" in content or "actively refused" in content, "Unexpected status %(content)s" % vars())
        self.assertEqual(response.status, 400)

    def testGetIRI(self):
        if sys.version_info >= (2, 3):
            uri = urlparse.urljoin(base, u"reflector/reflector.cgi?d=\N{CYRILLIC CAPITAL LETTER DJE}")
            (response, content) = self.http.request(uri, "GET")
            d = self.reflector(content)
            self.assertTrue(d.has_key('QUERY_STRING'))
            self.assertTrue(d['QUERY_STRING'].find('%D0%82') > 0)

    def testGetIsDefaultMethod(self):
        # Test that GET is the default method
        uri = urlparse.urljoin(base, "methods/method_reflector.cgi")
        (response, content) = self.http.request(uri)
        self.assertEqual(response['x-method'], "GET")

    def testDifferentMethods(self):
        # Test that all methods can be used
        uri = urlparse.urljoin(base, "methods/method_reflector.cgi")
        for method in ["GET", "PUT", "DELETE", "POST"]:
            (response, content) = self.http.request(uri, method, body=" ")
            self.assertEqual(response['x-method'], method)

    def testHeadRead(self):
        # Test that we don't try to read the response of a HEAD request
        # since httplib blocks response.read() for HEAD requests.
        # Oddly enough this doesn't appear as a problem when doing HEAD requests
        # against Apache servers.
        uri = "http://www.google.com/"
        (response, content) = self.http.request(uri, "HEAD")
        self.assertEqual(response.status, 200)
        self.assertEqual(content, "")

    def testGetNoCache(self):
        # Test that we can do a GET w/o the cache turned on.
        http = httplib2.Http()
        uri = urlparse.urljoin(base, "304/test_etag.txt")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.previous, None)

    def testGetOnlyIfCachedCacheHit(self):
        # Test that we can do a GET with cache and 'only-if-cached'
        uri = urlparse.urljoin(base, "304/test_etag.txt")
        (response, content) = self.http.request(uri, "GET")
        (response, content) = self.http.request(uri, "GET", headers={'cache-control': 'only-if-cached'})
        self.assertEqual(response.fromcache, True)
        self.assertEqual(response.status, 200)

    def testGetOnlyIfCachedCacheMiss(self):
        # Test that we can do a GET with no cache with 'only-if-cached'
        uri = urlparse.urljoin(base, "304/test_etag.txt")
        (response, content) = self.http.request(uri, "GET", headers={'cache-control': 'only-if-cached'})
        self.assertEqual(response.fromcache, False)
        self.assertEqual(response.status, 504)

    def testGetOnlyIfCachedNoCacheAtAll(self):
        # Test that we can do a GET with no cache with 'only-if-cached'
        # Of course, there might be an intermediary beyond us
        # that responds to the 'only-if-cached', so this
        # test can't really be guaranteed to pass.
        http = httplib2.Http()
        uri = urlparse.urljoin(base, "304/test_etag.txt")
        (response, content) = http.request(uri, "GET", headers={'cache-control': 'only-if-cached'})
        self.assertEqual(response.fromcache, False)
        self.assertEqual(response.status, 504)

    def testUserAgent(self):
        # Test that we provide a default user-agent
        uri = urlparse.urljoin(base, "user-agent/test.cgi")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertTrue(content.startswith("Python-httplib2/"))

    def testUserAgentNonDefault(self):
        # Test that the default user-agent can be overridden
        uri = urlparse.urljoin(base, "user-agent/test.cgi")
        (response, content) = self.http.request(uri, "GET", headers={'User-Agent': 'fred/1.0'})
        self.assertEqual(response.status, 200)
        self.assertTrue(content.startswith("fred/1.0"))

    def testGet300WithLocation(self):
        # Test that we automatically follow 300 redirects if a Location: header is provided
        uri = urlparse.urljoin(base, "300/with-location-header.asis")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(content, "This is the final destination.\n")
        self.assertEqual(response.previous.status, 300)
        self.assertEqual(response.previous.fromcache, False)

        # Confirm that the intermediate 300 is not cached
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(content, "This is the final destination.\n")
        self.assertEqual(response.previous.status, 300)
        self.assertEqual(response.previous.fromcache, False)

    def testGet300WithLocationNoRedirect(self):
        # Test that we do not follow a 300 redirect when follow_redirects is False
        self.http.follow_redirects = False
        uri = urlparse.urljoin(base, "300/with-location-header.asis")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 300)

    def testGet300WithoutLocation(self):
        # Not giving a Location: header in a 300 response is acceptable
        # In which case we just return the 300 response
        uri = urlparse.urljoin(base, "300/without-location-header.asis")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 300)
        self.assertTrue(response['content-type'].startswith("text/html"))
        self.assertEqual(response.previous, None)

    def testGet301(self):
        # Test that we automatically follow 301 redirects
        # and that we cache the 301 response
        uri = urlparse.urljoin(base, "301/onestep.asis")
        destination = urlparse.urljoin(base, "302/final-destination.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertTrue(response.has_key('content-location'))
        self.assertEqual(response['content-location'], destination)
        self.assertEqual(content, "This is the final destination.\n")
        self.assertEqual(response.previous.status, 301)
        self.assertEqual(response.previous.fromcache, False)

        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response['content-location'], destination)
        self.assertEqual(content, "This is the final destination.\n")
        self.assertEqual(response.previous.status, 301)
        self.assertEqual(response.previous.fromcache, True)

    def testHead301(self):
        # Test that we automatically follow 301 redirects
        uri = urlparse.urljoin(base, "301/onestep.asis")
        destination = urlparse.urljoin(base, "302/final-destination.txt")
        (response, content) = self.http.request(uri, "HEAD")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.previous.status, 301)
        self.assertEqual(response.previous.fromcache, False)

    def testGet301NoRedirect(self):
        # Test that we do not follow a 301 redirect when follow_redirects is False
        self.http.follow_redirects = False
        uri = urlparse.urljoin(base, "301/onestep.asis")
        destination = urlparse.urljoin(base, "302/final-destination.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 301)

    def testGet302(self):
        # Test that we automatically follow 302 redirects
        # and that we DO NOT cache the 302 response
        uri = urlparse.urljoin(base, "302/onestep.asis")
        destination = urlparse.urljoin(base, "302/final-destination.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response['content-location'], destination)
        self.assertEqual(content, "This is the final destination.\n")
        self.assertEqual(response.previous.status, 302)
        self.assertEqual(response.previous.fromcache, False)

        uri = urlparse.urljoin(base, "302/onestep.asis")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)
        self.assertEqual(response['content-location'], destination)
        self.assertEqual(content, "This is the final destination.\n")
        self.assertEqual(response.previous.status, 302)
        self.assertEqual(response.previous.fromcache, False)
        self.assertEqual(response.previous['content-location'], uri)

        uri = urlparse.urljoin(base, "302/twostep.asis")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)
        self.assertEqual(content, "This is the final destination.\n")
        self.assertEqual(response.previous.status, 302)
        self.assertEqual(response.previous.fromcache, False)

    def testGet302RedirectionLimit(self):
        # Test that we can set a lower redirection limit
        # and that we raise an exception when we exceed
        # that limit.
        self.http.force_exception_to_status_code = False
        uri = urlparse.urljoin(base, "302/twostep.asis")
        try:
            (response, content) = self.http.request(uri, "GET", redirections=1)
            self.fail("This should not happen")
        except httplib2.RedirectLimit:
            pass
        except Exception, e:
            self.fail("Threw wrong kind of exception")

        # Re-run the test without the exceptions
        self.http.force_exception_to_status_code = True

        (response, content) = self.http.request(uri, "GET", redirections=1)
        self.assertEqual(response.status, 500)
        self.assertTrue(response.reason.startswith("Redirected more"))
        self.assertEqual("302", response['status'])
        self.assertTrue(content.startswith("<html>"))
        self.assertTrue(response.previous != None)

    def testGet302NoLocation(self):
        # Test that we throw an exception when we get
        # a 302 with no Location: header.
        self.http.force_exception_to_status_code = False
        uri = urlparse.urljoin(base, "302/no-location.asis")
        try:
            (response, content) = self.http.request(uri, "GET")
            self.fail("Should never reach here")
        except httplib2.RedirectMissingLocation:
            pass
        except Exception, e:
            self.fail("Threw wrong kind of exception")

        # Re-run the test without the exceptions
        self.http.force_exception_to_status_code = True

        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 500)
        self.assertTrue(response.reason.startswith("Redirected but"))
        self.assertEqual("302", response['status'])
        self.assertTrue(content.startswith("This is content"))

    def testGet301ViaHttps(self):
        # Google always redirects to https://www.google.com
        (response, content) = self.http.request("https://code.google.com/apis/", "GET")
        self.assertEqual(200, response.status)
        self.assertEqual(301, response.previous.status)

    def testGetViaHttps(self):
        # Test that we can handle HTTPS
        (response, content) = self.http.request("https://www.google.com/adsense/", "GET")
        self.assertEqual(200, response.status)

    def testGetViaHttpsSpecViolationOnLocation(self):
        # Test that we follow redirects through HTTPS
        # even if they violate the spec by including
        # a relative Location: header instead of an
        # absolute one.
        (response, content) = self.http.request("https://www.google.com/adsense", "GET")
        self.assertEqual(200, response.status)
        self.assertNotEqual(None, response.previous)

    def testSslCertValidation(self):
        if sys.version_info >= (2, 6):
            # Test that we get an ssl.SSLError when specifying a non-existent CA
            # certs file.
            http = httplib2.Http(ca_certs='/nosuchfile')
            self.assertRaises(ssl.SSLError, http.request, "https://www.google.com/", "GET")

            # Test that we get a SSLHandshakeError if we try to access
            # https://www.google.com, using a CA cert file that doesn't contain
            # the CA Google uses (i.e., simulating a cert that's not signed by a
            # trusted CA).
            other_ca_certs = os.path.join(
                os.path.dirname(os.path.abspath(httplib2.__file__)),
                "test", "other_cacerts.txt")
            http = httplib2.Http(ca_certs=other_ca_certs)
            self.assertRaises(httplib2.SSLHandshakeError, http.request, "https://www.google.com/", "GET")

    def testSslCertValidationDoubleDots(self):
        if sys.version_info >= (2, 6):
            # Test that we match a double-wildcard (*.*) cert
            try:
                self.http.request("https://1.www.appspot.com/", "GET")
            except httplib2.CertificateHostnameMismatch:
                self.fail('cert with *.*.appspot.com should not raise an exception.')

    def testSslHostnameValidation(self):
        pass
        # No longer a valid test.
        #if sys.version_info >= (2, 6):
            # The SSL server at google.com:443 returns a certificate for
            # 'www.google.com', which results in a host name mismatch.
            # Note that this test only works because the ssl module and httplib2
            # do not support SNI; for requests specifying a server name of
            # 'google.com' via SNI, a matching cert would be returned.
            # self.assertRaises(httplib2.CertificateHostnameMismatch,
            #                   self.http.request, "https://google.com/", "GET")

    def testSslCertValidationWithoutSslModuleFails(self):
        if sys.version_info < (2, 6):
            http = httplib2.Http(disable_ssl_certificate_validation=False)
            self.assertRaises(httplib2.CertificateValidationUnsupported,
                              http.request, "https://www.google.com/", "GET")

    def testGetViaHttpsKeyCert(self):
        # At this point I can only test
        # that the key and cert files are passed in
        # correctly to httplib. It would be nice to have
        # a real https endpoint to test against.

        # bitworking.org presents a certificate for a non-matching host
        # (*.webfaction.com), so we need to disable cert checking for this test.
        http = httplib2.Http(timeout=2, disable_ssl_certificate_validation=True)

        http.add_certificate("akeyfile", "acertfile", "bitworking.org")
        try:
            (response, content) = http.request("https://bitworking.org", "GET")
        except:
            pass
        self.assertEqual(http.connections["https:bitworking.org"].key_file, "akeyfile")
        self.assertEqual(http.connections["https:bitworking.org"].cert_file, "acertfile")

        try:
            (response, content) = http.request("https://notthere.bitworking.org", "GET")
        except:
            pass
        self.assertEqual(http.connections["https:notthere.bitworking.org"].key_file, None)
        self.assertEqual(http.connections["https:notthere.bitworking.org"].cert_file, None)

    def testGet303(self):
        # Do a follow-up GET on a Location: header
        # returned from a POST that gave a 303.
        uri = urlparse.urljoin(base, "303/303.cgi")
        (response, content) = self.http.request(uri, "POST", " ")
        self.assertEqual(response.status, 200)
        self.assertEqual(content, "This is the final destination.\n")
        self.assertEqual(response.previous.status, 303)

    def testGet303NoRedirect(self):
        # Do a follow-up GET on a Location: header
        # returned from a POST that gave a 303.
        self.http.follow_redirects = False
        uri = urlparse.urljoin(base, "303/303.cgi")
        (response, content) = self.http.request(uri, "POST", " ")
        self.assertEqual(response.status, 303)

    def test303ForDifferentMethods(self):
        # Test that all methods are converted to GET after a 303
        uri = urlparse.urljoin(base, "303/redirect-to-reflector.cgi")
        for (method, method_on_303) in [("PUT", "GET"), ("DELETE", "GET"),
                                        ("POST", "GET"), ("GET", "GET"), ("HEAD", "GET")]:
            (response, content) = self.http.request(uri, method, body=" ")
            self.assertEqual(response['x-method'], method_on_303)

    def test303AndForwardAuthorizationHeader(self):
        # Test that the Authorization header is forwarded across a 303
        # when forward_authorization_headers is set.
        uri = urlparse.urljoin(base, "303/redirect-to-header-reflector.cgi")
        headers = {'authorization': 'Bearer foo'}
        response, content = self.http.request(uri, 'GET', body=" ", headers=headers)
        # self.assertTrue('authorization' not in content)
        self.http.follow_all_redirects = True
        self.http.forward_authorization_headers = True
        response, content = self.http.request(uri, 'GET', body=" ", headers=headers)
        # Oh, how I wish Apache didn't eat the Authorization header.
        # self.assertTrue('authorization' in content)

    def testGet304(self):
        # Test that we use ETags properly to validate our cache
        uri = urlparse.urljoin(base, "304/test_etag.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertNotEqual(response['etag'], "")

        (response, content) = self.http.request(uri, "GET")
        (response, content) = self.http.request(uri, "GET", headers={'cache-control': 'must-revalidate'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)

        cache_file_name = os.path.join(cacheDirName, httplib2.safename(httplib2.urlnorm(uri)[-1]))
        f = open(cache_file_name, "r")
        status_line = f.readline()
        f.close()

        self.assertTrue(status_line.startswith("status:"))

        (response, content) = self.http.request(uri, "HEAD")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)

        (response, content) = self.http.request(uri, "GET", headers={'range': 'bytes=0-0'})
        self.assertEqual(response.status, 206)
        self.assertEqual(response.fromcache, False)

    def testGetIgnoreEtag(self):
        # Test that we can forcibly ignore ETags
        uri = urlparse.urljoin(base, "reflector/reflector.cgi")
        (response, content) = self.http.request(uri, "GET")
        self.assertNotEqual(response['etag'], "")

        (response, content) = self.http.request(uri, "GET", headers={'cache-control': 'max-age=0'})
        d = self.reflector(content)
        self.assertTrue(d.has_key('HTTP_IF_NONE_MATCH'))

        self.http.ignore_etag = True
        (response, content) = self.http.request(uri, "GET", headers={'cache-control': 'max-age=0'})
        d = self.reflector(content)
        self.assertEqual(response.fromcache, False)
        self.assertFalse(d.has_key('HTTP_IF_NONE_MATCH'))

    def testOverrideEtag(self):
        # Test that we can override the ETag sent in If-None-Match
        uri = urlparse.urljoin(base, "reflector/reflector.cgi")
        (response, content) = self.http.request(uri, "GET")
        self.assertNotEqual(response['etag'], "")

        (response, content) = self.http.request(uri, "GET", headers={'cache-control': 'max-age=0'})
        d = self.reflector(content)
        self.assertTrue(d.has_key('HTTP_IF_NONE_MATCH'))
        self.assertNotEqual(d['HTTP_IF_NONE_MATCH'], "fred")

        (response, content) = self.http.request(uri, "GET", headers={'cache-control': 'max-age=0', 'if-none-match': 'fred'})
        d = self.reflector(content)
        self.assertTrue(d.has_key('HTTP_IF_NONE_MATCH'))
        self.assertEqual(d['HTTP_IF_NONE_MATCH'], "fred")

    #MAP-commented this out because it consistently fails
    # def testGet304EndToEnd(self):
    #     # Test that end to end headers get overwritten in the cache
    #     uri = urlparse.urljoin(base, "304/end2end.cgi")
    #     (response, content) = self.http.request(uri, "GET")
    #     self.assertNotEqual(response['etag'], "")
    #     old_date = response['date']
    #     time.sleep(2)
    #
    #     (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'max-age=0'})
    #     # The response should be from the cache, but the Date: header should be updated.
    #     new_date = response['date']
    #     self.assertNotEqual(new_date, old_date)
    #     self.assertEqual(response.status, 200)
    #     self.assertEqual(response.fromcache, True)

    def testGet304LastModified(self):
        # Test that we can still handle a 304
        # by only using the last-modified cache validator.
uri = urlparse.urljoin(base, "304/last-modified-only/last-modified-only.txt") (response, content) = self.http.request(uri, "GET") self.assertNotEqual(response['last-modified'], "") (response, content) = self.http.request(uri, "GET") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, True) def testGet307(self): # Test that we do follow 307 redirects but # do not cache the 307 uri = urlparse.urljoin(base, "307/onestep.asis") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(content, "This is the final destination.\n") self.assertEqual(response.previous.status, 307) self.assertEqual(response.previous.fromcache, False) (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, True) self.assertEqual(content, "This is the final destination.\n") self.assertEqual(response.previous.status, 307) self.assertEqual(response.previous.fromcache, False) def testGet410(self): # Test that we pass 410's through uri = urlparse.urljoin(base, "410/410.asis") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 410) def testVaryHeaderSimple(self): """ RFC 2616 13.6 When the cache receives a subsequent request whose Request-URI specifies one or more cache entries including a Vary header field, the cache MUST NOT use such a cache entry to construct a response to the new request unless all of the selecting request-headers present in the new request match the corresponding stored request-headers in the original request. """ # test that the vary header is sent uri = urlparse.urljoin(base, "vary/accept.asis") (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'}) self.assertEqual(response.status, 200) self.assertTrue(response.has_key('vary')) # get the resource again, from the cache since accept header in this # request is the same as the request (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'}) self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, True, msg="Should be from cache") # get the resource again, not from cache since Accept headers does not match (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/html'}) self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, False, msg="Should not be from cache") # get the resource again, without any Accept header, so again no match (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, False, msg="Should not be from cache") def testNoVary(self): pass # when there is no vary, a different Accept header (e.g.) 
    def testNoVary(self):
        pass
        # when there is no vary, a different Accept header (e.g.) should not
        # impact if the cache is used
        # test that the vary header is not sent
        # uri = urlparse.urljoin(base, "vary/no-vary.asis")
        # (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'})
        # self.assertEqual(response.status, 200)
        # self.assertFalse(response.has_key('vary'))

        # (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'})
        # self.assertEqual(response.status, 200)
        # self.assertEqual(response.fromcache, True, msg="Should be from cache")
        #
        # (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/html'})
        # self.assertEqual(response.status, 200)
        # self.assertEqual(response.fromcache, True, msg="Should be from cache")

    def testVaryHeaderDouble(self):
        uri = urlparse.urljoin(base, "vary/accept-double.asis")
        (response, content) = self.http.request(uri, "GET", headers={
            'Accept': 'text/plain', 'Accept-Language': 'da, en-gb;q=0.8, en;q=0.7'})
        self.assertEqual(response.status, 200)
        self.assertTrue(response.has_key('vary'))

        # we are from cache
        (response, content) = self.http.request(uri, "GET", headers={
            'Accept': 'text/plain', 'Accept-Language': 'da, en-gb;q=0.8, en;q=0.7'})
        self.assertEqual(response.fromcache, True, msg="Should be from cache")

        (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)

        # get the resource again, not from cache, varied headers don't match exact
        (response, content) = self.http.request(uri, "GET", headers={'Accept-Language': 'da'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False, msg="Should not be from cache")

    def testVaryUnusedHeader(self):
        # A header's value is not considered to vary if it's not used at all.
uri = urlparse.urljoin(base, "vary/unused-header.asis") (response, content) = self.http.request(uri, "GET", headers={ 'Accept': 'text/plain'}) self.assertEqual(response.status, 200) self.assertTrue(response.has_key('vary')) # we are from cache (response, content) = self.http.request(uri, "GET", headers={ 'Accept': 'text/plain',}) self.assertEqual(response.fromcache, True, msg="Should be from cache") def testHeadGZip(self): # Test that we don't try to decompress a HEAD response uri = urlparse.urljoin(base, "gzip/final-destination.txt") (response, content) = self.http.request(uri, "HEAD") self.assertEqual(response.status, 200) self.assertNotEqual(int(response['content-length']), 0) self.assertEqual(content, "") def testGetGZip(self): # Test that we support gzip compression uri = urlparse.urljoin(base, "gzip/final-destination.txt") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertFalse(response.has_key('content-encoding')) self.assertTrue(response.has_key('-content-encoding')) self.assertEqual(int(response['content-length']), len("This is the final destination.\n")) self.assertEqual(content, "This is the final destination.\n") def testPostAndGZipResponse(self): uri = urlparse.urljoin(base, "gzip/post.cgi") (response, content) = self.http.request(uri, "POST", body=" ") self.assertEqual(response.status, 200) self.assertFalse(response.has_key('content-encoding')) self.assertTrue(response.has_key('-content-encoding')) def testGetGZipFailure(self): # Test that we raise a good exception when the gzip fails self.http.force_exception_to_status_code = False uri = urlparse.urljoin(base, "gzip/failed-compression.asis") try: (response, content) = self.http.request(uri, "GET") self.fail("Should never reach here") except httplib2.FailedToDecompressContent: pass except Exception: self.fail("Threw wrong kind of exception") # Re-run the test with out the exceptions self.http.force_exception_to_status_code = True (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 500) self.assertTrue(response.reason.startswith("Content purported")) def testTimeout(self): self.http.force_exception_to_status_code = True uri = urlparse.urljoin(base, "timeout/timeout.cgi") try: import socket socket.setdefaulttimeout(1) except: # Don't run the test if we can't set the timeout return (response, content) = self.http.request(uri) self.assertEqual(response.status, 408) self.assertTrue(response.reason.startswith("Request Timeout")) self.assertTrue(content.startswith("Request Timeout")) def testIndividualTimeout(self): uri = urlparse.urljoin(base, "timeout/timeout.cgi") http = httplib2.Http(timeout=1) http.force_exception_to_status_code = True (response, content) = http.request(uri) self.assertEqual(response.status, 408) self.assertTrue(response.reason.startswith("Request Timeout")) self.assertTrue(content.startswith("Request Timeout")) def testHTTPSInitTimeout(self): c = httplib2.HTTPSConnectionWithTimeout('localhost', 80, timeout=47) self.assertEqual(47, c.timeout) def testGetDeflate(self): # Test that we support deflate compression uri = urlparse.urljoin(base, "deflate/deflated.asis") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertFalse(response.has_key('content-encoding')) self.assertEqual(int(response['content-length']), len("This is the final destination.")) self.assertEqual(content, "This is the final destination.") def testGetDeflateFailure(self): # Test that we raise a good exception when 
    def testGetDeflateFailure(self):
        # Test that we raise a good exception when the deflate fails
        self.http.force_exception_to_status_code = False

        uri = urlparse.urljoin(base, "deflate/failed-compression.asis")
        try:
            (response, content) = self.http.request(uri, "GET")
            self.fail("Should never reach here")
        except httplib2.FailedToDecompressContent:
            pass
        except Exception:
            self.fail("Threw wrong kind of exception")

        # Re-run the test without the exceptions
        self.http.force_exception_to_status_code = True

        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 500)
        self.assertTrue(response.reason.startswith("Content purported"))

    def testGetDuplicateHeaders(self):
        # Test that duplicate headers get concatenated via ','
        uri = urlparse.urljoin(base, "duplicate-headers/multilink.asis")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(content, "This is content\n")
        self.assertEqual(response['link'].split(",")[0], '<http://bitworking.org>; rel="home"; title="BitWorking"')

    def testGetCacheControlNoCache(self):
        # Test Cache-Control: no-cache on requests
        uri = urlparse.urljoin(base, "304/test_etag.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertNotEqual(response['etag'], "")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)

        (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'no-cache'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)

    def testGetCacheControlPragmaNoCache(self):
        # Test Pragma: no-cache on requests
        uri = urlparse.urljoin(base, "304/test_etag.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertNotEqual(response['etag'], "")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)

        (response, content) = self.http.request(uri, "GET", headers={'Pragma': 'no-cache'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)

    def testGetCacheControlNoStoreRequest(self):
        # A no-store request means that the response should not be stored.
        uri = urlparse.urljoin(base, "304/test_etag.txt")

        (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'no-store'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)

        (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'no-store'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)

    def testGetCacheControlNoStoreResponse(self):
        # A no-store response means that the response should not be stored.
        uri = urlparse.urljoin(base, "no-store/no-store.asis")

        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)

        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)

    def testGetCacheControlNoCacheNoStoreRequest(self):
        # Test that a no-store, no-cache clears the entry from the cache
        # even if it was cached previously.
uri = urlparse.urljoin(base, "304/test_etag.txt") (response, content) = self.http.request(uri, "GET") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.fromcache, True) (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'no-store, no-cache'}) (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'no-store, no-cache'}) self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, False) def testUpdateInvalidatesCache(self): # Test that calling PUT or DELETE on a # URI that is cache invalidates that cache. uri = urlparse.urljoin(base, "304/test_etag.txt") (response, content) = self.http.request(uri, "GET") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.fromcache, True) (response, content) = self.http.request(uri, "DELETE") self.assertEqual(response.status, 405) (response, content) = self.http.request(uri, "GET") self.assertEqual(response.fromcache, False) def testUpdateUsesCachedETag(self): # Test that we natively support http://www.w3.org/1999/04/Editing/ uri = urlparse.urljoin(base, "conditional-updates/test.cgi") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, False) (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, True) (response, content) = self.http.request(uri, "PUT", body="foo") self.assertEqual(response.status, 200) (response, content) = self.http.request(uri, "PUT", body="foo") self.assertEqual(response.status, 412) def testUpdatePatchUsesCachedETag(self): # Test that we natively support http://www.w3.org/1999/04/Editing/ uri = urlparse.urljoin(base, "conditional-updates/test.cgi") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, False) (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, True) (response, content) = self.http.request(uri, "PATCH", body="foo") self.assertEqual(response.status, 200) (response, content) = self.http.request(uri, "PATCH", body="foo") self.assertEqual(response.status, 412) def testUpdateUsesCachedETagAndOCMethod(self): # Test that we natively support http://www.w3.org/1999/04/Editing/ uri = urlparse.urljoin(base, "conditional-updates/test.cgi") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, False) (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, True) self.http.optimistic_concurrency_methods.append("DELETE") (response, content) = self.http.request(uri, "DELETE") self.assertEqual(response.status, 200) def testUpdateUsesCachedETagOverridden(self): # Test that we natively support http://www.w3.org/1999/04/Editing/ uri = urlparse.urljoin(base, "conditional-updates/test.cgi") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, False) (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, True) (response, content) = self.http.request(uri, "PUT", body="foo", headers={'if-match': 'fred'}) self.assertEqual(response.status, 412) def testBasicAuth(self): # Test Basic Authentication uri = urlparse.urljoin(base, "basic/file.txt") 
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        uri = urlparse.urljoin(base, "basic/")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        self.http.add_credentials('joe', 'password')
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)

        uri = urlparse.urljoin(base, "basic/file.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)

    def testBasicAuthWithDomain(self):
        # Test Basic Authentication
        uri = urlparse.urljoin(base, "basic/file.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        uri = urlparse.urljoin(base, "basic/")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        self.http.add_credentials('joe', 'password', "example.org")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        uri = urlparse.urljoin(base, "basic/file.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        domain = urlparse.urlparse(base)[1]
        self.http.add_credentials('joe', 'password', domain)
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)

        uri = urlparse.urljoin(base, "basic/file.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)

    def testBasicAuthTwoDifferentCredentials(self):
        # Test Basic Authentication with multiple sets of credentials
        uri = urlparse.urljoin(base, "basic2/file.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        uri = urlparse.urljoin(base, "basic2/")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        self.http.add_credentials('fred', 'barney')
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)

        uri = urlparse.urljoin(base, "basic2/file.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)

    def testBasicAuthNested(self):
        # Test Basic Authentication with resources
        # that are nested
        uri = urlparse.urljoin(base, "basic-nested/")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        uri = urlparse.urljoin(base, "basic-nested/subdir")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        # Now add in credentials one at a time and test.
        self.http.add_credentials('joe', 'password')

        uri = urlparse.urljoin(base, "basic-nested/")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)

        uri = urlparse.urljoin(base, "basic-nested/subdir")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        self.http.add_credentials('fred', 'barney')

        uri = urlparse.urljoin(base, "basic-nested/")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)

        uri = urlparse.urljoin(base, "basic-nested/subdir")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)

    def testDigestAuth(self):
        # Test that we support Digest Authentication
        uri = urlparse.urljoin(base, "digest/")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        self.http.add_credentials('joe', 'password')
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)

        uri = urlparse.urljoin(base, "digest/file.txt")
        (response, content) = self.http.request(uri, "GET")

    def testDigestAuthNextNonceAndNC(self):
        # Test that if the server sets nextnonce that we reset
        # the nonce count back to 1
        uri = urlparse.urljoin(base, "digest/file.txt")
        self.http.add_credentials('joe', 'password')
        (response, content) = self.http.request(uri, "GET", headers={"cache-control": "no-cache"})
        info = httplib2._parse_www_authenticate(response, 'authentication-info')
        self.assertEqual(response.status, 200)
        (response, content) = self.http.request(uri, "GET", headers={"cache-control": "no-cache"})
        info2 = httplib2._parse_www_authenticate(response, 'authentication-info')
        self.assertEqual(response.status, 200)

        if info.has_key('nextnonce'):
            self.assertEqual(info2['nc'], 1)

    def testDigestAuthStale(self):
        # Test that we can handle a nonce becoming stale
        uri = urlparse.urljoin(base, "digest-expire/file.txt")
        self.http.add_credentials('joe', 'password')
        (response, content) = self.http.request(uri, "GET", headers={"cache-control": "no-cache"})
        info = httplib2._parse_www_authenticate(response, 'authentication-info')
        self.assertEqual(response.status, 200)

        time.sleep(3)  # Sleep long enough that the nonce becomes stale

        (response, content) = self.http.request(uri, "GET", headers={"cache-control": "no-cache"})
        self.assertFalse(response.fromcache)
        self.assertTrue(response._stale_digest)
        info3 = httplib2._parse_www_authenticate(response, 'authentication-info')
        self.assertEqual(response.status, 200)

    def reflector(self, content):
        return dict([tuple(x.split("=", 1)) for x in content.strip().split("\n")])

    def testReflector(self):
        uri = urlparse.urljoin(base, "reflector/reflector.cgi")
        (response, content) = self.http.request(uri, "GET")
        d = self.reflector(content)
        self.assertTrue(d.has_key('HTTP_USER_AGENT'))

    def testConnectionClose(self):
        uri = "http://www.google.com/"
        (response, content) = self.http.request(uri, "GET")
        for c in self.http.connections.values():
            self.assertNotEqual(None, c.sock)
        (response, content) = self.http.request(uri, "GET", headers={"connection": "close"})
        for c in self.http.connections.values():
            self.assertEqual(None, c.sock)

    def testPickleHttp(self):
        pickled_http = pickle.dumps(self.http)
        new_http = pickle.loads(pickled_http)

        self.assertEqual(sorted(new_http.__dict__.keys()),
                         sorted(self.http.__dict__.keys()))
        for key in new_http.__dict__:
            if key in ('certificates', 'credentials'):
                self.assertEqual(new_http.__dict__[key].credentials,
                                 self.http.__dict__[key].credentials)
            elif key == 'cache':
                self.assertEqual(new_http.__dict__[key].cache,
                                 self.http.__dict__[key].cache)
            else:
                self.assertEqual(new_http.__dict__[key],
                                 self.http.__dict__[key])

    def testPickleHttpWithConnection(self):
        self.http.request('http://bitworking.org', connection_type=_MyHTTPConnection)
        pickled_http = pickle.dumps(self.http)
        new_http = pickle.loads(pickled_http)

        self.assertEqual(self.http.connections.keys(), ['http:bitworking.org'])
        self.assertEqual(new_http.connections, {})

    def testPickleCustomRequestHttp(self):
        def dummy_request(*args, **kwargs):
            return new_request(*args, **kwargs)
        dummy_request.dummy_attr = 'dummy_value'

        self.http.request = dummy_request
        pickled_http = pickle.dumps(self.http)
        self.assertFalse("S'request'" in pickled_http)


try:
    import memcache

    class HttpTestMemCached(HttpTest):
        def setUp(self):
            self.cache = memcache.Client(['127.0.0.1:11211'], debug=0)
            #self.cache = memcache.Client(['10.0.0.4:11211'], debug=1)
            self.http = httplib2.Http(self.cache)
            self.cache.flush_all()
            # Not exactly sure why the sleep is needed here, but
            # if not present then some unit tests that rely on caching
            # fail. Memcached seems to lose some sets immediately
            # after a flush_all if the set is to a value that
            # was previously cached. (Maybe the flush is handled async?)
            time.sleep(1)
            self.http.clear_credentials()
except:
    pass
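
# The HttpPrivateTest cases below exercise httplib2's cache freshness model.
# A simplified sketch of the decision they pin down (illustrative only, not
# the real _entry_disposition, which also handles min-fresh, Expires parsing
# and more): 'no-cache' on the request makes the transaction TRANSPARENT,
# 'only-if-cached' forces FRESH, 'no-cache'/'must-revalidate' on the stored
# response forces STALE, and otherwise the entry is FRESH only while its age
# is within the freshness lifetime derived from max-age or Date/Expires.
def _example_entry_disposition(response_cc, request_cc, age, freshness_lifetime):
    if 'no-cache' in request_cc:
        return "TRANSPARENT"
    if 'only-if-cached' in request_cc:
        return "FRESH"
    if 'no-cache' in response_cc or 'must-revalidate' in response_cc:
        return "STALE"
    if age < freshness_lifetime:
        return "FRESH"
    return "STALE"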

# ------------------------------------------------------------------------

class HttpPrivateTest(unittest.TestCase):

    def testParseCacheControl(self):
        # Test that we can parse the Cache-Control header
        self.assertEqual({}, httplib2._parse_cache_control({}))

        self.assertEqual({'no-cache': 1}, httplib2._parse_cache_control({'cache-control': ' no-cache'}))
        cc = httplib2._parse_cache_control({'cache-control': ' no-cache, max-age = 7200'})
        self.assertEqual(cc['no-cache'], 1)
        self.assertEqual(cc['max-age'], '7200')
        cc = httplib2._parse_cache_control({'cache-control': ' , '})
        self.assertEqual(cc[''], 1)

        try:
            cc = httplib2._parse_cache_control({'cache-control': 'Max-age=3600;post-check=1800,pre-check=3600'})
            self.assertTrue("max-age" in cc)
        except:
            self.fail("Should not throw exception")

    def testNormalizeHeaders(self):
        # Test that we normalize headers to lowercase
        h = httplib2._normalize_headers({'Cache-Control': 'no-cache', 'Other': 'Stuff'})
        self.assertTrue(h.has_key('cache-control'))
        self.assertTrue(h.has_key('other'))
        self.assertEqual('Stuff', h['other'])

    def testExpirationModelTransparent(self):
        # Test that no-cache makes our request TRANSPARENT
        response_headers = {'cache-control': 'max-age=7200'}
        request_headers = {'cache-control': 'no-cache'}
        self.assertEqual("TRANSPARENT", httplib2._entry_disposition(response_headers, request_headers))

    def testMaxAgeNonNumeric(self):
        # Test that a non-numeric max-age makes the response STALE
        response_headers = {'cache-control': 'max-age=fred, min-fresh=barney'}
        request_headers = {}
        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))

    def testExpirationModelNoCacheResponse(self):
        # The date and expires point to an entry that should be
        # FRESH, but the no-cache over-rides that.
        now = time.time()
        response_headers = {
            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
            'expires': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+4)),
            'cache-control': 'no-cache'
        }
        request_headers = {}
        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))

    def testExpirationModelStaleRequestMustReval(self):
        # must-revalidate forces STALE
        self.assertEqual("STALE", httplib2._entry_disposition({}, {'cache-control': 'must-revalidate'}))

    def testExpirationModelStaleResponseMustReval(self):
        # must-revalidate forces STALE
        self.assertEqual("STALE", httplib2._entry_disposition({'cache-control': 'must-revalidate'}, {}))

    def testExpirationModelFresh(self):
        response_headers = {
            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime()),
            'cache-control': 'max-age=2'
        }
        request_headers = {}
        self.assertEqual("FRESH", httplib2._entry_disposition(response_headers, request_headers))
        time.sleep(3)
        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))

    def testExpirationMaxAge0(self):
        response_headers = {
            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime()),
            'cache-control': 'max-age=0'
        }
        request_headers = {}
        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))

    def testExpirationModelDateAndExpires(self):
        now = time.time()
        response_headers = {
            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
            'expires': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+2)),
        }
        request_headers = {}
        self.assertEqual("FRESH", httplib2._entry_disposition(response_headers, request_headers))
        time.sleep(3)
        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))

    def testExpiresZero(self):
        now = time.time()
        response_headers = {
            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
            'expires': "0",
        }
        request_headers = {}
        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))

    def testExpirationModelDateOnly(self):
        now = time.time()
        response_headers = {
            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+3)),
        }
        request_headers = {}
        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))

    def testExpirationModelOnlyIfCached(self):
        response_headers = {}
        request_headers = {'cache-control': 'only-if-cached'}
        self.assertEqual("FRESH", httplib2._entry_disposition(response_headers, request_headers))

    def testExpirationModelMaxAgeBoth(self):
        now = time.time()
        response_headers = {
            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
            'cache-control': 'max-age=2'
        }
        request_headers = {'cache-control': 'max-age=0'}
        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))

    def testExpirationModelDateAndExpiresMinFresh1(self):
        now = time.time()
        response_headers = {
            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
            'expires': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+2)),
        }
        request_headers = {'cache-control': 'min-fresh=2'}
        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))

    def testExpirationModelDateAndExpiresMinFresh2(self):
        now = time.time()
        response_headers = {
            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
            'expires': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+4)),
        }
        request_headers = {'cache-control': 'min-fresh=2'}
        self.assertEqual("FRESH", httplib2._entry_disposition(response_headers, request_headers))
    def testParseWWWAuthenticateEmpty(self):
        res = httplib2._parse_www_authenticate({})
        self.assertEqual(len(res.keys()), 0)

    def testParseWWWAuthenticate(self):
        # different uses of spaces around commas
        res = httplib2._parse_www_authenticate({'www-authenticate': 'Test realm="test realm" , foo=foo ,bar="bar", baz=baz,qux=qux'})
        self.assertEqual(len(res.keys()), 1)
        self.assertEqual(len(res['test'].keys()), 5)

        # tokens with non-alphanum
        res = httplib2._parse_www_authenticate({'www-authenticate': 'T*!%#st realm=to*!%#en, to*!%#en="quoted string"'})
        self.assertEqual(len(res.keys()), 1)
        self.assertEqual(len(res['t*!%#st'].keys()), 2)

        # quoted string with quoted pairs
        res = httplib2._parse_www_authenticate({'www-authenticate': 'Test realm="a \\"test\\" realm"'})
        self.assertEqual(len(res.keys()), 1)
        self.assertEqual(res['test']['realm'], 'a "test" realm')

    def testParseWWWAuthenticateStrict(self):
        httplib2.USE_WWW_AUTH_STRICT_PARSING = 1
        self.testParseWWWAuthenticate()
        httplib2.USE_WWW_AUTH_STRICT_PARSING = 0

    def testParseWWWAuthenticateBasic(self):
        res = httplib2._parse_www_authenticate({'www-authenticate': 'Basic realm="me"'})
        basic = res['basic']
        self.assertEqual('me', basic['realm'])

        res = httplib2._parse_www_authenticate({'www-authenticate': 'Basic realm="me", algorithm="MD5"'})
        basic = res['basic']
        self.assertEqual('me', basic['realm'])
        self.assertEqual('MD5', basic['algorithm'])

        res = httplib2._parse_www_authenticate({'www-authenticate': 'Basic realm="me", algorithm=MD5'})
        basic = res['basic']
        self.assertEqual('me', basic['realm'])
        self.assertEqual('MD5', basic['algorithm'])

    def testParseWWWAuthenticateBasic2(self):
        res = httplib2._parse_www_authenticate({'www-authenticate': 'Basic realm="me",other="fred" '})
        basic = res['basic']
        self.assertEqual('me', basic['realm'])
        self.assertEqual('fred', basic['other'])

    def testParseWWWAuthenticateBasic3(self):
        res = httplib2._parse_www_authenticate({'www-authenticate': 'Basic REAlm="me" '})
        basic = res['basic']
        self.assertEqual('me', basic['realm'])

    def testParseWWWAuthenticateDigest(self):
        res = httplib2._parse_www_authenticate({
            'www-authenticate': 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41"'})
        digest = res['digest']
        self.assertEqual('testrealm@host.com', digest['realm'])
        self.assertEqual('auth,auth-int', digest['qop'])

    def testParseWWWAuthenticateMultiple(self):
        res = httplib2._parse_www_authenticate({
            'www-authenticate': 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41" Basic REAlm="me" '})
        digest = res['digest']
        self.assertEqual('testrealm@host.com', digest['realm'])
        self.assertEqual('auth,auth-int', digest['qop'])
        self.assertEqual('dcd98b7102dd2f0e8b11d0f600bfb0c093', digest['nonce'])
        self.assertEqual('5ccc069c403ebaf9f0171e9517f40e41', digest['opaque'])
        basic = res['basic']
        self.assertEqual('me', basic['realm'])

    def testParseWWWAuthenticateMultiple2(self):
        # Handle an added comma between challenges, which might get thrown in if the
        # challenges were originally sent in separate www-authenticate headers.
        res = httplib2._parse_www_authenticate({
            'www-authenticate': 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me" '})
        digest = res['digest']
        self.assertEqual('testrealm@host.com', digest['realm'])
        self.assertEqual('auth,auth-int', digest['qop'])
        self.assertEqual('dcd98b7102dd2f0e8b11d0f600bfb0c093', digest['nonce'])
        self.assertEqual('5ccc069c403ebaf9f0171e9517f40e41', digest['opaque'])
        basic = res['basic']
        self.assertEqual('me', basic['realm'])

    def testParseWWWAuthenticateMultiple3(self):
        # Handle an added comma between challenges, which might get thrown in if the
        # challenges were originally sent in separate www-authenticate headers.
        res = httplib2._parse_www_authenticate({
            'www-authenticate': 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me", WSSE realm="foo", profile="UsernameToken"'})
        digest = res['digest']
        self.assertEqual('testrealm@host.com', digest['realm'])
        self.assertEqual('auth,auth-int', digest['qop'])
        self.assertEqual('dcd98b7102dd2f0e8b11d0f600bfb0c093', digest['nonce'])
        self.assertEqual('5ccc069c403ebaf9f0171e9517f40e41', digest['opaque'])
        basic = res['basic']
        self.assertEqual('me', basic['realm'])
        wsse = res['wsse']
        self.assertEqual('foo', wsse['realm'])
        self.assertEqual('UsernameToken', wsse['profile'])

    def testParseWWWAuthenticateMultiple4(self):
        res = httplib2._parse_www_authenticate({
            'www-authenticate': 'Digest realm="test-real.m@host.com", qop \t=\t"\tauth,auth-int", nonce="(*)&^&$%#",opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me", WSSE realm="foo", profile="UsernameToken"'})
        digest = res['digest']
        self.assertEqual('test-real.m@host.com', digest['realm'])
        self.assertEqual('\tauth,auth-int', digest['qop'])
        self.assertEqual('(*)&^&$%#', digest['nonce'])

    def testParseWWWAuthenticateMoreQuoteCombos(self):
        res = httplib2._parse_www_authenticate({'www-authenticate': 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", stale=true'})
        digest = res['digest']
        self.assertEqual('myrealm', digest['realm'])

    def testParseWWWAuthenticateMalformed(self):
        try:
            res = httplib2._parse_www_authenticate({'www-authenticate': 'OAuth "Facebook Platform" "invalid_token" "Invalid OAuth access token."'})
            self.fail("should raise an exception")
        except httplib2.MalformedHeader:
            pass

    def testDigestObject(self):
        credentials = ('joe', 'password')
        host = None
        request_uri = '/projects/httplib2/test/digest/'
        headers = {}
        response = {
            'www-authenticate': 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth"'
        }
        content = ""

        d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None)
        d.request("GET", request_uri, headers, content, cnonce="33033375ec278a46")
        our_request = "authorization: %s" % headers['authorization']
        working_request = 'authorization: Digest username="joe", realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", uri="/projects/httplib2/test/digest/", algorithm=MD5, response="97ed129401f7cdc60e5db58a80f3ea8b", qop=auth, nc=00000001, cnonce="33033375ec278a46"'
        self.assertEqual(our_request, working_request)
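
    # How the expected response hash above is derived (RFC 2617, qop="auth"):
    # HA1 = MD5(username:realm:password), HA2 = MD5(method:digest-uri), and
    # response = MD5(HA1:nonce:nc:cnonce:qop:HA2). A sketch for reference only;
    # httplib2.DigestAuthentication computes this internally. nc is the
    # zero-padded nonce count string, e.g. "00000001".
    @staticmethod
    def _example_digest_response(username, realm, password, method, uri,
                                 nonce, nc, cnonce, qop="auth"):
        try:
            from hashlib import md5
        except ImportError:  # Python 2.4
            from md5 import new as md5
        ha1 = md5("%s:%s:%s" % (username, realm, password)).hexdigest()
        ha2 = md5("%s:%s" % (method, uri)).hexdigest()
        return md5("%s:%s:%s:%s:%s:%s" % (ha1, nonce, nc, cnonce, qop, ha2)).hexdigest()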
realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", opaque="atestopaque"' } content = "" d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None) d.request("GET", request_uri, headers, content, cnonce="33033375ec278a46") our_request = "authorization: %s" % headers['authorization'] working_request = 'authorization: Digest username="joe", realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", uri="/projects/httplib2/test/digest/", algorithm=MD5, response="97ed129401f7cdc60e5db58a80f3ea8b", qop=auth, nc=00000001, cnonce="33033375ec278a46", opaque="atestopaque"' self.assertEqual(our_request, working_request) def testDigestObjectStale(self): credentials = ('joe', 'password') host = None request_uri = '/projects/httplib2/test/digest/' headers = {} response = httplib2.Response({ }) response['www-authenticate'] = 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", stale=true' response.status = 401 content = "" d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None) # Returns true to force a retry self.assertTrue( d.response(response, content) ) def testDigestObjectAuthInfo(self): credentials = ('joe', 'password') host = None request_uri = '/projects/httplib2/test/digest/' headers = {} response = httplib2.Response({ }) response['www-authenticate'] = 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", stale=true' response['authentication-info'] = 'nextnonce="fred"' content = "" d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None) # Returns true to force a retry self.assertFalse( d.response(response, content) ) self.assertEqual('fred', d.challenge['nonce']) self.assertEqual(1, d.challenge['nc']) def testWsseAlgorithm(self): digest = httplib2._wsse_username_token("d36e316282959a9ed4c89851497a717f", "2003-12-15T14:43:07Z", "taadtaadpstcsm") expected = "quR/EWLAV4xLf9Zqyw4pDmfV9OY=" self.assertEqual(expected, digest) def testEnd2End(self): # one end to end header response = {'content-type': 'application/atom+xml', 'te': 'deflate'} end2end = httplib2._get_end2end_headers(response) self.assertTrue('content-type' in end2end) self.assertTrue('te' not in end2end) self.assertTrue('connection' not in end2end) # one end to end header that gets eliminated response = {'connection': 'content-type', 'content-type': 'application/atom+xml', 'te': 'deflate'} end2end = httplib2._get_end2end_headers(response) self.assertTrue('content-type' not in end2end) self.assertTrue('te' not in end2end) self.assertTrue('connection' not in end2end) # Degenerate case of no headers response = {} end2end = httplib2._get_end2end_headers(response) self.assertEquals(0, len(end2end)) # Degenerate case of connection referrring to a header not passed in response = {'connection': 'content-type'} end2end = httplib2._get_end2end_headers(response) self.assertEquals(0, len(end2end)) class TestProxyInfo(unittest.TestCase): def setUp(self): self.orig_env = dict(os.environ) def tearDown(self): os.environ.clear() os.environ.update(self.orig_env) def test_from_url(self): pi = httplib2.proxy_info_from_url('http://myproxy.example.com') self.assertEquals(pi.proxy_host, 'myproxy.example.com') self.assertEquals(pi.proxy_port, 80) self.assertEquals(pi.proxy_user, None) def test_from_url_ident(self): pi = 
httplib2.proxy_info_from_url('http://zoidberg:fish@someproxy:99') self.assertEquals(pi.proxy_host, 'someproxy') self.assertEquals(pi.proxy_port, 99) self.assertEquals(pi.proxy_user, 'zoidberg') self.assertEquals(pi.proxy_pass, 'fish') def test_from_env(self): os.environ['http_proxy'] = 'http://myproxy.example.com:8080' pi = httplib2.proxy_info_from_environment() self.assertEquals(pi.proxy_host, 'myproxy.example.com') self.assertEquals(pi.proxy_port, 8080) self.assertEquals(pi.bypass_hosts, []) def test_from_env_no_proxy(self): os.environ['http_proxy'] = 'http://myproxy.example.com:80' os.environ['https_proxy'] = 'http://myproxy.example.com:81' os.environ['no_proxy'] = 'localhost,otherhost.domain.local' pi = httplib2.proxy_info_from_environment('https') self.assertEquals(pi.proxy_host, 'myproxy.example.com') self.assertEquals(pi.proxy_port, 81) self.assertEquals(pi.bypass_hosts, ['localhost', 'otherhost.domain.local']) def test_from_env_none(self): os.environ.clear() pi = httplib2.proxy_info_from_environment() self.assertEquals(pi, None) def test_applies_to(self): os.environ['http_proxy'] = 'http://myproxy.example.com:80' os.environ['https_proxy'] = 'http://myproxy.example.com:81' os.environ['no_proxy'] = 'localhost,otherhost.domain.local,example.com' pi = httplib2.proxy_info_from_environment() self.assertFalse(pi.applies_to('localhost')) self.assertTrue(pi.applies_to('www.google.com')) self.assertFalse(pi.applies_to('www.example.com')) def test_no_proxy_star(self): os.environ['http_proxy'] = 'http://myproxy.example.com:80' os.environ['NO_PROXY'] = '*' pi = httplib2.proxy_info_from_environment() for host in ('localhost', '169.254.38.192', 'www.google.com'): self.assertFalse(pi.applies_to(host)) if __name__ == '__main__': unittest.main() httplib2-0.8/python2/httplib2.egg-info/0000750017135500116100000000000012115724726017432 5ustar jcgregorioenghttplib2-0.8/python2/httplib2.egg-info/PKG-INFO0000640017135500116100000000444112115724726020533 0ustar jcgregorioengMetadata-Version: 1.1 Name: httplib2 Version: 0.8 Summary: A comprehensive HTTP client library. Home-page: http://code.google.com/p/httplib2/ Author: Joe Gregorio Author-email: joe@bitworking.org License: MIT Download-URL: http://httplib2.googlecode.com/files/httplib2-0.8.tar.gz Description: A comprehensive HTTP client library, ``httplib2`` supports many features left out of other HTTP libraries. **HTTP and HTTPS** HTTPS support is only available if the socket module was compiled with SSL support. **Keep-Alive** Supports HTTP 1.1 Keep-Alive, keeping the socket open and performing multiple requests over the same connection if possible. **Authentication** The following three types of HTTP Authentication are supported. These can be used over both HTTP and HTTPS. * Digest * Basic * WSSE **Caching** The module can optionally operate with a private cache that understands the Cache-Control: header and uses both the ETag and Last-Modified cache validators. Both file system and memcached based caches are supported. **All Methods** The module can handle any HTTP request method, not just GET and POST. **Redirects** Automatically follows 3XX redirects on GETs. **Compression** Handles both 'deflate' and 'gzip' types of compression. **Lost update support** Automatically adds back ETags into PUT requests to resources we have already cached. This implements Section 3.2 of Detecting the Lost Update Problem Using Unreserved Checkout **Unit Tested** A large and growing set of unit tests. 
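        **Usage**
        
        A minimal sketch of the interface summarized above (the target URL
        is a placeholder)::
        
            import httplib2
            h = httplib2.Http(".cache")
            resp, content = h.request("http://example.org/", "GET")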
Platform: UNKNOWN
Classifier: Development Status :: 4 - Beta
Classifier: Environment :: Web Environment
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Topic :: Internet :: WWW/HTTP
Classifier: Topic :: Software Development :: Libraries
httplib2-0.8/python2/httplib2.egg-info/SOURCES.txt0000640017135500116100000000142312115724726021317 0ustar jcgregorioengMANIFEST.in
README
setup.py
python2/httplib2test.py
python2/httplib2test_appengine.py
python2/httplib2/__init__.py
python2/httplib2/cacerts.txt
python2/httplib2/iri2uri.py
python2/httplib2/socks.py
python2/httplib2.egg-info/PKG-INFO
python2/httplib2.egg-info/SOURCES.txt
python2/httplib2.egg-info/dependency_links.txt
python2/httplib2.egg-info/top_level.txt
python2/httplib2/test/__init__.py
python2/httplib2/test/miniserver.py
python2/httplib2/test/other_cacerts.txt
python2/httplib2/test/smoke_test.py
python2/httplib2/test/test_no_socket.py
python2/httplib2/test/brokensocket/socket.py
python2/httplib2/test/functional/test_proxies.py
python3/httplib2test.py
python3/httplib2/__init__.py
python3/httplib2/cacerts.txt
python3/httplib2/iri2uri.py
python3/httplib2/test/other_cacerts.txthttplib2-0.8/python2/httplib2.egg-info/top_level.txt0000640017135500116100000000001112115724726022155 0ustar jcgregorioeng
httplib2
httplib2-0.8/python2/httplib2.egg-info/dependency_links.txt0000640017135500116100000000000112115724726023501 0ustar jcgregorioeng
httplib2-0.8/python2/httplib2/0000750017135500116100000000000012115724726015740 5ustar jcgregorioenghttplib2-0.8/python2/httplib2/cacerts.txt0000640017135500116100000011536612115724726020132 0ustar jcgregorioeng
# Certificate Authority certificates for validating SSL connections.
#
# This file contains PEM format certificates generated from
# http://mxr.mozilla.org/seamonkey/source/security/nss/lib/ckfw/builtins/certdata.txt
#
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is the Netscape security libraries.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1994-2000
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL.
If you do not delete # the provisions above, a recipient may use your version of this file under # the terms of any one of the MPL, the GPL or the LGPL. # # ***** END LICENSE BLOCK ***** Verisign/RSA Secure Server CA ============================= -----BEGIN CERTIFICATE----- MIICNDCCAaECEAKtZn5ORf5eV288mBle3cAwDQYJKoZIhvcNAQECBQAwXzELMAkG A1UEBhMCVVMxIDAeBgNVBAoTF1JTQSBEYXRhIFNlY3VyaXR5LCBJbmMuMS4wLAYD VQQLEyVTZWN1cmUgU2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk0 MTEwOTAwMDAwMFoXDTEwMDEwNzIzNTk1OVowXzELMAkGA1UEBhMCVVMxIDAeBgNV BAoTF1JTQSBEYXRhIFNlY3VyaXR5LCBJbmMuMS4wLAYDVQQLEyVTZWN1cmUgU2Vy dmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGbMA0GCSqGSIb3DQEBAQUAA4GJ ADCBhQJ+AJLOesGugz5aqomDV6wlAXYMra6OLDfO6zV4ZFQD5YRAUcm/jwjiioII 0haGN1XpsSECrXZogZoFokvJSyVmIlZsiAeP94FZbYQHZXATcXY+m3dM41CJVphI uR2nKRoTLkoRWZweFdVJVCxzOmmCsZc5nG1wZ0jl3S3WyB57AgMBAAEwDQYJKoZI hvcNAQECBQADfgBl3X7hsuyw4jrg7HFGmhkRuNPHoLQDQCYCPgmc4RKz0Vr2N6W3 YQO2WxZpO8ZECAyIUwxrl0nHPjXcbLm7qt9cuzovk2C2qUtN8iD3zV9/ZHuO3ABc 1/p3yjkWWW8O6tO1g39NTUJWdrTJXwT4OPjr0l91X817/OWOgHz8UA== -----END CERTIFICATE----- Thawte Personal Basic CA ======================== -----BEGIN CERTIFICATE----- MIIDITCCAoqgAwIBAgIBADANBgkqhkiG9w0BAQQFADCByzELMAkGA1UEBhMCWkEx FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT ZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFBlcnNvbmFsIEJhc2lj IENBMSgwJgYJKoZIhvcNAQkBFhlwZXJzb25hbC1iYXNpY0B0aGF3dGUuY29tMB4X DTk2MDEwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgcsxCzAJBgNVBAYTAlpBMRUw EwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEaMBgGA1UE ChMRVGhhd3RlIENvbnN1bHRpbmcxKDAmBgNVBAsTH0NlcnRpZmljYXRpb24gU2Vy dmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQZXJzb25hbCBCYXNpYyBD QTEoMCYGCSqGSIb3DQEJARYZcGVyc29uYWwtYmFzaWNAdGhhd3RlLmNvbTCBnzAN BgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAvLyTU23AUE+CFeZIlDWmWr5vQvoPR+53 dXLdjUmbllegeNTKP1GzaQuRdhciB5dqxFGTS+CN7zeVoQxN2jSQHReJl+A1OFdK wPQIcOk8RHtQfmGakOMj04gRRif1CwcOu93RfyAKiLlWCy4cgNrx454p7xS9CkT7 G1sY0b8jkyECAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQQF AAOBgQAt4plrsD16iddZopQBHyvdEktTwq1/qqcAXJFAVyVKOKqEcLnZgA+le1z7 c8a914phXAPjLSeoF+CEhULcXpvGt7Jtu3Sv5D/Lp7ew4F2+eIMllNLbgQ95B21P 9DkVWlIBe94y1k049hJcBlDfBVu9FEuh3ym6O0GN92NWod8isQ== -----END CERTIFICATE----- Thawte Personal Premium CA ========================== -----BEGIN CERTIFICATE----- MIIDKTCCApKgAwIBAgIBADANBgkqhkiG9w0BAQQFADCBzzELMAkGA1UEBhMCWkEx FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT ZXJ2aWNlcyBEaXZpc2lvbjEjMCEGA1UEAxMaVGhhd3RlIFBlcnNvbmFsIFByZW1p dW0gQ0ExKjAoBgkqhkiG9w0BCQEWG3BlcnNvbmFsLXByZW1pdW1AdGhhd3RlLmNv bTAeFw05NjAxMDEwMDAwMDBaFw0yMDEyMzEyMzU5NTlaMIHPMQswCQYDVQQGEwJa QTEVMBMGA1UECBMMV2VzdGVybiBDYXBlMRIwEAYDVQQHEwlDYXBlIFRvd24xGjAY BgNVBAoTEVRoYXd0ZSBDb25zdWx0aW5nMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9u IFNlcnZpY2VzIERpdmlzaW9uMSMwIQYDVQQDExpUaGF3dGUgUGVyc29uYWwgUHJl bWl1bSBDQTEqMCgGCSqGSIb3DQEJARYbcGVyc29uYWwtcHJlbWl1bUB0aGF3dGUu Y29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDJZtn4B0TPuYwu8KHvE0Vs Bd/eJxZRNkERbGw77f4QfRKe5ZtCmv5gMcNmt3M6SK5O0DI3lIi1DbbZ8/JE2dWI Et12TfIa/G8jHnrx2JhFTgcQ7xZC0EN1bUre4qrJMf8fAHB8Zs8QJQi6+u4A6UYD ZicRFTuqW/KY3TZCstqIdQIDAQABoxMwETAPBgNVHRMBAf8EBTADAQH/MA0GCSqG SIb3DQEBBAUAA4GBAGk2ifc0KjNyL2071CKyuG+axTZmDhs8obF1Wub9NdP4qPIH b4Vnjt4rueIXsDqg8A6iAJrf8xQVbrvIhVqYgPn/vnQdPfP+MCXRNzRn+qVxeTBh KXLA4CxM+1bkOqhv5TJZUtt1KFBZDPgLGeSs2a+WjS9Q2wfD6h+rM+D1KzGJ -----END CERTIFICATE----- Thawte Personal Freemail CA =========================== -----BEGIN 
CERTIFICATE----- MIIDLTCCApagAwIBAgIBADANBgkqhkiG9w0BAQQFADCB0TELMAkGA1UEBhMCWkEx FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT ZXJ2aWNlcyBEaXZpc2lvbjEkMCIGA1UEAxMbVGhhd3RlIFBlcnNvbmFsIEZyZWVt YWlsIENBMSswKQYJKoZIhvcNAQkBFhxwZXJzb25hbC1mcmVlbWFpbEB0aGF3dGUu Y29tMB4XDTk2MDEwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgdExCzAJBgNVBAYT AlpBMRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEa MBgGA1UEChMRVGhhd3RlIENvbnN1bHRpbmcxKDAmBgNVBAsTH0NlcnRpZmljYXRp b24gU2VydmljZXMgRGl2aXNpb24xJDAiBgNVBAMTG1RoYXd0ZSBQZXJzb25hbCBG cmVlbWFpbCBDQTErMCkGCSqGSIb3DQEJARYccGVyc29uYWwtZnJlZW1haWxAdGhh d3RlLmNvbTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA1GnX1LCUZFtx6UfY DFG26nKRsIRefS0Nj3sS34UldSh0OkIsYyeflXtL734Zhx2G6qPduc6WZBrCFG5E rHzmj+hND3EfQDimAKOHePb5lIZererAXnbr2RSjXW56fAylS1V/Bhkpf56aJtVq uzgkCGqYx7Hao5iR/Xnb5VrEHLkCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zAN BgkqhkiG9w0BAQQFAAOBgQDH7JJ+Tvj1lqVnYiqk8E0RYNBvjWBYYawmu1I1XAjP MPuoSpaKH2JCI4wXD/S6ZJwXrEcp352YXtJsYHFcoqzceePnbgBHH7UNKOgCneSa /RP0ptl8sfjcXyMmCZGAc9AUG95DqYMl8uacLxXK/qarigd1iwzdUYRr5PjRznei gQ== -----END CERTIFICATE----- Thawte Server CA ================ -----BEGIN CERTIFICATE----- MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3 dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3 DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91 yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG 7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ qdq5snUb9kLy78fyGPmJvKP/iiMucEc= -----END CERTIFICATE----- Thawte Premium Server CA ======================== -----BEGIN CERTIFICATE----- MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG 9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg== -----END CERTIFICATE----- Equifax Secure CA ================= 
-----BEGIN CERTIFICATE----- MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1 MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y 7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh 1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4 -----END CERTIFICATE----- Verisign Class 1 Public Primary Certification Authority ======================================================= -----BEGIN CERTIFICATE----- MIICPTCCAaYCEQDNun9W8N/kvFT+IqyzcqpVMA0GCSqGSIb3DQEBAgUAMF8xCzAJ BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE3MDUGA1UECxMuQ2xh c3MgMSBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05 NjAxMjkwMDAwMDBaFw0yODA4MDEyMzU5NTlaMF8xCzAJBgNVBAYTAlVTMRcwFQYD VQQKEw5WZXJpU2lnbiwgSW5jLjE3MDUGA1UECxMuQ2xhc3MgMSBQdWJsaWMgUHJp bWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCBnzANBgkqhkiG9w0BAQEFAAOB jQAwgYkCgYEA5Rm/baNWYS2ZSHH2Z965jeu3noaACpEO+jglr0aIguVzqKCbJF0N H8xlbgyw0FaEGIeaBpsQoXPftFg5a27B9hXVqKg/qhIGjTGsf7A01480Z4gJzRQR 4k5FVmkfeAKA2txHkSm7NsljXMXg1y2He6G3MrB7MLoqLzGq7qNn2tsCAwEAATAN BgkqhkiG9w0BAQIFAAOBgQBMP7iLxmjf7kMzDl3ppssHhE16M/+SG/Q2rdiVIjZo EWx8QszznC7EBz8UsA9P/5CSdvnivErpj82ggAr3xSnxgiJduLHdgSOjeyUVRjB5 FvjqBUuUfx3CHMjjt/QQQDwTw18fU+hI5Ia0e6E1sHslurjTjqs/OJ0ANACY89Fx lA== -----END CERTIFICATE----- Verisign Class 2 Public Primary Certification Authority ======================================================= -----BEGIN CERTIFICATE----- MIICPDCCAaUCEC0b/EoXjaOR6+f/9YtFvgswDQYJKoZIhvcNAQECBQAwXzELMAkG A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz cyAyIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAyIFB1YmxpYyBQcmlt YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN ADCBiQKBgQC2WoujDWojg4BrzzmH9CETMwZMJaLtVRKXxaeAufqDwSCg+i8VDXyh YGt+eSz6Bg86rvYbb7HS/y8oUl+DfUvEerf4Zh+AVPy3wo5ZShRXRtGak75BkQO7 FYCTXOvnzAhsPz6zSvz/S2wj1VCCJkQZjiPDceoZJEcEnnW/yKYAHwIDAQABMA0G CSqGSIb3DQEBAgUAA4GBAIobK/o5wXTXXtgZZKJYSi034DNHD6zt96rbHuSLBlxg J8pFUs4W7z8GZOeUaHxgMxURaa+dYo2jA1Rrpr7l7gUYYAS/QoD90KioHgE796Nc r6Pc5iaAIzy4RHT3Cq5Ji2F4zCS/iIqnDupzGUH9TQPwiNHleI2lKk/2lw0Xd8rY -----END CERTIFICATE----- Verisign Class 3 Public Primary Certification Authority ======================================================= -----BEGIN CERTIFICATE----- MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt 
YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G CSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Do lbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNyc AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k -----END CERTIFICATE----- Verisign Class 1 Public Primary Certification Authority - G2 ============================================================ -----BEGIN CERTIFICATE----- MIIDAjCCAmsCEEzH6qqYPnHTkxD4PTqJkZIwDQYJKoZIhvcNAQEFBQAwgcExCzAJ BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh c3MgMSBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMSBQdWJsaWMg UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB AQUAA4GNADCBiQKBgQCq0Lq+Fi24g9TK0g+8djHKlNgdk4xWArzZbxpvUjZudVYK VdPfQ4chEWWKfo+9Id5rMj8bhDSVBZ1BNeuS65bdqlk/AVNtmU/t5eIqWpDBucSm Fc/IReumXY6cPvBkJHalzasab7bYe1FhbqZ/h8jit+U03EGI6glAvnOSPWvndQID AQABMA0GCSqGSIb3DQEBBQUAA4GBAKlPww3HZ74sy9mozS11534Vnjty637rXC0J h9ZrbWB85a7FkCMMXErQr7Fd88e2CtvgFZMN3QO8x3aKtd1Pw5sTdbgBwObJW2ul uIncrKTdcu1OofdPvAbT6shkdHvClUGcZXNY8ZCaPGqxmMnEh7zPRW1F4m4iP/68 DzFc6PLZ -----END CERTIFICATE----- Verisign Class 2 Public Primary Certification Authority - G2 ============================================================ -----BEGIN CERTIFICATE----- MIIDAzCCAmwCEQC5L2DMiJ+hekYJuFtwbIqvMA0GCSqGSIb3DQEBBQUAMIHBMQsw CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xPDA6BgNVBAsTM0Ns YXNzIDIgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH MjE6MDgGA1UECxMxKGMpIDE5OTggVmVyaVNpZ24sIEluYy4gLSBGb3IgYXV0aG9y aXplZCB1c2Ugb25seTEfMB0GA1UECxMWVmVyaVNpZ24gVHJ1c3QgTmV0d29yazAe Fw05ODA1MTgwMDAwMDBaFw0yODA4MDEyMzU5NTlaMIHBMQswCQYDVQQGEwJVUzEX MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xPDA6BgNVBAsTM0NsYXNzIDIgUHVibGlj IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMjE6MDgGA1UECxMx KGMpIDE5OTggVmVyaVNpZ24sIEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s eTEfMB0GA1UECxMWVmVyaVNpZ24gVHJ1c3QgTmV0d29yazCBnzANBgkqhkiG9w0B AQEFAAOBjQAwgYkCgYEAp4gBIXQs5xoD8JjhlzwPIQjxnNuX6Zr8wgQGE75fUsjM HiwSViy4AWkszJkfrbCWrnkE8hM5wXuYuggs6MKEEyyqaekJ9MepAqRCwiNPStjw DqL7MWzJ5m+ZJwf15vRMeJ5t60aG+rmGyVTyssSv1EYcWskVMP8NbPUtDm3Of3cC AwEAATANBgkqhkiG9w0BAQUFAAOBgQByLvl/0fFx+8Se9sVeUYpAmLho+Jscg9ji nb3/7aHmZuovCfTK1+qlK5X2JGCGTUQug6XELaDTrnhpb3LabK4I8GOSN+a7xDAX rXfMSTWqz9iP0b63GJZHc2pUIjRkLbYWm1lbtFFZOrMLFPQS32eg9K0yZF6xRnIn jBJ7xUS0rg== -----END CERTIFICATE----- Verisign Class 3 Public Primary Certification Authority - G2 ============================================================ -----BEGIN CERTIFICATE----- MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg 
UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4 pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0 13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY oJ2daZH9 -----END CERTIFICATE----- Verisign Class 4 Public Primary Certification Authority - G2 ============================================================ -----BEGIN CERTIFICATE----- MIIDAjCCAmsCEDKIjprS9esTR/h/xCA3JfgwDQYJKoZIhvcNAQEFBQAwgcExCzAJ BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh c3MgNCBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgNCBQdWJsaWMg UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB AQUAA4GNADCBiQKBgQC68OTP+cSuhVS5B1f5j8V/aBH4xBewRNzjMHPVKmIquNDM HO0oW369atyzkSTKQWI8/AIBvxwWMZQFl3Zuoq29YRdsTjCG8FE3KlDHqGKB3FtK qsGgtG7rL+VXxbErQHDbWk2hjh+9Ax/YA9SPTJlxvOKCzFjomDqG04Y48wApHwID AQABMA0GCSqGSIb3DQEBBQUAA4GBAIWMEsGnuVAVess+rLhDityq3RS6iYF+ATwj cSGIL4LcY/oCRaxFWdcqWERbt5+BO5JoPeI3JPV7bI92NZYJqFmduc4jq3TWg/0y cyfYaT5DdPauxYma51N86Xv2S/PBZYPejYqcPIiNOVn8qj8ijaHBZlCBckztImRP T8qAkbYp -----END CERTIFICATE----- Verisign Class 1 Public Primary Certification Authority - G3 ============================================================ -----BEGIN CERTIFICATE----- MIIEGjCCAwICEQCLW3VWhFSFCwDPrzhIzrGkMA0GCSqGSIb3DQEBBQUAMIHKMQsw CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT aWduIENsYXNzIDEgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu IENsYXNzIDEgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAN2E1Lm0+afY8wR4 nN493GwTFtl63SRRZsDHJlkNrAYIwpTRMx/wgzUfbhvI3qpuFU5UJ+/EbRrsC+MO 8ESlV8dAWB6jRx9x7GD2bZTIGDnt/kIYVt/kTEkQeE4BdjVjEjbdZrwBBDajVWjV ojYJrKshJlQGrT/KFOCsyq0GHZXi+J3x4GD/wn91K0zM2v6HmSHquv4+VNfSWXjb PG7PoBMAGrgnoeS+Z5bKoMWznN3JdZ7rMJpfo83ZrngZPyPpXNspva1VyBtUjGP2 6KbqxzcSXKMpHgLZ2x87tNcPVkeBFQRKr4Mn0cVYiMHd9qqnoxjaaKptEVHhv2Vr n5Z20T0CAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAq2aN17O6x5q25lXQBfGfMY1a qtmqRiYPce2lrVNWYgFHKkTp/j90CxObufRNG7LRX7K20ohcs5/Ny9Sn2WCVhDr4 wTcdYcrnsMXlkdpUpqwxga6X3s0IrLjAl4B/bnKk52kTlWUfxJM8/XmPBNQ+T+r3 ns7NZ3xPZQL/kYVUc8f/NveGLezQXk//EZ9yBta4GvFMDSZl4kSAHsef493oCtrs pSCAaWihT37ha88HQfqDjrw43bAuEbFrskLMmrz5SCJ5ShkPshw+IHTZasO+8ih4 E1Z5T21Q6huwtVexN2ZYI/PcD98Kh8TvhgXVOBRgmaNL3gaWcSzy27YfpO8/7g== -----END CERTIFICATE----- Verisign Class 2 Public Primary Certification Authority - G3 
============================================================ -----BEGIN CERTIFICATE----- MIIEGTCCAwECEGFwy0mMX5hFKeewptlQW3owDQYJKoZIhvcNAQEFBQAwgcoxCzAJ BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjEfMB0GA1UECxMWVmVy aVNpZ24gVHJ1c3QgTmV0d29yazE6MDgGA1UECxMxKGMpIDE5OTkgVmVyaVNpZ24s IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTFFMEMGA1UEAxM8VmVyaVNp Z24gQ2xhc3MgMiBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 eSAtIEczMB4XDTk5MTAwMTAwMDAwMFoXDTM2MDcxNjIzNTk1OVowgcoxCzAJBgNV BAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjEfMB0GA1UECxMWVmVyaVNp Z24gVHJ1c3QgTmV0d29yazE6MDgGA1UECxMxKGMpIDE5OTkgVmVyaVNpZ24sIElu Yy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTFFMEMGA1UEAxM8VmVyaVNpZ24g Q2xhc3MgMiBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAt IEczMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArwoNwtUs22e5LeWU J92lvuCwTY+zYVY81nzD9M0+hsuiiOLh2KRpxbXiv8GmR1BeRjmL1Za6tW8UvxDO JxOeBUebMXoT2B/Z0wI3i60sR/COgQanDTAM6/c8DyAd3HJG7qUCyFvDyVZpTMUY wZF7C9UTAJu878NIPkZgIIUq1ZC2zYugzDLdt/1AVbJQHFauzI13TccgTacxdu9o koqQHgiBVrKtaaNS0MscxCM9H5n+TOgWY47GCI72MfbS+uV23bUckqNJzc0BzWjN qWm6o+sdDZykIKbBoMXRRkwXbdKsZj+WjOCE1Db/IlnF+RFgqF8EffIa9iVCYQ/E Srg+iQIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQA0JhU8wI1NQ0kdvekhktdmnLfe xbjQ5F1fdiLAJvmEOjr5jLX77GDx6M4EsMjdpwOPMPOY36TmpDHf0xwLRtxyID+u 7gU8pDM/CzmscHhzS5kr3zDCVLCoO1Wh/hYozUK9dG6A2ydEp85EXdQbkJgNHkKU sQAsBNB0owIFImNjzYO1+8FtYmtpdf1dcEG59b98377BMnMiIYtYgXsVkXq642RI sH/7NiXaldDxJBQX3RiAa0YjOVT1jmIJBB2UkKab5iXiQkWquJCtvgiPqQtCGJTP cjnhsUPgKM+351psE2tJs//jGHyJizNdrDPXp/naOlXJWBD5qu9ats9LS98q -----END CERTIFICATE----- Verisign Class 3 Public Primary Certification Authority - G3 ============================================================ -----BEGIN CERTIFICATE----- MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te 2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC /Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== -----END CERTIFICATE----- Verisign Class 4 Public Primary Certification Authority - G3 ============================================================ -----BEGIN CERTIFICATE----- MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu 
LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1 GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ +mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/ ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1 CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c 2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/ bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg== -----END CERTIFICATE----- Equifax Secure Global eBusiness CA ================================== -----BEGIN CERTIFICATE----- MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc 58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/ o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv 8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV -----END CERTIFICATE----- Equifax Secure eBusiness CA 1 ============================= -----BEGIN CERTIFICATE----- MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+ WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN /Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ== -----END CERTIFICATE----- Equifax Secure eBusiness CA 2 ============================= -----BEGIN CERTIFICATE----- MIIDIDCCAomgAwIBAgIEN3DPtTANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV UzEXMBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2Vj dXJlIGVCdXNpbmVzcyBDQS0yMB4XDTk5MDYyMzEyMTQ0NVoXDTE5MDYyMzEyMTQ0 
NVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkVxdWlmYXggU2VjdXJlMSYwJAYD VQQLEx1FcXVpZmF4IFNlY3VyZSBlQnVzaW5lc3MgQ0EtMjCBnzANBgkqhkiG9w0B AQEFAAOBjQAwgYkCgYEA5Dk5kx5SBhsoNviyoynF7Y6yEb3+6+e0dMKP/wXn2Z0G vxLIPw7y1tEkshHe0XMJitSxLJgJDR5QRrKDpkWNYmi7hRsgcDKqQM2mll/EcTc/ BPO3QSQ5BxoeLmFYoBIL5aXfxavqN3HMHMg3OrmXUqesxWoklE6ce8/AatbfIb0C AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEX MBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2VjdXJl IGVCdXNpbmVzcyBDQS0yMQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTkw NjIzMTIxNDQ1WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUUJ4L6q9euSBIplBq y/3YIHqngnYwHQYDVR0OBBYEFFCeC+qvXrkgSKZQasv92CB6p4J2MAwGA1UdEwQF MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA A4GBAAyGgq3oThr1jokn4jVYPSm0B482UJW/bsGe68SQsoWou7dC4A8HOd/7npCy 0cE+U58DRLB+S/Rv5Hwf5+Kx5Lia78O9zt4LMjTZ3ijtM2vE1Nc9ElirfQkty3D1 E4qUoSek1nDFbZS1yX2doNLGCEnZZpum0/QL3MUmV+GRMOrN -----END CERTIFICATE----- Thawte Time Stamping CA ======================= -----BEGIN CERTIFICATE----- MIICoTCCAgqgAwIBAgIBADANBgkqhkiG9w0BAQQFADCBizELMAkGA1UEBhMCWkEx FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTEUMBIGA1UEBxMLRHVyYmFudmlsbGUxDzAN BgNVBAoTBlRoYXd0ZTEdMBsGA1UECxMUVGhhd3RlIENlcnRpZmljYXRpb24xHzAd BgNVBAMTFlRoYXd0ZSBUaW1lc3RhbXBpbmcgQ0EwHhcNOTcwMTAxMDAwMDAwWhcN MjAxMjMxMjM1OTU5WjCBizELMAkGA1UEBhMCWkExFTATBgNVBAgTDFdlc3Rlcm4g Q2FwZTEUMBIGA1UEBxMLRHVyYmFudmlsbGUxDzANBgNVBAoTBlRoYXd0ZTEdMBsG A1UECxMUVGhhd3RlIENlcnRpZmljYXRpb24xHzAdBgNVBAMTFlRoYXd0ZSBUaW1l c3RhbXBpbmcgQ0EwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANYrWHhhRYZT 6jR7UZztsOYuGA7+4F+oJ9O0yeB8WU4WDnNUYMF/9p8u6TqFJBU820cEY8OexJQa Wt9MevPZQx08EHp5JduQ/vBR5zDWQQD9nyjfeb6Uu522FOMjhdepQeBMpHmwKxqL 8vg7ij5FrHGSALSQQZj7X+36ty6K+Ig3AgMBAAGjEzARMA8GA1UdEwEB/wQFMAMB Af8wDQYJKoZIhvcNAQEEBQADgYEAZ9viwuaHPUCDhjc1fR/OmsMMZiCouqoEiYbC 9RAIDb/LogWK0E02PvTX72nGXuSwlG9KuefeW4i2e9vjJ+V2w/A1wcu1J5szedyQ pgCed/r8zSeUQhac0xxo7L9c3eWpexAKMnRUEzGLhQOEkbdYATAUOK8oyvyxUBkZ CayJSdM= -----END CERTIFICATE----- thawte Primary Root CA ====================== -----BEGIN CERTIFICATE----- MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta 3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk 6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 /qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 jVaMaA== -----END CERTIFICATE----- VeriSign Class 3 Public Primary Certification Authority - G5 
============================================================ -----BEGIN CERTIFICATE----- MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y 5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ 4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq -----END CERTIFICATE----- Entrust.net Secure Server Certification Authority ================================================= -----BEGIN CERTIFICATE----- MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1 MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/ I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3 wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5 BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0 MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN 95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd 
2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI= -----END CERTIFICATE----- Go Daddy Certification Authority Root Certificate Bundle ======================================================== -----BEGIN CERTIFICATE----- MIIE3jCCA8agAwIBAgICAwEwDQYJKoZIhvcNAQEFBQAwYzELMAkGA1UEBhMCVVMx ITAfBgNVBAoTGFRoZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28g RGFkZHkgQ2xhc3MgMiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjExMTYw MTU0MzdaFw0yNjExMTYwMTU0MzdaMIHKMQswCQYDVQQGEwJVUzEQMA4GA1UECBMH QXJpem9uYTETMBEGA1UEBxMKU2NvdHRzZGFsZTEaMBgGA1UEChMRR29EYWRkeS5j b20sIEluYy4xMzAxBgNVBAsTKmh0dHA6Ly9jZXJ0aWZpY2F0ZXMuZ29kYWRkeS5j b20vcmVwb3NpdG9yeTEwMC4GA1UEAxMnR28gRGFkZHkgU2VjdXJlIENlcnRpZmlj YXRpb24gQXV0aG9yaXR5MREwDwYDVQQFEwgwNzk2OTI4NzCCASIwDQYJKoZIhvcN AQEBBQADggEPADCCAQoCggEBAMQt1RWMnCZM7DI161+4WQFapmGBWTtwY6vj3D3H KrjJM9N55DrtPDAjhI6zMBS2sofDPZVUBJ7fmd0LJR4h3mUpfjWoqVTr9vcyOdQm VZWt7/v+WIbXnvQAjYwqDL1CBM6nPwT27oDyqu9SoWlm2r4arV3aLGbqGmu75RpR SgAvSMeYddi5Kcju+GZtCpyz8/x4fKL4o/K1w/O5epHBp+YlLpyo7RJlbmr2EkRT cDCVw5wrWCs9CHRK8r5RsL+H0EwnWGu1NcWdrxcx+AuP7q2BNgWJCJjPOq8lh8BJ 6qf9Z/dFjpfMFDniNoW1fho3/Rb2cRGadDAW/hOUoz+EDU8CAwEAAaOCATIwggEu MB0GA1UdDgQWBBT9rGEyk2xF1uLuhV+auud2mWjM5zAfBgNVHSMEGDAWgBTSxLDS kdRMEXGzYcs9of7dqGrU4zASBgNVHRMBAf8ECDAGAQH/AgEAMDMGCCsGAQUFBwEB BCcwJTAjBggrBgEFBQcwAYYXaHR0cDovL29jc3AuZ29kYWRkeS5jb20wRgYDVR0f BD8wPTA7oDmgN4Y1aHR0cDovL2NlcnRpZmljYXRlcy5nb2RhZGR5LmNvbS9yZXBv c2l0b3J5L2dkcm9vdC5jcmwwSwYDVR0gBEQwQjBABgRVHSAAMDgwNgYIKwYBBQUH AgEWKmh0dHA6Ly9jZXJ0aWZpY2F0ZXMuZ29kYWRkeS5jb20vcmVwb3NpdG9yeTAO BgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBANKGwOy9+aG2Z+5mC6IG OgRQjhVyrEp0lVPLN8tESe8HkGsz2ZbwlFalEzAFPIUyIXvJxwqoJKSQ3kbTJSMU A2fCENZvD117esyfxVgqwcSeIaha86ykRvOe5GPLL5CkKSkB2XIsKd83ASe8T+5o 0yGPwLPk9Qnt0hCqU7S+8MxZC9Y7lhyVJEnfzuz9p0iRFEUOOjZv2kWzRaJBydTX RE4+uXR21aITVSzGh6O1mawGhId/dQb8vxRMDsxuxN89txJx9OjxUUAiKEngHUuH qDTMBqLdElrRhjZkAzVvb3du6/KFUJheqwNTrZEjYx8WnM25sgVjOuH0aBsXBTWV U+4= -----END CERTIFICATE----- -----BEGIN CERTIFICATE----- MIIE+zCCBGSgAwIBAgICAQ0wDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1Zh bGlDZXJ0IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIElu Yy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24g QXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAe BgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTA0MDYyOTE3MDYyMFoX DTI0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRoZSBHbyBE YWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3MgMiBDZXJ0 aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggENADCCAQgC ggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCAPVYYYwhv 2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6wwdhFJ2+q N1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXiEqITLdiO r18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMYavx4A6lN f4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+YihfukEH U1jPEX44dMX4/7VpkI+EdOqXG68CAQOjggHhMIIB3TAdBgNVHQ4EFgQU0sSw0pHU TBFxs2HLPaH+3ahq1OMwgdIGA1UdIwSByjCBx6GBwaSBvjCBuzEkMCIGA1UEBxMb VmFsaUNlcnQgVmFsaWRhdGlvbiBOZXR3b3JrMRcwFQYDVQQKEw5WYWxpQ2VydCwg SW5jLjE1MDMGA1UECxMsVmFsaUNlcnQgQ2xhc3MgMiBQb2xpY3kgVmFsaWRhdGlv biBBdXRob3JpdHkxITAfBgNVBAMTGGh0dHA6Ly93d3cudmFsaWNlcnQuY29tLzEg MB4GCSqGSIb3DQEJARYRaW5mb0B2YWxpY2VydC5jb22CAQEwDwYDVR0TAQH/BAUw AwEB/zAzBggrBgEFBQcBAQQnMCUwIwYIKwYBBQUHMAGGF2h0dHA6Ly9vY3NwLmdv ZGFkZHkuY29tMEQGA1UdHwQ9MDswOaA3oDWGM2h0dHA6Ly9jZXJ0aWZpY2F0ZXMu Z29kYWRkeS5jb20vcmVwb3NpdG9yeS9yb290LmNybDBLBgNVHSAERDBCMEAGBFUd IAAwODA2BggrBgEFBQcCARYqaHR0cDovL2NlcnRpZmljYXRlcy5nb2RhZGR5LmNv bS9yZXBvc2l0b3J5MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOBgQC1 
QPmnHfbq/qQaQlpE9xXUhUaJwL6e4+PrxeNYiY+Sn1eocSxI0YGyeR+sBjUZsE4O WBsUs5iB0QQeyAfJg594RAoYC5jcdnplDQ1tgMQLARzLrUc+cb53S8wGd9D0Vmsf SxOaFIqII6hR8INMqzW/Rn453HWkrugp++85j09VZw== -----END CERTIFICATE----- -----BEGIN CERTIFICATE----- MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG 9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9 WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd -----END CERTIFICATE----- GeoTrust Global CA ================== -----BEGIN CERTIFICATE----- MIIDfTCCAuagAwIBAgIDErvmMA0GCSqGSIb3DQEBBQUAME4xCzAJBgNVBAYTAlVT MRAwDgYDVQQKEwdFcXVpZmF4MS0wKwYDVQQLEyRFcXVpZmF4IFNlY3VyZSBDZXJ0 aWZpY2F0ZSBBdXRob3JpdHkwHhcNMDIwNTIxMDQwMDAwWhcNMTgwODIxMDQwMDAw WjBCMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UE AxMSR2VvVHJ1c3QgR2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB CgKCAQEA2swYYzD99BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9m OSm9BXiLnTjoBbdqfnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIu T8rxh0PBFpVXLVDviS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6c JmTM386DGXHKTubU1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmR Cw7+OC7RHQWa9k0+bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5asz PeE4uwc2hGKceeoWMPRfwCvocWvk+QIDAQABo4HwMIHtMB8GA1UdIwQYMBaAFEjm aPkr0rKV10fYIyAQTzOYkJ/UMB0GA1UdDgQWBBTAephojYn7qwVkDBF9qn1luMrM TjAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjA6BgNVHR8EMzAxMC+g LaArhilodHRwOi8vY3JsLmdlb3RydXN0LmNvbS9jcmxzL3NlY3VyZWNhLmNybDBO BgNVHSAERzBFMEMGBFUdIAAwOzA5BggrBgEFBQcCARYtaHR0cHM6Ly93d3cuZ2Vv dHJ1c3QuY29tL3Jlc291cmNlcy9yZXBvc2l0b3J5MA0GCSqGSIb3DQEBBQUAA4GB AHbhEm5OSxYShjAGsoEIz/AIx8dxfmbuwu3UOx//8PDITtZDOLC5MH0Y0FWDomrL NhGc6Ehmo21/uBPUR/6LWlxz/K7ZGzIZOKuXNBSqltLroxwUCEm2u+WR74M26x1W b8ravHNjkOR/ez4iyz0H7V84dJzjA1BOoa+Y7mHyhD8S -----END CERTIFICATE----- httplib2-0.8/python2/httplib2/iri2uri.py0000640017135500116100000000736412115724726017712 0ustar jcgregorioeng""" iri2uri Converts an IRI to a URI. 
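Illustrative example (not exhaustive; see the tests below):

    >>> iri2uri(u"http://\N{COMET}.com/")
    'http://xn--o3h.com/'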
""" __author__ = "Joe Gregorio (joe@bitworking.org)" __copyright__ = "Copyright 2006, Joe Gregorio" __contributors__ = [] __version__ = "1.0.0" __license__ = "MIT" __history__ = """ """ import urlparse # Convert an IRI to a URI following the rules in RFC 3987 # # The characters we need to enocde and escape are defined in the spec: # # iprivate = %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD # ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF # / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD # / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD # / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD # / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD # / %xD0000-DFFFD / %xE1000-EFFFD escape_range = [ (0xA0, 0xD7FF), (0xE000, 0xF8FF), (0xF900, 0xFDCF), (0xFDF0, 0xFFEF), (0x10000, 0x1FFFD), (0x20000, 0x2FFFD), (0x30000, 0x3FFFD), (0x40000, 0x4FFFD), (0x50000, 0x5FFFD), (0x60000, 0x6FFFD), (0x70000, 0x7FFFD), (0x80000, 0x8FFFD), (0x90000, 0x9FFFD), (0xA0000, 0xAFFFD), (0xB0000, 0xBFFFD), (0xC0000, 0xCFFFD), (0xD0000, 0xDFFFD), (0xE1000, 0xEFFFD), (0xF0000, 0xFFFFD), (0x100000, 0x10FFFD), ] def encode(c): retval = c i = ord(c) for low, high in escape_range: if i < low: break if i >= low and i <= high: retval = "".join(["%%%2X" % ord(o) for o in c.encode('utf-8')]) break return retval def iri2uri(uri): """Convert an IRI to a URI. Note that IRIs must be passed in a unicode strings. That is, do not utf-8 encode the IRI before passing it into the function.""" if isinstance(uri ,unicode): (scheme, authority, path, query, fragment) = urlparse.urlsplit(uri) authority = authority.encode('idna') # For each character in 'ucschar' or 'iprivate' # 1. encode as utf-8 # 2. then %-encode each octet of that utf-8 uri = urlparse.urlunsplit((scheme, authority, path, query, fragment)) uri = "".join([encode(c) for c in uri]) return uri if __name__ == "__main__": import unittest class Test(unittest.TestCase): def test_uris(self): """Test that URIs are invariant under the transformation.""" invariant = [ u"ftp://ftp.is.co.za/rfc/rfc1808.txt", u"http://www.ietf.org/rfc/rfc2396.txt", u"ldap://[2001:db8::7]/c=GB?objectClass?one", u"mailto:John.Doe@example.com", u"news:comp.infosystems.www.servers.unix", u"tel:+1-816-555-1212", u"telnet://192.0.2.16:80/", u"urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ] for uri in invariant: self.assertEqual(uri, iri2uri(uri)) def test_iri(self): """ Test that the right type of escaping is done for each part of the URI.""" self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri(u"http://\N{COMET}.com/\N{COMET}")) self.assertEqual("http://bitworking.org/?fred=%E2%98%84", iri2uri(u"http://bitworking.org/?fred=\N{COMET}")) self.assertEqual("http://bitworking.org/#%E2%98%84", iri2uri(u"http://bitworking.org/#\N{COMET}")) self.assertEqual("#%E2%98%84", iri2uri(u"#\N{COMET}")) self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")) self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"))) self.assertNotEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode('utf-8'))) unittest.main() httplib2-0.8/python2/httplib2/__init__.py0000640017135500116100000020776512115724726020073 0ustar jcgregorioengfrom __future__ import generators """ httplib2 A caching http interface that supports ETags and gzip to conserve bandwidth. Requires Python 2.3 or later Changelog: 2007-08-18, Rick: Modified so it's able to use a socks proxy if needed. 
""" __author__ = "Joe Gregorio (joe@bitworking.org)" __copyright__ = "Copyright 2006, Joe Gregorio" __contributors__ = ["Thomas Broyer (t.broyer@ltgt.net)", "James Antill", "Xavier Verges Farrero", "Jonathan Feinberg", "Blair Zajac", "Sam Ruby", "Louis Nyffenegger"] __license__ = "MIT" __version__ = "0.8" import re import sys import email import email.Utils import email.Message import email.FeedParser import StringIO import gzip import zlib import httplib import urlparse import urllib import base64 import os import copy import calendar import time import random import errno try: from hashlib import sha1 as _sha, md5 as _md5 except ImportError: # prior to Python 2.5, these were separate modules import sha import md5 _sha = sha.new _md5 = md5.new import hmac from gettext import gettext as _ import socket try: from httplib2 import socks except ImportError: try: import socks except (ImportError, AttributeError): socks = None # Build the appropriate socket wrapper for ssl try: import ssl # python 2.6 ssl_SSLError = ssl.SSLError def _ssl_wrap_socket(sock, key_file, cert_file, disable_validation, ca_certs): if disable_validation: cert_reqs = ssl.CERT_NONE else: cert_reqs = ssl.CERT_REQUIRED # We should be specifying SSL version 3 or TLS v1, but the ssl module # doesn't expose the necessary knobs. So we need to go with the default # of SSLv23. return ssl.wrap_socket(sock, keyfile=key_file, certfile=cert_file, cert_reqs=cert_reqs, ca_certs=ca_certs) except (AttributeError, ImportError): ssl_SSLError = None def _ssl_wrap_socket(sock, key_file, cert_file, disable_validation, ca_certs): if not disable_validation: raise CertificateValidationUnsupported( "SSL certificate validation is not supported without " "the ssl module installed. To avoid this error, install " "the ssl module, or explicity disable validation.") ssl_sock = socket.ssl(sock, key_file, cert_file) return httplib.FakeSocket(sock, ssl_sock) if sys.version_info >= (2,3): from iri2uri import iri2uri else: def iri2uri(uri): return uri def has_timeout(timeout): # python 2.6 if hasattr(socket, '_GLOBAL_DEFAULT_TIMEOUT'): return (timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT) return (timeout is not None) __all__ = [ 'Http', 'Response', 'ProxyInfo', 'HttpLib2Error', 'RedirectMissingLocation', 'RedirectLimit', 'FailedToDecompressContent', 'UnimplementedDigestAuthOptionError', 'UnimplementedHmacDigestAuthOptionError', 'debuglevel', 'ProxiesUnavailableError'] # The httplib debug level, set to a non-zero value to get debug output debuglevel = 0 # A request will be tried 'RETRIES' times if it fails at the socket/connection level. RETRIES = 2 # Python 2.3 support if sys.version_info < (2,4): def sorted(seq): seq.sort() return seq # Python 2.3 support def HTTPResponse__getheaders(self): """Return list of (header, value) tuples.""" if self.msg is None: raise httplib.ResponseNotReady() return self.msg.items() if not hasattr(httplib.HTTPResponse, 'getheaders'): httplib.HTTPResponse.getheaders = HTTPResponse__getheaders # All exceptions raised here derive from HttpLib2Error class HttpLib2Error(Exception): pass # Some exceptions can be caught and optionally # be turned back into responses. 
class HttpLib2ErrorWithResponse(HttpLib2Error):
    def __init__(self, desc, response, content):
        self.response = response
        self.content = content
        HttpLib2Error.__init__(self, desc)

class RedirectMissingLocation(HttpLib2ErrorWithResponse): pass
class RedirectLimit(HttpLib2ErrorWithResponse): pass
class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass
class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass

class MalformedHeader(HttpLib2Error): pass
class RelativeURIError(HttpLib2Error): pass
class ServerNotFoundError(HttpLib2Error): pass
class ProxiesUnavailableError(HttpLib2Error): pass
class CertificateValidationUnsupported(HttpLib2Error): pass
class SSLHandshakeError(HttpLib2Error): pass
class NotSupportedOnThisPlatform(HttpLib2Error): pass
class CertificateHostnameMismatch(SSLHandshakeError):
    def __init__(self, desc, host, cert):
        HttpLib2Error.__init__(self, desc)
        self.host = host
        self.cert = cert

# Open Items:
# -----------
# Proxy support
# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?)

# Pluggable cache storage (supports storing the cache in
#   flat files by default. We need a plug-in architecture
#   that can support Berkeley DB and Squid)

# == Known Issues ==
# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator.
# Does not handle Cache-Control: max-stale
# Does not use Age: headers when calculating cache freshness.

# The number of redirections to follow before giving up.
# Note that only GET redirects are automatically followed.
# Will also honor 301 requests by saving that info and never
# requesting that URI again.
DEFAULT_MAX_REDIRECTS = 5

try:
    # Users can optionally provide a module that tells us where the CA_CERTS
    # are located.
    import ca_certs_locater
    CA_CERTS = ca_certs_locater.get()
except ImportError:
    # Default CA certificates file bundled with httplib2.
    CA_CERTS = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "cacerts.txt")

# Which headers are hop-by-hop headers by default
HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade']

def _get_end2end_headers(response):
    hopbyhop = list(HOP_BY_HOP)
    hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')])
    return [header for header in response.keys() if header not in hopbyhop]

URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")

def parse_uri(uri):
    """Parses a URI using the regex given in Appendix B of RFC 3986.

        (scheme, authority, path, query, fragment) = parse_uri(uri)
    """
    groups = URI.match(uri).groups()
    return (groups[1], groups[3], groups[4], groups[6], groups[8])

def urlnorm(uri):
    (scheme, authority, path, query, fragment) = parse_uri(uri)
    if not scheme or not authority:
        raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri)
    authority = authority.lower()
    scheme = scheme.lower()
    if not path:
        path = "/"
    # Could do syntax based normalization of the URI before
    # computing the digest. See Section 6.2.2 of Std 66.
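    # Illustrative example (not in the original source):
    #
    #   urlnorm("HTTP://Example.COM/path?a=1")
    #   # -> ('http', 'example.com', '/path?a=1', 'http://example.com/path?a=1')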
    request_uri = query and "?".join([path, query]) or path
    defrag_uri = scheme + "://" + authority + request_uri
    return scheme, authority, request_uri, defrag_uri


# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/)
re_url_scheme = re.compile(r'^\w+://')
re_slash = re.compile(r'[?/:|]+')
def safename(filename):
    """Return a filename suitable for the cache.

    Strips dangerous and common characters to create a filename we
    can use to store the cache in.
    """
    try:
        if re_url_scheme.match(filename):
            if isinstance(filename, str):
                filename = filename.decode('utf-8')
                filename = filename.encode('idna')
            else:
                filename = filename.encode('idna')
    except UnicodeError:
        pass
    if isinstance(filename, unicode):
        filename = filename.encode('utf-8')
    filemd5 = _md5(filename).hexdigest()
    filename = re_url_scheme.sub("", filename)
    filename = re_slash.sub(",", filename)

    # limit length of filename
    if len(filename) > 200:
        filename = filename[:200]
    return ",".join((filename, filemd5))

NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+')
def _normalize_headers(headers):
    return dict([(key.lower(), NORMALIZE_SPACE.sub(' ', value).strip()) for (key, value) in headers.iteritems()])

def _parse_cache_control(headers):
    retval = {}
    if headers.has_key('cache-control'):
        parts = headers['cache-control'].split(',')
        parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")]
        parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")]
        retval = dict(parts_with_args + parts_wo_args)
    return retval

# Whether to use a strict mode to parse WWW-Authenticate headers.
# Might lead to bad results in case of an ill-formed header value,
# so disabled by default, falling back to relaxed parsing.
# Set to true to turn on; useful for testing servers.
USE_WWW_AUTH_STRICT_PARSING = 0

# In the regexes below:
#    [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+             matches a "token" as defined by HTTP
#    "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?"    matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space
# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both:
#    \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?
WWW_AUTH_STRICT = re.compile(r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$")
WWW_AUTH_RELAXED = re.compile(r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?<!\")[^ \t\r\n,]+(?!\"))\"?)(.*)$")
UNQUOTE_PAIRS = re.compile(r'\\(.)')

def _parse_www_authenticate(headers, headername='www-authenticate'):
    """Returns a dictionary of dictionaries, one dict per auth_scheme."""
    retval = {}
    if headers.has_key(headername):
        try:
            authenticate = headers[headername].strip()
            www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
            while authenticate:
                # Break off the scheme at the beginning of the line
                if headername == 'authentication-info':
                    (auth_scheme, the_rest) = ('digest', authenticate)
                else:
                    (auth_scheme, the_rest) = authenticate.split(" ", 1)
                # Now loop over all the key value pairs that come after the scheme,
                # being careful not to roll into the next scheme
                match = www_auth.search(the_rest)
                auth_params = {}
                while match:
                    if match and len(match.groups()) == 3:
                        (key, value, the_rest) = match.groups()
                        auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value)
                    match = www_auth.search(the_rest)
                retval[auth_scheme.lower()] = auth_params
                authenticate = the_rest.strip()
        except ValueError:
            raise MalformedHeader("WWW-Authenticate")
    return retval

def _entry_disposition(response_headers, request_headers):
    """Determine freshness from the Date, Expires and Cache-Control headers.

    We don't handle the following:

    1. Cache-Control: max-stale
    2. Age: headers are not used in the calculations.

    Note that this algorithm is simpler than you might think
    because we are operating as a private (non-shared) cache.
    This lets us ignore 's-maxage'. We can also ignore
    'proxy-invalidate' since we aren't a proxy.
    We will never return a stale document as
    fresh as a design decision, and thus the non-implementation
    of 'max-stale'. This also lets us safely ignore 'must-revalidate'
    since we operate as if every server has sent 'must-revalidate'.
    Since we are private we get to ignore both 'public' and
    'private' parameters. We also ignore 'no-transform' since
    we don't do any transformations.
    The 'no-store' parameter is handled at a higher level.
    So the only Cache-Control parameters we look at are:

    no-cache
    only-if-cached
    max-age
    min-fresh
    """
    retval = "STALE"
    cc = _parse_cache_control(request_headers)
    cc_response = _parse_cache_control(response_headers)

    if request_headers.has_key('pragma') and request_headers['pragma'].lower().find('no-cache') != -1:
        retval = "TRANSPARENT"
        if 'cache-control' not in request_headers:
            request_headers['cache-control'] = 'no-cache'
    elif cc.has_key('no-cache'):
        retval = "TRANSPARENT"
    elif cc_response.has_key('no-cache'):
        retval = "STALE"
    elif cc.has_key('only-if-cached'):
        retval = "FRESH"
    elif response_headers.has_key('date'):
        date = calendar.timegm(email.Utils.parsedate_tz(response_headers['date']))
        now = time.time()
        current_age = max(0, now - date)
        if cc_response.has_key('max-age'):
            try:
                freshness_lifetime = int(cc_response['max-age'])
            except ValueError:
                freshness_lifetime = 0
        elif response_headers.has_key('expires'):
            expires = email.Utils.parsedate_tz(response_headers['expires'])
            if None == expires:
                freshness_lifetime = 0
            else:
                freshness_lifetime = max(0, calendar.timegm(expires) - date)
        else:
            freshness_lifetime = 0
        if cc.has_key('max-age'):
            try:
                freshness_lifetime = int(cc['max-age'])
            except ValueError:
                freshness_lifetime = 0
        if cc.has_key('min-fresh'):
            try:
                min_fresh = int(cc['min-fresh'])
            except ValueError:
                min_fresh = 0
            current_age += min_fresh
        if freshness_lifetime > current_age:
            retval = "FRESH"
    return retval

def _decompressContent(response, new_content):
    content = new_content
    try:
        encoding = response.get('content-encoding', None)
        if encoding in ['gzip', 'deflate']:
            if encoding == 'gzip':
                content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read()
            if encoding == 'deflate':
                content = zlib.decompress(content)
            response['content-length'] = str(len(content))
            # Record the historical presence of the encoding in a way that won't interfere.
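            # (The leading '-' keeps this annotation from colliding with any
            # real HTTP header name; the same convention is used for the
            # '-varied-*' and '-x-permanent-redirect-url' cache entries.)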
response['-content-encoding'] = response['content-encoding'] del response['content-encoding'] except IOError: content = "" raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content) return content def _updateCache(request_headers, response_headers, content, cache, cachekey): if cachekey: cc = _parse_cache_control(request_headers) cc_response = _parse_cache_control(response_headers) if cc.has_key('no-store') or cc_response.has_key('no-store'): cache.delete(cachekey) else: info = email.Message.Message() for key, value in response_headers.iteritems(): if key not in ['status','content-encoding','transfer-encoding']: info[key] = value # Add annotations to the cache to indicate what headers # are variant for this request. vary = response_headers.get('vary', None) if vary: vary_headers = vary.lower().replace(' ', '').split(',') for header in vary_headers: key = '-varied-%s' % header try: info[key] = request_headers[header] except KeyError: pass status = response_headers.status if status == 304: status = 200 status_header = 'status: %d\r\n' % status header_str = info.as_string() header_str = re.sub("\r(?!\n)|(? 0: service = "cl" # No point in guessing Base or Spreadsheet #elif request_uri.find("spreadsheets") > 0: # service = "wise" auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers['user-agent']) resp, content = self.http.request("https://www.google.com/accounts/ClientLogin", method="POST", body=urlencode(auth), headers={'Content-Type': 'application/x-www-form-urlencoded'}) lines = content.split('\n') d = dict([tuple(line.split("=", 1)) for line in lines if line]) if resp.status == 403: self.Auth = "" else: self.Auth = d['Auth'] def request(self, method, request_uri, headers, content): """Modify the request headers to add the appropriate Authorization header.""" headers['authorization'] = 'GoogleLogin Auth=' + self.Auth AUTH_SCHEME_CLASSES = { "basic": BasicAuthentication, "wsse": WsseAuthentication, "digest": DigestAuthentication, "hmacdigest": HmacDigestAuthentication, "googlelogin": GoogleLoginAuthentication } AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"] class FileCache(object): """Uses a local directory as a store for cached files. Not really safe to use if multiple threads or processes are going to be running on the same cache. 
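    Typically used indirectly (a hypothetical example): passing a directory
    name to Http, e.g. Http(".cache"), wraps that directory in a FileCache.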
""" def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior self.cache = cache self.safe = safe if not os.path.exists(cache): os.makedirs(self.cache) def get(self, key): retval = None cacheFullPath = os.path.join(self.cache, self.safe(key)) try: f = file(cacheFullPath, "rb") retval = f.read() f.close() except IOError: pass return retval def set(self, key, value): cacheFullPath = os.path.join(self.cache, self.safe(key)) f = file(cacheFullPath, "wb") f.write(value) f.close() def delete(self, key): cacheFullPath = os.path.join(self.cache, self.safe(key)) if os.path.exists(cacheFullPath): os.remove(cacheFullPath) class Credentials(object): def __init__(self): self.credentials = [] def add(self, name, password, domain=""): self.credentials.append((domain.lower(), name, password)) def clear(self): self.credentials = [] def iter(self, domain): for (cdomain, name, password) in self.credentials: if cdomain == "" or domain == cdomain: yield (name, password) class KeyCerts(Credentials): """Identical to Credentials except that name/password are mapped to key/cert.""" pass class AllHosts(object): pass class ProxyInfo(object): """Collect information required to use a proxy.""" bypass_hosts = () def __init__(self, proxy_type, proxy_host, proxy_port, proxy_rdns=None, proxy_user=None, proxy_pass=None): """The parameter proxy_type must be set to one of socks.PROXY_TYPE_XXX constants. For example: p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, proxy_host='localhost', proxy_port=8000) """ self.proxy_type = proxy_type self.proxy_host = proxy_host self.proxy_port = proxy_port self.proxy_rdns = proxy_rdns self.proxy_user = proxy_user self.proxy_pass = proxy_pass def astuple(self): return (self.proxy_type, self.proxy_host, self.proxy_port, self.proxy_rdns, self.proxy_user, self.proxy_pass) def isgood(self): return (self.proxy_host != None) and (self.proxy_port != None) def applies_to(self, hostname): return not self.bypass_host(hostname) def bypass_host(self, hostname): """Has this host been excluded from the proxy config""" if self.bypass_hosts is AllHosts: return True bypass = False for domain in self.bypass_hosts: if hostname.endswith(domain): bypass = True return bypass def proxy_info_from_environment(method='http'): """ Read proxy info from the environment variables. 
""" if method not in ['http', 'https']: return env_var = method + '_proxy' url = os.environ.get(env_var, os.environ.get(env_var.upper())) if not url: return pi = proxy_info_from_url(url, method) no_proxy = os.environ.get('no_proxy', os.environ.get('NO_PROXY', '')) bypass_hosts = [] if no_proxy: bypass_hosts = no_proxy.split(',') # special case, no_proxy=* means all hosts bypassed if no_proxy == '*': bypass_hosts = AllHosts pi.bypass_hosts = bypass_hosts return pi def proxy_info_from_url(url, method='http'): """ Construct a ProxyInfo from a URL (such as http_proxy env var) """ url = urlparse.urlparse(url) username = None password = None port = None if '@' in url[1]: ident, host_port = url[1].split('@', 1) if ':' in ident: username, password = ident.split(':', 1) else: password = ident else: host_port = url[1] if ':' in host_port: host, port = host_port.split(':', 1) else: host = host_port if port: port = int(port) else: port = dict(https=443, http=80)[method] proxy_type = 3 # socks.PROXY_TYPE_HTTP return ProxyInfo( proxy_type = proxy_type, proxy_host = host, proxy_port = port, proxy_user = username or None, proxy_pass = password or None, ) class HTTPConnectionWithTimeout(httplib.HTTPConnection): """ HTTPConnection subclass that supports timeouts All timeouts are in seconds. If None is passed for timeout then Python's default timeout for sockets will be used. See for example the docs of socket.setdefaulttimeout(): http://docs.python.org/library/socket.html#socket.setdefaulttimeout """ def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None): httplib.HTTPConnection.__init__(self, host, port, strict) self.timeout = timeout self.proxy_info = proxy_info def connect(self): """Connect to the host and port specified in __init__.""" # Mostly verbatim from httplib.py. if self.proxy_info and socks is None: raise ProxiesUnavailableError( 'Proxy support missing but proxy use was requested!') msg = "getaddrinfo returns an empty list" if self.proxy_info and self.proxy_info.isgood(): use_proxy = True proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple() else: use_proxy = False if use_proxy and proxy_rdns: host = proxy_host port = proxy_port else: host = self.host port = self.port for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM): af, socktype, proto, canonname, sa = res try: if use_proxy: self.sock = socks.socksocket(af, socktype, proto) self.sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass) else: self.sock = socket.socket(af, socktype, proto) self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) # Different from httplib: support timeouts. if has_timeout(self.timeout): self.sock.settimeout(self.timeout) # End of difference from httplib. if self.debuglevel > 0: print "connect: (%s, %s) ************" % (self.host, self.port) if use_proxy: print "proxy: %s ************" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) self.sock.connect((self.host, self.port) + sa[2:]) except socket.error, msg: if self.debuglevel > 0: print "connect fail: (%s, %s)" % (self.host, self.port) if use_proxy: print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) if self.sock: self.sock.close() self.sock = None continue break if not self.sock: raise socket.error, msg class HTTPSConnectionWithTimeout(httplib.HTTPSConnection): """ This class allows communication via SSL. All timeouts are in seconds. 
If None is passed for timeout then Python's default timeout for sockets will be used. See for example the docs of socket.setdefaulttimeout(): http://docs.python.org/library/socket.html#socket.setdefaulttimeout """ def __init__(self, host, port=None, key_file=None, cert_file=None, strict=None, timeout=None, proxy_info=None, ca_certs=None, disable_ssl_certificate_validation=False): httplib.HTTPSConnection.__init__(self, host, port=port, key_file=key_file, cert_file=cert_file, strict=strict) self.timeout = timeout self.proxy_info = proxy_info if ca_certs is None: ca_certs = CA_CERTS self.ca_certs = ca_certs self.disable_ssl_certificate_validation = \ disable_ssl_certificate_validation # The following two methods were adapted from https_wrapper.py, released # with the Google Appengine SDK at # http://googleappengine.googlecode.com/svn-history/r136/trunk/python/google/appengine/tools/https_wrapper.py # under the following license: # # Copyright 2007 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # def _GetValidHostsForCert(self, cert): """Returns a list of valid host globs for an SSL certificate. Args: cert: A dictionary representing an SSL certificate. Returns: list: A list of valid host globs. """ if 'subjectAltName' in cert: return [x[1] for x in cert['subjectAltName'] if x[0].lower() == 'dns'] else: return [x[0][1] for x in cert['subject'] if x[0][0].lower() == 'commonname'] def _ValidateCertificateHostname(self, cert, hostname): """Validates that a given hostname is valid for an SSL certificate. Args: cert: A dictionary representing an SSL certificate. hostname: The hostname to test. Returns: bool: Whether or not the hostname is valid for this certificate. """ hosts = self._GetValidHostsForCert(cert) for host in hosts: host_re = host.replace('.', '\.').replace('*', '[^.]*') if re.search('^%s$' % (host_re,), hostname, re.I): return True return False def connect(self): "Connect to a host on a given (SSL) port." 
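        # Outline of the body below: choose the proxy or the origin server as
        # the endpoint to dial; create a plain or SOCKS-wrapped socket for
        # each address family getaddrinfo() returns; wrap the connected
        # socket via _ssl_wrap_socket(); then, unless validation is disabled,
        # compare the peer certificate against self.host and raise
        # CertificateHostnameMismatch if it doesn't match.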
msg = "getaddrinfo returns an empty list" if self.proxy_info and self.proxy_info.isgood(): use_proxy = True proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple() else: use_proxy = False if use_proxy and proxy_rdns: host = proxy_host port = proxy_port else: host = self.host port = self.port address_info = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM) for family, socktype, proto, canonname, sockaddr in address_info: try: if use_proxy: sock = socks.socksocket(family, socktype, proto) sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass) else: sock = socket.socket(family, socktype, proto) sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) if has_timeout(self.timeout): sock.settimeout(self.timeout) sock.connect((self.host, self.port)) self.sock =_ssl_wrap_socket( sock, self.key_file, self.cert_file, self.disable_ssl_certificate_validation, self.ca_certs) if self.debuglevel > 0: print "connect: (%s, %s)" % (self.host, self.port) if use_proxy: print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) if not self.disable_ssl_certificate_validation: cert = self.sock.getpeercert() hostname = self.host.split(':', 0)[0] if not self._ValidateCertificateHostname(cert, hostname): raise CertificateHostnameMismatch( 'Server presented certificate that does not match ' 'host %s: %s' % (hostname, cert), hostname, cert) except ssl_SSLError, e: if sock: sock.close() if self.sock: self.sock.close() self.sock = None # Unfortunately the ssl module doesn't seem to provide any way # to get at more detailed error information, in particular # whether the error is due to certificate validation or # something else (such as SSL protocol mismatch). if e.errno == ssl.SSL_ERROR_SSL: raise SSLHandshakeError(e) else: raise except (socket.timeout, socket.gaierror): raise except socket.error, msg: if self.debuglevel > 0: print "connect fail: (%s, %s)" % (self.host, self.port) if use_proxy: print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) if self.sock: self.sock.close() self.sock = None continue break if not self.sock: raise socket.error, msg SCHEME_TO_CONNECTION = { 'http': HTTPConnectionWithTimeout, 'https': HTTPSConnectionWithTimeout } # Use a different connection object for Google App Engine try: try: from google.appengine.api import apiproxy_stub_map if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None: raise ImportError # Bail out; we're not actually running on App Engine. from google.appengine.api.urlfetch import fetch from google.appengine.api.urlfetch import InvalidURLError except (ImportError, AttributeError): from google3.apphosting.api import apiproxy_stub_map if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None: raise ImportError # Bail out; we're not actually running on App Engine. from google3.apphosting.api.urlfetch import fetch from google3.apphosting.api.urlfetch import InvalidURLError def _new_fixed_fetch(validate_certificate): def fixed_fetch(url, payload=None, method="GET", headers={}, allow_truncated=False, follow_redirects=True, deadline=5): return fetch(url, payload=payload, method=method, headers=headers, allow_truncated=allow_truncated, follow_redirects=follow_redirects, deadline=deadline, validate_certificate=validate_certificate) return fixed_fetch class AppEngineHttpConnection(httplib.HTTPConnection): """Use httplib on App Engine, but compensate for its weirdness. 
    The parameters key_file, cert_file, proxy_info, ca_certs, and
        disable_ssl_certificate_validation are all dropped on the ground.
        """
        def __init__(self, host, port=None, key_file=None, cert_file=None,
                     strict=None, timeout=None, proxy_info=None, ca_certs=None,
                     disable_ssl_certificate_validation=False):
            httplib.HTTPConnection.__init__(self, host, port=port,
                                            strict=strict, timeout=timeout)

    class AppEngineHttpsConnection(httplib.HTTPSConnection):
        """Same as AppEngineHttpConnection, but for HTTPS URIs."""
        def __init__(self, host, port=None, key_file=None, cert_file=None,
                     strict=None, timeout=None, proxy_info=None, ca_certs=None,
                     disable_ssl_certificate_validation=False):
            httplib.HTTPSConnection.__init__(self, host, port=port,
                                             key_file=key_file,
                                             cert_file=cert_file, strict=strict,
                                             timeout=timeout)
            self._fetch = _new_fixed_fetch(
                    not disable_ssl_certificate_validation)

    # Update the connection classes to use the Google App Engine specific ones.
    SCHEME_TO_CONNECTION = {
        'http': AppEngineHttpConnection,
        'https': AppEngineHttpsConnection
    }
except (ImportError, AttributeError):
    pass


class Http(object):
    """An HTTP client that handles:

    - all methods
    - caching
    - ETags
    - compression
    - HTTPS
    - Basic
    - Digest
    - WSSE

    and more.
    """
    def __init__(self, cache=None, timeout=None,
                 proxy_info=proxy_info_from_environment,
                 ca_certs=None, disable_ssl_certificate_validation=False):
        """If 'cache' is a string then it is used as a directory name for
        a disk cache. Otherwise it must be an object that supports the
        same interface as FileCache.

        All timeouts are in seconds. If None is passed for timeout
        then Python's default timeout for sockets will be used. See
        for example the docs of socket.setdefaulttimeout():
        http://docs.python.org/library/socket.html#socket.setdefaulttimeout

        `proxy_info` may be:
          - a callable that takes the http scheme ('http' or 'https') and
            returns a ProxyInfo instance per request. By default, uses
            proxy_info_from_environment.
          - a ProxyInfo instance (static proxy config).
          - None (proxy disabled).

        ca_certs is the path of a file containing root CA certificates for SSL
        server certificate validation. By default, a CA cert file bundled with
        httplib2 is used.

        If disable_ssl_certificate_validation is true, SSL cert validation will
        not be performed.
        """
        self.proxy_info = proxy_info
        self.ca_certs = ca_certs
        self.disable_ssl_certificate_validation = \
                disable_ssl_certificate_validation

        # Map domain name to an httplib connection
        self.connections = {}
        # The location of the cache, for now a directory
        # where cached responses are held.
        if cache and isinstance(cache, basestring):
            self.cache = FileCache(cache)
        else:
            self.cache = cache

        # Name/password
        self.credentials = Credentials()

        # Key/cert
        self.certificates = KeyCerts()

        # authorization objects
        self.authorizations = []

        # If set to False then no redirects are followed, even safe ones.
        self.follow_redirects = True

        # Which HTTP methods do we apply optimistic concurrency to, i.e.
        # which methods get an "if-match:" etag header added to them.
        self.optimistic_concurrency_methods = ["PUT", "PATCH"]

        # If 'follow_redirects' is True, and this is set to True then
        # all redirects are followed, including unsafe ones.
        self.follow_all_redirects = False

        self.ignore_etag = False

        self.force_exception_to_status_code = False

        self.timeout = timeout

        # Keep Authorization: headers on a redirect.
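        # (Off by default: forwarding credentials to a redirect target on a
        # different host could leak them.)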
self.forward_authorization_headers = False def __getstate__(self): state_dict = copy.copy(self.__dict__) # In case request is augmented by some foreign object such as # credentials which handle auth if 'request' in state_dict: del state_dict['request'] if 'connections' in state_dict: del state_dict['connections'] return state_dict def __setstate__(self, state): self.__dict__.update(state) self.connections = {} def _auth_from_challenge(self, host, request_uri, headers, response, content): """A generator that creates Authorization objects that can be applied to requests. """ challenges = _parse_www_authenticate(response, 'www-authenticate') for cred in self.credentials.iter(host): for scheme in AUTH_SCHEME_ORDER: if challenges.has_key(scheme): yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self) def add_credentials(self, name, password, domain=""): """Add a name and password that will be used any time a request requires authentication.""" self.credentials.add(name, password, domain) def add_certificate(self, key, cert, domain): """Add a key and cert that will be used any time a request requires authentication.""" self.certificates.add(key, cert, domain) def clear_credentials(self): """Remove all the names and passwords that are used for authentication""" self.credentials.clear() self.authorizations = [] def _conn_request(self, conn, request_uri, method, body, headers): for i in range(RETRIES): try: if hasattr(conn, 'sock') and conn.sock is None: conn.connect() conn.request(method, request_uri, body, headers) except socket.timeout: raise except socket.gaierror: conn.close() raise ServerNotFoundError("Unable to find the server at %s" % conn.host) except ssl_SSLError: conn.close() raise except socket.error, e: err = 0 if hasattr(e, 'args'): err = getattr(e, 'args')[0] else: err = e.errno if err == errno.ECONNREFUSED: # Connection refused raise except httplib.HTTPException: # Just because the server closed the connection doesn't apparently mean # that the server didn't send a response. 
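                # Retry while attempts remain (RETRIES in total). If the
                # connection never came up, the final failure is re-raised;
                # with an open socket the final attempt still falls through
                # and tries to read a response below.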
                if hasattr(conn, 'sock') and conn.sock is None:
                    if i < RETRIES-1:
                        conn.close()
                        conn.connect()
                        continue
                    else:
                        conn.close()
                        raise
                if i < RETRIES-1:
                    conn.close()
                    conn.connect()
                    continue
            try:
                response = conn.getresponse()
            except (socket.error, httplib.HTTPException):
                if i < RETRIES-1:
                    conn.close()
                    conn.connect()
                    continue
                else:
                    conn.close()
                    raise
            else:
                content = ""
                if method == "HEAD":
                    conn.close()
                else:
                    content = response.read()
                response = Response(response)
                if method != "HEAD":
                    content = _decompressContent(response, content)
            break
        return (response, content)

    def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey):
        """Do the actual request using the connection object
        and also follow one level of redirects if necessary"""

        auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)]
        auth = auths and sorted(auths)[0][1] or None
        if auth:
            auth.request(method, request_uri, headers, body)

        (response, content) = self._conn_request(conn, request_uri, method, body, headers)

        if auth:
            if auth.response(response, body):
                auth.request(method, request_uri, headers, body)
                (response, content) = self._conn_request(conn, request_uri, method, body, headers)
                response._stale_digest = 1

        if response.status == 401:
            for authorization in self._auth_from_challenge(host, request_uri, headers, response, content):
                authorization.request(method, request_uri, headers, body)
                (response, content) = self._conn_request(conn, request_uri, method, body, headers)
                if response.status != 401:
                    self.authorizations.append(authorization)
                    authorization.response(response, body)
                    break

        if (self.follow_all_redirects or (method in ["GET", "HEAD"]) or response.status == 303):
            if self.follow_redirects and response.status in [300, 301, 302, 303, 307]:
                # Pick out the location header and basically start from the beginning
                # remembering first to strip the ETag header and decrement our 'depth'
                if redirections:
                    if not response.has_key('location') and response.status != 300:
                        raise RedirectMissingLocation(_("Redirected but the response is missing a Location: header."), response, content)
                    # Fix-up relative redirects (which violate an RFC 2616 MUST)
                    if response.has_key('location'):
                        location = response['location']
                        (scheme, authority, path, query, fragment) = parse_uri(location)
                        if authority == None:
                            response['location'] = urlparse.urljoin(absolute_uri, location)
                    if response.status == 301 and method in ["GET", "HEAD"]:
                        response['-x-permanent-redirect-url'] = response['location']
                        if not response.has_key('content-location'):
                            response['content-location'] = absolute_uri
                        _updateCache(headers, response, content, self.cache, cachekey)
                    if headers.has_key('if-none-match'):
                        del headers['if-none-match']
                    if headers.has_key('if-modified-since'):
                        del headers['if-modified-since']
                    if 'authorization' in headers and not self.forward_authorization_headers:
                        del headers['authorization']
                    if response.has_key('location'):
                        location = response['location']
                        old_response = copy.deepcopy(response)
                        if not old_response.has_key('content-location'):
                            old_response['content-location'] = absolute_uri
                        redirect_method = method
                        if response.status in [302, 303]:
                            redirect_method = "GET"
                            body = None
                        (response, content) = self.request(location, redirect_method, body=body, headers=headers, redirections=redirections-1)
                        response.previous = old_response
                else:
                    raise RedirectLimit("Redirected more times than redirection_limit allows.", response, content)
            elif response.status in [200, 203] and method in ["GET", "HEAD"]:
"HEAD"]: # Don't cache 206's since we aren't going to handle byte range requests if not response.has_key('content-location'): response['content-location'] = absolute_uri _updateCache(headers, response, content, self.cache, cachekey) return (response, content) def _normalize_headers(self, headers): return _normalize_headers(headers) # Need to catch and rebrand some exceptions # Then need to optionally turn all exceptions into status codes # including all socket.* and httplib.* exceptions. def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None): """ Performs a single HTTP request. The 'uri' is the URI of the HTTP resource and can begin with either 'http' or 'https'. The value of 'uri' must be an absolute URI. The 'method' is the HTTP method to perform, such as GET, POST, DELETE, etc. There is no restriction on the methods allowed. The 'body' is the entity body to be sent with the request. It is a string object. Any extra headers that are to be sent with the request should be provided in the 'headers' dictionary. The maximum number of redirect to follow before raising an exception is 'redirections. The default is 5. The return value is a tuple of (response, content), the first being and instance of the 'Response' class, the second being a string that contains the response entity body. """ try: if headers is None: headers = {} else: headers = self._normalize_headers(headers) if not headers.has_key('user-agent'): headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__ uri = iri2uri(uri) (scheme, authority, request_uri, defrag_uri) = urlnorm(uri) domain_port = authority.split(":")[0:2] if len(domain_port) == 2 and domain_port[1] == '443' and scheme == 'http': scheme = 'https' authority = domain_port[0] proxy_info = self._get_proxy_info(scheme, authority) conn_key = scheme+":"+authority if conn_key in self.connections: conn = self.connections[conn_key] else: if not connection_type: connection_type = SCHEME_TO_CONNECTION[scheme] certs = list(self.certificates.iter(authority)) if scheme == 'https': if certs: conn = self.connections[conn_key] = connection_type( authority, key_file=certs[0][0], cert_file=certs[0][1], timeout=self.timeout, proxy_info=proxy_info, ca_certs=self.ca_certs, disable_ssl_certificate_validation= self.disable_ssl_certificate_validation) else: conn = self.connections[conn_key] = connection_type( authority, timeout=self.timeout, proxy_info=proxy_info, ca_certs=self.ca_certs, disable_ssl_certificate_validation= self.disable_ssl_certificate_validation) else: conn = self.connections[conn_key] = connection_type( authority, timeout=self.timeout, proxy_info=proxy_info) conn.set_debuglevel(debuglevel) if 'range' not in headers and 'accept-encoding' not in headers: headers['accept-encoding'] = 'gzip, deflate' info = email.Message.Message() cached_value = None if self.cache: cachekey = defrag_uri cached_value = self.cache.get(cachekey) if cached_value: # info = email.message_from_string(cached_value) # # Need to replace the line above with the kludge below # to fix the non-existent bug not fixed in this # bug report: http://mail.python.org/pipermail/python-bugs-list/2005-September/030289.html try: info, content = cached_value.split('\r\n\r\n', 1) feedparser = email.FeedParser.FeedParser() feedparser.feed(info) info = feedparser.close() feedparser._parse = None except (IndexError, ValueError): self.cache.delete(cachekey) cachekey = None cached_value = None else: cachekey = None if method in 
            if method in self.optimistic_concurrency_methods and self.cache and info.has_key('etag') and not self.ignore_etag and 'if-match' not in headers:
                # http://www.w3.org/1999/04/Editing/
                headers['if-match'] = info['etag']

            if method not in ["GET", "HEAD"] and self.cache and cachekey:
                # RFC 2616 Section 13.10
                self.cache.delete(cachekey)

            # Check the vary header in the cache to see if this request
            # matches what varies in the cache.
            if method in ['GET', 'HEAD'] and 'vary' in info:
                vary = info['vary']
                vary_headers = vary.lower().replace(' ', '').split(',')
                for header in vary_headers:
                    key = '-varied-%s' % header
                    value = info[key]
                    if headers.get(header, None) != value:
                        cached_value = None
                        break

            if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers:
                if info.has_key('-x-permanent-redirect-url'):
                    # Should cached permanent redirects be counted in our redirection count? For now, yes.
                    if redirections <= 0:
                        raise RedirectLimit("Redirected more times than redirection_limit allows.", {}, "")
                    (response, new_content) = self.request(info['-x-permanent-redirect-url'], "GET", headers=headers, redirections=redirections-1)
                    response.previous = Response(info)
                    response.previous.fromcache = True
                else:
                    # Determine our course of action:
                    #   Is the cached entry fresh or stale?
                    #   Has the client requested a non-cached response?
                    #
                    # There seem to be three possible answers:
                    # 1. [FRESH] Return the cache entry w/o doing a GET
                    # 2. [STALE] Do the GET (but add in cache validators if available)
                    # 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request
                    entry_disposition = _entry_disposition(info, headers)

                    if entry_disposition == "FRESH":
                        if not cached_value:
                            info['status'] = '504'
                            content = ""
                        response = Response(info)
                        if cached_value:
                            response.fromcache = True
                        return (response, content)

                    if entry_disposition == "STALE":
                        if info.has_key('etag') and not self.ignore_etag and not 'if-none-match' in headers:
                            headers['if-none-match'] = info['etag']
                        if info.has_key('last-modified') and not 'last-modified' in headers:
                            headers['if-modified-since'] = info['last-modified']
                    elif entry_disposition == "TRANSPARENT":
                        pass

                    (response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)

                if response.status == 304 and method == "GET":
                    # Rewrite the cache entry with the new end-to-end headers
                    # Take all headers that are in response
                    # and overwrite their values in info.
                    # unless they are hop-by-hop, or are listed in the connection header.
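                    # For example (hypothetical headers): a 304 carrying a
                    # fresh 'etag' or 'cache-control' value updates those
                    # fields in the cached entry, while the cached body
                    # itself is reused as the content.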
for key in _get_end2end_headers(response): info[key] = response[key] merged_response = Response(info) if hasattr(response, "_stale_digest"): merged_response._stale_digest = response._stale_digest _updateCache(headers, merged_response, content, self.cache, cachekey) response = merged_response response.status = 200 response.fromcache = True elif response.status == 200: content = new_content else: self.cache.delete(cachekey) content = new_content else: cc = _parse_cache_control(headers) if cc.has_key('only-if-cached'): info['status'] = '504' response = Response(info) content = "" else: (response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey) except Exception, e: if self.force_exception_to_status_code: if isinstance(e, HttpLib2ErrorWithResponse): response = e.response content = e.content response.status = 500 response.reason = str(e) elif isinstance(e, socket.timeout): content = "Request Timeout" response = Response({ "content-type": "text/plain", "status": "408", "content-length": len(content) }) response.reason = "Request Timeout" else: content = str(e) response = Response({ "content-type": "text/plain", "status": "400", "content-length": len(content) }) response.reason = "Bad Request" else: raise return (response, content) def _get_proxy_info(self, scheme, authority): """Return a ProxyInfo instance (or None) based on the scheme and authority. """ hostname, port = urllib.splitport(authority) proxy_info = self.proxy_info if callable(proxy_info): proxy_info = proxy_info(scheme) if (hasattr(proxy_info, 'applies_to') and not proxy_info.applies_to(hostname)): proxy_info = None return proxy_info class Response(dict): """An object more like email.Message than httplib.HTTPResponse.""" """Is this response from our local cache""" fromcache = False """HTTP protocol version used by server. 10 for HTTP/1.0, 11 for HTTP/1.1. """ version = 11 "Status code returned by server. " status = 200 """Reason phrase returned by server.""" reason = "Ok" previous = None def __init__(self, info): # info is either an email.Message or # an httplib.HTTPResponse object. if isinstance(info, httplib.HTTPResponse): for key, value in info.getheaders(): self[key.lower()] = value self.status = info.status self['status'] = str(self.status) self.reason = info.reason self.version = info.version elif isinstance(info, email.Message.Message): for key, value in info.items(): self[key.lower()] = value self.status = int(self['status']) else: for key, value in info.iteritems(): self[key.lower()] = value self.status = int(self.get('status', self.status)) self.reason = self.get('reason', self.reason) def __getattr__(self, name): if name == 'dict': return self else: raise AttributeError, name httplib2-0.8/python2/httplib2/socks.py0000640017135500116100000004403312115724726017441 0ustar jcgregorioeng"""SocksiPy - Python SOCKS module. Version 1.00 Copyright 2006 Dan-Haim. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. 
Neither the name of Dan Haim nor the names of his contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE. This module provides a standard socket-like interface for Python for tunneling connections through SOCKS proxies. """ """ Minor modifications made by Christopher Gilbert (http://motomastyle.com/) for use in PyLoris (http://pyloris.sourceforge.net/) Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/) mainly to merge bug fixes found in Sourceforge """ import base64 import socket import struct import sys if getattr(socket, 'socket', None) is None: raise ImportError('socket.socket missing, proxy support unusable') PROXY_TYPE_SOCKS4 = 1 PROXY_TYPE_SOCKS5 = 2 PROXY_TYPE_HTTP = 3 PROXY_TYPE_HTTP_NO_TUNNEL = 4 _defaultproxy = None _orgsocket = socket.socket class ProxyError(Exception): pass class GeneralProxyError(ProxyError): pass class Socks5AuthError(ProxyError): pass class Socks5Error(ProxyError): pass class Socks4Error(ProxyError): pass class HTTPError(ProxyError): pass _generalerrors = ("success", "invalid data", "not connected", "not available", "bad proxy type", "bad input") _socks5errors = ("succeeded", "general SOCKS server failure", "connection not allowed by ruleset", "Network unreachable", "Host unreachable", "Connection refused", "TTL expired", "Command not supported", "Address type not supported", "Unknown error") _socks5autherrors = ("succeeded", "authentication is required", "all offered authentication methods were rejected", "unknown username or invalid password", "unknown error") _socks4errors = ("request granted", "request rejected or failed", "request rejected because SOCKS server cannot connect to identd on the client", "request rejected because the client program and identd report different user-ids", "unknown error") def setdefaultproxy(proxytype=None, addr=None, port=None, rdns=True, username=None, password=None): """setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]]) Sets a default proxy which all further socksocket objects will use, unless explicitly changed. """ global _defaultproxy _defaultproxy = (proxytype, addr, port, rdns, username, password) def wrapmodule(module): """wrapmodule(module) Attempts to replace a module's socket library with a SOCKS socket. Must set a default proxy using setdefaultproxy(...) first. This will only work on modules that import socket directly into the namespace; most of the Python Standard Library falls into this category. """ if _defaultproxy != None: module.socket.socket = socksocket else: raise GeneralProxyError((4, "no proxy specified")) class socksocket(socket.socket): """socksocket([family[, type[, proto]]]) -> socket object Open a SOCKS enabled socket. The parameters are the same as those of the standard socket init. 
In order for SOCKS to work, you must specify family=AF_INET, type=SOCK_STREAM and proto=0. """ def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None): _orgsocket.__init__(self, family, type, proto, _sock) if _defaultproxy != None: self.__proxy = _defaultproxy else: self.__proxy = (None, None, None, None, None, None) self.__proxysockname = None self.__proxypeername = None self.__httptunnel = True def __recvall(self, count): """__recvall(count) -> data Receive EXACTLY the number of bytes requested from the socket. Blocks until the required number of bytes have been received. """ data = self.recv(count) while len(data) < count: d = self.recv(count-len(data)) if not d: raise GeneralProxyError((0, "connection closed unexpectedly")) data = data + d return data def sendall(self, content, *args): """ override socket.socket.sendall method to rewrite the header for non-tunneling proxies if needed """ if not self.__httptunnel: content = self.__rewriteproxy(content) return super(socksocket, self).sendall(content, *args) def __rewriteproxy(self, header): """ rewrite HTTP request headers to support non-tunneling proxies (i.e. those which do not support the CONNECT method). This only works for HTTP (not HTTPS) since HTTPS requires tunneling. """ host, endpt = None, None hdrs = header.split("\r\n") for hdr in hdrs: if hdr.lower().startswith("host:"): host = hdr elif hdr.lower().startswith("get") or hdr.lower().startswith("post"): endpt = hdr if host and endpt: hdrs.remove(host) hdrs.remove(endpt) host = host.split(" ")[1] endpt = endpt.split(" ") if (self.__proxy[4] != None and self.__proxy[5] != None): hdrs.insert(0, self.__getauthheader()) hdrs.insert(0, "Host: %s" % host) hdrs.insert(0, "%s http://%s%s %s" % (endpt[0], host, endpt[1], endpt[2])) return "\r\n".join(hdrs) def __getauthheader(self): auth = self.__proxy[4] + ":" + self.__proxy[5] return "Proxy-Authorization: Basic " + base64.b64encode(auth) def setproxy(self, proxytype=None, addr=None, port=None, rdns=True, username=None, password=None): """setproxy(proxytype, addr[, port[, rdns[, username[, password]]]]) Sets the proxy to be used. proxytype - The type of the proxy to be used. Three types are supported: PROXY_TYPE_SOCKS4 (including socks4a), PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP addr - The address of the server (IP or DNS). port - The port of the server. Defaults to 1080 for SOCKS servers and 8080 for HTTP proxy servers. rdns - Should DNS queries be preformed on the remote side (rather than the local side). The default is True. Note: This has no effect with SOCKS4 servers. username - Username to authenticate with to the server. The default is no authentication. password - Password to authenticate with to the server. Only relevant when username is also provided. """ self.__proxy = (proxytype, addr, port, rdns, username, password) def __negotiatesocks5(self, destaddr, destport): """__negotiatesocks5(self,destaddr,destport) Negotiates a connection through a SOCKS5 server. """ # First we'll send the authentication packages we support. if (self.__proxy[4]!=None) and (self.__proxy[5]!=None): # The username/password details were supplied to the # setproxy method so we support the USERNAME/PASSWORD # authentication (in addition to the standard none). self.sendall(struct.pack('BBBB', 0x05, 0x02, 0x00, 0x02)) else: # No username/password were entered, therefore we # only support connections with no authentication. 
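            # (SOCKS5 greeting: version 0x05, one method offered,
            # 0x00 = no authentication required.)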
self.sendall(struct.pack('BBB', 0x05, 0x01, 0x00)) # We'll receive the server's response to determine which # method was selected chosenauth = self.__recvall(2) if chosenauth[0:1] != chr(0x05).encode(): self.close() raise GeneralProxyError((1, _generalerrors[1])) # Check the chosen authentication method if chosenauth[1:2] == chr(0x00).encode(): # No authentication is required pass elif chosenauth[1:2] == chr(0x02).encode(): # Okay, we need to perform a basic username/password # authentication. self.sendall(chr(0x01).encode() + chr(len(self.__proxy[4])) + self.__proxy[4] + chr(len(self.__proxy[5])) + self.__proxy[5]) authstat = self.__recvall(2) if authstat[0:1] != chr(0x01).encode(): # Bad response self.close() raise GeneralProxyError((1, _generalerrors[1])) if authstat[1:2] != chr(0x00).encode(): # Authentication failed self.close() raise Socks5AuthError((3, _socks5autherrors[3])) # Authentication succeeded else: # Reaching here is always bad self.close() if chosenauth[1] == chr(0xFF).encode(): raise Socks5AuthError((2, _socks5autherrors[2])) else: raise GeneralProxyError((1, _generalerrors[1])) # Now we can request the actual connection req = struct.pack('BBB', 0x05, 0x01, 0x00) # If the given destination address is an IP address, we'll # use the IPv4 address request even if remote resolving was specified. try: ipaddr = socket.inet_aton(destaddr) req = req + chr(0x01).encode() + ipaddr except socket.error: # Well it's not an IP number, so it's probably a DNS name. if self.__proxy[3]: # Resolve remotely ipaddr = None req = req + chr(0x03).encode() + chr(len(destaddr)).encode() + destaddr else: # Resolve locally ipaddr = socket.inet_aton(socket.gethostbyname(destaddr)) req = req + chr(0x01).encode() + ipaddr req = req + struct.pack(">H", destport) self.sendall(req) # Get the response resp = self.__recvall(4) if resp[0:1] != chr(0x05).encode(): self.close() raise GeneralProxyError((1, _generalerrors[1])) elif resp[1:2] != chr(0x00).encode(): # Connection failed self.close() if ord(resp[1:2])<=8: raise Socks5Error((ord(resp[1:2]), _socks5errors[ord(resp[1:2])])) else: raise Socks5Error((9, _socks5errors[9])) # Get the bound address/port elif resp[3:4] == chr(0x01).encode(): boundaddr = self.__recvall(4) elif resp[3:4] == chr(0x03).encode(): resp = resp + self.recv(1) boundaddr = self.__recvall(ord(resp[4:5])) else: self.close() raise GeneralProxyError((1,_generalerrors[1])) boundport = struct.unpack(">H", self.__recvall(2))[0] self.__proxysockname = (boundaddr, boundport) if ipaddr != None: self.__proxypeername = (socket.inet_ntoa(ipaddr), destport) else: self.__proxypeername = (destaddr, destport) def getproxysockname(self): """getsockname() -> address info Returns the bound IP address and port number at the proxy. """ return self.__proxysockname def getproxypeername(self): """getproxypeername() -> address info Returns the IP and port number of the proxy. """ return _orgsocket.getpeername(self) def getpeername(self): """getpeername() -> address info Returns the IP address and port number of the destination machine (note: getproxypeername returns the proxy) """ return self.__proxypeername def __negotiatesocks4(self,destaddr,destport): """__negotiatesocks4(self,destaddr,destport) Negotiates a connection through a SOCKS4 server. """ # Check if the destination address provided is an IP address rmtrslv = False try: ipaddr = socket.inet_aton(destaddr) except socket.error: # It's a DNS name. Check where it should be resolved. 
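            # With rdns enabled, the SOCKS4A extension is used below: the
            # pseudo-address 0.0.0.1 tells the server that the hostname is
            # appended to the request; otherwise the name is resolved locally.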
            if self.__proxy[3]:
                ipaddr = struct.pack("BBBB", 0x00, 0x00, 0x00, 0x01)
                rmtrslv = True
            else:
                ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
        # Construct the request packet
        req = struct.pack(">BBH", 0x04, 0x01, destport) + ipaddr
        # The username parameter is considered userid for SOCKS4
        if self.__proxy[4] != None:
            req = req + self.__proxy[4]
        req = req + chr(0x00).encode()
        # DNS name if remote resolving is required
        # NOTE: This is actually an extension to the SOCKS4 protocol
        # called SOCKS4A and may not be supported in all cases.
        if rmtrslv:
            req = req + destaddr + chr(0x00).encode()
        self.sendall(req)
        # Get the response from the server
        resp = self.__recvall(8)
        if resp[0:1] != chr(0x00).encode():
            # Bad data
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        if resp[1:2] != chr(0x5A).encode():
            # Server returned an error
            self.close()
            if ord(resp[1:2]) in (91, 92, 93):
                self.close()
                raise Socks4Error((ord(resp[1:2]), _socks4errors[ord(resp[1:2]) - 90]))
            else:
                raise Socks4Error((94, _socks4errors[4]))
        # Get the bound address/port
        self.__proxysockname = (socket.inet_ntoa(resp[4:]), struct.unpack(">H", resp[2:4])[0])
        if rmtrslv != None:
            self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
        else:
            self.__proxypeername = (destaddr, destport)

    def __negotiatehttp(self, destaddr, destport):
        """__negotiatehttp(self, destaddr, destport)
        Negotiates a connection through an HTTP server.
        """
        # If we need to resolve locally, we do this now
        if not self.__proxy[3]:
            addr = socket.gethostbyname(destaddr)
        else:
            addr = destaddr
        headers = ["CONNECT ", addr, ":", str(destport), " HTTP/1.1\r\n"]
        headers += ["Host: ", destaddr, "\r\n"]
        if (self.__proxy[4] != None and self.__proxy[5] != None):
            headers += [self.__getauthheader(), "\r\n"]
        headers.append("\r\n")
        self.sendall("".join(headers).encode())
        # We read the response until we get the string "\r\n\r\n"
        resp = self.recv(1)
        while resp.find("\r\n\r\n".encode()) == -1:
            resp = resp + self.recv(1)
        # We just need the first line to check if the connection
        # was successful
        statusline = resp.splitlines()[0].split(" ".encode(), 2)
        if statusline[0] not in ("HTTP/1.0".encode(), "HTTP/1.1".encode()):
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        try:
            statuscode = int(statusline[1])
        except ValueError:
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        if statuscode != 200:
            self.close()
            raise HTTPError((statuscode, statusline[2]))
        self.__proxysockname = ("0.0.0.0", 0)
        self.__proxypeername = (addr, destport)

    def connect(self, destpair):
        """connect(self, destpair)
        Connects to the specified destination through a proxy.
        destpair - A tuple of the IP/DNS address and the port number.
        (identical to socket's connect).
        To select the proxy server, use setproxy().
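        Example (hypothetical proxy and destination):
            s = socksocket()
            s.setproxy(PROXY_TYPE_SOCKS5, "127.0.0.1", 1080)
            s.connect(("example.com", 80))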
""" # Do a minimal input check first if (not type(destpair) in (list,tuple)) or (len(destpair) < 2) or (not isinstance(destpair[0], basestring)) or (type(destpair[1]) != int): raise GeneralProxyError((5, _generalerrors[5])) if self.__proxy[0] == PROXY_TYPE_SOCKS5: if self.__proxy[2] != None: portnum = self.__proxy[2] else: portnum = 1080 _orgsocket.connect(self, (self.__proxy[1], portnum)) self.__negotiatesocks5(destpair[0], destpair[1]) elif self.__proxy[0] == PROXY_TYPE_SOCKS4: if self.__proxy[2] != None: portnum = self.__proxy[2] else: portnum = 1080 _orgsocket.connect(self,(self.__proxy[1], portnum)) self.__negotiatesocks4(destpair[0], destpair[1]) elif self.__proxy[0] == PROXY_TYPE_HTTP: if self.__proxy[2] != None: portnum = self.__proxy[2] else: portnum = 8080 _orgsocket.connect(self,(self.__proxy[1], portnum)) self.__negotiatehttp(destpair[0], destpair[1]) elif self.__proxy[0] == PROXY_TYPE_HTTP_NO_TUNNEL: if self.__proxy[2] != None: portnum = self.__proxy[2] else: portnum = 8080 _orgsocket.connect(self,(self.__proxy[1],portnum)) if destpair[1] == 443: self.__negotiatehttp(destpair[0],destpair[1]) else: self.__httptunnel = False elif self.__proxy[0] == None: _orgsocket.connect(self, (destpair[0], destpair[1])) else: raise GeneralProxyError((4, _generalerrors[4])) httplib2-0.8/python2/httplib2/__init__.pyc-2.40000640017135500116100000015024112115724726020521 0ustar jcgregorioengmò LøÑPc @s­dklZdZdZddddddd gZd Zd Zd kZd kZd k Z d k Z d k Z d k Z d k Z d kZd kZd kZd kZd kZd kZd kZd kZd kZd kZd kZd kZyd klZlZWn7ej o+d k Z d kZe i!Zei!ZnXd k"Z"dk#l#Z$d k%Z%ydk&l'Z'Wn<ej o0y d k'Z'Wnej o e(Z'nXnXyd k)Z)e)i*Z+d„Z,Wn(e-efj oe(Z+d„Z,nXei.ddfjodk/l/Z/n d„Z/d„Z0dddddddddd d!g Z1d"Z2dZ3ei.dd#fjo d$„Z4nd%„Z5e6ei7d&ƒ oe5ei7_8nde9fd'„ƒYZ:d(e:fd)„ƒYZ;de;fd*„ƒYZ<de;fd+„ƒYZ=de;fd,„ƒYZ>de;fd-„ƒYZ?de;fd.„ƒYZ@d/e:fd0„ƒYZAd1e:fd2„ƒYZBd3e:fd4„ƒYZCd!e:fd5„ƒYZDd6e:fd7„ƒYZEd8e:fd9„ƒYZFd:e:fd;„ƒYZGd<eFfd=„ƒYZHd>ZIeiJiKeiJiLeiJiMeNƒƒd?ƒZOd@dAdBdCdDdEdFdGgZPdH„ZQeiRdIƒZSdJ„ZTdK„ZUeiRdLƒZVeiRdMƒZWdN„ZXeiRdOƒZYdP„ZZdQ„Z[d"Z\eiRdRƒZ]eiRdSƒZ^eiRdTƒZ_dUdV„Z`dW„ZadX„ZbdY„ZcdZ„Zdd[„Zed\effd]„ƒYZgd^egfd_„ƒYZhd`egfda„ƒYZidbegfdc„ƒYZjddegfde„ƒYZkdfegfdg„ƒYZlhdheh<diek<djei<dkej<dleltt,( tlistt HOP_BY_HOPthopbyhoptextendt_[1]R2tgettsplittxtstriptkeystheader(R2RFRHRNRK((R t_get_end2end_headersÀs Cs9^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?cCs<ti|ƒiƒ}|d|d|d|d|dfS(s‡Parses a URI using the regex given in Appendix B of RFC 3986. (scheme, authority, path, query, fragment) = parse_uri(uri) iiiiiN(tURItmatchRtgroups(RRR((R t parse_uriÇscCs¹t|ƒ\}}}}}| p| otd|ƒ‚n|iƒ}|iƒ}|p d}n|odi ||gƒp|}|iƒ}|d||}||||fS(Ns(Only absolute URIs are allowed. uri = %st/t?s://( RSRtschemet authoritytpathtquerytfragmentR7tlowertjoint request_urit defrag_uri(RRZR^RWRYRXRVR]((R turlnormÏs   # s^\w+://s[?/:|]+cCsüyYti|ƒoEt|tƒo"|idƒ}|idƒ}qX|idƒ}nWntj onXt|tƒo|idƒ}nt |ƒi ƒ}ti d|ƒ}t i d|ƒ}t|ƒdjo|d }ndi||fƒS(s•Return a filename suitable for the cache. Strips dangerous and common characters to create a filename we can use to store the cache in. 
sutf-8tidnaRBRCiÈN(t re_url_schemeRQtfilenamet isinstancetstrtdecodetencodet UnicodeErrortunicodet_md5t hexdigesttfilemd5tsubtre_slashtlenR\(RbRk((R tsafenameãs"s(?:\r\n)?[ \t]+cCsOtg}|iƒD]1\}}||iƒti|dƒi ƒfq~ƒS(Nt ( tdictRHtheaderst iteritemstkeytvalueR[tNORMALIZE_SPACERlRL(RrRuRHRt((R t_normalize_headersÿsc Csh} |idƒoè|didƒ}g}|D]`}d|idƒjoD|tg}|iddƒD]}||i ƒi ƒqm~ƒq4q4~}g}|D]9}d|idƒjo||i ƒi ƒdfq¨q¨~}t||ƒ} n| S(Ns cache-controlRCiÿÿÿÿt=i(tretvalRrthas_keyRJtpartsRHtparttfindttuplet_[2]RKRLR[tparts_with_argstnamet parts_wo_argsRq( RrRHRR{R€R|RKR‚RRy((R t_parse_cache_controlstMsÊ^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$sk^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?yt|d ƒ}Wntj o d }nX||7}n||jo d} qcn| S(sìDetermine freshness from the Date, Expires and Cache-Control headers. We don't handle the following: 1. Cache-Control: max-stale 2. Age: headers are not used in the calculations. Not that this algorithm is simpler than you might think because we are operating as a private (non-shared) cache. This lets us ignore 's-maxage'. We can also ignore 'proxy-invalidate' since we aren't a proxy. We will never return a stale document as fresh as a design decision, and thus the non-implementation of 'max-stale'. This also lets us safely ignore 'must-revalidate' since we operate as if every server has sent 'must-revalidate'. Since we are private we get to ignore both 'public' and 'private' parameters. We also ignore 'no-transform' since we don't do any transformations. The 'no-store' parameter is handled at a higher level. So the only Cache-Control parameters we look at are: no-cache only-if-cached max-age min-fresh tSTALEtpragmasno-cacheiÿÿÿÿt TRANSPARENTs cache-controlsonly-if-cachedtFRESHtdateismax-agetexpiress min-freshN(RyRƒtrequest_headerstcctresponse_headerst cc_responseRzR[R}tcalendarttimegmtemailtUtilst parsedate_tzR–ttimetnowtmaxt current_agetinttfreshness_lifetimeRR—Rt min_fresh( RšR˜R§R¢R™R—R¤R¦R–R›Ry((R t_entry_disposition7sV  0          cCsõ|}yª|iddƒ}|ddgjo|djo%tidti|ƒƒi ƒ}n|djot i |ƒ}nt t |ƒƒ|d<|d|d<|d=nWn>tj o2d}ttdƒ|idƒ||ƒ‚nX|S( Nscontent-encodingtgziptdeflatetfileobjscontent-lengths-content-encodingRBsDContent purported to be compressed with %s but failed to decompress.(t new_contentR3R2RIRtencodingR©tGzipFiletStringIOtreadtzlibt decompressRdRntIOErrorR!t_(R2R¬R­R3((R t_decompressContents % *cCsœ|o‘t|ƒ}t|ƒ} |idƒp| idƒo|i|ƒq˜t i i ƒ}x>|i ƒD]0\} }| dddgjo||| scsˆd||fƒS(Ns%s:%s(tHtstd(RïRð(Rî(R RísRBRçRÒs"%s"s%s:%s:%s:%s:%stnonces%08xRéRãsoDigest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"iRèRåRátopaques , opaque="%s"iN(RîtKDR\RÝR]tA2RÒRÎR)RêRìtrequest_digestR×RrRI( R)RÝR]RrR3RÒRõRóRîRô((RîR RÞs  UZ cCs¼|idƒp[t|dƒidhƒ}d|idƒjo&|d|idR?RBR@(((R Räs    RØcCsÒ|ddgjodSn|d}tii|tii|iƒƒƒ}|pdSnt||ƒ}tiidtiiddƒƒ}g}|o|i dƒ}n|d jo t }n||_ |S( s9 Read proxy info from the environment variables. RØthttpsNt_proxytno_proxytNO_PROXYRBRCt*( RÝtenv_varR$tenvironRIRëturltproxy_info_from_urltpiRHRCRJR7(RÝRMRKRHRCRO((R tproxy_info_from_environments  '$   c Cs#ti|ƒ}d}d}d}d|djoO|diddƒ\}}d|jo|iddƒ\}}q‹|}n |d}d|jo|iddƒ\}}n|}|ot |ƒ}nt ddddƒ|}d}td |d |d |d |pdd |pdƒS(sG Construct a ProxyInfo from a URL (such as http_proxy env var) t@iRçRFi»RØiPiR8R9R:R<R=N(turlparseRMRtusernameRÔtportRJtidentt host_portR<R¥RqRÝR8R( RMRÝRSRURVR8RTR<RÔ((R RN&s.      
tHTTPConnectionWithTimeoutcBs,tZdZeeeed„Zd„ZRS(s8 HTTPConnection subclass that supports timeouts All timeouts are in seconds. If None is passed for timeout then Python's default timeout for sockets will be used. See for example the docs of socket.setdefaulttimeout(): http://docs.python.org/library/socket.html#socket.setdefaulttimeout cCs/tii||||ƒ||_||_dS(N( RtHTTPConnectionR4R)R<RTtstrictRt proxy_info(R)R<RTRYRRZ((R R4Ts cCs­|iotd jotdƒ‚nd}|io;|iiƒo+t}|ii ƒ\}} } }} }nt}|o|o| } | }n|i} |i}xäti| |dtiƒD]Ç}|\}} }}}y|o;ti|| |ƒ|_|ii|| | || |ƒn5ti|| |ƒ|_|iiti ti!dƒt"|i#ƒo|ii$|i#ƒn|i%djoBd|i|ifGH|o"dt&| | || |fƒGHqÑn|ii'|i|if|dƒWn‘ti(j o‚}|i%djoBd|i|ifGH|o"d t&| | || |fƒGHq^n|io|ii)ƒnd |_qÄnXPqÄW|ipti(|‚nd S( s3Connect to the host and port specified in __init__.s2Proxy support missing but proxy use was requested!s!getaddrinfo returns an empty listiisconnect: (%s, %s) ************sproxy: %s ************isconnect fail: (%s, %s)s proxy: %sN(*R)RZtsocksRR%R*R?Rùt use_proxyR>R8R9R:R;R<R=RßR<RTRt getaddrinfot SOCK_STREAMtrestaftsocktypetprotot canonnametsat socksocketR tsetproxyt setsockoptt IPPROTO_TCPt TCP_NODELAYRRt settimeoutR$RdtconnectterrorR+(R)R`R;RTR\RbR_R*R8R:R<RaR<R9RcRdR=((R RkYsR%   #&(&   (R/R0RûRR4Rk(((R RWJs tHTTPSConnectionWithTimeoutc BsJtZdZeeeeeeeed„Zd„Zd„Zd„ZRS(s2 This class allows communication via SSL. All timeouts are in seconds. If None is passed for timeout then Python's default timeout for sockets will be used. See for example the docs of socket.setdefaulttimeout(): http://docs.python.org/library/socket.html#socket.setdefaulttimeout c Csjtii||d|d|d|d|ƒ||_ ||_ |djo t }n||_ | |_dS(NRTR R RY(RtHTTPSConnectionR4R)R<RTR R RYRRZRRtCA_CERTSt"disable_ssl_certificate_validation( R)R<RTR R RYRRZRRp((R R4•s     cCs¡d|joFg}|dD],}|diƒdjo||dqq~SnKg}|dD]4}|ddiƒdjo||ddqbqb~SdS(sÍReturns a list of valid host globs for an SSL certificate. Args: cert: A dictionary representing an SSL certificate. Returns: list: A list of valid host globs. tsubjectAltNameitdnsitsubjectt commonnameN(R=RHRKR[(R)R=RHRK((R t_GetValidHostsForCert¶s FcCsj|i|ƒ}xT|D]L}|iddƒiddƒ}tid|f|ti ƒot SqqWt S(sValidates that a given hostname is valid for an SSL certificate. Args: cert: A dictionary representing an SSL certificate. hostname: The hostname to test. Returns: bool: Whether or not the hostname is valid for this certificate. t.s\.RJs[^.]*s^%s$N( R)RuR=thostsR<R½thost_reRÃRRAtIRùRß(R)R=RARxR<Rw((R t_ValidateCertificateHostnameÅs  cCsŠd} |io;|iiƒo+t}|iiƒ\} }} }}}nt }|o|o|} | }n|i} |i}xèti| |dtiƒD]Ë\}}}}}y|o5ti|||ƒ} | i| || |||ƒn/ti|||ƒ} | ititidƒt|i ƒo| i!|i ƒn| i"|i|ifƒt#| |i$|i%|i&|i'ƒ|_|i(djoBd|i|ifGH|o"dt)|| |||fƒGHqÕn|i&p_|ii*ƒ}|ii,ddƒd}|i.||ƒp t/d||f||ƒ‚q>nWn%t0j oh}| o| i2ƒn|io|ii2ƒnd |_|i4t5i6jot7|ƒ‚qg‚n³ti ti8fj o ‚n’ti9j o‚} |i(djoBd|i|ifGH|o"dt)|| |||fƒGHq;n|io|ii2ƒnd |_qnXPqW|ipti9| ‚nd S( s(Connect to a host on a given (SSL) port.s!getaddrinfo returns an empty listiisconnect: (%s, %s)s proxy: %sRçs<Server presented certificate that does not match host %s: %ssconnect fail: (%s, %s)N(:R*R)RZR?RùR\R>R8R9R:R;R<R=RßR<RTRR]R^tfamilyRaRbRctsockaddrR[ReR RfRgRhRiRRRjRkRR R RpRR$Rdt getpeercertR=RJRARzR;t ssl_SSLErrorteR+RterrnoRt SSL_ERROR_SSLR9tgaierrorRl(R)R|R{R;RTR\R9RcRAR R*R8R:R<RaR<RbR=R=R((R RkÕsr%    &   &   ( R/R0RûRRßR4RuRzRk(((R RmŒs !!  
[unreadable binary: bytecode continues — the Google App Engine shim and the start of the Http class. Recoverable docstrings:

AppEngineHttpConnection: "Use httplib on App Engine, but compensate for its weirdness. The parameters key_file, cert_file, proxy_info, ca_certs, and disable_ssl_certificate_validation are all dropped on the ground."

AppEngineHttpsConnection: "Same as AppEngineHttpConnection, but for HTTPS URIs."

Http: "An HTTP client that handles: - all methods - caching - ETags - compression - HTTPS - Basic - Digest - WSSE and more."

Http.__init__: "If 'cache' is a string then it is used as a directory name for a disk cache. Otherwise it must be an object that supports the same interface as FileCache. All timeouts are in seconds. If None is passed for timeout then Python's default timeout for sockets will be used. See for example the docs of socket.setdefaulttimeout(): http://docs.python.org/library/socket.html#socket.setdefaulttimeout `proxy_info` may be: - a callable that takes the http scheme ('http' or 'https') and returns a ProxyInfo instance per request. By default, uses proxy_info_from_environment. - a ProxyInfo instance (static proxy config). - None (proxy disabled). ca_certs is the path of a file containing root CA certificates for SSL server certificate validation. By default, a CA cert file bundled with httplib2 is used. If disable_ssl_certificate_validation is true, SSL cert validation will not be performed."

Http._auth_from_challenge: "A generator that creates Authorization objects that can be applied to requests."]
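Mapping the constructor docstring above onto a call (the argument values here are illustrative, not defaults taken from the source):

    import httplib2

    h = httplib2.Http(
        ".cache",            # directory name for the disk cache, or a FileCache-like object
        timeout=10,          # seconds; None falls back to the socket default
        proxy_info=None,     # None disables proxying; the default reads http_proxy et al.
        disable_ssl_certificate_validation=False)
    h.add_credentials('name', 'password', 'example.org')  # optional third arg restricts the domain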
[unreadable binary: bytecode concludes — the remaining Http methods (request, _request, _conn_request and the redirect/cache handling) and the Response class. Recoverable docstrings:

Http.add_credentials: "Add a name and password that will be used any time a request requires authentication."

Http.add_certificate: "Add a key and cert that will be used any time a request requires authentication."

Http.clear_credentials: "Remove all the names and passwords that are used for authentication"

Http._conn_request — error text: "Unable to find the server at %s"

Response: "An object more like email.Message than httplib.HTTPResponse." (defaults: status 200, reason "Ok")]
httplib2-0.8/python2/httplib2/test/0000750017135500116100000000000012115724726016717 5ustar jcgregorioeng
httplib2-0.8/python2/httplib2/test/brokensocket/0000750017135500116100000000000012115724726021410 5ustar jcgregorioeng
httplib2-0.8/python2/httplib2/test/brokensocket/socket.py0000640017135500116100000000010112115724726023243 0ustar jcgregorioeng
from realsocket import gaierror, error, getaddrinfo, SOCK_STREAM
httplib2-0.8/python2/httplib2/test/other_cacerts.txt0000640017135500116100000000672512115724726022310 0ustar jcgregorioeng
# Certificate Authority certificates for validating SSL connections.
#
# This file contains PEM format certificates generated from
# http://mxr.mozilla.org/seamonkey/source/security/nss/lib/ckfw/builtins/certdata.txt
#
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is the Netscape security libraries.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1994-2000
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above.
If you wish to allow use of your version of this file only # under the terms of either the GPL or the LGPL, and not to allow others to # use your version of this file under the terms of the MPL, indicate your # decision by deleting the provisions above and replace them with the notice # and other provisions required by the GPL or the LGPL. If you do not delete # the provisions above, a recipient may use your version of this file under # the terms of any one of the MPL, the GPL or the LGPL. # # ***** END LICENSE BLOCK ***** Comodo CA Limited, CN=Trusted Certificate Services ================================================== -----BEGIN CERTIFICATE----- MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0 aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTla MH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAO BgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYD VQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0B AQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWW fnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMt TGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7IL fhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW 1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7 kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0G A1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYD VR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21v ZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRo dHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMu Y3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/ HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32 pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxIS jBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+ xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi -----END CERTIFICATE----- httplib2-0.8/python2/httplib2/test/__init__.py0000640017135500116100000000000012115724726021017 0ustar jcgregorioenghttplib2-0.8/python2/httplib2/test/miniserver.py0000640017135500116100000000637712115724726021472 0ustar jcgregorioengimport logging import os import select import SimpleHTTPServer import SocketServer import threading HERE = os.path.dirname(__file__) logger = logging.getLogger(__name__) class ThisDirHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): def translate_path(self, path): path = path.split('?', 1)[0].split('#', 1)[0] return os.path.join(HERE, *filter(None, path.split('/'))) def log_message(self, s, *args): # output via logging so nose can catch it logger.info(s, *args) class ShutdownServer(SocketServer.TCPServer): """Mixin that allows serve_forever to be shut down. The methods in this mixin are backported from SocketServer.py in the Python 2.6.4 standard library. The mixin is unnecessary in 2.6 and later, when BaseServer supports the shutdown method directly. """ def __init__(self, *args, **kwargs): SocketServer.TCPServer.__init__(self, *args, **kwargs) self.__is_shut_down = threading.Event() self.__serving = False def serve_forever(self, poll_interval=0.1): """Handle one request at a time until shutdown. Polls for shutdown every poll_interval seconds. Ignores self.timeout. If you need to do periodic tasks, do them in another thread. 
""" self.__serving = True self.__is_shut_down.clear() while self.__serving: r, w, e = select.select([self.socket], [], [], poll_interval) if r: self._handle_request_noblock() self.__is_shut_down.set() def shutdown(self): """Stops the serve_forever loop. Blocks until the loop has finished. This must be called while serve_forever() is running in another thread, or it will deadlock. """ self.__serving = False self.__is_shut_down.wait() def handle_request(self): """Handle one request, possibly blocking. Respects self.timeout. """ # Support people who used socket.settimeout() to escape # handle_request before self.timeout was available. timeout = self.socket.gettimeout() if timeout is None: timeout = self.timeout elif self.timeout is not None: timeout = min(timeout, self.timeout) fd_sets = select.select([self], [], [], timeout) if not fd_sets[0]: self.handle_timeout() return self._handle_request_noblock() def _handle_request_noblock(self): """Handle one request, without blocking. I assume that select.select has returned that the socket is readable before this function was called, so there should be no risk of blocking in get_request(). """ try: request, client_address = self.get_request() except socket.error: return if self.verify_request(request, client_address): try: self.process_request(request, client_address) except: self.handle_error(request, client_address) self.close_request(request) def start_server(handler): httpd = ShutdownServer(("", 0), handler) threading.Thread(target=httpd.serve_forever).start() _, port = httpd.socket.getsockname() return httpd, port httplib2-0.8/python2/httplib2/test/functional/0000750017135500116100000000000012115724726021061 5ustar jcgregorioenghttplib2-0.8/python2/httplib2/test/functional/test_proxies.py0000640017135500116100000000562512115724726024174 0ustar jcgregorioengimport unittest import errno import os import signal import subprocess import tempfile import nose import httplib2 from httplib2 import socks from httplib2.test import miniserver tinyproxy_cfg = """ User "%(user)s" Port %(port)s Listen 127.0.0.1 PidFile "%(pidfile)s" LogFile "%(logfile)s" MaxClients 2 StartServers 1 LogLevel Info """ class FunctionalProxyHttpTest(unittest.TestCase): def setUp(self): if not socks: raise nose.SkipTest('socks module unavailable') if not subprocess: raise nose.SkipTest('subprocess module unavailable') # start a short-lived miniserver so we can get a likely port # for the proxy self.httpd, self.proxyport = miniserver.start_server( miniserver.ThisDirHandler) self.httpd.shutdown() self.httpd, self.port = miniserver.start_server( miniserver.ThisDirHandler) self.pidfile = tempfile.mktemp() self.logfile = tempfile.mktemp() fd, self.conffile = tempfile.mkstemp() f = os.fdopen(fd, 'w') our_cfg = tinyproxy_cfg % {'user': os.getlogin(), 'pidfile': self.pidfile, 'port': self.proxyport, 'logfile': self.logfile} f.write(our_cfg) f.close() try: # TODO use subprocess.check_call when 2.4 is dropped ret = subprocess.call(['tinyproxy', '-c', self.conffile]) self.assertEqual(0, ret) except OSError, e: if e.errno == errno.ENOENT: raise nose.SkipTest('tinyproxy not available') raise def tearDown(self): self.httpd.shutdown() try: pid = int(open(self.pidfile).read()) os.kill(pid, signal.SIGTERM) except OSError, e: if e.errno == errno.ESRCH: print '\n\n\nTinyProxy Failed to start, log follows:' print open(self.logfile).read() print 'end tinyproxy log\n\n\n' raise map(os.unlink, (self.pidfile, self.logfile, self.conffile)) def testSimpleProxy(self): proxy_info = 
httplib2.ProxyInfo(socks.PROXY_TYPE_HTTP, 'localhost', self.proxyport) client = httplib2.Http(proxy_info=proxy_info) src = 'miniserver.py' response, body = client.request('http://localhost:%d/%s' % (self.port, src)) self.assertEqual(response.status, 200) self.assertEqual(body, open(os.path.join(miniserver.HERE, src)).read()) lf = open(self.logfile).read() expect = ('Established connection to host "127.0.0.1" ' 'using file descriptor') self.assertTrue(expect in lf, 'tinyproxy did not proxy a request for miniserver')
httplib2-0.8/python2/httplib2/test/test_no_socket.py0000640017135500116100000000130012115724726022311 0ustar jcgregorioeng
"""Tests for httplib2 when the socket module is missing.

This helps ensure compatibility with environments such as AppEngine.
"""
import os
import sys
import unittest

import httplib2

class MissingSocketTest(unittest.TestCase):
    def setUp(self):
        self._oldsocks = httplib2.socks
        httplib2.socks = None

    def tearDown(self):
        httplib2.socks = self._oldsocks

    def testProxyDisabled(self):
        proxy_info = httplib2.ProxyInfo('blah', 'localhost', 0)
        client = httplib2.Http(proxy_info=proxy_info)
        self.assertRaises(httplib2.ProxiesUnavailableError,
                          client.request, 'http://localhost:-1/')
httplib2-0.8/python2/httplib2/test/smoke_test.py0000640017135500116100000000117612115724726021454 0ustar jcgregorioeng
import os
import unittest

import httplib2

from httplib2.test import miniserver

class HttpSmokeTest(unittest.TestCase):
    def setUp(self):
        self.httpd, self.port = miniserver.start_server(
            miniserver.ThisDirHandler)

    def tearDown(self):
        self.httpd.shutdown()

    def testGetFile(self):
        client = httplib2.Http()
        src = 'miniserver.py'
        response, body = client.request('http://localhost:%d/%s' %
                                        (self.port, src))
        self.assertEqual(response.status, 200)
        self.assertEqual(body, open(os.path.join(miniserver.HERE, src)).read())
httplib2-0.8/python2/httplib2/socks.pyc-2.40000640017135500116100000004024412115724726020105 0ustar jcgregorioeng
[unreadable binary: compiled Python 2.4 bytecode for httplib2/socks.py (SocksiPy). Recoverable strings include the SOCKS4/SOCKS5/HTTP proxy error tables ("request granted", "request rejected or failed", "authentication is required", ...) and these docstrings:

setdefaultproxy: "setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]]) Sets a default proxy which all further socksocket objects will use, unless explicitly changed."

wrapmodule: "wrapmodule(module) Attempts to replace a module's socket library with a SOCKS socket. Must set a default proxy using setdefaultproxy(...) first. This will only work on modules that import socket directly into the namespace; most of the Python Standard Library falls into this category."

socksocket: "socksocket([family[, type[, proto]]]) -> socket object Open a SOCKS enabled socket. The parameters are the same as those of the standard socket init.
In order for SOCKS to work, you must specify family=AF_INET, type=SOCK_STREAM and proto=0."

socksocket.__recvall: "Receive EXACTLY the number of bytes requested from the socket. Blocks until the required number of bytes have been received."

socksocket.sendall: "override socket.socket.sendall method to rewrite the header for non-tunneling proxies if needed"

socksocket.__rewriteproxy: "rewrite HTTP request headers to support non-tunneling proxies (i.e. those which do not support the CONNECT method). This only works for HTTP (not HTTPS) since HTTPS requires tunneling."

socksocket.setproxy: "setproxy(proxytype, addr[, port[, rdns[, username[, password]]]]) Sets the proxy to be used. proxytype - The type of the proxy to be used. Three types are supported: PROXY_TYPE_SOCKS4 (including socks4a), PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP. addr - The address of the server (IP or DNS). port - The port of the server. Defaults to 1080 for SOCKS servers and 8080 for HTTP proxy servers. rdns - Should DNS queries be performed on the remote side (rather than the local side). The default is True. Note: This has no effect with SOCKS4 servers. username - Username to authenticate with to the server. The default is no authentication. password - Password to authenticate with to the server. Only relevant when username is also provided."

socksocket.__negotiatesocks5: "Negotiates a connection through a SOCKS5 server."

socksocket.getproxysockname: "getsockname() -> address info Returns the bound IP address and port number at the proxy."]
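As a usage sketch (mirroring what test/functional/test_proxies.py above does; the host and port here are illustrative placeholders, not values from the source):

    import httplib2
    from httplib2 import socks

    # Route every request from this client through an HTTP CONNECT proxy.
    proxy_info = httplib2.ProxyInfo(socks.PROXY_TYPE_HTTP, 'localhost', 8080)
    h = httplib2.Http(proxy_info=proxy_info)
    response, content = h.request('http://example.org/')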
[bytecode continues — the remaining socksocket docstrings:

socksocket.getproxypeername: "getproxypeername() -> address info Returns the IP and port number of the proxy."

socksocket.getpeername: "getpeername() -> address info Returns the IP address and port number of the destination machine (note: getproxypeername returns the proxy)."

socksocket.__negotiatesocks4: "Negotiates a connection through a SOCKS4 server."

socksocket.__negotiatehttp: "Negotiates a connection through an HTTP server."

socksocket.connect: "connect(self, destpair) Connects to the specified destination through a proxy. destpair - A tuple of the IP/DNS address and the port number. (identical to socket's connect). To select the proxy server use setproxy()."]
httplib2-0.8/python2/httplib2/iri2uri.pyc-2.40000640017135500116100000000713112115724726020346 0ustar jcgregorioeng
[unreadable binary: compiled Python 2.4 bytecode for httplib2/iri2uri.py ("iri2uri — Converts an IRI to a URI.", Joe Gregorio, Copyright 2006, MIT, version 1.0.0). Recoverable docstrings:

iri2uri: "Convert an IRI to a URI.
Note that IRIs must be passed in as unicode strings. That is, do not utf-8 encode the IRI before passing it into the function."

Embedded unit tests: "Test that URIs are invariant under the transformation." (over e.g. ftp://ftp.is.co.za/rfc/rfc1808.txt, http://www.ietf.org/rfc/rfc2396.txt, ldap://[2001:db8::7]/c=GB?objectClass?one, mailto:John.Doe@example.com, news:comp.infosystems.www.servers.unix, tel:+1-816-555-1212, telnet://192.0.2.16:80/, urn:oasis:names:specification:docbook:dtd:xml:4.1.2) and "Test that the right type of escaping is done for each part of the URI." (e.g. iri2uri(u"http://☄.com/☄") == "http://xn--o3h.com/%E2%98%84").]
httplib2-0.8/python2/httplib2/__init__.py.rej0000640017135500116100000000366012115724726020636 0ustar jcgregorioeng--- __init__.py +++ __init__.py @@ -1058,15 +1062,26 @@ # Use a different connection object for Google App Engine try: - from google.appengine.api import apiproxy_stub_map - if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None: - raise ImportError # Bail out; we're not actually running on App Engine. - from google.appengine.api.urlfetch import fetch - from google.appengine.api.urlfetch import InvalidURLError + try: + from google.appengine.api import apiproxy_stub_map + if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None: + raise ImportError # Bail out; we're not actually running on App Engine. + from google.appengine.api.urlfetch import fetch + from google.appengine.api.urlfetch import InvalidURLError + except ImportError: + from google3.apphosting.api import apiproxy_stub_map + if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None: + raise ImportError # Bail out; we're not actually running on App Engine.
+ from google3.apphosting.api.urlfetch import fetch + from google3.apphosting.api.urlfetch import InvalidURLError def _new_fixed_fetch(validate_certificate): - def fixed_fetch(url, payload=None, method="GET", headers={}, allow_truncated=False, follow_redirects=True, deadline=5): - return fetch(url, payload=payload, method=method, headers=header, allow_truncated=allow_truncated, follow_redirects=follow_redirects, deadline=deadline, validate_certificate=validate_certificate) + def fixed_fetch(url, payload=None, method="GET", headers={}, + allow_truncated=False, follow_redirects=True, deadline=5): + return fetch(url, payload=payload, method=method, headers=header, + allow_truncated=allow_truncated, + follow_redirects=follow_redirects, deadline=deadline, + validate_certificate=validate_certificate) return fixed_fetch class AppEngineHttpConnection(httplib.HTTPConnection): httplib2-0.8/python2/httplib2test_appengine.py0000640017135500116100000000614412115724726021246 0ustar jcgregorioeng""" httplib2test_appengine A set of unit tests for httplib2.py on Google App Engine """ __author__ = "Joe Gregorio (joe@bitworking.org)" __copyright__ = "Copyright 2011, Joe Gregorio" import os import sys import unittest # The test resources base uri base = 'http://bitworking.org/projects/httplib2/test/' #base = 'http://localhost/projects/httplib2/test/' cacheDirName = ".cache" APP_ENGINE_PATH='../../google_appengine' sys.path.insert(0, APP_ENGINE_PATH) import dev_appserver dev_appserver.fix_sys_path() from google.appengine.ext import testbed testbed = testbed.Testbed() testbed.activate() testbed.init_urlfetch_stub() import google.appengine.api import httplib2 class AppEngineHttpTest(unittest.TestCase): def setUp(self): if os.path.exists(cacheDirName): [os.remove(os.path.join(cacheDirName, file)) for file in os.listdir(cacheDirName)] def test(self): h = httplib2.Http() response, content = h.request("http://bitworking.org") self.assertEqual(httplib2.SCHEME_TO_CONNECTION['https'], httplib2.AppEngineHttpsConnection) self.assertEquals(1, len(h.connections)) self.assertEquals(response.status, 200) self.assertEquals(response['status'], '200') # It would be great to run the test below, but it really tests the # aberrant behavior of httplib on App Engine, but that special aberrant # httplib only appears when actually running on App Engine and not when # running via the SDK. When running via the SDK the httplib in std lib is # loaded, which throws a different error when a timeout occurs. # #def test_timeout(self): # # The script waits 3 seconds, so a timeout of more than that should succeed. # h = httplib2.Http(timeout=7) # r, c = h.request('http://bitworking.org/projects/httplib2/test/timeout/timeout.cgi') # # import httplib # print httplib.__file__ # h = httplib2.Http(timeout=1) # try: # r, c = h.request('http://bitworking.org/projects/httplib2/test/timeout/timeout.cgi') # self.fail('Timeout should have raised an exception.') # except DeadlineExceededError: # pass def test_proxy_info_ignored(self): h = httplib2.Http(proxy_info='foo.txt') response, content = h.request("http://bitworking.org") self.assertEquals(response.status, 200) class AberrationsTest(unittest.TestCase): def setUp(self): self.orig_apiproxy_stub_map = google.appengine.api.apiproxy_stub_map # Force apiproxy_stub_map to None to trigger the test condition. 
google.appengine.api.apiproxy_stub_map = None reload(httplib2) def tearDown(self): google.appengine.api.apiproxy_stub_map = self.orig_apiproxy_stub_map reload(httplib2) def test(self): self.assertNotEqual(httplib2.SCHEME_TO_CONNECTION['https'], httplib2.AppEngineHttpsConnection) self.assertNotEqual(httplib2.SCHEME_TO_CONNECTION['http'], httplib2.AppEngineHttpConnection) if __name__ == '__main__': unittest.main()
httplib2-0.8/README0000640017135500116100000001037412115724726013473 0ustar jcgregorioeng
Httplib2
--------------------------------------------------------------------
Introduction

A comprehensive HTTP client library, httplib2.py supports many
features left out of other HTTP libraries.

HTTP and HTTPS
  HTTPS support is only available if the socket module was compiled
  with SSL support.

Keep-Alive
  Supports HTTP 1.1 Keep-Alive, keeping the socket open and performing
  multiple requests over the same connection if possible.

Authentication
  The following three types of HTTP Authentication are supported.
  These can be used over both HTTP and HTTPS.

  * Digest
  * Basic
  * WSSE

Caching
  The module can optionally operate with a private cache that
  understands the Cache-Control: header and uses both the ETag and
  Last-Modified cache validators.

All Methods
  The module can handle any HTTP request method, not just GET and POST.

Redirects
  Automatically follows 3XX redirects on GETs.

Compression
  Handles both 'deflate' and 'gzip' types of compression.

Lost update support
  Automatically adds back ETags into PUT requests to resources we have
  already cached. This implements Section 3.2 of Detecting the Lost
  Update Problem Using Unreserved Checkout.

Unit Tested
  A large and growing set of unit tests.

For more information on this module, see:

  http://bitworking.org/projects/httplib2/

--------------------------------------------------------------------
Installation

The httplib2 module is shipped as a distutils package. To install
the library, unpack the distribution archive, and issue the following
command:

  $ python setup.py install

--------------------------------------------------------------------
Usage

A simple retrieval:

  import httplib2
  h = httplib2.Http(".cache")
  (resp_headers, content) = h.request("http://example.org/", "GET")

The 'content' is the content retrieved from the URL. The content is
already decompressed or unzipped if necessary.

To PUT some content to a server that uses SSL and Basic authentication:

  import httplib2
  h = httplib2.Http(".cache")
  h.add_credentials('name', 'password')
  (resp, content) = h.request("https://example.org/chapter/2",
                              "PUT", body="This is text",
                              headers={'content-type':'text/plain'})

Use the Cache-Control: header to control how the caching operates.

  import httplib2
  h = httplib2.Http(".cache")
  (resp, content) = h.request("http://bitworking.org/", "GET")
  ...
  (resp, content) = h.request("http://bitworking.org/", "GET",
                              headers={'cache-control':'no-cache'})

The first request will be cached and, since this is a request to
bitworking.org, it will be set to be cached for two hours, because
that is how I have my server configured. Any subsequent GET to that
URI will return the value from the on-disk cache and no request will
be made to the server.

You can use the Cache-Control: header to change the cache's behavior,
and in this example the second request adds the Cache-Control: header
with a value of 'no-cache', which tells the library that the cached
copy must not be used when handling this request.
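To see whether a response was served from the cache, check the
'fromcache' attribute on the response (a short sketch; the URI is
illustrative):

  import httplib2
  h = httplib2.Http(".cache")
  (resp, content) = h.request("http://bitworking.org/", "GET")
  print resp.fromcache     # False on the first fetch
  (resp, content) = h.request("http://bitworking.org/", "GET")
  print resp.fromcache     # True if the cached copy was still fresh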
-------------------------------------------------------------------- Httplib2 Software License Copyright (c) 2006 by Joe Gregorio Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. httplib2-0.8/CHANGELOG0000640017135500116100000001754712115724726014036 0ustar jcgregorioeng0.8 More fixes for the App Engine support. Added a new feature that allows you to supply your own provider for the CA_CERTS file. Just create a module named ca_certs_locater that has a method get() that returns the file location of the CA_CERTS file. Lots of clean up of the code formatting to make it more consistent. 0.7.7 More fixes for App Engine, now less likely to swallow important exceptions. Adding proxy_info_from_* methods to Python3. Reviewed in https://codereview.appspot.com/6588078/. Added GeoTrust cert Make httplib2.Http() instances pickleable. Reviewed in https://codereview.appspot.com/6506074/ The following issues have been fixed: 229 python3 httplib2 clobbers multiple headers of same key 230 Expose meaningful exception for App Engine URLFetch ResponseTooLargeError 231 Expose App Engine URLFetch DeadlineExceededError for debugging purposes 0.7.6 Fixes for App Engine 2.7. 0.7.5 Keys are lowercase in a Response object, regardless of how Response object is constructed. Add control so that Authorization: headers aren't forwarded on a 3xx response by default. Set the reason correctly when running on App Engine. Patch from Alain Vongsouvanh. Reviewed in http://codereview.appspot.com/6422051/ Fix proxy socks for SSL connections. Fixes issue #199. You can now set httplib2.RETRIES to the number of retries before a request is considered to fail It is set to a default of 2 to mimic the traditional behavior of httplib2. The following issues have been addressed: 223 HEAD requests fail calling the close() method of ResponseDict instance. 222 Can't disable cert validation in appengine 204 Credentials can leak in HTTP redirects 210 Different API between Python 2 and Python 3 version breaks wsgi_intercept 214 ValueError on malformated cache entries 204 Credentials can leak in HTTP redirects 0.7.3 ProxyInfo objects now can construct themselves from environment variables commonly-used in Unix environments. By default, the Http class will construct a ProxyInfo instance based on these environment variables. To achieve the previous behavior, where environment variables are ignored, pass proxy_info=None to Http(). The following issues have been addressed: Issue 159: automatic detection of proxy configuration. Issue 179: Allow unicode in proxy hostname. 
Issue 194: Added support for setuptools. Fixes for HTTP CONNECT proxies. 0.7.1 Fix failure to install cacerts.txt for 2.x installs. 0.7.0 The two major changes in this release are SSL Certificate checking and App Engine support. By default the certificates of an HTTPS connection are checked, but that can be disabled via disable_ssl_certificate_validation. The second change is that on App Engine there is a new connection object that utilizes the urlfetch capabilities on App Engine, including setting timeouts and validating certificates. The following issues have been addressed: Fixes issue 72. Always lowercase authorization header. Fix issue 47. Redirects that become a GET should not have a body. Fixes issue 19. Set Content-location on redirected HEAD requests Fixes issue 139. Redirect with a GET on 302 regardless of the originating method. Fixes issue 138. Handle unicode in headers when writing and retrieving cache entries. Who says headers have to be ASCII! Add certificate validation. Work initially started by Christoph Kern. Set a version number. Fixes issue # 135. Sync to latest version of socks.py Add gzip to the user-agent, in case we are making a request to an app engine project: http://code.google.com/appengine/kb/general.html#compression Uses a custom httplib shim on App Engine to wrap urlfetch, as opposed Add default support for optimistic concurrency on PATCH requests Fixes issue 126. IPv6 under various conditions would fail. Fixes issue 131. Handle socket.timeout's that occur during send. proxy support: degrade gracefully when socket.socket is unavailable 0.6.0 The following issues have been addressed: #51 - Failure to handle server legitimately closing connection before request body is fully sent #77 - Duplicated caching test #65 - Transform _normalize_headers into a method of Http class #45 - Vary header #73 - All files in Mercurial are executable #81 - Have a useful .hgignore #78 - Add release tags to the Mercurial repository #67 - HEAD requests cause next request to be retried Mostly bug fixes, the big enhancement is the addition of proper Vary: header handling. Thanks to Chris Dent for that change. The other big change is the build process for distributions so that both python2 and python3 are included in the same .tar.gz/.zip file. 0.5.0 Added Python 3 support Fixed the following bugs: #12 - Cache-Control: only-if-cached incorrectly does request if item not in cache #39 - Deprecation warnings in Python 2.6 #54 - Http.request fails accesing Google account via http proxy #56 - Block on response.read() for HEAD requests. #57 - Timeout ignore for Python 2.6 #58 - Fixed parsing of Cache-Control: header to make it more robust Also fixed a deprecation warning that appeared between Python 3.0 and 3.1. 0.4.0 Added support for proxies if the Socksipy module is installed. Fixed bug with some HEAD responses having content-length set to zero incorrectly. Fixed most except's to catch a specific exception. Added 'connection_type' parameter to Http.request(). The default for 'force_exception_to_status_code' was changed to False. Defaulting to True was causing quite a bit of confusion. 0.3.0 Calling Http.request() with a relative URI, as opposed to an absolute URI, will now throw a specific exception. Http() now has an additional optional parameter for the socket timeout. Exceptions can now be forced into responses. That is, instead of throwing an exception, a good httlib2.Response object is returned that describe the error with an appropriate status code. 
Many improvements to the file cache: 1. The names in the cache are now much less opaque, which should help with debugging. 2. The disk cache is now Apache mod_asis compatible. 3. A Content-Location: header is supplied and stored in the cache which points to the original requested URI. User supplied If-* headers now override httplib2 supplied versions. IRIs are now fully supported. Note that they MUST be passed in as unicode objects. Http.add_credentials() now takes an optional domain to restrict the credentials to being only used on that domain. Added Http.add_certificate() which allows setting a key and cert for SSL connnections. Many other bugs fixed. 0.2.0 Added support for Google Auth. Added experimental support for HMACDigest. Added support for a pluggable caching system. Now supports the old system of using the file system and now memcached. Added httplib2.debuglevel which turns on debugging. Change Response._previous to Response.previous. Addded Http.follow_all_redirects which forces httplib2 to follow all redirects, as opposed to following only the safe redirects. This makes the GData protocol easier to use. All known bugs fixed to date. 0.1.1 Fixed several bugs raised by James Antill: 1. HEAD didn't get an Accept: header added like GET. 2. HEAD requests did not use the cache. 3. GET requests with Range: headers would erroneously return a full cached response. 4. Subsequent requests to resources that had timed out would raise an exception. And one feature request for 'method' to default to GET. Xavier Verges Farrero supplied what I needed to make the library work with Python 2.3. I added distutils based setup.py. 0.1 Rev 86 Initial Release httplib2-0.8/MANIFEST.in0000640017135500116100000000015612115724726014346 0ustar jcgregorioengrecursive-include python2 *.py *.txt recursive-include python3 *.py *.txt include python2/httplib2/test/*.txt httplib2-0.8/setup.py0000750017135500116100000000447212115724726014331 0ustar jcgregorioengtry: from setuptools import setup except ImportError: from distutils.core import setup import sys pkgdir = {'': 'python%s' % sys.version_info[0]} VERSION = '0.8' setup(name='httplib2', version=VERSION, author='Joe Gregorio', author_email='joe@bitworking.org', url='http://code.google.com/p/httplib2/', download_url='http://httplib2.googlecode.com/files/httplib2-%s.tar.gz' % VERSION, description='A comprehensive HTTP client library.', license='MIT', long_description=""" A comprehensive HTTP client library, ``httplib2`` supports many features left out of other HTTP libraries. **HTTP and HTTPS** HTTPS support is only available if the socket module was compiled with SSL support. **Keep-Alive** Supports HTTP 1.1 Keep-Alive, keeping the socket open and performing multiple requests over the same connection if possible. **Authentication** The following three types of HTTP Authentication are supported. These can be used over both HTTP and HTTPS. * Digest * Basic * WSSE **Caching** The module can optionally operate with a private cache that understands the Cache-Control: header and uses both the ETag and Last-Modified cache validators. Both file system and memcached based caches are supported. **All Methods** The module can handle any HTTP request method, not just GET and POST. **Redirects** Automatically follows 3XX redirects on GETs. **Compression** Handles both 'deflate' and 'gzip' types of compression. **Lost update support** Automatically adds back ETags into PUT requests to resources we have already cached. 
This implements Section 3.2 of Detecting the Lost Update Problem Using Unreserved Checkout

**Unit Tested**

A large and growing set of unit tests.
""",
      package_dir=pkgdir,
      packages=['httplib2'],
      package_data={'httplib2': ['*.txt']},
      classifiers=[
          'Development Status :: 4 - Beta',
          'Environment :: Web Environment',
          'Intended Audience :: Developers',
          'License :: OSI Approved :: MIT License',
          'Operating System :: OS Independent',
          'Programming Language :: Python',
          'Programming Language :: Python :: 3',
          'Topic :: Internet :: WWW/HTTP',
          'Topic :: Software Development :: Libraries',
      ],
      )
httplib2-0.8/python3/0000750017135500116100000000000012115724726014211 5ustar jcgregorioeng
httplib2-0.8/python3/README0000640017135500116100000000473512115724726015103 0ustar jcgregorioeng
httplib2 for Python 3

This directory contains a port of httplib2 to Python 3. As you may
know, Python 3 is not backward-compatible with Python 2. The biggest
change in Python 3 (that affects httplib2) is the distinction between
bytes and strings. To successfully use httplib2 for Python 3, you
absolutely must understand the following sentence:

** THE RESPONSE HEADERS ARE STRINGS, BUT THE CONTENT BODY IS BYTES **

Example:

>>> import httplib2, pprint
>>> h = httplib2.Http(".cache")
>>> (resp_headers, content) = h.request("http://example.org/", "GET")
>>> pprint.pprint(resp_headers)
{'accept-ranges': 'bytes',
 'connection': 'close',
 'content-length': '438',
 'content-location': 'http://example.org/',
 'content-type': 'text/html; charset=UTF-8',
 'date': 'Fri, 29 May 2009 03:57:29 GMT',
 'etag': '"b80f4-1b6-80bfd280"',
 'last-modified': 'Tue, 15 Nov 2005 13:24:10 GMT',
 'server': 'Apache/2.2.3 (CentOS)',
 'status': '200'}
>>> type(content)
<class 'bytes'>
>>> content[:49]
b'<HTML>\r\n<HEAD>\r\n  <TITLE>Example Web Page</TITLE>'

Further reading:

* http://diveintopython3.org/strings.html
* http://docs.python.org/3.0/whatsnew/3.0.html#text-vs-data-instead-of-unicode-vs-8-bit
* http://docs.python.org/3.0/howto/unicode.html

--------------------------------------------------------------------
Httplib2 Software License

Copyright (c) 2006 by Joe Gregorio
Copyright (c) 2009 by Mark Pilgrim

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
httplib2-0.8/python3/httplib2test.py0000750017135500116100000022047512115724726017226 0ustar jcgregorioeng
#!/usr/bin/env python3
"""
httplib2test

A set of unit tests for httplib2.py.
Requires Python 3.0 or later """ __author__ = "Joe Gregorio (joe@bitworking.org)" __copyright__ = "Copyright 2006, Joe Gregorio" __contributors__ = ["Mark Pilgrim"] __license__ = "MIT" __history__ = """ """ __version__ = "0.2 ($Rev: 118 $)" import base64 import http.client import httplib2 import io import os import pickle import socket import ssl import sys import time import unittest import urllib.parse # The test resources base uri base = 'http://bitworking.org/projects/httplib2/test/' #base = 'http://localhost/projects/httplib2/test/' cacheDirName = ".cache" class CredentialsTest(unittest.TestCase): def test(self): c = httplib2.Credentials() c.add("joe", "password") self.assertEqual(("joe", "password"), list(c.iter("bitworking.org"))[0]) self.assertEqual(("joe", "password"), list(c.iter(""))[0]) c.add("fred", "password2", "wellformedweb.org") self.assertEqual(("joe", "password"), list(c.iter("bitworking.org"))[0]) self.assertEqual(1, len(list(c.iter("bitworking.org")))) self.assertEqual(2, len(list(c.iter("wellformedweb.org")))) self.assertTrue(("fred", "password2") in list(c.iter("wellformedweb.org"))) c.clear() self.assertEqual(0, len(list(c.iter("bitworking.org")))) c.add("fred", "password2", "wellformedweb.org") self.assertTrue(("fred", "password2") in list(c.iter("wellformedweb.org"))) self.assertEqual(0, len(list(c.iter("bitworking.org")))) self.assertEqual(0, len(list(c.iter("")))) class ParserTest(unittest.TestCase): def testFromStd66(self): self.assertEqual( ('http', 'example.com', '', None, None ), httplib2.parse_uri("http://example.com")) self.assertEqual( ('https', 'example.com', '', None, None ), httplib2.parse_uri("https://example.com")) self.assertEqual( ('https', 'example.com:8080', '', None, None ), httplib2.parse_uri("https://example.com:8080")) self.assertEqual( ('http', 'example.com', '/', None, None ), httplib2.parse_uri("http://example.com/")) self.assertEqual( ('http', 'example.com', '/path', None, None ), httplib2.parse_uri("http://example.com/path")) self.assertEqual( ('http', 'example.com', '/path', 'a=1&b=2', None ), httplib2.parse_uri("http://example.com/path?a=1&b=2")) self.assertEqual( ('http', 'example.com', '/path', 'a=1&b=2', 'fred' ), httplib2.parse_uri("http://example.com/path?a=1&b=2#fred")) self.assertEqual( ('http', 'example.com', '/path', 'a=1&b=2', 'fred' ), httplib2.parse_uri("http://example.com/path?a=1&b=2#fred")) class UrlNormTest(unittest.TestCase): def test(self): self.assertEqual( "http://example.org/", httplib2.urlnorm("http://example.org")[-1]) self.assertEqual( "http://example.org/", httplib2.urlnorm("http://EXAMple.org")[-1]) self.assertEqual( "http://example.org/?=b", httplib2.urlnorm("http://EXAMple.org?=b")[-1]) self.assertEqual( "http://example.org/mypath?a=b", httplib2.urlnorm("http://EXAMple.org/mypath?a=b")[-1]) self.assertEqual( "http://localhost:80/", httplib2.urlnorm("http://localhost:80")[-1]) self.assertEqual( httplib2.urlnorm("http://localhost:80/"), httplib2.urlnorm("HTTP://LOCALHOST:80")) try: httplib2.urlnorm("/") self.fail("Non-absolute URIs should raise an exception") except httplib2.RelativeURIError: pass class UrlSafenameTest(unittest.TestCase): def test(self): # Test that different URIs end up generating different safe names self.assertEqual( "example.org,fred,a=b,58489f63a7a83c3b7794a6a398ee8b1f", httplib2.safename("http://example.org/fred/?a=b")) self.assertEqual( "example.org,fred,a=b,8c5946d56fec453071f43329ff0be46b", httplib2.safename("http://example.org/fred?/a=b")) self.assertEqual( 
"www.example.org,fred,a=b,499c44b8d844a011b67ea2c015116968", httplib2.safename("http://www.example.org/fred?/a=b")) self.assertEqual( httplib2.safename(httplib2.urlnorm("http://www")[-1]), httplib2.safename(httplib2.urlnorm("http://WWW")[-1])) self.assertEqual( "www.example.org,fred,a=b,692e843a333484ce0095b070497ab45d", httplib2.safename("https://www.example.org/fred?/a=b")) self.assertNotEqual( httplib2.safename("http://www"), httplib2.safename("https://www")) # Test the max length limits uri = "http://" + ("w" * 200) + ".org" uri2 = "http://" + ("w" * 201) + ".org" self.assertNotEqual( httplib2.safename(uri2), httplib2.safename(uri)) # Max length should be 200 + 1 (",") + 32 self.assertEqual(233, len(httplib2.safename(uri2))) self.assertEqual(233, len(httplib2.safename(uri))) # Unicode if sys.version_info >= (2,3): self.assertEqual( "xn--http,-4y1d.org,fred,a=b,579924c35db315e5a32e3d9963388193", httplib2.safename("http://\u2304.org/fred/?a=b")) class _MyResponse(io.BytesIO): def __init__(self, body, **kwargs): io.BytesIO.__init__(self, body) self.headers = kwargs def items(self): return self.headers.items() def iteritems(self): return iter(self.headers.items()) class _MyHTTPConnection(object): "This class is just a mock of httplib.HTTPConnection used for testing" def __init__(self, host, port=None, key_file=None, cert_file=None, strict=None, timeout=None, proxy_info=None): self.host = host self.port = port self.timeout = timeout self.log = "" self.sock = None def set_debuglevel(self, level): pass def connect(self): "Connect to a host on a given port." pass def close(self): pass def request(self, method, request_uri, body, headers): pass def getresponse(self): return _MyResponse(b"the body", status="200") class HttpTest(unittest.TestCase): def setUp(self): if os.path.exists(cacheDirName): [os.remove(os.path.join(cacheDirName, file)) for file in os.listdir(cacheDirName)] self.http = httplib2.Http(cacheDirName) self.http.clear_credentials() def testIPv6NoSSL(self): try: self.http.request("http://[::1]/") except socket.gaierror: self.fail("should get the address family right for IPv6") except socket.error: # Even if IPv6 isn't installed on a machine it should just raise socket.error pass def testIPv6SSL(self): try: self.http.request("https://[::1]/") except socket.gaierror: self.fail("should get the address family right for IPv6") except socket.error: # Even if IPv6 isn't installed on a machine it should just raise socket.error pass def testConnectionType(self): self.http.force_exception_to_status_code = False response, content = self.http.request("http://bitworking.org", connection_type=_MyHTTPConnection) self.assertEqual(response['content-location'], "http://bitworking.org") self.assertEqual(content, b"the body") def testGetUnknownServer(self): self.http.force_exception_to_status_code = False try: self.http.request("http://fred.bitworking.org/") self.fail("An httplib2.ServerNotFoundError Exception must be thrown on an unresolvable server.") except httplib2.ServerNotFoundError: pass # Now test with exceptions turned off self.http.force_exception_to_status_code = True (response, content) = self.http.request("http://fred.bitworking.org/") self.assertEqual(response['content-type'], 'text/plain') self.assertTrue(content.startswith(b"Unable to find")) self.assertEqual(response.status, 400) def testGetConnectionRefused(self): self.http.force_exception_to_status_code = False try: self.http.request("http://localhost:7777/") self.fail("An socket.error exception must be thrown on Connection 
Refused.") except socket.error: pass # Now test with exceptions turned off self.http.force_exception_to_status_code = True (response, content) = self.http.request("http://localhost:7777/") self.assertEqual(response['content-type'], 'text/plain') self.assertTrue(b"Connection refused" in content) self.assertEqual(response.status, 400) def testGetIRI(self): if sys.version_info >= (2,3): uri = urllib.parse.urljoin(base, "reflector/reflector.cgi?d=\N{CYRILLIC CAPITAL LETTER DJE}") (response, content) = self.http.request(uri, "GET") d = self.reflector(content) self.assertTrue('QUERY_STRING' in d) self.assertTrue(d['QUERY_STRING'].find('%D0%82') > 0) def testGetIsDefaultMethod(self): # Test that GET is the default method uri = urllib.parse.urljoin(base, "methods/method_reflector.cgi") (response, content) = self.http.request(uri) self.assertEqual(response['x-method'], "GET") def testDifferentMethods(self): # Test that all methods can be used uri = urllib.parse.urljoin(base, "methods/method_reflector.cgi") for method in ["GET", "PUT", "DELETE", "POST"]: (response, content) = self.http.request(uri, method, body=b" ") self.assertEqual(response['x-method'], method) def testHeadRead(self): # Test that we don't try to read the response of a HEAD request # since httplib blocks response.read() for HEAD requests. # Oddly enough this doesn't appear as a problem when doing HEAD requests # against Apache servers. uri = "http://www.google.com/" (response, content) = self.http.request(uri, "HEAD") self.assertEqual(response.status, 200) self.assertEqual(content, b"") def testGetNoCache(self): # Test that can do a GET w/o the cache turned on. http = httplib2.Http() uri = urllib.parse.urljoin(base, "304/test_etag.txt") (response, content) = http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response.previous, None) def testGetOnlyIfCachedCacheHit(self): # Test that can do a GET with cache and 'only-if-cached' uri = urllib.parse.urljoin(base, "304/test_etag.txt") (response, content) = self.http.request(uri, "GET") (response, content) = self.http.request(uri, "GET", headers={'cache-control': 'only-if-cached'}) self.assertEqual(response.fromcache, True) self.assertEqual(response.status, 200) def testGetOnlyIfCachedCacheMiss(self): # Test that can do a GET with no cache with 'only-if-cached' uri = urllib.parse.urljoin(base, "304/test_etag.txt") (response, content) = self.http.request(uri, "GET", headers={'cache-control': 'only-if-cached'}) self.assertEqual(response.fromcache, False) self.assertEqual(response.status, 504) def testGetOnlyIfCachedNoCacheAtAll(self): # Test that can do a GET with no cache with 'only-if-cached' # Of course, there might be an intermediary beyond us # that responds to the 'only-if-cached', so this # test can't really be guaranteed to pass. 
http = httplib2.Http() uri = urllib.parse.urljoin(base, "304/test_etag.txt") (response, content) = http.request(uri, "GET", headers={'cache-control': 'only-if-cached'}) self.assertEqual(response.fromcache, False) self.assertEqual(response.status, 504) def testUserAgent(self): # Test that we provide a default user-agent uri = urllib.parse.urljoin(base, "user-agent/test.cgi") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertTrue(content.startswith(b"Python-httplib2/")) def testUserAgentNonDefault(self): # Test that the default user-agent can be over-ridden uri = urllib.parse.urljoin(base, "user-agent/test.cgi") (response, content) = self.http.request(uri, "GET", headers={'User-Agent': 'fred/1.0'}) self.assertEqual(response.status, 200) self.assertTrue(content.startswith(b"fred/1.0")) def testGet300WithLocation(self): # Test the we automatically follow 300 redirects if a Location: header is provided uri = urllib.parse.urljoin(base, "300/with-location-header.asis") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(content, b"This is the final destination.\n") self.assertEqual(response.previous.status, 300) self.assertEqual(response.previous.fromcache, False) # Confirm that the intermediate 300 is not cached (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(content, b"This is the final destination.\n") self.assertEqual(response.previous.status, 300) self.assertEqual(response.previous.fromcache, False) def testGet300WithLocationNoRedirect(self): # Test the we automatically follow 300 redirects if a Location: header is provided self.http.follow_redirects = False uri = urllib.parse.urljoin(base, "300/with-location-header.asis") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 300) def testGet300WithoutLocation(self): # Not giving a Location: header in a 300 response is acceptable # In which case we just return the 300 response uri = urllib.parse.urljoin(base, "300/without-location-header.asis") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 300) self.assertTrue(response['content-type'].startswith("text/html")) self.assertEqual(response.previous, None) def testGet301(self): # Test that we automatically follow 301 redirects # and that we cache the 301 response uri = urllib.parse.urljoin(base, "301/onestep.asis") destination = urllib.parse.urljoin(base, "302/final-destination.txt") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertTrue('content-location' in response) self.assertEqual(response['content-location'], destination) self.assertEqual(content, b"This is the final destination.\n") self.assertEqual(response.previous.status, 301) self.assertEqual(response.previous.fromcache, False) (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response['content-location'], destination) self.assertEqual(content, b"This is the final destination.\n") self.assertEqual(response.previous.status, 301) self.assertEqual(response.previous.fromcache, True) def testHead301(self): # Test that we automatically follow 301 redirects uri = urllib.parse.urljoin(base, "301/onestep.asis") (response, content) = self.http.request(uri, "HEAD") self.assertEqual(response.status, 200) self.assertEqual(response.previous.status, 301) self.assertEqual(response.previous.fromcache, False) 
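    # Illustrative sketch (not part of the original test suite): the redirect
    # history asserted above is exposed through Response.previous, so a client
    # can walk the whole chain of a redirected request, e.g.:
    #
    #   response, content = h.request(uri, "GET")
    #   while response.previous is not None:
    #       print(response.previous.status)   # e.g. 301
    #       response = response.previous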
    def testGet301NoRedirect(self):
        # Test that we do not follow a 301 redirect
        # when follow_redirects is turned off.
        self.http.follow_redirects = False
        uri = urllib.parse.urljoin(base, "301/onestep.asis")
        destination = urllib.parse.urljoin(base, "302/final-destination.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 301)

    def testGet302(self):
        # Test that we automatically follow 302 redirects
        # and that we DO NOT cache the 302 response
        uri = urllib.parse.urljoin(base, "302/onestep.asis")
        destination = urllib.parse.urljoin(base, "302/final-destination.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response['content-location'], destination)
        self.assertEqual(content, b"This is the final destination.\n")
        self.assertEqual(response.previous.status, 302)
        self.assertEqual(response.previous.fromcache, False)

        uri = urllib.parse.urljoin(base, "302/onestep.asis")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)
        self.assertEqual(response['content-location'], destination)
        self.assertEqual(content, b"This is the final destination.\n")
        self.assertEqual(response.previous.status, 302)
        self.assertEqual(response.previous.fromcache, False)
        self.assertEqual(response.previous['content-location'], uri)

        uri = urllib.parse.urljoin(base, "302/twostep.asis")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)
        self.assertEqual(content, b"This is the final destination.\n")
        self.assertEqual(response.previous.status, 302)
        self.assertEqual(response.previous.fromcache, False)

    def testGet302RedirectionLimit(self):
        # Test that we can set a lower redirection limit
        # and that we raise an exception when we exceed
        # that limit.
        self.http.force_exception_to_status_code = False

        uri = urllib.parse.urljoin(base, "302/twostep.asis")
        try:
            (response, content) = self.http.request(uri, "GET", redirections = 1)
            self.fail("This should not happen")
        except httplib2.RedirectLimit:
            pass
        except Exception:
            self.fail("Threw wrong kind of exception ")

        # Re-run the test without the exceptions
        self.http.force_exception_to_status_code = True

        (response, content) = self.http.request(uri, "GET", redirections = 1)
        self.assertEqual(response.status, 500)
        self.assertTrue(response.reason.startswith("Redirected more"))
        self.assertEqual("302", response['status'])
        self.assertTrue(content.startswith(b""))
        self.assertTrue(response.previous != None)
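
    # A hedged sketch, not in the original suite, of the two error-reporting
    # modes exercised above; the URL is an assumption.
    def example_exceptions_as_status(self):
        h = httplib2.Http()
        h.force_exception_to_status_code = True
        # With this flag set, failures such as an exceeded redirect limit
        # come back as a synthesized response (e.g. status 500 or 408)
        # instead of raising, so callers can handle everything through
        # the (response, content) pair.
        resp, body = h.request("http://example.org/looping-redirect", "GET",
                               redirections=1)

    def testGet302NoLocation(self):
        # Test that we throw an exception when we get
        # a 302 with no Location: header.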
        self.http.force_exception_to_status_code = False
        uri = urllib.parse.urljoin(base, "302/no-location.asis")
        try:
            (response, content) = self.http.request(uri, "GET")
            self.fail("Should never reach here")
        except httplib2.RedirectMissingLocation:
            pass
        except Exception:
            self.fail("Threw wrong kind of exception ")

        # Re-run the test without the exceptions
        self.http.force_exception_to_status_code = True

        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 500)
        self.assertTrue(response.reason.startswith("Redirected but"))
        self.assertEqual("302", response['status'])
        self.assertTrue(content.startswith(b"This is content"))

    def testGet301ViaHttps(self):
        # Google always redirects to http://google.com
        (response, content) = self.http.request("https://code.google.com/apis/", "GET")
        self.assertEqual(200, response.status)
        self.assertEqual(301, response.previous.status)

    def testGetViaHttps(self):
        # Test that we can handle HTTPS
        (response, content) = self.http.request("https://google.com/adsense/", "GET")
        self.assertEqual(200, response.status)

    def testGetViaHttpsSpecViolationOnLocation(self):
        # Test that we follow redirects through HTTPS
        # even if they violate the spec by including
        # a relative Location: header instead of an
        # absolute one.
        (response, content) = self.http.request("https://google.com/adsense", "GET")
        self.assertEqual(200, response.status)
        self.assertNotEqual(None, response.previous)

    def testGetViaHttpsKeyCert(self):
        # At this point I can only test
        # that the key and cert files are passed in
        # correctly to httplib. It would be nice to have
        # a real https endpoint to test against.
        http = httplib2.Http(timeout=2)

        http.add_certificate("akeyfile", "acertfile", "bitworking.org")
        try:
            (response, content) = http.request("https://bitworking.org", "GET")
        except AttributeError:
            self.assertEqual(http.connections["https:bitworking.org"].key_file, "akeyfile")
            self.assertEqual(http.connections["https:bitworking.org"].cert_file, "acertfile")
        except IOError:
            # Skip on 3.2
            pass

        try:
            (response, content) = http.request("https://notthere.bitworking.org", "GET")
        except httplib2.ServerNotFoundError:
            self.assertEqual(http.connections["https:notthere.bitworking.org"].key_file, None)
            self.assertEqual(http.connections["https:notthere.bitworking.org"].cert_file, None)
        except IOError:
            # Skip on 3.2
            pass

    def testSslCertValidation(self):
        # Test that we get an IOError when specifying a non-existent CA
        # certs file.
        http = httplib2.Http(ca_certs='/nosuchfile')
        self.assertRaises(IOError, http.request, "https://www.google.com/", "GET")

        # Test that we get an ssl.SSLError if we try to access
        # https://www.google.com using a CA certs file that doesn't contain
        # the CA Google uses (i.e., simulating a cert that's not signed by a
        # trusted CA).
        other_ca_certs = os.path.join(
            os.path.dirname(os.path.abspath(httplib2.__file__ )), "test",
            "other_cacerts.txt")
        http = httplib2.Http(ca_certs=other_ca_certs)
        self.assertRaises(ssl.SSLError, http.request, "https://www.google.com/", "GET")

    def testSniHostnameValidation(self):
        self.http.request("https://google.com/", method="GET")
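
    # A brief sketch, not in the original suite, of pinning the trust root
    # used for certificate verification; the bundle path is an assumption.
    def example_custom_ca_bundle(self):
        h = httplib2.Http(ca_certs="/etc/ssl/certs/ca-certificates.crt")
        # Verification failures surface as ssl.SSLError unless certificate
        # validation was explicitly disabled when constructing Http().
        resp, body = h.request("https://example.org/", "GET")

    def testGet303(self):
        # Do a follow-up GET on a Location: header
        # returned from a POST that gave a 303.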
uri = urllib.parse.urljoin(base, "303/303.cgi") (response, content) = self.http.request(uri, "POST", " ") self.assertEqual(response.status, 200) self.assertEqual(content, b"This is the final destination.\n") self.assertEqual(response.previous.status, 303) def testGet303NoRedirect(self): # Do a follow-up GET on a Location: header # returned from a POST that gave a 303. self.http.follow_redirects = False uri = urllib.parse.urljoin(base, "303/303.cgi") (response, content) = self.http.request(uri, "POST", " ") self.assertEqual(response.status, 303) def test303ForDifferentMethods(self): # Test that all methods can be used uri = urllib.parse.urljoin(base, "303/redirect-to-reflector.cgi") for (method, method_on_303) in [("PUT", "GET"), ("DELETE", "GET"), ("POST", "GET"), ("GET", "GET"), ("HEAD", "GET")]: (response, content) = self.http.request(uri, method, body=b" ") self.assertEqual(response['x-method'], method_on_303) def testGet304(self): # Test that we use ETags properly to validate our cache uri = urllib.parse.urljoin(base, "304/test_etag.txt") (response, content) = self.http.request(uri, "GET") self.assertNotEqual(response['etag'], "") (response, content) = self.http.request(uri, "GET") (response, content) = self.http.request(uri, "GET", headers = {'cache-control': 'must-revalidate'}) self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, True) cache_file_name = os.path.join(cacheDirName, httplib2.safename(httplib2.urlnorm(uri)[-1])) f = open(cache_file_name, "r") status_line = f.readline() f.close() self.assertTrue(status_line.startswith("status:")) (response, content) = self.http.request(uri, "HEAD") self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, True) (response, content) = self.http.request(uri, "GET", headers = {'range': 'bytes=0-0'}) self.assertEqual(response.status, 206) self.assertEqual(response.fromcache, False) def testGetIgnoreEtag(self): # Test that we can forcibly ignore ETags uri = urllib.parse.urljoin(base, "reflector/reflector.cgi") (response, content) = self.http.request(uri, "GET") self.assertNotEqual(response['etag'], "") (response, content) = self.http.request(uri, "GET", headers = {'cache-control': 'max-age=0'}) d = self.reflector(content) self.assertTrue('HTTP_IF_NONE_MATCH' in d) self.http.ignore_etag = True (response, content) = self.http.request(uri, "GET", headers = {'cache-control': 'max-age=0'}) d = self.reflector(content) self.assertEqual(response.fromcache, False) self.assertFalse('HTTP_IF_NONE_MATCH' in d) def testOverrideEtag(self): # Test that we can forcibly ignore ETags uri = urllib.parse.urljoin(base, "reflector/reflector.cgi") (response, content) = self.http.request(uri, "GET") self.assertNotEqual(response['etag'], "") (response, content) = self.http.request(uri, "GET", headers = {'cache-control': 'max-age=0'}) d = self.reflector(content) self.assertTrue('HTTP_IF_NONE_MATCH' in d) self.assertNotEqual(d['HTTP_IF_NONE_MATCH'], "fred") (response, content) = self.http.request(uri, "GET", headers = {'cache-control': 'max-age=0', 'if-none-match': 'fred'}) d = self.reflector(content) self.assertTrue('HTTP_IF_NONE_MATCH' in d) self.assertEqual(d['HTTP_IF_NONE_MATCH'], "fred") #MAP-commented this out because it consistently fails # def testGet304EndToEnd(self): # # Test that end to end headers get overwritten in the cache # uri = urllib.parse.urljoin(base, "304/end2end.cgi") # (response, content) = self.http.request(uri, "GET") # self.assertNotEqual(response['etag'], "") # old_date = response['date'] # 
#MAP-commented this out because it consistently fails
#    def testGet304EndToEnd(self):
#        # Test that end to end headers get overwritten in the cache
#        uri = urllib.parse.urljoin(base, "304/end2end.cgi")
#        (response, content) = self.http.request(uri, "GET")
#        self.assertNotEqual(response['etag'], "")
#        old_date = response['date']
#        time.sleep(2)
#
#        (response, content) = self.http.request(uri, "GET", headers = {'Cache-Control': 'max-age=0'})
#        # The response should be from the cache, but the Date: header should be updated.
#        new_date = response['date']
#        self.assertNotEqual(new_date, old_date)
#        self.assertEqual(response.status, 200)
#        self.assertEqual(response.fromcache, True)

    def testGet304LastModified(self):
        # Test that we can still handle a 304
        # by only using the last-modified cache validator.
        uri = urllib.parse.urljoin(base, "304/last-modified-only/last-modified-only.txt")

        (response, content) = self.http.request(uri, "GET")
        self.assertNotEqual(response['last-modified'], "")
        (response, content) = self.http.request(uri, "GET")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)

    def testGet307(self):
        # Test that we do follow 307 redirects but
        # do not cache the 307
        uri = urllib.parse.urljoin(base, "307/onestep.asis")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(content, b"This is the final destination.\n")
        self.assertEqual(response.previous.status, 307)
        self.assertEqual(response.previous.fromcache, False)

        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)
        self.assertEqual(content, b"This is the final destination.\n")
        self.assertEqual(response.previous.status, 307)
        self.assertEqual(response.previous.fromcache, False)

    def testGet410(self):
        # Test that we pass 410's through
        uri = urllib.parse.urljoin(base, "410/410.asis")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 410)

    def testVaryHeaderSimple(self):
        """
        RFC 2616 13.6
        When the cache receives a subsequent request whose Request-URI
        specifies one or more cache entries including a Vary header field,
        the cache MUST NOT use such a cache entry to construct a response
        to the new request unless all of the selecting request-headers
        present in the new request match the corresponding stored
        request-headers in the original request.
        """
        # test that the vary header is sent
        uri = urllib.parse.urljoin(base, "vary/accept.asis")
        (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'})
        self.assertEqual(response.status, 200)
        self.assertTrue('vary' in response)

        # get the resource again, from the cache since the Accept header in
        # this request is the same as in the first request
        (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True, msg="Should be from cache")

        # get the resource again, not from cache since the Accept header does not match
        (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/html'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False, msg="Should not be from cache")

        # get the resource again, without any Accept header, so again no match
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False, msg="Should not be from cache")
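
    # A compact sketch, not in the original suite, of how Vary affects cache
    # hits; the URL and header values are assumptions.
    def example_vary_cache_key(self):
        h = httplib2.Http(".cache")  # assumed cache directory
        # If the server replies with "Vary: Accept", the cached entry is only
        # reused when a later request carries the same Accept value.
        h.request("http://example.org/doc", "GET", headers={'Accept': 'text/plain'})
        resp, _ = h.request("http://example.org/doc", "GET", headers={'Accept': 'text/plain'})
        # resp.fromcache would be True here, but False for 'text/html'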
    def testNoVary(self):
        pass
        # when there is no vary, a different Accept header (e.g.)
        # should not impact whether the cache is used
        # test that the vary header is not sent
#        uri = urllib.parse.urljoin(base, "vary/no-vary.asis")
#        (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'})
#        self.assertEqual(response.status, 200)
#        self.assertFalse('vary' in response)
#
#        (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'})
#        self.assertEqual(response.status, 200)
#        self.assertEqual(response.fromcache, True, msg="Should be from cache")
#
#        (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/html'})
#        self.assertEqual(response.status, 200)
#        self.assertEqual(response.fromcache, True, msg="Should be from cache")

    def testVaryHeaderDouble(self):
        uri = urllib.parse.urljoin(base, "vary/accept-double.asis")
        (response, content) = self.http.request(uri, "GET", headers={
            'Accept': 'text/plain', 'Accept-Language': 'da, en-gb;q=0.8, en;q=0.7'})
        self.assertEqual(response.status, 200)
        self.assertTrue('vary' in response)

        # we are from cache
        (response, content) = self.http.request(uri, "GET", headers={
            'Accept': 'text/plain', 'Accept-Language': 'da, en-gb;q=0.8, en;q=0.7'})
        self.assertEqual(response.fromcache, True, msg="Should be from cache")

        (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)

        # get the resource again, not from cache, since the varied headers don't match exactly
        (response, content) = self.http.request(uri, "GET", headers={'Accept-Language': 'da'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False, msg="Should not be from cache")
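
    # A hedged sketch, not in the original suite: gzip and deflate responses
    # are decompressed transparently; the URL is an assumption.
    def example_transparent_gzip(self):
        h = httplib2.Http()
        # httplib2 sends Accept-Encoding and inflates the body itself; the
        # original encoding is recorded under the '-content-encoding' key
        # while 'content-encoding' is removed from the response.
        resp, body = h.request("http://example.org/compressed", "GET")

    def testVaryUnusedHeader(self):
        # A header's value is not considered to vary if it's not used at all.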
uri = urllib.parse.urljoin(base, "vary/unused-header.asis") (response, content) = self.http.request(uri, "GET", headers={ 'Accept': 'text/plain'}) self.assertEqual(response.status, 200) self.assertTrue('vary' in response) # we are from cache (response, content) = self.http.request(uri, "GET", headers={ 'Accept': 'text/plain',}) self.assertEqual(response.fromcache, True, msg="Should be from cache") def testHeadGZip(self): # Test that we don't try to decompress a HEAD response uri = urllib.parse.urljoin(base, "gzip/final-destination.txt") (response, content) = self.http.request(uri, "HEAD") self.assertEqual(response.status, 200) self.assertNotEqual(int(response['content-length']), 0) self.assertEqual(content, b"") def testGetGZip(self): # Test that we support gzip compression uri = urllib.parse.urljoin(base, "gzip/final-destination.txt") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertFalse('content-encoding' in response) self.assertTrue('-content-encoding' in response) self.assertEqual(int(response['content-length']), len(b"This is the final destination.\n")) self.assertEqual(content, b"This is the final destination.\n") def testPostAndGZipResponse(self): uri = urllib.parse.urljoin(base, "gzip/post.cgi") (response, content) = self.http.request(uri, "POST", body=" ") self.assertEqual(response.status, 200) self.assertFalse('content-encoding' in response) self.assertTrue('-content-encoding' in response) def testGetGZipFailure(self): # Test that we raise a good exception when the gzip fails self.http.force_exception_to_status_code = False uri = urllib.parse.urljoin(base, "gzip/failed-compression.asis") try: (response, content) = self.http.request(uri, "GET") self.fail("Should never reach here") except httplib2.FailedToDecompressContent: pass except Exception: self.fail("Threw wrong kind of exception") # Re-run the test with out the exceptions self.http.force_exception_to_status_code = True (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 500) self.assertTrue(response.reason.startswith("Content purported")) def testIndividualTimeout(self): uri = urllib.parse.urljoin(base, "timeout/timeout.cgi") http = httplib2.Http(timeout=1) http.force_exception_to_status_code = True (response, content) = http.request(uri) self.assertEqual(response.status, 408) self.assertTrue(response.reason.startswith("Request Timeout")) self.assertTrue(content.startswith(b"Request Timeout")) def testGetDeflate(self): # Test that we support deflate compression uri = urllib.parse.urljoin(base, "deflate/deflated.asis") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertFalse('content-encoding' in response) self.assertEqual(int(response['content-length']), len("This is the final destination.")) self.assertEqual(content, b"This is the final destination.") def testGetDeflateFailure(self): # Test that we raise a good exception when the deflate fails self.http.force_exception_to_status_code = False uri = urllib.parse.urljoin(base, "deflate/failed-compression.asis") try: (response, content) = self.http.request(uri, "GET") self.fail("Should never reach here") except httplib2.FailedToDecompressContent: pass except Exception: self.fail("Threw wrong kind of exception") # Re-run the test with out the exceptions self.http.force_exception_to_status_code = True (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 500) self.assertTrue(response.reason.startswith("Content 
purported")) def testGetDuplicateHeaders(self): # Test that duplicate headers get concatenated via ',' uri = urllib.parse.urljoin(base, "duplicate-headers/multilink.asis") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(content, b"This is content\n") self.assertEqual(response['link'].split(",")[0], '; rel="home"; title="BitWorking"') def testGetCacheControlNoCache(self): # Test Cache-Control: no-cache on requests uri = urllib.parse.urljoin(base, "304/test_etag.txt") (response, content) = self.http.request(uri, "GET") self.assertNotEqual(response['etag'], "") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, True) (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'no-cache'}) self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, False) def testGetCacheControlPragmaNoCache(self): # Test Pragma: no-cache on requests uri = urllib.parse.urljoin(base, "304/test_etag.txt") (response, content) = self.http.request(uri, "GET") self.assertNotEqual(response['etag'], "") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, True) (response, content) = self.http.request(uri, "GET", headers={'Pragma': 'no-cache'}) self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, False) def testGetCacheControlNoStoreRequest(self): # A no-store request means that the response should not be stored. uri = urllib.parse.urljoin(base, "304/test_etag.txt") (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'no-store'}) self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, False) (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'no-store'}) self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, False) def testGetCacheControlNoStoreResponse(self): # A no-store response means that the response should not be stored. uri = urllib.parse.urljoin(base, "no-store/no-store.asis") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, False) (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, False) def testGetCacheControlNoCacheNoStoreRequest(self): # Test that a no-store, no-cache clears the entry from the cache # even if it was cached previously. uri = urllib.parse.urljoin(base, "304/test_etag.txt") (response, content) = self.http.request(uri, "GET") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.fromcache, True) (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'no-store, no-cache'}) (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'no-store, no-cache'}) self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, False) def testUpdateInvalidatesCache(self): # Test that calling PUT or DELETE on a # URI that is cache invalidates that cache. 
uri = urllib.parse.urljoin(base, "304/test_etag.txt") (response, content) = self.http.request(uri, "GET") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.fromcache, True) (response, content) = self.http.request(uri, "DELETE") self.assertEqual(response.status, 405) (response, content) = self.http.request(uri, "GET") self.assertEqual(response.fromcache, False) def testUpdateUsesCachedETag(self): # Test that we natively support http://www.w3.org/1999/04/Editing/ uri = urllib.parse.urljoin(base, "conditional-updates/test.cgi") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, False) (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, True) (response, content) = self.http.request(uri, "PUT", body="foo") self.assertEqual(response.status, 200) (response, content) = self.http.request(uri, "PUT", body="foo") self.assertEqual(response.status, 412) def testUpdatePatchUsesCachedETag(self): # Test that we natively support http://www.w3.org/1999/04/Editing/ uri = urllib.parse.urljoin(base, "conditional-updates/test.cgi") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, False) (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, True) (response, content) = self.http.request(uri, "PATCH", body="foo") self.assertEqual(response.status, 200) (response, content) = self.http.request(uri, "PATCH", body="foo") self.assertEqual(response.status, 412) def testUpdateUsesCachedETagAndOCMethod(self): # Test that we natively support http://www.w3.org/1999/04/Editing/ uri = urllib.parse.urljoin(base, "conditional-updates/test.cgi") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, False) (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, True) self.http.optimistic_concurrency_methods.append("DELETE") (response, content) = self.http.request(uri, "DELETE") self.assertEqual(response.status, 200) def testUpdateUsesCachedETagOverridden(self): # Test that we natively support http://www.w3.org/1999/04/Editing/ uri = urllib.parse.urljoin(base, "conditional-updates/test.cgi") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, False) (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) self.assertEqual(response.fromcache, True) (response, content) = self.http.request(uri, "PUT", body="foo", headers={'if-match': 'fred'}) self.assertEqual(response.status, 412) def testBasicAuth(self): # Test Basic Authentication uri = urllib.parse.urljoin(base, "basic/file.txt") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 401) uri = urllib.parse.urljoin(base, "basic/") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 401) self.http.add_credentials('joe', 'password') (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) uri = urllib.parse.urljoin(base, "basic/file.txt") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) def testBasicAuthWithDomain(self): # Test Basic Authentication uri = 
urllib.parse.urljoin(base, "basic/file.txt") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 401) uri = urllib.parse.urljoin(base, "basic/") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 401) self.http.add_credentials('joe', 'password', "example.org") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 401) uri = urllib.parse.urljoin(base, "basic/file.txt") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 401) domain = urllib.parse.urlparse(base)[1] self.http.add_credentials('joe', 'password', domain) (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) uri = urllib.parse.urljoin(base, "basic/file.txt") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) def testBasicAuthTwoDifferentCredentials(self): # Test Basic Authentication with multiple sets of credentials uri = urllib.parse.urljoin(base, "basic2/file.txt") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 401) uri = urllib.parse.urljoin(base, "basic2/") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 401) self.http.add_credentials('fred', 'barney') (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) uri = urllib.parse.urljoin(base, "basic2/file.txt") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 200) def testBasicAuthNested(self): # Test Basic Authentication with resources # that are nested uri = urllib.parse.urljoin(base, "basic-nested/") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 401) uri = urllib.parse.urljoin(base, "basic-nested/subdir") (response, content) = self.http.request(uri, "GET") self.assertEqual(response.status, 401) # Now add in credentials one at a time and test. 
        self.http.add_credentials('joe', 'password')
        uri = urllib.parse.urljoin(base, "basic-nested/")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)

        uri = urllib.parse.urljoin(base, "basic-nested/subdir")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        self.http.add_credentials('fred', 'barney')
        uri = urllib.parse.urljoin(base, "basic-nested/")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)

        uri = urllib.parse.urljoin(base, "basic-nested/subdir")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)

    def testDigestAuth(self):
        # Test that we support Digest Authentication
        uri = urllib.parse.urljoin(base, "digest/")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        self.http.add_credentials('joe', 'password')
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)

        uri = urllib.parse.urljoin(base, "digest/file.txt")
        (response, content) = self.http.request(uri, "GET")

    def testDigestAuthNextNonceAndNC(self):
        # Test that if the server sets nextnonce, we reset
        # the nonce count back to 1
        uri = urllib.parse.urljoin(base, "digest/file.txt")
        self.http.add_credentials('joe', 'password')
        (response, content) = self.http.request(uri, "GET", headers = {"cache-control":"no-cache"})
        info = httplib2._parse_www_authenticate(response, 'authentication-info')
        self.assertEqual(response.status, 200)
        (response, content) = self.http.request(uri, "GET", headers = {"cache-control":"no-cache"})
        info2 = httplib2._parse_www_authenticate(response, 'authentication-info')
        self.assertEqual(response.status, 200)

        if 'nextnonce' in info:
            self.assertEqual(info2['nc'], 1)

    def testDigestAuthStale(self):
        # Test that we can handle a nonce becoming stale
        uri = urllib.parse.urljoin(base, "digest-expire/file.txt")
        self.http.add_credentials('joe', 'password')
        (response, content) = self.http.request(uri, "GET", headers = {"cache-control":"no-cache"})
        info = httplib2._parse_www_authenticate(response, 'authentication-info')
        self.assertEqual(response.status, 200)

        time.sleep(3)
        # Sleep long enough that the nonce becomes stale

        (response, content) = self.http.request(uri, "GET", headers = {"cache-control":"no-cache"})
        self.assertFalse(response.fromcache)
        self.assertTrue(response._stale_digest)
        info3 = httplib2._parse_www_authenticate(response, 'authentication-info')
        self.assertEqual(response.status, 200)

    def reflector(self, content):
        return dict( [tuple(x.split("=", 1)) for x in content.decode('utf-8').strip().split("\n")] )

    def testReflector(self):
        uri = urllib.parse.urljoin(base, "reflector/reflector.cgi")
        (response, content) = self.http.request(uri, "GET")
        d = self.reflector(content)
        self.assertTrue('HTTP_USER_AGENT' in d)

    def testConnectionClose(self):
        uri = "http://www.google.com/"
        (response, content) = self.http.request(uri, "GET")
        for c in self.http.connections.values():
            self.assertNotEqual(None, c.sock)
        (response, content) = self.http.request(uri, "GET", headers={"connection": "close"})
        for c in self.http.connections.values():
            self.assertEqual(None, c.sock)

    def testPickleHttp(self):
        pickled_http = pickle.dumps(self.http)
        new_http = pickle.loads(pickled_http)

        self.assertEqual(sorted(new_http.__dict__.keys()),
                         sorted(self.http.__dict__.keys()))
        for key in new_http.__dict__:
            if key in ('certificates', 'credentials'):
                self.assertEqual(new_http.__dict__[key].credentials,
                                 self.http.__dict__[key].credentials)
            elif key == 'cache':
                self.assertEqual(new_http.__dict__[key].cache,
                                 self.http.__dict__[key].cache)
            else:
                self.assertEqual(new_http.__dict__[key], self.http.__dict__[key])

    def testPickleHttpWithConnection(self):
        self.http.request('http://bitworking.org', connection_type=_MyHTTPConnection)
        pickled_http = pickle.dumps(self.http)
        new_http = pickle.loads(pickled_http)

        self.assertEqual(list(self.http.connections.keys()), ['http:bitworking.org'])
        self.assertEqual(new_http.connections, {})

    def testPickleCustomRequestHttp(self):
        def dummy_request(*args, **kwargs):
            # A stand-in that is never actually called; the test only
            # verifies that a request method attached to the instance is
            # not pickled.
            return None
        dummy_request.dummy_attr = 'dummy_value'

        self.http.request = dummy_request
        pickled_http = pickle.dumps(self.http)
        self.assertFalse(b"S'request'" in pickled_http)

try:
    import memcache
    class HttpTestMemCached(HttpTest):
        def setUp(self):
            self.cache = memcache.Client(['127.0.0.1:11211'], debug=0)
            #self.cache = memcache.Client(['10.0.0.4:11211'], debug=1)
            self.http = httplib2.Http(self.cache)
            self.cache.flush_all()
            # Not exactly sure why the sleep is needed here, but
            # if not present then some unit tests that rely on caching
            # fail. Memcached seems to lose some sets immediately
            # after a flush_all if the set is to a value that
            # was previously cached. (Maybe the flush is handled async?)
            time.sleep(1)
            self.http.clear_credentials()
except:
    pass

# ------------------------------------------------------------------------

class HttpPrivateTest(unittest.TestCase):

    def testParseCacheControl(self):
        # Test that we can parse the Cache-Control header
        self.assertEqual({}, httplib2._parse_cache_control({}))

        self.assertEqual({'no-cache': 1}, httplib2._parse_cache_control({'cache-control': ' no-cache'}))
        cc = httplib2._parse_cache_control({'cache-control': ' no-cache, max-age = 7200'})
        self.assertEqual(cc['no-cache'], 1)
        self.assertEqual(cc['max-age'], '7200')
        cc = httplib2._parse_cache_control({'cache-control': ' , '})
        self.assertEqual(cc[''], 1)

        try:
            cc = httplib2._parse_cache_control({'cache-control': 'Max-age=3600;post-check=1800,pre-check=3600'})
            self.assertTrue("max-age" in cc)
        except:
            self.fail("Should not throw exception")

    def testNormalizeHeaders(self):
        # Test that we normalize headers to lowercase
        h = httplib2._normalize_headers({'Cache-Control': 'no-cache', 'Other': 'Stuff'})
        self.assertTrue('cache-control' in h)
        self.assertTrue('other' in h)
        self.assertEqual('Stuff', h['other'])

    def testExpirationModelTransparent(self):
        # Test that no-cache makes our request TRANSPARENT
        response_headers = {
            'cache-control': 'max-age=7200'
        }
        request_headers = {
            'cache-control': 'no-cache'
        }
        self.assertEqual("TRANSPARENT", httplib2._entry_disposition(response_headers, request_headers))

    def testMaxAgeNonNumeric(self):
        # Test that a non-numeric max-age makes the entry STALE
        response_headers = {
            'cache-control': 'max-age=fred, min-fresh=barney'
        }
        request_headers = {
        }
        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
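
    # A tiny sketch, not in the original suite, of the freshness model these
    # tests poke at. _entry_disposition is a private helper; the header
    # values here are assumptions.
    def example_entry_disposition(self):
        response_headers = {
            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime()),
            'cache-control': 'max-age=60',
        }
        # Returns "FRESH", "STALE", or "TRANSPARENT" depending on the
        # response headers and any cache-control directives on the request.
        disposition = httplib2._entry_disposition(response_headers, {})

    def testExpirationModelNoCacheResponse(self):
        # The date and expires point to an entry that should be
        # FRESH, but the no-cache over-rides that.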
now = time.time() response_headers = { 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)), 'expires': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+4)), 'cache-control': 'no-cache' } request_headers = { } self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers)) def testExpirationModelStaleRequestMustReval(self): # must-revalidate forces STALE self.assertEqual("STALE", httplib2._entry_disposition({}, {'cache-control': 'must-revalidate'})) def testExpirationModelStaleResponseMustReval(self): # must-revalidate forces STALE self.assertEqual("STALE", httplib2._entry_disposition({'cache-control': 'must-revalidate'}, {})) def testExpirationModelFresh(self): response_headers = { 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime()), 'cache-control': 'max-age=2' } request_headers = { } self.assertEqual("FRESH", httplib2._entry_disposition(response_headers, request_headers)) time.sleep(3) self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers)) def testExpirationMaxAge0(self): response_headers = { 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime()), 'cache-control': 'max-age=0' } request_headers = { } self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers)) def testExpirationModelDateAndExpires(self): now = time.time() response_headers = { 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)), 'expires': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+2)), } request_headers = { } self.assertEqual("FRESH", httplib2._entry_disposition(response_headers, request_headers)) time.sleep(3) self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers)) def testExpiresZero(self): now = time.time() response_headers = { 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)), 'expires': "0", } request_headers = { } self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers)) def testExpirationModelDateOnly(self): now = time.time() response_headers = { 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+3)), } request_headers = { } self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers)) def testExpirationModelOnlyIfCached(self): response_headers = { } request_headers = { 'cache-control': 'only-if-cached', } self.assertEqual("FRESH", httplib2._entry_disposition(response_headers, request_headers)) def testExpirationModelMaxAgeBoth(self): now = time.time() response_headers = { 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)), 'cache-control': 'max-age=2' } request_headers = { 'cache-control': 'max-age=0' } self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers)) def testExpirationModelDateAndExpiresMinFresh1(self): now = time.time() response_headers = { 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)), 'expires': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+2)), } request_headers = { 'cache-control': 'min-fresh=2' } self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers)) def testExpirationModelDateAndExpiresMinFresh2(self): now = time.time() response_headers = { 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)), 'expires': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+4)), } request_headers = { 'cache-control': 'min-fresh=2' } self.assertEqual("FRESH", 
httplib2._entry_disposition(response_headers, request_headers)) def testParseWWWAuthenticateEmpty(self): res = httplib2._parse_www_authenticate({}) self.assertEqual(len(list(res.keys())), 0) def testParseWWWAuthenticate(self): # different uses of spaces around commas res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Test realm="test realm" , foo=foo ,bar="bar", baz=baz,qux=qux'}) self.assertEqual(len(list(res.keys())), 1) self.assertEqual(len(list(res['test'].keys())), 5) # tokens with non-alphanum res = httplib2._parse_www_authenticate({ 'www-authenticate': 'T*!%#st realm=to*!%#en, to*!%#en="quoted string"'}) self.assertEqual(len(list(res.keys())), 1) self.assertEqual(len(list(res['t*!%#st'].keys())), 2) # quoted string with quoted pairs res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Test realm="a \\"test\\" realm"'}) self.assertEqual(len(list(res.keys())), 1) self.assertEqual(res['test']['realm'], 'a "test" realm') def testParseWWWAuthenticateStrict(self): httplib2.USE_WWW_AUTH_STRICT_PARSING = 1; self.testParseWWWAuthenticate(); httplib2.USE_WWW_AUTH_STRICT_PARSING = 0; def testParseWWWAuthenticateBasic(self): res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Basic realm="me"'}) basic = res['basic'] self.assertEqual('me', basic['realm']) res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Basic realm="me", algorithm="MD5"'}) basic = res['basic'] self.assertEqual('me', basic['realm']) self.assertEqual('MD5', basic['algorithm']) res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Basic realm="me", algorithm=MD5'}) basic = res['basic'] self.assertEqual('me', basic['realm']) self.assertEqual('MD5', basic['algorithm']) def testParseWWWAuthenticateBasic2(self): res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Basic realm="me",other="fred" '}) basic = res['basic'] self.assertEqual('me', basic['realm']) self.assertEqual('fred', basic['other']) def testParseWWWAuthenticateBasic3(self): res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Basic REAlm="me" '}) basic = res['basic'] self.assertEqual('me', basic['realm']) def testParseWWWAuthenticateDigest(self): res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41"'}) digest = res['digest'] self.assertEqual('testrealm@host.com', digest['realm']) self.assertEqual('auth,auth-int', digest['qop']) def testParseWWWAuthenticateMultiple(self): res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41" Basic REAlm="me" '}) digest = res['digest'] self.assertEqual('testrealm@host.com', digest['realm']) self.assertEqual('auth,auth-int', digest['qop']) self.assertEqual('dcd98b7102dd2f0e8b11d0f600bfb0c093', digest['nonce']) self.assertEqual('5ccc069c403ebaf9f0171e9517f40e41', digest['opaque']) basic = res['basic'] self.assertEqual('me', basic['realm']) def testParseWWWAuthenticateMultiple2(self): # Handle an added comma between challenges, which might get thrown in if the challenges were # originally sent in separate www-authenticate headers. 
res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me" '}) digest = res['digest'] self.assertEqual('testrealm@host.com', digest['realm']) self.assertEqual('auth,auth-int', digest['qop']) self.assertEqual('dcd98b7102dd2f0e8b11d0f600bfb0c093', digest['nonce']) self.assertEqual('5ccc069c403ebaf9f0171e9517f40e41', digest['opaque']) basic = res['basic'] self.assertEqual('me', basic['realm']) def testParseWWWAuthenticateMultiple3(self): # Handle an added comma between challenges, which might get thrown in if the challenges were # originally sent in separate www-authenticate headers. res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me", WSSE realm="foo", profile="UsernameToken"'}) digest = res['digest'] self.assertEqual('testrealm@host.com', digest['realm']) self.assertEqual('auth,auth-int', digest['qop']) self.assertEqual('dcd98b7102dd2f0e8b11d0f600bfb0c093', digest['nonce']) self.assertEqual('5ccc069c403ebaf9f0171e9517f40e41', digest['opaque']) basic = res['basic'] self.assertEqual('me', basic['realm']) wsse = res['wsse'] self.assertEqual('foo', wsse['realm']) self.assertEqual('UsernameToken', wsse['profile']) def testParseWWWAuthenticateMultiple4(self): res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Digest realm="test-real.m@host.com", qop \t=\t"\tauth,auth-int", nonce="(*)&^&$%#",opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me", WSSE realm="foo", profile="UsernameToken"'}) digest = res['digest'] self.assertEqual('test-real.m@host.com', digest['realm']) self.assertEqual('\tauth,auth-int', digest['qop']) self.assertEqual('(*)&^&$%#', digest['nonce']) def testParseWWWAuthenticateMoreQuoteCombos(self): res = httplib2._parse_www_authenticate({'www-authenticate':'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", stale=true'}) digest = res['digest'] self.assertEqual('myrealm', digest['realm']) def testParseWWWAuthenticateMalformed(self): try: res = httplib2._parse_www_authenticate({'www-authenticate':'OAuth "Facebook Platform" "invalid_token" "Invalid OAuth access token."'}) self.fail("should raise an exception") except httplib2.MalformedHeader: pass def testDigestObject(self): credentials = ('joe', 'password') host = None request_uri = '/projects/httplib2/test/digest/' headers = {} response = { 'www-authenticate': 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth"' } content = b"" d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None) d.request("GET", request_uri, headers, content, cnonce="33033375ec278a46") our_request = "authorization: %s" % headers['authorization'] working_request = 'authorization: Digest username="joe", realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", uri="/projects/httplib2/test/digest/", algorithm=MD5, response="97ed129401f7cdc60e5db58a80f3ea8b", qop=auth, nc=00000001, cnonce="33033375ec278a46"' self.assertEqual(our_request, working_request) def testDigestObjectWithOpaque(self): credentials = ('joe', 'password') host = None request_uri = '/projects/httplib2/test/digest/' headers = {} response = { 'www-authenticate': 'Digest 
realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", opaque="atestopaque"' } content = "" d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None) d.request("GET", request_uri, headers, content, cnonce="33033375ec278a46") our_request = "authorization: %s" % headers['authorization'] working_request = 'authorization: Digest username="joe", realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", uri="/projects/httplib2/test/digest/", algorithm=MD5, response="97ed129401f7cdc60e5db58a80f3ea8b", qop=auth, nc=00000001, cnonce="33033375ec278a46", opaque="atestopaque"' self.assertEqual(our_request, working_request) def testDigestObjectStale(self): credentials = ('joe', 'password') host = None request_uri = '/projects/httplib2/test/digest/' headers = {} response = httplib2.Response({ }) response['www-authenticate'] = 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", stale=true' response.status = 401 content = b"" d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None) # Returns true to force a retry self.assertTrue( d.response(response, content) ) def testDigestObjectAuthInfo(self): credentials = ('joe', 'password') host = None request_uri = '/projects/httplib2/test/digest/' headers = {} response = httplib2.Response({ }) response['www-authenticate'] = 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", stale=true' response['authentication-info'] = 'nextnonce="fred"' content = b"" d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None) # Returns true to force a retry self.assertFalse( d.response(response, content) ) self.assertEqual('fred', d.challenge['nonce']) self.assertEqual(1, d.challenge['nc']) def testWsseAlgorithm(self): digest = httplib2._wsse_username_token("d36e316282959a9ed4c89851497a717f", "2003-12-15T14:43:07Z", "taadtaadpstcsm") expected = b"quR/EWLAV4xLf9Zqyw4pDmfV9OY=" self.assertEqual(expected, digest) def testEnd2End(self): # one end to end header response = {'content-type': 'application/atom+xml', 'te': 'deflate'} end2end = httplib2._get_end2end_headers(response) self.assertTrue('content-type' in end2end) self.assertTrue('te' not in end2end) self.assertTrue('connection' not in end2end) # one end to end header that gets eliminated response = {'connection': 'content-type', 'content-type': 'application/atom+xml', 'te': 'deflate'} end2end = httplib2._get_end2end_headers(response) self.assertTrue('content-type' not in end2end) self.assertTrue('te' not in end2end) self.assertTrue('connection' not in end2end) # Degenerate case of no headers response = {} end2end = httplib2._get_end2end_headers(response) self.assertEqual(0, len(end2end)) # Degenerate case of connection referrring to a header not passed in response = {'connection': 'content-type'} end2end = httplib2._get_end2end_headers(response) self.assertEqual(0, len(end2end)) class TestProxyInfo(unittest.TestCase): def setUp(self): self.orig_env = dict(os.environ) def tearDown(self): os.environ.clear() os.environ.update(self.orig_env) def test_from_url(self): pi = httplib2.proxy_info_from_url('http://myproxy.example.com') self.assertEqual(pi.proxy_host, 'myproxy.example.com') self.assertEqual(pi.proxy_port, 80) self.assertEqual(pi.proxy_user, None) def test_from_url_ident(self): pi = 
    def test_from_url_ident(self):
        pi = httplib2.proxy_info_from_url('http://zoidberg:fish@someproxy:99')
        self.assertEqual(pi.proxy_host, 'someproxy')
        self.assertEqual(pi.proxy_port, 99)
        self.assertEqual(pi.proxy_user, 'zoidberg')
        self.assertEqual(pi.proxy_pass, 'fish')

    def test_from_env(self):
        os.environ['http_proxy'] = 'http://myproxy.example.com:8080'
        pi = httplib2.proxy_info_from_environment()
        self.assertEqual(pi.proxy_host, 'myproxy.example.com')
        self.assertEqual(pi.proxy_port, 8080)

    def test_from_env_no_proxy(self):
        os.environ['http_proxy'] = 'http://myproxy.example.com:80'
        os.environ['https_proxy'] = 'http://myproxy.example.com:81'
        pi = httplib2.proxy_info_from_environment('https')
        self.assertEqual(pi.proxy_host, 'myproxy.example.com')
        self.assertEqual(pi.proxy_port, 81)

    def test_from_env_none(self):
        os.environ.clear()
        pi = httplib2.proxy_info_from_environment()
        self.assertEqual(pi, None)


if __name__ == '__main__':
    unittest.main()
class HttpPrivateTest(unittest.TestCase): def testParseWWWAuthenticateDigest(self): - res = httplib2._parse_www_authenticate({ 'www-authenticate': + res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41"'}) digest = res['digest'] self.assertEqual('testrealm@host.com', digest['realm']) @@ -1386,7 +1386,7 @@ class HttpPrivateTest(unittest.TestCase): def testParseWWWAuthenticateMultiple(self): - res = httplib2._parse_www_authenticate({ 'www-authenticate': + res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41" Basic REAlm="me" '}) digest = res['digest'] self.assertEqual('testrealm@host.com', digest['realm']) @@ -1399,7 +1399,7 @@ class HttpPrivateTest(unittest.TestCase): def testParseWWWAuthenticateMultiple2(self): # Handle an added comma between challenges, which might get thrown in if the challenges were # originally sent in separate www-authenticate headers. - res = httplib2._parse_www_authenticate({ 'www-authenticate': + res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me" '}) digest = res['digest'] self.assertEqual('testrealm@host.com', digest['realm']) @@ -1412,7 +1412,7 @@ class HttpPrivateTest(unittest.TestCase): def testParseWWWAuthenticateMultiple3(self): # Handle an added comma between challenges, which might get thrown in if the challenges were # originally sent in separate www-authenticate headers. 
- res = httplib2._parse_www_authenticate({ 'www-authenticate': + res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me", WSSE realm="foo", profile="UsernameToken"'}) digest = res['digest'] self.assertEqual('testrealm@host.com', digest['realm']) @@ -1426,8 +1426,8 @@ class HttpPrivateTest(unittest.TestCase): self.assertEqual('UsernameToken', wsse['profile']) def testParseWWWAuthenticateMultiple4(self): - res = httplib2._parse_www_authenticate({ 'www-authenticate': - 'Digest realm="test-real.m@host.com", qop \t=\t"\tauth,auth-int", nonce="(*)&^&$%#",opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me", WSSE realm="foo", profile="UsernameToken"'}) + res = httplib2._parse_www_authenticate({ 'www-authenticate': + 'Digest realm="test-real.m@host.com", qop \t=\t"\tauth,auth-int", nonce="(*)&^&$%#",opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me", WSSE realm="foo", profile="UsernameToken"'}) digest = res['digest'] self.assertEqual('test-real.m@host.com', digest['realm']) self.assertEqual('\tauth,auth-int', digest['qop']) @@ -1448,7 +1448,7 @@ class HttpPrivateTest(unittest.TestCase): def testDigestObject(self): credentials = ('joe', 'password') host = None - request_uri = '/projects/httplib2/test/digest/' + request_uri = '/projects/httplib2/test/digest/' headers = {} response = { 'www-authenticate': 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth"' @@ -1456,7 +1456,7 @@ class HttpPrivateTest(unittest.TestCase): content = b"" d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None) - d.request("GET", request_uri, headers, content, cnonce="33033375ec278a46") + d.request("GET", request_uri, headers, content, cnonce="33033375ec278a46") our_request = "authorization: %s" % headers['authorization'] working_request = 'authorization: Digest username="joe", realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", uri="/projects/httplib2/test/digest/", algorithm=MD5, response="97ed129401f7cdc60e5db58a80f3ea8b", qop=auth, nc=00000001, cnonce="33033375ec278a46"' self.assertEqual(our_request, working_request) @@ -1480,7 +1480,7 @@ class HttpPrivateTest(unittest.TestCase): def testDigestObjectStale(self): credentials = ('joe', 'password') host = None - request_uri = '/projects/httplib2/test/digest/' + request_uri = '/projects/httplib2/test/digest/' headers = {} response = httplib2.Response({ }) response['www-authenticate'] = 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", stale=true' @@ -1493,7 +1493,7 @@ class HttpPrivateTest(unittest.TestCase): def testDigestObjectAuthInfo(self): credentials = ('joe', 'password') host = None - request_uri = '/projects/httplib2/test/digest/' + request_uri = '/projects/httplib2/test/digest/' headers = {} response = httplib2.Response({ }) response['www-authenticate'] = 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", stale=true' @@ -1530,7 +1530,7 @@ class HttpPrivateTest(unittest.TestCase): end2end = httplib2._get_end2end_headers(response) self.assertEqual(0, len(end2end)) - # Degenerate case of connection referrring to a header not passed in + # Degenerate case of connection referrring to a header not passed in response = {'connection': 
'content-type'} end2end = httplib2._get_end2end_headers(response) self.assertEqual(0, len(end2end)) httplib2-0.8/python3/httplib2/0000750017135500116100000000000012115724726015741 5ustar jcgregorioenghttplib2-0.8/python3/httplib2/cacerts.txt0000640017135500116100000011536612115724726020143 0ustar jcgregorioeng# Certificate Authority certificates for validating SSL connections.
#
# This file contains PEM format certificates generated from
# http://mxr.mozilla.org/seamonkey/source/security/nss/lib/ckfw/builtins/certdata.txt
#
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is the Netscape security libraries.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1994-2000
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
# # ***** END LICENSE BLOCK ***** Verisign/RSA Secure Server CA ============================= -----BEGIN CERTIFICATE----- MIICNDCCAaECEAKtZn5ORf5eV288mBle3cAwDQYJKoZIhvcNAQECBQAwXzELMAkG A1UEBhMCVVMxIDAeBgNVBAoTF1JTQSBEYXRhIFNlY3VyaXR5LCBJbmMuMS4wLAYD VQQLEyVTZWN1cmUgU2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk0 MTEwOTAwMDAwMFoXDTEwMDEwNzIzNTk1OVowXzELMAkGA1UEBhMCVVMxIDAeBgNV BAoTF1JTQSBEYXRhIFNlY3VyaXR5LCBJbmMuMS4wLAYDVQQLEyVTZWN1cmUgU2Vy dmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGbMA0GCSqGSIb3DQEBAQUAA4GJ ADCBhQJ+AJLOesGugz5aqomDV6wlAXYMra6OLDfO6zV4ZFQD5YRAUcm/jwjiioII 0haGN1XpsSECrXZogZoFokvJSyVmIlZsiAeP94FZbYQHZXATcXY+m3dM41CJVphI uR2nKRoTLkoRWZweFdVJVCxzOmmCsZc5nG1wZ0jl3S3WyB57AgMBAAEwDQYJKoZI hvcNAQECBQADfgBl3X7hsuyw4jrg7HFGmhkRuNPHoLQDQCYCPgmc4RKz0Vr2N6W3 YQO2WxZpO8ZECAyIUwxrl0nHPjXcbLm7qt9cuzovk2C2qUtN8iD3zV9/ZHuO3ABc 1/p3yjkWWW8O6tO1g39NTUJWdrTJXwT4OPjr0l91X817/OWOgHz8UA== -----END CERTIFICATE----- Thawte Personal Basic CA ======================== -----BEGIN CERTIFICATE----- MIIDITCCAoqgAwIBAgIBADANBgkqhkiG9w0BAQQFADCByzELMAkGA1UEBhMCWkEx FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT ZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFBlcnNvbmFsIEJhc2lj IENBMSgwJgYJKoZIhvcNAQkBFhlwZXJzb25hbC1iYXNpY0B0aGF3dGUuY29tMB4X DTk2MDEwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgcsxCzAJBgNVBAYTAlpBMRUw EwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEaMBgGA1UE ChMRVGhhd3RlIENvbnN1bHRpbmcxKDAmBgNVBAsTH0NlcnRpZmljYXRpb24gU2Vy dmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQZXJzb25hbCBCYXNpYyBD QTEoMCYGCSqGSIb3DQEJARYZcGVyc29uYWwtYmFzaWNAdGhhd3RlLmNvbTCBnzAN BgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAvLyTU23AUE+CFeZIlDWmWr5vQvoPR+53 dXLdjUmbllegeNTKP1GzaQuRdhciB5dqxFGTS+CN7zeVoQxN2jSQHReJl+A1OFdK wPQIcOk8RHtQfmGakOMj04gRRif1CwcOu93RfyAKiLlWCy4cgNrx454p7xS9CkT7 G1sY0b8jkyECAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQQF AAOBgQAt4plrsD16iddZopQBHyvdEktTwq1/qqcAXJFAVyVKOKqEcLnZgA+le1z7 c8a914phXAPjLSeoF+CEhULcXpvGt7Jtu3Sv5D/Lp7ew4F2+eIMllNLbgQ95B21P 9DkVWlIBe94y1k049hJcBlDfBVu9FEuh3ym6O0GN92NWod8isQ== -----END CERTIFICATE----- Thawte Personal Premium CA ========================== -----BEGIN CERTIFICATE----- MIIDKTCCApKgAwIBAgIBADANBgkqhkiG9w0BAQQFADCBzzELMAkGA1UEBhMCWkEx FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT ZXJ2aWNlcyBEaXZpc2lvbjEjMCEGA1UEAxMaVGhhd3RlIFBlcnNvbmFsIFByZW1p dW0gQ0ExKjAoBgkqhkiG9w0BCQEWG3BlcnNvbmFsLXByZW1pdW1AdGhhd3RlLmNv bTAeFw05NjAxMDEwMDAwMDBaFw0yMDEyMzEyMzU5NTlaMIHPMQswCQYDVQQGEwJa QTEVMBMGA1UECBMMV2VzdGVybiBDYXBlMRIwEAYDVQQHEwlDYXBlIFRvd24xGjAY BgNVBAoTEVRoYXd0ZSBDb25zdWx0aW5nMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9u IFNlcnZpY2VzIERpdmlzaW9uMSMwIQYDVQQDExpUaGF3dGUgUGVyc29uYWwgUHJl bWl1bSBDQTEqMCgGCSqGSIb3DQEJARYbcGVyc29uYWwtcHJlbWl1bUB0aGF3dGUu Y29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDJZtn4B0TPuYwu8KHvE0Vs Bd/eJxZRNkERbGw77f4QfRKe5ZtCmv5gMcNmt3M6SK5O0DI3lIi1DbbZ8/JE2dWI Et12TfIa/G8jHnrx2JhFTgcQ7xZC0EN1bUre4qrJMf8fAHB8Zs8QJQi6+u4A6UYD ZicRFTuqW/KY3TZCstqIdQIDAQABoxMwETAPBgNVHRMBAf8EBTADAQH/MA0GCSqG SIb3DQEBBAUAA4GBAGk2ifc0KjNyL2071CKyuG+axTZmDhs8obF1Wub9NdP4qPIH b4Vnjt4rueIXsDqg8A6iAJrf8xQVbrvIhVqYgPn/vnQdPfP+MCXRNzRn+qVxeTBh KXLA4CxM+1bkOqhv5TJZUtt1KFBZDPgLGeSs2a+WjS9Q2wfD6h+rM+D1KzGJ -----END CERTIFICATE----- Thawte Personal Freemail CA =========================== -----BEGIN CERTIFICATE----- MIIDLTCCApagAwIBAgIBADANBgkqhkiG9w0BAQQFADCB0TELMAkGA1UEBhMCWkEx FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD 
VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT ZXJ2aWNlcyBEaXZpc2lvbjEkMCIGA1UEAxMbVGhhd3RlIFBlcnNvbmFsIEZyZWVt YWlsIENBMSswKQYJKoZIhvcNAQkBFhxwZXJzb25hbC1mcmVlbWFpbEB0aGF3dGUu Y29tMB4XDTk2MDEwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgdExCzAJBgNVBAYT AlpBMRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEa MBgGA1UEChMRVGhhd3RlIENvbnN1bHRpbmcxKDAmBgNVBAsTH0NlcnRpZmljYXRp b24gU2VydmljZXMgRGl2aXNpb24xJDAiBgNVBAMTG1RoYXd0ZSBQZXJzb25hbCBG cmVlbWFpbCBDQTErMCkGCSqGSIb3DQEJARYccGVyc29uYWwtZnJlZW1haWxAdGhh d3RlLmNvbTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA1GnX1LCUZFtx6UfY DFG26nKRsIRefS0Nj3sS34UldSh0OkIsYyeflXtL734Zhx2G6qPduc6WZBrCFG5E rHzmj+hND3EfQDimAKOHePb5lIZererAXnbr2RSjXW56fAylS1V/Bhkpf56aJtVq uzgkCGqYx7Hao5iR/Xnb5VrEHLkCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zAN BgkqhkiG9w0BAQQFAAOBgQDH7JJ+Tvj1lqVnYiqk8E0RYNBvjWBYYawmu1I1XAjP MPuoSpaKH2JCI4wXD/S6ZJwXrEcp352YXtJsYHFcoqzceePnbgBHH7UNKOgCneSa /RP0ptl8sfjcXyMmCZGAc9AUG95DqYMl8uacLxXK/qarigd1iwzdUYRr5PjRznei gQ== -----END CERTIFICATE----- Thawte Server CA ================ -----BEGIN CERTIFICATE----- MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3 dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3 DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91 yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG 7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ qdq5snUb9kLy78fyGPmJvKP/iiMucEc= -----END CERTIFICATE----- Thawte Premium Server CA ======================== -----BEGIN CERTIFICATE----- MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG 9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg== -----END CERTIFICATE----- Equifax Secure CA ================= -----BEGIN CERTIFICATE----- MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV 
UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1 MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y 7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh 1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4 -----END CERTIFICATE----- Verisign Class 1 Public Primary Certification Authority ======================================================= -----BEGIN CERTIFICATE----- MIICPTCCAaYCEQDNun9W8N/kvFT+IqyzcqpVMA0GCSqGSIb3DQEBAgUAMF8xCzAJ BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE3MDUGA1UECxMuQ2xh c3MgMSBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05 NjAxMjkwMDAwMDBaFw0yODA4MDEyMzU5NTlaMF8xCzAJBgNVBAYTAlVTMRcwFQYD VQQKEw5WZXJpU2lnbiwgSW5jLjE3MDUGA1UECxMuQ2xhc3MgMSBQdWJsaWMgUHJp bWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCBnzANBgkqhkiG9w0BAQEFAAOB jQAwgYkCgYEA5Rm/baNWYS2ZSHH2Z965jeu3noaACpEO+jglr0aIguVzqKCbJF0N H8xlbgyw0FaEGIeaBpsQoXPftFg5a27B9hXVqKg/qhIGjTGsf7A01480Z4gJzRQR 4k5FVmkfeAKA2txHkSm7NsljXMXg1y2He6G3MrB7MLoqLzGq7qNn2tsCAwEAATAN BgkqhkiG9w0BAQIFAAOBgQBMP7iLxmjf7kMzDl3ppssHhE16M/+SG/Q2rdiVIjZo EWx8QszznC7EBz8UsA9P/5CSdvnivErpj82ggAr3xSnxgiJduLHdgSOjeyUVRjB5 FvjqBUuUfx3CHMjjt/QQQDwTw18fU+hI5Ia0e6E1sHslurjTjqs/OJ0ANACY89Fx lA== -----END CERTIFICATE----- Verisign Class 2 Public Primary Certification Authority ======================================================= -----BEGIN CERTIFICATE----- MIICPDCCAaUCEC0b/EoXjaOR6+f/9YtFvgswDQYJKoZIhvcNAQECBQAwXzELMAkG A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz cyAyIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAyIFB1YmxpYyBQcmlt YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN ADCBiQKBgQC2WoujDWojg4BrzzmH9CETMwZMJaLtVRKXxaeAufqDwSCg+i8VDXyh YGt+eSz6Bg86rvYbb7HS/y8oUl+DfUvEerf4Zh+AVPy3wo5ZShRXRtGak75BkQO7 FYCTXOvnzAhsPz6zSvz/S2wj1VCCJkQZjiPDceoZJEcEnnW/yKYAHwIDAQABMA0G CSqGSIb3DQEBAgUAA4GBAIobK/o5wXTXXtgZZKJYSi034DNHD6zt96rbHuSLBlxg J8pFUs4W7z8GZOeUaHxgMxURaa+dYo2jA1Rrpr7l7gUYYAS/QoD90KioHgE796Nc r6Pc5iaAIzy4RHT3Cq5Ji2F4zCS/iIqnDupzGUH9TQPwiNHleI2lKk/2lw0Xd8rY -----END CERTIFICATE----- Verisign Class 3 Public Primary Certification Authority ======================================================= -----BEGIN CERTIFICATE----- MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE 
BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G CSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Do lbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNyc AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k -----END CERTIFICATE----- Verisign Class 1 Public Primary Certification Authority - G2 ============================================================ -----BEGIN CERTIFICATE----- MIIDAjCCAmsCEEzH6qqYPnHTkxD4PTqJkZIwDQYJKoZIhvcNAQEFBQAwgcExCzAJ BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh c3MgMSBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMSBQdWJsaWMg UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB AQUAA4GNADCBiQKBgQCq0Lq+Fi24g9TK0g+8djHKlNgdk4xWArzZbxpvUjZudVYK VdPfQ4chEWWKfo+9Id5rMj8bhDSVBZ1BNeuS65bdqlk/AVNtmU/t5eIqWpDBucSm Fc/IReumXY6cPvBkJHalzasab7bYe1FhbqZ/h8jit+U03EGI6glAvnOSPWvndQID AQABMA0GCSqGSIb3DQEBBQUAA4GBAKlPww3HZ74sy9mozS11534Vnjty637rXC0J h9ZrbWB85a7FkCMMXErQr7Fd88e2CtvgFZMN3QO8x3aKtd1Pw5sTdbgBwObJW2ul uIncrKTdcu1OofdPvAbT6shkdHvClUGcZXNY8ZCaPGqxmMnEh7zPRW1F4m4iP/68 DzFc6PLZ -----END CERTIFICATE----- Verisign Class 2 Public Primary Certification Authority - G2 ============================================================ -----BEGIN CERTIFICATE----- MIIDAzCCAmwCEQC5L2DMiJ+hekYJuFtwbIqvMA0GCSqGSIb3DQEBBQUAMIHBMQsw CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xPDA6BgNVBAsTM0Ns YXNzIDIgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH MjE6MDgGA1UECxMxKGMpIDE5OTggVmVyaVNpZ24sIEluYy4gLSBGb3IgYXV0aG9y aXplZCB1c2Ugb25seTEfMB0GA1UECxMWVmVyaVNpZ24gVHJ1c3QgTmV0d29yazAe Fw05ODA1MTgwMDAwMDBaFw0yODA4MDEyMzU5NTlaMIHBMQswCQYDVQQGEwJVUzEX MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xPDA6BgNVBAsTM0NsYXNzIDIgUHVibGlj IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMjE6MDgGA1UECxMx KGMpIDE5OTggVmVyaVNpZ24sIEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s eTEfMB0GA1UECxMWVmVyaVNpZ24gVHJ1c3QgTmV0d29yazCBnzANBgkqhkiG9w0B AQEFAAOBjQAwgYkCgYEAp4gBIXQs5xoD8JjhlzwPIQjxnNuX6Zr8wgQGE75fUsjM HiwSViy4AWkszJkfrbCWrnkE8hM5wXuYuggs6MKEEyyqaekJ9MepAqRCwiNPStjw DqL7MWzJ5m+ZJwf15vRMeJ5t60aG+rmGyVTyssSv1EYcWskVMP8NbPUtDm3Of3cC AwEAATANBgkqhkiG9w0BAQUFAAOBgQByLvl/0fFx+8Se9sVeUYpAmLho+Jscg9ji nb3/7aHmZuovCfTK1+qlK5X2JGCGTUQug6XELaDTrnhpb3LabK4I8GOSN+a7xDAX rXfMSTWqz9iP0b63GJZHc2pUIjRkLbYWm1lbtFFZOrMLFPQS32eg9K0yZF6xRnIn jBJ7xUS0rg== -----END CERTIFICATE----- Verisign Class 3 Public Primary Certification Authority - G2 ============================================================ -----BEGIN CERTIFICATE----- MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 
MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4 pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0 13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY oJ2daZH9 -----END CERTIFICATE----- Verisign Class 4 Public Primary Certification Authority - G2 ============================================================ -----BEGIN CERTIFICATE----- MIIDAjCCAmsCEDKIjprS9esTR/h/xCA3JfgwDQYJKoZIhvcNAQEFBQAwgcExCzAJ BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh c3MgNCBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgNCBQdWJsaWMg UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB AQUAA4GNADCBiQKBgQC68OTP+cSuhVS5B1f5j8V/aBH4xBewRNzjMHPVKmIquNDM HO0oW369atyzkSTKQWI8/AIBvxwWMZQFl3Zuoq29YRdsTjCG8FE3KlDHqGKB3FtK qsGgtG7rL+VXxbErQHDbWk2hjh+9Ax/YA9SPTJlxvOKCzFjomDqG04Y48wApHwID AQABMA0GCSqGSIb3DQEBBQUAA4GBAIWMEsGnuVAVess+rLhDityq3RS6iYF+ATwj cSGIL4LcY/oCRaxFWdcqWERbt5+BO5JoPeI3JPV7bI92NZYJqFmduc4jq3TWg/0y cyfYaT5DdPauxYma51N86Xv2S/PBZYPejYqcPIiNOVn8qj8ijaHBZlCBckztImRP T8qAkbYp -----END CERTIFICATE----- Verisign Class 1 Public Primary Certification Authority - G3 ============================================================ -----BEGIN CERTIFICATE----- MIIEGjCCAwICEQCLW3VWhFSFCwDPrzhIzrGkMA0GCSqGSIb3DQEBBQUAMIHKMQsw CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT aWduIENsYXNzIDEgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu IENsYXNzIDEgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAN2E1Lm0+afY8wR4 nN493GwTFtl63SRRZsDHJlkNrAYIwpTRMx/wgzUfbhvI3qpuFU5UJ+/EbRrsC+MO 8ESlV8dAWB6jRx9x7GD2bZTIGDnt/kIYVt/kTEkQeE4BdjVjEjbdZrwBBDajVWjV ojYJrKshJlQGrT/KFOCsyq0GHZXi+J3x4GD/wn91K0zM2v6HmSHquv4+VNfSWXjb PG7PoBMAGrgnoeS+Z5bKoMWznN3JdZ7rMJpfo83ZrngZPyPpXNspva1VyBtUjGP2 6KbqxzcSXKMpHgLZ2x87tNcPVkeBFQRKr4Mn0cVYiMHd9qqnoxjaaKptEVHhv2Vr n5Z20T0CAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAq2aN17O6x5q25lXQBfGfMY1a qtmqRiYPce2lrVNWYgFHKkTp/j90CxObufRNG7LRX7K20ohcs5/Ny9Sn2WCVhDr4 wTcdYcrnsMXlkdpUpqwxga6X3s0IrLjAl4B/bnKk52kTlWUfxJM8/XmPBNQ+T+r3 ns7NZ3xPZQL/kYVUc8f/NveGLezQXk//EZ9yBta4GvFMDSZl4kSAHsef493oCtrs pSCAaWihT37ha88HQfqDjrw43bAuEbFrskLMmrz5SCJ5ShkPshw+IHTZasO+8ih4 E1Z5T21Q6huwtVexN2ZYI/PcD98Kh8TvhgXVOBRgmaNL3gaWcSzy27YfpO8/7g== -----END CERTIFICATE----- Verisign Class 2 Public Primary Certification Authority - G3 ============================================================ -----BEGIN CERTIFICATE----- MIIEGTCCAwECEGFwy0mMX5hFKeewptlQW3owDQYJKoZIhvcNAQEFBQAwgcoxCzAJ 
BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjEfMB0GA1UECxMWVmVy aVNpZ24gVHJ1c3QgTmV0d29yazE6MDgGA1UECxMxKGMpIDE5OTkgVmVyaVNpZ24s IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTFFMEMGA1UEAxM8VmVyaVNp Z24gQ2xhc3MgMiBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 eSAtIEczMB4XDTk5MTAwMTAwMDAwMFoXDTM2MDcxNjIzNTk1OVowgcoxCzAJBgNV BAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjEfMB0GA1UECxMWVmVyaVNp Z24gVHJ1c3QgTmV0d29yazE6MDgGA1UECxMxKGMpIDE5OTkgVmVyaVNpZ24sIElu Yy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTFFMEMGA1UEAxM8VmVyaVNpZ24g Q2xhc3MgMiBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAt IEczMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArwoNwtUs22e5LeWU J92lvuCwTY+zYVY81nzD9M0+hsuiiOLh2KRpxbXiv8GmR1BeRjmL1Za6tW8UvxDO JxOeBUebMXoT2B/Z0wI3i60sR/COgQanDTAM6/c8DyAd3HJG7qUCyFvDyVZpTMUY wZF7C9UTAJu878NIPkZgIIUq1ZC2zYugzDLdt/1AVbJQHFauzI13TccgTacxdu9o koqQHgiBVrKtaaNS0MscxCM9H5n+TOgWY47GCI72MfbS+uV23bUckqNJzc0BzWjN qWm6o+sdDZykIKbBoMXRRkwXbdKsZj+WjOCE1Db/IlnF+RFgqF8EffIa9iVCYQ/E Srg+iQIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQA0JhU8wI1NQ0kdvekhktdmnLfe xbjQ5F1fdiLAJvmEOjr5jLX77GDx6M4EsMjdpwOPMPOY36TmpDHf0xwLRtxyID+u 7gU8pDM/CzmscHhzS5kr3zDCVLCoO1Wh/hYozUK9dG6A2ydEp85EXdQbkJgNHkKU sQAsBNB0owIFImNjzYO1+8FtYmtpdf1dcEG59b98377BMnMiIYtYgXsVkXq642RI sH/7NiXaldDxJBQX3RiAa0YjOVT1jmIJBB2UkKab5iXiQkWquJCtvgiPqQtCGJTP cjnhsUPgKM+351psE2tJs//jGHyJizNdrDPXp/naOlXJWBD5qu9ats9LS98q -----END CERTIFICATE----- Verisign Class 3 Public Primary Certification Authority - G3 ============================================================ -----BEGIN CERTIFICATE----- MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te 2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC /Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== -----END CERTIFICATE----- Verisign Class 4 Public Primary Certification Authority - G3 ============================================================ -----BEGIN CERTIFICATE----- MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD 
VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1 GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ +mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/ ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1 CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c 2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/ bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg== -----END CERTIFICATE----- Equifax Secure Global eBusiness CA ================================== -----BEGIN CERTIFICATE----- MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc 58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/ o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv 8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV -----END CERTIFICATE----- Equifax Secure eBusiness CA 1 ============================= -----BEGIN CERTIFICATE----- MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+ WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN /Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ== -----END CERTIFICATE----- Equifax Secure eBusiness CA 2 ============================= -----BEGIN CERTIFICATE----- MIIDIDCCAomgAwIBAgIEN3DPtTANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV UzEXMBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2Vj dXJlIGVCdXNpbmVzcyBDQS0yMB4XDTk5MDYyMzEyMTQ0NVoXDTE5MDYyMzEyMTQ0 NVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkVxdWlmYXggU2VjdXJlMSYwJAYD VQQLEx1FcXVpZmF4IFNlY3VyZSBlQnVzaW5lc3MgQ0EtMjCBnzANBgkqhkiG9w0B AQEFAAOBjQAwgYkCgYEA5Dk5kx5SBhsoNviyoynF7Y6yEb3+6+e0dMKP/wXn2Z0G 
vxLIPw7y1tEkshHe0XMJitSxLJgJDR5QRrKDpkWNYmi7hRsgcDKqQM2mll/EcTc/ BPO3QSQ5BxoeLmFYoBIL5aXfxavqN3HMHMg3OrmXUqesxWoklE6ce8/AatbfIb0C AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEX MBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2VjdXJl IGVCdXNpbmVzcyBDQS0yMQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTkw NjIzMTIxNDQ1WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUUJ4L6q9euSBIplBq y/3YIHqngnYwHQYDVR0OBBYEFFCeC+qvXrkgSKZQasv92CB6p4J2MAwGA1UdEwQF MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA A4GBAAyGgq3oThr1jokn4jVYPSm0B482UJW/bsGe68SQsoWou7dC4A8HOd/7npCy 0cE+U58DRLB+S/Rv5Hwf5+Kx5Lia78O9zt4LMjTZ3ijtM2vE1Nc9ElirfQkty3D1 E4qUoSek1nDFbZS1yX2doNLGCEnZZpum0/QL3MUmV+GRMOrN -----END CERTIFICATE----- Thawte Time Stamping CA ======================= -----BEGIN CERTIFICATE----- MIICoTCCAgqgAwIBAgIBADANBgkqhkiG9w0BAQQFADCBizELMAkGA1UEBhMCWkEx FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTEUMBIGA1UEBxMLRHVyYmFudmlsbGUxDzAN BgNVBAoTBlRoYXd0ZTEdMBsGA1UECxMUVGhhd3RlIENlcnRpZmljYXRpb24xHzAd BgNVBAMTFlRoYXd0ZSBUaW1lc3RhbXBpbmcgQ0EwHhcNOTcwMTAxMDAwMDAwWhcN MjAxMjMxMjM1OTU5WjCBizELMAkGA1UEBhMCWkExFTATBgNVBAgTDFdlc3Rlcm4g Q2FwZTEUMBIGA1UEBxMLRHVyYmFudmlsbGUxDzANBgNVBAoTBlRoYXd0ZTEdMBsG A1UECxMUVGhhd3RlIENlcnRpZmljYXRpb24xHzAdBgNVBAMTFlRoYXd0ZSBUaW1l c3RhbXBpbmcgQ0EwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANYrWHhhRYZT 6jR7UZztsOYuGA7+4F+oJ9O0yeB8WU4WDnNUYMF/9p8u6TqFJBU820cEY8OexJQa Wt9MevPZQx08EHp5JduQ/vBR5zDWQQD9nyjfeb6Uu522FOMjhdepQeBMpHmwKxqL 8vg7ij5FrHGSALSQQZj7X+36ty6K+Ig3AgMBAAGjEzARMA8GA1UdEwEB/wQFMAMB Af8wDQYJKoZIhvcNAQEEBQADgYEAZ9viwuaHPUCDhjc1fR/OmsMMZiCouqoEiYbC 9RAIDb/LogWK0E02PvTX72nGXuSwlG9KuefeW4i2e9vjJ+V2w/A1wcu1J5szedyQ pgCed/r8zSeUQhac0xxo7L9c3eWpexAKMnRUEzGLhQOEkbdYATAUOK8oyvyxUBkZ CayJSdM= -----END CERTIFICATE----- thawte Primary Root CA ====================== -----BEGIN CERTIFICATE----- MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta 3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk 6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 /qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 jVaMaA== -----END CERTIFICATE----- VeriSign Class 3 Public Primary Certification Authority - G5 ============================================================ -----BEGIN CERTIFICATE----- MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB 
yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y 5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ 4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq -----END CERTIFICATE----- Entrust.net Secure Server Certification Authority ================================================= -----BEGIN CERTIFICATE----- MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1 MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/ I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3 wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5 BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0 MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN 95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd 2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI= -----END CERTIFICATE----- Go Daddy Certification Authority Root Certificate Bundle 
======================================================== -----BEGIN CERTIFICATE----- MIIE3jCCA8agAwIBAgICAwEwDQYJKoZIhvcNAQEFBQAwYzELMAkGA1UEBhMCVVMx ITAfBgNVBAoTGFRoZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28g RGFkZHkgQ2xhc3MgMiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjExMTYw MTU0MzdaFw0yNjExMTYwMTU0MzdaMIHKMQswCQYDVQQGEwJVUzEQMA4GA1UECBMH QXJpem9uYTETMBEGA1UEBxMKU2NvdHRzZGFsZTEaMBgGA1UEChMRR29EYWRkeS5j b20sIEluYy4xMzAxBgNVBAsTKmh0dHA6Ly9jZXJ0aWZpY2F0ZXMuZ29kYWRkeS5j b20vcmVwb3NpdG9yeTEwMC4GA1UEAxMnR28gRGFkZHkgU2VjdXJlIENlcnRpZmlj YXRpb24gQXV0aG9yaXR5MREwDwYDVQQFEwgwNzk2OTI4NzCCASIwDQYJKoZIhvcN AQEBBQADggEPADCCAQoCggEBAMQt1RWMnCZM7DI161+4WQFapmGBWTtwY6vj3D3H KrjJM9N55DrtPDAjhI6zMBS2sofDPZVUBJ7fmd0LJR4h3mUpfjWoqVTr9vcyOdQm VZWt7/v+WIbXnvQAjYwqDL1CBM6nPwT27oDyqu9SoWlm2r4arV3aLGbqGmu75RpR SgAvSMeYddi5Kcju+GZtCpyz8/x4fKL4o/K1w/O5epHBp+YlLpyo7RJlbmr2EkRT cDCVw5wrWCs9CHRK8r5RsL+H0EwnWGu1NcWdrxcx+AuP7q2BNgWJCJjPOq8lh8BJ 6qf9Z/dFjpfMFDniNoW1fho3/Rb2cRGadDAW/hOUoz+EDU8CAwEAAaOCATIwggEu MB0GA1UdDgQWBBT9rGEyk2xF1uLuhV+auud2mWjM5zAfBgNVHSMEGDAWgBTSxLDS kdRMEXGzYcs9of7dqGrU4zASBgNVHRMBAf8ECDAGAQH/AgEAMDMGCCsGAQUFBwEB BCcwJTAjBggrBgEFBQcwAYYXaHR0cDovL29jc3AuZ29kYWRkeS5jb20wRgYDVR0f BD8wPTA7oDmgN4Y1aHR0cDovL2NlcnRpZmljYXRlcy5nb2RhZGR5LmNvbS9yZXBv c2l0b3J5L2dkcm9vdC5jcmwwSwYDVR0gBEQwQjBABgRVHSAAMDgwNgYIKwYBBQUH AgEWKmh0dHA6Ly9jZXJ0aWZpY2F0ZXMuZ29kYWRkeS5jb20vcmVwb3NpdG9yeTAO BgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBANKGwOy9+aG2Z+5mC6IG OgRQjhVyrEp0lVPLN8tESe8HkGsz2ZbwlFalEzAFPIUyIXvJxwqoJKSQ3kbTJSMU A2fCENZvD117esyfxVgqwcSeIaha86ykRvOe5GPLL5CkKSkB2XIsKd83ASe8T+5o 0yGPwLPk9Qnt0hCqU7S+8MxZC9Y7lhyVJEnfzuz9p0iRFEUOOjZv2kWzRaJBydTX RE4+uXR21aITVSzGh6O1mawGhId/dQb8vxRMDsxuxN89txJx9OjxUUAiKEngHUuH qDTMBqLdElrRhjZkAzVvb3du6/KFUJheqwNTrZEjYx8WnM25sgVjOuH0aBsXBTWV U+4= -----END CERTIFICATE----- -----BEGIN CERTIFICATE----- MIIE+zCCBGSgAwIBAgICAQ0wDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1Zh bGlDZXJ0IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIElu Yy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24g QXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAe BgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTA0MDYyOTE3MDYyMFoX DTI0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRoZSBHbyBE YWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3MgMiBDZXJ0 aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggENADCCAQgC ggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCAPVYYYwhv 2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6wwdhFJ2+q N1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXiEqITLdiO r18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMYavx4A6lN f4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+YihfukEH U1jPEX44dMX4/7VpkI+EdOqXG68CAQOjggHhMIIB3TAdBgNVHQ4EFgQU0sSw0pHU TBFxs2HLPaH+3ahq1OMwgdIGA1UdIwSByjCBx6GBwaSBvjCBuzEkMCIGA1UEBxMb VmFsaUNlcnQgVmFsaWRhdGlvbiBOZXR3b3JrMRcwFQYDVQQKEw5WYWxpQ2VydCwg SW5jLjE1MDMGA1UECxMsVmFsaUNlcnQgQ2xhc3MgMiBQb2xpY3kgVmFsaWRhdGlv biBBdXRob3JpdHkxITAfBgNVBAMTGGh0dHA6Ly93d3cudmFsaWNlcnQuY29tLzEg MB4GCSqGSIb3DQEJARYRaW5mb0B2YWxpY2VydC5jb22CAQEwDwYDVR0TAQH/BAUw AwEB/zAzBggrBgEFBQcBAQQnMCUwIwYIKwYBBQUHMAGGF2h0dHA6Ly9vY3NwLmdv ZGFkZHkuY29tMEQGA1UdHwQ9MDswOaA3oDWGM2h0dHA6Ly9jZXJ0aWZpY2F0ZXMu Z29kYWRkeS5jb20vcmVwb3NpdG9yeS9yb290LmNybDBLBgNVHSAERDBCMEAGBFUd IAAwODA2BggrBgEFBQcCARYqaHR0cDovL2NlcnRpZmljYXRlcy5nb2RhZGR5LmNv bS9yZXBvc2l0b3J5MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOBgQC1 QPmnHfbq/qQaQlpE9xXUhUaJwL6e4+PrxeNYiY+Sn1eocSxI0YGyeR+sBjUZsE4O WBsUs5iB0QQeyAfJg594RAoYC5jcdnplDQ1tgMQLARzLrUc+cb53S8wGd9D0Vmsf 
SxOaFIqII6hR8INMqzW/Rn453HWkrugp++85j09VZw== -----END CERTIFICATE----- -----BEGIN CERTIFICATE----- MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG 9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9 WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd -----END CERTIFICATE----- GeoTrust Global CA ================== -----BEGIN CERTIFICATE----- MIIDfTCCAuagAwIBAgIDErvmMA0GCSqGSIb3DQEBBQUAME4xCzAJBgNVBAYTAlVT MRAwDgYDVQQKEwdFcXVpZmF4MS0wKwYDVQQLEyRFcXVpZmF4IFNlY3VyZSBDZXJ0 aWZpY2F0ZSBBdXRob3JpdHkwHhcNMDIwNTIxMDQwMDAwWhcNMTgwODIxMDQwMDAw WjBCMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UE AxMSR2VvVHJ1c3QgR2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB CgKCAQEA2swYYzD99BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9m OSm9BXiLnTjoBbdqfnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIu T8rxh0PBFpVXLVDviS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6c JmTM386DGXHKTubU1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmR Cw7+OC7RHQWa9k0+bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5asz PeE4uwc2hGKceeoWMPRfwCvocWvk+QIDAQABo4HwMIHtMB8GA1UdIwQYMBaAFEjm aPkr0rKV10fYIyAQTzOYkJ/UMB0GA1UdDgQWBBTAephojYn7qwVkDBF9qn1luMrM TjAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjA6BgNVHR8EMzAxMC+g LaArhilodHRwOi8vY3JsLmdlb3RydXN0LmNvbS9jcmxzL3NlY3VyZWNhLmNybDBO BgNVHSAERzBFMEMGBFUdIAAwOzA5BggrBgEFBQcCARYtaHR0cHM6Ly93d3cuZ2Vv dHJ1c3QuY29tL3Jlc291cmNlcy9yZXBvc2l0b3J5MA0GCSqGSIb3DQEBBQUAA4GB AHbhEm5OSxYShjAGsoEIz/AIx8dxfmbuwu3UOx//8PDITtZDOLC5MH0Y0FWDomrL NhGc6Ehmo21/uBPUR/6LWlxz/K7ZGzIZOKuXNBSqltLroxwUCEm2u+WR74M26x1W b8ravHNjkOR/ez4iyz0H7V84dJzjA1BOoa+Y7mHyhD8S -----END CERTIFICATE----- httplib2-0.8/python3/httplib2/iri2uri.py0000640017135500116100000000754612115724726017715 0ustar jcgregorioeng""" iri2uri Converts an IRI to a URI. 
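
As a hedged illustration, borrowing the expected values from this module's
own unit tests below (the authority is IDNA-encoded, while characters in
the 'ucschar'/'iprivate' ranges elsewhere have their UTF-8 octets
percent-encoded):

    iri2uri("http://\N{COMET}.com/\N{COMET}")  ->  "http://xn--o3h.com/%E2%98%84"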
""" __author__ = "Joe Gregorio (joe@bitworking.org)" __copyright__ = "Copyright 2006, Joe Gregorio" __contributors__ = [] __version__ = "1.0.0" __license__ = "MIT" __history__ = """ """ import urllib.parse # Convert an IRI to a URI following the rules in RFC 3987 # # The characters we need to enocde and escape are defined in the spec: # # iprivate = %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD # ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF # / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD # / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD # / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD # / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD # / %xD0000-DFFFD / %xE1000-EFFFD escape_range = [ (0xA0, 0xD7FF), (0xE000, 0xF8FF), (0xF900, 0xFDCF), (0xFDF0, 0xFFEF), (0x10000, 0x1FFFD), (0x20000, 0x2FFFD), (0x30000, 0x3FFFD), (0x40000, 0x4FFFD), (0x50000, 0x5FFFD), (0x60000, 0x6FFFD), (0x70000, 0x7FFFD), (0x80000, 0x8FFFD), (0x90000, 0x9FFFD), (0xA0000, 0xAFFFD), (0xB0000, 0xBFFFD), (0xC0000, 0xCFFFD), (0xD0000, 0xDFFFD), (0xE1000, 0xEFFFD), (0xF0000, 0xFFFFD), (0x100000, 0x10FFFD), ] def encode(c): retval = c i = ord(c) for low, high in escape_range: if i < low: break if i >= low and i <= high: retval = "".join(["%%%2X" % o for o in c.encode('utf-8')]) break return retval def iri2uri(uri): """Convert an IRI to a URI. Note that IRIs must be passed in a unicode strings. That is, do not utf-8 encode the IRI before passing it into the function.""" if isinstance(uri ,str): (scheme, authority, path, query, fragment) = urllib.parse.urlsplit(uri) authority = authority.encode('idna').decode('utf-8') # For each character in 'ucschar' or 'iprivate' # 1. encode as utf-8 # 2. then %-encode each octet of that utf-8 uri = urllib.parse.urlunsplit((scheme, authority, path, query, fragment)) uri = "".join([encode(c) for c in uri]) return uri if __name__ == "__main__": import unittest class Test(unittest.TestCase): def test_uris(self): """Test that URIs are invariant under the transformation.""" invariant = [ "ftp://ftp.is.co.za/rfc/rfc1808.txt", "http://www.ietf.org/rfc/rfc2396.txt", "ldap://[2001:db8::7]/c=GB?objectClass?one", "mailto:John.Doe@example.com", "news:comp.infosystems.www.servers.unix", "tel:+1-816-555-1212", "telnet://192.0.2.16:80/", "urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ] for uri in invariant: self.assertEqual(uri, iri2uri(uri)) def test_iri(self): """ Test that the right type of escaping is done for each part of the URI.""" self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri("http://\N{COMET}.com/\N{COMET}")) self.assertEqual("http://bitworking.org/?fred=%E2%98%84", iri2uri("http://bitworking.org/?fred=\N{COMET}")) self.assertEqual("http://bitworking.org/#%E2%98%84", iri2uri("http://bitworking.org/#\N{COMET}")) self.assertEqual("#%E2%98%84", iri2uri("#\N{COMET}")) self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")) self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"))) self.assertNotEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode('utf-8'))) unittest.main() httplib2-0.8/python3/httplib2/__init__.py0000640017135500116100000015575312115724726020073 0ustar jcgregorioeng """ httplib2 A caching http interface that supports ETags and gzip to conserve bandwidth. 
Requires Python 3.0 or later Changelog: 2009-05-28, Pilgrim: ported to Python 3 2007-08-18, Rick: Modified so it's able to use a socks proxy if needed. """ __author__ = "Joe Gregorio (joe@bitworking.org)" __copyright__ = "Copyright 2006, Joe Gregorio" __contributors__ = ["Thomas Broyer (t.broyer@ltgt.net)", "James Antill", "Xavier Verges Farrero", "Jonathan Feinberg", "Blair Zajac", "Sam Ruby", "Louis Nyffenegger", "Mark Pilgrim"] __license__ = "MIT" __version__ = "0.8" import re import sys import email import email.utils import email.message import email.feedparser import io import gzip import zlib import http.client import urllib.parse import base64 import os import copy import calendar import time import random import errno from hashlib import sha1 as _sha, md5 as _md5 import hmac from gettext import gettext as _ import socket import ssl _ssl_wrap_socket = ssl.wrap_socket try: import socks except ImportError: socks = None from .iri2uri import iri2uri def has_timeout(timeout): if hasattr(socket, '_GLOBAL_DEFAULT_TIMEOUT'): return (timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT) return (timeout is not None) __all__ = ['Http', 'Response', 'ProxyInfo', 'HttpLib2Error', 'RedirectMissingLocation', 'RedirectLimit', 'FailedToDecompressContent', 'UnimplementedDigestAuthOptionError', 'UnimplementedHmacDigestAuthOptionError', 'debuglevel', 'RETRIES'] # The httplib debug level, set to a non-zero value to get debug output debuglevel = 0 # A request will be tried 'RETRIES' times if it fails at the socket/connection level. RETRIES = 2 # All exceptions raised here derive from HttpLib2Error class HttpLib2Error(Exception): pass # Some exceptions can be caught and optionally # be turned back into responses. class HttpLib2ErrorWithResponse(HttpLib2Error): def __init__(self, desc, response, content): self.response = response self.content = content HttpLib2Error.__init__(self, desc) class RedirectMissingLocation(HttpLib2ErrorWithResponse): pass class RedirectLimit(HttpLib2ErrorWithResponse): pass class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass class MalformedHeader(HttpLib2Error): pass class RelativeURIError(HttpLib2Error): pass class ServerNotFoundError(HttpLib2Error): pass class CertificateValidationUnsupportedInPython31(HttpLib2Error): pass # Open Items: # ----------- # Proxy support # Are we removing the cached content too soon on PUT (only delete on 200 Maybe?) # Pluggable cache storage (supports storing the cache in # flat files by default. We need a plug-in architecture # that can support Berkeley DB and Squid) # == Known Issues == # Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator. # Does not handle Cache-Control: max-stale # Does not use Age: headers when calculating cache freshness. # The number of redirections to follow before giving up. # Note that only GET redirects are automatically followed. # Will also honor 301 requests by saving that info and never # requesting that URI again. DEFAULT_MAX_REDIRECTS = 5 # Which headers are hop-by-hop headers by default HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade'] # Default CA certificates file bundled with httplib2. 
CA_CERTS = os.path.join(
    os.path.dirname(os.path.abspath(__file__)), "cacerts.txt")

def _get_end2end_headers(response):
    hopbyhop = list(HOP_BY_HOP)
    hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')])
    return [header for header in list(response.keys()) if header not in hopbyhop]

URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")

def parse_uri(uri):
    """Parses a URI using the regex given in Appendix B of RFC 3986.

        (scheme, authority, path, query, fragment) = parse_uri(uri)
    """
    groups = URI.match(uri).groups()
    return (groups[1], groups[3], groups[4], groups[6], groups[8])

def urlnorm(uri):
    (scheme, authority, path, query, fragment) = parse_uri(uri)
    if not scheme or not authority:
        raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri)
    authority = authority.lower()
    scheme = scheme.lower()
    if not path:
        path = "/"
    # Could do syntax based normalization of the URI before
    # computing the digest. See Section 6.2.2 of Std 66.
    request_uri = query and "?".join([path, query]) or path
    scheme = scheme.lower()
    defrag_uri = scheme + "://" + authority + request_uri
    return scheme, authority, request_uri, defrag_uri

# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/)
re_url_scheme = re.compile(br'^\w+://')
re_url_scheme_s = re.compile(r'^\w+://')
re_slash = re.compile(br'[?/:|]+')

def safename(filename):
    """Return a filename suitable for the cache.

    Strips dangerous and common characters to create a filename we
    can use to store the cache in.
    """
    try:
        if re_url_scheme_s.match(filename):
            if isinstance(filename,bytes):
                filename = filename.decode('utf-8')
                filename = filename.encode('idna')
            else:
                filename = filename.encode('idna')
    except UnicodeError:
        pass
    if isinstance(filename,str):
        filename=filename.encode('utf-8')
    filemd5 = _md5(filename).hexdigest().encode('utf-8')
    filename = re_url_scheme.sub(b"", filename)
    filename = re_slash.sub(b",", filename)

    # limit length of filename
    if len(filename)>200:
        filename=filename[:200]
    return b",".join((filename, filemd5)).decode('utf-8')

NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+')
def _normalize_headers(headers):
    return dict([ (key.lower(), NORMALIZE_SPACE.sub(' ', value).strip())  for (key, value) in headers.items()])

def _parse_cache_control(headers):
    retval = {}
    if 'cache-control' in headers:
        parts = headers['cache-control'].split(',')
        parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")]
        parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")]
        retval = dict(parts_with_args + parts_wo_args)
    return retval

# Whether to use a strict mode to parse WWW-Authenticate headers
# Might lead to bad results in case of ill-formed header value,
# so disabled by default, falling back to relaxed parsing.
# Set to true to turn on, useful for testing servers.
USE_WWW_AUTH_STRICT_PARSING = 0

# In regex below:
#    [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+             matches a "token" as defined by HTTP
#    "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?"    matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space
# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both:
#    \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?
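#
# A hedged illustration of what the header parsers in this module return
# (the cache-control value is invented for the example; the digest
# expectations mirror the test suite earlier in this archive, and
# _parse_www_authenticate is defined just below):
#
#   _parse_cache_control({'cache-control': 'no-cache, max-age=7200'})
#   # -> {'no-cache': 1, 'max-age': '7200'}
#
#   _parse_www_authenticate({'www-authenticate': 'Digest realm="testrealm@host.com", qop="auth,auth-int"'})
#   # -> {'digest': {'realm': 'testrealm@host.com', 'qop': 'auth,auth-int'}}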
# Whether to use a strict mode to parse WWW-Authenticate headers
# Might lead to bad results in case of ill-formed header value,
# so disabled by default, falling back to relaxed parsing.
# Set to true to turn on, useful for testing servers.
USE_WWW_AUTH_STRICT_PARSING = 0

# In regex below:
#    [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+             matches a "token" as defined by HTTP
#    "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?"    matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space
# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both:
#    \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?
WWW_AUTH_STRICT = re.compile(r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$")
WWW_AUTH_RELAXED = re.compile(r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?<!\")[^ \t\r\n,]+(?!\"))\"?)(.*)$")
UNQUOTE_PAIRS = re.compile(r'\\(.)')

def _parse_www_authenticate(headers, headername='www-authenticate'):
    """Returns a dictionary of dictionaries, one dict per auth_scheme."""
    retval = {}
    if headername in headers:
        try:
            authenticate = headers[headername].strip()
            www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
            while authenticate:
                # Break off the scheme at the beginning of the line
                if headername == 'authentication-info':
                    (auth_scheme, the_rest) = ('digest', authenticate)
                else:
                    (auth_scheme, the_rest) = authenticate.split(" ", 1)
                # Now loop over all the key value pairs that come after the scheme,
                # being careful not to roll into the next scheme
                match = www_auth.search(the_rest)
                auth_params = {}
                while match:
                    if match and len(match.groups()) == 3:
                        (key, value, the_rest) = match.groups()
                        auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value)
                    match = www_auth.search(the_rest)
                retval[auth_scheme.lower()] = auth_params
                authenticate = the_rest.strip()
        except ValueError:
            raise MalformedHeader("WWW-Authenticate")
    return retval

def _entry_disposition(response_headers, request_headers):
    """Determine freshness from the Date, Expires and Cache-Control headers.

    Returns one of:
        "FRESH"       - the cached entry may be returned without revalidation
        "STALE"       - the cached entry must be revalidated or refetched
        "TRANSPARENT" - the client asked to bypass the cache (no-cache)
    """
    retval = "STALE"
    cc = _parse_cache_control(request_headers)
    cc_response = _parse_cache_control(response_headers)

    if 'pragma' in request_headers and request_headers['pragma'].lower().find('no-cache') != -1:
        retval = "TRANSPARENT"
        if 'cache-control' not in request_headers:
            request_headers['cache-control'] = 'no-cache'
    elif 'no-cache' in cc:
        retval = "TRANSPARENT"
    elif 'no-cache' in cc_response:
        retval = "STALE"
    elif 'only-if-cached' in cc:
        retval = "FRESH"
    elif 'date' in response_headers:
        date = calendar.timegm(email.utils.parsedate_tz(response_headers['date']))
        now = time.time()
        current_age = max(0, now - date)
        if 'max-age' in cc_response:
            try:
                freshness_lifetime = int(cc_response['max-age'])
            except ValueError:
                freshness_lifetime = 0
        elif 'expires' in response_headers:
            expires = email.utils.parsedate_tz(response_headers['expires'])
            if None == expires:
                freshness_lifetime = 0
            else:
                freshness_lifetime = max(0, calendar.timegm(expires) - date)
        else:
            freshness_lifetime = 0
        if 'max-age' in cc:
            try:
                freshness_lifetime = int(cc['max-age'])
            except ValueError:
                freshness_lifetime = 0
        if 'min-fresh' in cc:
            try:
                min_fresh = int(cc['min-fresh'])
            except ValueError:
                min_fresh = 0
            current_age += min_fresh
        if freshness_lifetime > current_age:
            retval = "FRESH"
    return retval

def _decompressContent(response, new_content):
    content = new_content
    try:
        encoding = response.get('content-encoding', None)
        if encoding in ['gzip', 'deflate']:
            if encoding == 'gzip':
                content = gzip.GzipFile(fileobj=io.BytesIO(new_content)).read()
            if encoding == 'deflate':
                content = zlib.decompress(content)
            response['content-length'] = str(len(content))
            # Record the historical presence of the encoding in a way that won't interfere.
            response['-content-encoding'] = response['content-encoding']
            del response['content-encoding']
    except IOError:
        content = b""
        raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content)
    return content

def _bind_write_headers(msg):
    from email.header import Header
    def _write_headers(self):
        # Self refers to the Generator object
        for h, v in msg.items():
            print('%s:' % h, end=' ', file=self._fp)
            if isinstance(v, Header):
                print(v.encode(maxlinelen=self._maxheaderlen), file=self._fp)
            else:
                # Header's got lots of smarts, so use it.
                header = Header(v, maxlinelen=self._maxheaderlen, charset='utf-8', header_name=h)
                print(header.encode(), file=self._fp)
        # A blank line always separates headers from body
        print(file=self._fp)
    return _write_headers

def _updateCache(request_headers, response_headers, content, cache, cachekey):
    if cachekey:
        cc = _parse_cache_control(request_headers)
        cc_response = _parse_cache_control(response_headers)
        if 'no-store' in cc or 'no-store' in cc_response:
            cache.delete(cachekey)
        else:
            info = email.message.Message()
            for key, value in response_headers.items():
                if key not in ['status', 'content-encoding', 'transfer-encoding']:
                    info[key] = value

            # Add annotations to the cache to indicate what headers
            # are variant for this request.
            vary = response_headers.get('vary', None)
            if vary:
                vary_headers = vary.lower().replace(' ', '').split(',')
                for header in vary_headers:
                    key = '-varied-%s' % header
                    try:
                        info[key] = request_headers[header]
                    except KeyError:
                        pass

            status = response_headers.status
            if status == 304:
                status = 200

            status_header = 'status: %d\r\n' % status

            try:
                header_str = info.as_string()
            except UnicodeEncodeError:
                setattr(info, '_write_headers', _bind_write_headers(info))
                header_str = info.as_string()

            header_str = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", header_str)
            text = b"".join([status_header.encode('utf-8'), header_str.encode('utf-8'), content])

            cache.set(cachekey, text)
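# Illustrative sketch, not part of the library: a cache entry written by
# _updateCache is a status line, the end-to-end headers in RFC 822 form, a
# blank line, then the raw body, e.g.:
#
#   status: 200\r\n
#   content-type: text/html\r\n
#   -varied-accept: text/plain\r\n
#   \r\n
#   <body bytes>
#
# Http.request() later splits the entry on the first b'\r\n\r\n' to recover
# the headers and the content.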
class GoogleLoginAuthentication(Authentication):
    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        from urllib.parse import urlencode
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
        challenge = _parse_www_authenticate(response, 'www-authenticate')
        service = challenge['googlelogin'].get('service', 'xapi')
        # Blogger actually returns the service in the challenge
        # For the rest we guess based on the URI
        if service == 'xapi' and request_uri.find("calendar") > 0:
            service = "cl"
        # No point in guessing Base or Spreadsheet
        #elif request_uri.find("spreadsheets") > 0:
        #    service = "wise"

        auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers['user-agent'])
        resp, content = self.http.request("https://www.google.com/accounts/ClientLogin", method="POST", body=urlencode(auth), headers={'Content-Type': 'application/x-www-form-urlencoded'})
        lines = content.split('\n')
        d = dict([tuple(line.split("=", 1)) for line in lines if line])
        if resp.status == 403:
            self.Auth = ""
        else:
            self.Auth = d['Auth']

    def request(self, method, request_uri, headers, content):
        """Modify the request headers to add the appropriate
        Authorization header."""
        headers['authorization'] = 'GoogleLogin Auth=' + self.Auth

AUTH_SCHEME_CLASSES = {
    "basic": BasicAuthentication,
    "wsse": WsseAuthentication,
    "digest": DigestAuthentication,
    "hmacdigest": HmacDigestAuthentication,
    "googlelogin": GoogleLoginAuthentication
}

AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"]

class FileCache(object):
    """Uses a local directory as a store for cached files.
    Not really safe to use if multiple threads or processes are going to
    be running on the same cache.
    """
    def __init__(self, cache, safe=safename): # use safe=lambda x: _md5(x.encode('utf-8')).hexdigest() for the old behavior
        self.cache = cache
        self.safe = safe
        if not os.path.exists(cache):
            os.makedirs(self.cache)

    def get(self, key):
        retval = None
        cacheFullPath = os.path.join(self.cache, self.safe(key))
        try:
            f = open(cacheFullPath, "rb")
            retval = f.read()
            f.close()
        except IOError:
            pass
        return retval

    def set(self, key, value):
        cacheFullPath = os.path.join(self.cache, self.safe(key))
        f = open(cacheFullPath, "wb")
        f.write(value)
        f.close()

    def delete(self, key):
        cacheFullPath = os.path.join(self.cache, self.safe(key))
        if os.path.exists(cacheFullPath):
            os.remove(cacheFullPath)

class Credentials(object):
    def __init__(self):
        self.credentials = []

    def add(self, name, password, domain=""):
        self.credentials.append((domain.lower(), name, password))

    def clear(self):
        self.credentials = []

    def iter(self, domain):
        for (cdomain, name, password) in self.credentials:
            if cdomain == "" or domain == cdomain:
                yield (name, password)

class KeyCerts(Credentials):
    """Identical to Credentials except that
    name/password are mapped to key/cert."""
    pass

class ProxyInfo(object):
    """Collect information required to use a proxy."""
    def __init__(self, proxy_type, proxy_host, proxy_port, proxy_rdns=None, proxy_user=None, proxy_pass=None):
        """The parameter proxy_type must be set to one of socks.PROXY_TYPE_XXX
        constants. For example:

        p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, proxy_host='localhost', proxy_port=8000)
        """
        self.proxy_type, self.proxy_host, self.proxy_port, self.proxy_rdns, self.proxy_user, self.proxy_pass = proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass

    def astuple(self):
        return (self.proxy_type, self.proxy_host, self.proxy_port,
                self.proxy_rdns, self.proxy_user, self.proxy_pass)

    def isgood(self):
        return socks and (self.proxy_host is not None) and (self.proxy_port is not None)
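# Illustrative usage sketch, not part of the library. PROXY_TYPE_HTTP is the
# socks constant with value 3 (the same value proxy_info_from_url() below
# hard-codes); proxy.example.com:8080 is a placeholder:
#
#   pi = httplib2.ProxyInfo(proxy_type=3,  # socks.PROXY_TYPE_HTTP
#                           proxy_host='proxy.example.com',
#                           proxy_port=8080)
#   h = httplib2.Http(proxy_info=pi)
#
# When proxy_info is left at its default, each request consults the
# http_proxy/https_proxy environment variables instead, via
# proxy_info_from_environment below.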
""" if method not in ('http', 'https'): return env_var = method + '_proxy' url = os.environ.get(env_var, os.environ.get(env_var.upper())) if not url: return return proxy_info_from_url(url, method) def proxy_info_from_url(url, method='http'): """ Construct a ProxyInfo from a URL (such as http_proxy env var) """ url = urllib.parse.urlparse(url) username = None password = None port = None if '@' in url[1]: ident, host_port = url[1].split('@', 1) if ':' in ident: username, password = ident.split(':', 1) else: password = ident else: host_port = url[1] if ':' in host_port: host, port = host_port.split(':', 1) else: host = host_port if port: port = int(port) else: port = dict(https=443, http=80)[method] proxy_type = 3 # socks.PROXY_TYPE_HTTP return ProxyInfo( proxy_type = proxy_type, proxy_host = host, proxy_port = port, proxy_user = username or None, proxy_pass = password or None, ) class HTTPConnectionWithTimeout(http.client.HTTPConnection): """HTTPConnection subclass that supports timeouts HTTPConnection subclass that supports timeouts All timeouts are in seconds. If None is passed for timeout then Python's default timeout for sockets will be used. See for example the docs of socket.setdefaulttimeout(): http://docs.python.org/library/socket.html#socket.setdefaulttimeout """ def __init__(self, host, port=None, timeout=None, proxy_info=None): http.client.HTTPConnection.__init__(self, host, port=port, timeout=timeout) self.proxy_info = proxy_info class HTTPSConnectionWithTimeout(http.client.HTTPSConnection): """ This class allows communication via SSL. All timeouts are in seconds. If None is passed for timeout then Python's default timeout for sockets will be used. See for example the docs of socket.setdefaulttimeout(): http://docs.python.org/library/socket.html#socket.setdefaulttimeout """ def __init__(self, host, port=None, key_file=None, cert_file=None, timeout=None, proxy_info=None, ca_certs=None, disable_ssl_certificate_validation=False): self.proxy_info = proxy_info context = None if ca_certs is None: ca_certs = CA_CERTS if (cert_file or ca_certs) and not disable_ssl_certificate_validation: if not hasattr(ssl, 'SSLContext'): raise CertificateValidationUnsupportedInPython31() context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) context.verify_mode = ssl.CERT_REQUIRED if cert_file: context.load_cert_chain(cert_file, key_file) if ca_certs: context.load_verify_locations(ca_certs) http.client.HTTPSConnection.__init__( self, host, port=port, key_file=key_file, cert_file=cert_file, timeout=timeout, context=context, check_hostname=True) SCHEME_TO_CONNECTION = { 'http': HTTPConnectionWithTimeout, 'https': HTTPSConnectionWithTimeout, } class Http(object): """An HTTP client that handles: - all methods - caching - ETags - compression, - HTTPS - Basic - Digest - WSSE and more. """ def __init__(self, cache=None, timeout=None, proxy_info=proxy_info_from_environment, ca_certs=None, disable_ssl_certificate_validation=False): """If 'cache' is a string then it is used as a directory name for a disk cache. Otherwise it must be an object that supports the same interface as FileCache. All timeouts are in seconds. If None is passed for timeout then Python's default timeout for sockets will be used. See for example the docs of socket.setdefaulttimeout(): http://docs.python.org/library/socket.html#socket.setdefaulttimeout `proxy_info` may be: - a callable that takes the http scheme ('http' or 'https') and returns a ProxyInfo instance per request. By default, uses proxy_info_from_environment. 
class Http(object):
    """An HTTP client that handles:

    - all methods
    - caching
    - ETags
    - compression
    - HTTPS
    - Basic
    - Digest
    - WSSE

    and more.
    """
    def __init__(self, cache=None, timeout=None,
                 proxy_info=proxy_info_from_environment,
                 ca_certs=None, disable_ssl_certificate_validation=False):
        """If 'cache' is a string then it is used as a directory name for
        a disk cache. Otherwise it must be an object that supports the
        same interface as FileCache.

        All timeouts are in seconds. If None is passed for timeout
        then Python's default timeout for sockets will be used. See
        for example the docs of socket.setdefaulttimeout():
        http://docs.python.org/library/socket.html#socket.setdefaulttimeout

        `proxy_info` may be:
          - a callable that takes the http scheme ('http' or 'https') and
            returns a ProxyInfo instance per request. By default, uses
            proxy_info_from_environment.
          - a ProxyInfo instance (static proxy config).
          - None (proxy disabled).

        ca_certs is the path of a file containing root CA certificates for SSL
        server certificate validation. By default, a CA cert file bundled with
        httplib2 is used.

        If disable_ssl_certificate_validation is true, SSL cert validation
        will not be performed.
        """
        self.proxy_info = proxy_info
        self.ca_certs = ca_certs
        self.disable_ssl_certificate_validation = \
                disable_ssl_certificate_validation

        # Map domain name to an httplib connection
        self.connections = {}
        # The location of the cache, for now a directory
        # where cached responses are held.
        if cache and isinstance(cache, str):
            self.cache = FileCache(cache)
        else:
            self.cache = cache

        # Name/password
        self.credentials = Credentials()

        # Key/cert
        self.certificates = KeyCerts()

        # authorization objects
        self.authorizations = []

        # If set to False then no redirects are followed, even safe ones.
        self.follow_redirects = True

        # Which HTTP methods do we apply optimistic concurrency to, i.e.
        # which methods get an "if-match:" etag header added to them.
        self.optimistic_concurrency_methods = ["PUT", "PATCH"]

        # If 'follow_redirects' is True, and this is set to True then
        # all redirects are followed, including unsafe ones.
        self.follow_all_redirects = False

        self.ignore_etag = False

        self.force_exception_to_status_code = False

        self.timeout = timeout

        # Keep Authorization: headers on a redirect.
        self.forward_authorization_headers = False

    def __getstate__(self):
        state_dict = copy.copy(self.__dict__)
        # In case request is augmented by some foreign object such as
        # credentials which handle auth
        if 'request' in state_dict:
            del state_dict['request']
        if 'connections' in state_dict:
            del state_dict['connections']
        return state_dict

    def __setstate__(self, state):
        self.__dict__.update(state)
        self.connections = {}

    def _auth_from_challenge(self, host, request_uri, headers, response, content):
        """A generator that creates Authorization objects
           that can be applied to requests.
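           For example, a 401 response carrying
           'WWW-Authenticate: Basic realm="me"' yields one BasicAuthentication
           object for every credential registered for this host; challenge
           schemes are considered in AUTH_SCHEME_ORDER. (Illustrative summary
           of the loop below, not an exhaustive contract.)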
""" challenges = _parse_www_authenticate(response, 'www-authenticate') for cred in self.credentials.iter(host): for scheme in AUTH_SCHEME_ORDER: if scheme in challenges: yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self) def add_credentials(self, name, password, domain=""): """Add a name and password that will be used any time a request requires authentication.""" self.credentials.add(name, password, domain) def add_certificate(self, key, cert, domain): """Add a key and cert that will be used any time a request requires authentication.""" self.certificates.add(key, cert, domain) def clear_credentials(self): """Remove all the names and passwords that are used for authentication""" self.credentials.clear() self.authorizations = [] def _conn_request(self, conn, request_uri, method, body, headers): for i in range(RETRIES): try: if conn.sock is None: conn.connect() conn.request(method, request_uri, body, headers) except socket.timeout: conn.close() raise except socket.gaierror: conn.close() raise ServerNotFoundError("Unable to find the server at %s" % conn.host) except socket.error as e: errno_ = (e.args[0].errno if isinstance(e.args[0], socket.error) else e.errno) if errno_ == errno.ECONNREFUSED: # Connection refused raise except http.client.HTTPException: if conn.sock is None: if i < RETRIES-1: conn.close() conn.connect() continue else: conn.close() raise if i < RETRIES-1: conn.close() conn.connect() continue # Just because the server closed the connection doesn't apparently mean # that the server didn't send a response. pass try: response = conn.getresponse() except socket.timeout: raise except (socket.error, http.client.HTTPException): conn.close() if i == 0: conn.close() conn.connect() continue else: raise else: content = b"" if method == "HEAD": conn.close() else: content = response.read() response = Response(response) if method != "HEAD": content = _decompressContent(response, content) break return (response, content) def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey): """Do the actual request using the connection object and also follow one level of redirects if necessary""" auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)] auth = auths and sorted(auths)[0][1] or None if auth: auth.request(method, request_uri, headers, body) (response, content) = self._conn_request(conn, request_uri, method, body, headers) if auth: if auth.response(response, body): auth.request(method, request_uri, headers, body) (response, content) = self._conn_request(conn, request_uri, method, body, headers ) response._stale_digest = 1 if response.status == 401: for authorization in self._auth_from_challenge(host, request_uri, headers, response, content): authorization.request(method, request_uri, headers, body) (response, content) = self._conn_request(conn, request_uri, method, body, headers, ) if response.status != 401: self.authorizations.append(authorization) authorization.response(response, body) break if (self.follow_all_redirects or (method in ["GET", "HEAD"]) or response.status == 303): if self.follow_redirects and response.status in [300, 301, 302, 303, 307]: # Pick out the location header and basically start from the beginning # remembering first to strip the ETag header and decrement our 'depth' if redirections: if 'location' not in response and response.status != 300: raise RedirectMissingLocation( _("Redirected but the response is missing a Location: 
header."), response, content) # Fix-up relative redirects (which violate an RFC 2616 MUST) if 'location' in response: location = response['location'] (scheme, authority, path, query, fragment) = parse_uri(location) if authority == None: response['location'] = urllib.parse.urljoin(absolute_uri, location) if response.status == 301 and method in ["GET", "HEAD"]: response['-x-permanent-redirect-url'] = response['location'] if 'content-location' not in response: response['content-location'] = absolute_uri _updateCache(headers, response, content, self.cache, cachekey) if 'if-none-match' in headers: del headers['if-none-match'] if 'if-modified-since' in headers: del headers['if-modified-since'] if 'authorization' in headers and not self.forward_authorization_headers: del headers['authorization'] if 'location' in response: location = response['location'] old_response = copy.deepcopy(response) if 'content-location' not in old_response: old_response['content-location'] = absolute_uri redirect_method = method if response.status in [302, 303]: redirect_method = "GET" body = None (response, content) = self.request(location, redirect_method, body=body, headers = headers, redirections = redirections - 1) response.previous = old_response else: raise RedirectLimit("Redirected more times than redirection_limit allows.", response, content) elif response.status in [200, 203] and method in ["GET", "HEAD"]: # Don't cache 206's since we aren't going to handle byte range requests if 'content-location' not in response: response['content-location'] = absolute_uri _updateCache(headers, response, content, self.cache, cachekey) return (response, content) def _normalize_headers(self, headers): return _normalize_headers(headers) # Need to catch and rebrand some exceptions # Then need to optionally turn all exceptions into status codes # including all socket.* and httplib.* exceptions. def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None): """ Performs a single HTTP request. The 'uri' is the URI of the HTTP resource and can begin with either 'http' or 'https'. The value of 'uri' must be an absolute URI. The 'method' is the HTTP method to perform, such as GET, POST, DELETE, etc. There is no restriction on the methods allowed. The 'body' is the entity body to be sent with the request. It is a string object. Any extra headers that are to be sent with the request should be provided in the 'headers' dictionary. The maximum number of redirect to follow before raising an exception is 'redirections. The default is 5. The return value is a tuple of (response, content), the first being and instance of the 'Response' class, the second being a string that contains the response entity body. 
""" try: if headers is None: headers = {} else: headers = self._normalize_headers(headers) if 'user-agent' not in headers: headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__ uri = iri2uri(uri) (scheme, authority, request_uri, defrag_uri) = urlnorm(uri) domain_port = authority.split(":")[0:2] if len(domain_port) == 2 and domain_port[1] == '443' and scheme == 'http': scheme = 'https' authority = domain_port[0] conn_key = scheme+":"+authority if conn_key in self.connections: conn = self.connections[conn_key] else: if not connection_type: connection_type = SCHEME_TO_CONNECTION[scheme] certs = list(self.certificates.iter(authority)) if issubclass(connection_type, HTTPSConnectionWithTimeout): if certs: conn = self.connections[conn_key] = connection_type( authority, key_file=certs[0][0], cert_file=certs[0][1], timeout=self.timeout, proxy_info=self.proxy_info, ca_certs=self.ca_certs, disable_ssl_certificate_validation= self.disable_ssl_certificate_validation) else: conn = self.connections[conn_key] = connection_type( authority, timeout=self.timeout, proxy_info=self.proxy_info, ca_certs=self.ca_certs, disable_ssl_certificate_validation= self.disable_ssl_certificate_validation) else: conn = self.connections[conn_key] = connection_type( authority, timeout=self.timeout, proxy_info=self.proxy_info) conn.set_debuglevel(debuglevel) if 'range' not in headers and 'accept-encoding' not in headers: headers['accept-encoding'] = 'gzip, deflate' info = email.message.Message() cached_value = None if self.cache: cachekey = defrag_uri cached_value = self.cache.get(cachekey) if cached_value: try: info, content = cached_value.split(b'\r\n\r\n', 1) info = email.message_from_bytes(info) for k, v in info.items(): if v.startswith('=?') and v.endswith('?='): info.replace_header(k, str(*email.header.decode_header(v)[0])) except (IndexError, ValueError): self.cache.delete(cachekey) cachekey = None cached_value = None else: cachekey = None if method in self.optimistic_concurrency_methods and self.cache and 'etag' in info and not self.ignore_etag and 'if-match' not in headers: # http://www.w3.org/1999/04/Editing/ headers['if-match'] = info['etag'] if method not in ["GET", "HEAD"] and self.cache and cachekey: # RFC 2616 Section 13.10 self.cache.delete(cachekey) # Check the vary header in the cache to see if this request # matches what varies in the cache. if method in ['GET', 'HEAD'] and 'vary' in info: vary = info['vary'] vary_headers = vary.lower().replace(' ', '').split(',') for header in vary_headers: key = '-varied-%s' % header value = info[key] if headers.get(header, None) != value: cached_value = None break if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers: if '-x-permanent-redirect-url' in info: # Should cached permanent redirects be counted in our redirection count? For now, yes. if redirections <= 0: raise RedirectLimit("Redirected more times than redirection_limit allows.", {}, "") (response, new_content) = self.request(info['-x-permanent-redirect-url'], "GET", headers = headers, redirections = redirections - 1) response.previous = Response(info) response.previous.fromcache = True else: # Determine our course of action: # Is the cached entry fresh or stale? # Has the client requested a non-cached response? # # There seems to be three possible answers: # 1. [FRESH] Return the cache entry w/o doing a GET # 2. [STALE] Do the GET (but add in cache validators if available) # 3. 
[TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request
                    entry_disposition = _entry_disposition(info, headers)

                    if entry_disposition == "FRESH":
                        if not cached_value:
                            info['status'] = '504'
                            content = b""
                        response = Response(info)
                        if cached_value:
                            response.fromcache = True
                        return (response, content)

                    if entry_disposition == "STALE":
                        if 'etag' in info and not self.ignore_etag and not 'if-none-match' in headers:
                            headers['if-none-match'] = info['etag']
                        if 'last-modified' in info and not 'last-modified' in headers:
                            headers['if-modified-since'] = info['last-modified']
                    elif entry_disposition == "TRANSPARENT":
                        pass

                    (response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)

                if response.status == 304 and method == "GET":
                    # Rewrite the cache entry with the new end-to-end headers
                    # Take all headers that are in response
                    # and overwrite their values in info.
                    # unless they are hop-by-hop, or are listed in the connection header.
                    for key in _get_end2end_headers(response):
                        info[key] = response[key]
                    merged_response = Response(info)
                    if hasattr(response, "_stale_digest"):
                        merged_response._stale_digest = response._stale_digest
                    _updateCache(headers, merged_response, content, self.cache, cachekey)
                    response = merged_response
                    response.status = 200
                    response.fromcache = True
                elif response.status == 200:
                    content = new_content
                else:
                    self.cache.delete(cachekey)
                    content = new_content
            else:
                cc = _parse_cache_control(headers)
                if 'only-if-cached' in cc:
                    info['status'] = '504'
                    response = Response(info)
                    content = b""
                else:
                    (response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
        except Exception as e:
            if self.force_exception_to_status_code:
                if isinstance(e, HttpLib2ErrorWithResponse):
                    response = e.response
                    content = e.content
                    response.status = 500
                    response.reason = str(e)
                elif isinstance(e, socket.timeout):
                    content = b"Request Timeout"
                    response = Response({
                        "content-type": "text/plain",
                        "status": "408",
                        "content-length": len(content)
                    })
                    response.reason = "Request Timeout"
                else:
                    content = str(e).encode('utf-8')
                    response = Response({
                        "content-type": "text/plain",
                        "status": "400",
                        "content-length": len(content)
                    })
                    response.reason = "Bad Request"
            else:
                raise

        return (response, content)

class Response(dict):
    """An object more like email.message than httplib.HTTPResponse."""

    """Is this response from our local cache"""
    fromcache = False

    """HTTP protocol version used by server.

    10 for HTTP/1.0, 11 for HTTP/1.1.
    """
    version = 11

    """Status code returned by server."""
    status = 200

    """Reason phrase returned by server."""
    reason = "Ok"

    previous = None

    def __init__(self, info):
        # info is either an email.message or
        # an httplib.HTTPResponse object.
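        # Illustrative sketch, not part of the library: whatever the source,
        # the result is a dict of lower-cased header names plus attributes,
        # so both styles of access work on the same object:
        #
        #   resp = Response({'status': '301', 'Content-Type': 'text/html'})
        #   resp.status          # -> 301
        #   resp['content-type'] # -> 'text/html'
        #   resp.previous        # -> the Response for a followed redirect, or None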
if isinstance(info, http.client.HTTPResponse): for key, value in info.getheaders(): key = key.lower() prev = self.get(key) if prev is not None: value = ', '.join((prev, value)) self[key] = value self.status = info.status self['status'] = str(self.status) self.reason = info.reason self.version = info.version elif isinstance(info, email.message.Message): for key, value in list(info.items()): self[key.lower()] = value self.status = int(self['status']) else: for key, value in info.items(): self[key.lower()] = value self.status = int(self.get('status', self.status)) def __getattr__(self, name): if name == 'dict': return self else: raise AttributeError(name) httplib2-0.8/python3/httplib2/test/0000750017135500116100000000000012115724726016720 5ustar jcgregorioenghttplib2-0.8/python3/httplib2/test/other_cacerts.txt0000640017135500116100000000672512115724726022321 0ustar jcgregorioeng# Certifcate Authority certificates for validating SSL connections. # # This file contains PEM format certificates generated from # http://mxr.mozilla.org/seamonkey/source/security/nss/lib/ckfw/builtins/certdata.txt # # ***** BEGIN LICENSE BLOCK ***** # Version: MPL 1.1/GPL 2.0/LGPL 2.1 # # The contents of this file are subject to the Mozilla Public License Version # 1.1 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # http://www.mozilla.org/MPL/ # # Software distributed under the License is distributed on an "AS IS" basis, # WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License # for the specific language governing rights and limitations under the # License. # # The Original Code is the Netscape security libraries. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1994-2000 # the Initial Developer. All Rights Reserved. # # Contributor(s): # # Alternatively, the contents of this file may be used under the terms of # either the GNU General Public License Version 2 or later (the "GPL"), or # the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), # in which case the provisions of the GPL or the LGPL are applicable instead # of those above. If you wish to allow use of your version of this file only # under the terms of either the GPL or the LGPL, and not to allow others to # use your version of this file under the terms of the MPL, indicate your # decision by deleting the provisions above and replace them with the notice # and other provisions required by the GPL or the LGPL. If you do not delete # the provisions above, a recipient may use your version of this file under # the terms of any one of the MPL, the GPL or the LGPL. 
# # ***** END LICENSE BLOCK ***** Comodo CA Limited, CN=Trusted Certificate Services ================================================== -----BEGIN CERTIFICATE----- MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0 aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTla MH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAO BgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYD VQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0B AQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWW fnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMt TGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7IL fhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW 1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7 kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0G A1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYD VR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21v ZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRo dHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMu Y3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/ HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32 pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxIS jBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+ xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi -----END CERTIFICATE----- httplib2-0.8/python3/httplib2/iri2uri.py.rej0000640017135500116100000000215212115724726020460 0ustar jcgregorioeng--- iri2uri.py +++ iri2uri.py @@ -28,26 +28,26 @@ import urllib.parse # / %xD0000-DFFFD / %xE1000-EFFFD escape_range = [ - (0xA0, 0xD7FF ), - (0xE000, 0xF8FF ), - (0xF900, 0xFDCF ), - (0xFDF0, 0xFFEF), - (0x10000, 0x1FFFD ), - (0x20000, 0x2FFFD ), - (0x30000, 0x3FFFD), - (0x40000, 0x4FFFD ), - (0x50000, 0x5FFFD ), - (0x60000, 0x6FFFD), - (0x70000, 0x7FFFD ), - (0x80000, 0x8FFFD ), - (0x90000, 0x9FFFD), - (0xA0000, 0xAFFFD ), - (0xB0000, 0xBFFFD ), - (0xC0000, 0xCFFFD), - (0xD0000, 0xDFFFD ), - (0xE1000, 0xEFFFD), - (0xF0000, 0xFFFFD ), - (0x100000, 0x10FFFD) + (0xA0, 0xD7FF), + (0xE000, 0xF8FF), + (0xF900, 0xFDCF), + (0xFDF0, 0xFFEF), + (0x10000, 0x1FFFD), + (0x20000, 0x2FFFD), + (0x30000, 0x3FFFD), + (0x40000, 0x4FFFD), + (0x50000, 0x5FFFD), + (0x60000, 0x6FFFD), + (0x70000, 0x7FFFD), + (0x80000, 0x8FFFD), + (0x90000, 0x9FFFD), + (0xA0000, 0xAFFFD), + (0xB0000, 0xBFFFD), + (0xC0000, 0xCFFFD), + (0xD0000, 0xDFFFD), + (0xE1000, 0xEFFFD), + (0xF0000, 0xFFFFD), + (0x100000, 0x10FFFD), ] def encode(c): httplib2-0.8/python3/httplib2/__pycache__/0000750017135500116100000000000012115724726020151 5ustar jcgregorioeng