pax_global_header00006660000000000000000000000064145070245140014514gustar00rootroot0000000000000052 comment=c4a256f07fc8bc16e4716dfc29152caa5604285f geventhttpclient-2.0.11/000077500000000000000000000000001450702451400151645ustar00rootroot00000000000000geventhttpclient-2.0.11/.github/000077500000000000000000000000001450702451400165245ustar00rootroot00000000000000geventhttpclient-2.0.11/.github/workflows/000077500000000000000000000000001450702451400205615ustar00rootroot00000000000000geventhttpclient-2.0.11/.github/workflows/publish.yml000066400000000000000000000031031450702451400227470ustar00rootroot00000000000000name: Publish on: workflow_dispatch jobs: source: runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v2 with: submodules: recursive - uses: actions/setup-python@v2 - run: | python -m pip install --upgrade pip pip install setuptools wheel python setup.py sdist - uses: pypa/gh-action-pypi-publish@release/v1 with: user: __token__ password: ${{ secrets.PYPI_API_TOKEN }} skip_existing: true build: name: Build wheels on ${{ matrix.os }} runs-on: ${{ matrix.os }} needs: source strategy: matrix: os: [ubuntu-22.04, windows-2022, macos-12] steps: - uses: actions/checkout@v2 with: submodules: recursive - uses: actions/setup-python@v2 - uses: docker/setup-qemu-action@v1 if: ${{ matrix.os == 'ubuntu-22.04' }} name: Set up QEMU - run: python -m pip install cibuildwheel - run: python -m cibuildwheel --output-dir wheelhouse env: CIBW_ARCHS_MACOS: "x86_64 universal2 arm64" CIBW_ARCHS_LINUX: "auto aarch64 ppc64le" - uses: actions/upload-artifact@v2 with: path: wheelhouse/*.whl if-no-files-found: error publish: runs-on: ubuntu-22.04 needs: build steps: - uses: actions/download-artifact@v2 - uses: pypa/gh-action-pypi-publish@release/v1 with: user: __token__ packages_dir: artifact password: ${{ secrets.PYPI_API_TOKEN }} skip_existing: true geventhttpclient-2.0.11/.github/workflows/test.yml000066400000000000000000000034731450702451400222720ustar00rootroot00000000000000name: 
Tests on: [push, pull_request] jobs: tests: name: ${{ matrix.name }} runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: include: - {name: Windows, python: '3.9', os: windows-latest, tox: py38} - {name: Mac, python: '3.9', os: macos-latest, tox: py39} - {name: '3.12', python: '3.12', os: ubuntu-latest, tox: py312} - {name: '3.11', python: '3.11', os: ubuntu-latest, tox: py311} - {name: '3.10', python: '3.10', os: ubuntu-latest, tox: py310} - {name: '3.9', python: '3.9', os: ubuntu-latest, tox: py39} - {name: '3.8', python: '3.8', os: ubuntu-latest, tox: py38} - {name: '3.7', python: '3.7', os: ubuntu-20.04, tox: py37} - {name: '3.6', python: '3.6', os: ubuntu-20.04, tox: py36} # Python 2.7 support has been dropped from Github # - {name: '2.7', python: '2.7', os: ubuntu-20.04, tox: py27} steps: - uses: actions/checkout@v3 with: submodules: recursive - uses: actions/setup-python@v3 with: python-version: ${{ matrix.python }} - name: Get pip cache dir id: pip-cache run: echo "::set-output name=dir::$(pip cache dir)" - name: Cache pip uses: actions/cache@v3 with: path: ${{ steps.pip-cache.outputs.dir }} key: pip|${{ runner.os }}|${{ matrix.python }}|${{ hashFiles('setup.py') }} - name: set full Python version in PY env var # See https://pre-commit.com/#github-actions-example run: echo "PY=$(python -VV | sha256sum | cut -d' ' -f1)" >> $GITHUB_ENV - run: python -m pip install tox - run: python -m tox -e ${{ matrix.tox }} if: ${{ matrix.os != 'windows-latest'}} # skip tests on windows because they are hanging geventhttpclient-2.0.11/.gitignore000066400000000000000000000002021450702451400171460ustar00rootroot00000000000000*.o *.so *.pyc *.egg-info .Python build/ lib/ include/ bin/ .project .pydevproject .idea .settings/ dist/ .tox .cache wheelhouse geventhttpclient-2.0.11/.gitmodules000066400000000000000000000001141450702451400173350ustar00rootroot00000000000000[submodule "llhttp"] path = llhttp url = https://github.com/nodejs/llhttp 
geventhttpclient-2.0.11/LICENSE.txt000066400000000000000000000026321450702451400170120ustar00rootroot00000000000000Based on llhttp, copyright Fedor Indutny, 2018. Python extension is copyright Antonin Amand , licensed under the same terms. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Note: Previous versions of gevenhttpclient used http_parser.c, which in turn was based on src/http/ngx_http_parse.c from NGINX, copyright Igor Sysoev, Joyent, Inc. and other Node contributors. 
See http://github.com/joyent/http-parser for more information geventhttpclient-2.0.11/MANIFEST.in000066400000000000000000000004371450702451400167260ustar00rootroot00000000000000include src/geventhttpclient/*.py include ext/*.c include ext/*.h recursive-include llhttp *.h *.c LICENSE-MIT recursive-include src/geventhttpclient/tests * global-exclude __pycache__ global-exclude *.py[co] include LICENSE-MIT include README.mdown include CHANGELOG include pytest.ini geventhttpclient-2.0.11/Makefile000066400000000000000000000004621450702451400166260ustar00rootroot00000000000000build_ext: python setup.py build_ext --inplace test: pytest src/geventhttpclient/tests _develop: python setup.py develop develop: _develop build_ext clean: rm -rf build find . -name '*.pyc' -delete dist: python setup.py sdist upload release: cat release.md .PHONY: develop dist release test geventhttpclient-2.0.11/README.mdown000066400000000000000000000113061450702451400171700ustar00rootroot00000000000000# geventhttpclient A high performance, concurrent HTTP client library for python using [gevent](http://gevent.org). `gevent.httplib` support was removed in [gevent 1.0](https://github.com/surfly/gevent/commit/b45b83b1bc4de14e3c4859362825044b8e3df7d6 ), **geventhttpclient** now provides that missing functionality. **geventhttpclient** uses a fast [http parser](https://github.com/nodejs/llhttp), written in C. **geventhttpclient** has been specifically designed for high concurrency, streaming and support HTTP 1.1 persistent connections. More generally it is designed for efficiently pulling from REST APIs and streaming APIs like Twitter's. Safe SSL support is provided by default. **geventhttpclient** depends on the certifi CA Bundle. This is the same CA Bundle which ships with the Requests codebase, and is derived from Mozilla Firefox's canonical set. As of version 1.5, only Python 3.6+ is fully supported (with prebuilt wheels), but Python 2.7 and 3.5 *should* work too. 
Use of SSL/TLS with python 2.7.9 is not recommended and may be broken. A simple example: ```python #!/usr/bin/python from geventhttpclient import HTTPClient from geventhttpclient.url import URL url = URL('http://gevent.org/') http = HTTPClient(url.host) # issue a get request response = http.get(url.request_uri) # read status_code response.status_code # read response body body = response.read() # close connections http.close() ``` ## httplib compatibility and monkey patch **geventhttpclient.httplib** module contains classes for drop in replacement of httplib connection and response objects. If you use httplib directly you can replace the **httplib** imports by **geventhttpclient.httplib**. ```python # from httplib import HTTPConnection from geventhttpclient.httplib import HTTPConnection ``` If you use **httplib2**, **urllib** or **urllib2**; you can patch **httplib** to use the wrappers from **geventhttpclient**. For **httplib2**, make sure you patch before you import or the *super* calls will fail. ```python import geventhttpclient.httplib geventhttpclient.httplib.patch() import httplib2 ``` ## High Concurrency HTTPClient has connection pool built in and is greenlet safe by design. You can use the same instance among several greenlets. ```python #!/usr/bin/env python import gevent.pool import json from geventhttpclient import HTTPClient from geventhttpclient.url import URL # go to http://developers.facebook.com/tools/explorer and copy the access token TOKEN = '' url = URL('https://graph.facebook.com/me/friends') url['access_token'] = TOKEN # setting the concurrency to 10 allow to create 10 connections and # reuse them. http = HTTPClient.from_url(url, concurrency=10) response = http.get(url.request_uri) assert response.status_code == 200 # response comply to the read protocol. It passes the stream to # the json parser as it's being read. 
data = json.load(response)['data'] def print_friend_username(http, friend_id): friend_url = URL('/' + str(friend_id)) friend_url['access_token'] = TOKEN # the greenlet will block until a connection is available response = http.get(friend_url.request_uri) assert response.status_code == 200 friend = json.load(response) if friend.has_key('username'): print '%s: %s' % (friend['username'], friend['name']) else: print '%s has no username.' % friend['name'] # allow to run 20 greenlet at a time, this is more than concurrency # of the http client but isn't a problem since the client has its own # connection pool. pool = gevent.pool.Pool(20) for item in data: friend_id = item['id'] pool.spawn(print_friend_username, http, friend_id) pool.join() http.close() ``` ## Streaming **geventhttpclient** supports streaming. Response objects have a read(N) and readline() method that read the stream incrementally. See *src/examples/twitter_streaming.py* for pulling twitter stream API. Here is an example on how to download a big file chunk by chunk to save memory: ```python #!/usr/bin/env python from geventhttpclient import HTTPClient, URL url = URL('http://127.0.0.1:80/100.dat') http = HTTPClient.from_url(url) response = http.get(url.query_string) assert response.status_code == 200 CHUNK_SIZE = 1024 * 16 # 16KB with open('/tmp/100.dat', 'w') as f: data = response.read(CHUNK_SIZE) while data: f.write(data) data = response.read(CHUNK_SIZE) ``` ## Benchmarks The benchmark does 1000 get requests against a local nginx server with a concurrency of 10. See *benchmarks* folder. 
- httplib2 with geventhttpclient monkey patch (*benchmarks/httplib2_patched.py*): **~2500 req/s** - geventhttpclient.HTTPClient (*benchmarks/httpclient.py*): **~4000 req/s** geventhttpclient-2.0.11/benchmarks/000077500000000000000000000000001450702451400173015ustar00rootroot00000000000000geventhttpclient-2.0.11/benchmarks/benchmark.py000066400000000000000000000055541450702451400216160ustar00rootroot00000000000000import gevent.monkey gevent.monkey.patch_all() import argparse import time import requests import requests.adapters import gevent.pool import geventhttpclient.useragent import urllib3 class Benchmark: def __init__(self, url: str, concurrency: int, rounds: int, round_size: int): self.url = url self.concurrency = concurrency self.rounds = rounds self.round_size = round_size self.init_client() def init_client(self): pass def request(self): pass def start(self): results = [] for round in range(self.rounds): self.init_client() now = time.time() pool = gevent.pool.Pool(size=self.concurrency) for _ in range(self.round_size): pool.spawn(self.request) pool.join() delta = time.time() - now rps = self.round_size / delta results.append(rps) print("round: {}, rps: {}".format(round, rps)) print("total rps:", sum(results) / len(results)) class GeventHTTPClientBenchmark(Benchmark): client: geventhttpclient.useragent.UserAgent def init_client(self): self.client = geventhttpclient.useragent.UserAgent(concurrency=self.concurrency) def request(self): self.client.urlopen(self.url).content class RequestsBenchmark(Benchmark): client: requests.Session def init_client(self): self.client = requests.Session() adapter = requests.adapters.HTTPAdapter(pool_maxsize=self.concurrency, pool_block=True) self.client.mount("https://", adapter) self.client.mount("http://", adapter) def request(self): self.client.get(self.url) class UrllibBenchmark(Benchmark): client: urllib3.PoolManager def init_client(self): self.client = urllib3.PoolManager(maxsize=self.concurrency, block=True) def 
request(self): self.client.request("GET", self.url) if __name__ == "__main__": available_benchmarks = { "gevent": GeventHTTPClientBenchmark, "requests": RequestsBenchmark, "urllib": UrllibBenchmark, } parser = argparse.ArgumentParser() parser.add_argument(dest="url") parser.add_argument("--concurrency", type=int, default=10) parser.add_argument("--rounds", type=int, default=10) parser.add_argument("--round-size", type=int, default=10000) parser.add_argument( "-b", "--benchmark", nargs="+", choices=available_benchmarks.keys(), default=available_benchmarks.keys(), ) args = dict(**parser.parse_args().__dict__) benchmark_classes = (available_benchmarks[x] for x in args.pop("benchmark")) for benchmark_class in benchmark_classes: print("Running {}".format(benchmark_class.__name__)) benchmark = benchmark_class(**args) benchmark.start() print() geventhttpclient-2.0.11/benchmarks/httpclient.py000066400000000000000000000020671450702451400220360ustar00rootroot00000000000000import time import gevent.pool from geventhttpclient import HTTPClient, URL from geventhttpclient.header import Headers if __name__ == "__main__": N = 1000 C = 10 url = URL('http://127.0.0.1/index.html') qs = url.request_uri def run(client): response = client.get(qs) response.read() assert response.status_code == 200 # For better compatibility, especially with cookies, use headers_type=Headers # The difference is 2900 requests/s with dict vs 2450 with Headers on my machine # For maximum speed, set headers_type=dict # In that case, multiple header lines will be ignored, only the first is kept client = HTTPClient.from_url(url, concurrency=C, headers_type=dict) group = gevent.pool.Pool(size=C) for i in xrange(5): now = time.time() for _ in xrange(N): group.spawn(run, client) group.join() delta = time.time() - now req_per_sec = N / delta print "request count:%d, concurrenry:%d, %f req/s" % ( N, C, req_per_sec) 
geventhttpclient-2.0.11/benchmarks/httplib2_patched.py000066400000000000000000000024051450702451400230740ustar00rootroot00000000000000if __name__ == "__main__": from geventhttpclient import httplib httplib.patch() import httplib2 import time import gevent.queue import gevent.pool from contextlib import contextmanager class ConnectionPool(object): def __init__(self, factory, size=5): self.factory = factory self.queue = gevent.queue.Queue(size) for i in xrange(size): self.queue.put(factory()) @contextmanager def use(self): el = self.queue.get() yield el self.queue.put(el) def httplib2_factory(*args, **kw): def factory(): return httplib2.Http(*args, **kw) return factory N = 1000 C = 10 url = 'http://127.0.0.1/index.html' def run(pool): with pool.use() as http: http.request(url) http_pool = ConnectionPool(httplib2_factory(), size=C) group = gevent.pool.Pool(size=C) for i in xrange(5): now = time.time() for _ in xrange(N): group.spawn(run, http_pool) group.join() delta = time.time() - now req_per_sec = N / delta print "request count:%d, concurrenry:%d, %f req/s" % ( N, C, req_per_sec) geventhttpclient-2.0.11/benchmarks/requests_bench.py000066400000000000000000000012471450702451400226710ustar00rootroot00000000000000import time import gevent.pool import gevent.monkey gevent.monkey.patch_all() import requests if __name__ == "__main__": N = 1000 C = 10 url = 'http://127.0.0.1/index.html' def run(client): response = client.get(url) assert response.status_code == requests.codes.ok client = requests.Session() group = gevent.pool.Pool(size=C) for i in xrange(5): now = time.time() for _ in xrange(N): group.spawn(run, client) group.join() delta = time.time() - now req_per_sec = N / delta print "request count:%d, concurrenry:%d, %f req/s" % ( N, C, req_per_sec) geventhttpclient-2.0.11/benchmarks/restkit_bench.py000066400000000000000000000015031450702451400224760ustar00rootroot00000000000000if __name__ == "__main__": from gevent import monkey monkey.patch_all() import gevent.pool 
import time from restkit import * from socketpool import ConnectionPool url = 'http://127.0.0.1/index.html' N = 1000 C = 10 Pool = ConnectionPool(factory=Connection,backend="gevent",max_size=C,timeout=300) def run(): response = request(url,follow_redirect=True,pool=Pool) response.body_string() assert response.status_int == 200 group = gevent.pool.Pool(size=C) for i in xrange(5): now = time.time() for _ in xrange(N): group.spawn(run) group.join() delta = time.time() - now req_per_sec = N / delta print "request count:%d, concurrenry:%d, %f req/s" % ( N, C, req_per_sec) geventhttpclient-2.0.11/benchmarks/urllib3_bench.py000066400000000000000000000012421450702451400223650ustar00rootroot00000000000000import time import gevent.pool import gevent.monkey gevent.monkey.patch_all() import urllib3 if __name__ == "__main__": N = 1000 C = 10 url = 'http://127.0.0.1/index.html' def run(client): response = client.request('GET', url) assert response.status == 200 client = urllib3.PoolManager() group = gevent.pool.Pool(size=C) for i in xrange(5): now = time.time() for _ in xrange(N): group.spawn(run, client) group.join() delta = time.time() - now req_per_sec = N / delta print "request count:%d, concurrenry:%d, %f req/s" % ( N, C, req_per_sec) geventhttpclient-2.0.11/examples/000077500000000000000000000000001450702451400170025ustar00rootroot00000000000000geventhttpclient-2.0.11/examples/download.py000066400000000000000000000007241450702451400211660ustar00rootroot00000000000000#!/usr/bin/env python from geventhttpclient import HTTPClient, URL if __name__ == "__main__": url = URL('http://127.0.0.1:80/100.dat') http = HTTPClient.from_url(url) response = http.get(url.request_uri) assert response.status_code == 200 CHUNK_SIZE = 1024 * 16 # 16KB with open('/tmp/100.dat', 'w') as f: data = response.read(CHUNK_SIZE) while data: f.write(data) data = response.read(CHUNK_SIZE) 
geventhttpclient-2.0.11/examples/facebook.py000066400000000000000000000032011450702451400211210ustar00rootroot00000000000000#!/usr/bin/env python import gevent.pool import json from geventhttpclient import HTTPClient from geventhttpclient.url import URL if __name__ == "__main__": # go to http://developers.facebook.com/tools/explorer and copy the access token TOKEN = '' url = URL('https://graph.facebook.com/me/friends') url['access_token'] = TOKEN # setting the concurrency to 10 allow to create 10 connections and # reuse them. http = HTTPClient.from_url(url, concurrency=10) response = http.get(url.request_uri) assert response.status_code == 200 # response comply to the read protocol. It passes the stream to # the json parser as it's being read. data = json.load(response)['data'] def print_friend_username(http, friend_id): friend_url = URL('/' + str(friend_id)) friend_url['access_token'] = TOKEN # the greenlet will block until a connection is available response = http.get(friend_url.request_uri) assert response.status_code == 200 friend = json.load(response) if friend.has_key('username'): print '%s: %s' % (friend['username'], friend['name']) else: print '%s has no username.' % friend['name'] # allow to run 20 greenlet at a time, this is more than concurrency # of the http client but isn't a problem since the client has its own # connection pool. 
pool = gevent.pool.Pool(20) for item in data: friend_id = item['id'] pool.spawn(print_friend_username, http, friend_id) pool.join() http.close() geventhttpclient-2.0.11/examples/httplib2_patched.py000066400000000000000000000011121450702451400225670ustar00rootroot00000000000000from geventhttpclient import httplib httplib.patch() from httplib2 import Http if __name__ == "__main__": http = Http() response, content = http.request('http://google.fr/') assert response.status == 200 assert content print response print content response, content = http.request('http://google.fr/', method='HEAD') assert response.status == 200 assert content == '' print response response, content = http.request('https://www.google.com/', method='HEAD') assert response.status == 200 assert content == '' print response geventhttpclient-2.0.11/examples/twitter_streaming.py000066400000000000000000000033511450702451400231310ustar00rootroot00000000000000import time import json from pprint import pprint as pp from geventhttpclient.url import URL from geventhttpclient import HTTPClient import oauth2 as oauthlib if __name__ == "__main__": APP_ID = '' APP_SECRET = '' # see https://github.com/simplegeo/python-oauth2 # "Twitter Three-legged OAuth Example" token_info = { "oauth_token_secret" : "...", "user_id" : "...", "oauth_token" : "...", "screen_name" : "..." 
} oauthlib_consumer = oauthlib.Consumer(APP_ID, APP_SECRET) token = oauthlib.Token(token_info['oauth_token'], token_info['oauth_token_secret']) params = { 'oauth_version': "1.0", 'oauth_nonce': oauthlib.generate_nonce(), 'oauth_timestamp': int(time.time()), 'oauth_token': token.key, 'oauth_consumer_key': oauthlib_consumer.key, 'locations': '-122.75,36.8,-121.75,37.8' # San Francisco } url = URL('https://stream.twitter.com/1/statuses/filter.json') req = oauthlib.Request.from_consumer_and_token( oauthlib_consumer, token=token, http_url=str(url), http_method='POST') signature_method = oauthlib.SignatureMethod_HMAC_SHA1() req = oauthlib.Request(method="POST", url=str(url), parameters=params) req.sign_request(signature_method, oauthlib_consumer, token) http = HTTPClient.from_url(url) response = http.request('POST', url.request_uri, body=req.to_postdata(), headers={'Content-Type': 'application/x-www-form-urlencoded', 'Accept': '*/*'}) data = json.loads(response.readline()) while data: pp(data) data = json.loads(response.readline()) geventhttpclient-2.0.11/examples/urllib_patched.py000066400000000000000000000002051450702451400223320ustar00rootroot00000000000000from geventhttpclient import httplib httplib.patch() from urllib2 import urlopen print urlopen('https://www.google.fr/').read() geventhttpclient-2.0.11/ext/000077500000000000000000000000001450702451400157645ustar00rootroot00000000000000geventhttpclient-2.0.11/ext/Python_compat.h000066400000000000000000000001411450702451400207550ustar00rootroot00000000000000#include #ifndef Py_TYPE #define Py_TYPE(ob) (((PyObject*)(ob))->ob_type) #endif geventhttpclient-2.0.11/ext/_parser.c000066400000000000000000000307341450702451400175720ustar00rootroot00000000000000#define PY_SSIZE_T_CLEAN #include #include "Python_compat.h" #include #include static PyObject * PyExc_HTTPParseError; enum py_parser_should_keep_alive { KA_INCOMPLETE, KA_FALSE, KA_TRUE, }; typedef struct { PyObject_HEAD llhttp_t* parser; llhttp_errno_t error; const 
char* reason; enum py_parser_should_keep_alive should_keep_alive; } PyHTTPResponseParser; static int on_message_begin(llhttp_t* parser) { int fail = 0; PyObject* self = (PyObject*)parser->data; if (PyObject_HasAttrString(self, "_on_message_begin")) { PyObject* callable = PyObject_GetAttrString(self, "_on_message_begin"); PyObject* result = PyObject_CallObject(callable, NULL); PyObject* exception = PyErr_Occurred(); if (exception != NULL) { fail = -1; } else { if (PyObject_IsTrue(result)) fail = -1; } Py_XDECREF(result); Py_DECREF(callable); } return fail; } static int on_message_complete(llhttp_t* parser) { int fail = 0; PyHTTPResponseParser *self = (PyHTTPResponseParser*) parser->data; self->should_keep_alive = llhttp_should_keep_alive(parser) ? KA_TRUE : KA_FALSE; if (PyObject_HasAttrString(self, "_on_message_complete")) { PyObject* callable = PyObject_GetAttrString(self, "_on_message_complete"); PyObject* result = PyObject_CallObject(callable, NULL); PyObject* exception = PyErr_Occurred(); if (exception != NULL) { fail = -1; } else { if (PyObject_IsTrue(result)) fail = -1; } Py_XDECREF(result); Py_DECREF(callable); } return fail; } static int on_headers_complete(llhttp_t* parser) { /* 1 => skip body, 2 => upgrade, 0 => continue, -1 => error */ int skip_body = 0; PyObject* self = (PyObject*)parser->data; if (PyObject_HasAttrString(self, "_on_headers_complete")) { PyObject* callable = PyObject_GetAttrString(self, "_on_headers_complete"); PyObject* result = PyObject_CallObject(callable, NULL); PyObject* exception = PyErr_Occurred(); if (exception != NULL) { skip_body = -1; } else if (PyObject_IsTrue(result)) { skip_body = 1; } Py_XDECREF(result); Py_DECREF(callable); } return skip_body; } static int on_http_data_cb(llhttp_t* parser, const char *at, size_t length, const char * python_cb) { int fail = 0; PyObject* self = (PyObject*)parser->data; if (PyObject_HasAttrString(self, python_cb)) { PyObject* callable = PyObject_GetAttrString(self, python_cb); PyObject* args 
= Py_BuildValue("(s#)", at, length); PyObject* result = PyObject_CallObject(callable, args); PyObject* exception = PyErr_Occurred(); if (exception != NULL) { fail = -1; } else { if (PyObject_IsTrue(result)) fail = -1; } Py_XDECREF(result); Py_DECREF(callable); Py_DECREF(args); } return fail; } static int on_status(llhttp_t* parser, const char *at, size_t length) { return on_http_data_cb(parser, at, length, "_on_status"); } static int on_header_field(llhttp_t* parser, const char *at, size_t length) { return on_http_data_cb(parser, at, length, "_on_header_field"); } static int on_header_value(llhttp_t* parser, const char *at, size_t length) { return on_http_data_cb(parser, at, length, "_on_header_value"); } static int on_body(llhttp_t* parser, const char *at, size_t length) { int fail = 0; PyObject* self = (PyObject*)parser->data; if (PyObject_HasAttrString(self, "_on_body")) { PyObject* callable = PyObject_GetAttrString(self, "_on_body"); PyObject* bytearray = PyByteArray_FromStringAndSize(at, length); PyObject* result = PyObject_CallFunctionObjArgs( callable, bytearray, NULL); PyObject* exception = PyErr_Occurred(); if (exception != NULL) { fail = -1; } else { if (PyObject_IsTrue(result)) fail = -1; } Py_XDECREF(result); Py_DECREF(callable); Py_DECREF(bytearray); } return fail; } static llhttp_settings_t _parser_settings = { on_message_begin, NULL, // on_url on_status, on_header_field, on_header_value, on_headers_complete, on_body, on_message_complete }; static PyObject* PyHTTPResponseParser_new(PyTypeObject* type, PyObject* args, PyObject* kwds) { PyHTTPResponseParser* self = (PyHTTPResponseParser*)type->tp_alloc(type, 0); if (self != NULL) { self->parser = PyMem_Malloc(sizeof(llhttp_t)); if (self->parser == NULL) { return NULL; } else { llhttp_init(self->parser, HTTP_RESPONSE, &_parser_settings); self->parser->data = (void*)self; self->error = HPE_OK; self->reason = 0; self->should_keep_alive = KA_INCOMPLETE; } } return (PyObject*) self; } static void* 
set_parser_exception(PyHTTPResponseParser *parser) { PyObject *args = Py_BuildValue("(s,B)", parser->reason, parser->error); if (args == NULL) return PyErr_NoMemory(); PyErr_SetObject(PyExc_HTTPParseError, args); Py_DECREF(args); return NULL; } static size_t size_t_MAX = -1; static PyObject* PyHTTPResponseParser_feed(PyHTTPResponseParser *self, PyObject* args) { char* buf = NULL; Py_ssize_t buf_len; int succeed = PyArg_ParseTuple(args, "s#", &buf, &buf_len); /* cast Py_ssize_t signed integer to unsigned */ size_t unsigned_buf_len = buf_len + size_t_MAX + 1; if (succeed) { PyObject * exception; /* in case feed is called again after an error occurred */ if (self->error != HPE_OK) { return set_parser_exception(self); } if (buf_len) { self->error = llhttp_execute(self->parser, buf, unsigned_buf_len); self->reason = self->parser->reason; } else { if (!llhttp_message_needs_eof(self->parser)) { PyErr_SetString(PyExc_HTTPParseError, "Incomplete response."); return NULL; } self->error = llhttp_finish(self->parser); self->reason = self->parser->reason; } /* Exception in callbacks */ exception = PyErr_Occurred(); if (exception != NULL) return NULL; if (self->error != HPE_OK) { return set_parser_exception(self); } Py_RETURN_NONE; } return NULL; } static PyObject* PyHTTPResponseParser_parser_failed(PyHTTPResponseParser* self) { return PyBool_FromLong(self->error != HPE_OK || llhttp_get_errno(self->parser) != HPE_OK); } #if PY_MAJOR_VERSION >= 3 static PyObject* PyHTTPResponseParser_get_http_version(PyHTTPResponseParser *self) { return PyUnicode_FromFormat("HTTP/%u.%u", self->parser->http_major, self->parser->http_minor); } #else static PyObject* PyHTTPResponseParser_get_http_version(PyHTTPResponseParser *self) { return PyString_FromFormat("HTTP/%u.%u", self->parser->http_major, self->parser->http_minor); } #endif static PyObject* PyHTTPResponseParser_get_remaining_content_length(PyHTTPResponseParser *self) { if (sizeof(signed long long) == 8) return Py_BuildValue("L", 
self->parser->content_length); if (sizeof(signed long) == 8) return Py_BuildValue("l", self->parser->content_length); // int return Py_BuildValue("i", self->parser->content_length); } static PyObject* PyHTTPResponseParser_get_code(PyHTTPResponseParser *self) { return Py_BuildValue("i", self->parser->status_code); } static PyObject* PyHTTPResponseParser_should_keep_alive(PyHTTPResponseParser* self) { if (self->error != HPE_OK) { Py_RETURN_FALSE; } int should_keep_alive; switch (self->should_keep_alive) { case KA_INCOMPLETE: should_keep_alive = llhttp_should_keep_alive(self->parser); break; case KA_FALSE: should_keep_alive = 0; break; case KA_TRUE: should_keep_alive = 1; break; } return PyBool_FromLong(should_keep_alive); } void PyHTTPResponseParser_dealloc(PyHTTPResponseParser* self) { self->parser->data = NULL; PyMem_Free(self->parser); Py_TYPE(self)->tp_free((PyObject*)self); } static PyMethodDef PyHTTPResponseParser_methods[] = { {"feed", (PyCFunction)PyHTTPResponseParser_feed, METH_VARARGS, "Feed the parser with data"}, {"get_code", (PyCFunction)PyHTTPResponseParser_get_code, METH_NOARGS, "Get http response code"}, {"get_http_version", (PyCFunction)PyHTTPResponseParser_get_http_version, METH_NOARGS, "Get http version"}, {"get_remaining_content_length", (PyCFunction)PyHTTPResponseParser_get_remaining_content_length, METH_NOARGS, "Get remaining content length to read"}, {"should_keep_alive", (PyCFunction)PyHTTPResponseParser_should_keep_alive, METH_NOARGS, "Tell wether the connection should stay connected (HTTP 1.1)"}, {"parser_failed", (PyCFunction)PyHTTPResponseParser_parser_failed, METH_NOARGS, "Tell if parser have failed."}, {NULL} /* Sentinel */ }; static PyTypeObject HTTPParserType = { PyVarObject_HEAD_INIT(NULL, 0) "HTTPResponseParser", /*tp_name*/ sizeof(PyHTTPResponseParser), /*tp_basicsize*/ 0, /*tp_itemsize*/ (destructor)PyHTTPResponseParser_dealloc, /*tp_dealloc*/ 0, /*tp_print*/ 0, /*tp_getattr*/ 0, /*tp_setattr*/ 0, /*tp_compare*/ 0, /*tp_repr*/ 0, 
/*tp_as_number*/ 0, /*tp_as_sequence*/ 0, /*tp_as_mapping*/ 0, /*tp_hash */ 0, /*tp_call*/ 0, /*tp_str*/ 0, /*tp_getattro*/ 0, /*tp_setattro*/ 0, /*tp_as_buffer*/ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/ "HTTP Response parser (non thread-safe)", /* tp_doc */ 0, /* tp_traverse */ 0, /* tp_clear */ 0, /* tp_richcompare */ 0, /* tp_weaklistoffset */ 0, /* tp_iter */ 0, /* tp_iternext */ PyHTTPResponseParser_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ 0, /* tp_dict */ 0, /* tp_descr_get */ 0, /* tp_descr_set */ 0, /* tp_dictoffset */ 0, /* tp_init */ 0, /* tp_alloc */ PyHTTPResponseParser_new, /* tp_new */ }; static PyMethodDef module_methods[] = { {NULL} /* Sentinel */ }; #if PY_MAJOR_VERSION >= 3 static struct PyModuleDef moduledef = { PyModuleDef_HEAD_INIT, "_parser", "HTTP Parser from nginx/Joyent.", 0, module_methods, NULL, NULL, NULL, NULL }; #define INITERROR return NULL PyMODINIT_FUNC PyInit__parser(void) #else #define INITERROR return void init_parser(void) #endif { PyObject *module, *httplib, *HTTPException; if (PyType_Ready(&HTTPParserType) < 0) INITERROR; #if PY_MAJOR_VERSION >= 3 module = PyModule_Create(&moduledef); #else module = Py_InitModule3("_parser", module_methods, "HTTP Parser from nginx/Joyent."); #endif Py_INCREF(&HTTPParserType); PyModule_AddObject(module, "HTTPResponseParser", (PyObject *)&HTTPParserType); #if PY_MAJOR_VERSION >= 3 httplib = PyImport_ImportModule("http.client"); #else httplib = PyImport_ImportModule("httplib"); #endif HTTPException = PyObject_GetAttrString(httplib, "HTTPException"); PyExc_HTTPParseError = PyErr_NewException( "_parser.HTTPParseError", HTTPException, NULL); Py_INCREF(PyExc_HTTPParseError); PyModule_AddObject(module, "HTTPParseError", PyExc_HTTPParseError); #if PY_MAJOR_VERSION >= 3 return module; #endif } #undef PY_SSIZE_T_CLEAN 
geventhttpclient-2.0.11/llhttp/000077500000000000000000000000001450702451400164735ustar00rootroot00000000000000geventhttpclient-2.0.11/pytest.ini000066400000000000000000000001011450702451400172050ustar00rootroot00000000000000[pytest] markers = online: tests which require internet access geventhttpclient-2.0.11/release.md000066400000000000000000000004131450702451400171240ustar00rootroot00000000000000# Making new releases - Bump version in src/geventhttpclient/\__init__.py - Bump version in setup.py - Make a GitHub release (which also makes a git tag) - Trigger the publish action https://github.com/geventhttpclient/geventhttpclient/actions/workflows/publish.yml geventhttpclient-2.0.11/requirements-dev.txt000066400000000000000000000000551450702451400212240ustar00rootroot00000000000000-rrequirements.txt pytest dpkt mock requests geventhttpclient-2.0.11/requirements.txt000066400000000000000000000000411450702451400204430ustar00rootroot00000000000000six certifi gevent brotli urllib3geventhttpclient-2.0.11/setup.py000066400000000000000000000040371450702451400167020ustar00rootroot00000000000000import sys from setuptools.extension import Extension from setuptools import find_packages from distutils.core import setup DESC = """ A high performance, concurrent HTTP client library for python using gevent. gevent.httplib support was removed in gevent 1.0, geventhttpclient now provides that missing functionality. geventhttpclient uses a fast http parser, written in C, originating from nginx, extracted and modified by Joyent. geventhttpclient has been specifically designed for high concurrency, streaming and support HTTP 1.1 persistent connections. More generally it is designed for efficiently pulling from REST APIs and streaming APIs like Twitter's. Safe SSL support is provided by default. geventhttpclient depends on the certifi CA Bundle. This is the same CA Bundle which ships with the Requests codebase, and is derived from Mozilla Firefox's canonical set. 
As of 1.5, only Python 3.6+ is fully supported (with prebuilt wheels), but Python 2.7 and 3.5 *should* work too. Use of SSL/TLS with python 2.7.9 is not recommended and may be broken. """ httpparser = Extension( 'geventhttpclient._parser', sources=[ 'ext/_parser.c', 'llhttp/src/api.c', 'llhttp/src/http.c', 'llhttp/src/llhttp.c', ], include_dirs=[ 'llhttp/include', ], ) requirements = [ 'gevent >= 0.13', 'certifi', 'six', 'brotli' ] if sys.hexversion < 0x02070900: requirements += [ 'backports.ssl_match_hostname', ] setup(name='geventhttpclient', version = "2.0.11", # dont forget to update version in __init__.py as well description = 'http client library for gevent', long_description = DESC, url="http://github.com/gwik/geventhttpclient", author="Antonin Amand", author_email="antonin.amand@gmail.com", packages=find_packages('src'), exclude_package_data={'geventhttpclient': ['tests/*']}, license='MIT', package_dir={'': 'src'}, ext_modules = [httpparser], include_package_data=True, install_requires=requirements) geventhttpclient-2.0.11/src/000077500000000000000000000000001450702451400157535ustar00rootroot00000000000000geventhttpclient-2.0.11/src/geventhttpclient/000077500000000000000000000000001450702451400213425ustar00rootroot00000000000000geventhttpclient-2.0.11/src/geventhttpclient/__init__.py000066400000000000000000000002541450702451400234540ustar00rootroot00000000000000# package __version__ = "2.0.11" # dont forget to update version in setup.py as well from geventhttpclient.client import HTTPClient from geventhttpclient.url import URL geventhttpclient-2.0.11/src/geventhttpclient/client.py000066400000000000000000000254471450702451400232060ustar00rootroot00000000000000import errno import os import gevent.socket import six from geventhttpclient import __version__ from geventhttpclient.connectionpool import ConnectionPool from geventhttpclient.header import Headers from geventhttpclient.response import HTTPConnectionClosed from geventhttpclient.response import 
HTTPSocketPoolResponse from geventhttpclient.url import URL CRLF = "\r\n" WHITESPACE = " " FIELD_VALUE_SEP = ": " HOST_PORT_SEP = ":" SLASH = "/" PROTO_HTTP = "http" PROTO_HTTPS = "https" HEADER_HOST = "Host" HEADER_CONTENT_LENGTH = "Content-Length" METHOD_GET = "GET" METHOD_HEAD = "HEAD" METHOD_POST = "POST" METHOD_PUT = "PUT" METHOD_DELETE = "DELETE" METHOD_PATCH = "PATCH" METHOD_OPTIONS = "OPTIONS" METHOD_TRACE = "TRACE" def _get_body_length(body): """ Get len of string or file :param body: :return: :rtype: int """ try: return len(body) except TypeError: try: return os.fstat(body.fileno()).st_size except (AttributeError, OSError): return None class HTTPClient(object): HTTP_11 = 'HTTP/1.1' HTTP_10 = 'HTTP/1.0' BLOCK_SIZE = 1024 * 4 # 4KB DEFAULT_HEADERS = Headers({ 'User-Agent': 'python/gevent-http-client-' + __version__ }) @classmethod def from_url(cls, url, **kw): if not isinstance(url, URL): url = URL(url) enable_ssl = url.scheme == PROTO_HTTPS if not enable_ssl: kw.pop('ssl_options', None) return cls(url.host, port=url.port, ssl=enable_ssl, **kw) def __init__(self, host, port=None, headers=None, block_size=BLOCK_SIZE, connection_timeout=ConnectionPool.DEFAULT_CONNECTION_TIMEOUT, network_timeout=ConnectionPool.DEFAULT_NETWORK_TIMEOUT, disable_ipv6=False, concurrency=1, ssl=False, ssl_options=None, ssl_context_factory=None, insecure=False, proxy_host=None, proxy_port=None, version=HTTP_11, headers_type=Headers): if headers is None: headers = {} self.host = host self.port = port connection_host = self.host connection_port = self.port if proxy_host is not None: assert proxy_port is not None, \ 'you have to provide proxy_port if you set proxy_host' self.use_proxy = True connection_host = proxy_host connection_port = proxy_port else: self.use_proxy = False if ssl: ssl_options = ssl_options.copy() if ssl_options else {} if ssl_options is not None: if ssl_context_factory is not None: requested_hostname = headers.get('host', self.host) 
ssl_options.setdefault('server_hostname', requested_hostname) self.ssl = True if not self.port: self.port = 443 if not connection_port: connection_port = self.port # Import SSL as late as possible, fail hard with Import Error from geventhttpclient.connectionpool import SSLConnectionPool self._connection_pool = SSLConnectionPool( connection_host, connection_port, self.host, self.port, size=concurrency, ssl_options=ssl_options, ssl_context_factory=ssl_context_factory, insecure=insecure, network_timeout=network_timeout, connection_timeout=connection_timeout, disable_ipv6=disable_ipv6, use_proxy=self.use_proxy ) else: self.ssl = False if not self.port: self.port = 80 if not connection_port: connection_port = self.port self._connection_pool = ConnectionPool( connection_host, connection_port, self.host, self.port, size=concurrency, network_timeout=network_timeout, connection_timeout=connection_timeout, disable_ipv6=disable_ipv6, use_proxy=self.use_proxy ) self.version = version self.headers_type = headers_type self.default_headers = headers_type() self.default_headers.update(self.DEFAULT_HEADERS) self.default_headers.update(headers) self.block_size = block_size self._base_url_string = str(self.get_base_url()) def get_base_url(self): url = URL() url.host = self.host url.port = self.port url.scheme = self.ssl and PROTO_HTTPS or PROTO_HTTP return url def close(self): self._connection_pool.close() # Like urllib2, try to treat the body as a file if we can't determine the # file length with `len()` def _build_request(self, method, request_uri, body="", headers=None): """ :param method: :type method: basestring :param request_uri: :type request_uri: basestring :param body: :type body: basestring or file :param headers: :type headers: dict :return: :rtype: basestring """ if headers is None: headers = {} header_fields = self.headers_type() header_fields.update(self.default_headers) header_fields.update(headers) if self.version == self.HTTP_11 and HEADER_HOST not in header_fields: 
host_port = self.host # IPv6 addresses require square brackets in the Host header. if ':' in self.host and self.host[0] != '[' and self.host[-1] != ']': host_port = '[' + host_port + ']' if self.port not in (80, 443): host_port += HOST_PORT_SEP + str(self.port) header_fields[HEADER_HOST] = host_port if body and HEADER_CONTENT_LENGTH not in header_fields: body_length = _get_body_length(body) if body_length: header_fields[HEADER_CONTENT_LENGTH] = body_length request_url = request_uri if self.use_proxy: base_url = self._base_url_string if request_uri.startswith(SLASH): base_url = base_url[:-1] request_url = base_url + request_url elif not request_url.startswith((SLASH, PROTO_HTTP)): request_url = SLASH + request_url elif request_url.startswith(PROTO_HTTP): if request_url.startswith(self._base_url_string): request_url = request_url[len(self._base_url_string) - 1:] else: raise ValueError("Invalid host in URL") request = method + WHITESPACE + request_url + WHITESPACE + self.version + CRLF for field, value in header_fields.items(): request += field + FIELD_VALUE_SEP + str(value) + CRLF request += CRLF return request def request(self, method, request_uri, body=b"", headers=None): """ :param method: :param request_uri: :param body: byte or file :param headers: :return: """ if isinstance(body, six.text_type): body = body.encode('utf-8') request = self._build_request( method.upper(), request_uri, body=body, headers=headers) attempts_left = self._connection_pool.size + 1 while 1: sock = self._connection_pool.get_socket() try: _request = request.encode() if body: if isinstance(body, six.binary_type): sock.sendall(_request + body) else: sock.sendall(_request) # TODO: Support non file-like iterables, e.g. `(u"string1", u"string2")`. 
if six.PY3: sock.sendfile(body) else: while True: chunk = body.read(65536) if not chunk: break sock.sendall(chunk) else: sock.sendall(_request) except gevent.socket.error as e: self._connection_pool.release_socket(sock) if (e.errno == errno.ECONNRESET or e.errno == errno.EPIPE) and attempts_left > 0: attempts_left -= 1 continue raise e try: response = HTTPSocketPoolResponse(sock, self._connection_pool, block_size=self.block_size, method=method.upper(), headers_type=self.headers_type) except HTTPConnectionClosed as e: # connection is released by the response itself if attempts_left > 0: attempts_left -= 1 continue raise e else: response._sent_request = request return response def get(self, request_uri, headers={}): return self.request(METHOD_GET, request_uri, headers=headers) def head(self, request_uri, headers=None): return self.request(METHOD_HEAD, request_uri, headers=headers) def post(self, request_uri, body=u'', headers=None): return self.request(METHOD_POST, request_uri, body=body, headers=headers) def put(self, request_uri, body=u'', headers=None): return self.request(METHOD_PUT, request_uri, body=body, headers=headers) def delete(self, request_uri, body=u'', headers=None): return self.request(METHOD_DELETE, request_uri, body=body, headers=headers) def patch(self, request_uri, body=u'', headers=None): return self.request(METHOD_PATCH, request_uri, body=body, headers=headers) def trace(self, request_uri, body=u'', headers=None): return self.request(METHOD_TRACE, request_uri, body=body, headers=headers) def options(self, request_uri, headers=None): return self.request(METHOD_OPTIONS, request_uri, headers=headers) class HTTPClientPool(object): """ Factory for maintaining a bunch of clients, one per host:port """ # TODO: Add some housekeeping and cleanup logic def __init__(self, **kwargs): self.clients = dict() self.client_args = kwargs def get_client(self, url): if not isinstance(url, URL): url = URL(url) client_key = url.host, url.port try: return 
self.clients[client_key] except KeyError: client = HTTPClient.from_url(url, **self.client_args) self.clients[client_key] = client return client def close(self): for client in self.clients.values(): client.close() self.clients.clear() geventhttpclient-2.0.11/src/geventhttpclient/connectionpool.py000066400000000000000000000212361450702451400247510ustar00rootroot00000000000000import gevent.queue import gevent.socket import os import sys import six _CA_CERTS = None try: from ssl import get_default_verify_paths except ImportError: _CA_CERTS = None else: _certs = get_default_verify_paths() _CA_CERTS = _certs.cafile or _certs.capath if not _CA_CERTS or os.path.isdir(_CA_CERTS): import certifi _CA_CERTS = certifi.where() try: from ssl import _DEFAULT_CIPHERS except ImportError: # ssl._DEFAULT_CIPHERS in python2.7 branch. _DEFAULT_CIPHERS = ( 'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:' 'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:ECDH+RC4:' 'DH+RC4:RSA+RC4:!aNULL:!eNULL:!MD5') try: from gevent import lock except ImportError: # gevent < 1.0b2 from gevent import coros as lock DEFAULT_CONNECTION_TIMEOUT = 5.0 DEFAULT_NETWORK_TIMEOUT = 5.0 IGNORED = object() class ConnectionPool(object): DEFAULT_CONNECTION_TIMEOUT = 5.0 DEFAULT_NETWORK_TIMEOUT = 5.0 def __init__(self, connection_host, connection_port, request_host, request_port, size=5, disable_ipv6=False, connection_timeout=DEFAULT_CONNECTION_TIMEOUT, network_timeout=DEFAULT_NETWORK_TIMEOUT, use_proxy=False): self._closed = False self._connection_host = connection_host self._connection_port = connection_port self._request_host = request_host self._request_port = request_port self._semaphore = lock.BoundedSemaphore(size) self._socket_queue = gevent.queue.LifoQueue(size) self._use_proxy = use_proxy self.connection_timeout = connection_timeout self.network_timeout = network_timeout self.size = size self.disable_ipv6 = disable_ipv6 def _resolve(self): """ resolve (dns) socket 
informations needed to connect it. """ family = 0 if self.disable_ipv6: family = gevent.socket.AF_INET info = gevent.socket.getaddrinfo(self._connection_host, self._connection_port, family, 0, gevent.socket.SOL_TCP) # family, socktype, proto, canonname, sockaddr = info[0] return info def close(self): self._closed = True while not self._socket_queue.empty(): try: sock = self._socket_queue.get(block=False) try: sock.close() except: pass except gevent.queue.Empty: pass def _create_tcp_socket(self, family, socktype, protocol): """ tcp socket factory. """ sock = gevent.socket.socket(family, socktype, protocol) return sock def _create_socket(self): """ might be overridden and super for wrapping into a ssl socket or set tcp/socket options """ sock_infos = self._resolve() first_error = None for sock_info in sock_infos: try: sock = self._create_tcp_socket(*sock_info[:3]) except Exception as e: if not first_error: first_error = e continue try: sock.settimeout(self.connection_timeout) sock = self._connect_socket(sock, sock_info[-1]) self.after_connect(sock) sock.settimeout(self.network_timeout) return sock except IOError as e: sock.close() if not first_error: first_error = e except: sock.close() raise if first_error: raise first_error else: raise RuntimeError( "Cannot resolve %s:%s" % (self._host, self._port)) def after_connect(self, sock): pass def _connect_socket(self, sock, address): sock.connect(address) self._setup_proxy(sock) return sock def _setup_proxy(self, sock): if self._use_proxy: sock.send( six.binary_type( "CONNECT {self._request_host}:{self._request_port} " "HTTP/1.1\r\n\r\n".format(self=self), 'utf8' ) ) resp = sock.recv(4096) parts = resp.split() if not parts or parts[1] != b"200": raise RuntimeError( "Error response from Proxy server : %s" % resp) def get_socket(self): """ get a socket from the pool. This blocks until one is available. 
""" self._semaphore.acquire() if self._closed: raise RuntimeError('connection pool closed') try: return self._socket_queue.get(block=False) except gevent.queue.Empty: try: return self._create_socket() except: self._semaphore.release() raise def return_socket(self, sock): """ return a socket to the pool. """ if self._closed: try: sock.close() except: pass return self._socket_queue.put(sock) self._semaphore.release() def release_socket(self, sock): """ call when the socket is no more usable. """ try: sock.close() except: pass if not self._closed: self._semaphore.release() try: import gevent.ssl try: if sys.version_info[:2] < (3, 7): from gevent.ssl import match_hostname except ImportError: from backports.ssl_match_hostname import match_hostname try: from gevent.ssl import create_default_context except ImportError: create_default_context = None except ImportError: pass else: class SSLConnectionPool(ConnectionPool): """ SSLConnectionPool creates connections wrapped with SSL/TLS. :param host: hostname :param port: port :param ssl_options: accepts any options supported by `ssl.wrap_socket` :param ssl_context_factory: use `ssl.create_default_context` by default if provided. It must be a callable that returns a SSLContext. 
""" default_options = { 'ciphers': _DEFAULT_CIPHERS, 'ca_certs': _CA_CERTS, 'cert_reqs': gevent.ssl.CERT_REQUIRED } def __init__(self, connection_host, connection_port, request_host, request_port, insecure=False, ssl_context_factory=None, ssl_options=None, **kw): self.insecure = insecure self.ssl_options = self.default_options.copy() self.ssl_options.update(ssl_options or {}) ssl_context_factory = ssl_context_factory or create_default_context if ssl_context_factory is not None: self.init_ssl_context(ssl_context_factory) else: self.ssl_context = None super(SSLConnectionPool, self).__init__(connection_host, connection_port, request_host, request_port, **kw) def init_ssl_context(self, ssl_context_factory): ca_certs = self.ssl_options['ca_certs'] try: self.ssl_context = ssl_context_factory(cafile=ca_certs) except TypeError: self.ssl_context = ssl_context_factory() self.ssl_context.load_verify_locations(cafile=ca_certs) self.ssl_context.check_hostname = not self.insecure def after_connect(self, sock): super(SSLConnectionPool, self).after_connect(sock) if not self.insecure and sys.version_info[:2] < (3, 7): match_hostname(sock.getpeercert(), self._request_host) def _connect_socket(self, sock, address): sock = super(SSLConnectionPool, self)._connect_socket(sock, address) if self.ssl_context is None: return gevent.ssl.wrap_socket(sock, **self.ssl_options) else: server_hostname = self.ssl_options.get('server_hostname', self._request_host) return self.ssl_context.wrap_socket(sock, server_hostname=server_hostname) geventhttpclient-2.0.11/src/geventhttpclient/header.py000066400000000000000000000172461450702451400231560ustar00rootroot00000000000000import six if six.PY3: from collections.abc import Mapping, MutableMapping else: from collections import Mapping, MutableMapping _dict_setitem = dict.__setitem__ _dict_getitem = dict.__getitem__ _dict_delitem = dict.__delitem__ _dict_contains = dict.__contains__ _dict_setdefault = dict.setdefault class Headers(dict): """ :param 
headers: An iterable of field-value pairs. Must not contain multiple field names when compared case-insensitively. :param kwargs: Additional field-value pairs to pass in to ``dict.update``. A ``dict`` like container for storing HTTP Headers. Field names are stored and compared case-insensitively in compliance with RFC 7230. Iteration provides the first case-sensitive key seen for each case-insensitive pair. Using ``__setitem__`` syntax overwrites fields that compare equal case-insensitively in order to maintain ``dict``'s api. For fields that compare equal, instead create a new ``Headers`` and use ``.add`` in a loop. If multiple fields that are equal case-insensitively are passed to the constructor or ``.update``, the behavior is undefined and some will be lost. Note: b'asdf' and 'u'asdf' are separate things. This class tries not to enforce the one or the other. >>> headers = Headers() >>> headers.add('Set-Cookie', 'foo=bar') >>> headers.add('set-cookie', 'baz=quxx') >>> headers['content-length'] = '7' >>> headers['SET-cookie'] 'foo=bar, baz=quxx' >>> headers['Content-Length'] '7' """ def __init__(self, headers=None, **kwargs): dict.__init__(self) if headers is not None: if isinstance(headers, Headers): self._copy_from(headers) else: self.extend(headers) if kwargs: self.extend(kwargs) def __setitem__(self, key, val): return _dict_setitem(self, key.lower(), (key, val)) def __getitem__(self, key): val = _dict_getitem(self, key.lower()) if len(val) == 2: return val[1] return val[1:] def __delitem__(self, key): return _dict_delitem(self, key.lower()) def __contains__(self, key): return _dict_contains(self, key.lower()) def __eq__(self, other): if not isinstance(other, Mapping) and not hasattr(other, 'keys'): return False if not isinstance(other, type(self)): other = type(self)(other) return dict((k1, self[k1]) for k1 in self) == dict((k2, other[k2]) for k2 in other) def __ne__(self, other): return not self.__eq__(other) values = MutableMapping.values get = 
MutableMapping.get update = MutableMapping.update if six.PY3: keys = MutableMapping.keys else: iterkeys = MutableMapping.iterkeys if six.PY2: itervalues = MutableMapping.itervalues __marker = object() def pop(self, key, default=__marker): '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. If key is not found, d is returned if given, otherwise KeyError is raised. ''' # Using the MutableMapping function directly fails due to the private marker. # Using ordinary dict.pop would expose the internal structures. # So let's reinvent the wheel. try: value = self[key] except KeyError: if default is self.__marker: raise return default else: del self[key] return value def discard(self, key): try: del self[key] except KeyError: pass def add(self, key, val): """Adds a (name, value) pair, doesn't overwrite the value if it already exists. >>> headers = Headers(foo='bar') >>> headers.add('Foo', 'baz') >>> headers['foo'] 'bar, baz' """ key_lower = key.lower() new_vals = key, val # Keep the common case aka no item present as fast as possible vals = _dict_setdefault(self, key_lower, new_vals) if new_vals is not vals: # new_vals was not inserted, as there was a previous one if isinstance(vals, list): # If already several items got inserted, we have a list vals.append(val) else: # vals should be a tuple then, i.e. only one item so far # Need to convert the tuple to list for further extension _dict_setitem(self, key_lower, [vals[0], vals[1], val]) def extend(self, *args, **kwargs): """Generic import function for any type of header-like object. 
Adapted version of MutableMapping.update in order to insert items with self.add instead of self.__setitem__ """ if len(args) > 1: raise TypeError("extend() takes at most 1 positional " "arguments ({} given)".format(len(args))) other = args[0] if len(args) >= 1 else () if isinstance(other, Headers): for key, val in other.iteritems(): self.add(key, val) elif isinstance(other, Mapping): for key in other: self.add(key, other[key]) elif hasattr(other, "keys"): for key in other.keys(): self.add(key, other[key]) else: for key, value in other: self.add(key, value) for key, value in kwargs.items(): self.add(key, value) def getlist(self, key): """Returns a list of all the values for the named field. Returns an empty list if the key doesn't exist.""" try: vals = _dict_getitem(self, key.lower()) except KeyError: return [] else: if isinstance(vals, tuple): return [vals[1]] else: return vals[1:] # Backwards compatibility for httplib getheaders = getlist getallmatchingheaders = getlist iget = getlist # Python3 compatibility def get_all(self, key, failobj=None): vals = self.getlist(key) if not vals: return failobj return vals def __repr__(self): return "%s(%s)" % (type(self).__name__, dict(self.itermerged())) def _copy_from(self, other): for key in other: val = _dict_getitem(other, key) if isinstance(val, list): # Don't need to convert tuples val = list(val) _dict_setitem(self, key, val) def copy(self): clone = type(self)() clone._copy_from(self) return clone def itermerged(self): """Iterate over all headers, merging duplicate ones together.""" for key in self: val = _dict_getitem(self, key) # this should preserve either binary or string type sep = u', ' if isinstance(val[1], six.string_types) else b', ' yield val[0], sep.join(val[1:]) # Extensions to urllib3, compatibility to previous implementation def __len__(self): return sum(len(self.getlist(key)) for key in self) def compatible_dict(self): return dict(self.itermerged()) def iterlower(self): for key in self: vals = 
_dict_getitem(self, key) for val in vals[1:]: yield key, val iteritems = iterlower def items(self): return list(self.iterlower()) def iteroriginal(self): """Iterate over all header lines, including duplicate ones.""" for key in self: vals = _dict_getitem(self, key) for val in vals[1:]: yield vals[0], val geventhttpclient-2.0.11/src/geventhttpclient/httplib.py000066400000000000000000000066211450702451400233670ustar00rootroot00000000000000import six if six.PY3: httplib = __import__('http.client').client else: httplib = __import__('httplib') from geventhttpclient import response from geventhttpclient import header import gevent.socket class HTTPLibHeaders(header.Headers): def __getitem__(self, key): value = super(HTTPLibHeaders, self).__getitem__(key) if isinstance(value, (list, tuple)): return ", ".join(value) else: return value class HTTPResponse(response.HTTPSocketResponse): def __init__(self, sock, method='GET', strict=0, debuglevel=0, buffering=False, **kw): if method is None: method = 'GET' else: method = method.upper() super(HTTPResponse, self).__init__(sock, method=method, **kw) @property def msg(self): if hasattr(self, '_msg'): return self._msg self._msg = HTTPLibHeaders(self._headers_index) return self._msg @property def fp(self): return self @property def version(self): v = self.get_http_version() if v == 'HTTP/1.1': return 11 return 10 @property def status(self): return self.status_code @property def reason(self): return self.msg def _read_status(self): return (self.version, self.status_code, self.msg) def begin(self): pass def close(self): self.release() def isclosed(self): return self._sock is None def read(self, amt=None): return super(HTTPResponse, self).read(amt) def getheader(self, name, default=None): return self.get(name.lower(), default) def getheaders(self): return self._headers_index.items() @property def will_close(self): return self.message_complete and not self.should_keep_alive() def _check_close(self): return not self.should_keep_alive() 
HTTPLibConnection = httplib.HTTPConnection class HTTPConnection(httplib.HTTPConnection): response_class = HTTPResponse def __init__(self, *args, **kw): HTTPLibConnection.__init__(self, *args, **kw) # python 2.6 compat if not hasattr(self, "source_address"): self.source_address = None def connect(self): self.sock = gevent.socket.create_connection( (self.host,self.port), self.timeout, self.source_address) if self._tunnel_host: self._tunnel() try: import gevent.ssl except: pass else: class HTTPSConnection(HTTPConnection): default_port = 443 def __init__(self, host, port=None, key_file=None, cert_file=None, **kw): HTTPConnection.__init__(self, host, port, **kw) self.key_file = key_file self.cert_file = cert_file def connect(self): "Connect to a host on a given (SSL) port." sock = gevent.socket.create_connection((self.host, self.port), self.timeout, self.source_address) if self._tunnel_host: self.sock = sock self._tunnel() self.sock = gevent.ssl.wrap_socket( sock, self.key_file, self.cert_file) def patch(): httplib.HTTPConnection = HTTPConnection httplib.HTTPResponse = HTTPResponse try: httplib.HTTPSConnection = HTTPSConnection except NameError: pass geventhttpclient-2.0.11/src/geventhttpclient/response.py000066400000000000000000000215251450702451400235570ustar00rootroot00000000000000import six import errno from geventhttpclient._parser import HTTPResponseParser from geventhttpclient._parser import HTTPParseError from geventhttpclient.header import Headers import gevent.socket HEADER_STATE_INIT = 0 HEADER_STATE_FIELD = 1 HEADER_STATE_VALUE = 2 HEADER_STATE_DONE = 3 def copy(data): if six.PY3: return data[:] else: return str(data) class HTTPConnectionClosed(HTTPParseError): pass class HTTPProtocolViolationError(HTTPParseError): pass class HTTPResponse(HTTPResponseParser): def __init__(self, method='GET', headers_type=Headers): super(HTTPResponse, self).__init__() self.method = method.upper() self.headers_complete = False self.message_begun = False self.message_complete = 
False self._headers_index = headers_type() self._header_state = HEADER_STATE_INIT self._current_header_field = None self._current_header_value = None self._header_position = 1 self._body_buffer = bytearray() self.status_message = None def __getitem__(self, key): return self._headers_index[key] def get(self, key, default=None): return self._headers_index.get(key, default) def iteritems(self): return self._headers_index.iteritems() def items(self): return self._headers_index.items() def info(self): """ Basic cookielib compatibility """ return self._headers_index def should_close(self): """ return if we should close the connection. It is not the opposite of should_keep_alive method. It also checks that the body as been consumed completely. """ return not self.message_complete or \ self.parser_failed() or \ not super(HTTPResponse, self).should_keep_alive() headers = property(items) def __contains__(self, key): return key in self._headers_index @property def status_code(self): return self.get_code() @property def content_length(self): length = self.get('content-length', None) if length is not None: if six.PY3: return int(length) else: return long(length) @property def length(self): return self.content_length @property def version(self): return self.get_http_version() def _on_status(self, msg): self.status_message = msg def _on_message_begin(self): if self.message_begun: raise HTTPProtocolViolationError( "A new response began before end of %r." 
% self) self.message_begun = True def _on_message_complete(self): self.message_complete = True def _on_headers_complete(self): self._flush_header() self._header_state = HEADER_STATE_DONE self.headers_complete = True if self.method == 'HEAD': return True # SKIP BODY return False def _on_header_field(self, string): if self._header_state == HEADER_STATE_FIELD: self._current_header_field += string else: if self._header_state == HEADER_STATE_VALUE: self._flush_header() self._current_header_field = string self._header_state = HEADER_STATE_FIELD def _on_header_value(self, string): if self._header_state == HEADER_STATE_VALUE: self._current_header_value += string else: self._current_header_value = string self._header_state = HEADER_STATE_VALUE def _flush_header(self): if self._current_header_field is not None: self._headers_index.add(self._current_header_field, self._current_header_value) self._header_position += 1 self._current_header_field = None self._current_header_value = None def _on_body(self, buf): self._body_buffer += buf def __repr__(self): return "<{klass} status={status} headers={headers}>".format( klass=self.__class__.__name__, status=self.status_code, headers=dict(self.headers)) class HTTPSocketResponse(HTTPResponse): DEFAULT_BLOCK_SIZE = 1024 * 4 # 4KB def __init__(self, sock, block_size=DEFAULT_BLOCK_SIZE, method='GET', **kw): super(HTTPSocketResponse, self).__init__(method=method, **kw) self._sock = sock self.block_size = block_size self._read_headers() def release(self): try: if self._sock is not None and self.should_close(): try: self._sock.close() except: pass finally: self._sock = None def __del__(self): self.release() def _read_headers(self): try: start = True while not self.headers_complete: try: data = self._sock.recv(self.block_size) self.feed(data) # depending on gevent version we get a conn reset or no data if not len(data) and not self.headers_complete: if start: raise HTTPConnectionClosed( 'connection closed.') raise HTTPParseError('connection 
closed before' ' end of the headers') start = False except gevent.socket.error as e: if e.errno == errno.ECONNRESET: if start: raise HTTPConnectionClosed( 'connection closed.') raise if self.message_complete: self.release() except BaseException: self.release() raise def readline(self, sep=b"\r\n"): cursor = 0 multibyte = len(sep) > 1 while True: cursor = self._body_buffer.find(sep[0:1], cursor) if cursor >= 0: found = True if multibyte: pos = cursor cursor = self._body_buffer.find(sep, cursor) if cursor < 0: cursor = pos found = False if found: length = cursor + len(sep) line = copy(self._body_buffer[:length]) del self._body_buffer[:length] cursor = 0 return line else: cursor = 0 if self.message_complete: return b'' try: data = self._sock.recv(self.block_size) self.feed(data) except BaseException: self.release() raise def read(self, length=None): # get the existing body that may have already been parsed # during headers parsing if length is not None and len(self._body_buffer) >= length: read = self._body_buffer[0:length] del self._body_buffer[0:length] return copy(read) if self._sock is None: read = copy(self._body_buffer) del self._body_buffer[:] return read try: while not(self.message_complete) and ( length is None or len(self._body_buffer) < length): data = self._sock.recv(length or self.block_size) self.feed(data) except: self.release() raise if length is not None: read = copy(self._body_buffer[0:length]) del self._body_buffer[0:length] return read read = copy(self._body_buffer) del self._body_buffer[:] return read def __iter__(self): return self def next(self): bytes = self.read(self.block_size) if not len(bytes): raise StopIteration() return bytes def _on_message_complete(self): super(HTTPSocketResponse, self)._on_message_complete() self.release() def __enter__(self): return self def __exit__(self, *args): self.release() class HTTPSocketPoolResponse(HTTPSocketResponse): def __init__(self, sock, pool, **kw): self._pool = pool super(HTTPSocketPoolResponse, 
self).__init__(sock, **kw) def release(self): try: if self._sock is not None: if self.should_close(): self._pool.release_socket(self._sock) else: self._pool.return_socket(self._sock) finally: self._sock = None self._pool = None def __del__(self): if self._sock is not None: self._pool.release_socket(self._sock) geventhttpclient-2.0.11/src/geventhttpclient/tests/000077500000000000000000000000001450702451400225045ustar00rootroot00000000000000geventhttpclient-2.0.11/src/geventhttpclient/tests/oncert.pem000066400000000000000000000024661450702451400245110ustar00rootroot00000000000000TWCA Root Certification Authority ================================= -----BEGIN CERTIFICATE----- MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJ VEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlmaWNh dGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMzWhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQG EwJUVzESMBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NB IFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK AoIBAQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFEAcK0HMMx QhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HHK3XLfJ+utdGdIzdjp9xC oi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeXRfwZVzsrb+RH9JlF/h3x+JejiB03HFyP 4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/zrX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1r y+UPizgN7gr8/g+YnzAx3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIB BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkqhkiG 9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeCMErJk/9q56YAf4lC mtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdlsXebQ79NqZp4VKIV66IIArB6nCWlW QtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62Dlhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVY T0bf+215WfKEIlKuD8z7fDvnaspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocny Yh0igzyXxfkZYiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== -----END CERTIFICATE----- 
geventhttpclient-2.0.11/src/geventhttpclient/tests/server.crt000066400000000000000000000025061450702451400245270ustar00rootroot00000000000000-----BEGIN CERTIFICATE----- MIIDtzCCAp+gAwIBAgIJAPG8jjtUA97jMA0GCSqGSIb3DQEBCwUAMHIxCzAJBgNV BAYTAkZSMRMwEQYDVQQIDApTb21lLVN0YXRlMQ8wDQYDVQQHDAZOYW50ZXMxFjAU BgNVBAoMDUFudG9uaW4gQW1hbmQxDDAKBgNVBAsMA3d3dzEXMBUGA1UEAwwOKi5n d2lrem9uZS5vcmcwHhcNMTkxMDA4MTIxODIxWhcNNDkxMjMxMTIxODIzWjByMQsw CQYDVQQGEwJGUjETMBEGA1UECAwKU29tZS1TdGF0ZTEPMA0GA1UEBwwGTmFudGVz MRYwFAYDVQQKDA1BbnRvbmluIEFtYW5kMQwwCgYDVQQLDAN3d3cxFzAVBgNVBAMM DiouZ3dpa3pvbmUub3JnMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA 7MyhNrvVzaFAnKCycBsX49MvmkBbFPAqtY2p8iWuT3kg2+yb/tc5junZiPAvViBW r0RhrQGrCmG3d1qlIWd6yQStx9cMN3zS9hfiCD0imVRUW0ruhzPZZOLdl4GUM8bd psjSUB6ibK7fYicTyacELjA6m/m4PVdVO4mDaxJJ6WIvvDDY9o8Y9n7eO53ixQ9Z VxkZh1FmxL6wnhpqnALTi3vP15qKmigrZ2OubWbhpoMZN+WQRrT0Yj2y1Fu5zPk2 f17J5+Qeayj+GWJEhcvDDxFBd8xm7GNb6VlkzokBBHEX++FuB8Zj3JRlgw2ATpyw 7fyE3a53Umjtlxw9ZlRq+wIDAQABo1AwTjAdBgNVHQ4EFgQUXtmN+P8zjf+18Gvx VyiOQ+BZfZYwHwYDVR0jBBgwFoAUXtmN+P8zjf+18GvxVyiOQ+BZfZYwDAYDVR0T BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAG6mPbejtmtbj6Nn5T8Ub9NhuXNhK bOpU86bErXlLzgtLq275nAvJYq7T2VPaDPbqm0BeqWOU5gwzg66LHiEn3y5p6Zji 8T2/3to/YJPzrafjurXJv8YBV7D/dGskQBoJSJ46odrFyPz9f8g8chHKylu1tJPp bIB5c4oovGCA1LkAB33zRoCjHxntfw/27e423MXjHc0+pUFNCRdPCQEH+gifKIGi ETqdf+kJW7Hw9JBg1C0jzfqv41U+0K8tHTF72XoQ3gxYcaQxPAHVX4/VW8xiZ4DV feczwSroV30iWBMh7+gQeHlr5Q/6hSbTfrVmEd3mOw1/V33S44t35y83uw== -----END CERTIFICATE----- geventhttpclient-2.0.11/src/geventhttpclient/tests/server.key000066400000000000000000000032541450702451400245300ustar00rootroot00000000000000-----BEGIN PRIVATE KEY----- MIIEwAIBADANBgkqhkiG9w0BAQEFAASCBKowggSmAgEAAoIBAQDszKE2u9XNoUCc oLJwGxfj0y+aQFsU8Cq1janyJa5PeSDb7Jv+1zmO6dmI8C9WIFavRGGtAasKYbd3 WqUhZ3rJBK3H1ww3fNL2F+IIPSKZVFRbSu6HM9lk4t2XgZQzxt2myNJQHqJsrt9i JxPJpwQuMDqb+bg9V1U7iYNrEknpYi+8MNj2jxj2ft47neLFD1lXGRmHUWbEvrCe GmqcAtOLe8/XmoqaKCtnY65tZuGmgxk35ZBGtPRiPbLUW7nM+TZ/Xsnn5B5rKP4Z 
YkSFy8MPEUF3zGbsY1vpWWTOiQEEcRf74W4HxmPclGWDDYBOnLDt/ITdrndSaO2X HD1mVGr7AgMBAAECggEBANUJA8CKHD9y4v4GVIIm3cdJJ3pOi6K6rrHHUS1YWQmG n9k7dNxAJLg+Ckb4Tp3dWh80FevfpFEiuDvYgNcckYpnezBed88zrVdZBTEETyxC geHf99gK/XmV4GV9Xid2sdJquUr7RIUijjVvko3IhCpJmXDZqLcG6tc1T9X0Diq3 P8rmmSR+LV3Gq9nY/lTokDz772/OWejcFxDQd0NOc5gk8vewZhuA6Jd5SWQrPvTw TLdKIodjr/NUGTMeLVwAXegHHK5MDeyGrkq2ISvqpA8g5qB2UBWGHALKc6NmCKnD FY8CVo+6SHCYuVcM8h/UOyD0mxm+TN31Pzk8AouZpIECgYEA+dj3GFetN1eM3YE2 JaQCeB6x2FWJBESHhZNWMlnFimli9ba7r5ACYdTCF93WPIlbc+YQn3Uf9pYnX5J2 DiyQyaePe/tbhrlwWF9dHX/07l3bDu9GMNHQkmYMANALlPymJAbtLSRCl8AOFg+o QERqttoWJqxX86SXdX4LZU4MHVcCgYEA8qFosnhLVHlGTHL32vDyUc72lql9l4uV KZi7bm37MF944cjn5Xphyts4VgiQJSXXiepKX4xueQIMZ+2qnj52wzpvwvV1FnD0 WGZJ4IBD1KZaTBe3iIOn5k39eKLiqHiP+aT1etGEqjtdWVEMtFkWJBXG2GYqGAPz 8FvHSvdwdP0CgYEA1sgoqxl8Q+4Un7rbZIJYyRCCDJqDflK3OiJN4F0CD5nHQ7oC IMuo8Jco/8Fp0YK3RqwJsDrM7ntof6V1rXusQF62X3vW07uIk7/vWVPNqDiw/2a8 rW1BYwks2mn++uYyoRf/PmZEhRXKRjbRieYG4wgaMMbJmR7uQUfI3h65FWMCgYEA 05SJtkhMeIehE/2VwiknRsvSISQAT2z7UGDN4ciqqU44HbyS7HxCVyHACNZCzQIo 6S2UWaoelAuDQgXxKTmTWz/UQhDGYCF0ErpwZgO0IvZF++EavUMB4UXVaaOxP/SS qMJ/BvE6XTCTgNW63HJramBLHF5LCtI3oTyHmAoa2QkCgYEAtX6jiLm6WwcduZBY n5cT1Dl5ctP7UJWZvvOfnz+XGCWXdGDo2QwrOFiqPHNXZnpqIPkWAWLWiHMVFEco /+ZvwPUma3/VQMCAUZMlPu0qkbDM6BH2Vyujp4sadtEIOArQstim1p79kBGI19kU M9wu/MRq6BWJmk7tMkMN0IgButA= -----END PRIVATE KEY----- geventhttpclient-2.0.11/src/geventhttpclient/tests/test_client.py000066400000000000000000000240131450702451400253730ustar00rootroot00000000000000import os import sys import tempfile import pytest import json from contextlib import contextmanager from geventhttpclient import HTTPClient from gevent.ssl import SSLError #@UnresolvedImport import gevent.pool import gevent.server import gevent.pywsgi from six.moves import xrange listener = ('127.0.0.1', 54323) @contextmanager def server(handler): server = gevent.server.StreamServer( listener, handle=handler) server.start() try: yield finally: server.stop() @contextmanager def 
wsgiserver(handler): server = gevent.pywsgi.WSGIServer(('127.0.0.1', 54323), handler) server.start() try: yield finally: server.stop() class HTTPBinClient(HTTPClient): """Special HTTPClient with higher timeout values Args: HTTPClient (_type_): _description_ """ def __init__( self, host, port=None, headers=None, block_size=HTTPClient.BLOCK_SIZE, connection_timeout=30.0, network_timeout=30.0, disable_ipv6=True, concurrency=1, ssl=False, ssl_options=None, ssl_context_factory=None, insecure=False, proxy_host=None, proxy_port=None, version=HTTPClient.HTTP_11, ): super().__init__( host, port=port, headers=headers, block_size=block_size, connection_timeout=connection_timeout, network_timeout=network_timeout, disable_ipv6=disable_ipv6, concurrency=concurrency, ssl=ssl, ssl_options=ssl_options, ssl_context_factory=ssl_context_factory, insecure=insecure, proxy_host=proxy_host, proxy_port=proxy_port, version=version, ) @pytest.mark.online def test_client_simple(): client = HTTPBinClient('httpbin.org') assert client.port == 80 response = client.get('/') assert response.status_code == 200 body = response.read() assert len(body) @pytest.mark.online def test_client_without_leading_slash(): client = HTTPBinClient('httpbin.org') with client.get("") as response: assert response.status_code == 200 with client.get("base64/test") as response: assert(response.status_code in (200, 301, 302)) test_headers = {'User-Agent': 'Mozilla/5.0 (X11; U; Linux i686; de; rv:1.9.2.17) Gecko/20110422 Ubuntu/10.04 (lucid) Firefox/3.6.17'} @pytest.mark.online def test_client_with_default_headers(): client = HTTPBinClient.from_url('httpbin.org/', headers=test_headers) @pytest.mark.online def test_request_with_headers(): client = HTTPBinClient('httpbin.org') response = client.get('/', headers=test_headers) assert response.status_code == 200 client = HTTPClient('www.heise.de') raw_req_cmp = client._build_request('GET', '/tp/') @pytest.mark.online def test_build_request_relative_uri(): raw_req = 
client._build_request('GET', 'tp/') assert raw_req == raw_req_cmp @pytest.mark.online def test_build_request_absolute_uri(): raw_req = client._build_request('GET', '/tp/') assert raw_req == raw_req_cmp @pytest.mark.online def test_build_request_full_url(): raw_req = client._build_request('GET', 'http://www.heise.de/tp/') assert raw_req == raw_req_cmp @pytest.mark.online def test_build_request_invalid_host(): with pytest.raises(ValueError): client._build_request('GET', 'http://www.spiegel.de/') @pytest.mark.online def test_response_context_manager(): client = HTTPClient.from_url('http://httpbin.org/') r = None with client.get('/') as response: assert response.status_code == 200 r = response assert r._sock is None # released @pytest.mark.skipif( os.environ.get("TRAVIS") == "true", reason="We have issues on travis with the SSL tests" ) @pytest.mark.online def test_client_ssl(): client = HTTPClient('github.com', ssl=True) assert client.port == 443 response = client.get('/') assert response.status_code == 200 body = response.read() assert len(body) @pytest.mark.skipif( sys.version_info < (2, 7) and os.environ.get("TRAVIS") == "true", reason="We have issues on travis with the SSL tests" ) @pytest.mark.online def test_ssl_fail_invalid_certificate(): certs = os.path.join( os.path.dirname(os.path.abspath(__file__)), "oncert.pem") client = HTTPClient('github.com', ssl_options={'ca_certs': certs}) assert client.port == 443 with pytest.raises(SSLError) as e_info: client.get('/') assert e_info.value.reason == 'CERTIFICATE_VERIFY_FAILED' @pytest.mark.online def test_multi_queries_greenlet_safe(): client = HTTPBinClient('httpbin.org', concurrency=3) group = gevent.pool.Group() event = gevent.event.Event() def run(i): event.wait() response = client.get('/') return response, response.read() count = 0 ok_count = 0 gevent.spawn_later(0.2, event.set) for response, content in group.imap_unordered(run, xrange(5)): # occasionally httpbin.org will return 504 :-/ assert 
response.status_code in [200, 504] if response.status_code == 200: ok_count += 1 assert len(content) count += 1 assert count == 5 # ensure at least 3 of requests got 200 assert ok_count >= 3 class StreamTestIterator(object): def __init__(self, sep, count): lines = [json.dumps({ 'index': i, 'title': 'this is line %d' % i}) for i in xrange(0, count)] self.buf = (sep.join(lines) + sep).encode() def __len__(self): return len(self.buf) def __iter__(self): self.cursor = 0 return self def next(self): if self.cursor >= len(self.buf): raise StopIteration() gevent.sleep(0) pos = self.cursor + 10 data = self.buf[self.cursor:pos] self.cursor = pos return data def __next__(self): return self.next() def readline_iter(sock, addr): sock.recv(1024) iterator = StreamTestIterator("\n", 100) sock.sendall(b"HTTP/1.1 200 Ok\r\nConnection: close\r\n\r\n") for block in iterator: sock.sendall(block) def test_readline(): with server(readline_iter): client = HTTPClient(*listener, block_size=1) response = client.get('/') lines = [] while True: line = response.readline(b"\n") if not line: break data = json.loads(line[:-1].decode()) lines.append(data) assert len(lines) == 100 assert [x['index'] for x in lines] == [x for x in range(0, 100)] def readline_multibyte_sep(sock, addr): sock.recv(1024) iterator = StreamTestIterator("\r\n", 100) sock.sendall(b"HTTP/1.1 200 Ok\r\nConnection: close\r\n\r\n") for block in iterator: sock.sendall(block) def test_readline_multibyte_sep(): with server(readline_multibyte_sep): client = HTTPClient(*listener, block_size=1) response = client.get('/') lines = [] while True: line = response.readline(b"\r\n") if not line: break data = json.loads(line[:-1].decode()) lines.append(data) assert len(lines) == 100 assert [x['index'] for x in lines] == [x for x in range(0, 100)] def readline_multibyte_splitsep(sock, addr): sock.recv(1024) sock.sendall(b"HTTP/1.1 200 Ok\r\nConnection: close\r\n\r\n") sock.sendall(b'{"a": 1}\r') gevent.sleep(0) sock.sendall(b'\n{"a": 
2}\r\n{"a": 3}\r\n') def test_readline_multibyte_splitsep(): with server(readline_multibyte_splitsep): client = HTTPClient(*listener, block_size=1) response = client.get('/') lines = [] last_index = 0 while True: line = response.readline(b"\r\n") if not line: break data = json.loads(line[:-2].decode()) assert data['a'] == last_index + 1 last_index = data['a'] len(lines) == 3 def internal_server_error(sock, addr): sock.recv(1024) head = 'HTTP/1.1 500 Internal Server Error\r\n' \ 'Connection: close\r\n' \ 'Content-Type: text/html\r\n' \ 'Content-Length: 135\r\n\r\n' body = '\n \n Internal Server Error\n ' \ '\n \n

Internal Server Error

\n \n ' \ '\n\n\n' sock.sendall((head + body).encode()) sock.close() def test_internal_server_error(): with server(internal_server_error): client = HTTPClient(*listener) response = client.get('/') assert not response.should_keep_alive() assert response.should_close() body = response.read() assert len(body) == response.content_length def check_upload(body, body_length): def wsgi_handler(env, start_response): assert int(env.get('CONTENT_LENGTH')) == body_length assert body == env['wsgi.input'].read() start_response('200 OK', []) return [] return wsgi_handler def test_file_post(): body = tempfile.NamedTemporaryFile("a+b", delete=False) name = body.name try: body.write(b"123456789") body.close() with wsgiserver(check_upload(b"123456789", 9)): client = HTTPClient(*listener) with open(name, 'rb') as body: client.post('/', body) finally: os.remove(name) def test_bytes_post(): with wsgiserver(check_upload(b"12345", 5)): client = HTTPClient(*listener) client.post('/', b"12345") def test_string_post(): with wsgiserver(check_upload("12345", 5)): client = HTTPClient(*listener) client.post('/', "12345") def test_unicode_post(): byte_string = b'\xc8\xb9\xc8\xbc\xc9\x85' unicode_string = byte_string.decode('utf-8') with wsgiserver(check_upload(byte_string, len(byte_string))): client = HTTPClient(*listener) client.post('/', unicode_string) geventhttpclient-2.0.11/src/geventhttpclient/tests/test_headers.py000066400000000000000000000171431450702451400255360ustar00rootroot00000000000000import six from six.moves import xrange import gevent import gevent.monkey gevent.monkey.patch_all() import pytest if six.PY2: from cookielib import CookieJar from urllib2 import Request else: from http.cookiejar import CookieJar from urllib.request import Request import string import random import time from geventhttpclient.response import HTTPResponse from geventhttpclient.header import Headers MULTI_COOKIE_RESPONSE = """ HTTP/1.1 200 OK Server: nginx Date: Fri, 21 Sep 2012 18:49:35 GMT Content-Type: 
text/html; charset=windows-1251 Connection: keep-alive X-Powered-By: PHP/5.2.17 Set-Cookie: bb_lastvisit=1348253375; expires=Sat, 21-Sep-2013 18:49:35 GMT; path=/ Set-Cookie: bb_lastactivity=0; expires=Sat, 21-Sep-2013 18:49:35 GMT; path=/ Cache-Control: private Pragma: private Set-Cookie: bb_sessionhash=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=/ Set-Cookie: bb_referrerid=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=/ Set-Cookie: bb_userid=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=/ Set-Cookie: bb_password=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=/ Set-Cookie: bb_lastvisit=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=/ Set-Cookie: bb_lastactivity=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=/ Set-Cookie: bb_threadedmode=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=/ Set-Cookie: bb_userstyleid=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=/ Set-Cookie: bb_languageid=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=/ Set-Cookie: bb_fbaccesstoken=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=/ Set-Cookie: bb_fbprofilepicurl=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=/ Set-Cookie: bb_sessionhash=abcabcabcabcabcabcabcabcabcabcab; path=/; HttpOnly Set-Cookie: tapatalk_redirect3=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=1; domain=forum.somewhere.com Set-Cookie: bb_sessionhash=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=1; domain=forum.somewhere.com Set-Cookie: __utma=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=1; domain=forum.somewhere.com Set-Cookie: __utmb=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=1; domain=forum.somewhere.com Set-Cookie: __utmc=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=1; domain=forum.somewhere.com Set-Cookie: __utmz=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=1; domain=forum.somewhere.com Set-Cookie: vbulletin_collapse=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=1; 
domain=forum.somewhere.com Set-Cookie: bb_referrerid=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=1; domain=forum.somewhere.com Set-Cookie: bb_userid=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=1; domain=forum.somewhere.com Set-Cookie: bb_password=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=1; domain=forum.somewhere.com Set-Cookie: bb_lastvisit=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=1; domain=forum.somewhere.com Set-Cookie: bb_lastactivity=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=1; domain=forum.somewhere.com Set-Cookie: bb_threadedmode=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=1; domain=forum.somewhere.com Set-Cookie: bb_userstyleid=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=1; domain=forum.somewhere.com Set-Cookie: bb_languageid=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=1; domain=forum.somewhere.com Set-Cookie: bb_fbaccesstoken=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=1; domain=forum.somewhere.com Set-Cookie: bb_fbprofilepicurl=deleted; expires=Thu, 22-Sep-2011 18:49:34 GMT; path=1; domain=forum.somewhere.com Content-Encoding: gzip Content-Length: 26186 """.lstrip().replace('\n', '\r\n') # Do not remove the final empty line! 
def test_create_from_kwargs(): h = Headers(ab=1, cd=2, ef=3, gh=4) assert len(h) == 4 assert 'ab' in h def test_create_from_iterator(): h = Headers((x, x*5) for x in string.ascii_lowercase) assert len(h) == len(string.ascii_lowercase) def test_create_from_dict(): h = Headers(dict(ab=1, cd=2, ef=3, gh=4)) assert len(h) == 4 assert 'ab' in h def test_create_from_list(): h = Headers([('ab', 'A'), ('cd', 'B'), ('cookie', 'C'), ('cookie', 'D'), ('cookie', 'E')]) assert len(h) == 5 assert 'ab' in h assert len(h['cookie']) == 3 assert h['cookie'][0] == 'C' assert h['cookie'][-1] == 'E' def test_case_insensitivity(): h = Headers({'Content-Type': 'text/plain'}) h.add('Content-Encoding', 'utf8') for val in ('content-type', 'content-encoding'): assert val.upper() in h assert val.lower() in h assert val.capitalize() in h assert h.get(val.lower()) == h.get(val.upper()) == h.get(val.capitalize()) del h[val.upper()] assert val.lower() not in h def test_read_multiple_header(): parser = HTTPResponse() parser.feed(MULTI_COOKIE_RESPONSE) headers = parser._headers_index assert len(headers['set-cookie']) == MULTI_COOKIE_RESPONSE.count('Set-Cookie') assert headers['set-cookie'][0].startswith('bb_lastvisit') assert headers['set-cookie'][-1].startswith('bb_fbprofilepicurl') @pytest.mark.skip(reason="remote site behavior changed") def test_cookielib_compatibility(): cj = CookieJar() # Set time in order to be still valid in some years, when cookie strings expire cj._now = cj._policy._now = time.mktime((2012, 1, 1, 0, 0, 0, 0, 0, 0)) request = Request('http://test.com') parser = HTTPResponse() parser.feed(MULTI_COOKIE_RESPONSE) cookies = cj.make_cookies(parser, request) # Don't use extract_cookies directly, as time can not be set there manually for testing for cookie in cookies: if cj._policy.set_ok(cookie, request): cj.set_cookie(cookie) # Three valid, not expired cookies placed assert len(list(cj)) == 3 def test_compatibility_with_previous_API_read(): parser = HTTPResponse() 
parser.feed(MULTI_COOKIE_RESPONSE) for single_item in ('content-encoding', 'content-type', 'content-length', 'cache-control', 'connection'): assert isinstance(parser[single_item], six.string_types) assert isinstance(parser.get(single_item), six.string_types) def test_compatibility_with_previous_API_write(): h = Headers() h['asdf'] = 'jklm' h['asdf'] = 'dfdf' # Lists only if necessary assert h['asdf'] == 'dfdf' def test_copy(): rnd_txt = lambda length: ''.join(random.choice(string.ascii_letters) for _ in xrange(length)) h = Headers((rnd_txt(10), rnd_txt(50)) for _ in xrange(100)) c = h.copy() assert h is not c assert len(h) == len(c) assert set(h.keys()) == set(c.keys()) assert h == c assert type(h) is type(c) for _ in xrange(100): rnd_key = rnd_txt(9) c[rnd_key] = rnd_txt(10) assert rnd_key in c assert rnd_key not in h def test_fieldname_string_enforcement(): with pytest.raises(Exception): Headers({3: 3}) h = Headers() with pytest.raises(Exception): h[3] = 5 with pytest.raises(Exception): h.add(3, 4) with pytest.raises(Exception): del h[3] def test_header_replace(): d = Headers() d['Content-Type'] = "text/plain" d['content-type'] = "text/html" assert d['content-type'] == "text/html" def test_compat_dict(): h = Headers(D='asdf') h.add('E', 'd') h.add('E', 'f') h.add('Cookie', 'd') h.add('Cookie', 'e') h.add('Cookie', 'f') d = h.compatible_dict() for x in ('Cookie', 'D', 'E'): assert x in d assert d['D'] == 'asdf' assert d['E'] == 'd, f' assert d['Cookie'] == 'd, e, f' if __name__ == '__main__': test_copy() test_compat_dict() test_cookielib_compatibility() geventhttpclient-2.0.11/src/geventhttpclient/tests/test_http_host.py000066400000000000000000000021721450702451400261330ustar00rootroot00000000000000import pytest from geventhttpclient import HTTPClient from geventhttpclient.client import METHOD_GET def test_domain(): http = HTTPClient('localhost') assert http._build_request(METHOD_GET, "").lower().find("host: localhost\r\n") > 0 http = HTTPClient('localhost', 1234) 
assert http._build_request(METHOD_GET, "").lower().find("host: localhost:1234\r\n") > 0 def test_ipv4(): http = HTTPClient('127.0.0.1') assert http._build_request(METHOD_GET, "").lower().find("host: 127.0.0.1\r\n") > 0 http = HTTPClient('127.0.0.1', 1234) assert http._build_request(METHOD_GET, "").lower().find("host: 127.0.0.1:1234\r\n") > 0 def test_ipv6(): http = HTTPClient('[::1]') assert http._build_request(METHOD_GET, "").lower().find("host: [::1]\r\n") > 0 http = HTTPClient('[::1]', 1234) assert http._build_request(METHOD_GET, "").lower().find("host: [::1]:1234\r\n") > 0 http = HTTPClient('::1') assert http._build_request(METHOD_GET, "").lower().find("host: [::1]\r\n") > 0 http = HTTPClient('::1', 1234) assert http._build_request(METHOD_GET, "").lower().find("host: [::1]:1234\r\n") > 0 geventhttpclient-2.0.11/src/geventhttpclient/tests/test_httplib.py000066400000000000000000000035211450702451400255640ustar00rootroot00000000000000import six import pytest if six.PY2: from httplib import HTTPException else: from http.client import HTTPException from geventhttpclient.httplib import HTTPConnection import gevent.server from contextlib import contextmanager listener = ('127.0.0.1', 54322) @contextmanager def server(handler): server = gevent.server.StreamServer( listener, handle=handler) server.start() try: yield finally: server.stop() def wrong_response_status_line(sock, addr): sock.recv(4096) sock.sendall(b'HTTP/1.1 apfais df0 asdf\r\n\r\n') def test_httplib_exception(): with server(wrong_response_status_line): connection = HTTPConnection(*listener) connection.request("GET", '/') with pytest.raises(HTTPException): connection.getresponse() def success_response(sock, addr): sock.recv(4096) sock.sendall(b"HTTP/1.1 200 Ok\r\n" b"Content-Type: text/plain\r\n" b"Set-Cookie: foo=bar\r\n" b"Set-Cookie: baz=bar\r\n" b"Content-Length: 12\r\n\r\n" b"Hello World!") def test_success_response(): with server(success_response): connection = HTTPConnection(*listener) 
connection.request("GET", "/") response = connection.getresponse() assert response.should_keep_alive() assert response.message_complete assert not response.should_close() assert response.read().decode() == 'Hello World!' assert response.content_length == 12 def test_msg(): with server(success_response): connection = HTTPConnection(*listener) connection.request("GET", "/") response = connection.getresponse() assert response.msg['Set-Cookie'] == "foo=bar, baz=bar" assert response.msg['Content-Type'] == "text/plain" geventhttpclient-2.0.11/src/geventhttpclient/tests/test_keep_alive.py000066400000000000000000000063641450702451400262320ustar00rootroot00000000000000from geventhttpclient._parser import HTTPParseError from geventhttpclient.response import HTTPResponse import pytest def test_simple(): response = HTTPResponse() response.feed("""HTTP/1.1 200 Ok\r\nContent-Length: 0\r\n\r\n""") assert response.headers_complete assert response.message_complete assert response.should_keep_alive() assert not response.should_close() assert response.status_code == 200 def test_simple_with_body(): response = HTTPResponse() response.feed("""HTTP/1.1 200 Ok\r\nContent-Length: 5\r\n\r\n12345""") assert response.headers_complete assert response.message_complete assert response.should_keep_alive() assert not response.should_close() assert response.status_code == 200 def test_keep_alive_http_10_without_header(): response = HTTPResponse() response.feed("""HTTP/1.0 200 Ok\r\n\r\n""") response.feed("") assert response.headers_complete assert response.message_complete assert not response.should_keep_alive() assert response.should_close() assert response.status_code == 200 def test_keep_alive_http_10_with_header(): response = HTTPResponse() response.feed("HTTP/1.0 200 Ok\r\nConnection: keep-alive\r\nContent-Length: 5\r\n\r\n12345") assert response.headers_complete assert response.message_complete assert response.should_keep_alive() assert not response.should_close() assert response.status_code 
== 200 def test_keep_alive_http_10_closed(): response = HTTPResponse() response.feed("HTTP/1.0 200 Ok\r\nConnection: close\r\nContent-Length: 5\r\n\r\n12345") assert response.headers_complete assert response.message_complete assert not response.should_keep_alive() assert response.should_close() assert response.status_code == 200 def test_keep_alive_bodyless_response_with_body(): response = HTTPResponse(method='HEAD') response.feed("HTTP/1.1 200 Ok\r\n\r\n") assert response.message_complete assert response.should_keep_alive() response = HTTPResponse(method='HEAD') with pytest.raises(HTTPParseError): response.feed( """HTTP/1.1 200 Ok\r\nContent-Length: 10\r\n\r\n0123456789""") assert not response.should_keep_alive() assert response.should_close() def test_keep_alive_bodyless_10x_request_with_body(): response = HTTPResponse() response.feed("""HTTP/1.1 100 Continue\r\n\r\n""") assert response.should_keep_alive() response = HTTPResponse() response.feed("""HTTP/1.1 100 Continue\r\nTransfer-Encoding: chunked\r\n\r\n""") assert response.should_keep_alive() assert response.should_close() def test_close_connection_and_no_content_length(): response = HTTPResponse() response.feed("HTTP/1.1 200 Ok\r\n" "Connection: close\r\n\r\n" "Hello World!") assert response._body_buffer == bytearray(b"Hello World!") assert not response.should_keep_alive() assert response.should_close() def test_close_connection_with_content_length(): response = HTTPResponse() response.feed("HTTP/1.1 200 Ok\r\nContent-length: 5\r\nConnection: close\r\n\r\n12345") assert response._body_buffer == bytearray(b"12345") assert not response.should_keep_alive() assert response.should_close() geventhttpclient-2.0.11/src/geventhttpclient/tests/test_network_failures.py000066400000000000000000000103421450702451400275000ustar00rootroot00000000000000import six import pytest if six.PY2: from httplib import HTTPException else: from http.client import HTTPException from geventhttpclient import HTTPClient import gevent.server 
import gevent.socket from contextlib import contextmanager CRLF = "\r\n" listener = ('127.0.0.1', 54326) @contextmanager def server(handler): server = gevent.server.StreamServer( listener, handle=handler) server.start() try: yield finally: server.stop() def wrong_response_status_line(sock, addr): sock.recv(4096) sock.sendall(b'HTTP/1.1 apfais df0 asdf\r\n\r\n') def test_exception(): with server(wrong_response_status_line): connection = HTTPClient(*listener) with pytest.raises(HTTPException): connection.get('/') def close(sock, addr): sock.close() def test_close(): with server(close): client = HTTPClient(*listener) with pytest.raises(HTTPException): client.get('/') def close_after_recv(sock, addr): sock.recv(4096) sock.close() def test_close_after_recv(): with server(close_after_recv): client = HTTPClient(*listener) with pytest.raises(HTTPException): client.get('/') def timeout_recv(sock, addr): sock.recv(4096) gevent.sleep(1) def test_timeout_recv(): with server(timeout_recv): connection = HTTPClient(*listener, network_timeout=0.1) with pytest.raises(gevent.socket.timeout): connection.request("GET", '/') def timeout_send(sock, addr): gevent.sleep(1) def test_timeout_send(): with server(timeout_send): connection = HTTPClient(*listener, network_timeout=0.1) with pytest.raises(gevent.socket.timeout): connection.request("GET", '/') def close_during_content(sock, addr): sock.recv(4096) sock.sendall(b"""HTTP/1.1 200 Ok\r\nContent-Length: 100\r\n\r\n""") sock.close() def test_close_during_content(): with server(close_during_content): client = HTTPClient(*listener, block_size=1) response = client.get('/') with pytest.raises(HTTPException): response.read() def content_too_small(sock, addr): sock.recv(4096) sock.sendall(b"""HTTP/1.1 200 Ok\r\nContent-Length: 100\r\n\r\ncontent""") gevent.sleep(10) def test_content_too_small(): with server(content_too_small): client = HTTPClient(*listener, network_timeout=0.2) with pytest.raises(gevent.socket.timeout): response = 
client.get('/') response.read() def close_during_chuncked_readline(sock, addr): sock.recv(4096) sock.sendall(b'HTTP/1.1 200 Ok\r\nTransfer-Encoding: chunked\r\n\r\n') chunks = ['This is the data in the first chunk\r\n', 'and this is the second one\r\n', 'con\r\n'] for chunk in chunks: gevent.sleep(0.1) sock.sendall((hex(len(chunk))[2:] + CRLF + chunk + CRLF).encode()) sock.close() def test_close_during_chuncked_readline(): with server(close_during_chuncked_readline): client = HTTPClient(*listener) response = client.get('/') assert response['transfer-encoding'] == 'chunked' chunks = [] with pytest.raises(HTTPException): data = 'enter_loop' while data: data = response.readline() chunks.append(data) assert len(chunks) == 3 def timeout_during_chuncked_readline(sock, addr): sock.recv(4096) sock.sendall(b"HTTP/1.1 200 Ok\r\nTransfer-Encoding: chunked\r\n\r\n") chunks = ['This is the data in the first chunk\r\n', 'and this is the second one\r\n', 'con\r\n'] for chunk in chunks: sock.sendall((hex(len(chunk))[2:] + CRLF + chunk + CRLF).encode()) gevent.sleep(2) sock.close() def test_timeout_during_chuncked_readline(): with server(timeout_during_chuncked_readline): client = HTTPClient(*listener, network_timeout=0.1) response = client.get('/') assert response['transfer-encoding'] == 'chunked' chunks = [] with pytest.raises(gevent.socket.timeout): data = 'enter_loop' while data: data = response.readline() chunks.append(data) assert len(chunks) == 3 geventhttpclient-2.0.11/src/geventhttpclient/tests/test_no_module_ssl.py000066400000000000000000000023351450702451400267620ustar00rootroot00000000000000# These tests are broken since gevent 23.7.0. I'm optimistic this is not a real issue, # it's just that gevent got more picky. 
import sys import pytest import gevent import gevent.ssl class DisableSSL(object): def __enter__(self): self._modules = dict() # pretend there is no ssl support self._modules['ssl'] = sys.modules.pop('ssl', None) sys.modules['ssl'] = None # ensure gevent must be re-imported to fire an ssl ImportError for module_name in [k for k in sys.modules.keys() if k.startswith('gevent')]: self._modules[module_name] = sys.modules.pop(module_name) def __exit__(self, *args, **kwargs): # Restore all previously disabled modules sys.modules.update(self._modules) def test_import_with_nossl(): return with DisableSSL(): from geventhttpclient import httplib from geventhttpclient import HTTPClient def test_httpclient_raises_with_no_ssl(): return with DisableSSL(): from geventhttpclient import HTTPClient with pytest.raises(Exception): HTTPClient.from_url("https://httpbin.org/") if __name__ == '__main__': test_import_with_nossl() test_httpclient_raises_with_no_ssl() geventhttpclient-2.0.11/src/geventhttpclient/tests/test_parser.py000066400000000000000000000101221450702451400254050ustar00rootroot00000000000000import six from geventhttpclient.response import HTTPResponse if six.PY3: from http.client import HTTPException from io import StringIO else: from httplib import HTTPException from cStringIO import StringIO import pytest from functools import wraps import sys from six.moves import xrange RESPONSE = 'HTTP/1.1 301 Moved Permanently\r\nLocation: http://www.google.fr/\r\nContent-Type: text/html; charset=UTF-8\r\nDate: Thu, 13 Oct 2011 15:03:12 GMT\r\nExpires: Sat, 12 Nov 2011 15:03:12 GMT\r\nCache-Control: public, max-age=2592000\r\nServer: gws\r\nContent-Length: 218\r\nX-XSS-Protection: 1; mode=block\r\n\r\n\n301 Moved\n

301 Moved

\nThe document has moved\nhere.\r\n\r\n' # borrowed from gevent # sys.gettotalrefcount is available only with python built with debug flag on gettotalrefcount = getattr(sys, 'gettotalrefcount', None) def wrap_refcount(method): if gettotalrefcount is None: return method @wraps(method) def wrapped(*args, **kwargs): import gc gc.disable() gc.collect() deltas = [] d = None try: for _ in xrange(4): d = gettotalrefcount() method(*args, **kwargs) if 'urlparse' in sys.modules: sys.modules['urlparse'].clear_cache() d = gettotalrefcount() - d deltas.append(d) if deltas[-1] == 0: break else: raise AssertionError('refcount increased by %r' % (deltas, )) finally: gc.collect() gc.enable() return wrapped @wrap_refcount def test_parse(): parser = HTTPResponse() parser.feed(RESPONSE) assert parser.message_begun assert parser.headers_complete assert parser.message_complete @wrap_refcount def test_parse_small_blocks(): parser = HTTPResponse() parser.feed(RESPONSE) response = StringIO(RESPONSE) while not parser.message_complete: data = response.read(10) parser.feed(data) assert parser.message_begun assert parser.headers_complete assert parser.message_complete assert parser.should_keep_alive() assert parser.status_code == 301 assert sorted(parser.items()) == [ ('cache-control', 'public, max-age=2592000'), ('content-length', '218'), ('content-type', 'text/html; charset=UTF-8'), ('date', 'Thu, 13 Oct 2011 15:03:12 GMT'), ('expires', 'Sat, 12 Nov 2011 15:03:12 GMT'), ('location', 'http://www.google.fr/'), ('server', 'gws'), ('x-xss-protection', '1; mode=block'), ] @wrap_refcount def test_parse_error(): response = HTTPResponse() try: response.feed("HTTP/1.1 asdf\r\n\r\n") response.feed("") assert response.status_code, 0 assert response.message_begun except HTTPException as e: assert 'Invalid response status' in str(e) else: assert False, "should have raised" @wrap_refcount def test_incomplete_response(): response = HTTPResponse() response.feed("""HTTP/1.1 200 
Ok\r\nContent-Length:10\r\n\r\n1""") with pytest.raises(HTTPException): response.feed("") assert response.should_keep_alive() assert response.should_close() @wrap_refcount def test_response_too_long(): response = HTTPResponse() data = """HTTP/1.1 200 Ok\r\nContent-Length:1\r\n\r\ntoolong""" with pytest.raises(HTTPException): response.feed(data) @wrap_refcount def test_on_body_raises(): response = HTTPResponse() def on_body(buf): raise RuntimeError('error') response._on_body = on_body with pytest.raises(RuntimeError): response.feed(RESPONSE) @wrap_refcount def test_on_message_begin(): response = HTTPResponse() def on_message_begin(): raise RuntimeError('error') response._on_message_begin = on_message_begin with pytest.raises(RuntimeError): response.feed(RESPONSE) geventhttpclient-2.0.11/src/geventhttpclient/tests/test_ssl.py000066400000000000000000000173021450702451400247210ustar00rootroot00000000000000import gevent.monkey gevent.monkey.patch_ssl() try: import unittest.mock as mock except ImportError: import mock import dpkt.ssl import six import sys from contextlib import contextmanager import pytest import gevent.server import gevent.socket import gevent.ssl import os from gevent import joinall from gevent.socket import error as socket_error from geventhttpclient import HTTPClient try: from ssl import CertificateError except ImportError: from backports.ssl_match_hostname import CertificateError pytestmark = pytest.mark.skipif( sys.version_info < (2, 7) and os.environ.get("TRAVIS") == "true", reason="We have issues on travis with the SSL tests" ) BASEDIR = os.path.dirname(__file__) KEY = os.path.join(BASEDIR, 'server.key') CERT = os.path.join(BASEDIR, 'server.crt') @contextmanager def server(handler, backlog=1): server = gevent.server.StreamServer( ("localhost", 0), backlog=backlog, handle=handler, keyfile=KEY, certfile=CERT) server.start() try: yield (server.server_host, server.server_port) finally: server.stop() @contextmanager def timeout_connect_server(): sock 
= gevent.socket.socket(gevent.socket.AF_INET, gevent.socket.SOCK_STREAM, 0) sock = gevent.ssl.wrap_socket(sock, keyfile=KEY, certfile=CERT) sock.setsockopt(gevent.socket.SOL_SOCKET, gevent.socket.SO_REUSEADDR, 1) sock.bind(("localhost", 0)) sock.listen(1) def run(sock): conns = [] while True: conn, addr = sock.accept() conns.append(conns) conn.recv(1024) gevent.sleep(10) job = gevent.spawn(run, sock) try: yield sock.getsockname() sock.close() finally: job.kill() def simple_ssl_response(sock, addr): sock.recv(1024) sock.sendall(b'HTTP/1.1 200 Ok\r\nConnection: close\r\n\r\n') sock.close() def test_simple_ssl(): with server(simple_ssl_response) as listener: http = HTTPClient(*listener, insecure=True, ssl=True, ssl_options={'ca_certs': CERT}) response = http.get('/') assert response.status_code == 200 response.read() def timeout_on_connect(sock, addr): sock.recv(1024) sock.sendall(b'HTTP/1.1 200 Ok\r\nContent-Length: 0\r\n\r\n') def test_implicit_sni_from_host_in_ssl(): server_host, server_port, sent_sni = _get_sni_sent_from_client() assert sent_sni == server_host def test_implicit_sni_from_header_in_ssl(): server_host, server_port, sent_sni = _get_sni_sent_from_client( headers={'host': 'ololo_special_host'}, ) assert sent_sni == 'ololo_special_host' def test_explicit_sni_in_ssl(): server_host, server_port, sent_sni = _get_sni_sent_from_client( ssl_options={'server_hostname': 'test_sni'}, headers={'host': 'ololo_special_host'}, ) assert sent_sni == 'test_sni' def _get_sni_sent_from_client(**additional_client_args): with sni_checker_server() as ctx: server_sock, server_greenlet = ctx server_addr, server_port = server_sock.getsockname()[:2] mock_addrinfo = ( gevent.socket.AF_INET, gevent.socket.SOCK_STREAM, gevent.socket.IPPROTO_TCP, 'localhost', ('127.0.0.1', server_port) ) with mock.patch( 'gevent.socket.getaddrinfo', mock.Mock(return_value=[mock_addrinfo]) ): server_host = 'some_foo' http = HTTPClient( server_host, server_port, insecure=True, ssl=True, 
connection_timeout=.1, ssl_context_factory=gevent.ssl.create_default_context, **additional_client_args ) def run(http): try: http.get('/') except socket_error: pass # handshake will not be completed client_greenlet = gevent.spawn(run, http) joinall([client_greenlet, server_greenlet]) return server_host, server_port, server_greenlet.value @contextmanager def sni_checker_server(): sock = gevent.socket.socket(gevent.socket.AF_INET, gevent.socket.SOCK_STREAM, 0) sock.setsockopt(gevent.socket.SOL_SOCKET, gevent.socket.SO_REUSEADDR, 1) sock.bind(("localhost", 0)) sock.listen(1) # @cyberw 2021-07-10: seems this doesnt exist any more, hope it doesnt make any difference # sock.last_seen_sni = None def run(sock): while True: conn, addr = sock.accept() client_hello = conn.recv(1024) return extract_sni_from_client_hello(client_hello) def extract_sni_from_client_hello(hello_packet): records, bytes_used = dpkt.ssl.tls_multi_factory(hello_packet) for record in records: # TLS handshake only if record.type != 22: continue if len(record.data) == 0: continue # Client Hello only if record.data[0] not in (1, chr(1)): continue handshake = dpkt.ssl.TLSHandshake(record.data) ch = handshake.data SNI_extension = [ ext_data for (ext_type, ext_data) in ch.extensions if ext_type == 0x0 # server_name ] if SNI_extension: SNI_extension = SNI_extension[0] sni_list, _ = dpkt.ssl.parse_variable_array(SNI_extension, 2) sni_list = sni_list[1:] # skip SNI entry type first_entry, _ = dpkt.ssl.parse_variable_array(sni_list, 2) return first_entry.decode() job = gevent.spawn(run, sock) try: yield sock, job sock.close() finally: job.kill() def test_timeout_on_connect(): with timeout_connect_server() as listener: http = HTTPClient(*listener, insecure=True, ssl=True, ssl_options={'ca_certs': CERT}) def run(http, wait_time=100): try: response = http.get('/') gevent.sleep(wait_time) response.read() except Exception: pass gevent.spawn(run, http) gevent.sleep(0) e = None try: http2 = HTTPClient(*listener, 
insecure=True, ssl=True, connection_timeout=0.1, ssl_options={'ca_certs': CERT}) http2.get('/') except gevent.ssl.SSLError as error: e = error except gevent.socket.timeout as error: e = error except: raise assert e is not None, 'should have raised' if isinstance(e, gevent.ssl.SSLError): assert "operation timed out" in str(e) def network_timeout(sock, addr): sock.recv(1024) gevent.sleep(10) sock.sendall(b'HTTP/1.1 200 Ok\r\nContent-Length: 0\r\n\r\n') def test_network_timeout(): with server(network_timeout) as listener: http = HTTPClient(*listener, ssl=True, insecure=True, network_timeout=0.1, ssl_options={'ca_certs': CERT}) if six.PY3: with pytest.raises(gevent.socket.timeout): response = http.get('/') assert response.status_code == 0, 'should have timed out.' else: with pytest.raises(gevent.ssl.SSLError): response = http.get('/') assert response.status_code == 0, 'should have timed out.' def test_verify_hostname(): with server(simple_ssl_response) as listener: http = HTTPClient(*listener, ssl=True, ssl_options={'ca_certs': CERT}) with pytest.raises(CertificateError): http.get('/') geventhttpclient-2.0.11/src/geventhttpclient/tests/test_url.py000066400000000000000000000101761450702451400247240ustar00rootroot00000000000000import six from geventhttpclient.url import URL url_full = 'http://getgauss.com/subdir/file.py?param=value&other=true#frag' url_path_only = '/path/to/something?param=value&other=true' def test_simple_url(): url = URL(url_full) assert url.path == '/subdir/file.py' assert url.host == 'getgauss.com' assert url.port == 80 assert url.query_string == 'param=value&other=true' assert url.fragment == 'frag' def test_path_only(): url = URL(url_path_only) assert url.host == '' assert url.port == None assert url.path == '/path/to/something' assert url.query_string == 'param=value&other=true' def test_params(): url = URL(url_full, params={"pp":"hello"}) assert url.path == '/subdir/file.py' assert url.host == 'getgauss.com' assert url.port == 80 assert 
url.query_string == 'param=value&other=true&pp=hello' assert url.fragment == 'frag' def test_params_urlencoded(): url = URL(url_full, params={"a/b":"c/d"}) assert url.path == '/subdir/file.py' assert url.host == 'getgauss.com' assert url.port == 80 assert url.query_string == 'param=value&other=true&a%2Fb=c%2Fd' assert url.fragment == 'frag' def test_query_string_urlencoded(): url = URL("http://getgauss.com/?foo=bar with spaces") assert url.query_string == 'foo=bar%20with%20spaces' assert url.host == 'getgauss.com' assert url.port == 80 def test_empty(): url = URL() assert url.host == '' assert url.port == 80 assert url.query_string == '' assert url.fragment == '' assert url.netloc == '' assert str(url) == 'http:///' def test_empty_path(): assert URL('http://getgauss.com').path == '' def test_consistent_reparsing(): for surl in (url_full, url_path_only): url = URL(surl) reparsed = URL(str(url)) for attr in URL.__slots__: assert getattr(reparsed, attr) == getattr(url, attr) def test_redirection_abs_path(): url = URL(url_full) updated = url.redirect('/test.html') assert updated.host == url.host assert updated.port == url.port assert updated.path == '/test.html' assert updated.query_string == '' assert updated.fragment == '' def test_redirection_rel_path(): url = URL(url_full) for redir in ('test.html?key=val', 'folder/test.html?key=val'): updated = url.redirect(redir) assert updated.host == url.host assert updated.port == url.port assert updated.path.startswith('/subdir/') assert updated.path.endswith(redir.split('?', 1)[0]) assert updated.query_string == 'key=val' assert updated.fragment == '' def test_redirection_full_path(): url_full2_plain = 'http://google.de/index' url = URL(url_full) updated = url.redirect(url_full2_plain) url_full2 = URL(url_full2_plain) for attr in URL.__slots__: assert getattr(updated, attr) == getattr(url_full2, attr) assert str(url_full2) == url_full2_plain def test_params(): assert URL("/some/url", params={"a":"b", "c":2}).query_string == 
"a=b&c=2" def test_equality(): assert URL('https://example.com/') != URL('http://example.com/') assert URL('http://example.com/') == URL('http://example.com/') def test_pw(): url = URL('http://asdf:dd@heise.de/index.php?aaaa=bbbbb') assert url.host == 'heise.de' assert url.port == 80 assert url.user == 'asdf' assert url.password == 'dd' def test_pw_with_port(): url = URL('http://asdf:dd@heise.de:90/index.php?aaaa=bbbbb') assert url.host == 'heise.de' assert url.port == 90 assert url.user == 'asdf' assert url.password == 'dd' def test_ipv6(): url = URL('http://[2001:db8:85a3:8d3:1319:8a2e:370:7348]/') assert url.host == '2001:db8:85a3:8d3:1319:8a2e:370:7348' assert url.port == 80 assert url.user == None def test_ipv6_with_port(): url = URL('https://[2001:db8:85a3:8d3:1319:8a2e:370:7348]:8080/') assert url.host == '2001:db8:85a3:8d3:1319:8a2e:370:7348' assert url.port == 8080 assert url.user == None if __name__ == '__main__': test_redirection_abs_path() test_redirection_rel_path() test_redirection_full_path() test_ipv6_with_port() geventhttpclient-2.0.11/src/geventhttpclient/tests/test_useragent.py000066400000000000000000000162571450702451400261250ustar00rootroot00000000000000import gevent.pywsgi import os import pytest import six import tempfile if six.PY2: from cookielib import CookieJar else: from http.cookiejar import CookieJar from contextlib import contextmanager from geventhttpclient.useragent import UserAgent, BadStatusCode @contextmanager def wsgiserver(handler): server = gevent.pywsgi.WSGIServer(('127.0.0.1', 54323), handler) server.start() try: yield finally: server.stop() def check_upload(body, headers=None): def wsgi_handler(env, start_response): if headers: # For Python 2.6 which does not have viewitems if six.PY2: env >= headers else: assert six.viewitems(env) >= six.viewitems(headers) assert body == env['wsgi.input'].read() start_response('200 OK', []) return [] return wsgi_handler def internal_server_error(): def wsgi_handler(env, start_response): 
start_response('500 Internal Server Error', []) return [] return wsgi_handler def check_redirect(): def wsgi_handler(env, start_response): path_info = env.get('PATH_INFO') if path_info == "/": start_response('301 Moved Permanently', [('Location', 'http://127.0.0.1:54323/redirected')]) return [] else: assert path_info == "/redirected" start_response('200 OK', []) return [b"redirected"] return wsgi_handler def check_querystring(): def wsgi_handler(env, start_response): querystring = env["QUERY_STRING"] start_response('200 OK', [("Content-type", "text/plaim")]) return [querystring.encode("utf-8")] return wsgi_handler def set_cookie(): def wsgi_handler(env, start_response): start_response('200 OK', [('Set-Cookie', 'testcookie=testdata')]) return [] return wsgi_handler def return_brotli(): def wsgi_handler(env, start_response): path_info = env.get('PATH_INFO') if path_info == "/": start_response('200 OK', [("Content-Encoding", "br")]) return [b"\x1b'\x00\x98\x04rq\x88\xa1'\xbf]\x12\xac+g!%\x98\xf4\x02\xc4\xda~)8\xba\x06xO\x11)Y\x02"] return wsgi_handler def test_file_post(): body = tempfile.NamedTemporaryFile("a+b", delete=False) name = body.name try: body.write(b"123456789") body.close() headers = {'CONTENT_LENGTH': '9', 'CONTENT_TYPE': 'application/octet-stream'} with wsgiserver(check_upload(b"123456789", headers)): useragent = UserAgent() with open(name, 'rb') as body: useragent.urlopen('http://127.0.0.1:54323/', method='POST', payload=body) finally: os.remove(name) def test_multipart_post(): body = tempfile.NamedTemporaryFile("a+b", delete=False) name = body.name try: body.write(b"123456789") headers = {'CONTENT_LENGTH': '237', 'CONTENT_TYPE': 'multipart/form-data; boundary=custom_boundary'} files = {'file': ('report.xls', body, 'application/vnd.ms-excel', {'Expires': '0'}, 'custom_boundary')} with wsgiserver(check_upload((b'--custom_boundary\r\n' b'Content-Disposition: form-data; name="files"\r\n' b'\r\n' b'file\r\n' b'--custom_boundary\r\n' b'Content-Disposition: 
form-data; name="file"; filename="report.xls"\r\n' b'Content-Type: application/vnd.ms-excel\r\n' b'Expires: 0\r\n' b'\r\n' b'\r\n' b'--custom_boundary--' b'\r\n'), headers)): useragent = UserAgent() useragent.urlopen('http://127.0.0.1:54323/', method='POST', files=files) finally: body.close() os.remove(name) def test_unicode_post(): byte_string = b'\xc8\xb9\xc8\xbc\xc9\x85' unicode_string = byte_string.decode('utf-8') headers = {'CONTENT_LENGTH': str(len(byte_string)), 'CONTENT_TYPE': 'text/plain; charset=utf-8'} with wsgiserver(check_upload(byte_string, headers)): useragent = UserAgent() useragent.urlopen('http://127.0.0.1:54323/', method='POST', payload=unicode_string) def test_bytes_post(): headers = {'CONTENT_LENGTH': '5', 'CONTENT_TYPE': 'application/octet-stream'} with wsgiserver(check_upload(b"12345", headers)): useragent = UserAgent() useragent.urlopen('http://127.0.0.1:54323/', method='POST', payload=b"12345") def test_dict_post_with_content_type(): headers = {'Content-Type': 'application/x-www-form-urlencoded'} payload = {"foo": "bar"} with wsgiserver(set_cookie()): # lazy. 
I just want to see that we dont crash making the request resp = UserAgent().urlopen('http://127.0.0.1:54323/', method='POST', payload=payload, headers=headers) assert resp.status_code == 200 def test_redirect(): with wsgiserver(check_redirect()): resp = UserAgent().urlopen('http://127.0.0.1:54323/') assert resp.status_code == 200 assert b"redirected" == resp.content def test_params(): with wsgiserver(check_querystring()): resp = UserAgent().urlopen('http://127.0.0.1:54323/?param1=b', params={"param2":"hello"}) assert resp.status_code == 200 assert resp.content == b"param1=b¶m2=hello" def test_params_quoted(): with wsgiserver(check_querystring()): resp = UserAgent().urlopen('http://127.0.0.1:54323/?a/b', params={"path":"/"}) assert resp.status_code == 200 assert resp.content == b"a/b&path=%2F" def test_server_error_with_bytes(): with wsgiserver(internal_server_error()): useragent = UserAgent() with pytest.raises(BadStatusCode): useragent.urlopen('http://127.0.0.1:54323/', method='POST', payload=b"12345") def test_server_error_with_unicode(): with wsgiserver(internal_server_error()): useragent = UserAgent() with pytest.raises(BadStatusCode): useragent.urlopen('http://127.0.0.1:54323/', method='POST', payload=u"12345") def test_server_error_with_file(): body = tempfile.NamedTemporaryFile("a+b", delete=False) name = body.name try: body.write(b"123456789") body.close() with wsgiserver(internal_server_error()): useragent = UserAgent() with pytest.raises(BadStatusCode): with open(name, 'rb') as body: useragent.urlopen('http://127.0.0.1:54323/', method='POST', payload=body) finally: os.remove(name) def test_cookiejar(): with wsgiserver(set_cookie()): useragent = UserAgent(cookiejar=CookieJar()) assert b"" == useragent.urlopen('http://127.0.0.1:54323/').read() def test_brotli_response(): with wsgiserver(return_brotli()): resp = UserAgent().urlopen('http://127.0.0.1:54323/', params={"path":"/"}) assert resp.status_code == 200 assert resp.content == 
b"https://github.com/gwik/geventhttpclient" geventhttpclient-2.0.11/src/geventhttpclient/url.py000066400000000000000000000156411450702451400225250ustar00rootroot00000000000000import six if six.PY3: from urllib import parse as urlparse from urllib.parse import urlencode from urllib.parse import quote_plus from collections.abc import Mapping basestring = (str, bytes) else: import urlparse from urllib import quote_plus, urlencode from collections import Mapping DEFAULT_PORTS = { 'http': 80, 'https': 443 } def to_key_val_list(value): """Take an object and test to see if it can be represented as a dictionary. If it can be, return a list of tuples, e.g., :: >>> to_key_val_list([('key', 'val')]) [('key', 'val')] >>> to_key_val_list({'key': 'val'}) [('key', 'val')] >>> to_key_val_list('string') Traceback (most recent call last): ... ValueError: cannot encode objects that are not 2-tuples :rtype: list """ if value is None: return None if isinstance(value, (str, bytes, bool, int)): raise ValueError('cannot encode objects that are not 2-tuples') if isinstance(value, Mapping): value = value.items() return list(value) class URL(object): """ A mutable URL class You build it from a url string. >>> url = URL('http://getgauss.com/urls?param=asdfa') >>> url URL(http://getgauss.com/urls?param=asdfa) You cast it to a tuple, it returns the same tuple as `urlparse.urlsplit`. >>> tuple(url) ('http', 'getgauss.com', '/urls', 'param=asdfa', '') You can cast it as a string. >>> str(url) 'http://getgauss.com/urls?param=asdfa' You can change attributes. 
>>> url.host = 'infrae.com' >>> url URL(http://infrae.com/urls?auth_token=asdfaisdfuasdf¶m=asdfa) """ __slots__ = ('scheme', 'host', 'port', 'path', 'query', 'fragment', 'user', 'password', 'params') quoting_safe = '' def __init__(self, url=None, params=None): if url is not None: scheme, netloc, path, query, fragment = urlparse.urlsplit(url) else: scheme, netloc, path, query, fragment = 'http', '', '/', '', '' self.scheme = scheme self.fragment = fragment user, password, host, port = None, None, '', None if netloc: if '@' in netloc: user_pw, netloc = netloc.rsplit('@', 1) if ':' in user_pw: user, password = user_pw.rsplit(':', 1) else: user = user_pw if netloc.startswith('['): host, port_pt = netloc.rsplit(']', 1) host = host.strip('[]') if port_pt: port = int(port_pt.strip(':')) else: if ':' in netloc: host, port = netloc.rsplit(':', 1) port = int(port) else: host = netloc if not port: port = DEFAULT_PORTS.get(self.scheme) self.host = host self.port = port self.user = user self.password = password self.path = path or '' self.query = query.replace(" ", "%20") # get a little closer to the behaviour of requests.utils.requote_uri self.params = params @property def netloc(self): return self.full_netloc(auth=False) def full_netloc(self, auth=True): buf = '' if self.user and auth: buf += self.user if self.passwort: buf += ':' + self.passwort buf += '@' if ':' in self.host: buf += '[' + self.host + ']' else: buf += self.host if self.port is None: return buf elif DEFAULT_PORTS.get(self.scheme) == self.port: return buf buf += ':' + str(self.port) return buf def __copy__(self): clone = type(self)() for key in self.__slots__: val = getattr(self, key) if isinstance(val, dict): val = val.copy() setattr(clone, key, val) return clone def __repr__(self): return "URL(%s)" % str(self) def __iter__(self): return iter((self.scheme, self.full_netloc(), self.path, self.query_string, self.fragment)) def __str__(self): return urlparse.urlunsplit(tuple(self)) def __eq__(self, other): 
return str(self) == str(other) @staticmethod def _encode_params(data): """Encode parameters in a piece of data. Will successfully encode parameters when passed as a dict or a list of 2-tuples. """ if isinstance(data, (str, bytes)): return data elif hasattr(data, 'read'): return data elif hasattr(data, '__iter__'): result = [] for k, vs in to_key_val_list(data): if isinstance(vs, basestring) or not hasattr(vs, '__iter__'): vs = [vs] for v in vs: if v is not None: result.append( (k.encode('utf-8') if isinstance(k, str) else k, v.encode('utf-8') if isinstance(v, str) else v)) return urlencode(result, doseq=True) else: return data @property def query_string(self): query = [] if self.query: query.append(self.query) if self.params: query.append(self._encode_params(self.params)) return "&".join(query) @property def request_uri(self): query = self.query_string if not query: return self.path return self.path + '?' + query def append_to_path(self, value): if value.startswith('/'): if self.path.endswith('/'): self.path += value[1:] return self.path elif not self.path.endswith("/"): self.path += "/" + value return self.path self.path += value return self.path def redirect(self, other): """ Redirect to the other URL, relative to the current one """ if not isinstance(other, type(self)): other = type(self)(other) if not other.host: other.scheme = self.scheme other.host = self.host other.port = self.port if not other.path.startswith('/'): if self.path.endswith('/'): other.path = self.path + other.path else: other.path = self.path.rsplit('/', 1)[0] + '/' + other.path return other def stripped_auth(self): """ Remove fragment and authentication for proxy handling """ clone = type(self)() # Copy all fields except fragment, username and password for key in self.__slots__[:5]: val = getattr(self, key) if isinstance(val, dict): val = val.copy() setattr(clone, key, val) return clone 
geventhttpclient-2.0.11/src/geventhttpclient/useragent.py000066400000000000000000000522631450702451400237210ustar00rootroot00000000000000import socket import errno import six import sys import ssl import zlib import os import brotli from six.moves import xrange, cStringIO from six.moves.urllib.parse import urlencode from six import print_, reraise, string_types, text_type import gevent from urllib3 import encode_multipart_formdata from urllib3.fields import RequestField try: from gevent.dns import DNSError except ImportError: class DNSError(Exception): pass from .url import URL, to_key_val_list from .client import HTTPClient, HTTPClientPool basestring = (str, bytes) class ConnectionError(Exception): def __init__(self, url, *args, **kwargs): self.url = url self.__dict__.update(kwargs) if args and isinstance(args[0], string_types): try: self.text = args[0] % args[1:] except TypeError: self.text = args[0] + ': ' + str(args[1:]) if args else '' else: self.text = str(args[0]) if len(args) == 1 else '' if kwargs: self.text += ', ' if self.text else '' self.kwargs_text = ', '.join('%s=%s' % (key, val) for key, val in six.iteritems(kwargs)) self.text += self.kwargs_text else: self.text = '' def __str__(self): if self.text: return "URL %s: %s" % (self.url, self.text) else: return "URL %s" % self.url def __repr__(self): repr_str = super().__repr__() if self.kwargs_text: return repr_str.replace(')', ''.join([', ', self.kwargs_text, ')'])) return repr_str class RetriesExceeded(ConnectionError): pass class BadStatusCode(ConnectionError): pass class EmptyResponse(ConnectionError): pass class CompatRequest(object): """ urllib / cookielib compatible request class. 
See also: http://docs.python.org/library/cookielib.html """ def __init__(self, url, method='GET', headers=None, payload=None, params=None): self.params = params self.set_url(url) self.original_host = self.url_split.host self.method = method self.headers = headers self.payload = payload def set_url(self, url): if isinstance(url, URL): self.url = str(url) self.url_split = url else: self.url = url self.url_split = URL(self.url, params=self.params) def get_full_url(self): return self.url def get_host(self): return self.url_split.host def get_type(self): return self.url_split.scheme def get_origin_req_host(self): return self.original_host def is_unverifiable(self): """ See http://tools.ietf.org/html/rfc2965.html. Not fully implemented! """ return False @property def unverifiable(self): return self.is_unverifiable() def get_header(self, header_name, default=None): return self.headers.get(header_name, default) def has_header(self, header_name): return header_name in self.headers def header_items(self): return self.headers.items() def add_unredirected_header(self, key, val): self.headers.add(key, val) def _drop_payload(self): self.method = 'GET' self.payload = None for item in ('content-length', 'content-type', 'content-encoding'): self.headers.discard(item) def _drop_cookies(self): for item in ('cookie', 'cookie2'): self.headers.discard(item) def redirect(self, code, location): """ Modify the request inplace to point to the new location """ self.set_url(self.url_split.redirect(location)) if code in (302, 303): self._drop_payload() self._drop_cookies() class CompatResponse(object): """ Adapter for urllib responses with some extensions """ __slots__ = 'headers', '_response', '_request', '_sent_request', '_cached_content' def __init__(self, ghc_response, request=None, sent_request=None): self._response = ghc_response self._request = request self._sent_request = sent_request self.headers = self._response._headers_index @property def status(self): """ The returned http status 
""" # TODO: Should be a readable string return str(self.status_code) @property def status_code(self): """ The http status code as plain integer """ return self._response.get_code() @property def stream(self): return self._response def read(self, n=None): """ Read n bytes from the response body """ return self._response.read(n) def readline(self): return self._response.readline() def release(self): return self._response.release() def unzipped(self, gzip=True, br=False): bodystr = self._response.read() if gzip: return zlib.decompress(bodystr, 16 + zlib.MAX_WBITS) elif br: return brotli.decompress(bodystr) else: # zlib only provides the zlib compress format, not the deflate format; # so on top of all there's this workaround: try: return zlib.decompress(bodystr, -zlib.MAX_WBITS) except zlib.error: return zlib.decompress(bodystr) @property def content(self): """ Unzips if necessary and buffers the received body. Careful with large files! """ try: return self._cached_content except AttributeError: self._cached_content = self._content() return self._cached_content def _content(self): try: content_type = self.headers.getheaders('content-encoding')[0].lower() except IndexError: # No content-encoding header set content_type = 'identity' if content_type == 'gzip': ret = self.unzipped(gzip=True) elif content_type == 'deflate': ret = self.unzipped(gzip=False) elif content_type == 'identity': ret = self._response.read() elif content_type == 'br': ret = self.unzipped(gzip=False, br=True) elif content_type == 'compress': raise ValueError("Compression type not supported: %s" % content_type) else: raise ValueError("Unknown content encoding: %s" % content_type) self.release() return ret def __len__(self): """ The content lengths as should be returned from the headers """ try: return int(self.headers.getheaders('content-length')[0]) except (IndexError, ValueError): return len(self.content) def __nonzero__(self): """ If we have an empty response body, we still don't want to evaluate as 
false """ return True def info(self): """ Adaption to cookielib: Alias for headers """ return self.headers def __enter__(self): return self def __exit__(self, *args): self.release() class RestkitCompatResponse(CompatResponse): """ Some extra lines to also serve as a drop in replacement for restkit """ def body_string(self): return self.content def body_stream(self): return self._response @property def status_int(self): return self.status_code class UserAgent(object): response_type = CompatResponse request_type = CompatRequest valid_response_codes = frozenset([200, 206, 301, 302, 303, 307]) redirect_resonse_codes = frozenset([301, 302, 303, 307]) def __init__(self, max_redirects=3, max_retries=3, retry_delay=0, cookiejar=None, headers=None, **kwargs): self.max_redirects = int(max_redirects) self.max_retries = int(max_retries) self.retry_delay = retry_delay self.default_headers = HTTPClient.DEFAULT_HEADERS.copy() if headers: self.default_headers.update(headers) self.cookiejar = cookiejar self.clientpool = HTTPClientPool(**kwargs) def close(self): self.clientpool.close() def __del__(self): self.close() def _make_request(self, url, method='GET', headers=None, payload=None, params=None, files=None): req_headers = self.default_headers.copy() if headers: req_headers.update(headers) if payload or files: # Adjust headers depending on payload content content_type = req_headers.get('content-type', None) if files: (body, content_type) = self._encode_files(files, payload) payload = body req_headers['content-type'] = content_type if isinstance(payload, dict): if not content_type: req_headers['content-type'] = "application/x-www-form-urlencoded; charset=utf-8" payload = urlencode(payload) elif not content_type and isinstance(payload, text_type): req_headers['content-type'] = 'text/plain; charset=utf-8' elif not content_type: req_headers['content-type'] = 'application/octet-stream' return self.request_type(url, method=method, headers=req_headers, payload=payload, params=params) 
    def _urlopen(self, request):
        """ Send one request via a pooled client and wrap the raw response
            in self.response_type.
        """
        client = self.clientpool.get_client(request.url_split)
        resp = client.request(request.method, request.url_split.request_uri,
                              body=request.payload, headers=request.headers)
        return self.response_type(resp, request=request, sent_request=resp._sent_request)

    def _verify_status(self, status_code, url=None):
        """ Hook for subclassing; raises BadStatusCode for any status not
            in valid_response_codes.
        """
        if status_code not in self.valid_response_codes:
            raise BadStatusCode(url, code=status_code)

    def _encode_files(self, files, data):
        """
        Method taken from models in requests library , usage is the same.
        Only difference is that you can add custom boundary in 5-tuple version.

        Build the body for a multipart/form-data request.

        Will successfully encode files when passed as a dict or a list of
        tuples. Order is retained if data is a list of tuples but arbitrary
        if parameters are supplied as a dict.
        The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype), 4-tuples (filename, fileobj, contentype, custom_headers) or 5-tuples (filename, fileobj, contentype, custom_headers, custom boundary).

        example:
        files = {'file': ('report.xls', body, 'application/vnd.ms-excel', {'Expires': '0'}, 'custom_boundary')}
        """
        if not files:
            raise ValueError("Files must be provided.")
        elif isinstance(data, basestring):
            raise ValueError("Data must not be a string.")

        new_fields = []
        fields = to_key_val_list(data or {})
        files = to_key_val_list(files or {})

        # Plain form fields first, normalized to (bytes-safe key, value) pairs
        for field, val in fields:
            if isinstance(val, basestring) or not hasattr(val, "__iter__"):
                val = [val]
            for v in val:
                if v is not None:
                    # Don't call str() on bytestrings: in Py3 it all goes wrong.
                    if not isinstance(v, bytes):
                        v = str(v)

                    new_fields.append(
                        (
                            field.decode("utf-8")
                            if isinstance(field, bytes)
                            else field,
                            v.encode("utf-8") if isinstance(v, str) else v,
                        )
                    )

        # File fields: each value may be a bare fileobj/string or a
        # 2/3/4/5-tuple as documented above
        for (k, v) in files:
            # support for explicit filename
            ft = None
            fh = None
            boundary = None
            if isinstance(v, (tuple, list)):
                if len(v) == 2:
                    fn, fp = v
                elif len(v) == 3:
                    fn, fp, ft = v
                elif len(v) == 4:
                    fn, fp, ft, fh = v
                else:
                    fn, fp, ft, fh, boundary = v
            else:
                fn = self.guess_filename(v) or k
                fp = v

            if isinstance(fp, (str, bytes, bytearray)):
                fdata = fp
            elif hasattr(fp, "read"):
                fdata = fp.read()
            elif fp is None:
                # Explicit None file value: skip this field entirely
                continue
            else:
                fdata = fp

            rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
            rf.make_multipart(content_type=ft)
            new_fields.append(rf)

        # NOTE(review): only the boundary of the *last* 5-tuple file (or
        # None) is passed on here — presumably intentional, verify callers
        body, content_type = encode_multipart_formdata(new_fields, boundary)

        return body, content_type

    def _handle_error(self, e, url=None):
        """ Hook for subclassing. Raise the error to interrupt further retrying,
            return it to continue retries and save the error, when retries
            exceed the limit.
            Temporary errors should be swallowed here for automatic retries.
        """
        # Timeouts, selected transient socket/DNS errnos, SSL read
        # timeouts and empty responses are considered retryable
        if isinstance(e, (socket.timeout, gevent.Timeout)):
            return e
        elif isinstance(e, (socket.error, DNSError)) and \
                e.errno in set([errno.ETIMEDOUT, errno.ENOLINK, errno.ENOENT, errno.EPIPE]):
            return e
        elif isinstance(e, ssl.SSLError) and 'read operation timed out' in str(e):
            return e
        elif isinstance(e, EmptyResponse):
            return e
        # Anything else is fatal: re-raise with the original traceback
        raise reraise(type(e), e, sys.exc_info()[2])

    def _handle_retries_exceeded(self, url, last_error=None):
        """ Hook for subclassing """
        raise RetriesExceeded(url, self.max_retries, original=last_error)

    def urlopen(self, url, method='GET', response_codes=valid_response_codes,
                headers=None, payload=None, to_string=False, debug_stream=None, params=None, **kwargs):
        """ Open an URL, do retries and redirects and verify the status code

        Returns a response object, or the buffered body when
        to_string=True (empty bodies then count as errors and are
        retried).
        NOTE(review): the response_codes parameter is accepted but never
        used; _verify_status reads self.valid_response_codes instead.
        """
        # POST or GET parameters can be passed in **kwargs
        if kwargs:
            if not payload:
                payload = kwargs
            elif isinstance(payload, dict):
                payload.update(kwargs)
            # NOTE(review): 'files' is only extracted when kwargs is
            # non-empty, and it also remains inside the payload dict —
            # looks accidental, confirm before changing
            files = kwargs.get("files", None)
        else:
            files = None

        req = self._make_request(url, method=method, headers=headers, payload=payload, params=params, files=files)
        # NOTE(review): if max_retries is 0 the loop body never runs and
        # last_error below is referenced unbound (NameError)
        for retry in xrange(self.max_retries):
            if retry > 0 and self.retry_delay:
                # Don't wait the first time and skip if no delay specified
                gevent.sleep(self.retry_delay)
            for _ in xrange(self.max_redirects):
                if self.cookiejar is not None:
                    self.cookiejar.add_cookie_header(req)

                try:
                    resp = self._urlopen(req)
                except gevent.GreenletExit:
                    raise
                except BaseException as e:
                    e.request = req
                    last_error = self._handle_error(e, url=req.url)
                    break # Continue with next retry

                # We received a response
                if debug_stream is not None:
                    debug_stream.write(self._conversation_str(req.url, resp, payload=req.payload) + '\n\n')
                try:
                    self._verify_status(resp.status_code, url=req.url)
                except Exception as e:
                    # Basic transmission successful, but not the wished result
                    # Let's collect some debug info
                    e.response = resp
                    e.request = req
                    e.http_log = self._conversation_str(req.url, resp, payload=req.payload)
                    resp.release()
                    last_error = self._handle_error(e, url=req.url)
                    break # Continue with next retry

                if self.cookiejar is not None:
                    self.cookiejar.extract_cookies(resp, req)

                redirection = resp.headers.get('location')
                if isinstance(redirection, six.binary_type):
                    redirection = redirection.decode('utf-8')
                if resp.status_code in self.redirect_resonse_codes and redirection:
                    # Follow the redirect within the inner loop
                    resp.release()
                    try:
                        req.redirect(resp.status_code, redirection)
                        continue
                    except Exception as e:
                        last_error = self._handle_error(e, url=req.url)
                        break

                if not to_string:
                    return resp
                else:
                    # to_string added as parameter, to handle empty response
                    # bodies as error and continue retries automatically
                    try:
                        ret = resp.content
                    except Exception as e:
                        last_error = self._handle_error(e, url=req.url)
                        break
                    else:
                        if not ret:
                            e = EmptyResponse(url, "Empty response body received")
                            last_error = self._handle_error(e, url=url)
                            break
                        else:
                            return ret
            else:
                # Inner loop exhausted without returning: too many redirects
                e = RetriesExceeded(url, "Redirection limit reached (%s)" % self.max_redirects)
                last_error = self._handle_error(e, url=url)
        else:
            return self._handle_retries_exceeded(url, last_error=last_error)

    @classmethod
    def _conversation_str(cls, url, resp, payload=None):
        """ Render the full request/response exchange as one string for
            debug logging. Buffers the response body via resp.content.
        """
        if six.PY2:
            header_str = '\n'.join('%s: %s' % item for item in resp.headers.iteroriginal())
            ret = 'REQUEST: ' + url + '\n' + resp._sent_request
            if payload and isinstance(payload, string_types):
                ret += payload + '\n\n'
            ret += 'RESPONSE: ' + resp._response.version + ' ' + \
                   str(resp.status_code) + '\n' + \
                   header_str + '\n\n' + resp.content
        else:
            header_str = '\n'.join('%s: %s' % item for item in resp.headers.iteroriginal())
            ret = 'REQUEST: ' + url + '\n' + resp._sent_request
            if payload:
                if isinstance(payload, six.binary_type):
                    # Binary payloads may not be valid UTF-8; degrade gracefully
                    try:
                        ret += payload.decode('utf-8') + '\n\n'
                    except UnicodeDecodeError:
                        ret += 'UnicodeDecodeError' + '\n\n'
                elif isinstance(payload, six.text_type):
                    ret += payload + '\n\n'
            # NOTE(review): assumes the body decodes as UTF-8 — a
            # non-UTF-8 body raises UnicodeDecodeError here, unguarded
            ret += 'RESPONSE: ' + resp._response.version + ' ' + \
                   str(resp.status_code) + '\n' + \
                   header_str + '\n\n' + resp.content[:].decode('utf-8')
        return ret

    @classmethod
    def guess_filename(cls, file):
        """Tries to guess the filename of the given object."""
        # Use the object's .name attribute unless it is a pseudo-name
        # like "<stdin>" (angle brackets mark non-file streams)
        name = getattr(file, "name", None)
        if name and isinstance(name, basestring) and name[0] != "<" and name[-1] != ">":
            return os.path.basename(name)

    def download(self, url, fpath, chunk_size=16 * 1024, resume=False, **kwargs):
        """ Download url to the local file fpath in chunks, optionally
            resuming a partial file via an HTTP Range request.

        Retries up to max_retries times; falls back to partial resume
        only when the server advertises accept-ranges: bytes.
        Returns the last response object.
        """
        kwargs.pop('to_string', None)
        headers = kwargs.pop('headers', {})
        headers['Connection'] = 'Keep-Alive'
        if resume and os.path.isfile(fpath):
            offset = os.path.getsize(fpath)
        else:
            offset = 0

        for _ in xrange(self.max_retries):
            if offset:
                # Ask for the remainder; verify the server honored the range
                headers['Range'] = 'bytes=%d-' % offset
                resp = self.urlopen(url, headers=headers, **kwargs)
                cr = resp.headers.get('Content-Range')
                if resp.status_code != 206 or not cr or not cr.startswith('bytes') or \
                        not cr.split(None, 1)[1].startswith(str(offset)):
                    # Range not honored: restart the download from scratch
                    resp.release()
                    offset = 0
            if not offset:
                headers.pop('Range', None)
                resp = self.urlopen(url, headers=headers, **kwargs)

            with open(fpath, 'ab' if offset else 'wb') as f:
                if offset:
                    f.seek(offset, os.SEEK_SET)
                try:
                    data = resp.read(chunk_size)
                    with resp:
                        while data:
                            f.write(data)
                            data = resp.read(chunk_size)
                except BaseException as e:
                    self._handle_error(e, url=url)
                    if resp.headers.get('accept-ranges') == 'bytes':
                        # Only if this header is set, we can fall back to partial download
                        offset = f.tell()
                    continue
            # All done, break outer loop
            break
        else:
            # NOTE(review): relies on e being bound by the last failed
            # iteration's except clause — confirm before refactoring
            self._handle_retries_exceeded(url, last_error=e)
        return resp


class RestkitCompatUserAgent(UserAgent):
    # Drop-in restkit replacement: only swaps the response wrapper class
    response_type = RestkitCompatResponse


class XmlrpcCompatUserAgent(UserAgent):
    """ Adapter so UserAgent can serve as an xmlrpclib transport """

    def request(self, host, handler, request, verbose=False):
        debug_stream = None if not verbose else cStringIO.StringIO()
        ret = self.urlopen(host + handler, 'POST', payload=request, to_string=True, debug_stream=debug_stream)
        if debug_stream is not None:
            # verbose mode: replay the captured conversation to stdout
            debug_stream.seek(0)
            print_(debug_stream.read())
        return ret
geventhttpclient-2.0.11/tox.ini000066400000000000000000000005051450702451400164770ustar00rootroot00000000000000[tox]
envlist = py{27,35,36,37,38,39,310,311,312}
[testenv] allowlist_externals = rm find pytest commands= rm -rf build find . -name '*.pyc' -delete python setup.py clean python setup.py build_ext --inplace pip install -r requirements-dev.txt pytest --fulltrace src/geventhttpclient/tests