==> httpcore-1.0.2/.github/ISSUE_TEMPLATE/1-issue.md <==

---
name: Issue
about: Please only raise an issue if you've been advised to do so after discussion. Thanks! 🙏
---

The starting point for issues should usually be a discussion...

https://github.com/encode/httpcore/discussions

Possible bugs may be raised as a "Potential Issue" discussion, feature requests may be raised as an "Ideas" discussion. We can then determine if the discussion needs to be escalated into an "Issue" or not.

This will help us ensure that the "Issues" list properly reflects ongoing or needed work on the project.

---

- [ ] Initially raised as discussion #...

==> httpcore-1.0.2/.github/ISSUE_TEMPLATE/config.yml <==

# Ref: https://help.github.com/en/github/building-a-strong-community/configuring-issue-templates-for-your-repository#configuring-the-template-chooser
blank_issues_enabled: false
contact_links:
  - name: Discussions
    url: https://github.com/encode/httpcore/discussions
    about: >
      The "Discussions" forum is where you want to start. 💖
  - name: Chat
    url: https://gitter.im/encode/community
    about: >
      Our community chat forum.

==> httpcore-1.0.2/.github/PULL_REQUEST_TEMPLATE.md <==

# Summary

# Checklist

- [ ] I understand that this PR may be closed in case there was no previous discussion. (This doesn't apply to typos!)
- [ ] I've added a test for each change that was introduced, and I tried as much as possible to make a single atomic change.
- [ ] I've updated the documentation accordingly.

==> httpcore-1.0.2/.github/dependabot.yml <==

version: 2
updates:
  - package-ecosystem: "pip"
    directory: "/"
    schedule:
      interval: "monthly"

==> httpcore-1.0.2/.github/stale.yml <==

# Number of days of inactivity before an issue becomes stale
daysUntilStale: 180
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 14
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
  This issue has been automatically marked as stale because it has not had
  recent activity. It will be closed if no further activity occurs. Thank you
  for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false

==> httpcore-1.0.2/.github/workflows/publish.yml <==

name: Publish

on:
  push:
    tags:
      - '*'

jobs:
  publish:
    name: "Publish release"
    runs-on: "ubuntu-latest"
    environment:
      name: deploy
    steps:
      - uses: "actions/checkout@v4"
      - uses: "actions/setup-python@v4"
        with:
          python-version: 3.8
      - name: "Install dependencies"
        run: "scripts/install"
      - name: "Build package & docs"
        run: "scripts/build"
      - name: "Publish to PyPI & deploy docs"
        run: "scripts/publish"
        env:
          TWINE_USERNAME: __token__
          TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}

==> httpcore-1.0.2/.github/workflows/test-suite.yml <==

---
name: Test Suite

on:
  push:
    branches: ["master"]
  pull_request:
    branches: ["master"]

jobs:
  tests:
    name: "Python ${{ matrix.python-version }}"
    runs-on: "ubuntu-latest"

    strategy:
      matrix:
        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]

    steps:
      - uses: "actions/checkout@v4"
      - uses: "actions/setup-python@v4"
        with:
          python-version: "${{ matrix.python-version }}"
          allow-prereleases: true
      - name: "Install dependencies"
        run: "scripts/install"
      - name: "Run linting checks"
        run: "scripts/check"
      - name: "Build package & docs"
        run: "scripts/build"
      - name: "Run tests"
        run: "scripts/test"
      - name: "Enforce coverage"
        run: "scripts/coverage"

==> httpcore-1.0.2/.gitignore <==

*.pyc
.coverage
.pytest_cache/
.mypy_cache/
__pycache__/
htmlcov/
site/
*.egg-info/
venv*/
.python-version
build/
dist/

==> httpcore-1.0.2/CHANGELOG.md <==

# Changelog

All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

## 1.0.2 (November 10th, 2023)

- Fix `float("inf")` timeouts in `Event.wait` function. (#846)

## 1.0.1 (November 3rd, 2023)

- Fix pool timeout to account for the total time spent retrying. (#823)
- Raise a neater RuntimeError when the correct async deps are not installed. (#826)
- Add support for synchronous TLS-in-TLS streams. (#840)

## 1.0.0 (October 6th, 2023)

From version 1.0 our async support is now optional, as the package has minimal dependencies by default.

For async support use either `pip install 'httpcore[asyncio]'` or `pip install 'httpcore[trio]'`.

The project versioning policy is now explicitly governed by SEMVER. See https://semver.org/.

- Async support becomes fully optional. (#809)
- Add support for Python 3.12. (#807)

## 0.18.0 (September 8th, 2023)

- Add support for HTTPS proxies. (#745, #786)
- Drop Python 3.7 support. (#727)
- Handle `sni_hostname` extension with SOCKS proxy. (#774)
- Handle HTTP/1.1 half-closed connections gracefully. (#641)
- Change the type of `Extensions` from `Mapping[Str, Any]` to `MutableMapping[Str, Any]`. (#762)

## 0.17.3 (July 5th, 2023)

- Support async cancellations, ensuring that the connection pool is left in a clean state when cancellations occur. (#726)
- The networking backend interface has [been added to the public API](https://www.encode.io/httpcore/network-backends). Some classes which were previously private implementation details are now part of the top-level public API. (#699)
- Graceful handling of HTTP/2 GoAway frames, with requests being transparently retried on a new connection. (#730)
- Add exceptions when a synchronous `trace callback` is passed to an asynchronous request or an asynchronous `trace callback` is passed to a synchronous request. (#717)
- Drop Python 3.7 support. (#727)

## 0.17.2 (May 23rd, 2023)

- Add `socket_options` argument to `ConnectionPool` and `HTTPProxy` classes. (#668)
- Improve logging with per-module logger names. (#690)
- Add `sni_hostname` request extension. (#696)
- Resolve race condition during import of `anyio` package. (#692)
- Enable TCP_NODELAY for all synchronous sockets. (#651)

## 0.17.1 (May 17th, 2023)

- If 'retries' is set, then allow retries if an SSL handshake error occurs. (#669)
- Improve correctness of tracebacks on network exceptions, by raising properly chained exceptions. (#678)
- Prevent connection-hanging behaviour when HTTP/2 connections are closed by a server-sent 'GoAway' frame. (#679)
- Fix edge-case exception when removing requests from the connection pool. (#680)
- Fix pool timeout edge-case. (#688)

## 0.17.0 (March 16th, 2023)

- Add DEBUG level logging. (#648)
- Respect HTTP/2 max concurrent streams when settings updates are sent by server. (#652)
- Increase the allowable HTTP header size to 100kB. (#647)
- Add `retries` option to SOCKS proxy classes. (#643)

## 0.16.3 (December 20th, 2022)

- Allow `ws` and `wss` schemes. Allows us to properly support websocket upgrade connections. (#625)
- Forwarding HTTP proxies use a connection-per-remote-host. Required by some proxy implementations. (#637)
- Don't raise `RuntimeError` when closing a connection pool with active connections. Removes some error cases when cancellations are used. (#631)
- Lazy import `anyio`, so that it's no longer a hard dependency, and isn't imported if unused. (#639)

## 0.16.2 (November 25th, 2022)

- Revert 'Fix async cancellation behaviour', which introduced race conditions. (#627)
- Raise `RuntimeError` if attempting to use UNIX domain sockets on Windows. (#619)

## 0.16.1 (November 17th, 2022)

- Fix HTTP/1.1 interim informational responses, such as "100 Continue". (#605)

## 0.16.0 (October 11th, 2022)

- Support HTTP/1.1 informational responses. (#581)
- Fix async cancellation behaviour. (#580)
- Support `h11` 0.14. (#579)

## 0.15.0 (May 17th, 2022)

- Drop Python 3.6 support. (#535)
- Ensure HTTP proxy CONNECT requests include `timeout` configuration. (#506)
- Switch to explicit `typing.Optional` for type hints. (#513)
- For `trio` map OSError exceptions to `ConnectError`. (#543)

## 0.14.7 (February 4th, 2022)

- Requests which raise a PoolTimeout need to be removed from the pool queue. (#502)
- Fix AttributeError that happened when `Socks5Connection` connections were terminated. (#501)

## 0.14.6 (February 1st, 2022)

- Fix SOCKS support for `http://` URLs. (#492)
- Resolve race condition around exceptions during streaming a response. (#491)

## 0.14.5 (January 18th, 2022)

- SOCKS proxy support. (#478)
- Add proxy_auth argument to HTTPProxy. (#481)
- Improve error message on 'RemoteProtocolError' exception when server disconnects without sending a response. (#479)

## 0.14.4 (January 5th, 2022)

- Support HTTP/2 on HTTPS tunnelling proxies. (#468)
- Fix proxy headers missing on HTTP forwarding. (#456)
- Only instantiate SSL context if required. (#457)
- More robust HTTP/2 handling. (#253, #439, #440, #441)

## 0.14.3 (November 17th, 2021)

- Fix race condition when removing closed connections from the pool. (#437)
## 0.14.2 (November 16th, 2021)

- Failed connections no longer remain in the pool. (Pull #433)

## 0.14.1 (November 12th, 2021)

- `max_connections` becomes optional. (Pull #429)
- `certifi` is now included in the install dependencies. (Pull #428)
- `h2` is now strictly optional. (Pull #428)

## 0.14.0 (November 11th, 2021)

The 0.14 release is a complete reworking of `httpcore`, comprehensively addressing some underlying issues in the connection pooling, as well as substantially redesigning the API to be more user friendly.

Some of the lower-level API design also makes the components more easily testable in isolation, and the package now has 100% test coverage.

See [discussion #419](https://github.com/encode/httpcore/discussions/419) for a little more background.

There's some other neat bits in there too, such as the "trace" extension, which gives a hook into inspecting the internal events that occur during the request/response cycle. This extension is needed for the HTTPX cli, in order to...

* Log the point at which the connection is established, and the IP/port on which it is made.
* Determine if the outgoing request should log as HTTP/1.1 or HTTP/2, rather than having to assume it's HTTP/2 if the --http2 flag was passed. (Which may not actually be true.)
* Log SSL version info / certificate info.

Note that `curio` support is not currently available in 0.14.0. If you're using `httpcore` with `curio` please get in touch, so we can assess if we ought to prioritize it as a feature or not.

## 0.13.7 (September 13th, 2021)

- Fix broken error messaging when URL scheme is missing, or a non HTTP(S) scheme is used. (Pull #403)

## 0.13.6 (June 15th, 2021)

### Fixed

- Close sockets when read or write timeouts occur. (Pull #365)

## 0.13.5 (June 14th, 2021)

### Fixed

- Resolved niggles with AnyIO EOF behaviours. (Pull #358, #362)

## 0.13.4 (June 9th, 2021)

### Added

- Improved error messaging when URL scheme is missing, or a non HTTP(S) scheme is used. (Pull #354)

### Fixed

- Switched to `anyio` as the default backend implementation when running with `asyncio`. Resolves some awkward [TLS timeout issues](https://github.com/encode/httpx/discussions/1511).

## 0.13.3 (May 6th, 2021)

### Added

- Support HTTP/2 prior knowledge, using `httpcore.SyncConnectionPool(http1=False)`. (Pull #333)

### Fixed

- Handle cases where environment does not provide `select.poll` support. (Pull #331)

## 0.13.2 (April 29th, 2021)

### Added

- Improve error message for specific case of `RemoteProtocolError` where server disconnects without sending a response. (Pull #313)

## 0.13.1 (April 28th, 2021)

### Fixed

- More resilient testing for closed connections. (Pull #311)
- Don't raise exceptions on ungraceful connection closes. (Pull #310)

## 0.13.0 (April 21st, 2021)

The 0.13 release updates the core API in order to match the HTTPX Transport API, introduced in HTTPX 0.18 onwards.

An example of making requests with the new interface is:

```python
with httpcore.SyncConnectionPool() as http:
    status_code, headers, stream, extensions = http.handle_request(
        method=b'GET',
        url=(b'https', b'example.org', 443, b'/'),
        headers=[(b'host', b'example.org'), (b'user-agent', b'httpcore')],
        stream=httpcore.ByteStream(b''),
        extensions={}
    )
    body = stream.read()
    print(status_code, body)
```

### Changed

- The `.request()` method is now `handle_request()`. (Pull #296)
- The `.arequest()` method is now `.handle_async_request()`. (Pull #296)
- The `headers` argument is no longer optional. (Pull #296)
- The `stream` argument is no longer optional. (Pull #296)
- The `ext` argument is now named `extensions`, and is no longer optional. (Pull #296)
- The `"reason"` extension keyword is now named `"reason_phrase"`. (Pull #296)
- The `"reason_phrase"` and `"http_version"` extensions now use byte strings for their values. (Pull #296)
- The `httpcore.PlainByteStream()` class becomes `httpcore.ByteStream()`. (Pull #296)

### Added

- Streams now support a `.read()` interface. (Pull #296)

### Fixed

- Task cancellation no longer leaks connections from the connection pool. (Pull #305)

## 0.12.3 (December 7th, 2020)

### Fixed

- Abort SSL connections on close rather than waiting for remote EOF when using `asyncio`. (Pull #167)
- Fix exception raised in case of connect timeouts when using the `anyio` backend. (Pull #236)
- Fix `Host` header precedence for `:authority` in HTTP/2. (Pull #241, #243)
- Handle extra edge case when detecting for socket readability when using `asyncio`. (Pull #242, #244)
- Fix `asyncio` SSL warning when using proxy tunneling. (Pull #249)

## 0.12.2 (November 20th, 2020)

### Fixed

- Properly wrap connect errors on the asyncio backend. (Pull #235)
- Fix `ImportError` occurring on Python 3.9 when using the HTTP/1.1 sync client in a multithreaded context. (Pull #237)

## 0.12.1 (November 7th, 2020)

### Added

- Add connect retries. (Pull #221)

### Fixed

- Tweak detection of dropped connections, resolving an issue with open files limits on Linux. (Pull #185)
- Avoid leaking connections when establishing an HTTP tunnel to a proxy has failed. (Pull #223)
- Properly wrap OS errors when using `trio`. (Pull #225)

## 0.12.0 (October 6th, 2020)

### Changed

- HTTP header casing is now preserved, rather than always sent in lowercase. (#216 and python-hyper/h11#104)

### Added

- Add Python 3.9 to officially supported versions.

### Fixed

- Gracefully handle a stdlib asyncio bug when a connection is closed while it is in a paused-for-reading state. (#201)

## 0.11.1 (September 28th, 2020)

### Fixed

- Add await to async semaphore release() coroutine (#197)
- Drop incorrect curio classifier (#192)

## 0.11.0 (September 22nd, 2020)

The Transport API with 0.11.0 has a couple of significant changes.

Firstly we've changed the request interface in order to allow extensions, which will later enable us to support features such as trailing headers, HTTP/2 server push, and CONNECT/Upgrade connections.

The interface changes from:

```python
def request(method, url, headers, stream, timeout):
    return (http_version, status_code, reason, headers, stream)
```

To instead include an optional dictionary of extensions on the request and response:

```python
def request(method, url, headers, stream, ext):
    return (status_code, headers, stream, ext)
```

Having an open-ended extensions point will allow us to add later support for various optional features, that wouldn't otherwise be supported without these API changes.

In particular:

* Trailing headers support.
* HTTP/2 Server Push.
* sendfile.
* Exposing raw connection on CONNECT, Upgrade, HTTP/2 bi-di streaming.
* Exposing debug information out of the API, including template name, template context.

Currently extensions are limited to:

* request: `timeout` - Optional. Timeout dictionary.
* response: `http_version` - Optional. Include the HTTP version used on the response.
* response: `reason` - Optional. Include the reason phrase used on the response. Only valid with HTTP/1.*.

See https://github.com/encode/httpx/issues/1274#issuecomment-694884553 for the history behind this.
Secondly, the async version of `request` is now namespaced as `arequest`. This allows concrete transports to support both sync and async implementations on the same class.

### Added

- Add curio support. (Pull #168)
- Add anyio support, with `backend="anyio"`. (Pull #169)

### Changed

- Update the Transport API to use 'ext' for optional extensions. (Pull #190)
- Update the Transport API to use `.request` and `.arequest` so implementations can support both sync and async. (Pull #189)

## 0.10.2 (August 20th, 2020)

### Added

- Added Unix Domain Socket support. (Pull #139)

### Fixed

- Always include the port on proxy CONNECT requests. (Pull #154)
- Fix `max_keepalive_connections` configuration. (Pull #153)
- Fixes behaviour in HTTP/1.1 where server disconnects can be used to signal the end of the response body. (Pull #164)

## 0.10.1 (August 7th, 2020)

- Include `max_keepalive_connections` on `AsyncHTTPProxy`/`SyncHTTPProxy` classes.

## 0.10.0 (August 7th, 2020)

The most notable change in the 0.10.0 release is that HTTP/2 support is now fully optional.

Use either `pip install httpcore` for HTTP/1.1 support only, or `pip install httpcore[http2]` for HTTP/1.1 and HTTP/2 support.

### Added

- HTTP/2 support becomes optional. (Pull #121, #130)
- Add `local_address=...` support. (Pull #100, #134)
- Add `PlainByteStream`, `IteratorByteStream`, `AsyncIteratorByteStream`. The `AsyncByteStream` and `SyncByteStream` classes are now pure interface classes. (#133)
- Add `LocalProtocolError`, `RemoteProtocolError` exceptions. (Pull #129)
- Add `UnsupportedProtocol` exception. (Pull #128)
- Add `.get_connection_info()` method. (Pull #102, #137)
- Add better TRACE logs. (Pull #101)

### Changed

- `max_keepalive` is deprecated in favour of `max_keepalive_connections`. (Pull #140)

### Fixed

- Improve handling of server disconnects. (Pull #112)

## 0.9.1 (May 27th, 2020)

### Fixed

- Proper host resolution for sync case, including IPv6 support. (Pull #97)
- Close outstanding connections when connection pool is closed. (Pull #98)

## 0.9.0 (May 21st, 2020)

### Changed

- URL port becomes an `Optional[int]` instead of `int`. (Pull #92)

### Fixed

- Honor HTTP/2 max concurrent streams settings. (Pull #89, #90)
- Remove incorrect debug log. (Pull #83)

## 0.8.4 (May 11th, 2020)

### Added

- Logging via HTTPCORE_LOG_LEVEL and HTTPX_LOG_LEVEL environment variables and TRACE level logging. (Pull #79)

### Fixed

- Reuse of connections on HTTP/2 in close concurrency situations. (Pull #81)

## 0.8.3 (May 6th, 2020)

### Fixed

- Include `Host` and `Accept` headers on proxy "CONNECT" requests.
- De-duplicate any headers also contained in proxy_headers.
- HTTP/2 flag not being passed down to proxy connections.

## 0.8.2 (May 3rd, 2020)

### Fixed

- Fix connections using proxy forwarding requests not being added to the connection pool properly. (Pull #70)

## 0.8.1 (April 30th, 2020)

### Changed

- Allow inheritance of both `httpcore.AsyncByteStream`, `httpcore.SyncByteStream` without type conflicts.

## 0.8.0 (April 30th, 2020)

### Fixed

- Fixed tunnel proxy support.

### Added

- New `TimeoutException` base class.

## 0.7.0 (March 5th, 2020)

- First integration with HTTPX.

==> httpcore-1.0.2/LICENSE.md <==

Copyright © 2020, [Encode OSS Ltd](https://www.encode.io/).
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.

* Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

==> httpcore-1.0.2/README.md <==

# HTTP Core

[![Test Suite](https://github.com/encode/httpcore/workflows/Test%20Suite/badge.svg)](https://github.com/encode/httpcore/actions)
[![Package version](https://badge.fury.io/py/httpcore.svg)](https://pypi.org/project/httpcore/)

> *Do one thing, and do it well.*

The HTTP Core package provides a minimal low-level HTTP client, which does one thing only. Sending HTTP requests.

It does not provide any high level model abstractions over the API, does not handle redirects, multipart uploads, building authentication headers, transparent HTTP caching, URL parsing, session cookie handling, content or charset decoding, handling JSON, environment based configuration defaults, or any of that Jazz.

Some things HTTP Core does do:

* Sending HTTP requests.
* Thread-safe / task-safe connection pooling.
* HTTP(S) proxy & SOCKS proxy support.
* Supports HTTP/1.1 and HTTP/2.
* Provides both sync and async interfaces.
* Async backend support for `asyncio` and `trio`.

## Requirements

Python 3.8+

## Installation

For HTTP/1.1 only support, install with:

```shell
$ pip install httpcore
```

There are also a number of optional extras available...

```shell
$ pip install httpcore['asyncio,trio,http2,socks']
```

## Sending requests

Send an HTTP request:

```python
import httpcore

response = httpcore.request("GET", "https://www.example.com/")

print(response)
# <Response [200]>
print(response.status)
# 200
print(response.headers)
# [(b'Accept-Ranges', b'bytes'), (b'Age', b'557328'), (b'Cache-Control', b'max-age=604800'), ...]
print(response.content)
# b'<!doctype html>\n<html>\n<head>\n<title>Example Domain</title>\n\n<meta charset="utf-8" />\n ...'
```

The top-level `httpcore.request()` function is provided for convenience. In practice whenever you're working with `httpcore` you'll want to use the connection pooling functionality that it provides.

```python
import httpcore

http = httpcore.ConnectionPool()
response = http.request("GET", "https://www.example.com/")
```

Once you're ready to get going, [head over to the documentation](https://www.encode.io/httpcore/).
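The pool also supports streaming responses, so a large body doesn't have to be loaded into memory in one go. Here's a minimal sketch, using the same illustrative URL as above and the sync streaming API covered fully in the documentation linked above:

```python
import httpcore

with httpcore.ConnectionPool() as http:
    # Iterate over the response body as it arrives, chunk by chunk.
    with http.stream("GET", "https://www.example.com/") as response:
        for chunk in response.iter_stream():
            print(f"Downloaded: {chunk!r}")
```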
## Motivation

You *probably* don't want to be using HTTP Core directly. It might make sense if you're writing something like a proxy service in Python, and you just want something at the lowest possible level, but more typically you'll want to use a higher level client library, such as `httpx`.

The motivation for `httpcore` is:

* To provide a reusable low-level client library, that other packages can then build on top of.
* To provide a *really clear interface split* between the networking code and client logic, so that each is easier to understand and reason about in isolation.

## Dependencies

The `httpcore` package has the following dependencies...

* `h11`
* `certifi`

And the following optional extras...

* `anyio` - Required by `pip install httpcore['asyncio']`.
* `trio` - Required by `pip install httpcore['trio']`.
* `h2` - Required by `pip install httpcore['http2']`.
* `socksio` - Required by `pip install httpcore['socks']`.

## Versioning

We use [SEMVER for our versioning policy](https://semver.org/).

For changes between package versions please see our [project changelog](CHANGELOG.md).

We recommend pinning your requirements to either the most current major version, or a more specific version range:

```shell
pip install 'httpcore==1.*'
```

==> httpcore-1.0.2/docs/async.md <==

# Async Support

HTTP Core offers a standard synchronous API by default, but also gives you the option of an async client if you need it.

Async is a concurrency model that is far more efficient than multi-threading, and can provide significant performance benefits and enable the use of long-lived network connections such as WebSockets.

If you're working with an async web framework then you'll also want to use an async client for sending outgoing HTTP requests.

Launching concurrent async tasks is far more resource efficient than spawning multiple threads. The Python interpreter should be able to comfortably handle switching between over 1000 concurrent tasks, while a sensible number of threads in a thread pool might be around 10 or 20.

## Enabling Async support

If you're using async with [Python's stdlib `asyncio` support](https://docs.python.org/3/library/asyncio.html), install the optional dependencies using:

```shell
$ pip install 'httpcore[asyncio]'
```

Alternatively, if you're working with [the Python `trio` package](https://trio.readthedocs.io/en/stable/):

```shell
$ pip install 'httpcore[trio]'
```

We highly recommend `trio` for async support. The `trio` project [pioneered the principles of structured concurrency](https://en.wikipedia.org/wiki/Structured_concurrency), and has a more carefully constrained API to work against.

## API differences

When using async support, you need to make sure to use an async connection pool class:

```python
# The async variation of `httpcore.ConnectionPool`
async with httpcore.AsyncConnectionPool() as http:
    ...
```

Or if connecting via a proxy:

```python
# The async variation of `httpcore.HTTPProxy`
async with httpcore.AsyncHTTPProxy() as proxy:
    ...
```

### Sending requests

Sending requests with the async version of `httpcore` requires the `await` keyword:

```python
import asyncio
import httpcore

async def main():
    async with httpcore.AsyncConnectionPool() as http:
        response = await http.request("GET", "https://www.example.com/")

asyncio.run(main())
```

When including content in the request, the content must either be bytes or an *async iterable* yielding bytes.

### Streaming responses

Streaming responses also require a slightly different interface to the sync version:

* `with .stream(...) as response` → `async with .stream(...) as response`.
* `for chunk in response.iter_stream()` → `async for chunk in response.aiter_stream()`.
* `response.read()` → `await response.aread()`.
* `response.close()` → `await response.aclose()`.

For example:

```python
import asyncio
import httpcore

async def main():
    async with httpcore.AsyncConnectionPool() as http:
        async with http.stream("GET", "https://www.example.com/") as response:
            async for chunk in response.aiter_stream():
                print(f"Downloaded: {chunk}")

asyncio.run(main())
```

### Pool lifespans

When using `httpcore` in an async environment it is strongly recommended that you instantiate and use connection pools using the context managed style:

```python
async with httpcore.AsyncConnectionPool() as http:
    ...
```

To benefit from connection pooling it is recommended that you instantiate a single connection pool in this style, and pass it around throughout your application.

If you do want to use a connection pool without this style then you'll need to ensure that you explicitly close the pool once it is no longer required:

```python
try:
    http = httpcore.AsyncConnectionPool()
    ...
finally:
    await http.aclose()
```

This is a little different to the threaded context, where it's okay to simply instantiate a globally available connection pool, and then allow Python's garbage collection to deal with closing any connections in the pool, once the `__del__` method is called.

The reason for this difference is that asynchronous code is not able to run within the context of the synchronous `__del__` method, so there is no way for connections to be automatically closed at the point of garbage collection. This can lead to unterminated TCP connections still remaining after the Python interpreter quits.

## Supported environments

HTTP Core supports either `asyncio` or `trio` as an async environment.

It will auto-detect which of those two to use as the backend for socket operations and concurrency primitives.

### AsyncIO

AsyncIO is Python's [built-in library](https://docs.python.org/3/library/asyncio.html) for writing concurrent code with the async/await syntax.

Let's take a look at sending several outgoing HTTP requests concurrently, using `asyncio`:

```python
import asyncio
import httpcore
import time

async def download(http, year):
    await http.request("GET", f"https://en.wikipedia.org/wiki/{year}")

async def main():
    async with httpcore.AsyncConnectionPool() as http:
        started = time.time()
        # Here we use `asyncio.gather()` in order to run several tasks concurrently...
        tasks = [download(http, year) for year in range(2000, 2020)]
        await asyncio.gather(*tasks)
        complete = time.time()

        for connection in http.connections:
            print(connection)
        print("Complete in %.3f seconds" % (complete - started))

asyncio.run(main())
```

### Trio

Trio is [an alternative async library](https://trio.readthedocs.io/en/stable/), designed around the [principles of structured concurrency](https://en.wikipedia.org/wiki/Structured_concurrency).
```python
import httpcore
import trio
import time

async def download(http, year):
    await http.request("GET", f"https://en.wikipedia.org/wiki/{year}")

async def main():
    async with httpcore.AsyncConnectionPool() as http:
        started = time.time()
        async with trio.open_nursery() as nursery:
            for year in range(2000, 2020):
                nursery.start_soon(download, http, year)
        complete = time.time()

        for connection in http.connections:
            print(connection)
        print("Complete in %.3f seconds" % (complete - started))

trio.run(main)
```

### AnyIO

AnyIO is an [asynchronous networking and concurrency library](https://anyio.readthedocs.io/) that works on top of either asyncio or trio. It blends in with native libraries of your chosen backend (defaults to asyncio).

The `anyio` library is designed around the [principles of structured concurrency](https://en.wikipedia.org/wiki/Structured_concurrency), and brings many of the same correctness and usability benefits that Trio provides, while interoperating with existing `asyncio` libraries.

```python
import httpcore
import anyio
import time

async def download(http, year):
    await http.request("GET", f"https://en.wikipedia.org/wiki/{year}")

async def main():
    async with httpcore.AsyncConnectionPool() as http:
        started = time.time()
        async with anyio.create_task_group() as task_group:
            for year in range(2000, 2020):
                task_group.start_soon(download, http, year)
        complete = time.time()

        for connection in http.connections:
            print(connection)
        print("Complete in %.3f seconds" % (complete - started))

anyio.run(main)
```

---

# Reference

## `httpcore.AsyncConnectionPool`

::: httpcore.AsyncConnectionPool
    handler: python
    rendering:
        show_source: False

## `httpcore.AsyncHTTPProxy`

::: httpcore.AsyncHTTPProxy
    handler: python
    rendering:
        show_source: False

==> httpcore-1.0.2/docs/connection-pools.md <==

# Connection Pools

While the top-level API provides convenience functions for working with `httpcore`, in practice you'll almost always want to take advantage of the connection pooling functionality that it provides.

To do so, instantiate a pool instance, and use it to send requests:

```python
import httpcore

http = httpcore.ConnectionPool()
r = http.request("GET", "https://www.example.com/")

print(r)
# <Response [200]>
```

Connection pools support the same `.request()` and `.stream()` APIs [as described in the Quickstart](../quickstart).

We can observe the benefits of connection pooling with a simple script like so:

```python
import httpcore
import time

http = httpcore.ConnectionPool()

for counter in range(5):
    started = time.time()
    response = http.request("GET", "https://www.example.com/")
    complete = time.time()
    print(response, "in %.3f seconds" % (complete - started))
```

The output *should* demonstrate the initial request as being substantially slower than the subsequent requests:

```
<Response [200]> in 0.529 seconds
<Response [200]> in 0.096 seconds
<Response [200]> in 0.097 seconds
<Response [200]> in 0.095 seconds
<Response [200]> in 0.098 seconds
```

This is to be expected. Once we've established a connection to `"www.example.com"` we're able to reuse it for following requests.

## Configuration

The connection pool instance is also the main point of configuration. Let's take a look at the various options that it provides:

### SSL configuration

* `ssl_context`: An SSL context to use for verifying connections. If not specified, the default `httpcore.default_ssl_context()` will be used.

### Pooling configuration

* `max_connections`: The maximum number of concurrent HTTP connections that the pool should allow. Any attempt to send a request on a pool that would exceed this amount will block until a connection is available.
* `max_keepalive_connections`: The maximum number of idle HTTP connections that will be maintained in the pool.
* `keepalive_expiry`: The duration in seconds that an idle HTTP connection may be maintained for before being expired from the pool.

### HTTP version support

* `http1`: A boolean indicating if HTTP/1.1 requests should be supported by the connection pool. Defaults to `True`.
* `http2`: A boolean indicating if HTTP/2 requests should be supported by the connection pool. Defaults to `False`.

### Other options

* `retries`: The maximum number of retries when trying to establish a connection.
* `local_address`: Local address to connect from. Can also be used to connect using a particular address family. Using `local_address="0.0.0.0"` will connect using an `AF_INET` address (IPv4), while using `local_address="::"` will connect using an `AF_INET6` address (IPv6).
* `uds`: Path to a Unix Domain Socket to use instead of TCP sockets.
* `network_backend`: A backend instance to use for handling network I/O.
* `socket_options`: Socket options to apply to the TCP socket when the connection is established.

## Pool lifespans

Because connection pools hold onto network resources, careful developers may want to ensure that instances are properly closed once they are no longer required.

Working with a single global instance isn't a bad idea for many use cases, since the connection pool will automatically be closed when the `__del__` method is called on it:

```python
# This is perfectly fine for most purposes.
# The connection pool will automatically be closed when it is garbage collected,
# or when the Python interpreter exits.
http = httpcore.ConnectionPool()
```

However, to be more explicit around the resource usage, we can use the connection pool within a context manager:

```python
with httpcore.ConnectionPool() as http:
    ...
```

Or else close the pool explicitly:

```python
http = httpcore.ConnectionPool()
try:
    ...
finally:
    http.close()
```

## Thread and task safety

Connection pools are designed to be thread-safe. Similarly, when using `httpcore` in an async context connection pools are task-safe.

This means that you can have a single connection pool instance shared by multiple threads.
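For instance, a single pool can serve a whole batch of worker threads. The following is a minimal sketch using only the standard library; the URL and worker counts are illustrative:

```python
import concurrent.futures

import httpcore

def fetch(http, path):
    # Every worker sends requests through the same shared pool.
    response = http.request("GET", f"https://www.example.com{path}")
    return response.status

with httpcore.ConnectionPool() as http:
    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
        futures = [executor.submit(fetch, http, "/") for _ in range(20)]
        for future in concurrent.futures.as_completed(futures):
            print(future.result())
```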
---

# Reference

## `httpcore.ConnectionPool`

::: httpcore.ConnectionPool
    handler: python
    rendering:
        show_source: False

==> httpcore-1.0.2/docs/connections.md <==

# Connections

TODO

---

# Reference

## `httpcore.HTTPConnection`

::: httpcore.HTTPConnection
    handler: python
    rendering:
        show_source: False

## `httpcore.HTTP11Connection`

::: httpcore.HTTP11Connection
    handler: python
    rendering:
        show_source: False

## `httpcore.HTTP2Connection`

::: httpcore.HTTP2Connection
    handler: python
    rendering:
        show_source: False

==> httpcore-1.0.2/docs/exceptions.md <==

# Exceptions

The following exceptions may be raised when sending a request:

* `httpcore.TimeoutException`
* `httpcore.PoolTimeout`
* `httpcore.ConnectTimeout`
* `httpcore.ReadTimeout`
* `httpcore.WriteTimeout`
* `httpcore.NetworkError`
* `httpcore.ConnectError`
* `httpcore.ReadError`
* `httpcore.WriteError`
* `httpcore.ProtocolError`
* `httpcore.RemoteProtocolError`
* `httpcore.LocalProtocolError`
* `httpcore.ProxyError`
* `httpcore.UnsupportedProtocol`

==> httpcore-1.0.2/docs/extensions.md <==

# Extensions

The request/response API used by `httpcore` is kept deliberately simple and explicit.

The `Request` and `Response` models are pretty slim wrappers around this core API:

```
# Pseudo-code expressing the essentials of the request/response model.
(
    status_code: int,
    headers: List[Tuple(bytes, bytes)],
    stream: Iterable[bytes]
) = handle_request(
    method: bytes,
    url: URL,
    headers: List[Tuple(bytes, bytes)],
    stream: Iterable[bytes]
)
```

This is everything that's needed in order to represent an HTTP exchange.

Well... almost.

There is a maxim in Computer Science that *"All non-trivial abstractions, to some degree, are leaky"*. When an abstraction leaks, it's important that it at least leaks only in well-defined places.

In order to handle cases that don't otherwise fit inside this core abstraction, `httpcore` requests and responses have 'extensions'. These are a dictionary of optional additional information.

Let's expand on our request/response abstraction...

```
# Pseudo-code expressing the essentials of the request/response model,
# plus extensions allowing for additional API that does not fit into
# this abstraction.
(
    status_code: int,
    headers: List[Tuple(bytes, bytes)],
    stream: Iterable[bytes],
    extensions: dict
) = handle_request(
    method: bytes,
    url: URL,
    headers: List[Tuple(bytes, bytes)],
    stream: Iterable[bytes],
    extensions: dict
)
```

Several extensions are supported both on the request:

```python
r = httpcore.request(
    "GET",
    "https://www.example.com",
    extensions={"timeout": {"connect": 5.0}}
)
```

And on the response:

```python
r = httpcore.request("GET", "https://www.example.com")

print(r.extensions["http_version"])
# When using HTTP/1.1 on the client side, the server HTTP response
# could feasibly be one of b"HTTP/0.9", b"HTTP/1.0", or b"HTTP/1.1".
```

## Request Extensions

### `"timeout"`

A dictionary of `str: Optional[float]` timeout values.

May include values for `'connect'`, `'read'`, `'write'`, or `'pool'`.

For example:

```python
# Timeout if a connection takes more than 5 seconds to establish, or if
# we are blocked waiting on the connection pool for more than 10 seconds.
r = httpcore.request(
    "GET",
    "https://www.example.com",
    extensions={"timeout": {"connect": 5.0, "pool": 10.0}}
)
```

### `"trace"`

The trace extension allows a callback handler to be installed to monitor the internal flow of events within `httpcore`. The simplest way to explain this is with an example:

```python
import httpcore

def log(event_name, info):
    print(event_name, info)

r = httpcore.request("GET", "https://www.example.com/", extensions={"trace": log})
# connection.connect_tcp.started {'host': 'www.example.com', 'port': 443, 'local_address': None, 'timeout': None}
# connection.connect_tcp.complete {'return_value': <...>}
# connection.start_tls.started {'ssl_context': <...>, 'server_hostname': b'www.example.com', 'timeout': None}
# connection.start_tls.complete {'return_value': <...>}
# http11.send_request_headers.started {'request': <...>}
# http11.send_request_headers.complete {'return_value': None}
# http11.send_request_body.started {'request': <...>}
# http11.send_request_body.complete {'return_value': None}
# http11.receive_response_headers.started {'request': <...>}
# http11.receive_response_headers.complete {'return_value': (b'HTTP/1.1', 200, b'OK', [(b'Age', b'553715'), (b'Cache-Control', b'max-age=604800'), (b'Content-Type', b'text/html; charset=UTF-8'), (b'Date', b'Thu, 21 Oct 2021 17:08:42 GMT'), (b'Etag', b'"3147526947+ident"'), (b'Expires', b'Thu, 28 Oct 2021 17:08:42 GMT'), (b'Last-Modified', b'Thu, 17 Oct 2019 07:18:26 GMT'), (b'Server', b'ECS (nyb/1DCD)'), (b'Vary', b'Accept-Encoding'), (b'X-Cache', b'HIT'), (b'Content-Length', b'1256')])}
# http11.receive_response_body.started {'request': <...>}
# http11.receive_response_body.complete {'return_value': None}
# http11.response_closed.started {}
# http11.response_closed.complete {'return_value': None}
```

The `event_name` and `info` arguments here will be one of the following:

* `{event_type}.{event_name}.started`, `<...>`
* `{event_type}.{event_name}.complete`, `{"return_value": <...>}`
* `{event_type}.{event_name}.failed`, `{"exception": <...>}`

Note that when using the async variant of `httpcore` the handler function passed to `"trace"` must be an `async def ...` function.

The following event types are currently exposed...

**Establishing the connection**

* `"connection.connect_tcp"`
* `"connection.connect_unix_socket"`
* `"connection.start_tls"`

**HTTP/1.1 events**

* `"http11.send_request_headers"`
* `"http11.send_request_body"`
* `"http11.receive_response_headers"`
* `"http11.receive_response_body"`
* `"http11.response_closed"`

**HTTP/2 events**

* `"http2.send_connection_init"`
* `"http2.send_request_headers"`
* `"http2.send_request_body"`
* `"http2.receive_response_headers"`
* `"http2.receive_response_body"`
* `"http2.response_closed"`

The exact set of trace events may be subject to change across different versions of `httpcore`. If you need to rely on a particular set of events it is recommended that you pin installation of the package to a fixed version.

### `"sni_hostname"`

The server's hostname, which is used to confirm the hostname supplied by the SSL certificate.

For example:

```python
headers = {"Host": "www.encode.io"}
extensions = {"sni_hostname": "www.encode.io"}
response = httpcore.request(
    "GET",
    "https://185.199.108.153",
    headers=headers,
    extensions=extensions
)
```

## Response Extensions

### `"http_version"`

The HTTP version, as bytes. Eg. `b"HTTP/1.1"`.

When using HTTP/1.1 the response line includes an explicit version, and the value of this key could feasibly be one of `b"HTTP/0.9"`, `b"HTTP/1.0"`, or `b"HTTP/1.1"`.
When using HTTP/2 there is no further response versioning included in the protocol, and the value of this key will always be `b"HTTP/2"`.

### `"reason_phrase"`

The reason-phrase of the HTTP response, as bytes. For example `b"OK"`.

Some servers may include a custom reason phrase, although this is not recommended.

HTTP/2 onwards does not include a reason phrase on the wire.

When no key is included, a default based on the status code may be used.

### `"stream_id"`

When HTTP/2 is being used the `"stream_id"` response extension can be accessed to determine the ID of the data stream that the response was sent on.

### `"network_stream"`

The `"network_stream"` extension allows developers to handle HTTP `CONNECT` and `Upgrade` requests, by providing an API that steps outside the standard request/response model, and can directly read or write to the network.

The interface provided by the network stream:

* `read(max_bytes, timeout = None) -> bytes`
* `write(buffer, timeout = None)`
* `close()`
* `start_tls(ssl_context, server_hostname = None, timeout = None) -> NetworkStream`
* `get_extra_info(info) -> Any`

This API can be used as the foundation for working with HTTP proxies, WebSocket upgrades, and other advanced use-cases.

See the [network backends documentation](network-backends.md) for more information on working directly with network streams.

##### `CONNECT` requests

A proxy CONNECT request using the network stream:

```python
# Formulate a CONNECT request...
#
# This will establish a connection to 127.0.0.1:8080, and then send the following...
#
# CONNECT http://www.example.com HTTP/1.1
# Host: 127.0.0.1:8080
url = httpcore.URL(b"http", b"127.0.0.1", 8080, b"http://www.example.com")
with httpcore.stream("CONNECT", url) as response:
    network_stream = response.extensions["network_stream"]

    # Upgrade to an SSL stream...
    network_stream = network_stream.start_tls(
        ssl_context=httpcore.default_ssl_context(),
        server_hostname=b"www.example.com",
    )

    # Manually send an HTTP request over the network stream, and read the response...
    #
    # For a more complete example see the httpcore `TunnelHTTPConnection` implementation.
    network_stream.write(b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n")
    data = network_stream.read()
    print(data)
```

##### `Upgrade` requests

Using the `wsproto` package to handle a websockets session:

```python
import httpcore
import wsproto
import os
import base64

url = "http://127.0.0.1:8000/"
headers = {
    b"Connection": b"Upgrade",
    b"Upgrade": b"WebSocket",
    b"Sec-WebSocket-Key": base64.b64encode(os.urandom(16)),
    b"Sec-WebSocket-Version": b"13"
}
with httpcore.stream("GET", url, headers=headers) as response:
    if response.status != 101:
        raise Exception("Failed to upgrade to websockets", response)

    # Get the raw network stream.
    network_stream = response.extensions["network_stream"]

    # Write a WebSocket text frame to the stream.
    ws_connection = wsproto.Connection(wsproto.ConnectionType.CLIENT)
    message = wsproto.events.TextMessage("hello, world!")
    outgoing_data = ws_connection.send(message)
    network_stream.write(outgoing_data)

    # Wait for a response.
    incoming_data = network_stream.read(max_bytes=4096)
    ws_connection.receive_data(incoming_data)
    for event in ws_connection.events():
        if isinstance(event, wsproto.events.TextMessage):
            print("Got data:", event.data)

    # Write a WebSocket close to the stream.
    message = wsproto.events.CloseConnection(code=1000)
    outgoing_data = ws_connection.send(message)
    network_stream.write(outgoing_data)
```

##### Extra network information

The network stream abstraction also allows access to various low-level information that may be exposed by the underlying socket:

```python
response = httpcore.request("GET", "https://www.example.com")
network_stream = response.extensions["network_stream"]

client_addr = network_stream.get_extra_info("client_addr")
server_addr = network_stream.get_extra_info("server_addr")
print("Client address", client_addr)
print("Server address", server_addr)
```

The socket SSL information is also available through this interface, although you need to ensure that the underlying connection is still open, in order to access it...

```python
with httpcore.stream("GET", "https://www.example.com") as response:
    network_stream = response.extensions["network_stream"]

    ssl_object = network_stream.get_extra_info("ssl_object")
    print("TLS version", ssl_object.version())
```

==> httpcore-1.0.2/docs/http2.md <==

# HTTP/2

HTTP/2 is a major new iteration of the HTTP protocol, that provides a more efficient transport, with potential performance benefits. HTTP/2 does not change the core semantics of the request or response, but alters the way that data is sent to and from the server.

Rather than the text format that HTTP/1.1 uses, HTTP/2 is a binary format. The binary format provides full request and response multiplexing, and efficient compression of HTTP headers. The stream multiplexing means that where HTTP/1.1 requires one TCP stream for each concurrent request, HTTP/2 allows a single TCP stream to handle multiple concurrent requests.

HTTP/2 also provides support for functionality such as response prioritization, and server push.

For a comprehensive guide to HTTP/2 you may want to check out "[HTTP2 Explained](https://http2-explained.haxx.se)".

## Enabling HTTP/2

When using the `httpcore` client, HTTP/2 support is not enabled by default, because HTTP/1.1 is a mature, battle-hardened transport layer, and our HTTP/1.1 implementation may be considered the more robust option at this point in time. It is possible that a future version of `httpcore` may enable HTTP/2 support by default.

If you're issuing highly concurrent requests you might want to consider trying out our HTTP/2 support. You can do so by first making sure to install the optional HTTP/2 dependencies...

```shell
$ pip install 'httpcore[http2]'
```

And then instantiating a connection pool with HTTP/2 support enabled:

```python
import httpcore

pool = httpcore.ConnectionPool(http2=True)
```

We can take a look at the difference in behaviour by issuing several outgoing requests in parallel.
Start out by using a standard HTTP/1.1 connection pool:

```python
import httpcore
import concurrent.futures
import time

def download(http, year):
    http.request("GET", f"https://en.wikipedia.org/wiki/{year}")

def main():
    with httpcore.ConnectionPool() as http:
        started = time.time()
        with concurrent.futures.ThreadPoolExecutor(max_workers=10) as threads:
            for year in range(2000, 2020):
                threads.submit(download, http, year)
        complete = time.time()

        for connection in http.connections:
            print(connection)
        print("Complete in %.3f seconds" % (complete - started))

main()
```

If you run this with an HTTP/1.1 connection pool, you ought to see output similar to the following:

```python
<HTTPConnection ['https://en.wikipedia.org:443', ...]>
<HTTPConnection ['https://en.wikipedia.org:443', ...]>
<HTTPConnection ['https://en.wikipedia.org:443', ...]>
<HTTPConnection ['https://en.wikipedia.org:443', ...]>
<HTTPConnection ['https://en.wikipedia.org:443', ...]>
<HTTPConnection ['https://en.wikipedia.org:443', ...]>
<HTTPConnection ['https://en.wikipedia.org:443', ...]>
<HTTPConnection ['https://en.wikipedia.org:443', ...]>
Complete in 0.586 seconds
```

We can see that the connection pool required a number of connections in order to handle the parallel requests.

If we now upgrade our connection pool to support HTTP/2:

```python
with httpcore.ConnectionPool(http2=True) as http:
    ...
```

And run the same script again, we should end up with something like this:

```python
<HTTPConnection ['https://en.wikipedia.org:443', ...]>
Complete in 0.573 seconds
```

All of our requests have been handled over a single connection.

Switching to HTTP/2 should not *necessarily* be considered an "upgrade". It is more complex, and requires more computational power, and so particularly in an interpreted language like Python it *could* be slower in some instances. Moreover, utilising multiple connections may end up connecting to multiple hosts, and could sometimes appear faster to the client, at the cost of requiring more server resources.

Enabling HTTP/2 is most likely to be beneficial if you are sending requests in high concurrency, and may often be better suited to an async context than to multi-threading.

## Inspecting the HTTP version

Enabling HTTP/2 support on the client does not *necessarily* mean that your requests and responses will be transported over HTTP/2, since both the client *and* the server need to support HTTP/2. If you connect to a server that only supports HTTP/1.1 the client will use a standard HTTP/1.1 connection instead.

You can determine which version of the HTTP protocol was used by examining the `"http_version"` response extension.

```python
import httpcore

pool = httpcore.ConnectionPool(http2=True)
response = pool.request("GET", "https://www.example.com/")

# Should be one of b"HTTP/2", b"HTTP/1.1", b"HTTP/1.0", or b"HTTP/0.9".
print(response.extensions["http_version"])
```

See [the extensions documentation](extensions.md) for more details.

## HTTP/2 negotiation

Robust servers need to support both HTTP/2 and HTTP/1.1 capable clients, and so need some way to "negotiate" with the client which protocol version will be used.

### HTTP/2 over HTTPS

Generally the method used is for the server to advertise if it has HTTP/2 support during the part of the SSL connection handshake. This is known as ALPN - "Application Layer Protocol Negotiation".

Most browsers only provide HTTP/2 support over HTTPS connections, and this is also the default behaviour that `httpcore` provides. If you enable HTTP/2 support you should still expect to see HTTP/1.1 connections for any `http://` URLs.

### HTTP/2 over HTTP

Servers can optionally also support HTTP/2 over HTTP by supporting the `Upgrade: h2c` header.

This mechanism is not supported by `httpcore`. It requires an additional round-trip between the client and server, and also requires any request body to be sent twice.
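If you want to confirm which protocol ALPN actually negotiated on a live connection, one option is to inspect the SSL object that the connection exposes. The following is a sketch combining the `"network_stream"` response extension and the `"ssl_object"` extra info described in [the extensions documentation](extensions.md); `selected_alpn_protocol()` is the standard library `ssl` API:

```python
import httpcore

with httpcore.ConnectionPool(http2=True) as pool:
    with pool.stream("GET", "https://www.example.com/") as response:
        network_stream = response.extensions["network_stream"]
        ssl_object = network_stream.get_extra_info("ssl_object")
        # Returns 'h2' if HTTP/2 was negotiated via ALPN, 'http/1.1' otherwise.
        print(ssl_object.selected_alpn_protocol())
```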
### Prior Knowledge

If you know in advance that the server you are communicating with will support HTTP/2, then you can enforce that the client uses HTTP/2, without requiring either ALPN support or an HTTP `Upgrade: h2c` header.

This is managed by disabling HTTP/1.1 support on the connection pool:

```python
pool = httpcore.ConnectionPool(http1=False, http2=True)
```

## Request & response headers

Because HTTP/2 frames the requests and responses somewhat differently to HTTP/1.1, there is a difference in some of the headers that are used.

In order for the `httpcore` library to support both HTTP/1.1 and HTTP/2 transparently, the HTTP/1.1 style is always used throughout the API. Any differences in header styles are only mapped onto HTTP/2 at the internal network layer.

## Request headers

The following pseudo-headers are used by HTTP/2 in the request:

* `:method` - The request method.
* `:path` - Taken from the URL of the request.
* `:authority` - Equivalent to the `Host` header in HTTP/1.1. In `httpcore` this is represented using the request `Host` header, which is automatically populated from the request URL if no `Host` header is explicitly included.
* `:scheme` - Taken from the URL of the request.

These pseudo-headers are included in `httpcore` as part of the `request.method` and `request.url` attributes, and through the `request.headers["Host"]` header. *They are not exposed directly by their pseudo-header names.*

The one other difference to be aware of is the `Transfer-Encoding: chunked` header.

In HTTP/2 this header is never used, since streaming data is framed using a different mechanism.

In `httpcore` the `Transfer-Encoding: chunked` header is always used to represent the presence of a streaming body on the request, and is automatically populated if required. However the header is only sent if the underlying connection ends up being HTTP/1.1, and is omitted if the underlying connection ends up being HTTP/2.

## Response headers

The following pseudo-header is used by HTTP/2 in the response:

* `:status` - The response status code.

In `httpcore` this *is represented by the `response.status` attribute, rather than being exposed as a pseudo-header*.

==> httpcore-1.0.2/docs/index.md <==

# HTTPCore

[![Test Suite](https://github.com/encode/httpcore/workflows/Test%20Suite/badge.svg)](https://github.com/encode/httpcore/actions)
[![Package version](https://badge.fury.io/py/httpcore.svg)](https://pypi.org/project/httpcore/)

> *Do one thing, and do it well.*

The HTTP Core package provides a minimal low-level HTTP client, which does one thing only. Sending HTTP requests.

It does not provide any high level model abstractions over the API, does not handle redirects, multipart uploads, building authentication headers, transparent HTTP caching, URL parsing, session cookie handling, content or charset decoding, handling JSON, environment based configuration defaults, or any of that Jazz.

Some things HTTP Core does do:

* Sending HTTP requests.
* Thread-safe / task-safe connection pooling.
* HTTP(S) proxy & SOCKS proxy support.
* Supports HTTP/1.1 and HTTP/2.
* Provides both sync and async interfaces.
* Async backend support for `asyncio` and `trio`.
## Requirements

Python 3.8+

## Installation

For HTTP/1.1 only support, install with:

```shell
$ pip install httpcore
```

For HTTP/1.1 and HTTP/2 support, install with:

```shell
$ pip install httpcore[http2]
```

For SOCKS proxy support, install with:

```shell
$ pip install httpcore[socks]
```

## Example

Let's check we're able to send HTTP requests:

```python
import httpcore

response = httpcore.request("GET", "https://www.example.com/")

print(response)
# <Response [200]>
print(response.status)
# 200
print(response.headers)
# [(b'Accept-Ranges', b'bytes'), (b'Age', b'557328'), (b'Cache-Control', b'max-age=604800'), ...]
print(response.content)
# b'<!doctype html>\n<html>\n<head>\n<title>Example Domain</title>\n\n<meta charset="utf-8" />\n ...'
```

Ready to get going? Head over to [the quickstart documentation](quickstart.md).

==> httpcore-1.0.2/docs/logging.md <==

# Logging

If you need to inspect the internal behaviour of `httpcore`, you can use Python's standard logging to output debug level information.

For example, the following configuration...

```python
import logging
import httpcore

logging.basicConfig(
    format="%(levelname)s [%(asctime)s] %(name)s - %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S",
    level=logging.DEBUG
)

httpcore.request('GET', 'https://www.example.com')
```

Will send debug level output to the console, or wherever `stdout` is directed to...

```
DEBUG [2023-01-09 14:44:00] httpcore.connection - connect_tcp.started host='www.example.com' port=443 local_address=None timeout=None
DEBUG [2023-01-09 14:44:00] httpcore.connection - connect_tcp.complete return_value=<...>
DEBUG [2023-01-09 14:44:00] httpcore.connection - start_tls.started ssl_context=<...> server_hostname='www.example.com' timeout=None
DEBUG [2023-01-09 14:44:00] httpcore.connection - start_tls.complete return_value=<...>
DEBUG [2023-01-09 14:44:00] httpcore.http11 - send_request_headers.started request=<...>
DEBUG [2023-01-09 14:44:00] httpcore.http11 - send_request_headers.complete
DEBUG [2023-01-09 14:44:00] httpcore.http11 - send_request_body.started request=<...>
DEBUG [2023-01-09 14:44:00] httpcore.http11 - send_request_body.complete
DEBUG [2023-01-09 14:44:00] httpcore.http11 - receive_response_headers.started request=<...>
DEBUG [2023-01-09 14:44:00] httpcore.http11 - receive_response_headers.complete return_value=(b'HTTP/1.1', 200, b'OK', [(b'Age', b'572646'), (b'Cache-Control', b'max-age=604800'), (b'Content-Type', b'text/html; charset=UTF-8'), (b'Date', b'Mon, 09 Jan 2023 14:44:00 GMT'), (b'Etag', b'"3147526947+ident"'), (b'Expires', b'Mon, 16 Jan 2023 14:44:00 GMT'), (b'Last-Modified', b'Thu, 17 Oct 2019 07:18:26 GMT'), (b'Server', b'ECS (nyb/1D18)'), (b'Vary', b'Accept-Encoding'), (b'X-Cache', b'HIT'), (b'Content-Length', b'1256')])
DEBUG [2023-01-09 14:44:00] httpcore.http11 - receive_response_body.started request=<...>
DEBUG [2023-01-09 14:44:00] httpcore.http11 - receive_response_body.complete
DEBUG [2023-01-09 14:44:00] httpcore.http11 - response_closed.started
DEBUG [2023-01-09 14:44:00] httpcore.http11 - response_closed.complete
DEBUG [2023-01-09 14:44:00] httpcore.connection - close.started
DEBUG [2023-01-09 14:44:00] httpcore.connection - close.complete
```

The exact formatting of the debug logging may be subject to change across different versions of `httpcore`. If you need to rely on a particular format it is recommended that you pin installation of the package to a fixed version.
If you need to rely on a particular format it is recommended that you pin installation of the package to a fixed version.httpcore-1.0.2/docs/network-backends.md000066400000000000000000000212651452343067500200740ustar00rootroot00000000000000# Network Backends The API layer at which `httpcore` interacts with the network is described as the network backend. Various backend implementations are provided, allowing `httpcore` to handle networking in different runtime contexts. ## Working with network backends ### The default network backend Typically you won't need to specify a network backend, as a default will automatically be selected. However, understanding how the network backends fit in may be useful if you want to better understand the underlying architecture. Let's start by seeing how we can explicitly select the network backend. First we're making a standard HTTP request, using a connection pool: ```python import httpcore with httpcore.ConnectionPool() as http: response = http.request('GET', 'https://www.example.com') print(response) ``` We can also have the same behavior, but be explicit with our selection of the network backend: ```python import httpcore network_backend = httpcore.SyncBackend() with httpcore.ConnectionPool(network_backend=network_backend) as http: response = http.request('GET', 'https://www.example.com') print(response) ``` The `httpcore.SyncBackend()` implementation handles the opening of TCP connections, and operations on the socket stream, such as reading, writing, and closing the connection. We can get a better understanding of this by using a network backend to send a basic HTTP/1.1 request directly: ```python import httpcore # Create an SSL context using 'certifi' for the certificates. ssl_context = httpcore.default_ssl_context() # A basic HTTP/1.1 request as a plain bytestring. request = b'\r\n'.join([ b'GET / HTTP/1.1', b'Host: www.example.com', b'Accept: */*', b'Connection: close', b'' ]) # Open a TCP stream and upgrade it to SSL. network_backend = httpcore.SyncBackend() network_stream = network_backend.connect_tcp("www.example.com", 443) network_stream = network_stream.start_tls(ssl_context, server_hostname="www.example.com") # Send the HTTP request. network_stream.write(request) # Read the HTTP response. while True: response = network_stream.read(max_bytes=4096) if response == b'': break print(response) # The output should look something like this: # # b'HTTP/1.1 200 OK\r\nAge: 600005\r\n [...] Content-Length: 1256\r\nConnection: close\r\n\r\n' # b'\n\n\n Example Domain [...] \n' ``` ### Async network backends If we're working with an `async` codebase, then we need to select a different backend. The `httpcore.AnyIOBackend` is suitable for usage if you're running under `asyncio`. This is a networking backend implemented using [the `anyio` package](https://anyio.readthedocs.io/en/3.x/). ```python import httpcore import asyncio async def main(): network_backend = httpcore.AnyIOBackend() async with httpcore.AsyncConnectionPool(network_backend=network_backend) as http: response = await http.request('GET', 'https://www.example.com') print(response) asyncio.run(main()) ``` The `AnyIOBackend` will work when running under either `asyncio` or `trio`. However, if you're working with async using the [`trio` framework](https://trio.readthedocs.io/en/stable/), then we recommend using the `httpcore.TrioBackend`. 
This will give you the same kind of networking behavior you'd have using `AnyIOBackend`, but there will be a little less indirection so it will be marginally more efficient and will present cleaner tracebacks in error cases. ```python import httpcore import trio async def main(): network_backend = httpcore.TrioBackend() async with httpcore.AsyncConnectionPool(network_backend=network_backend) as http: response = await http.request('GET', 'https://www.example.com') print(response) trio.run(main) ``` ### Mock network backends There are also mock network backends available that can be useful for testing purposes. These backends accept a list of bytes, and return network stream interfaces that return those byte streams. Here's an example of mocking a simple HTTP/1.1 response... ```python import httpcore network_backend = httpcore.MockBackend([ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ]) with httpcore.ConnectionPool(network_backend=network_backend) as http: response = http.request("GET", "https://example.com/") print(response.extensions['http_version']) print(response.status) print(response.content) ``` Mocking an HTTP/2 response is more complex, since it uses a binary format... ```python import hpack import hyperframe.frame import httpcore content = [ hyperframe.frame.SettingsFrame().serialize(), hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=1, data=b"Hello, world!", flags=["END_STREAM"] ).serialize(), ] # Note that we instantiate the mock backend with an `http2=True` argument. # This ensures that the mock network stream acts as if the `h2` ALPN flag has been set, # and causes the connection pool to interact with the connection using HTTP/2. network_backend = httpcore.MockBackend(content, http2=True) with httpcore.ConnectionPool(network_backend=network_backend) as http: response = http.request("GET", "https://example.com/") print(response.extensions['http_version']) print(response.status) print(response.content) ``` ### Custom network backends The base interface for network backends is provided as public API, allowing you to implement custom networking behavior. You can use this to provide advanced networking functionality such as: * Network recording / replay. * In-depth debug tooling. * Handling non-standard SSL or DNS requirements. Here's an example that records the network response to a file on disk: ```python import httpcore class RecordingNetworkStream(httpcore.NetworkStream): def __init__(self, record_file, stream): self.record_file = record_file self.stream = stream def read(self, max_bytes, timeout=None): data = self.stream.read(max_bytes, timeout=timeout) self.record_file.write(data) return data def write(self, buffer, timeout=None): self.stream.write(buffer, timeout=timeout) def close(self) -> None: self.stream.close() def start_tls( self, ssl_context, server_hostname=None, timeout=None, ): self.stream = self.stream.start_tls( ssl_context, server_hostname=server_hostname, timeout=timeout ) return self def get_extra_info(self, info): return self.stream.get_extra_info(info) class RecordingNetworkBackend(httpcore.NetworkBackend): """ A custom network backend that records network responses. 
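It wraps the default `SyncBackend`, and returns `RecordingNetworkStream` instances that copy each chunk read from the network into `record_file`.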
""" def __init__(self, record_file): self.record_file = record_file self.backend = httpcore.SyncBackend() def connect_tcp( self, host, port, timeout=None, local_address=None, socket_options=None, ): # Note that we're only using a single record file here, # so even if multiple connections are opened the network # traffic will all write to the same file. # An alternative implementation might automatically use # a new file for each opened connection. stream = self.backend.connect_tcp( host, port, timeout=timeout, local_address=local_address, socket_options=socket_options ) return RecordingNetworkStream(self.record_file, stream) # Once you make the request, the raw HTTP/1.1 response will be available #Β in the 'network-recording' file. # # Try switching to `http2=True` to see the difference when recording HTTP/2 binary network traffic, # or add `headers={'Accept-Encoding': 'gzip'}` to see HTTP content compression. with open("network-recording", "wb") as record_file: network_backend = RecordingNetworkBackend(record_file) with httpcore.ConnectionPool(network_backend=network_backend) as http: response = http.request("GET", "https://www.example.com/") print(response) ``` --- ## Reference ### Networking Backends * `httpcore.SyncBackend` * `httpcore.AnyIOBackend` * `httpcore.TrioBackend` ### Mock Backends * `httpcore.MockBackend` * `httpcore.MockStream` * `httpcore.AsyncMockBackend` * `httpcore.AsyncMockStream` ### Base Interface * `httpcore.NetworkBackend` * `httpcore.NetworkStream` * `httpcore.AsyncNetworkBackend` * `httpcore.AsyncNetworkStream` httpcore-1.0.2/docs/proxies.md000066400000000000000000000062741452343067500163270ustar00rootroot00000000000000# Proxies The `httpcore` package provides support for HTTP proxies, using either "HTTP Forwarding" or "HTTP Tunnelling". Forwarding is a proxy mechanism for sending requests to `http` URLs via an intermediate proxy. Tunnelling is a proxy mechanism for sending requests to `https` URLs via an intermediate proxy. Sending requests via a proxy is very similar to sending requests using a standard connection pool: ```python import httpcore proxy = httpcore.HTTPProxy(proxy_url="http://127.0.0.1:8080/") r = proxy.request("GET", "https://www.example.com/") print(r) # ``` You can test the `httpcore` proxy support, using the Python [`proxy.py`](https://pypi.org/project/proxy.py/) tool: ```shell $ pip install proxy.py $ proxy --hostname 127.0.0.1 --port 8080 ``` Requests will automatically use either forwarding or tunnelling, depending on if the scheme is `http` or `https`. ## Authentication Proxy authentication can be included in the initial configuration: ```python import httpcore # A `Proxy-Authorization` header will be included on the initial proxy connection. proxy = httpcore.HTTPProxy( proxy_url="http://127.0.0.1:8080/", proxy_auth=("", "") ) ``` Custom headers can also be included: ```python import httpcore import base64 # Construct and include a `Proxy-Authorization` header. auth = base64.b64encode(b":") proxy = httpcore.HTTPProxy( proxy_url="http://127.0.0.1:8080/", proxy_headers={"Proxy-Authorization": b"Basic " + auth} ) ``` ## Proxy SSL The `httpcore` package also supports HTTPS proxies for http and https destinations. HTTPS proxies can be used in the same way that HTTP proxies are. ```python proxy = httpcore.HTTPProxy(proxy_url="https://127.0.0.1:8080/") ``` Also, when using HTTPS proxies, you may need to configure the SSL context, which you can do with the `proxy_ssl_context` argument. 
```python import ssl import httpcore proxy_ssl_context = ssl.create_default_context() proxy_ssl_context.check_hostname = False proxy = httpcore.HTTPProxy('https://127.0.0.1:8080/', proxy_ssl_context=proxy_ssl_context) ``` It is important to note that the `ssl_context` argument is always used for the remote connection, and the `proxy_ssl_context` argument is always used for the proxy connection. ## HTTP Versions If you use proxies, keep in mind that the `httpcore` package only supports proxies to HTTP/1.1 servers. ## SOCKS proxy support The `httpcore` package also supports proxies using the SOCKS5 protocol. Make sure to install the optional dependency using `pip install 'httpcore[socks]'`. The `SOCKSProxy` class should be used instead of a standard connection pool: ```python import httpcore # Note that the SOCKS port is 1080. proxy = httpcore.SOCKSProxy(proxy_url="socks5://127.0.0.1:1080/") r = proxy.request("GET", "https://www.example.com/") ``` Authentication via SOCKS is also supported: ```python import httpcore proxy = httpcore.SOCKSProxy( proxy_url="socks5://127.0.0.1:8080/", proxy_auth=("<username>", "<password>") ) r = proxy.request("GET", "https://www.example.com/") ``` --- # Reference ## `httpcore.HTTPProxy` ::: httpcore.HTTPProxy handler: python rendering: show_source: False httpcore-1.0.2/docs/quickstart.md000066400000000000000000000120361452343067500170210ustar00rootroot00000000000000# Quickstart For convenience, the `httpcore` package provides a couple of top-level functions that you can use for sending HTTP requests. You probably don't want to integrate against functions if you're writing a library that uses `httpcore`, but you might find them useful for testing `httpcore` from the command-line, or if you're writing a simple script that doesn't require any of the connection pooling or advanced configuration that `httpcore` offers. ## Sending a request We'll start off by sending a request... ```python import httpcore response = httpcore.request("GET", "https://www.example.com/") print(response) # <Response [200]> print(response.status) # 200 print(response.headers) # [(b'Accept-Ranges', b'bytes'), (b'Age', b'557328'), (b'Cache-Control', b'max-age=604800'), ...] print(response.content) # b'\n\n\nExample Domain\n\n\n ...' ``` ## Request headers Request headers may be included either in a dictionary style, or as a list of two-tuples. ```python import httpcore import json headers = {'User-Agent': 'httpcore'} r = httpcore.request('GET', 'https://httpbin.org/headers', headers=headers) print(json.loads(r.content)) # { # 'headers': { # 'Host': 'httpbin.org', # 'User-Agent': 'httpcore', # 'X-Amzn-Trace-Id': 'Root=1-616ff5de-5ea1b7e12766f1cf3b8e3a33' # } # } ``` The keys and values may either be provided as strings or as bytes. Where strings are provided they may only contain characters within the ASCII range `chr(0)` - `chr(127)`. To include characters outside this range you must deal with any character encoding explicitly, and pass bytes as the header keys/values. The `Host` header will always be automatically included in any outgoing request, as it is strictly required to be present by the HTTP protocol. *Note that the `X-Amzn-Trace-Id` header shown in the example above is not an outgoing request header, but has been added by a gateway server.* ## Request body A request body can be included either as bytes... 
```python import httpcore import json r = httpcore.request('POST', 'https://httpbin.org/post', content=b'Hello, world') print(json.loads(r.content)) # { # 'args': {}, # 'data': 'Hello, world', # 'files': {}, # 'form': {}, # 'headers': { # 'Host': 'httpbin.org', # 'Content-Length': '12', # 'X-Amzn-Trace-Id': 'Root=1-61700258-00e338a124ca55854bf8435f' # }, # 'json': None, # 'origin': '68.41.35.196', # 'url': 'https://httpbin.org/post' # } ``` Or as an iterable that returns bytes... ```python import httpcore import json with open("hello-world.txt", "rb") as input_file: r = httpcore.request('POST', 'https://httpbin.org/post', content=input_file) print(json.loads(r.content)) # { # 'args': {}, # 'data': 'Hello, world', # 'files': {}, # 'form': {}, # 'headers': { # 'Host': 'httpbin.org', # 'Transfer-Encoding': 'chunked', # 'X-Amzn-Trace-Id': 'Root=1-61700258-00e338a124ca55854bf8435f' # }, # 'json': None, # 'origin': '68.41.35.196', # 'url': 'https://httpbin.org/post' # } ``` When a request body is included, either a `Content-Length` header or a `Transfer-Encoding: chunked` header will be automatically included. The `Content-Length` header is used when passing bytes, and indicates an HTTP request with a body of a pre-determined length. The `Transfer-Encoding: chunked` header is the mechanism that HTTP/1.1 uses for sending HTTP request bodies without a pre-determined length. ## Streaming responses When using the `httpcore.request()` function, the response body will automatically be read to completion, and made available in the `response.content` attribute. Sometimes you may be dealing with large responses and not want to read the entire response into memory. The `httpcore.stream()` function provides a mechanism for sending a request and dealing with a streaming response: ```python import httpcore with httpcore.stream('GET', 'https://example.com') as response: for chunk in response.iter_stream(): print(f"Downloaded: {chunk}") ``` Here's a more complete example that demonstrates downloading a response: ```python import httpcore with httpcore.stream('GET', 'https://speed.hetzner.de/100MB.bin') as response: with open("download.bin", "wb") as output_file: for chunk in response.iter_stream(): output_file.write(chunk) ``` The `httpcore.stream()` API also allows you to *conditionally* read the response... ```python import httpcore with httpcore.stream('GET', 'https://example.com') as response: content_length = [int(v) for k, v in response.headers if k.lower() == b'content-length'][0] if content_length > 100_000_000: raise Exception("Response too large.") response.read() # `response.content` is now available. ``` --- # Reference ## `httpcore.request()` ::: httpcore.request handler: python rendering: show_source: False ## `httpcore.stream()` ::: httpcore.stream handler: python rendering: show_source: False httpcore-1.0.2/docs/requests-responses-urls.md000066400000000000000000000025571452343067500215130ustar00rootroot00000000000000# Requests, Responses, and URLs TODO ## Requests Request instances in `httpcore` are deliberately simple, and only include the essential information required to represent an HTTP request. Properties on the request are plain byte-wise representations. 
```python >>> request = httpcore.Request("GET", "https://www.example.com/") >>> request.method b"GET" >>> request.url httpcore.URL(scheme=b"https", host=b"www.example.com", port=None, target=b"/") >>> request.headers [(b'Host', b'www.example.com')] >>> request.stream ``` The interface is liberal in the types that it accepts, but specific in the properties that it uses to represent them. For example, headers may be specified as a dictionary of strings, but internally are represented as a list of `(byte, byte)` tuples. ```python >>> headers = {"User-Agent": "custom"} >>> request = httpcore.Request("GET", "https://www.example.com/", headers=headers) >>> request.headers [(b'Host', b'www.example.com'), (b"User-Agent", b"custom")] ``` ## Responses ... ## URLs ... --- # Reference ## `httpcore.Request` ::: httpcore.Request handler: python rendering: show_source: False ## `httpcore.Response` ::: httpcore.Response handler: python rendering: show_source: False ## `httpcore.URL` ::: httpcore.URL handler: python rendering: show_source: False httpcore-1.0.2/docs/table-of-contents.md000066400000000000000000000030001452343067500201420ustar00rootroot00000000000000# API Reference * Quickstart * `httpcore.request()` * `httpcore.stream()` * Requests, Responses, and URLs * `httpcore.Request` * `httpcore.Response` * `httpcore.URL` * Connection Pools * `httpcore.ConnectionPool` * Proxies * `httpcore.HTTPProxy` * Connections * `httpcore.HTTPConnection` * `httpcore.HTTP11Connection` * `httpcore.HTTP2Connection` * Async Support * `httpcore.AsyncConnectionPool` * `httpcore.AsyncHTTPProxy` * `httpcore.AsyncHTTPConnection` * `httpcore.AsyncHTTP11Connection` * `httpcore.AsyncHTTP2Connection` * Network Backends * Sync * `httpcore.backends.sync.SyncBackend` * `httpcore.backends.mock.MockBackend` * Async * `httpcore.backends.auto.AutoBackend` * `httpcore.backends.asyncio.AsyncioBackend` * `httpcore.backends.trio.TrioBackend` * `httpcore.backends.mock.AsyncMockBackend` * Base interfaces * `httpcore.backends.base.NetworkBackend` * `httpcore.backends.base.AsyncNetworkBackend` * Exceptions * `httpcore.TimeoutException` * `httpcore.PoolTimeout` * `httpcore.ConnectTimeout` * `httpcore.ReadTimeout` * `httpcore.WriteTimeout` * `httpcore.NetworkError` * `httpcore.ConnectError` * `httpcore.ReadError` * `httpcore.WriteError` * `httpcore.ProtocolError` * `httpcore.RemoteProtocolError` * `httpcore.LocalProtocolError` * `httpcore.ProxyError` * `httpcore.UnsupportedProtocol` httpcore-1.0.2/httpcore/000077500000000000000000000000001452343067500152035ustar00rootroot00000000000000httpcore-1.0.2/httpcore/__init__.py000066400000000000000000000064111452343067500173160ustar00rootroot00000000000000from ._api import request, stream from ._async import ( AsyncConnectionInterface, AsyncConnectionPool, AsyncHTTP2Connection, AsyncHTTP11Connection, AsyncHTTPConnection, AsyncHTTPProxy, AsyncSOCKSProxy, ) from ._backends.base import ( SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream, NetworkBackend, NetworkStream, ) from ._backends.mock import AsyncMockBackend, AsyncMockStream, MockBackend, MockStream from ._backends.sync import SyncBackend from ._exceptions import ( ConnectError, ConnectionNotAvailable, ConnectTimeout, LocalProtocolError, NetworkError, PoolTimeout, ProtocolError, ProxyError, ReadError, ReadTimeout, RemoteProtocolError, TimeoutException, UnsupportedProtocol, WriteError, WriteTimeout, ) from ._models import URL, Origin, Request, Response from ._ssl import default_ssl_context from ._sync import ( ConnectionInterface, ConnectionPool, 
HTTP2Connection, HTTP11Connection, HTTPConnection, HTTPProxy, SOCKSProxy, ) # The 'httpcore.AnyIOBackend' class is conditional on 'anyio' being installed. try: from ._backends.anyio import AnyIOBackend except ImportError: # pragma: nocover class AnyIOBackend: # type: ignore def __init__(self, *args, **kwargs): # type: ignore msg = ( "Attempted to use 'httpcore.AnyIOBackend' but 'anyio' is not installed." ) raise RuntimeError(msg) # The 'httpcore.TrioBackend' class is conditional on 'trio' being installed. try: from ._backends.trio import TrioBackend except ImportError: # pragma: nocover class TrioBackend: # type: ignore def __init__(self, *args, **kwargs): # type: ignore msg = "Attempted to use 'httpcore.TrioBackend' but 'trio' is not installed." raise RuntimeError(msg) __all__ = [ # top-level requests "request", "stream", # models "Origin", "URL", "Request", "Response", # async "AsyncHTTPConnection", "AsyncConnectionPool", "AsyncHTTPProxy", "AsyncHTTP11Connection", "AsyncHTTP2Connection", "AsyncConnectionInterface", "AsyncSOCKSProxy", # sync "HTTPConnection", "ConnectionPool", "HTTPProxy", "HTTP11Connection", "HTTP2Connection", "ConnectionInterface", "SOCKSProxy", # network backends, implementations "SyncBackend", "AnyIOBackend", "TrioBackend", # network backends, mock implementations "AsyncMockBackend", "AsyncMockStream", "MockBackend", "MockStream", # network backends, interface "AsyncNetworkStream", "AsyncNetworkBackend", "NetworkStream", "NetworkBackend", # util "default_ssl_context", "SOCKET_OPTION", # exceptions "ConnectionNotAvailable", "ProxyError", "ProtocolError", "LocalProtocolError", "RemoteProtocolError", "UnsupportedProtocol", "TimeoutException", "PoolTimeout", "ConnectTimeout", "ReadTimeout", "WriteTimeout", "NetworkError", "ConnectError", "ReadError", "WriteError", ] __version__ = "1.0.2" __locals = locals() for __name in __all__: if not __name.startswith("__"): setattr(__locals[__name], "__module__", "httpcore") # noqa httpcore-1.0.2/httpcore/_api.py000066400000000000000000000061371452343067500164740ustar00rootroot00000000000000from contextlib import contextmanager from typing import Iterator, Optional, Union from ._models import URL, Extensions, HeaderTypes, Response from ._sync.connection_pool import ConnectionPool def request( method: Union[bytes, str], url: Union[URL, bytes, str], *, headers: HeaderTypes = None, content: Union[bytes, Iterator[bytes], None] = None, extensions: Optional[Extensions] = None, ) -> Response: """ Sends an HTTP request, returning the response. ``` response = httpcore.request("GET", "https://www.example.com/") ``` Arguments: method: The HTTP method for the request. Typically one of `"GET"`, `"OPTIONS"`, `"HEAD"`, `"POST"`, `"PUT"`, `"PATCH"`, or `"DELETE"`. url: The URL of the HTTP request. Either as an instance of `httpcore.URL`, or as str/bytes. headers: The HTTP request headers. Either as a dictionary of str/bytes, or as a list of two-tuples of str/bytes. content: The content of the request body. Either as bytes, or as a bytes iterator. extensions: A dictionary of optional extra information included on the request. Possible keys include `"timeout"`. Returns: An instance of `httpcore.Response`. 
""" with ConnectionPool() as pool: return pool.request( method=method, url=url, headers=headers, content=content, extensions=extensions, ) @contextmanager def stream( method: Union[bytes, str], url: Union[URL, bytes, str], *, headers: HeaderTypes = None, content: Union[bytes, Iterator[bytes], None] = None, extensions: Optional[Extensions] = None, ) -> Iterator[Response]: """ Sends an HTTP request, returning the response within a content manager. ``` with httpcore.stream("GET", "https://www.example.com/") as response: ... ``` When using the `stream()` function, the body of the response will not be automatically read. If you want to access the response body you should either use `content = response.read()`, or `for chunk in response.iter_content()`. Arguments: method: The HTTP method for the request. Typically one of `"GET"`, `"OPTIONS"`, `"HEAD"`, `"POST"`, `"PUT"`, `"PATCH"`, or `"DELETE"`. url: The URL of the HTTP request. Either as an instance of `httpcore.URL`, or as str/bytes. headers: The HTTP request headers. Either as a dictionary of str/bytes, or as a list of two-tuples of str/bytes. content: The content of the request body. Either as bytes, or as a bytes iterator. extensions: A dictionary of optional extra information included on the request. Possible keys include `"timeout"`. Returns: An instance of `httpcore.Response`. """ with ConnectionPool() as pool: with pool.stream( method=method, url=url, headers=headers, content=content, extensions=extensions, ) as response: yield response httpcore-1.0.2/httpcore/_async/000077500000000000000000000000001452343067500164575ustar00rootroot00000000000000httpcore-1.0.2/httpcore/_async/__init__.py000066400000000000000000000023051452343067500205700ustar00rootroot00000000000000from .connection import AsyncHTTPConnection from .connection_pool import AsyncConnectionPool from .http11 import AsyncHTTP11Connection from .http_proxy import AsyncHTTPProxy from .interfaces import AsyncConnectionInterface try: from .http2 import AsyncHTTP2Connection except ImportError: # pragma: nocover class AsyncHTTP2Connection: # type: ignore def __init__(self, *args, **kwargs) -> None: # type: ignore raise RuntimeError( "Attempted to use http2 support, but the `h2` package is not " "installed. Use 'pip install httpcore[http2]'." ) try: from .socks_proxy import AsyncSOCKSProxy except ImportError: # pragma: nocover class AsyncSOCKSProxy: # type: ignore def __init__(self, *args, **kwargs) -> None: # type: ignore raise RuntimeError( "Attempted to use SOCKS support, but the `socksio` package is not " "installed. Use 'pip install httpcore[socks]'." ) __all__ = [ "AsyncHTTPConnection", "AsyncConnectionPool", "AsyncHTTPProxy", "AsyncHTTP11Connection", "AsyncHTTP2Connection", "AsyncConnectionInterface", "AsyncSOCKSProxy", ] httpcore-1.0.2/httpcore/_async/connection.py000066400000000000000000000206531452343067500211760ustar00rootroot00000000000000import itertools import logging import ssl from types import TracebackType from typing import Iterable, Iterator, Optional, Type from .._backends.auto import AutoBackend from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream from .._exceptions import ConnectError, ConnectionNotAvailable, ConnectTimeout from .._models import Origin, Request, Response from .._ssl import default_ssl_context from .._synchronization import AsyncLock from .._trace import Trace from .http11 import AsyncHTTP11Connection from .interfaces import AsyncConnectionInterface RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. 
logger = logging.getLogger("httpcore.connection") def exponential_backoff(factor: float) -> Iterator[float]: """ Generate a geometric sequence that has a ratio of 2 and starts with 0. For example: - `factor = 2`: `0, 2, 4, 8, 16, 32, 64, ...` - `factor = 3`: `0, 3, 6, 12, 24, 48, 96, ...` """ yield 0 for n in itertools.count(): yield factor * 2**n class AsyncHTTPConnection(AsyncConnectionInterface): def __init__( self, origin: Origin, ssl_context: Optional[ssl.SSLContext] = None, keepalive_expiry: Optional[float] = None, http1: bool = True, http2: bool = False, retries: int = 0, local_address: Optional[str] = None, uds: Optional[str] = None, network_backend: Optional[AsyncNetworkBackend] = None, socket_options: Optional[Iterable[SOCKET_OPTION]] = None, ) -> None: self._origin = origin self._ssl_context = ssl_context self._keepalive_expiry = keepalive_expiry self._http1 = http1 self._http2 = http2 self._retries = retries self._local_address = local_address self._uds = uds self._network_backend: AsyncNetworkBackend = ( AutoBackend() if network_backend is None else network_backend ) self._connection: Optional[AsyncConnectionInterface] = None self._connect_failed: bool = False self._request_lock = AsyncLock() self._socket_options = socket_options async def handle_async_request(self, request: Request) -> Response: if not self.can_handle_request(request.url.origin): raise RuntimeError( f"Attempted to send request to {request.url.origin} on connection to {self._origin}" ) async with self._request_lock: if self._connection is None: try: stream = await self._connect(request) ssl_object = stream.get_extra_info("ssl_object") http2_negotiated = ( ssl_object is not None and ssl_object.selected_alpn_protocol() == "h2" ) if http2_negotiated or (self._http2 and not self._http1): from .http2 import AsyncHTTP2Connection self._connection = AsyncHTTP2Connection( origin=self._origin, stream=stream, keepalive_expiry=self._keepalive_expiry, ) else: self._connection = AsyncHTTP11Connection( origin=self._origin, stream=stream, keepalive_expiry=self._keepalive_expiry, ) except Exception as exc: self._connect_failed = True raise exc elif not self._connection.is_available(): raise ConnectionNotAvailable() return await self._connection.handle_async_request(request) async def _connect(self, request: Request) -> AsyncNetworkStream: timeouts = request.extensions.get("timeout", {}) sni_hostname = request.extensions.get("sni_hostname", None) timeout = timeouts.get("connect", None) retries_left = self._retries delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) while True: try: if self._uds is None: kwargs = { "host": self._origin.host.decode("ascii"), "port": self._origin.port, "local_address": self._local_address, "timeout": timeout, "socket_options": self._socket_options, } async with Trace("connect_tcp", logger, request, kwargs) as trace: stream = await self._network_backend.connect_tcp(**kwargs) trace.return_value = stream else: kwargs = { "path": self._uds, "timeout": timeout, "socket_options": self._socket_options, } async with Trace( "connect_unix_socket", logger, request, kwargs ) as trace: stream = await self._network_backend.connect_unix_socket( **kwargs ) trace.return_value = stream if self._origin.scheme == b"https": ssl_context = ( default_ssl_context() if self._ssl_context is None else self._ssl_context ) alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] ssl_context.set_alpn_protocols(alpn_protocols) kwargs = { "ssl_context": ssl_context, "server_hostname": sni_hostname or 
self._origin.host.decode("ascii"), "timeout": timeout, } async with Trace("start_tls", logger, request, kwargs) as trace: stream = await stream.start_tls(**kwargs) trace.return_value = stream return stream except (ConnectError, ConnectTimeout): if retries_left <= 0: raise retries_left -= 1 delay = next(delays) async with Trace("retry", logger, request, kwargs) as trace: await self._network_backend.sleep(delay) def can_handle_request(self, origin: Origin) -> bool: return origin == self._origin async def aclose(self) -> None: if self._connection is not None: async with Trace("close", logger, None, {}): await self._connection.aclose() def is_available(self) -> bool: if self._connection is None: # If HTTP/2 support is enabled, and the resulting connection could # end up as HTTP/2 then we should indicate the connection as being # available to service multiple requests. return ( self._http2 and (self._origin.scheme == b"https" or not self._http1) and not self._connect_failed ) return self._connection.is_available() def has_expired(self) -> bool: if self._connection is None: return self._connect_failed return self._connection.has_expired() def is_idle(self) -> bool: if self._connection is None: return self._connect_failed return self._connection.is_idle() def is_closed(self) -> bool: if self._connection is None: return self._connect_failed return self._connection.is_closed() def info(self) -> str: if self._connection is None: return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" return self._connection.info() def __repr__(self) -> str: return f"<{self.__class__.__name__} [{self.info()}]>" # These context managers are not used in the standard flow, but are # useful for testing or working with connection instances directly. async def __aenter__(self) -> "AsyncHTTPConnection": return self async def __aexit__( self, exc_type: Optional[Type[BaseException]] = None, exc_value: Optional[BaseException] = None, traceback: Optional[TracebackType] = None, ) -> None: await self.aclose() httpcore-1.0.2/httpcore/_async/connection_pool.py000066400000000000000000000346571452343067500222400ustar00rootroot00000000000000import ssl import sys import time from types import TracebackType from typing import AsyncIterable, AsyncIterator, Iterable, List, Optional, Type from .._backends.auto import AutoBackend from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend from .._exceptions import ConnectionNotAvailable, PoolTimeout, UnsupportedProtocol from .._models import Origin, Request, Response from .._synchronization import AsyncEvent, AsyncLock, AsyncShieldCancellation from .connection import AsyncHTTPConnection from .interfaces import AsyncConnectionInterface, AsyncRequestInterface class RequestStatus: def __init__(self, request: Request): self.request = request self.connection: Optional[AsyncConnectionInterface] = None self._connection_acquired = AsyncEvent() def set_connection(self, connection: AsyncConnectionInterface) -> None: assert self.connection is None self.connection = connection self._connection_acquired.set() def unset_connection(self) -> None: assert self.connection is not None self.connection = None self._connection_acquired = AsyncEvent() async def wait_for_connection( self, timeout: Optional[float] = None ) -> AsyncConnectionInterface: if self.connection is None: await self._connection_acquired.wait(timeout=timeout) assert self.connection is not None return self.connection class AsyncConnectionPool(AsyncRequestInterface): """ A connection pool for making HTTP requests. 
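Connections are reused where possible, queued requests are serviced strictly in order of arrival, and expired connections are closed off as part of servicing each request.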
""" def __init__( self, ssl_context: Optional[ssl.SSLContext] = None, max_connections: Optional[int] = 10, max_keepalive_connections: Optional[int] = None, keepalive_expiry: Optional[float] = None, http1: bool = True, http2: bool = False, retries: int = 0, local_address: Optional[str] = None, uds: Optional[str] = None, network_backend: Optional[AsyncNetworkBackend] = None, socket_options: Optional[Iterable[SOCKET_OPTION]] = None, ) -> None: """ A connection pool for making HTTP requests. Parameters: ssl_context: An SSL context to use for verifying connections. If not specified, the default `httpcore.default_ssl_context()` will be used. max_connections: The maximum number of concurrent HTTP connections that the pool should allow. Any attempt to send a request on a pool that would exceed this amount will block until a connection is available. max_keepalive_connections: The maximum number of idle HTTP connections that will be maintained in the pool. keepalive_expiry: The duration in seconds that an idle HTTP connection may be maintained for before being expired from the pool. http1: A boolean indicating if HTTP/1.1 requests should be supported by the connection pool. Defaults to True. http2: A boolean indicating if HTTP/2 requests should be supported by the connection pool. Defaults to False. retries: The maximum number of retries when trying to establish a connection. local_address: Local address to connect from. Can also be used to connect using a particular address family. Using `local_address="0.0.0.0"` will connect using an `AF_INET` address (IPv4), while using `local_address="::"` will connect using an `AF_INET6` address (IPv6). uds: Path to a Unix Domain Socket to use instead of TCP sockets. network_backend: A backend instance to use for handling network I/O. socket_options: Socket options that have to be included in the TCP socket when the connection was established. """ self._ssl_context = ssl_context self._max_connections = ( sys.maxsize if max_connections is None else max_connections ) self._max_keepalive_connections = ( sys.maxsize if max_keepalive_connections is None else max_keepalive_connections ) self._max_keepalive_connections = min( self._max_connections, self._max_keepalive_connections ) self._keepalive_expiry = keepalive_expiry self._http1 = http1 self._http2 = http2 self._retries = retries self._local_address = local_address self._uds = uds self._pool: List[AsyncConnectionInterface] = [] self._requests: List[RequestStatus] = [] self._pool_lock = AsyncLock() self._network_backend = ( AutoBackend() if network_backend is None else network_backend ) self._socket_options = socket_options def create_connection(self, origin: Origin) -> AsyncConnectionInterface: return AsyncHTTPConnection( origin=origin, ssl_context=self._ssl_context, keepalive_expiry=self._keepalive_expiry, http1=self._http1, http2=self._http2, retries=self._retries, local_address=self._local_address, uds=self._uds, network_backend=self._network_backend, socket_options=self._socket_options, ) @property def connections(self) -> List[AsyncConnectionInterface]: """ Return a list of the connections currently in the pool. For example: ```python >>> pool.connections [ , , , ] ``` """ return list(self._pool) async def _attempt_to_acquire_connection(self, status: RequestStatus) -> bool: """ Attempt to provide a connection that can handle the given origin. """ origin = status.request.url.origin # If there are queued requests in front of us, then don't acquire a # connection. We handle requests strictly in order. 
waiting = [s for s in self._requests if s.connection is None] if waiting and waiting[0] is not status: return False # Reuse an existing connection if one is currently available. for idx, connection in enumerate(self._pool): if connection.can_handle_request(origin) and connection.is_available(): self._pool.pop(idx) self._pool.insert(0, connection) status.set_connection(connection) return True # If the pool is currently full, attempt to close one idle connection. if len(self._pool) >= self._max_connections: for idx, connection in reversed(list(enumerate(self._pool))): if connection.is_idle(): await connection.aclose() self._pool.pop(idx) break # If the pool is still full, then we cannot acquire a connection. if len(self._pool) >= self._max_connections: return False # Otherwise create a new connection. connection = self.create_connection(origin) self._pool.insert(0, connection) status.set_connection(connection) return True async def _close_expired_connections(self) -> None: """ Clean up the connection pool by closing off any connections that have expired. """ # Close any connections that have expired their keep-alive time. for idx, connection in reversed(list(enumerate(self._pool))): if connection.has_expired(): await connection.aclose() self._pool.pop(idx) # If the pool size exceeds the maximum number of allowed keep-alive connections, # then close off idle connections as required. pool_size = len(self._pool) for idx, connection in reversed(list(enumerate(self._pool))): if connection.is_idle() and pool_size > self._max_keepalive_connections: await connection.aclose() self._pool.pop(idx) pool_size -= 1 async def handle_async_request(self, request: Request) -> Response: """ Send an HTTP request, and return an HTTP response. This is the core implementation that is called into by `.request()` or `.stream()`. """ scheme = request.url.scheme.decode() if scheme == "": raise UnsupportedProtocol( "Request URL is missing an 'http://' or 'https://' protocol." ) if scheme not in ("http", "https", "ws", "wss"): raise UnsupportedProtocol( f"Request URL has an unsupported protocol '{scheme}://'." ) status = RequestStatus(request) timeouts = request.extensions.get("timeout", {}) timeout = timeouts.get("pool", None) if timeout is not None: deadline = time.monotonic() + timeout else: deadline = float("inf") async with self._pool_lock: self._requests.append(status) await self._close_expired_connections() await self._attempt_to_acquire_connection(status) while True: try: connection = await status.wait_for_connection(timeout=timeout) except BaseException as exc: # If we timeout here, or if the task is cancelled, then make # sure to remove the request from the queue before bubbling # up the exception. async with self._pool_lock: # Ensure only remove when task exists. if status in self._requests: self._requests.remove(status) raise exc try: response = await connection.handle_async_request(request) except ConnectionNotAvailable: # The ConnectionNotAvailable exception is a special case, that # indicates we need to retry the request on a new connection. # # The most common case where this can occur is when multiple # requests are queued waiting for a single connection, which # might end up as an HTTP/2 connection, but which actually ends # up as HTTP/1.1. async with self._pool_lock: # Maintain our position in the request queue, but reset the # status so that the request becomes queued again. 
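# (Unsetting the connection also re-arms the underlying event, so `wait_for_connection()` will block until a new connection is assigned.)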
status.unset_connection() await self._attempt_to_acquire_connection(status) except BaseException as exc: with AsyncShieldCancellation(): await self.response_closed(status) raise exc else: break timeout = deadline - time.monotonic() if timeout < 0: raise PoolTimeout # pragma: nocover # When we return the response, we wrap the stream in a special class # that handles notifying the connection pool once the response # has been released. assert isinstance(response.stream, AsyncIterable) return Response( status=response.status, headers=response.headers, content=ConnectionPoolByteStream(response.stream, self, status), extensions=response.extensions, ) async def response_closed(self, status: RequestStatus) -> None: """ This method acts as a callback once the request/response cycle is complete. It is called into from the `ConnectionPoolByteStream.aclose()` method. """ assert status.connection is not None connection = status.connection async with self._pool_lock: # Update the state of the connection pool. if status in self._requests: self._requests.remove(status) if connection.is_closed() and connection in self._pool: self._pool.remove(connection) # Since we've had a response closed, it's possible we'll now be able # to service one or more requests that are currently pending. for status in self._requests: if status.connection is None: acquired = await self._attempt_to_acquire_connection(status) # If we could not acquire a connection for a queued request # then we don't need to check any more requests that are # queued later behind it. if not acquired: break # Housekeeping. await self._close_expired_connections() async def aclose(self) -> None: """ Close any connections in the pool. """ async with self._pool_lock: for connection in self._pool: await connection.aclose() self._pool = [] self._requests = [] async def __aenter__(self) -> "AsyncConnectionPool": # Acquiring the pool lock here ensures that we have the # correct dependencies installed as early as possible. async with self._pool_lock: pass return self async def __aexit__( self, exc_type: Optional[Type[BaseException]] = None, exc_value: Optional[BaseException] = None, traceback: Optional[TracebackType] = None, ) -> None: await self.aclose() class ConnectionPoolByteStream: """ A wrapper around the response byte stream, that additionally handles notifying the connection pool when the response has been closed. 
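Closing the stream calls back into `AsyncConnectionPool.response_closed()`, releasing the underlying connection for reuse.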
""" def __init__( self, stream: AsyncIterable[bytes], pool: AsyncConnectionPool, status: RequestStatus, ) -> None: self._stream = stream self._pool = pool self._status = status async def __aiter__(self) -> AsyncIterator[bytes]: async for part in self._stream: yield part async def aclose(self) -> None: try: if hasattr(self._stream, "aclose"): await self._stream.aclose() finally: with AsyncShieldCancellation(): await self._pool.response_closed(self._status) httpcore-1.0.2/httpcore/_async/http11.py000066400000000000000000000302311452343067500201510ustar00rootroot00000000000000import enum import logging import time from types import TracebackType from typing import ( AsyncIterable, AsyncIterator, List, Optional, Tuple, Type, Union, cast, ) import h11 from .._backends.base import AsyncNetworkStream from .._exceptions import ( ConnectionNotAvailable, LocalProtocolError, RemoteProtocolError, WriteError, map_exceptions, ) from .._models import Origin, Request, Response from .._synchronization import AsyncLock, AsyncShieldCancellation from .._trace import Trace from .interfaces import AsyncConnectionInterface logger = logging.getLogger("httpcore.http11") # A subset of `h11.Event` types supported by `_send_event` H11SendEvent = Union[ h11.Request, h11.Data, h11.EndOfMessage, ] class HTTPConnectionState(enum.IntEnum): NEW = 0 ACTIVE = 1 IDLE = 2 CLOSED = 3 class AsyncHTTP11Connection(AsyncConnectionInterface): READ_NUM_BYTES = 64 * 1024 MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024 def __init__( self, origin: Origin, stream: AsyncNetworkStream, keepalive_expiry: Optional[float] = None, ) -> None: self._origin = origin self._network_stream = stream self._keepalive_expiry: Optional[float] = keepalive_expiry self._expire_at: Optional[float] = None self._state = HTTPConnectionState.NEW self._state_lock = AsyncLock() self._request_count = 0 self._h11_state = h11.Connection( our_role=h11.CLIENT, max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE, ) async def handle_async_request(self, request: Request) -> Response: if not self.can_handle_request(request.url.origin): raise RuntimeError( f"Attempted to send request to {request.url.origin} on connection " f"to {self._origin}" ) async with self._state_lock: if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE): self._request_count += 1 self._state = HTTPConnectionState.ACTIVE self._expire_at = None else: raise ConnectionNotAvailable() try: kwargs = {"request": request} try: async with Trace( "send_request_headers", logger, request, kwargs ) as trace: await self._send_request_headers(**kwargs) async with Trace("send_request_body", logger, request, kwargs) as trace: await self._send_request_body(**kwargs) except WriteError: # If we get a write error while we're writing the request, # then we supress this error and move on to attempting to # read the response. Servers can sometimes close the request # pre-emptively and then respond with a well formed HTTP # error response. 
pass async with Trace( "receive_response_headers", logger, request, kwargs ) as trace: ( http_version, status, reason_phrase, headers, ) = await self._receive_response_headers(**kwargs) trace.return_value = ( http_version, status, reason_phrase, headers, ) return Response( status=status, headers=headers, content=HTTP11ConnectionByteStream(self, request), extensions={ "http_version": http_version, "reason_phrase": reason_phrase, "network_stream": self._network_stream, }, ) except BaseException as exc: with AsyncShieldCancellation(): async with Trace("response_closed", logger, request) as trace: await self._response_closed() raise exc # Sending the request... async def _send_request_headers(self, request: Request) -> None: timeouts = request.extensions.get("timeout", {}) timeout = timeouts.get("write", None) with map_exceptions({h11.LocalProtocolError: LocalProtocolError}): event = h11.Request( method=request.method, target=request.url.target, headers=request.headers, ) await self._send_event(event, timeout=timeout) async def _send_request_body(self, request: Request) -> None: timeouts = request.extensions.get("timeout", {}) timeout = timeouts.get("write", None) assert isinstance(request.stream, AsyncIterable) async for chunk in request.stream: event = h11.Data(data=chunk) await self._send_event(event, timeout=timeout) await self._send_event(h11.EndOfMessage(), timeout=timeout) async def _send_event( self, event: h11.Event, timeout: Optional[float] = None ) -> None: bytes_to_send = self._h11_state.send(event) if bytes_to_send is not None: await self._network_stream.write(bytes_to_send, timeout=timeout) # Receiving the response... async def _receive_response_headers( self, request: Request ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]]]: timeouts = request.extensions.get("timeout", {}) timeout = timeouts.get("read", None) while True: event = await self._receive_event(timeout=timeout) if isinstance(event, h11.Response): break if ( isinstance(event, h11.InformationalResponse) and event.status_code == 101 ): break http_version = b"HTTP/" + event.http_version # h11 version 0.11+ supports a `raw_items` interface to get the # raw header casing, rather than the enforced lowercase headers. headers = event.headers.raw_items() return http_version, event.status_code, event.reason, headers async def _receive_response_body(self, request: Request) -> AsyncIterator[bytes]: timeouts = request.extensions.get("timeout", {}) timeout = timeouts.get("read", None) while True: event = await self._receive_event(timeout=timeout) if isinstance(event, h11.Data): yield bytes(event.data) elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)): break async def _receive_event( self, timeout: Optional[float] = None ) -> Union[h11.Event, Type[h11.PAUSED]]: while True: with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}): event = self._h11_state.next_event() if event is h11.NEED_DATA: data = await self._network_stream.read( self.READ_NUM_BYTES, timeout=timeout ) # If we feed this case through h11 we'll raise an exception like: # # httpcore.RemoteProtocolError: can't handle event type # ConnectionClosed when role=SERVER and state=SEND_RESPONSE # # Which is accurate, but not very informative from an end-user # perspective. Instead we handle this case distinctly and treat # it as a ConnectError. if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE: msg = "Server disconnected without sending a response." 
raise RemoteProtocolError(msg) self._h11_state.receive_data(data) else: # mypy fails to narrow the type in the above if statement above return cast(Union[h11.Event, Type[h11.PAUSED]], event) async def _response_closed(self) -> None: async with self._state_lock: if ( self._h11_state.our_state is h11.DONE and self._h11_state.their_state is h11.DONE ): self._state = HTTPConnectionState.IDLE self._h11_state.start_next_cycle() if self._keepalive_expiry is not None: now = time.monotonic() self._expire_at = now + self._keepalive_expiry else: await self.aclose() # Once the connection is no longer required... async def aclose(self) -> None: # Note that this method unilaterally closes the connection, and does # not have any kind of locking in place around it. self._state = HTTPConnectionState.CLOSED await self._network_stream.aclose() # The AsyncConnectionInterface methods provide information about the state of # the connection, allowing for a connection pooling implementation to # determine when to reuse and when to close the connection... def can_handle_request(self, origin: Origin) -> bool: return origin == self._origin def is_available(self) -> bool: # Note that HTTP/1.1 connections in the "NEW" state are not treated as # being "available". The control flow which created the connection will # be able to send an outgoing request, but the connection will not be # acquired from the connection pool for any other request. return self._state == HTTPConnectionState.IDLE def has_expired(self) -> bool: now = time.monotonic() keepalive_expired = self._expire_at is not None and now > self._expire_at # If the HTTP connection is idle but the socket is readable, then the # only valid state is that the socket is about to return b"", indicating # a server-initiated disconnect. server_disconnected = ( self._state == HTTPConnectionState.IDLE and self._network_stream.get_extra_info("is_readable") ) return keepalive_expired or server_disconnected def is_idle(self) -> bool: return self._state == HTTPConnectionState.IDLE def is_closed(self) -> bool: return self._state == HTTPConnectionState.CLOSED def info(self) -> str: origin = str(self._origin) return ( f"{origin!r}, HTTP/1.1, {self._state.name}, " f"Request Count: {self._request_count}" ) def __repr__(self) -> str: class_name = self.__class__.__name__ origin = str(self._origin) return ( f"<{class_name} [{origin!r}, {self._state.name}, " f"Request Count: {self._request_count}]>" ) # These context managers are not used in the standard flow, but are # useful for testing or working with connection instances directly. async def __aenter__(self) -> "AsyncHTTP11Connection": return self async def __aexit__( self, exc_type: Optional[Type[BaseException]] = None, exc_value: Optional[BaseException] = None, traceback: Optional[TracebackType] = None, ) -> None: await self.aclose() class HTTP11ConnectionByteStream: def __init__(self, connection: AsyncHTTP11Connection, request: Request) -> None: self._connection = connection self._request = request self._closed = False async def __aiter__(self) -> AsyncIterator[bytes]: kwargs = {"request": self._request} try: async with Trace("receive_response_body", logger, self._request, kwargs): async for chunk in self._connection._receive_response_body(**kwargs): yield chunk except BaseException as exc: # If we get an exception while streaming the response, # we want to close the response (and possibly the connection) # before raising that exception. 
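# The shield ensures the clean-up itself cannot be interrupted by cancellation.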
with AsyncShieldCancellation(): await self.aclose() raise exc async def aclose(self) -> None: if not self._closed: self._closed = True async with Trace("response_closed", logger, self._request): await self._connection._response_closed() httpcore-1.0.2/httpcore/_async/http2.py000066400000000000000000000565071452343067500201070ustar00rootroot00000000000000import enum import logging import time import types import typing import h2.config import h2.connection import h2.events import h2.exceptions import h2.settings from .._backends.base import AsyncNetworkStream from .._exceptions import ( ConnectionNotAvailable, LocalProtocolError, RemoteProtocolError, ) from .._models import Origin, Request, Response from .._synchronization import AsyncLock, AsyncSemaphore, AsyncShieldCancellation from .._trace import Trace from .interfaces import AsyncConnectionInterface logger = logging.getLogger("httpcore.http2") def has_body_headers(request: Request) -> bool: return any( k.lower() == b"content-length" or k.lower() == b"transfer-encoding" for k, v in request.headers ) class HTTPConnectionState(enum.IntEnum): ACTIVE = 1 IDLE = 2 CLOSED = 3 class AsyncHTTP2Connection(AsyncConnectionInterface): READ_NUM_BYTES = 64 * 1024 CONFIG = h2.config.H2Configuration(validate_inbound_headers=False) def __init__( self, origin: Origin, stream: AsyncNetworkStream, keepalive_expiry: typing.Optional[float] = None, ): self._origin = origin self._network_stream = stream self._keepalive_expiry: typing.Optional[float] = keepalive_expiry self._h2_state = h2.connection.H2Connection(config=self.CONFIG) self._state = HTTPConnectionState.IDLE self._expire_at: typing.Optional[float] = None self._request_count = 0 self._init_lock = AsyncLock() self._state_lock = AsyncLock() self._read_lock = AsyncLock() self._write_lock = AsyncLock() self._sent_connection_init = False self._used_all_stream_ids = False self._connection_error = False # Mapping from stream ID to response stream events. self._events: typing.Dict[ int, typing.Union[ h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded, h2.events.StreamReset, ], ] = {} # Connection terminated events are stored as state since # we need to handle them for all streams. self._connection_terminated: typing.Optional[ h2.events.ConnectionTerminated ] = None self._read_exception: typing.Optional[Exception] = None self._write_exception: typing.Optional[Exception] = None async def handle_async_request(self, request: Request) -> Response: if not self.can_handle_request(request.url.origin): # This cannot occur in normal operation, since the connection pool # will only send requests on connections that handle them. # It's in place simply for resilience as a guard against incorrect # usage, for anyone working directly with httpcore connections. 
raise RuntimeError( f"Attempted to send request to {request.url.origin} on connection " f"to {self._origin}" ) async with self._state_lock: if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE): self._request_count += 1 self._expire_at = None self._state = HTTPConnectionState.ACTIVE else: raise ConnectionNotAvailable() async with self._init_lock: if not self._sent_connection_init: try: kwargs = {"request": request} async with Trace("send_connection_init", logger, request, kwargs): await self._send_connection_init(**kwargs) except BaseException as exc: with AsyncShieldCancellation(): await self.aclose() raise exc self._sent_connection_init = True # Initially start with just 1 until the remote server provides # its max_concurrent_streams value self._max_streams = 1 local_settings_max_streams = ( self._h2_state.local_settings.max_concurrent_streams ) self._max_streams_semaphore = AsyncSemaphore(local_settings_max_streams) for _ in range(local_settings_max_streams - self._max_streams): await self._max_streams_semaphore.acquire() await self._max_streams_semaphore.acquire() try: stream_id = self._h2_state.get_next_available_stream_id() self._events[stream_id] = [] except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover self._used_all_stream_ids = True self._request_count -= 1 raise ConnectionNotAvailable() try: kwargs = {"request": request, "stream_id": stream_id} async with Trace("send_request_headers", logger, request, kwargs): await self._send_request_headers(request=request, stream_id=stream_id) async with Trace("send_request_body", logger, request, kwargs): await self._send_request_body(request=request, stream_id=stream_id) async with Trace( "receive_response_headers", logger, request, kwargs ) as trace: status, headers = await self._receive_response( request=request, stream_id=stream_id ) trace.return_value = (status, headers) return Response( status=status, headers=headers, content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id), extensions={ "http_version": b"HTTP/2", "network_stream": self._network_stream, "stream_id": stream_id, }, ) except BaseException as exc: # noqa: PIE786 with AsyncShieldCancellation(): kwargs = {"stream_id": stream_id} async with Trace("response_closed", logger, request, kwargs): await self._response_closed(stream_id=stream_id) if isinstance(exc, h2.exceptions.ProtocolError): # One case where h2 can raise a protocol error is when a # closed frame has been seen by the state machine. # # This happens when one stream is reading, and encounters # a GOAWAY event. Other flows of control may then raise # a protocol error at any point they interact with the 'h2_state'. # # In this case we'll have stored the event, and should raise # it as a RemoteProtocolError. if self._connection_terminated: # pragma: nocover raise RemoteProtocolError(self._connection_terminated) # If h2 raises a protocol error in some other state then we # must somehow have made a protocol violation. raise LocalProtocolError(exc) # pragma: nocover raise exc async def _send_connection_init(self, request: Request) -> None: """ The HTTP/2 connection requires some initial setup before we can start using individual request/response streams on it. """ # Need to set these manually here instead of manipulating via # __setitem__() otherwise the H2Connection will emit SettingsUpdate # frames in addition to sending the undesired defaults. 
self._h2_state.local_settings = h2.settings.Settings( client=True, initial_values={ # Disable PUSH_PROMISE frames from the server since we don't do anything # with them for now. Maybe when we support caching? h2.settings.SettingCodes.ENABLE_PUSH: 0, # These two are taken from h2 for safe defaults h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100, h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536, }, ) # Some websites (*cough* Yahoo *cough*) balk at this setting being # present in the initial handshake since it's not defined in the original # RFC despite the RFC mandating ignoring settings you don't know about. del self._h2_state.local_settings[ h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL ] self._h2_state.initiate_connection() self._h2_state.increment_flow_control_window(2**24) await self._write_outgoing_data(request) # Sending the request... async def _send_request_headers(self, request: Request, stream_id: int) -> None: """ Send the request headers to a given stream ID. """ end_stream = not has_body_headers(request) # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'. # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require # HTTP/1.1 style headers, and map them appropriately if we end up on # an HTTP/2 connection. authority = [v for k, v in request.headers if k.lower() == b"host"][0] headers = [ (b":method", request.method), (b":authority", authority), (b":scheme", request.url.scheme), (b":path", request.url.target), ] + [ (k.lower(), v) for k, v in request.headers if k.lower() not in ( b"host", b"transfer-encoding", ) ] self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id) await self._write_outgoing_data(request) async def _send_request_body(self, request: Request, stream_id: int) -> None: """ Iterate over the request body sending it to a given stream ID. """ if not has_body_headers(request): return assert isinstance(request.stream, typing.AsyncIterable) async for data in request.stream: await self._send_stream_data(request, stream_id, data) await self._send_end_stream(request, stream_id) async def _send_stream_data( self, request: Request, stream_id: int, data: bytes ) -> None: """ Send a single chunk of data in one or more data frames. """ while data: max_flow = await self._wait_for_outgoing_flow(request, stream_id) chunk_size = min(len(data), max_flow) chunk, data = data[:chunk_size], data[chunk_size:] self._h2_state.send_data(stream_id, chunk) await self._write_outgoing_data(request) async def _send_end_stream(self, request: Request, stream_id: int) -> None: """ Send an empty data frame on a given stream ID with the END_STREAM flag set. """ self._h2_state.end_stream(stream_id) await self._write_outgoing_data(request) # Receiving the response... async def _receive_response( self, request: Request, stream_id: int ) -> typing.Tuple[int, typing.List[typing.Tuple[bytes, bytes]]]: """ Return the response status code and headers for a given stream ID.
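Blocks reading events off the connection until a `ResponseReceived` event is available for this stream, then splits the `:status` pseudo-header out from the remaining response headers.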
""" while True: event = await self._receive_stream_event(request, stream_id) if isinstance(event, h2.events.ResponseReceived): break status_code = 200 headers = [] for k, v in event.headers: if k == b":status": status_code = int(v.decode("ascii", errors="ignore")) elif not k.startswith(b":"): headers.append((k, v)) return (status_code, headers) async def _receive_response_body( self, request: Request, stream_id: int ) -> typing.AsyncIterator[bytes]: """ Iterator that returns the bytes of the response body for a given stream ID. """ while True: event = await self._receive_stream_event(request, stream_id) if isinstance(event, h2.events.DataReceived): amount = event.flow_controlled_length self._h2_state.acknowledge_received_data(amount, stream_id) await self._write_outgoing_data(request) yield event.data elif isinstance(event, h2.events.StreamEnded): break async def _receive_stream_event( self, request: Request, stream_id: int ) -> typing.Union[ h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded ]: """ Return the next available event for a given stream ID. Will read more data from the network if required. """ while not self._events.get(stream_id): await self._receive_events(request, stream_id) event = self._events[stream_id].pop(0) if isinstance(event, h2.events.StreamReset): raise RemoteProtocolError(event) return event async def _receive_events( self, request: Request, stream_id: typing.Optional[int] = None ) -> None: """ Read some data from the network until we see one or more events for a given stream ID. """ async with self._read_lock: if self._connection_terminated is not None: last_stream_id = self._connection_terminated.last_stream_id if stream_id and last_stream_id and stream_id > last_stream_id: self._request_count -= 1 raise ConnectionNotAvailable() raise RemoteProtocolError(self._connection_terminated) # This conditional is a bit icky. We don't want to block reading if we've # actually got an event to return for a given stream. We need to do that # check *within* the atomic read lock. Though it also need to be optional, # because when we call it from `_wait_for_outgoing_flow` we *do* want to # block until we've available flow control, event when we have events # pending for the stream ID we're attempting to send on. 
if stream_id is None or not self._events.get(stream_id): events = await self._read_incoming_data(request) for event in events: if isinstance(event, h2.events.RemoteSettingsChanged): async with Trace( "receive_remote_settings", logger, request ) as trace: await self._receive_remote_settings_change(event) trace.return_value = event elif isinstance( event, ( h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded, h2.events.StreamReset, ), ): if event.stream_id in self._events: self._events[event.stream_id].append(event) elif isinstance(event, h2.events.ConnectionTerminated): self._connection_terminated = event await self._write_outgoing_data(request) async def _receive_remote_settings_change(self, event: h2.events.Event) -> None: max_concurrent_streams = event.changed_settings.get( h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS ) if max_concurrent_streams: new_max_streams = min( max_concurrent_streams.new_value, self._h2_state.local_settings.max_concurrent_streams, ) if new_max_streams and new_max_streams != self._max_streams: while new_max_streams > self._max_streams: await self._max_streams_semaphore.release() self._max_streams += 1 while new_max_streams < self._max_streams: await self._max_streams_semaphore.acquire() self._max_streams -= 1 async def _response_closed(self, stream_id: int) -> None: await self._max_streams_semaphore.release() del self._events[stream_id] async with self._state_lock: if self._connection_terminated and not self._events: await self.aclose() elif self._state == HTTPConnectionState.ACTIVE and not self._events: self._state = HTTPConnectionState.IDLE if self._keepalive_expiry is not None: now = time.monotonic() self._expire_at = now + self._keepalive_expiry if self._used_all_stream_ids: # pragma: nocover await self.aclose() async def aclose(self) -> None: # Note that this method unilaterally closes the connection, and does # not have any kind of locking in place around it. self._h2_state.close_connection() self._state = HTTPConnectionState.CLOSED await self._network_stream.aclose() # Wrappers around network read/write operations... async def _read_incoming_data( self, request: Request ) -> typing.List[h2.events.Event]: timeouts = request.extensions.get("timeout", {}) timeout = timeouts.get("read", None) if self._read_exception is not None: raise self._read_exception # pragma: nocover try: data = await self._network_stream.read(self.READ_NUM_BYTES, timeout) if data == b"": raise RemoteProtocolError("Server disconnected") except Exception as exc: # If we get a network error we should: # # 1. Save the exception and just raise it immediately on any future reads. # (For example, this means that a single read timeout or disconnect will # immediately close all pending streams. Without requiring multiple # sequential timeouts.) # 2. Mark the connection as errored, so that we don't accept any other # incoming requests. self._read_exception = exc self._connection_error = True raise exc events: typing.List[h2.events.Event] = self._h2_state.receive_data(data) return events async def _write_outgoing_data(self, request: Request) -> None: timeouts = request.extensions.get("timeout", {}) timeout = timeouts.get("write", None) async with self._write_lock: data_to_send = self._h2_state.data_to_send() if self._write_exception is not None: raise self._write_exception # pragma: nocover try: await self._network_stream.write(data_to_send, timeout) except Exception as exc: # pragma: nocover # If we get a network error we should: # # 1. 
Save the exception and just raise it immediately on any future write. # (For example, this means that a single write timeout or disconnect will # immediately close all pending streams. Without requiring multiple # sequential timeouts.) # 2. Mark the connection as errored, so that we don't accept any other # incoming requests. self._write_exception = exc self._connection_error = True raise exc # Flow control... async def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int: """ Returns the maximum allowable outgoing flow for a given stream. If the allowable flow is zero, then waits on the network until WindowUpdated frames have increased the flow rate. https://tools.ietf.org/html/rfc7540#section-6.9 """ local_flow: int = self._h2_state.local_flow_control_window(stream_id) max_frame_size: int = self._h2_state.max_outbound_frame_size flow = min(local_flow, max_frame_size) while flow == 0: await self._receive_events(request) local_flow = self._h2_state.local_flow_control_window(stream_id) max_frame_size = self._h2_state.max_outbound_frame_size flow = min(local_flow, max_frame_size) return flow # Interface for connection pooling... def can_handle_request(self, origin: Origin) -> bool: return origin == self._origin def is_available(self) -> bool: return ( self._state != HTTPConnectionState.CLOSED and not self._connection_error and not self._used_all_stream_ids and not ( self._h2_state.state_machine.state == h2.connection.ConnectionState.CLOSED ) ) def has_expired(self) -> bool: now = time.monotonic() return self._expire_at is not None and now > self._expire_at def is_idle(self) -> bool: return self._state == HTTPConnectionState.IDLE def is_closed(self) -> bool: return self._state == HTTPConnectionState.CLOSED def info(self) -> str: origin = str(self._origin) return ( f"{origin!r}, HTTP/2, {self._state.name}, " f"Request Count: {self._request_count}" ) def __repr__(self) -> str: class_name = self.__class__.__name__ origin = str(self._origin) return ( f"<{class_name} [{origin!r}, {self._state.name}, " f"Request Count: {self._request_count}]>" ) # These context managers are not used in the standard flow, but are # useful for testing or working with connection instances directly. async def __aenter__(self) -> "AsyncHTTP2Connection": return self async def __aexit__( self, exc_type: typing.Optional[typing.Type[BaseException]] = None, exc_value: typing.Optional[BaseException] = None, traceback: typing.Optional[types.TracebackType] = None, ) -> None: await self.aclose() class HTTP2ConnectionByteStream: def __init__( self, connection: AsyncHTTP2Connection, request: Request, stream_id: int ) -> None: self._connection = connection self._request = request self._stream_id = stream_id self._closed = False async def __aiter__(self) -> typing.AsyncIterator[bytes]: kwargs = {"request": self._request, "stream_id": self._stream_id} try: async with Trace("receive_response_body", logger, self._request, kwargs): async for chunk in self._connection._receive_response_body( request=self._request, stream_id=self._stream_id ): yield chunk except BaseException as exc: # If we get an exception while streaming the response, # we want to close the response (and possibly the connection) # before raising that exception. 
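# The close is shielded from async cancellation so that the connection is # left in a consistent state even if the task consuming the stream is cancelled.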
with AsyncShieldCancellation(): await self.aclose() raise exc async def aclose(self) -> None: if not self._closed: self._closed = True kwargs = {"stream_id": self._stream_id} async with Trace("response_closed", logger, self._request, kwargs): await self._connection._response_closed(stream_id=self._stream_id) httpcore-1.0.2/httpcore/_async/http_proxy.py000066400000000000000000000350031452343067500212520ustar00rootroot00000000000000import logging import ssl from base64 import b64encode from typing import Iterable, List, Mapping, Optional, Sequence, Tuple, Union from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend from .._exceptions import ProxyError from .._models import ( URL, Origin, Request, Response, enforce_bytes, enforce_headers, enforce_url, ) from .._ssl import default_ssl_context from .._synchronization import AsyncLock from .._trace import Trace from .connection import AsyncHTTPConnection from .connection_pool import AsyncConnectionPool from .http11 import AsyncHTTP11Connection from .interfaces import AsyncConnectionInterface HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]] HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]] logger = logging.getLogger("httpcore.proxy") def merge_headers( default_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, override_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, ) -> List[Tuple[bytes, bytes]]: """ Append default_headers and override_headers, de-duplicating if a key exists in both cases. """ default_headers = [] if default_headers is None else list(default_headers) override_headers = [] if override_headers is None else list(override_headers) has_override = set(key.lower() for key, value in override_headers) default_headers = [ (key, value) for key, value in default_headers if key.lower() not in has_override ] return default_headers + override_headers def build_auth_header(username: bytes, password: bytes) -> bytes: userpass = username + b":" + password return b"Basic " + b64encode(userpass) class AsyncHTTPProxy(AsyncConnectionPool): """ A connection pool that sends requests via an HTTP proxy. """ def __init__( self, proxy_url: Union[URL, bytes, str], proxy_auth: Optional[Tuple[Union[bytes, str], Union[bytes, str]]] = None, proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, ssl_context: Optional[ssl.SSLContext] = None, proxy_ssl_context: Optional[ssl.SSLContext] = None, max_connections: Optional[int] = 10, max_keepalive_connections: Optional[int] = None, keepalive_expiry: Optional[float] = None, http1: bool = True, http2: bool = False, retries: int = 0, local_address: Optional[str] = None, uds: Optional[str] = None, network_backend: Optional[AsyncNetworkBackend] = None, socket_options: Optional[Iterable[SOCKET_OPTION]] = None, ) -> None: """ A connection pool for making HTTP requests. Parameters: proxy_url: The URL to use when connecting to the proxy server. For example `"http://127.0.0.1:8080/"`. proxy_auth: Any proxy authentication as a two-tuple of (username, password). May be either bytes or ascii-only str. proxy_headers: Any HTTP headers to use for the proxy requests. For example `{"Proxy-Authorization": "Basic <username>:<password>"}`. ssl_context: An SSL context to use for verifying connections. If not specified, the default `httpcore.default_ssl_context()` will be used. proxy_ssl_context: The same as `ssl_context`, but for a proxy server rather than a remote origin. max_connections: The maximum number of concurrent HTTP connections that the pool should allow.
Any attempt to send a request on a pool that would exceed this amount will block until a connection is available. max_keepalive_connections: The maximum number of idle HTTP connections that will be maintained in the pool. keepalive_expiry: The duration in seconds that an idle HTTP connection may be maintained for before being expired from the pool. http1: A boolean indicating if HTTP/1.1 requests should be supported by the connection pool. Defaults to True. http2: A boolean indicating if HTTP/2 requests should be supported by the connection pool. Defaults to False. retries: The maximum number of retries when trying to establish a connection. local_address: Local address to connect from. Can also be used to connect using a particular address family. Using `local_address="0.0.0.0"` will connect using an `AF_INET` address (IPv4), while using `local_address="::"` will connect using an `AF_INET6` address (IPv6). uds: Path to a Unix Domain Socket to use instead of TCP sockets. network_backend: A backend instance to use for handling network I/O. """ super().__init__( ssl_context=ssl_context, max_connections=max_connections, max_keepalive_connections=max_keepalive_connections, keepalive_expiry=keepalive_expiry, http1=http1, http2=http2, network_backend=network_backend, retries=retries, local_address=local_address, uds=uds, socket_options=socket_options, ) self._proxy_url = enforce_url(proxy_url, name="proxy_url") if ( self._proxy_url.scheme == b"http" and proxy_ssl_context is not None ): # pragma: no cover raise RuntimeError( "The `proxy_ssl_context` argument is not allowed for the http scheme" ) self._ssl_context = ssl_context self._proxy_ssl_context = proxy_ssl_context self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") if proxy_auth is not None: username = enforce_bytes(proxy_auth[0], name="proxy_auth") password = enforce_bytes(proxy_auth[1], name="proxy_auth") authorization = build_auth_header(username, password) self._proxy_headers = [ (b"Proxy-Authorization", authorization) ] + self._proxy_headers def create_connection(self, origin: Origin) -> AsyncConnectionInterface: if origin.scheme == b"http": return AsyncForwardHTTPConnection( proxy_origin=self._proxy_url.origin, proxy_headers=self._proxy_headers, remote_origin=origin, keepalive_expiry=self._keepalive_expiry, network_backend=self._network_backend, proxy_ssl_context=self._proxy_ssl_context, ) return AsyncTunnelHTTPConnection( proxy_origin=self._proxy_url.origin, proxy_headers=self._proxy_headers, remote_origin=origin, ssl_context=self._ssl_context, proxy_ssl_context=self._proxy_ssl_context, keepalive_expiry=self._keepalive_expiry, http1=self._http1, http2=self._http2, network_backend=self._network_backend, ) class AsyncForwardHTTPConnection(AsyncConnectionInterface): def __init__( self, proxy_origin: Origin, remote_origin: Origin, proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, keepalive_expiry: Optional[float] = None, network_backend: Optional[AsyncNetworkBackend] = None, socket_options: Optional[Iterable[SOCKET_OPTION]] = None, proxy_ssl_context: Optional[ssl.SSLContext] = None, ) -> None: self._connection = AsyncHTTPConnection( origin=proxy_origin, keepalive_expiry=keepalive_expiry, network_backend=network_backend, socket_options=socket_options, ssl_context=proxy_ssl_context, ) self._proxy_origin = proxy_origin self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") self._remote_origin = remote_origin async def handle_async_request(self, request: Request) -> Response: 
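# Forward proxying: rewrite the request so that the absolute URL becomes the # request target, and send it to the proxy's origin rather than directly to # the remote host.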
headers = merge_headers(self._proxy_headers, request.headers) url = URL( scheme=self._proxy_origin.scheme, host=self._proxy_origin.host, port=self._proxy_origin.port, target=bytes(request.url), ) proxy_request = Request( method=request.method, url=url, headers=headers, content=request.stream, extensions=request.extensions, ) return await self._connection.handle_async_request(proxy_request) def can_handle_request(self, origin: Origin) -> bool: return origin == self._remote_origin async def aclose(self) -> None: await self._connection.aclose() def info(self) -> str: return self._connection.info() def is_available(self) -> bool: return self._connection.is_available() def has_expired(self) -> bool: return self._connection.has_expired() def is_idle(self) -> bool: return self._connection.is_idle() def is_closed(self) -> bool: return self._connection.is_closed() def __repr__(self) -> str: return f"<{self.__class__.__name__} [{self.info()}]>" class AsyncTunnelHTTPConnection(AsyncConnectionInterface): def __init__( self, proxy_origin: Origin, remote_origin: Origin, ssl_context: Optional[ssl.SSLContext] = None, proxy_ssl_context: Optional[ssl.SSLContext] = None, proxy_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, keepalive_expiry: Optional[float] = None, http1: bool = True, http2: bool = False, network_backend: Optional[AsyncNetworkBackend] = None, socket_options: Optional[Iterable[SOCKET_OPTION]] = None, ) -> None: self._connection: AsyncConnectionInterface = AsyncHTTPConnection( origin=proxy_origin, keepalive_expiry=keepalive_expiry, network_backend=network_backend, socket_options=socket_options, ssl_context=proxy_ssl_context, ) self._proxy_origin = proxy_origin self._remote_origin = remote_origin self._ssl_context = ssl_context self._proxy_ssl_context = proxy_ssl_context self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") self._keepalive_expiry = keepalive_expiry self._http1 = http1 self._http2 = http2 self._connect_lock = AsyncLock() self._connected = False async def handle_async_request(self, request: Request) -> Response: timeouts = request.extensions.get("timeout", {}) timeout = timeouts.get("connect", None) async with self._connect_lock: if not self._connected: target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port) connect_url = URL( scheme=self._proxy_origin.scheme, host=self._proxy_origin.host, port=self._proxy_origin.port, target=target, ) connect_headers = merge_headers( [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers ) connect_request = Request( method=b"CONNECT", url=connect_url, headers=connect_headers, extensions=request.extensions, ) connect_response = await self._connection.handle_async_request( connect_request ) if connect_response.status < 200 or connect_response.status > 299: reason_bytes = connect_response.extensions.get("reason_phrase", b"") reason_str = reason_bytes.decode("ascii", errors="ignore") msg = "%d %s" % (connect_response.status, reason_str) await self._connection.aclose() raise ProxyError(msg) stream = connect_response.extensions["network_stream"] # Upgrade the stream to SSL ssl_context = ( default_ssl_context() if self._ssl_context is None else self._ssl_context ) alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] ssl_context.set_alpn_protocols(alpn_protocols) kwargs = { "ssl_context": ssl_context, "server_hostname": self._remote_origin.host.decode("ascii"), "timeout": timeout, } async with Trace("start_tls", logger, request, kwargs) as trace: stream = await 
stream.start_tls(**kwargs) trace.return_value = stream # Determine if we should be using HTTP/1.1 or HTTP/2 ssl_object = stream.get_extra_info("ssl_object") http2_negotiated = ( ssl_object is not None and ssl_object.selected_alpn_protocol() == "h2" ) # Create the HTTP/1.1 or HTTP/2 connection if http2_negotiated or (self._http2 and not self._http1): from .http2 import AsyncHTTP2Connection self._connection = AsyncHTTP2Connection( origin=self._remote_origin, stream=stream, keepalive_expiry=self._keepalive_expiry, ) else: self._connection = AsyncHTTP11Connection( origin=self._remote_origin, stream=stream, keepalive_expiry=self._keepalive_expiry, ) self._connected = True return await self._connection.handle_async_request(request) def can_handle_request(self, origin: Origin) -> bool: return origin == self._remote_origin async def aclose(self) -> None: await self._connection.aclose() def info(self) -> str: return self._connection.info() def is_available(self) -> bool: return self._connection.is_available() def has_expired(self) -> bool: return self._connection.has_expired() def is_idle(self) -> bool: return self._connection.is_idle() def is_closed(self) -> bool: return self._connection.is_closed() def __repr__(self) -> str: return f"<{self.__class__.__name__} [{self.info()}]>" httpcore-1.0.2/httpcore/_async/interfaces.py000066400000000000000000000106061452343067500211570ustar00rootroot00000000000000from contextlib import asynccontextmanager from typing import AsyncIterator, Optional, Union from .._models import ( URL, Extensions, HeaderTypes, Origin, Request, Response, enforce_bytes, enforce_headers, enforce_url, include_request_headers, ) class AsyncRequestInterface: async def request( self, method: Union[bytes, str], url: Union[URL, bytes, str], *, headers: HeaderTypes = None, content: Union[bytes, AsyncIterator[bytes], None] = None, extensions: Optional[Extensions] = None, ) -> Response: # Strict type checking on our parameters. method = enforce_bytes(method, name="method") url = enforce_url(url, name="url") headers = enforce_headers(headers, name="headers") # Include Host header, and optionally Content-Length or Transfer-Encoding. headers = include_request_headers(headers, url=url, content=content) request = Request( method=method, url=url, headers=headers, content=content, extensions=extensions, ) response = await self.handle_async_request(request) try: await response.aread() finally: await response.aclose() return response @asynccontextmanager async def stream( self, method: Union[bytes, str], url: Union[URL, bytes, str], *, headers: HeaderTypes = None, content: Union[bytes, AsyncIterator[bytes], None] = None, extensions: Optional[Extensions] = None, ) -> AsyncIterator[Response]: # Strict type checking on our parameters. method = enforce_bytes(method, name="method") url = enforce_url(url, name="url") headers = enforce_headers(headers, name="headers") # Include Host header, and optionally Content-Length or Transfer-Encoding. 
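# A usage sketch for this streaming entry point (hypothetical caller code, # assuming `pool` is an `AsyncConnectionPool` instance): # # async with pool.stream("GET", "https://www.example.com/") as response: # body = await response.aread()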
headers = include_request_headers(headers, url=url, content=content) request = Request( method=method, url=url, headers=headers, content=content, extensions=extensions, ) response = await self.handle_async_request(request) try: yield response finally: await response.aclose() async def handle_async_request(self, request: Request) -> Response: raise NotImplementedError() # pragma: nocover class AsyncConnectionInterface(AsyncRequestInterface): async def aclose(self) -> None: raise NotImplementedError() # pragma: nocover def info(self) -> str: raise NotImplementedError() # pragma: nocover def can_handle_request(self, origin: Origin) -> bool: raise NotImplementedError() # pragma: nocover def is_available(self) -> bool: """ Return `True` if the connection is currently able to accept an outgoing request. An HTTP/1.1 connection will only be available if it is currently idle. An HTTP/2 connection will be available so long as the stream ID space is not yet exhausted, and the connection is not in an error state. While the connection is being established we may not yet know if it is going to result in an HTTP/1.1 or HTTP/2 connection. The connection should be treated as being available, but might ultimately raise `ConnectionNotAvailable` exceptions if multiple requests are attempted over a connection that ends up being established as HTTP/1.1. """ raise NotImplementedError() # pragma: nocover def has_expired(self) -> bool: """ Return `True` if the connection is in a state where it should be closed. This either means that the connection is idle and it has passed the expiry time on its keep-alive, or that the server has sent an EOF. """ raise NotImplementedError() # pragma: nocover def is_idle(self) -> bool: """ Return `True` if the connection is currently idle. """ raise NotImplementedError() # pragma: nocover def is_closed(self) -> bool: """ Return `True` if the connection has been closed. Used when a response is closed to determine if the connection may be returned to the connection pool or not.
""" raise NotImplementedError() # pragma: nocover httpcore-1.0.2/httpcore/_async/socks_proxy.py000066400000000000000000000331421452343067500214170ustar00rootroot00000000000000import logging import ssl import typing from socksio import socks5 from .._backends.auto import AutoBackend from .._backends.base import AsyncNetworkBackend, AsyncNetworkStream from .._exceptions import ConnectionNotAvailable, ProxyError from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url from .._ssl import default_ssl_context from .._synchronization import AsyncLock from .._trace import Trace from .connection_pool import AsyncConnectionPool from .http11 import AsyncHTTP11Connection from .interfaces import AsyncConnectionInterface logger = logging.getLogger("httpcore.socks") AUTH_METHODS = { b"\x00": "NO AUTHENTICATION REQUIRED", b"\x01": "GSSAPI", b"\x02": "USERNAME/PASSWORD", b"\xff": "NO ACCEPTABLE METHODS", } REPLY_CODES = { b"\x00": "Succeeded", b"\x01": "General SOCKS server failure", b"\x02": "Connection not allowed by ruleset", b"\x03": "Network unreachable", b"\x04": "Host unreachable", b"\x05": "Connection refused", b"\x06": "TTL expired", b"\x07": "Command not supported", b"\x08": "Address type not supported", } async def _init_socks5_connection( stream: AsyncNetworkStream, *, host: bytes, port: int, auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, ) -> None: conn = socks5.SOCKS5Connection() # Auth method request auth_method = ( socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED if auth is None else socks5.SOCKS5AuthMethod.USERNAME_PASSWORD ) conn.send(socks5.SOCKS5AuthMethodsRequest([auth_method])) outgoing_bytes = conn.data_to_send() await stream.write(outgoing_bytes) # Auth method response incoming_bytes = await stream.read(max_bytes=4096) response = conn.receive_data(incoming_bytes) assert isinstance(response, socks5.SOCKS5AuthReply) if response.method != auth_method: requested = AUTH_METHODS.get(auth_method, "UNKNOWN") responded = AUTH_METHODS.get(response.method, "UNKNOWN") raise ProxyError( f"Requested {requested} from proxy server, but got {responded}." ) if response.method == socks5.SOCKS5AuthMethod.USERNAME_PASSWORD: # Username/password request assert auth is not None username, password = auth conn.send(socks5.SOCKS5UsernamePasswordRequest(username, password)) outgoing_bytes = conn.data_to_send() await stream.write(outgoing_bytes) # Username/password response incoming_bytes = await stream.read(max_bytes=4096) response = conn.receive_data(incoming_bytes) assert isinstance(response, socks5.SOCKS5UsernamePasswordReply) if not response.success: raise ProxyError("Invalid username/password") # Connect request conn.send( socks5.SOCKS5CommandRequest.from_address( socks5.SOCKS5Command.CONNECT, (host, port) ) ) outgoing_bytes = conn.data_to_send() await stream.write(outgoing_bytes) # Connect response incoming_bytes = await stream.read(max_bytes=4096) response = conn.receive_data(incoming_bytes) assert isinstance(response, socks5.SOCKS5Reply) if response.reply_code != socks5.SOCKS5ReplyCode.SUCCEEDED: reply_code = REPLY_CODES.get(response.reply_code, "UNKOWN") raise ProxyError(f"Proxy Server could not connect: {reply_code}.") class AsyncSOCKSProxy(AsyncConnectionPool): """ A connection pool that sends requests via an HTTP proxy. 
""" def __init__( self, proxy_url: typing.Union[URL, bytes, str], proxy_auth: typing.Optional[ typing.Tuple[typing.Union[bytes, str], typing.Union[bytes, str]] ] = None, ssl_context: typing.Optional[ssl.SSLContext] = None, max_connections: typing.Optional[int] = 10, max_keepalive_connections: typing.Optional[int] = None, keepalive_expiry: typing.Optional[float] = None, http1: bool = True, http2: bool = False, retries: int = 0, network_backend: typing.Optional[AsyncNetworkBackend] = None, ) -> None: """ A connection pool for making HTTP requests. Parameters: proxy_url: The URL to use when connecting to the proxy server. For example `"http://127.0.0.1:8080/"`. ssl_context: An SSL context to use for verifying connections. If not specified, the default `httpcore.default_ssl_context()` will be used. max_connections: The maximum number of concurrent HTTP connections that the pool should allow. Any attempt to send a request on a pool that would exceed this amount will block until a connection is available. max_keepalive_connections: The maximum number of idle HTTP connections that will be maintained in the pool. keepalive_expiry: The duration in seconds that an idle HTTP connection may be maintained for before being expired from the pool. http1: A boolean indicating if HTTP/1.1 requests should be supported by the connection pool. Defaults to True. http2: A boolean indicating if HTTP/2 requests should be supported by the connection pool. Defaults to False. retries: The maximum number of retries when trying to establish a connection. local_address: Local address to connect from. Can also be used to connect using a particular address family. Using `local_address="0.0.0.0"` will connect using an `AF_INET` address (IPv4), while using `local_address="::"` will connect using an `AF_INET6` address (IPv6). uds: Path to a Unix Domain Socket to use instead of TCP sockets. network_backend: A backend instance to use for handling network I/O. 
""" super().__init__( ssl_context=ssl_context, max_connections=max_connections, max_keepalive_connections=max_keepalive_connections, keepalive_expiry=keepalive_expiry, http1=http1, http2=http2, network_backend=network_backend, retries=retries, ) self._ssl_context = ssl_context self._proxy_url = enforce_url(proxy_url, name="proxy_url") if proxy_auth is not None: username, password = proxy_auth username_bytes = enforce_bytes(username, name="proxy_auth") password_bytes = enforce_bytes(password, name="proxy_auth") self._proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = ( username_bytes, password_bytes, ) else: self._proxy_auth = None def create_connection(self, origin: Origin) -> AsyncConnectionInterface: return AsyncSocks5Connection( proxy_origin=self._proxy_url.origin, remote_origin=origin, proxy_auth=self._proxy_auth, ssl_context=self._ssl_context, keepalive_expiry=self._keepalive_expiry, http1=self._http1, http2=self._http2, network_backend=self._network_backend, ) class AsyncSocks5Connection(AsyncConnectionInterface): def __init__( self, proxy_origin: Origin, remote_origin: Origin, proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, ssl_context: typing.Optional[ssl.SSLContext] = None, keepalive_expiry: typing.Optional[float] = None, http1: bool = True, http2: bool = False, network_backend: typing.Optional[AsyncNetworkBackend] = None, ) -> None: self._proxy_origin = proxy_origin self._remote_origin = remote_origin self._proxy_auth = proxy_auth self._ssl_context = ssl_context self._keepalive_expiry = keepalive_expiry self._http1 = http1 self._http2 = http2 self._network_backend: AsyncNetworkBackend = ( AutoBackend() if network_backend is None else network_backend ) self._connect_lock = AsyncLock() self._connection: typing.Optional[AsyncConnectionInterface] = None self._connect_failed = False async def handle_async_request(self, request: Request) -> Response: timeouts = request.extensions.get("timeout", {}) sni_hostname = request.extensions.get("sni_hostname", None) timeout = timeouts.get("connect", None) async with self._connect_lock: if self._connection is None: try: # Connect to the proxy kwargs = { "host": self._proxy_origin.host.decode("ascii"), "port": self._proxy_origin.port, "timeout": timeout, } with Trace("connect_tcp", logger, request, kwargs) as trace: stream = await self._network_backend.connect_tcp(**kwargs) trace.return_value = stream # Connect to the remote host using socks5 kwargs = { "stream": stream, "host": self._remote_origin.host.decode("ascii"), "port": self._remote_origin.port, "auth": self._proxy_auth, } with Trace( "setup_socks5_connection", logger, request, kwargs ) as trace: await _init_socks5_connection(**kwargs) trace.return_value = stream # Upgrade the stream to SSL if self._remote_origin.scheme == b"https": ssl_context = ( default_ssl_context() if self._ssl_context is None else self._ssl_context ) alpn_protocols = ( ["http/1.1", "h2"] if self._http2 else ["http/1.1"] ) ssl_context.set_alpn_protocols(alpn_protocols) kwargs = { "ssl_context": ssl_context, "server_hostname": sni_hostname or self._remote_origin.host.decode("ascii"), "timeout": timeout, } async with Trace("start_tls", logger, request, kwargs) as trace: stream = await stream.start_tls(**kwargs) trace.return_value = stream # Determine if we should be using HTTP/1.1 or HTTP/2 ssl_object = stream.get_extra_info("ssl_object") http2_negotiated = ( ssl_object is not None and ssl_object.selected_alpn_protocol() == "h2" ) # Create the HTTP/1.1 or HTTP/2 connection if http2_negotiated or 
( self._http2 and not self._http1 ): # pragma: nocover from .http2 import AsyncHTTP2Connection self._connection = AsyncHTTP2Connection( origin=self._remote_origin, stream=stream, keepalive_expiry=self._keepalive_expiry, ) else: self._connection = AsyncHTTP11Connection( origin=self._remote_origin, stream=stream, keepalive_expiry=self._keepalive_expiry, ) except Exception as exc: self._connect_failed = True raise exc elif not self._connection.is_available(): # pragma: nocover raise ConnectionNotAvailable() return await self._connection.handle_async_request(request) def can_handle_request(self, origin: Origin) -> bool: return origin == self._remote_origin async def aclose(self) -> None: if self._connection is not None: await self._connection.aclose() def is_available(self) -> bool: if self._connection is None: # pragma: nocover # If HTTP/2 support is enabled, and the resulting connection could # end up as HTTP/2 then we should indicate the connection as being # available to service multiple requests. return ( self._http2 and (self._remote_origin.scheme == b"https" or not self._http1) and not self._connect_failed ) return self._connection.is_available() def has_expired(self) -> bool: if self._connection is None: # pragma: nocover return self._connect_failed return self._connection.has_expired() def is_idle(self) -> bool: if self._connection is None: # pragma: nocover return self._connect_failed return self._connection.is_idle() def is_closed(self) -> bool: if self._connection is None: # pragma: nocover return self._connect_failed return self._connection.is_closed() def info(self) -> str: if self._connection is None: # pragma: nocover return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" return self._connection.info() def __repr__(self) -> str: return f"<{self.__class__.__name__} [{self.info()}]>" httpcore-1.0.2/httpcore/_backends/000077500000000000000000000000001452343067500171145ustar00rootroot00000000000000httpcore-1.0.2/httpcore/_backends/__init__.py000066400000000000000000000000001452343067500212130ustar00rootroot00000000000000httpcore-1.0.2/httpcore/_backends/anyio.py000066400000000000000000000121301452343067500206020ustar00rootroot00000000000000import ssl import typing import anyio from .._exceptions import ( ConnectError, ConnectTimeout, ReadError, ReadTimeout, WriteError, WriteTimeout, map_exceptions, ) from .._utils import is_socket_readable from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream class AnyIOStream(AsyncNetworkStream): def __init__(self, stream: anyio.abc.ByteStream) -> None: self._stream = stream async def read( self, max_bytes: int, timeout: typing.Optional[float] = None ) -> bytes: exc_map = { TimeoutError: ReadTimeout, anyio.BrokenResourceError: ReadError, anyio.ClosedResourceError: ReadError, } with map_exceptions(exc_map): with anyio.fail_after(timeout): try: return await self._stream.receive(max_bytes=max_bytes) except anyio.EndOfStream: # pragma: nocover return b"" async def write( self, buffer: bytes, timeout: typing.Optional[float] = None ) -> None: if not buffer: return exc_map = { TimeoutError: WriteTimeout, anyio.BrokenResourceError: WriteError, anyio.ClosedResourceError: WriteError, } with map_exceptions(exc_map): with anyio.fail_after(timeout): await self._stream.send(item=buffer) async def aclose(self) -> None: await self._stream.aclose() async def start_tls( self, ssl_context: ssl.SSLContext, server_hostname: typing.Optional[str] = None, timeout: typing.Optional[float] = None, ) -> AsyncNetworkStream: exc_map = { 
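# Map anyio's transport-level failures onto httpcore's connect exceptions.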
TimeoutError: ConnectTimeout, anyio.BrokenResourceError: ConnectError, } with map_exceptions(exc_map): try: with anyio.fail_after(timeout): ssl_stream = await anyio.streams.tls.TLSStream.wrap( self._stream, ssl_context=ssl_context, hostname=server_hostname, standard_compatible=False, server_side=False, ) except Exception as exc: # pragma: nocover await self.aclose() raise exc return AnyIOStream(ssl_stream) def get_extra_info(self, info: str) -> typing.Any: if info == "ssl_object": return self._stream.extra(anyio.streams.tls.TLSAttribute.ssl_object, None) if info == "client_addr": return self._stream.extra(anyio.abc.SocketAttribute.local_address, None) if info == "server_addr": return self._stream.extra(anyio.abc.SocketAttribute.remote_address, None) if info == "socket": return self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) if info == "is_readable": sock = self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) return is_socket_readable(sock) return None class AnyIOBackend(AsyncNetworkBackend): async def connect_tcp( self, host: str, port: int, timeout: typing.Optional[float] = None, local_address: typing.Optional[str] = None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> AsyncNetworkStream: if socket_options is None: socket_options = [] # pragma: no cover exc_map = { TimeoutError: ConnectTimeout, OSError: ConnectError, anyio.BrokenResourceError: ConnectError, } with map_exceptions(exc_map): with anyio.fail_after(timeout): stream: anyio.abc.ByteStream = await anyio.connect_tcp( remote_host=host, remote_port=port, local_host=local_address, ) # By default TCP sockets opened in `asyncio` include TCP_NODELAY. for option in socket_options: stream._raw_socket.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover return AnyIOStream(stream) async def connect_unix_socket( self, path: str, timeout: typing.Optional[float] = None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> AsyncNetworkStream: # pragma: nocover if socket_options is None: socket_options = [] exc_map = { TimeoutError: ConnectTimeout, OSError: ConnectError, anyio.BrokenResourceError: ConnectError, } with map_exceptions(exc_map): with anyio.fail_after(timeout): stream: anyio.abc.ByteStream = await anyio.connect_unix(path) for option in socket_options: stream._raw_socket.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover return AnyIOStream(stream) async def sleep(self, seconds: float) -> None: await anyio.sleep(seconds) # pragma: nocover httpcore-1.0.2/httpcore/_backends/auto.py000066400000000000000000000032231452343067500204360ustar00rootroot00000000000000import typing from typing import Optional from .._synchronization import current_async_library from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream class AutoBackend(AsyncNetworkBackend): async def _init_backend(self) -> None: if not (hasattr(self, "_backend")): backend = current_async_library() if backend == "trio": from .trio import TrioBackend self._backend: AsyncNetworkBackend = TrioBackend() else: from .anyio import AnyIOBackend self._backend = AnyIOBackend() async def connect_tcp( self, host: str, port: int, timeout: Optional[float] = None, local_address: Optional[str] = None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> AsyncNetworkStream: await self._init_backend() return await self._backend.connect_tcp( host, port, timeout=timeout, local_address=local_address, socket_options=socket_options, ) async def 
connect_unix_socket( self, path: str, timeout: Optional[float] = None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> AsyncNetworkStream: # pragma: nocover await self._init_backend() return await self._backend.connect_unix_socket( path, timeout=timeout, socket_options=socket_options ) async def sleep(self, seconds: float) -> None: # pragma: nocover await self._init_backend() return await self._backend.sleep(seconds) httpcore-1.0.2/httpcore/_backends/base.py000066400000000000000000000062221452343067500204020ustar00rootroot00000000000000import ssl import time import typing SOCKET_OPTION = typing.Union[ typing.Tuple[int, int, int], typing.Tuple[int, int, typing.Union[bytes, bytearray]], typing.Tuple[int, int, None, int], ] class NetworkStream: def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes: raise NotImplementedError() # pragma: nocover def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None: raise NotImplementedError() # pragma: nocover def close(self) -> None: raise NotImplementedError() # pragma: nocover def start_tls( self, ssl_context: ssl.SSLContext, server_hostname: typing.Optional[str] = None, timeout: typing.Optional[float] = None, ) -> "NetworkStream": raise NotImplementedError() # pragma: nocover def get_extra_info(self, info: str) -> typing.Any: return None # pragma: nocover class NetworkBackend: def connect_tcp( self, host: str, port: int, timeout: typing.Optional[float] = None, local_address: typing.Optional[str] = None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> NetworkStream: raise NotImplementedError() # pragma: nocover def connect_unix_socket( self, path: str, timeout: typing.Optional[float] = None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> NetworkStream: raise NotImplementedError() # pragma: nocover def sleep(self, seconds: float) -> None: time.sleep(seconds) # pragma: nocover class AsyncNetworkStream: async def read( self, max_bytes: int, timeout: typing.Optional[float] = None ) -> bytes: raise NotImplementedError() # pragma: nocover async def write( self, buffer: bytes, timeout: typing.Optional[float] = None ) -> None: raise NotImplementedError() # pragma: nocover async def aclose(self) -> None: raise NotImplementedError() # pragma: nocover async def start_tls( self, ssl_context: ssl.SSLContext, server_hostname: typing.Optional[str] = None, timeout: typing.Optional[float] = None, ) -> "AsyncNetworkStream": raise NotImplementedError() # pragma: nocover def get_extra_info(self, info: str) -> typing.Any: return None # pragma: nocover class AsyncNetworkBackend: async def connect_tcp( self, host: str, port: int, timeout: typing.Optional[float] = None, local_address: typing.Optional[str] = None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> AsyncNetworkStream: raise NotImplementedError() # pragma: nocover async def connect_unix_socket( self, path: str, timeout: typing.Optional[float] = None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> AsyncNetworkStream: raise NotImplementedError() # pragma: nocover async def sleep(self, seconds: float) -> None: raise NotImplementedError() # pragma: nocover httpcore-1.0.2/httpcore/_backends/mock.py000066400000000000000000000101231452343067500204140ustar00rootroot00000000000000import ssl import typing from typing import Optional from .._exceptions import ReadError from .base import ( SOCKET_OPTION, AsyncNetworkBackend, 
AsyncNetworkStream, NetworkBackend, NetworkStream, ) class MockSSLObject: def __init__(self, http2: bool): self._http2 = http2 def selected_alpn_protocol(self) -> str: return "h2" if self._http2 else "http/1.1" class MockStream(NetworkStream): def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: self._buffer = buffer self._http2 = http2 self._closed = False def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes: if self._closed: raise ReadError("Connection closed") if not self._buffer: return b"" return self._buffer.pop(0) def write(self, buffer: bytes, timeout: Optional[float] = None) -> None: pass def close(self) -> None: self._closed = True def start_tls( self, ssl_context: ssl.SSLContext, server_hostname: Optional[str] = None, timeout: Optional[float] = None, ) -> NetworkStream: return self def get_extra_info(self, info: str) -> typing.Any: return MockSSLObject(http2=self._http2) if info == "ssl_object" else None def __repr__(self) -> str: return "" class MockBackend(NetworkBackend): def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: self._buffer = buffer self._http2 = http2 def connect_tcp( self, host: str, port: int, timeout: Optional[float] = None, local_address: Optional[str] = None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> NetworkStream: return MockStream(list(self._buffer), http2=self._http2) def connect_unix_socket( self, path: str, timeout: Optional[float] = None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> NetworkStream: return MockStream(list(self._buffer), http2=self._http2) def sleep(self, seconds: float) -> None: pass class AsyncMockStream(AsyncNetworkStream): def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: self._buffer = buffer self._http2 = http2 self._closed = False async def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes: if self._closed: raise ReadError("Connection closed") if not self._buffer: return b"" return self._buffer.pop(0) async def write(self, buffer: bytes, timeout: Optional[float] = None) -> None: pass async def aclose(self) -> None: self._closed = True async def start_tls( self, ssl_context: ssl.SSLContext, server_hostname: Optional[str] = None, timeout: Optional[float] = None, ) -> AsyncNetworkStream: return self def get_extra_info(self, info: str) -> typing.Any: return MockSSLObject(http2=self._http2) if info == "ssl_object" else None def __repr__(self) -> str: return "" class AsyncMockBackend(AsyncNetworkBackend): def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: self._buffer = buffer self._http2 = http2 async def connect_tcp( self, host: str, port: int, timeout: Optional[float] = None, local_address: Optional[str] = None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> AsyncNetworkStream: return AsyncMockStream(list(self._buffer), http2=self._http2) async def connect_unix_socket( self, path: str, timeout: Optional[float] = None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> AsyncNetworkStream: return AsyncMockStream(list(self._buffer), http2=self._http2) async def sleep(self, seconds: float) -> None: pass httpcore-1.0.2/httpcore/_backends/sync.py000066400000000000000000000176261452343067500204560ustar00rootroot00000000000000import socket import ssl import sys import typing from functools import partial from .._exceptions import ( ConnectError, ConnectTimeout, ExceptionMapping, 
ReadError, ReadTimeout, WriteError, WriteTimeout, map_exceptions, ) from .._utils import is_socket_readable from .base import SOCKET_OPTION, NetworkBackend, NetworkStream class TLSinTLSStream(NetworkStream): # pragma: no cover """ Because the standard `SSLContext.wrap_socket` method does not work for `SSLSocket` objects, we need this class to implement TLS stream using an underlying `SSLObject` instance in order to support TLS on top of TLS. """ # Defined in RFC 8449 TLS_RECORD_SIZE = 16384 def __init__( self, sock: socket.socket, ssl_context: ssl.SSLContext, server_hostname: typing.Optional[str] = None, timeout: typing.Optional[float] = None, ): self._sock = sock self._incoming = ssl.MemoryBIO() self._outgoing = ssl.MemoryBIO() self.ssl_obj = ssl_context.wrap_bio( incoming=self._incoming, outgoing=self._outgoing, server_hostname=server_hostname, ) self._sock.settimeout(timeout) self._perform_io(self.ssl_obj.do_handshake) def _perform_io( self, func: typing.Callable[..., typing.Any], ) -> typing.Any: ret = None while True: errno = None try: ret = func() except (ssl.SSLWantReadError, ssl.SSLWantWriteError) as e: errno = e.errno self._sock.sendall(self._outgoing.read()) if errno == ssl.SSL_ERROR_WANT_READ: buf = self._sock.recv(self.TLS_RECORD_SIZE) if buf: self._incoming.write(buf) else: self._incoming.write_eof() if errno is None: return ret def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes: exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} with map_exceptions(exc_map): self._sock.settimeout(timeout) return typing.cast( bytes, self._perform_io(partial(self.ssl_obj.read, max_bytes)) ) def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None: exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError} with map_exceptions(exc_map): self._sock.settimeout(timeout) while buffer: nsent = self._perform_io(partial(self.ssl_obj.write, buffer)) buffer = buffer[nsent:] def close(self) -> None: self._sock.close() def start_tls( self, ssl_context: ssl.SSLContext, server_hostname: typing.Optional[str] = None, timeout: typing.Optional[float] = None, ) -> "NetworkStream": raise NotImplementedError() def get_extra_info(self, info: str) -> typing.Any: if info == "ssl_object": return self.ssl_obj if info == "client_addr": return self._sock.getsockname() if info == "server_addr": return self._sock.getpeername() if info == "socket": return self._sock if info == "is_readable": return is_socket_readable(self._sock) return None class SyncStream(NetworkStream): def __init__(self, sock: socket.socket) -> None: self._sock = sock def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes: exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} with map_exceptions(exc_map): self._sock.settimeout(timeout) return self._sock.recv(max_bytes) def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None: if not buffer: return exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError} with map_exceptions(exc_map): while buffer: self._sock.settimeout(timeout) n = self._sock.send(buffer) buffer = buffer[n:] def close(self) -> None: self._sock.close() def start_tls( self, ssl_context: ssl.SSLContext, server_hostname: typing.Optional[str] = None, timeout: typing.Optional[float] = None, ) -> NetworkStream: exc_map: ExceptionMapping = { socket.timeout: ConnectTimeout, OSError: ConnectError, } with map_exceptions(exc_map): try: if isinstance(self._sock, 
ssl.SSLSocket): # pragma: no cover # If the underlying socket has already been upgraded # to the TLS layer (i.e. is an instance of SSLSocket), # we need some additional smarts to support TLS-in-TLS. return TLSinTLSStream( self._sock, ssl_context, server_hostname, timeout ) else: self._sock.settimeout(timeout) sock = ssl_context.wrap_socket( self._sock, server_hostname=server_hostname ) except Exception as exc: # pragma: nocover self.close() raise exc return SyncStream(sock) def get_extra_info(self, info: str) -> typing.Any: if info == "ssl_object" and isinstance(self._sock, ssl.SSLSocket): return self._sock._sslobj # type: ignore if info == "client_addr": return self._sock.getsockname() if info == "server_addr": return self._sock.getpeername() if info == "socket": return self._sock if info == "is_readable": return is_socket_readable(self._sock) return None class SyncBackend(NetworkBackend): def connect_tcp( self, host: str, port: int, timeout: typing.Optional[float] = None, local_address: typing.Optional[str] = None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> NetworkStream: # Note that we automatically include `TCP_NODELAY` # in addition to any other custom socket options. if socket_options is None: socket_options = [] # pragma: no cover address = (host, port) source_address = None if local_address is None else (local_address, 0) exc_map: ExceptionMapping = { socket.timeout: ConnectTimeout, OSError: ConnectError, } with map_exceptions(exc_map): sock = socket.create_connection( address, timeout, source_address=source_address, ) for option in socket_options: sock.setsockopt(*option) # pragma: no cover sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) return SyncStream(sock) def connect_unix_socket( self, path: str, timeout: typing.Optional[float] = None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> NetworkStream: # pragma: nocover if sys.platform == "win32": raise RuntimeError( "Attempted to connect to a UNIX socket on a Windows system." 
) if socket_options is None: socket_options = [] exc_map: ExceptionMapping = { socket.timeout: ConnectTimeout, OSError: ConnectError, } with map_exceptions(exc_map): sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) for option in socket_options: sock.setsockopt(*option) sock.settimeout(timeout) sock.connect(path) return SyncStream(sock) httpcore-1.0.2/httpcore/_backends/trio.py000066400000000000000000000136761452343067500204600ustar00rootroot00000000000000import ssl import typing import trio from .._exceptions import ( ConnectError, ConnectTimeout, ExceptionMapping, ReadError, ReadTimeout, WriteError, WriteTimeout, map_exceptions, ) from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream class TrioStream(AsyncNetworkStream): def __init__(self, stream: trio.abc.Stream) -> None: self._stream = stream async def read( self, max_bytes: int, timeout: typing.Optional[float] = None ) -> bytes: timeout_or_inf = float("inf") if timeout is None else timeout exc_map: ExceptionMapping = { trio.TooSlowError: ReadTimeout, trio.BrokenResourceError: ReadError, trio.ClosedResourceError: ReadError, } with map_exceptions(exc_map): with trio.fail_after(timeout_or_inf): data: bytes = await self._stream.receive_some(max_bytes=max_bytes) return data async def write( self, buffer: bytes, timeout: typing.Optional[float] = None ) -> None: if not buffer: return timeout_or_inf = float("inf") if timeout is None else timeout exc_map: ExceptionMapping = { trio.TooSlowError: WriteTimeout, trio.BrokenResourceError: WriteError, trio.ClosedResourceError: WriteError, } with map_exceptions(exc_map): with trio.fail_after(timeout_or_inf): await self._stream.send_all(data=buffer) async def aclose(self) -> None: await self._stream.aclose() async def start_tls( self, ssl_context: ssl.SSLContext, server_hostname: typing.Optional[str] = None, timeout: typing.Optional[float] = None, ) -> AsyncNetworkStream: timeout_or_inf = float("inf") if timeout is None else timeout exc_map: ExceptionMapping = { trio.TooSlowError: ConnectTimeout, trio.BrokenResourceError: ConnectError, } ssl_stream = trio.SSLStream( self._stream, ssl_context=ssl_context, server_hostname=server_hostname, https_compatible=True, server_side=False, ) with map_exceptions(exc_map): try: with trio.fail_after(timeout_or_inf): await ssl_stream.do_handshake() except Exception as exc: # pragma: nocover await self.aclose() raise exc return TrioStream(ssl_stream) def get_extra_info(self, info: str) -> typing.Any: if info == "ssl_object" and isinstance(self._stream, trio.SSLStream): # Type checkers cannot see `_ssl_object` attribute because trio._ssl.SSLStream uses __getattr__/__setattr__. 
# Tracked at https://github.com/python-trio/trio/issues/542 return self._stream._ssl_object # type: ignore[attr-defined] if info == "client_addr": return self._get_socket_stream().socket.getsockname() if info == "server_addr": return self._get_socket_stream().socket.getpeername() if info == "socket": stream = self._stream while isinstance(stream, trio.SSLStream): stream = stream.transport_stream assert isinstance(stream, trio.SocketStream) return stream.socket if info == "is_readable": socket = self.get_extra_info("socket") return socket.is_readable() return None def _get_socket_stream(self) -> trio.SocketStream: stream = self._stream while isinstance(stream, trio.SSLStream): stream = stream.transport_stream assert isinstance(stream, trio.SocketStream) return stream class TrioBackend(AsyncNetworkBackend): async def connect_tcp( self, host: str, port: int, timeout: typing.Optional[float] = None, local_address: typing.Optional[str] = None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> AsyncNetworkStream: # By default for TCP sockets, trio enables TCP_NODELAY. # https://trio.readthedocs.io/en/stable/reference-io.html#trio.SocketStream if socket_options is None: socket_options = [] # pragma: no cover timeout_or_inf = float("inf") if timeout is None else timeout exc_map: ExceptionMapping = { trio.TooSlowError: ConnectTimeout, trio.BrokenResourceError: ConnectError, OSError: ConnectError, } with map_exceptions(exc_map): with trio.fail_after(timeout_or_inf): stream: trio.abc.Stream = await trio.open_tcp_stream( host=host, port=port, local_address=local_address ) for option in socket_options: stream.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover return TrioStream(stream) async def connect_unix_socket( self, path: str, timeout: typing.Optional[float] = None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> AsyncNetworkStream: # pragma: nocover if socket_options is None: socket_options = [] timeout_or_inf = float("inf") if timeout is None else timeout exc_map: ExceptionMapping = { trio.TooSlowError: ConnectTimeout, trio.BrokenResourceError: ConnectError, OSError: ConnectError, } with map_exceptions(exc_map): with trio.fail_after(timeout_or_inf): stream: trio.abc.Stream = await trio.open_unix_socket(path) for option in socket_options: stream.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover return TrioStream(stream) async def sleep(self, seconds: float) -> None: await trio.sleep(seconds) # pragma: nocover httpcore-1.0.2/httpcore/_exceptions.py000066400000000000000000000022411452343067500200740ustar00rootroot00000000000000import contextlib from typing import Iterator, Mapping, Type ExceptionMapping = Mapping[Type[Exception], Type[Exception]] @contextlib.contextmanager def map_exceptions(map: ExceptionMapping) -> Iterator[None]: try: yield except Exception as exc: # noqa: PIE786 for from_exc, to_exc in map.items(): if isinstance(exc, from_exc): raise to_exc(exc) from exc raise # pragma: nocover class ConnectionNotAvailable(Exception): pass class ProxyError(Exception): pass class UnsupportedProtocol(Exception): pass class ProtocolError(Exception): pass class RemoteProtocolError(ProtocolError): pass class LocalProtocolError(ProtocolError): pass # Timeout errors class TimeoutException(Exception): pass class PoolTimeout(TimeoutException): pass class ConnectTimeout(TimeoutException): pass class ReadTimeout(TimeoutException): pass class WriteTimeout(TimeoutException): pass # Network errors class 
NetworkError(Exception):
    pass


class ConnectError(NetworkError):
    pass


class ReadError(NetworkError):
    pass


class WriteError(NetworkError):
    pass
httpcore-1.0.2/httpcore/_models.py
from typing import (
    Any,
    AsyncIterable,
    AsyncIterator,
    Iterable,
    Iterator,
    List,
    Mapping,
    MutableMapping,
    Optional,
    Sequence,
    Tuple,
    Union,
)
from urllib.parse import urlparse

# Functions for typechecking...


HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]]
HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]]
HeaderTypes = Union[HeadersAsSequence, HeadersAsMapping, None]

Extensions = MutableMapping[str, Any]


def enforce_bytes(value: Union[bytes, str], *, name: str) -> bytes:
    """
    Any arguments that are ultimately represented as bytes can be specified
    either as bytes or as strings.

    However we enforce that any string arguments must only contain characters in
    the plain ASCII range. chr(0)...chr(127). If you need to use characters
    outside that range then be precise, and use a byte-wise argument.
    """
    if isinstance(value, str):
        try:
            return value.encode("ascii")
        except UnicodeEncodeError:
            raise TypeError(f"{name} strings may not include unicode characters.")
    elif isinstance(value, bytes):
        return value

    seen_type = type(value).__name__
    raise TypeError(f"{name} must be bytes or str, but got {seen_type}.")


def enforce_url(value: Union["URL", bytes, str], *, name: str) -> "URL":
    """
    Type check for URL parameters.
    """
    if isinstance(value, (bytes, str)):
        return URL(value)
    elif isinstance(value, URL):
        return value

    seen_type = type(value).__name__
    raise TypeError(f"{name} must be a URL, bytes, or str, but got {seen_type}.")


def enforce_headers(
    value: Union[HeadersAsMapping, HeadersAsSequence, None] = None, *, name: str
) -> List[Tuple[bytes, bytes]]:
    """
    Convenience function that ensures all items in request or response headers
    are either bytes or strings in the plain ASCII range.
    """
    if value is None:
        return []
    elif isinstance(value, Mapping):
        return [
            (
                enforce_bytes(k, name="header name"),
                enforce_bytes(v, name="header value"),
            )
            for k, v in value.items()
        ]
    elif isinstance(value, Sequence):
        return [
            (
                enforce_bytes(k, name="header name"),
                enforce_bytes(v, name="header value"),
            )
            for k, v in value
        ]

    seen_type = type(value).__name__
    raise TypeError(
        f"{name} must be a mapping or sequence of two-tuples, but got {seen_type}."
    )


def enforce_stream(
    value: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None], *, name: str
) -> Union[Iterable[bytes], AsyncIterable[bytes]]:
    if value is None:
        return ByteStream(b"")
    elif isinstance(value, bytes):
        return ByteStream(value)
    return value


# * https://tools.ietf.org/html/rfc3986#section-3.2.3
# * https://url.spec.whatwg.org/#url-miscellaneous
# * https://url.spec.whatwg.org/#scheme-state
DEFAULT_PORTS = {
    b"ftp": 21,
    b"http": 80,
    b"https": 443,
    b"ws": 80,
    b"wss": 443,
}


def include_request_headers(
    headers: List[Tuple[bytes, bytes]],
    *,
    url: "URL",
    content: Union[None, bytes, Iterable[bytes], AsyncIterable[bytes]],
) -> List[Tuple[bytes, bytes]]:
    headers_set = set(k.lower() for k, v in headers)

    if b"host" not in headers_set:
        default_port = DEFAULT_PORTS.get(url.scheme)
        if url.port is None or url.port == default_port:
            header_value = url.host
        else:
            header_value = b"%b:%d" % (url.host, url.port)
        headers = [(b"Host", header_value)] + headers

    if (
        content is not None
        and b"content-length" not in headers_set
        and b"transfer-encoding" not in headers_set
    ):
        if isinstance(content, bytes):
            content_length = str(len(content)).encode("ascii")
            headers += [(b"Content-Length", content_length)]
        else:
            headers += [(b"Transfer-Encoding", b"chunked")]  # pragma: nocover

    return headers


# Interfaces for byte streams...


class ByteStream:
    """
    A container for non-streaming content, which supports both sync and async
    stream iteration.
    """

    def __init__(self, content: bytes) -> None:
        self._content = content

    def __iter__(self) -> Iterator[bytes]:
        yield self._content

    async def __aiter__(self) -> AsyncIterator[bytes]:
        yield self._content

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} [{len(self._content)} bytes]>"


class Origin:
    def __init__(self, scheme: bytes, host: bytes, port: int) -> None:
        self.scheme = scheme
        self.host = host
        self.port = port

    def __eq__(self, other: Any) -> bool:
        return (
            isinstance(other, Origin)
            and self.scheme == other.scheme
            and self.host == other.host
            and self.port == other.port
        )

    def __str__(self) -> str:
        scheme = self.scheme.decode("ascii")
        host = self.host.decode("ascii")
        port = str(self.port)
        return f"{scheme}://{host}:{port}"


class URL:
    """
    Represents the URL against which an HTTP request may be made.

    The URL may either be specified as a plain string, for convenience:

    ```python
    url = httpcore.URL("https://www.example.com/")
    ```

    Or be constructed with explicitly pre-parsed components:

    ```python
    url = httpcore.URL(scheme=b'https', host=b'www.example.com', port=None, target=b'/')
    ```

    Using this second more explicit style allows integrations that are using
    `httpcore` to pass through URLs that have already been parsed in order to use
    libraries such as `rfc-3986` rather than relying on the stdlib. It also ensures
    that URL parsing is treated identically at both the networking level and at any
    higher layers of abstraction.

    The four components are important here, as they allow the URL to be precisely
    specified in a pre-parsed format. They also allow certain types of request to
    be created that could not otherwise be expressed.

    For example, an HTTP request to `http://www.example.com/` forwarded via a proxy
    at `http://localhost:8080`...

    ```python
    # Constructs an HTTP request with a complete URL as the target:
    # GET http://www.example.com/ HTTP/1.1
    url = httpcore.URL(
        scheme=b'http',
        host=b'localhost',
        port=8080,
        target=b'http://www.example.com/'
    )
    request = httpcore.Request(
        method="GET",
        url=url
    )
    ```

    Another example is constructing an `OPTIONS *` request...

    ```python
    # Constructs an 'OPTIONS *' HTTP request:
    # OPTIONS * HTTP/1.1
    url = httpcore.URL(scheme=b'https', host=b'www.example.com', target=b'*')
    request = httpcore.Request(method="OPTIONS", url=url)
    ```

    This kind of request is not possible to formulate with a URL string,
    because the `/` delimiter is always used to demarcate the target from the
    host/port portion of the URL.

    For convenience, string-like arguments may be specified either as strings or
    as bytes. However, once a request is being issued over-the-wire, the URL
    components are always ultimately required to be a bytewise representation.

    In order to avoid any ambiguity over character encodings, when strings are
    used as arguments, they must be strictly limited to the ASCII range
    `chr(0)`-`chr(127)`. If you require a bytewise representation that is outside
    this range you must handle the character encoding directly, and pass a bytes
    instance.
    """

    def __init__(
        self,
        url: Union[bytes, str] = "",
        *,
        scheme: Union[bytes, str] = b"",
        host: Union[bytes, str] = b"",
        port: Optional[int] = None,
        target: Union[bytes, str] = b"",
    ) -> None:
        """
        Parameters:
            url: The complete URL as a string or bytes.
            scheme: The URL scheme as a string or bytes.
                Typically either `"http"` or `"https"`.
            host: The URL host as a string or bytes. Such as `"www.example.com"`.
            port: The port to connect to. Either an integer or `None`.
            target: The target of the HTTP request. Such as `"/items?search=red"`.
        """
        if url:
            parsed = urlparse(enforce_bytes(url, name="url"))
            self.scheme = parsed.scheme
            self.host = parsed.hostname or b""
            self.port = parsed.port
            self.target = (parsed.path or b"/") + (
                b"?" + parsed.query if parsed.query else b""
            )
        else:
            self.scheme = enforce_bytes(scheme, name="scheme")
            self.host = enforce_bytes(host, name="host")
            self.port = port
            self.target = enforce_bytes(target, name="target")

    @property
    def origin(self) -> Origin:
        default_port = {
            b"http": 80,
            b"https": 443,
            b"ws": 80,
            b"wss": 443,
            b"socks5": 1080,
        }[self.scheme]
        return Origin(
            scheme=self.scheme, host=self.host, port=self.port or default_port
        )

    def __eq__(self, other: Any) -> bool:
        return (
            isinstance(other, URL)
            and other.scheme == self.scheme
            and other.host == self.host
            and other.port == self.port
            and other.target == self.target
        )

    def __bytes__(self) -> bytes:
        if self.port is None:
            return b"%b://%b%b" % (self.scheme, self.host, self.target)
        return b"%b://%b:%d%b" % (self.scheme, self.host, self.port, self.target)

    def __repr__(self) -> str:
        return (
            f"{self.__class__.__name__}(scheme={self.scheme!r}, "
            f"host={self.host!r}, port={self.port!r}, target={self.target!r})"
        )


class Request:
    """
    An HTTP request.
    """

    def __init__(
        self,
        method: Union[bytes, str],
        url: Union[URL, bytes, str],
        *,
        headers: HeaderTypes = None,
        content: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None] = None,
        extensions: Optional[Extensions] = None,
    ) -> None:
        """
        Parameters:
            method: The HTTP request method, either as a string or bytes.
                For example: `GET`.
            url: The request URL, either as a `URL` instance, or as a string
                or bytes. For example: `"https://www.example.com"`.
            headers: The HTTP request headers.
            content: The content of the request body.
            extensions: A dictionary of optional extra information included on
                the request. Possible keys include `"timeout"`, and `"trace"`.
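
        For example, a minimal construction sketch (the URL, header, and
        timeout values here are illustrative, not defaults):

        ```python
        request = httpcore.Request(
            "GET",
            "https://www.example.com/",
            headers={b"Accept": b"*/*"},
            extensions={"timeout": {"connect": 5.0, "read": 5.0}},
        )
        ```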
""" self.method: bytes = enforce_bytes(method, name="method") self.url: URL = enforce_url(url, name="url") self.headers: List[Tuple[bytes, bytes]] = enforce_headers( headers, name="headers" ) self.stream: Union[Iterable[bytes], AsyncIterable[bytes]] = enforce_stream( content, name="content" ) self.extensions = {} if extensions is None else extensions def __repr__(self) -> str: return f"<{self.__class__.__name__} [{self.method!r}]>" class Response: """ An HTTP response. """ def __init__( self, status: int, *, headers: HeaderTypes = None, content: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None] = None, extensions: Optional[Extensions] = None, ) -> None: """ Parameters: status: The HTTP status code of the response. For example `200`. headers: The HTTP response headers. content: The content of the response body. extensions: A dictionary of optional extra information included on the responseself.Possible keys include `"http_version"`, `"reason_phrase"`, and `"network_stream"`. """ self.status: int = status self.headers: List[Tuple[bytes, bytes]] = enforce_headers( headers, name="headers" ) self.stream: Union[Iterable[bytes], AsyncIterable[bytes]] = enforce_stream( content, name="content" ) self.extensions = {} if extensions is None else extensions self._stream_consumed = False @property def content(self) -> bytes: if not hasattr(self, "_content"): if isinstance(self.stream, Iterable): raise RuntimeError( "Attempted to access 'response.content' on a streaming response. " "Call 'response.read()' first." ) else: raise RuntimeError( "Attempted to access 'response.content' on a streaming response. " "Call 'await response.aread()' first." ) return self._content def __repr__(self) -> str: return f"<{self.__class__.__name__} [{self.status}]>" # Sync interface... def read(self) -> bytes: if not isinstance(self.stream, Iterable): # pragma: nocover raise RuntimeError( "Attempted to read an asynchronous response using 'response.read()'. " "You should use 'await response.aread()' instead." ) if not hasattr(self, "_content"): self._content = b"".join([part for part in self.iter_stream()]) return self._content def iter_stream(self) -> Iterator[bytes]: if not isinstance(self.stream, Iterable): # pragma: nocover raise RuntimeError( "Attempted to stream an asynchronous response using 'for ... in " "response.iter_stream()'. " "You should use 'async for ... in response.aiter_stream()' instead." ) if self._stream_consumed: raise RuntimeError( "Attempted to call 'for ... in response.iter_stream()' more than once." ) self._stream_consumed = True for chunk in self.stream: yield chunk def close(self) -> None: if not isinstance(self.stream, Iterable): # pragma: nocover raise RuntimeError( "Attempted to close an asynchronous response using 'response.close()'. " "You should use 'await response.aclose()' instead." ) if hasattr(self.stream, "close"): self.stream.close() # Async interface... async def aread(self) -> bytes: if not isinstance(self.stream, AsyncIterable): # pragma: nocover raise RuntimeError( "Attempted to read an synchronous response using " "'await response.aread()'. " "You should use 'response.read()' instead." ) if not hasattr(self, "_content"): self._content = b"".join([part async for part in self.aiter_stream()]) return self._content async def aiter_stream(self) -> AsyncIterator[bytes]: if not isinstance(self.stream, AsyncIterable): # pragma: nocover raise RuntimeError( "Attempted to stream an synchronous response using 'async for ... in " "response.aiter_stream()'. " "You should use 'for ... 
in response.iter_stream()' instead." ) if self._stream_consumed: raise RuntimeError( "Attempted to call 'async for ... in response.aiter_stream()' " "more than once." ) self._stream_consumed = True async for chunk in self.stream: yield chunk async def aclose(self) -> None: if not isinstance(self.stream, AsyncIterable): # pragma: nocover raise RuntimeError( "Attempted to close a synchronous response using " "'await response.aclose()'. " "You should use 'response.close()' instead." ) if hasattr(self.stream, "aclose"): await self.stream.aclose() httpcore-1.0.2/httpcore/_ssl.py000066400000000000000000000002731452343067500165170ustar00rootroot00000000000000import ssl import certifi def default_ssl_context() -> ssl.SSLContext: context = ssl.create_default_context() context.load_verify_locations(certifi.where()) return context httpcore-1.0.2/httpcore/_sync/000077500000000000000000000000001452343067500163165ustar00rootroot00000000000000httpcore-1.0.2/httpcore/_sync/__init__.py000066400000000000000000000021651452343067500204330ustar00rootroot00000000000000from .connection import HTTPConnection from .connection_pool import ConnectionPool from .http11 import HTTP11Connection from .http_proxy import HTTPProxy from .interfaces import ConnectionInterface try: from .http2 import HTTP2Connection except ImportError: # pragma: nocover class HTTP2Connection: # type: ignore def __init__(self, *args, **kwargs) -> None: # type: ignore raise RuntimeError( "Attempted to use http2 support, but the `h2` package is not " "installed. Use 'pip install httpcore[http2]'." ) try: from .socks_proxy import SOCKSProxy except ImportError: # pragma: nocover class SOCKSProxy: # type: ignore def __init__(self, *args, **kwargs) -> None: # type: ignore raise RuntimeError( "Attempted to use SOCKS support, but the `socksio` package is not " "installed. Use 'pip install httpcore[socks]'." ) __all__ = [ "HTTPConnection", "ConnectionPool", "HTTPProxy", "HTTP11Connection", "HTTP2Connection", "ConnectionInterface", "SOCKSProxy", ] httpcore-1.0.2/httpcore/_sync/connection.py000066400000000000000000000203301452343067500210250ustar00rootroot00000000000000import itertools import logging import ssl from types import TracebackType from typing import Iterable, Iterator, Optional, Type from .._backends.sync import SyncBackend from .._backends.base import SOCKET_OPTION, NetworkBackend, NetworkStream from .._exceptions import ConnectError, ConnectionNotAvailable, ConnectTimeout from .._models import Origin, Request, Response from .._ssl import default_ssl_context from .._synchronization import Lock from .._trace import Trace from .http11 import HTTP11Connection from .interfaces import ConnectionInterface RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. logger = logging.getLogger("httpcore.connection") def exponential_backoff(factor: float) -> Iterator[float]: """ Generate a geometric sequence that has a ratio of 2 and starts with 0. 
For example: - `factor = 2`: `0, 2, 4, 8, 16, 32, 64, ...` - `factor = 3`: `0, 3, 6, 12, 24, 48, 96, ...` """ yield 0 for n in itertools.count(): yield factor * 2**n class HTTPConnection(ConnectionInterface): def __init__( self, origin: Origin, ssl_context: Optional[ssl.SSLContext] = None, keepalive_expiry: Optional[float] = None, http1: bool = True, http2: bool = False, retries: int = 0, local_address: Optional[str] = None, uds: Optional[str] = None, network_backend: Optional[NetworkBackend] = None, socket_options: Optional[Iterable[SOCKET_OPTION]] = None, ) -> None: self._origin = origin self._ssl_context = ssl_context self._keepalive_expiry = keepalive_expiry self._http1 = http1 self._http2 = http2 self._retries = retries self._local_address = local_address self._uds = uds self._network_backend: NetworkBackend = ( SyncBackend() if network_backend is None else network_backend ) self._connection: Optional[ConnectionInterface] = None self._connect_failed: bool = False self._request_lock = Lock() self._socket_options = socket_options def handle_request(self, request: Request) -> Response: if not self.can_handle_request(request.url.origin): raise RuntimeError( f"Attempted to send request to {request.url.origin} on connection to {self._origin}" ) with self._request_lock: if self._connection is None: try: stream = self._connect(request) ssl_object = stream.get_extra_info("ssl_object") http2_negotiated = ( ssl_object is not None and ssl_object.selected_alpn_protocol() == "h2" ) if http2_negotiated or (self._http2 and not self._http1): from .http2 import HTTP2Connection self._connection = HTTP2Connection( origin=self._origin, stream=stream, keepalive_expiry=self._keepalive_expiry, ) else: self._connection = HTTP11Connection( origin=self._origin, stream=stream, keepalive_expiry=self._keepalive_expiry, ) except Exception as exc: self._connect_failed = True raise exc elif not self._connection.is_available(): raise ConnectionNotAvailable() return self._connection.handle_request(request) def _connect(self, request: Request) -> NetworkStream: timeouts = request.extensions.get("timeout", {}) sni_hostname = request.extensions.get("sni_hostname", None) timeout = timeouts.get("connect", None) retries_left = self._retries delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) while True: try: if self._uds is None: kwargs = { "host": self._origin.host.decode("ascii"), "port": self._origin.port, "local_address": self._local_address, "timeout": timeout, "socket_options": self._socket_options, } with Trace("connect_tcp", logger, request, kwargs) as trace: stream = self._network_backend.connect_tcp(**kwargs) trace.return_value = stream else: kwargs = { "path": self._uds, "timeout": timeout, "socket_options": self._socket_options, } with Trace( "connect_unix_socket", logger, request, kwargs ) as trace: stream = self._network_backend.connect_unix_socket( **kwargs ) trace.return_value = stream if self._origin.scheme == b"https": ssl_context = ( default_ssl_context() if self._ssl_context is None else self._ssl_context ) alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] ssl_context.set_alpn_protocols(alpn_protocols) kwargs = { "ssl_context": ssl_context, "server_hostname": sni_hostname or self._origin.host.decode("ascii"), "timeout": timeout, } with Trace("start_tls", logger, request, kwargs) as trace: stream = stream.start_tls(**kwargs) trace.return_value = stream return stream except (ConnectError, ConnectTimeout): if retries_left <= 0: raise retries_left -= 1 delay = next(delays) with 
Trace("retry", logger, request, kwargs) as trace: self._network_backend.sleep(delay) def can_handle_request(self, origin: Origin) -> bool: return origin == self._origin def close(self) -> None: if self._connection is not None: with Trace("close", logger, None, {}): self._connection.close() def is_available(self) -> bool: if self._connection is None: # If HTTP/2 support is enabled, and the resulting connection could # end up as HTTP/2 then we should indicate the connection as being # available to service multiple requests. return ( self._http2 and (self._origin.scheme == b"https" or not self._http1) and not self._connect_failed ) return self._connection.is_available() def has_expired(self) -> bool: if self._connection is None: return self._connect_failed return self._connection.has_expired() def is_idle(self) -> bool: if self._connection is None: return self._connect_failed return self._connection.is_idle() def is_closed(self) -> bool: if self._connection is None: return self._connect_failed return self._connection.is_closed() def info(self) -> str: if self._connection is None: return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" return self._connection.info() def __repr__(self) -> str: return f"<{self.__class__.__name__} [{self.info()}]>" # These context managers are not used in the standard flow, but are # useful for testing or working with connection instances directly. def __enter__(self) -> "HTTPConnection": return self def __exit__( self, exc_type: Optional[Type[BaseException]] = None, exc_value: Optional[BaseException] = None, traceback: Optional[TracebackType] = None, ) -> None: self.close() httpcore-1.0.2/httpcore/_sync/connection_pool.py000066400000000000000000000340601452343067500220630ustar00rootroot00000000000000import ssl import sys import time from types import TracebackType from typing import Iterable, Iterator, Iterable, List, Optional, Type from .._backends.sync import SyncBackend from .._backends.base import SOCKET_OPTION, NetworkBackend from .._exceptions import ConnectionNotAvailable, PoolTimeout, UnsupportedProtocol from .._models import Origin, Request, Response from .._synchronization import Event, Lock, ShieldCancellation from .connection import HTTPConnection from .interfaces import ConnectionInterface, RequestInterface class RequestStatus: def __init__(self, request: Request): self.request = request self.connection: Optional[ConnectionInterface] = None self._connection_acquired = Event() def set_connection(self, connection: ConnectionInterface) -> None: assert self.connection is None self.connection = connection self._connection_acquired.set() def unset_connection(self) -> None: assert self.connection is not None self.connection = None self._connection_acquired = Event() def wait_for_connection( self, timeout: Optional[float] = None ) -> ConnectionInterface: if self.connection is None: self._connection_acquired.wait(timeout=timeout) assert self.connection is not None return self.connection class ConnectionPool(RequestInterface): """ A connection pool for making HTTP requests. 
""" def __init__( self, ssl_context: Optional[ssl.SSLContext] = None, max_connections: Optional[int] = 10, max_keepalive_connections: Optional[int] = None, keepalive_expiry: Optional[float] = None, http1: bool = True, http2: bool = False, retries: int = 0, local_address: Optional[str] = None, uds: Optional[str] = None, network_backend: Optional[NetworkBackend] = None, socket_options: Optional[Iterable[SOCKET_OPTION]] = None, ) -> None: """ A connection pool for making HTTP requests. Parameters: ssl_context: An SSL context to use for verifying connections. If not specified, the default `httpcore.default_ssl_context()` will be used. max_connections: The maximum number of concurrent HTTP connections that the pool should allow. Any attempt to send a request on a pool that would exceed this amount will block until a connection is available. max_keepalive_connections: The maximum number of idle HTTP connections that will be maintained in the pool. keepalive_expiry: The duration in seconds that an idle HTTP connection may be maintained for before being expired from the pool. http1: A boolean indicating if HTTP/1.1 requests should be supported by the connection pool. Defaults to True. http2: A boolean indicating if HTTP/2 requests should be supported by the connection pool. Defaults to False. retries: The maximum number of retries when trying to establish a connection. local_address: Local address to connect from. Can also be used to connect using a particular address family. Using `local_address="0.0.0.0"` will connect using an `AF_INET` address (IPv4), while using `local_address="::"` will connect using an `AF_INET6` address (IPv6). uds: Path to a Unix Domain Socket to use instead of TCP sockets. network_backend: A backend instance to use for handling network I/O. socket_options: Socket options that have to be included in the TCP socket when the connection was established. """ self._ssl_context = ssl_context self._max_connections = ( sys.maxsize if max_connections is None else max_connections ) self._max_keepalive_connections = ( sys.maxsize if max_keepalive_connections is None else max_keepalive_connections ) self._max_keepalive_connections = min( self._max_connections, self._max_keepalive_connections ) self._keepalive_expiry = keepalive_expiry self._http1 = http1 self._http2 = http2 self._retries = retries self._local_address = local_address self._uds = uds self._pool: List[ConnectionInterface] = [] self._requests: List[RequestStatus] = [] self._pool_lock = Lock() self._network_backend = ( SyncBackend() if network_backend is None else network_backend ) self._socket_options = socket_options def create_connection(self, origin: Origin) -> ConnectionInterface: return HTTPConnection( origin=origin, ssl_context=self._ssl_context, keepalive_expiry=self._keepalive_expiry, http1=self._http1, http2=self._http2, retries=self._retries, local_address=self._local_address, uds=self._uds, network_backend=self._network_backend, socket_options=self._socket_options, ) @property def connections(self) -> List[ConnectionInterface]: """ Return a list of the connections currently in the pool. For example: ```python >>> pool.connections [ , , , ] ``` """ return list(self._pool) def _attempt_to_acquire_connection(self, status: RequestStatus) -> bool: """ Attempt to provide a connection that can handle the given origin. """ origin = status.request.url.origin # If there are queued requests in front of us, then don't acquire a # connection. We handle requests strictly in order. 
waiting = [s for s in self._requests if s.connection is None] if waiting and waiting[0] is not status: return False # Reuse an existing connection if one is currently available. for idx, connection in enumerate(self._pool): if connection.can_handle_request(origin) and connection.is_available(): self._pool.pop(idx) self._pool.insert(0, connection) status.set_connection(connection) return True # If the pool is currently full, attempt to close one idle connection. if len(self._pool) >= self._max_connections: for idx, connection in reversed(list(enumerate(self._pool))): if connection.is_idle(): connection.close() self._pool.pop(idx) break # If the pool is still full, then we cannot acquire a connection. if len(self._pool) >= self._max_connections: return False # Otherwise create a new connection. connection = self.create_connection(origin) self._pool.insert(0, connection) status.set_connection(connection) return True def _close_expired_connections(self) -> None: """ Clean up the connection pool by closing off any connections that have expired. """ # Close any connections that have expired their keep-alive time. for idx, connection in reversed(list(enumerate(self._pool))): if connection.has_expired(): connection.close() self._pool.pop(idx) # If the pool size exceeds the maximum number of allowed keep-alive connections, # then close off idle connections as required. pool_size = len(self._pool) for idx, connection in reversed(list(enumerate(self._pool))): if connection.is_idle() and pool_size > self._max_keepalive_connections: connection.close() self._pool.pop(idx) pool_size -= 1 def handle_request(self, request: Request) -> Response: """ Send an HTTP request, and return an HTTP response. This is the core implementation that is called into by `.request()` or `.stream()`. """ scheme = request.url.scheme.decode() if scheme == "": raise UnsupportedProtocol( "Request URL is missing an 'http://' or 'https://' protocol." ) if scheme not in ("http", "https", "ws", "wss"): raise UnsupportedProtocol( f"Request URL has an unsupported protocol '{scheme}://'." ) status = RequestStatus(request) timeouts = request.extensions.get("timeout", {}) timeout = timeouts.get("pool", None) if timeout is not None: deadline = time.monotonic() + timeout else: deadline = float("inf") with self._pool_lock: self._requests.append(status) self._close_expired_connections() self._attempt_to_acquire_connection(status) while True: try: connection = status.wait_for_connection(timeout=timeout) except BaseException as exc: # If we timeout here, or if the task is cancelled, then make # sure to remove the request from the queue before bubbling # up the exception. with self._pool_lock: # Ensure only remove when task exists. if status in self._requests: self._requests.remove(status) raise exc try: response = connection.handle_request(request) except ConnectionNotAvailable: # The ConnectionNotAvailable exception is a special case, that # indicates we need to retry the request on a new connection. # # The most common case where this can occur is when multiple # requests are queued waiting for a single connection, which # might end up as an HTTP/2 connection, but which actually ends # up as HTTP/1.1. with self._pool_lock: # Maintain our position in the request queue, but reset the # status so that the request becomes queued again. 
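                    # (Illustrative note: `unset_connection()` clears the
                    # assigned connection and re-arms the status event, so the
                    # `wait_for_connection()` call at the top of this loop will
                    # block again until a new connection is acquired.)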
                    status.unset_connection()
                    self._attempt_to_acquire_connection(status)
            except BaseException as exc:
                with ShieldCancellation():
                    self.response_closed(status)
                raise exc
            else:
                break

            timeout = deadline - time.monotonic()
            if timeout < 0:
                raise PoolTimeout  # pragma: nocover

        # When we return the response, we wrap the stream in a special class
        # that handles notifying the connection pool once the response
        # has been released.
        assert isinstance(response.stream, Iterable)
        return Response(
            status=response.status,
            headers=response.headers,
            content=ConnectionPoolByteStream(response.stream, self, status),
            extensions=response.extensions,
        )

    def response_closed(self, status: RequestStatus) -> None:
        """
        This method acts as a callback once the request/response cycle is complete.

        It is called into from the `ConnectionPoolByteStream.close()` method.
        """
        assert status.connection is not None
        connection = status.connection

        with self._pool_lock:
            # Update the state of the connection pool.
            if status in self._requests:
                self._requests.remove(status)

            if connection.is_closed() and connection in self._pool:
                self._pool.remove(connection)

            # Since we've had a response closed, it's possible we'll now be able
            # to service one or more requests that are currently pending.
            for status in self._requests:
                if status.connection is None:
                    acquired = self._attempt_to_acquire_connection(status)
                    # If we could not acquire a connection for a queued request
                    # then we don't need to check any more requests that are
                    # queued later behind it.
                    if not acquired:
                        break

            # Housekeeping.
            self._close_expired_connections()

    def close(self) -> None:
        """
        Close any connections in the pool.
        """
        with self._pool_lock:
            for connection in self._pool:
                connection.close()
            self._pool = []
            self._requests = []

    def __enter__(self) -> "ConnectionPool":
        # Acquiring the pool lock here ensures that we have the
        # correct dependencies installed as early as possible.
        with self._pool_lock:
            pass
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]] = None,
        exc_value: Optional[BaseException] = None,
        traceback: Optional[TracebackType] = None,
    ) -> None:
        self.close()


class ConnectionPoolByteStream:
    """
    A wrapper around the response byte stream, that additionally handles
    notifying the connection pool when the response has been closed.
    """

    def __init__(
        self,
        stream: Iterable[bytes],
        pool: ConnectionPool,
        status: RequestStatus,
    ) -> None:
        self._stream = stream
        self._pool = pool
        self._status = status

    def __iter__(self) -> Iterator[bytes]:
        for part in self._stream:
            yield part

    def close(self) -> None:
        try:
            if hasattr(self._stream, "close"):
                self._stream.close()
        finally:
            with ShieldCancellation():
                self._pool.response_closed(self._status)
httpcore-1.0.2/httpcore/_sync/http11.py
import enum
import logging
import time
from types import TracebackType
from typing import (
    Iterable,
    Iterator,
    List,
    Optional,
    Tuple,
    Type,
    Union,
    cast,
)

import h11

from .._backends.base import NetworkStream
from .._exceptions import (
    ConnectionNotAvailable,
    LocalProtocolError,
    RemoteProtocolError,
    WriteError,
    map_exceptions,
)
from .._models import Origin, Request, Response
from .._synchronization import Lock, ShieldCancellation
from .._trace import Trace
from .interfaces import ConnectionInterface

logger = logging.getLogger("httpcore.http11")


# A subset of `h11.Event` types supported by `_send_event`
H11SendEvent = Union[
    h11.Request,
    h11.Data,
    h11.EndOfMessage,
]


class HTTPConnectionState(enum.IntEnum):
    NEW = 0
    ACTIVE = 1
    IDLE = 2
    CLOSED = 3


class HTTP11Connection(ConnectionInterface):
    READ_NUM_BYTES = 64 * 1024
    MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024

    def __init__(
        self,
        origin: Origin,
        stream: NetworkStream,
        keepalive_expiry: Optional[float] = None,
    ) -> None:
        self._origin = origin
        self._network_stream = stream
        self._keepalive_expiry: Optional[float] = keepalive_expiry
        self._expire_at: Optional[float] = None
        self._state = HTTPConnectionState.NEW
        self._state_lock = Lock()
        self._request_count = 0
        self._h11_state = h11.Connection(
            our_role=h11.CLIENT,
            max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE,
        )

    def handle_request(self, request: Request) -> Response:
        if not self.can_handle_request(request.url.origin):
            raise RuntimeError(
                f"Attempted to send request to {request.url.origin} on connection "
                f"to {self._origin}"
            )

        with self._state_lock:
            if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE):
                self._request_count += 1
                self._state = HTTPConnectionState.ACTIVE
                self._expire_at = None
            else:
                raise ConnectionNotAvailable()

        try:
            kwargs = {"request": request}
            try:
                with Trace(
                    "send_request_headers", logger, request, kwargs
                ) as trace:
                    self._send_request_headers(**kwargs)
                with Trace("send_request_body", logger, request, kwargs) as trace:
                    self._send_request_body(**kwargs)
            except WriteError:
                # If we get a write error while we're writing the request,
                # then we suppress this error and move on to attempting to
                # read the response. Servers can sometimes close the request
                # pre-emptively and then respond with a well formed HTTP
                # error response.
                pass

            with Trace(
                "receive_response_headers", logger, request, kwargs
            ) as trace:
                (
                    http_version,
                    status,
                    reason_phrase,
                    headers,
                ) = self._receive_response_headers(**kwargs)
                trace.return_value = (
                    http_version,
                    status,
                    reason_phrase,
                    headers,
                )

            return Response(
                status=status,
                headers=headers,
                content=HTTP11ConnectionByteStream(self, request),
                extensions={
                    "http_version": http_version,
                    "reason_phrase": reason_phrase,
                    "network_stream": self._network_stream,
                },
            )
        except BaseException as exc:
            with ShieldCancellation():
                with Trace("response_closed", logger, request) as trace:
                    self._response_closed()
            raise exc

    # Sending the request...
    def _send_request_headers(self, request: Request) -> None:
        timeouts = request.extensions.get("timeout", {})
        timeout = timeouts.get("write", None)

        with map_exceptions({h11.LocalProtocolError: LocalProtocolError}):
            event = h11.Request(
                method=request.method,
                target=request.url.target,
                headers=request.headers,
            )
        self._send_event(event, timeout=timeout)

    def _send_request_body(self, request: Request) -> None:
        timeouts = request.extensions.get("timeout", {})
        timeout = timeouts.get("write", None)

        assert isinstance(request.stream, Iterable)
        for chunk in request.stream:
            event = h11.Data(data=chunk)
            self._send_event(event, timeout=timeout)

        self._send_event(h11.EndOfMessage(), timeout=timeout)

    def _send_event(
        self, event: h11.Event, timeout: Optional[float] = None
    ) -> None:
        bytes_to_send = self._h11_state.send(event)
        if bytes_to_send is not None:
            self._network_stream.write(bytes_to_send, timeout=timeout)

    # Receiving the response...

    def _receive_response_headers(
        self, request: Request
    ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]]]:
        timeouts = request.extensions.get("timeout", {})
        timeout = timeouts.get("read", None)

        while True:
            event = self._receive_event(timeout=timeout)
            if isinstance(event, h11.Response):
                break
            if (
                isinstance(event, h11.InformationalResponse)
                and event.status_code == 101
            ):
                break

        http_version = b"HTTP/" + event.http_version

        # h11 version 0.11+ supports a `raw_items` interface to get the
        # raw header casing, rather than the enforced lowercase headers.
        headers = event.headers.raw_items()

        return http_version, event.status_code, event.reason, headers

    def _receive_response_body(self, request: Request) -> Iterator[bytes]:
        timeouts = request.extensions.get("timeout", {})
        timeout = timeouts.get("read", None)

        while True:
            event = self._receive_event(timeout=timeout)
            if isinstance(event, h11.Data):
                yield bytes(event.data)
            elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)):
                break

    def _receive_event(
        self, timeout: Optional[float] = None
    ) -> Union[h11.Event, Type[h11.PAUSED]]:
        while True:
            with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}):
                event = self._h11_state.next_event()

            if event is h11.NEED_DATA:
                data = self._network_stream.read(
                    self.READ_NUM_BYTES, timeout=timeout
                )

                # If we feed this case through h11 we'll raise an exception like:
                #
                #     httpcore.RemoteProtocolError: can't handle event type
                #     ConnectionClosed when role=SERVER and state=SEND_RESPONSE
                #
                # Which is accurate, but not very informative from an end-user
                # perspective. Instead we handle this case distinctly and treat
                # it as a RemoteProtocolError with a clearer message.
                if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE:
                    msg = "Server disconnected without sending a response."
                    raise RemoteProtocolError(msg)

                self._h11_state.receive_data(data)
            else:
                # mypy fails to narrow the type in the if statement above
                return cast(Union[h11.Event, Type[h11.PAUSED]], event)

    def _response_closed(self) -> None:
        with self._state_lock:
            if (
                self._h11_state.our_state is h11.DONE
                and self._h11_state.their_state is h11.DONE
            ):
                self._state = HTTPConnectionState.IDLE
                self._h11_state.start_next_cycle()
                if self._keepalive_expiry is not None:
                    now = time.monotonic()
                    self._expire_at = now + self._keepalive_expiry
            else:
                self.close()

    # Once the connection is no longer required...

    def close(self) -> None:
        # Note that this method unilaterally closes the connection, and does
        # not have any kind of locking in place around it.
self._state = HTTPConnectionState.CLOSED self._network_stream.close() # The ConnectionInterface methods provide information about the state of # the connection, allowing for a connection pooling implementation to # determine when to reuse and when to close the connection... def can_handle_request(self, origin: Origin) -> bool: return origin == self._origin def is_available(self) -> bool: # Note that HTTP/1.1 connections in the "NEW" state are not treated as # being "available". The control flow which created the connection will # be able to send an outgoing request, but the connection will not be # acquired from the connection pool for any other request. return self._state == HTTPConnectionState.IDLE def has_expired(self) -> bool: now = time.monotonic() keepalive_expired = self._expire_at is not None and now > self._expire_at # If the HTTP connection is idle but the socket is readable, then the # only valid state is that the socket is about to return b"", indicating # a server-initiated disconnect. server_disconnected = ( self._state == HTTPConnectionState.IDLE and self._network_stream.get_extra_info("is_readable") ) return keepalive_expired or server_disconnected def is_idle(self) -> bool: return self._state == HTTPConnectionState.IDLE def is_closed(self) -> bool: return self._state == HTTPConnectionState.CLOSED def info(self) -> str: origin = str(self._origin) return ( f"{origin!r}, HTTP/1.1, {self._state.name}, " f"Request Count: {self._request_count}" ) def __repr__(self) -> str: class_name = self.__class__.__name__ origin = str(self._origin) return ( f"<{class_name} [{origin!r}, {self._state.name}, " f"Request Count: {self._request_count}]>" ) # These context managers are not used in the standard flow, but are # useful for testing or working with connection instances directly. def __enter__(self) -> "HTTP11Connection": return self def __exit__( self, exc_type: Optional[Type[BaseException]] = None, exc_value: Optional[BaseException] = None, traceback: Optional[TracebackType] = None, ) -> None: self.close() class HTTP11ConnectionByteStream: def __init__(self, connection: HTTP11Connection, request: Request) -> None: self._connection = connection self._request = request self._closed = False def __iter__(self) -> Iterator[bytes]: kwargs = {"request": self._request} try: with Trace("receive_response_body", logger, self._request, kwargs): for chunk in self._connection._receive_response_body(**kwargs): yield chunk except BaseException as exc: # If we get an exception while streaming the response, # we want to close the response (and possibly the connection) # before raising that exception. 
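            # (Illustrative note: the cancellation shield here means that even
            # if the original exception was a cancellation, the cleanup in
            # `self.close()` still runs to completion rather than being
            # interrupted part-way through, which could otherwise leave the
            # connection in an inconsistent state.)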
with ShieldCancellation(): self.close() raise exc def close(self) -> None: if not self._closed: self._closed = True with Trace("response_closed", logger, self._request): self._connection._response_closed() httpcore-1.0.2/httpcore/_sync/http2.py000066400000000000000000000554571452343067500177510ustar00rootroot00000000000000import enum import logging import time import types import typing import h2.config import h2.connection import h2.events import h2.exceptions import h2.settings from .._backends.base import NetworkStream from .._exceptions import ( ConnectionNotAvailable, LocalProtocolError, RemoteProtocolError, ) from .._models import Origin, Request, Response from .._synchronization import Lock, Semaphore, ShieldCancellation from .._trace import Trace from .interfaces import ConnectionInterface logger = logging.getLogger("httpcore.http2") def has_body_headers(request: Request) -> bool: return any( k.lower() == b"content-length" or k.lower() == b"transfer-encoding" for k, v in request.headers ) class HTTPConnectionState(enum.IntEnum): ACTIVE = 1 IDLE = 2 CLOSED = 3 class HTTP2Connection(ConnectionInterface): READ_NUM_BYTES = 64 * 1024 CONFIG = h2.config.H2Configuration(validate_inbound_headers=False) def __init__( self, origin: Origin, stream: NetworkStream, keepalive_expiry: typing.Optional[float] = None, ): self._origin = origin self._network_stream = stream self._keepalive_expiry: typing.Optional[float] = keepalive_expiry self._h2_state = h2.connection.H2Connection(config=self.CONFIG) self._state = HTTPConnectionState.IDLE self._expire_at: typing.Optional[float] = None self._request_count = 0 self._init_lock = Lock() self._state_lock = Lock() self._read_lock = Lock() self._write_lock = Lock() self._sent_connection_init = False self._used_all_stream_ids = False self._connection_error = False # Mapping from stream ID to response stream events. self._events: typing.Dict[ int, typing.Union[ h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded, h2.events.StreamReset, ], ] = {} # Connection terminated events are stored as state since # we need to handle them for all streams. self._connection_terminated: typing.Optional[ h2.events.ConnectionTerminated ] = None self._read_exception: typing.Optional[Exception] = None self._write_exception: typing.Optional[Exception] = None def handle_request(self, request: Request) -> Response: if not self.can_handle_request(request.url.origin): # This cannot occur in normal operation, since the connection pool # will only send requests on connections that handle them. # It's in place simply for resilience as a guard against incorrect # usage, for anyone working directly with httpcore connections. 
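            # (For illustration, "working directly" means e.g. constructing an
            # `HTTP2Connection(origin=..., stream=...)` yourself and then calling
            # `handle_request()` with a request for some other origin than the
            # one the connection was established against.)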
raise RuntimeError( f"Attempted to send request to {request.url.origin} on connection " f"to {self._origin}" ) with self._state_lock: if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE): self._request_count += 1 self._expire_at = None self._state = HTTPConnectionState.ACTIVE else: raise ConnectionNotAvailable() with self._init_lock: if not self._sent_connection_init: try: kwargs = {"request": request} with Trace("send_connection_init", logger, request, kwargs): self._send_connection_init(**kwargs) except BaseException as exc: with ShieldCancellation(): self.close() raise exc self._sent_connection_init = True # Initially start with just 1 until the remote server provides # its max_concurrent_streams value self._max_streams = 1 local_settings_max_streams = ( self._h2_state.local_settings.max_concurrent_streams ) self._max_streams_semaphore = Semaphore(local_settings_max_streams) for _ in range(local_settings_max_streams - self._max_streams): self._max_streams_semaphore.acquire() self._max_streams_semaphore.acquire() try: stream_id = self._h2_state.get_next_available_stream_id() self._events[stream_id] = [] except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover self._used_all_stream_ids = True self._request_count -= 1 raise ConnectionNotAvailable() try: kwargs = {"request": request, "stream_id": stream_id} with Trace("send_request_headers", logger, request, kwargs): self._send_request_headers(request=request, stream_id=stream_id) with Trace("send_request_body", logger, request, kwargs): self._send_request_body(request=request, stream_id=stream_id) with Trace( "receive_response_headers", logger, request, kwargs ) as trace: status, headers = self._receive_response( request=request, stream_id=stream_id ) trace.return_value = (status, headers) return Response( status=status, headers=headers, content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id), extensions={ "http_version": b"HTTP/2", "network_stream": self._network_stream, "stream_id": stream_id, }, ) except BaseException as exc: # noqa: PIE786 with ShieldCancellation(): kwargs = {"stream_id": stream_id} with Trace("response_closed", logger, request, kwargs): self._response_closed(stream_id=stream_id) if isinstance(exc, h2.exceptions.ProtocolError): # One case where h2 can raise a protocol error is when a # closed frame has been seen by the state machine. # # This happens when one stream is reading, and encounters # a GOAWAY event. Other flows of control may then raise # a protocol error at any point they interact with the 'h2_state'. # # In this case we'll have stored the event, and should raise # it as a RemoteProtocolError. if self._connection_terminated: # pragma: nocover raise RemoteProtocolError(self._connection_terminated) # If h2 raises a protocol error in some other state then we # must somehow have made a protocol violation. raise LocalProtocolError(exc) # pragma: nocover raise exc def _send_connection_init(self, request: Request) -> None: """ The HTTP/2 connection requires some initial setup before we can start using individual request/response streams on it. """ # Need to set these manually here instead of manipulating via # __setitem__() otherwise the H2Connection will emit SettingsUpdate # frames in addition to sending the undesired defaults. self._h2_state.local_settings = h2.settings.Settings( client=True, initial_values={ # Disable PUSH_PROMISE frames from the server since we don't do anything # with them for now. Maybe when we support caching? 
h2.settings.SettingCodes.ENABLE_PUSH: 0, # These two are taken from h2 for safe defaults h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100, h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536, }, ) # Some websites (*cough* Yahoo *cough*) balk at this setting being # present in the initial handshake since it's not defined in the original # RFC despite the RFC mandating ignoring settings you don't know about. del self._h2_state.local_settings[ h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL ] self._h2_state.initiate_connection() self._h2_state.increment_flow_control_window(2**24) self._write_outgoing_data(request) # Sending the request... def _send_request_headers(self, request: Request, stream_id: int) -> None: """ Send the request headers to a given stream ID. """ end_stream = not has_body_headers(request) # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'. # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require # HTTP/1.1 style headers, and map them appropriately if we end up on # an HTTP/2 connection. authority = [v for k, v in request.headers if k.lower() == b"host"][0] headers = [ (b":method", request.method), (b":authority", authority), (b":scheme", request.url.scheme), (b":path", request.url.target), ] + [ (k.lower(), v) for k, v in request.headers if k.lower() not in ( b"host", b"transfer-encoding", ) ] self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id) self._write_outgoing_data(request) def _send_request_body(self, request: Request, stream_id: int) -> None: """ Iterate over the request body sending it to a given stream ID. """ if not has_body_headers(request): return assert isinstance(request.stream, typing.Iterable) for data in request.stream: self._send_stream_data(request, stream_id, data) self._send_end_stream(request, stream_id) def _send_stream_data( self, request: Request, stream_id: int, data: bytes ) -> None: """ Send a single chunk of data in one or more data frames. """ while data: max_flow = self._wait_for_outgoing_flow(request, stream_id) chunk_size = min(len(data), max_flow) chunk, data = data[:chunk_size], data[chunk_size:] self._h2_state.send_data(stream_id, chunk) self._write_outgoing_data(request) def _send_end_stream(self, request: Request, stream_id: int) -> None: """ Send an empty data frame on on a given stream ID with the END_STREAM flag set. """ self._h2_state.end_stream(stream_id) self._write_outgoing_data(request) # Receiving the response... def _receive_response( self, request: Request, stream_id: int ) -> typing.Tuple[int, typing.List[typing.Tuple[bytes, bytes]]]: """ Return the response status code and headers for a given stream ID. """ while True: event = self._receive_stream_event(request, stream_id) if isinstance(event, h2.events.ResponseReceived): break status_code = 200 headers = [] for k, v in event.headers: if k == b":status": status_code = int(v.decode("ascii", errors="ignore")) elif not k.startswith(b":"): headers.append((k, v)) return (status_code, headers) def _receive_response_body( self, request: Request, stream_id: int ) -> typing.Iterator[bytes]: """ Iterator that returns the bytes of the response body for a given stream ID. 
""" while True: event = self._receive_stream_event(request, stream_id) if isinstance(event, h2.events.DataReceived): amount = event.flow_controlled_length self._h2_state.acknowledge_received_data(amount, stream_id) self._write_outgoing_data(request) yield event.data elif isinstance(event, h2.events.StreamEnded): break def _receive_stream_event( self, request: Request, stream_id: int ) -> typing.Union[ h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded ]: """ Return the next available event for a given stream ID. Will read more data from the network if required. """ while not self._events.get(stream_id): self._receive_events(request, stream_id) event = self._events[stream_id].pop(0) if isinstance(event, h2.events.StreamReset): raise RemoteProtocolError(event) return event def _receive_events( self, request: Request, stream_id: typing.Optional[int] = None ) -> None: """ Read some data from the network until we see one or more events for a given stream ID. """ with self._read_lock: if self._connection_terminated is not None: last_stream_id = self._connection_terminated.last_stream_id if stream_id and last_stream_id and stream_id > last_stream_id: self._request_count -= 1 raise ConnectionNotAvailable() raise RemoteProtocolError(self._connection_terminated) # This conditional is a bit icky. We don't want to block reading if we've # actually got an event to return for a given stream. We need to do that # check *within* the atomic read lock. Though it also need to be optional, # because when we call it from `_wait_for_outgoing_flow` we *do* want to # block until we've available flow control, event when we have events # pending for the stream ID we're attempting to send on. if stream_id is None or not self._events.get(stream_id): events = self._read_incoming_data(request) for event in events: if isinstance(event, h2.events.RemoteSettingsChanged): with Trace( "receive_remote_settings", logger, request ) as trace: self._receive_remote_settings_change(event) trace.return_value = event elif isinstance( event, ( h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded, h2.events.StreamReset, ), ): if event.stream_id in self._events: self._events[event.stream_id].append(event) elif isinstance(event, h2.events.ConnectionTerminated): self._connection_terminated = event self._write_outgoing_data(request) def _receive_remote_settings_change(self, event: h2.events.Event) -> None: max_concurrent_streams = event.changed_settings.get( h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS ) if max_concurrent_streams: new_max_streams = min( max_concurrent_streams.new_value, self._h2_state.local_settings.max_concurrent_streams, ) if new_max_streams and new_max_streams != self._max_streams: while new_max_streams > self._max_streams: self._max_streams_semaphore.release() self._max_streams += 1 while new_max_streams < self._max_streams: self._max_streams_semaphore.acquire() self._max_streams -= 1 def _response_closed(self, stream_id: int) -> None: self._max_streams_semaphore.release() del self._events[stream_id] with self._state_lock: if self._connection_terminated and not self._events: self.close() elif self._state == HTTPConnectionState.ACTIVE and not self._events: self._state = HTTPConnectionState.IDLE if self._keepalive_expiry is not None: now = time.monotonic() self._expire_at = now + self._keepalive_expiry if self._used_all_stream_ids: # pragma: nocover self.close() def close(self) -> None: # Note that this method unilaterally closes the connection, and does # not have any kind 
of locking in place around it. self._h2_state.close_connection() self._state = HTTPConnectionState.CLOSED self._network_stream.close() # Wrappers around network read/write operations... def _read_incoming_data( self, request: Request ) -> typing.List[h2.events.Event]: timeouts = request.extensions.get("timeout", {}) timeout = timeouts.get("read", None) if self._read_exception is not None: raise self._read_exception # pragma: nocover try: data = self._network_stream.read(self.READ_NUM_BYTES, timeout) if data == b"": raise RemoteProtocolError("Server disconnected") except Exception as exc: # If we get a network error we should: # # 1. Save the exception and just raise it immediately on any future reads. # (For example, this means that a single read timeout or disconnect will # immediately close all pending streams. Without requiring multiple # sequential timeouts.) # 2. Mark the connection as errored, so that we don't accept any other # incoming requests. self._read_exception = exc self._connection_error = True raise exc events: typing.List[h2.events.Event] = self._h2_state.receive_data(data) return events def _write_outgoing_data(self, request: Request) -> None: timeouts = request.extensions.get("timeout", {}) timeout = timeouts.get("write", None) with self._write_lock: data_to_send = self._h2_state.data_to_send() if self._write_exception is not None: raise self._write_exception # pragma: nocover try: self._network_stream.write(data_to_send, timeout) except Exception as exc: # pragma: nocover # If we get a network error we should: # # 1. Save the exception and just raise it immediately on any future write. # (For example, this means that a single write timeout or disconnect will # immediately close all pending streams. Without requiring multiple # sequential timeouts.) # 2. Mark the connection as errored, so that we don't accept any other # incoming requests. self._write_exception = exc self._connection_error = True raise exc # Flow control... def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int: """ Returns the maximum allowable outgoing flow for a given stream. If the allowable flow is zero, then waits on the network until WindowUpdated frames have increased the flow rate. https://tools.ietf.org/html/rfc7540#section-6.9 """ local_flow: int = self._h2_state.local_flow_control_window(stream_id) max_frame_size: int = self._h2_state.max_outbound_frame_size flow = min(local_flow, max_frame_size) while flow == 0: self._receive_events(request) local_flow = self._h2_state.local_flow_control_window(stream_id) max_frame_size = self._h2_state.max_outbound_frame_size flow = min(local_flow, max_frame_size) return flow # Interface for connection pooling... 
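# The methods below are the probes that `ConnectionPool` uses when deciding
# whether an existing connection can service a request, be kept alive, or be
# culled. A rough sketch of the pool-side logic (illustrative only, not the
# actual `ConnectionPool` implementation; `pool_connections` is a placeholder
# name):
#
#     for connection in pool_connections:
#         if connection.can_handle_request(origin) and connection.is_available():
#             return connection.handle_request(request)  # reuse this connection
#         if connection.has_expired() or connection.is_closed():
#             pool_connections.remove(connection)  # drop stale connections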
def can_handle_request(self, origin: Origin) -> bool: return origin == self._origin def is_available(self) -> bool: return ( self._state != HTTPConnectionState.CLOSED and not self._connection_error and not self._used_all_stream_ids and not ( self._h2_state.state_machine.state == h2.connection.ConnectionState.CLOSED ) ) def has_expired(self) -> bool: now = time.monotonic() return self._expire_at is not None and now > self._expire_at def is_idle(self) -> bool: return self._state == HTTPConnectionState.IDLE def is_closed(self) -> bool: return self._state == HTTPConnectionState.CLOSED def info(self) -> str: origin = str(self._origin) return ( f"{origin!r}, HTTP/2, {self._state.name}, " f"Request Count: {self._request_count}" ) def __repr__(self) -> str: class_name = self.__class__.__name__ origin = str(self._origin) return ( f"<{class_name} [{origin!r}, {self._state.name}, " f"Request Count: {self._request_count}]>" ) # These context managers are not used in the standard flow, but are # useful for testing or working with connection instances directly. def __enter__(self) -> "HTTP2Connection": return self def __exit__( self, exc_type: typing.Optional[typing.Type[BaseException]] = None, exc_value: typing.Optional[BaseException] = None, traceback: typing.Optional[types.TracebackType] = None, ) -> None: self.close() class HTTP2ConnectionByteStream: def __init__( self, connection: HTTP2Connection, request: Request, stream_id: int ) -> None: self._connection = connection self._request = request self._stream_id = stream_id self._closed = False def __iter__(self) -> typing.Iterator[bytes]: kwargs = {"request": self._request, "stream_id": self._stream_id} try: with Trace("receive_response_body", logger, self._request, kwargs): for chunk in self._connection._receive_response_body( request=self._request, stream_id=self._stream_id ): yield chunk except BaseException as exc: # If we get an exception while streaming the response, # we want to close the response (and possibly the connection) # before raising that exception. with ShieldCancellation(): self.close() raise exc def close(self) -> None: if not self._closed: self._closed = True kwargs = {"stream_id": self._stream_id} with Trace("response_closed", logger, self._request, kwargs): self._connection._response_closed(stream_id=self._stream_id) httpcore-1.0.2/httpcore/_sync/http_proxy.py000066400000000000000000000344251452343067500211200ustar00rootroot00000000000000import logging import ssl from base64 import b64encode from typing import Iterable, List, Mapping, Optional, Sequence, Tuple, Union from .._backends.base import SOCKET_OPTION, NetworkBackend from .._exceptions import ProxyError from .._models import ( URL, Origin, Request, Response, enforce_bytes, enforce_headers, enforce_url, ) from .._ssl import default_ssl_context from .._synchronization import Lock from .._trace import Trace from .connection import HTTPConnection from .connection_pool import ConnectionPool from .http11 import HTTP11Connection from .interfaces import ConnectionInterface HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]] HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]] logger = logging.getLogger("httpcore.proxy") def merge_headers( default_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, override_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, ) -> List[Tuple[bytes, bytes]]: """ Append default_headers and override_headers, de-duplicating if a key exists in both cases. 
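    For example, a hypothetical call (override keys win, compared
    case-insensitively):

        merge_headers(
            [(b"Host", b"example.com"), (b"Accept", b"*/*")],
            [(b"accept", b"text/html")],
        )
        # -> [(b"Host", b"example.com"), (b"accept", b"text/html")]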
""" default_headers = [] if default_headers is None else list(default_headers) override_headers = [] if override_headers is None else list(override_headers) has_override = set(key.lower() for key, value in override_headers) default_headers = [ (key, value) for key, value in default_headers if key.lower() not in has_override ] return default_headers + override_headers def build_auth_header(username: bytes, password: bytes) -> bytes: userpass = username + b":" + password return b"Basic " + b64encode(userpass) class HTTPProxy(ConnectionPool): """ A connection pool that sends requests via an HTTP proxy. """ def __init__( self, proxy_url: Union[URL, bytes, str], proxy_auth: Optional[Tuple[Union[bytes, str], Union[bytes, str]]] = None, proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, ssl_context: Optional[ssl.SSLContext] = None, proxy_ssl_context: Optional[ssl.SSLContext] = None, max_connections: Optional[int] = 10, max_keepalive_connections: Optional[int] = None, keepalive_expiry: Optional[float] = None, http1: bool = True, http2: bool = False, retries: int = 0, local_address: Optional[str] = None, uds: Optional[str] = None, network_backend: Optional[NetworkBackend] = None, socket_options: Optional[Iterable[SOCKET_OPTION]] = None, ) -> None: """ A connection pool for making HTTP requests. Parameters: proxy_url: The URL to use when connecting to the proxy server. For example `"http://127.0.0.1:8080/"`. proxy_auth: Any proxy authentication as a two-tuple of (username, password). May be either bytes or ascii-only str. proxy_headers: Any HTTP headers to use for the proxy requests. For example `{"Proxy-Authorization": "Basic :"}`. ssl_context: An SSL context to use for verifying connections. If not specified, the default `httpcore.default_ssl_context()` will be used. proxy_ssl_context: The same as `ssl_context`, but for a proxy server rather than a remote origin. max_connections: The maximum number of concurrent HTTP connections that the pool should allow. Any attempt to send a request on a pool that would exceed this amount will block until a connection is available. max_keepalive_connections: The maximum number of idle HTTP connections that will be maintained in the pool. keepalive_expiry: The duration in seconds that an idle HTTP connection may be maintained for before being expired from the pool. http1: A boolean indicating if HTTP/1.1 requests should be supported by the connection pool. Defaults to True. http2: A boolean indicating if HTTP/2 requests should be supported by the connection pool. Defaults to False. retries: The maximum number of retries when trying to establish a connection. local_address: Local address to connect from. Can also be used to connect using a particular address family. Using `local_address="0.0.0.0"` will connect using an `AF_INET` address (IPv4), while using `local_address="::"` will connect using an `AF_INET6` address (IPv6). uds: Path to a Unix Domain Socket to use instead of TCP sockets. network_backend: A backend instance to use for handling network I/O. 
""" super().__init__( ssl_context=ssl_context, max_connections=max_connections, max_keepalive_connections=max_keepalive_connections, keepalive_expiry=keepalive_expiry, http1=http1, http2=http2, network_backend=network_backend, retries=retries, local_address=local_address, uds=uds, socket_options=socket_options, ) self._proxy_url = enforce_url(proxy_url, name="proxy_url") if ( self._proxy_url.scheme == b"http" and proxy_ssl_context is not None ): # pragma: no cover raise RuntimeError( "The `proxy_ssl_context` argument is not allowed for the http scheme" ) self._ssl_context = ssl_context self._proxy_ssl_context = proxy_ssl_context self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") if proxy_auth is not None: username = enforce_bytes(proxy_auth[0], name="proxy_auth") password = enforce_bytes(proxy_auth[1], name="proxy_auth") authorization = build_auth_header(username, password) self._proxy_headers = [ (b"Proxy-Authorization", authorization) ] + self._proxy_headers def create_connection(self, origin: Origin) -> ConnectionInterface: if origin.scheme == b"http": return ForwardHTTPConnection( proxy_origin=self._proxy_url.origin, proxy_headers=self._proxy_headers, remote_origin=origin, keepalive_expiry=self._keepalive_expiry, network_backend=self._network_backend, proxy_ssl_context=self._proxy_ssl_context, ) return TunnelHTTPConnection( proxy_origin=self._proxy_url.origin, proxy_headers=self._proxy_headers, remote_origin=origin, ssl_context=self._ssl_context, proxy_ssl_context=self._proxy_ssl_context, keepalive_expiry=self._keepalive_expiry, http1=self._http1, http2=self._http2, network_backend=self._network_backend, ) class ForwardHTTPConnection(ConnectionInterface): def __init__( self, proxy_origin: Origin, remote_origin: Origin, proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, keepalive_expiry: Optional[float] = None, network_backend: Optional[NetworkBackend] = None, socket_options: Optional[Iterable[SOCKET_OPTION]] = None, proxy_ssl_context: Optional[ssl.SSLContext] = None, ) -> None: self._connection = HTTPConnection( origin=proxy_origin, keepalive_expiry=keepalive_expiry, network_backend=network_backend, socket_options=socket_options, ssl_context=proxy_ssl_context, ) self._proxy_origin = proxy_origin self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") self._remote_origin = remote_origin def handle_request(self, request: Request) -> Response: headers = merge_headers(self._proxy_headers, request.headers) url = URL( scheme=self._proxy_origin.scheme, host=self._proxy_origin.host, port=self._proxy_origin.port, target=bytes(request.url), ) proxy_request = Request( method=request.method, url=url, headers=headers, content=request.stream, extensions=request.extensions, ) return self._connection.handle_request(proxy_request) def can_handle_request(self, origin: Origin) -> bool: return origin == self._remote_origin def close(self) -> None: self._connection.close() def info(self) -> str: return self._connection.info() def is_available(self) -> bool: return self._connection.is_available() def has_expired(self) -> bool: return self._connection.has_expired() def is_idle(self) -> bool: return self._connection.is_idle() def is_closed(self) -> bool: return self._connection.is_closed() def __repr__(self) -> str: return f"<{self.__class__.__name__} [{self.info()}]>" class TunnelHTTPConnection(ConnectionInterface): def __init__( self, proxy_origin: Origin, remote_origin: Origin, ssl_context: Optional[ssl.SSLContext] = None, proxy_ssl_context: 
Optional[ssl.SSLContext] = None, proxy_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, keepalive_expiry: Optional[float] = None, http1: bool = True, http2: bool = False, network_backend: Optional[NetworkBackend] = None, socket_options: Optional[Iterable[SOCKET_OPTION]] = None, ) -> None: self._connection: ConnectionInterface = HTTPConnection( origin=proxy_origin, keepalive_expiry=keepalive_expiry, network_backend=network_backend, socket_options=socket_options, ssl_context=proxy_ssl_context, ) self._proxy_origin = proxy_origin self._remote_origin = remote_origin self._ssl_context = ssl_context self._proxy_ssl_context = proxy_ssl_context self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") self._keepalive_expiry = keepalive_expiry self._http1 = http1 self._http2 = http2 self._connect_lock = Lock() self._connected = False def handle_request(self, request: Request) -> Response: timeouts = request.extensions.get("timeout", {}) timeout = timeouts.get("connect", None) with self._connect_lock: if not self._connected: target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port) connect_url = URL( scheme=self._proxy_origin.scheme, host=self._proxy_origin.host, port=self._proxy_origin.port, target=target, ) connect_headers = merge_headers( [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers ) connect_request = Request( method=b"CONNECT", url=connect_url, headers=connect_headers, extensions=request.extensions, ) connect_response = self._connection.handle_request( connect_request ) if connect_response.status < 200 or connect_response.status > 299: reason_bytes = connect_response.extensions.get("reason_phrase", b"") reason_str = reason_bytes.decode("ascii", errors="ignore") msg = "%d %s" % (connect_response.status, reason_str) self._connection.close() raise ProxyError(msg) stream = connect_response.extensions["network_stream"] # Upgrade the stream to SSL ssl_context = ( default_ssl_context() if self._ssl_context is None else self._ssl_context ) alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] ssl_context.set_alpn_protocols(alpn_protocols) kwargs = { "ssl_context": ssl_context, "server_hostname": self._remote_origin.host.decode("ascii"), "timeout": timeout, } with Trace("start_tls", logger, request, kwargs) as trace: stream = stream.start_tls(**kwargs) trace.return_value = stream # Determine if we should be using HTTP/1.1 or HTTP/2 ssl_object = stream.get_extra_info("ssl_object") http2_negotiated = ( ssl_object is not None and ssl_object.selected_alpn_protocol() == "h2" ) # Create the HTTP/1.1 or HTTP/2 connection if http2_negotiated or (self._http2 and not self._http1): from .http2 import HTTP2Connection self._connection = HTTP2Connection( origin=self._remote_origin, stream=stream, keepalive_expiry=self._keepalive_expiry, ) else: self._connection = HTTP11Connection( origin=self._remote_origin, stream=stream, keepalive_expiry=self._keepalive_expiry, ) self._connected = True return self._connection.handle_request(request) def can_handle_request(self, origin: Origin) -> bool: return origin == self._remote_origin def close(self) -> None: self._connection.close() def info(self) -> str: return self._connection.info() def is_available(self) -> bool: return self._connection.is_available() def has_expired(self) -> bool: return self._connection.has_expired() def is_idle(self) -> bool: return self._connection.is_idle() def is_closed(self) -> bool: return self._connection.is_closed() def __repr__(self) -> str: return 
f"<{self.__class__.__name__} [{self.info()}]>" httpcore-1.0.2/httpcore/_sync/interfaces.py000066400000000000000000000104151452343067500210140ustar00rootroot00000000000000from contextlib import contextmanager from typing import Iterator, Optional, Union from .._models import ( URL, Extensions, HeaderTypes, Origin, Request, Response, enforce_bytes, enforce_headers, enforce_url, include_request_headers, ) class RequestInterface: def request( self, method: Union[bytes, str], url: Union[URL, bytes, str], *, headers: HeaderTypes = None, content: Union[bytes, Iterator[bytes], None] = None, extensions: Optional[Extensions] = None, ) -> Response: # Strict type checking on our parameters. method = enforce_bytes(method, name="method") url = enforce_url(url, name="url") headers = enforce_headers(headers, name="headers") # Include Host header, and optionally Content-Length or Transfer-Encoding. headers = include_request_headers(headers, url=url, content=content) request = Request( method=method, url=url, headers=headers, content=content, extensions=extensions, ) response = self.handle_request(request) try: response.read() finally: response.close() return response @contextmanager def stream( self, method: Union[bytes, str], url: Union[URL, bytes, str], *, headers: HeaderTypes = None, content: Union[bytes, Iterator[bytes], None] = None, extensions: Optional[Extensions] = None, ) -> Iterator[Response]: # Strict type checking on our parameters. method = enforce_bytes(method, name="method") url = enforce_url(url, name="url") headers = enforce_headers(headers, name="headers") # Include Host header, and optionally Content-Length or Transfer-Encoding. headers = include_request_headers(headers, url=url, content=content) request = Request( method=method, url=url, headers=headers, content=content, extensions=extensions, ) response = self.handle_request(request) try: yield response finally: response.close() def handle_request(self, request: Request) -> Response: raise NotImplementedError() # pragma: nocover class ConnectionInterface(RequestInterface): def close(self) -> None: raise NotImplementedError() # pragma: nocover def info(self) -> str: raise NotImplementedError() # pragma: nocover def can_handle_request(self, origin: Origin) -> bool: raise NotImplementedError() # pragma: nocover def is_available(self) -> bool: """ Return `True` if the connection is currently able to accept an outgoing request. An HTTP/1.1 connection will only be available if it is currently idle. An HTTP/2 connection will be available so long as the stream ID space is not yet exhausted, and the connection is not in an error state. While the connection is being established we may not yet know if it is going to result in an HTTP/1.1 or HTTP/2 connection. The connection should be treated as being available, but might ultimately raise `NewConnectionRequired` required exceptions if multiple requests are attempted over a connection that ends up being established as HTTP/1.1. """ raise NotImplementedError() # pragma: nocover def has_expired(self) -> bool: """ Return `True` if the connection is in a state where it should be closed. This either means that the connection is idle and it has passed the expiry time on its keep-alive, or that server has sent an EOF. """ raise NotImplementedError() # pragma: nocover def is_idle(self) -> bool: """ Return `True` if the connection is currently idle. """ raise NotImplementedError() # pragma: nocover def is_closed(self) -> bool: """ Return `True` if the connection has been closed. 
Used when a response is closed to determine if the connection may be returned to the connection pool or not. """ raise NotImplementedError() # pragma: nocover httpcore-1.0.2/httpcore/_sync/socks_proxy.py000066400000000000000000000326131452343067500212600ustar00rootroot00000000000000import logging import ssl import typing from socksio import socks5 from .._backends.sync import SyncBackend from .._backends.base import NetworkBackend, NetworkStream from .._exceptions import ConnectionNotAvailable, ProxyError from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url from .._ssl import default_ssl_context from .._synchronization import Lock from .._trace import Trace from .connection_pool import ConnectionPool from .http11 import HTTP11Connection from .interfaces import ConnectionInterface logger = logging.getLogger("httpcore.socks") AUTH_METHODS = { b"\x00": "NO AUTHENTICATION REQUIRED", b"\x01": "GSSAPI", b"\x02": "USERNAME/PASSWORD", b"\xff": "NO ACCEPTABLE METHODS", } REPLY_CODES = { b"\x00": "Succeeded", b"\x01": "General SOCKS server failure", b"\x02": "Connection not allowed by ruleset", b"\x03": "Network unreachable", b"\x04": "Host unreachable", b"\x05": "Connection refused", b"\x06": "TTL expired", b"\x07": "Command not supported", b"\x08": "Address type not supported", } def _init_socks5_connection( stream: NetworkStream, *, host: bytes, port: int, auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, ) -> None: conn = socks5.SOCKS5Connection() # Auth method request auth_method = ( socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED if auth is None else socks5.SOCKS5AuthMethod.USERNAME_PASSWORD ) conn.send(socks5.SOCKS5AuthMethodsRequest([auth_method])) outgoing_bytes = conn.data_to_send() stream.write(outgoing_bytes) # Auth method response incoming_bytes = stream.read(max_bytes=4096) response = conn.receive_data(incoming_bytes) assert isinstance(response, socks5.SOCKS5AuthReply) if response.method != auth_method: requested = AUTH_METHODS.get(auth_method, "UNKNOWN") responded = AUTH_METHODS.get(response.method, "UNKNOWN") raise ProxyError( f"Requested {requested} from proxy server, but got {responded}." ) if response.method == socks5.SOCKS5AuthMethod.USERNAME_PASSWORD: # Username/password request assert auth is not None username, password = auth conn.send(socks5.SOCKS5UsernamePasswordRequest(username, password)) outgoing_bytes = conn.data_to_send() stream.write(outgoing_bytes) # Username/password response incoming_bytes = stream.read(max_bytes=4096) response = conn.receive_data(incoming_bytes) assert isinstance(response, socks5.SOCKS5UsernamePasswordReply) if not response.success: raise ProxyError("Invalid username/password") # Connect request conn.send( socks5.SOCKS5CommandRequest.from_address( socks5.SOCKS5Command.CONNECT, (host, port) ) ) outgoing_bytes = conn.data_to_send() stream.write(outgoing_bytes) # Connect response incoming_bytes = stream.read(max_bytes=4096) response = conn.receive_data(incoming_bytes) assert isinstance(response, socks5.SOCKS5Reply) if response.reply_code != socks5.SOCKS5ReplyCode.SUCCEEDED: reply_code = REPLY_CODES.get(response.reply_code, "UNKNOWN") raise ProxyError(f"Proxy Server could not connect: {reply_code}.") class SOCKSProxy(ConnectionPool): """ A connection pool that sends requests via a SOCKS proxy.
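    A usage sketch (the proxy address and credentials shown here are
    illustrative assumptions):

        proxy = SOCKSProxy(
            proxy_url="socks5://127.0.0.1:1080/",
            proxy_auth=("username", "password"),
        )
        response = proxy.request("GET", "https://example.com/")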
""" def __init__( self, proxy_url: typing.Union[URL, bytes, str], proxy_auth: typing.Optional[ typing.Tuple[typing.Union[bytes, str], typing.Union[bytes, str]] ] = None, ssl_context: typing.Optional[ssl.SSLContext] = None, max_connections: typing.Optional[int] = 10, max_keepalive_connections: typing.Optional[int] = None, keepalive_expiry: typing.Optional[float] = None, http1: bool = True, http2: bool = False, retries: int = 0, network_backend: typing.Optional[NetworkBackend] = None, ) -> None: """ A connection pool for making HTTP requests. Parameters: proxy_url: The URL to use when connecting to the proxy server. For example `"http://127.0.0.1:8080/"`. ssl_context: An SSL context to use for verifying connections. If not specified, the default `httpcore.default_ssl_context()` will be used. max_connections: The maximum number of concurrent HTTP connections that the pool should allow. Any attempt to send a request on a pool that would exceed this amount will block until a connection is available. max_keepalive_connections: The maximum number of idle HTTP connections that will be maintained in the pool. keepalive_expiry: The duration in seconds that an idle HTTP connection may be maintained for before being expired from the pool. http1: A boolean indicating if HTTP/1.1 requests should be supported by the connection pool. Defaults to True. http2: A boolean indicating if HTTP/2 requests should be supported by the connection pool. Defaults to False. retries: The maximum number of retries when trying to establish a connection. local_address: Local address to connect from. Can also be used to connect using a particular address family. Using `local_address="0.0.0.0"` will connect using an `AF_INET` address (IPv4), while using `local_address="::"` will connect using an `AF_INET6` address (IPv6). uds: Path to a Unix Domain Socket to use instead of TCP sockets. network_backend: A backend instance to use for handling network I/O. 
""" super().__init__( ssl_context=ssl_context, max_connections=max_connections, max_keepalive_connections=max_keepalive_connections, keepalive_expiry=keepalive_expiry, http1=http1, http2=http2, network_backend=network_backend, retries=retries, ) self._ssl_context = ssl_context self._proxy_url = enforce_url(proxy_url, name="proxy_url") if proxy_auth is not None: username, password = proxy_auth username_bytes = enforce_bytes(username, name="proxy_auth") password_bytes = enforce_bytes(password, name="proxy_auth") self._proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = ( username_bytes, password_bytes, ) else: self._proxy_auth = None def create_connection(self, origin: Origin) -> ConnectionInterface: return Socks5Connection( proxy_origin=self._proxy_url.origin, remote_origin=origin, proxy_auth=self._proxy_auth, ssl_context=self._ssl_context, keepalive_expiry=self._keepalive_expiry, http1=self._http1, http2=self._http2, network_backend=self._network_backend, ) class Socks5Connection(ConnectionInterface): def __init__( self, proxy_origin: Origin, remote_origin: Origin, proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, ssl_context: typing.Optional[ssl.SSLContext] = None, keepalive_expiry: typing.Optional[float] = None, http1: bool = True, http2: bool = False, network_backend: typing.Optional[NetworkBackend] = None, ) -> None: self._proxy_origin = proxy_origin self._remote_origin = remote_origin self._proxy_auth = proxy_auth self._ssl_context = ssl_context self._keepalive_expiry = keepalive_expiry self._http1 = http1 self._http2 = http2 self._network_backend: NetworkBackend = ( SyncBackend() if network_backend is None else network_backend ) self._connect_lock = Lock() self._connection: typing.Optional[ConnectionInterface] = None self._connect_failed = False def handle_request(self, request: Request) -> Response: timeouts = request.extensions.get("timeout", {}) sni_hostname = request.extensions.get("sni_hostname", None) timeout = timeouts.get("connect", None) with self._connect_lock: if self._connection is None: try: # Connect to the proxy kwargs = { "host": self._proxy_origin.host.decode("ascii"), "port": self._proxy_origin.port, "timeout": timeout, } with Trace("connect_tcp", logger, request, kwargs) as trace: stream = self._network_backend.connect_tcp(**kwargs) trace.return_value = stream # Connect to the remote host using socks5 kwargs = { "stream": stream, "host": self._remote_origin.host.decode("ascii"), "port": self._remote_origin.port, "auth": self._proxy_auth, } with Trace( "setup_socks5_connection", logger, request, kwargs ) as trace: _init_socks5_connection(**kwargs) trace.return_value = stream # Upgrade the stream to SSL if self._remote_origin.scheme == b"https": ssl_context = ( default_ssl_context() if self._ssl_context is None else self._ssl_context ) alpn_protocols = ( ["http/1.1", "h2"] if self._http2 else ["http/1.1"] ) ssl_context.set_alpn_protocols(alpn_protocols) kwargs = { "ssl_context": ssl_context, "server_hostname": sni_hostname or self._remote_origin.host.decode("ascii"), "timeout": timeout, } with Trace("start_tls", logger, request, kwargs) as trace: stream = stream.start_tls(**kwargs) trace.return_value = stream # Determine if we should be using HTTP/1.1 or HTTP/2 ssl_object = stream.get_extra_info("ssl_object") http2_negotiated = ( ssl_object is not None and ssl_object.selected_alpn_protocol() == "h2" ) # Create the HTTP/1.1 or HTTP/2 connection if http2_negotiated or ( self._http2 and not self._http1 ): # pragma: nocover from .http2 import 
HTTP2Connection self._connection = HTTP2Connection( origin=self._remote_origin, stream=stream, keepalive_expiry=self._keepalive_expiry, ) else: self._connection = HTTP11Connection( origin=self._remote_origin, stream=stream, keepalive_expiry=self._keepalive_expiry, ) except Exception as exc: self._connect_failed = True raise exc elif not self._connection.is_available(): # pragma: nocover raise ConnectionNotAvailable() return self._connection.handle_request(request) def can_handle_request(self, origin: Origin) -> bool: return origin == self._remote_origin def close(self) -> None: if self._connection is not None: self._connection.close() def is_available(self) -> bool: if self._connection is None: # pragma: nocover # If HTTP/2 support is enabled, and the resulting connection could # end up as HTTP/2 then we should indicate the connection as being # available to service multiple requests. return ( self._http2 and (self._remote_origin.scheme == b"https" or not self._http1) and not self._connect_failed ) return self._connection.is_available() def has_expired(self) -> bool: if self._connection is None: # pragma: nocover return self._connect_failed return self._connection.has_expired() def is_idle(self) -> bool: if self._connection is None: # pragma: nocover return self._connect_failed return self._connection.is_idle() def is_closed(self) -> bool: if self._connection is None: # pragma: nocover return self._connect_failed return self._connection.is_closed() def info(self) -> str: if self._connection is None: # pragma: nocover return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" return self._connection.info() def __repr__(self) -> str: return f"<{self.__class__.__name__} [{self.info()}]>" httpcore-1.0.2/httpcore/_synchronization.py000066400000000000000000000175661452343067500211740ustar00rootroot00000000000000import threading from types import TracebackType from typing import Optional, Type from ._exceptions import ExceptionMapping, PoolTimeout, map_exceptions # Our async synchronization primatives use either 'anyio' or 'trio' depending # on if they're running under asyncio or trio. try: import trio except ImportError: # pragma: nocover trio = None # type: ignore try: import anyio except ImportError: # pragma: nocover anyio = None # type: ignore def current_async_library() -> str: # Determine if we're running under trio or asyncio. # See https://sniffio.readthedocs.io/en/latest/ try: import sniffio except ImportError: # pragma: nocover environment = "asyncio" else: environment = sniffio.current_async_library() if environment not in ("asyncio", "trio"): # pragma: nocover raise RuntimeError("Running under an unsupported async environment.") if environment == "asyncio" and anyio is None: # pragma: nocover raise RuntimeError( "Running with asyncio requires installation of 'httpcore[asyncio]'." ) if environment == "trio" and trio is None: # pragma: nocover raise RuntimeError( "Running with trio requires installation of 'httpcore[trio]'." ) return environment class AsyncLock: def __init__(self) -> None: self._backend = "" def setup(self) -> None: """ Detect if we're running under 'asyncio' or 'trio' and create a lock with the correct implementation. 
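        A sketch of the intended lazy behaviour (illustrative):

            lock = AsyncLock()   # safe to construct with no event loop running
            async with lock:     # first acquisition triggers setup()
                ...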
""" self._backend = current_async_library() if self._backend == "trio": self._trio_lock = trio.Lock() elif self._backend == "asyncio": self._anyio_lock = anyio.Lock() async def __aenter__(self) -> "AsyncLock": if not self._backend: self.setup() if self._backend == "trio": await self._trio_lock.acquire() elif self._backend == "asyncio": await self._anyio_lock.acquire() return self async def __aexit__( self, exc_type: Optional[Type[BaseException]] = None, exc_value: Optional[BaseException] = None, traceback: Optional[TracebackType] = None, ) -> None: if self._backend == "trio": self._trio_lock.release() elif self._backend == "asyncio": self._anyio_lock.release() class AsyncEvent: def __init__(self) -> None: self._backend = "" def setup(self) -> None: """ Detect if we're running under 'asyncio' or 'trio' and create a lock with the correct implementation. """ self._backend = current_async_library() if self._backend == "trio": self._trio_event = trio.Event() elif self._backend == "asyncio": self._anyio_event = anyio.Event() def set(self) -> None: if not self._backend: self.setup() if self._backend == "trio": self._trio_event.set() elif self._backend == "asyncio": self._anyio_event.set() async def wait(self, timeout: Optional[float] = None) -> None: if not self._backend: self.setup() if self._backend == "trio": trio_exc_map: ExceptionMapping = {trio.TooSlowError: PoolTimeout} timeout_or_inf = float("inf") if timeout is None else timeout with map_exceptions(trio_exc_map): with trio.fail_after(timeout_or_inf): await self._trio_event.wait() elif self._backend == "asyncio": anyio_exc_map: ExceptionMapping = {TimeoutError: PoolTimeout} with map_exceptions(anyio_exc_map): with anyio.fail_after(timeout): await self._anyio_event.wait() class AsyncSemaphore: def __init__(self, bound: int) -> None: self._bound = bound self._backend = "" def setup(self) -> None: """ Detect if we're running under 'asyncio' or 'trio' and create a semaphore with the correct implementation. """ self._backend = current_async_library() if self._backend == "trio": self._trio_semaphore = trio.Semaphore( initial_value=self._bound, max_value=self._bound ) elif self._backend == "asyncio": self._anyio_semaphore = anyio.Semaphore( initial_value=self._bound, max_value=self._bound ) async def acquire(self) -> None: if not self._backend: self.setup() if self._backend == "trio": await self._trio_semaphore.acquire() elif self._backend == "asyncio": await self._anyio_semaphore.acquire() async def release(self) -> None: if self._backend == "trio": self._trio_semaphore.release() elif self._backend == "asyncio": self._anyio_semaphore.release() class AsyncShieldCancellation: # For certain portions of our codebase where we're dealing with # closing connections during exception handling we want to shield # the operation from being cancelled. # # with AsyncShieldCancellation(): # ... # clean-up operations, shielded from cancellation. def __init__(self) -> None: """ Detect if we're running under 'asyncio' or 'trio' and create a shielded scope with the correct implementation. 
""" self._backend = current_async_library() if self._backend == "trio": self._trio_shield = trio.CancelScope(shield=True) elif self._backend == "asyncio": self._anyio_shield = anyio.CancelScope(shield=True) def __enter__(self) -> "AsyncShieldCancellation": if self._backend == "trio": self._trio_shield.__enter__() elif self._backend == "asyncio": self._anyio_shield.__enter__() return self def __exit__( self, exc_type: Optional[Type[BaseException]] = None, exc_value: Optional[BaseException] = None, traceback: Optional[TracebackType] = None, ) -> None: if self._backend == "trio": self._trio_shield.__exit__(exc_type, exc_value, traceback) elif self._backend == "asyncio": self._anyio_shield.__exit__(exc_type, exc_value, traceback) # Our thread-based synchronization primitives... class Lock: def __init__(self) -> None: self._lock = threading.Lock() def __enter__(self) -> "Lock": self._lock.acquire() return self def __exit__( self, exc_type: Optional[Type[BaseException]] = None, exc_value: Optional[BaseException] = None, traceback: Optional[TracebackType] = None, ) -> None: self._lock.release() class Event: def __init__(self) -> None: self._event = threading.Event() def set(self) -> None: self._event.set() def wait(self, timeout: Optional[float] = None) -> None: if timeout == float("inf"): # pragma: no cover timeout = None if not self._event.wait(timeout=timeout): raise PoolTimeout() # pragma: nocover class Semaphore: def __init__(self, bound: int) -> None: self._semaphore = threading.Semaphore(value=bound) def acquire(self) -> None: self._semaphore.acquire() def release(self) -> None: self._semaphore.release() class ShieldCancellation: # Thread-synchronous codebases don't support cancellation semantics. # We have this class because we need to mirror the async and sync # cases within our package, but it's just a no-op. def __enter__(self) -> "ShieldCancellation": return self def __exit__( self, exc_type: Optional[Type[BaseException]] = None, exc_value: Optional[BaseException] = None, traceback: Optional[TracebackType] = None, ) -> None: pass httpcore-1.0.2/httpcore/_trace.py000066400000000000000000000075621452343067500170240ustar00rootroot00000000000000import inspect import logging from types import TracebackType from typing import Any, Dict, Optional, Type from ._models import Request class Trace: def __init__( self, name: str, logger: logging.Logger, request: Optional[Request] = None, kwargs: Optional[Dict[str, Any]] = None, ) -> None: self.name = name self.logger = logger self.trace_extension = ( None if request is None else request.extensions.get("trace") ) self.debug = self.logger.isEnabledFor(logging.DEBUG) self.kwargs = kwargs or {} self.return_value: Any = None self.should_trace = self.debug or self.trace_extension is not None self.prefix = self.logger.name.split(".")[-1] def trace(self, name: str, info: Dict[str, Any]) -> None: if self.trace_extension is not None: prefix_and_name = f"{self.prefix}.{name}" ret = self.trace_extension(prefix_and_name, info) if inspect.iscoroutine(ret): # pragma: no cover raise TypeError( "If you are using a synchronous interface, " "the callback of the `trace` extension should " "be a normal function instead of an asynchronous function." 
) if self.debug: if not info or "return_value" in info and info["return_value"] is None: message = name else: args = " ".join([f"{key}={value!r}" for key, value in info.items()]) message = f"{name} {args}" self.logger.debug(message) def __enter__(self) -> "Trace": if self.should_trace: info = self.kwargs self.trace(f"{self.name}.started", info) return self def __exit__( self, exc_type: Optional[Type[BaseException]] = None, exc_value: Optional[BaseException] = None, traceback: Optional[TracebackType] = None, ) -> None: if self.should_trace: if exc_value is None: info = {"return_value": self.return_value} self.trace(f"{self.name}.complete", info) else: info = {"exception": exc_value} self.trace(f"{self.name}.failed", info) async def atrace(self, name: str, info: Dict[str, Any]) -> None: if self.trace_extension is not None: prefix_and_name = f"{self.prefix}.{name}" coro = self.trace_extension(prefix_and_name, info) if not inspect.iscoroutine(coro): # pragma: no cover raise TypeError( "If you're using an asynchronous interface, " "the callback of the `trace` extension should " "be an asynchronous function rather than a normal function." ) await coro if self.debug: if not info or "return_value" in info and info["return_value"] is None: message = name else: args = " ".join([f"{key}={value!r}" for key, value in info.items()]) message = f"{name} {args}" self.logger.debug(message) async def __aenter__(self) -> "Trace": if self.should_trace: info = self.kwargs await self.atrace(f"{self.name}.started", info) return self async def __aexit__( self, exc_type: Optional[Type[BaseException]] = None, exc_value: Optional[BaseException] = None, traceback: Optional[TracebackType] = None, ) -> None: if self.should_trace: if exc_value is None: info = {"return_value": self.return_value} await self.atrace(f"{self.name}.complete", info) else: info = {"exception": exc_value} await self.atrace(f"{self.name}.failed", info) httpcore-1.0.2/httpcore/_utils.py000066400000000000000000000027651452343067500170660ustar00rootroot00000000000000import select import socket import sys import typing def is_socket_readable(sock: typing.Optional[socket.socket]) -> bool: """ Return whether a socket, as identifed by its file descriptor, is readable. "A socket is readable" means that the read buffer isn't empty, i.e. that calling .recv() on it would immediately return some data. """ # NOTE: we want check for readability without actually attempting to read, because # we don't want to block forever if it's not readable. # In the case that the socket no longer exists, or cannot return a file # descriptor, we treat it as being readable, as if it the next read operation # on it is ready to return the terminating `b""`. sock_fd = None if sock is None else sock.fileno() if sock_fd is None or sock_fd < 0: # pragma: nocover return True # The implementation below was stolen from: # https://github.com/python-trio/trio/blob/20ee2b1b7376db637435d80e266212a35837ddcc/trio/_socket.py#L471-L478 # See also: https://github.com/encode/httpcore/pull/193#issuecomment-703129316 # Use select.select on Windows, and when poll is unavailable and select.poll # everywhere else. (E.g. When eventlet is in use. 
See #327) if ( sys.platform == "win32" or getattr(select, "poll", None) is None ): # pragma: nocover rready, _, _ = select.select([sock_fd], [], [], 0) return bool(rready) p = select.poll() p.register(sock_fd, select.POLLIN) return bool(p.poll(0)) httpcore-1.0.2/httpcore/py.typed000066400000000000000000000000001452343067500166700ustar00rootroot00000000000000httpcore-1.0.2/mkdocs.yml000066400000000000000000000015051452343067500153570ustar00rootroot00000000000000site_name: HTTPCore site_description: A minimal HTTP client for Python. site_url: https://www.encode.io/httpcore/ repo_name: encode/httpcore repo_url: https://github.com/encode/httpcore/ nav: - Introduction: 'index.md' - Quickstart: 'quickstart.md' - Connection Pools: 'connection-pools.md' - Proxies: 'proxies.md' - HTTP/2: 'http2.md' - Async Support: 'async.md' - Network Backends: 'network-backends.md' - Extensions: 'extensions.md' - Logging: 'logging.md' - Exceptions: 'exceptions.md' theme: name: "material" plugins: - search - mkdocstrings: default_handler: python watch: - httpcore handlers: python: members_order: - "source" markdown_extensions: - codehilite: css_class: highlight httpcore-1.0.2/pyproject.toml000066400000000000000000000050731452343067500162740ustar00rootroot00000000000000[build-system] requires = ["hatchling", "hatch-fancy-pypi-readme"] build-backend = "hatchling.build" [project] name = "httpcore" dynamic = ["readme", "version"] description = "A minimal low-level HTTP client." license = "BSD-3-Clause" requires-python = ">=3.8" authors = [ { name = "Tom Christie", email = "tom@tomchristie.com" }, ] classifiers = [ "Development Status :: 3 - Alpha", "Environment :: Web Environment", "Framework :: AsyncIO", "Framework :: Trio", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: Internet :: WWW/HTTP", ] dependencies = [ "certifi", "h11>=0.13,<0.15", ] [project.optional-dependencies] http2 = [ "h2>=3,<5", ] socks = [ "socksio==1.*", ] trio = [ "trio>=0.22.0,<0.23.0", ] asyncio = [ "anyio>=4.0,<5.0", ] [project.urls] Documentation = "https://www.encode.io/httpcore" Homepage = "https://www.encode.io/httpcore/" Source = "https://github.com/encode/httpcore" [tool.hatch.version] path = "httpcore/__init__.py" [tool.hatch.build.targets.sdist] include = [ "/httpcore", "/CHANGELOG.md", "/README.md", "/tests" ] [tool.hatch.metadata.hooks.fancy-pypi-readme] content-type = "text/markdown" [[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]] path = "README.md" [[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]] path = "CHANGELOG.md" [tool.mypy] strict = true show_error_codes = true [[tool.mypy.overrides]] module = "tests.*" disallow_untyped_defs = false check_untyped_defs = true [[tool.mypy.overrides]] module = "h2.*" ignore_missing_imports = true [[tool.mypy.overrides]] module = "hpack.*" ignore_missing_imports = true [tool.pytest.ini_options] addopts = ["-rxXs", "--strict-config", "--strict-markers"] markers = ["copied_from(source, changes=None): mark test as copied from somewhere else, along with a description of changes made to accodomate e.g. 
our test setup"] filterwarnings = ["error"] [tool.coverage.run] omit = [ "venv/*", "httpcore/_sync/*" ] include = ["httpcore/*", "tests/*"] [tool.ruff] exclude = [ "httpcore/_sync", "tests/_sync", ] line-length = 120 select = [ "E", "F", "W", "I" ] [tool.ruff.isort] combine-as-imports = true httpcore-1.0.2/requirements.txt000066400000000000000000000007211452343067500166370ustar00rootroot00000000000000-e .[asyncio,trio,http2,socks] # Docs mkdocs==1.5.3 mkdocs-autorefs==0.5.0 mkdocs-material==9.4.7 mkdocs-material-extensions==1.3 mkdocstrings[python-legacy]==0.22.0 jinja2==3.1.2 # Packaging build==1.0.3 twine # Tests & Linting black==23.10.1 coverage[toml]==7.3.0 ruff==0.1.3 mypy==1.5.1 trio-typing==0.9.0 types-certifi==2021.10.8.3 pytest==7.4.3 pytest-httpbin==2.0.0 pytest-trio==0.7.0 werkzeug<2.1 # See: https://github.com/postmanlabs/httpbin/issues/673 httpcore-1.0.2/scripts/000077500000000000000000000000001452343067500150425ustar00rootroot00000000000000httpcore-1.0.2/scripts/build000077500000000000000000000002651452343067500160720ustar00rootroot00000000000000#!/bin/sh -e PYTHONPATH=. if [ -d 'venv' ] ; then PREFIX="venv/bin/" else PREFIX="" fi set -x ${PREFIX}python -m build ${PREFIX}twine check dist/* ${PREFIX}mkdocs build httpcore-1.0.2/scripts/check000077500000000000000000000004671452343067500160540ustar00rootroot00000000000000#!/bin/sh -e export PREFIX="" if [ -d 'venv' ] ; then export PREFIX="venv/bin/" fi export SOURCE_FILES="httpcore tests" set -x ${PREFIX}ruff check --show-source $SOURCE_FILES ${PREFIX}black --exclude '/(_sync|sync_tests)/' --check --diff $SOURCE_FILES ${PREFIX}mypy $SOURCE_FILES scripts/unasync --check httpcore-1.0.2/scripts/clean000077500000000000000000000003261452343067500160530ustar00rootroot00000000000000#!/bin/sh -e if [ -d 'dist' ] ; then rm -r dist fi if [ -d 'site' ] ; then rm -r site fi if [ -d 'htmlcov' ] ; then rm -r htmlcov fi if [ -d 'httpcore.egg-info' ] ; then rm -r httpcore.egg-info fi httpcore-1.0.2/scripts/coverage000077500000000000000000000002511452343067500165610ustar00rootroot00000000000000#!/bin/sh -e export PREFIX="" if [ -d 'venv' ] ; then export PREFIX="venv/bin/" fi set -x ${PREFIX}coverage report --show-missing --skip-covered --fail-under=100 httpcore-1.0.2/scripts/install000077500000000000000000000005661452343067500164450ustar00rootroot00000000000000#!/bin/sh -e # Use the Python executable provided from the `-p` option, or a default. [ "$1" = "-p" ] && PYTHON=$2 || PYTHON="python3" REQUIREMENTS="requirements.txt" VENV="venv" set -x if [ -z "$GITHUB_ACTIONS" ]; then "$PYTHON" -m venv "$VENV" PIP="$VENV/bin/pip" else PIP="pip" fi "$PIP" install -U pip setuptools wheel "$PIP" install -r "$REQUIREMENTS" httpcore-1.0.2/scripts/lint000077500000000000000000000006111452343067500157340ustar00rootroot00000000000000#!/bin/sh -e export PREFIX="" if [ -d 'venv' ] ; then export PREFIX="venv/bin/" fi export SOURCE_FILES="httpcore tests" set -x ${PREFIX}ruff --fix $SOURCE_FILES ${PREFIX}black --exclude '/(_sync|sync_tests)/' $SOURCE_FILES # Run unasync last because its `--check` mode is not aware of code formatters. # (This means sync code isn't prettified, and that's mostly okay.) scripts/unasync httpcore-1.0.2/scripts/publish000077500000000000000000000011351452343067500164360ustar00rootroot00000000000000#!/bin/sh -e VERSION_FILE="httpcore/__init__.py" PYTHONPATH=. if [ -d 'venv' ] ; then PREFIX="venv/bin/" else PREFIX="" fi if [ ! 
-z "$GITHUB_ACTIONS" ]; then git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com" git config --local user.name "GitHub Action" VERSION=`grep __version__ ${VERSION_FILE} | grep -o '[0-9][^"]*'` if [ "refs/tags/${VERSION}" != "${GITHUB_REF}" ] ; then echo "GitHub Ref '${GITHUB_REF}' did not match package version '${VERSION}'" exit 1 fi fi set -x ${PREFIX}twine upload dist/* ${PREFIX}mkdocs gh-deploy --force httpcore-1.0.2/scripts/test000077500000000000000000000003561452343067500157530ustar00rootroot00000000000000#!/bin/sh export PREFIX="" if [ -d 'venv' ] ; then export PREFIX="venv/bin/" fi set -ex if [ -z $GITHUB_ACTIONS ]; then scripts/check fi ${PREFIX}coverage run -m pytest if [ -z $GITHUB_ACTIONS ]; then scripts/coverage fi httpcore-1.0.2/scripts/unasync000077500000000000000000000001711452343067500164470ustar00rootroot00000000000000#!/bin/sh -e export PREFIX="" if [ -d 'venv' ] ; then export PREFIX="venv/bin/" fi ${PREFIX}python unasync.py ${@} httpcore-1.0.2/tests/000077500000000000000000000000001452343067500145155ustar00rootroot00000000000000httpcore-1.0.2/tests/__init__.py000066400000000000000000000000001452343067500166140ustar00rootroot00000000000000httpcore-1.0.2/tests/_async/000077500000000000000000000000001452343067500157715ustar00rootroot00000000000000httpcore-1.0.2/tests/_async/__init__.py000066400000000000000000000000001452343067500200700ustar00rootroot00000000000000httpcore-1.0.2/tests/_async/test_connection.py000066400000000000000000000310051452343067500215400ustar00rootroot00000000000000import ssl import typing import hpack import hyperframe.frame import pytest from httpcore import ( SOCKET_OPTION, AsyncHTTPConnection, AsyncMockBackend, AsyncMockStream, AsyncNetworkStream, ConnectError, ConnectionNotAvailable, Origin, RemoteProtocolError, WriteError, ) @pytest.mark.anyio async def test_http_connection(): origin = Origin(b"https", b"example.com", 443) network_backend = AsyncMockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) async with AsyncHTTPConnection( origin=origin, network_backend=network_backend, keepalive_expiry=5.0 ) as conn: assert not conn.is_idle() assert not conn.is_closed() assert not conn.is_available() assert not conn.has_expired() assert repr(conn) == "" async with conn.stream("GET", "https://example.com/") as response: assert ( repr(conn) == "" ) await response.aread() assert response.status == 200 assert response.content == b"Hello, world!" assert conn.is_idle() assert not conn.is_closed() assert conn.is_available() assert not conn.has_expired() assert ( repr(conn) == "" ) @pytest.mark.anyio async def test_concurrent_requests_not_available_on_http11_connections(): """ Attempting to issue a request against an already active HTTP/1.1 connection will raise a `ConnectionNotAvailable` exception. 
""" origin = Origin(b"https", b"example.com", 443) network_backend = AsyncMockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) async with AsyncHTTPConnection( origin=origin, network_backend=network_backend, keepalive_expiry=5.0 ) as conn: async with conn.stream("GET", "https://example.com/"): with pytest.raises(ConnectionNotAvailable): await conn.request("GET", "https://example.com/") @pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") @pytest.mark.anyio async def test_write_error_with_response_sent(): """ If a server half-closes the connection while the client is sending the request, it may still send a response. In this case the client should successfully read and return the response. See also the `test_write_error_without_response_sent` test above. """ class ErrorOnRequestTooLargeStream(AsyncMockStream): def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: super().__init__(buffer, http2) self.count = 0 async def write( self, buffer: bytes, timeout: typing.Optional[float] = None ) -> None: self.count += len(buffer) if self.count > 1_000_000: raise WriteError() class ErrorOnRequestTooLarge(AsyncMockBackend): async def connect_tcp( self, host: str, port: int, timeout: typing.Optional[float] = None, local_address: typing.Optional[str] = None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> AsyncMockStream: return ErrorOnRequestTooLargeStream(list(self._buffer), http2=self._http2) origin = Origin(b"https", b"example.com", 443) network_backend = ErrorOnRequestTooLarge( [ b"HTTP/1.1 413 Payload Too Large\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 37\r\n", b"\r\n", b"Request body exceeded 1,000,000 bytes", ] ) async with AsyncHTTPConnection( origin=origin, network_backend=network_backend, keepalive_expiry=5.0 ) as conn: content = b"x" * 10_000_000 response = await conn.request("POST", "https://example.com/", content=content) assert response.status == 413 assert response.content == b"Request body exceeded 1,000,000 bytes" @pytest.mark.anyio @pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") async def test_write_error_without_response_sent(): """ If a server fully closes the connection while the client is sending the request, then client should raise an error. See also the `test_write_error_with_response_sent` test above. 
""" class ErrorOnRequestTooLargeStream(AsyncMockStream): def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: super().__init__(buffer, http2) self.count = 0 async def write( self, buffer: bytes, timeout: typing.Optional[float] = None ) -> None: self.count += len(buffer) if self.count > 1_000_000: raise WriteError() class ErrorOnRequestTooLarge(AsyncMockBackend): async def connect_tcp( self, host: str, port: int, timeout: typing.Optional[float] = None, local_address: typing.Optional[str] = None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> AsyncMockStream: return ErrorOnRequestTooLargeStream(list(self._buffer), http2=self._http2) origin = Origin(b"https", b"example.com", 443) network_backend = ErrorOnRequestTooLarge([]) async with AsyncHTTPConnection( origin=origin, network_backend=network_backend, keepalive_expiry=5.0 ) as conn: content = b"x" * 10_000_000 with pytest.raises(RemoteProtocolError) as exc_info: await conn.request("POST", "https://example.com/", content=content) assert str(exc_info.value) == "Server disconnected without sending a response." @pytest.mark.anyio @pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") async def test_http2_connection(): origin = Origin(b"https", b"example.com", 443) network_backend = AsyncMockBackend( [ hyperframe.frame.SettingsFrame().serialize(), hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=1, data=b"Hello, world!", flags=["END_STREAM"] ).serialize(), ], http2=True, ) async with AsyncHTTPConnection( origin=origin, network_backend=network_backend, http2=True ) as conn: response = await conn.request("GET", "https://example.com/") assert response.status == 200 assert response.content == b"Hello, world!" assert response.extensions["http_version"] == b"HTTP/2" @pytest.mark.anyio async def test_request_to_incorrect_origin(): """ A connection can only send requests whichever origin it is connected to. 
""" origin = Origin(b"https", b"example.com", 443) network_backend = AsyncMockBackend([]) async with AsyncHTTPConnection( origin=origin, network_backend=network_backend ) as conn: with pytest.raises(RuntimeError): await conn.request("GET", "https://other.com/") class NeedsRetryBackend(AsyncMockBackend): def __init__( self, buffer: typing.List[bytes], http2: bool = False, connect_tcp_failures: int = 2, start_tls_failures: int = 0, ) -> None: self._connect_tcp_failures = connect_tcp_failures self._start_tls_failures = start_tls_failures super().__init__(buffer, http2) async def connect_tcp( self, host: str, port: int, timeout: typing.Optional[float] = None, local_address: typing.Optional[str] = None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> AsyncNetworkStream: if self._connect_tcp_failures > 0: self._connect_tcp_failures -= 1 raise ConnectError() stream = await super().connect_tcp( host, port, timeout=timeout, local_address=local_address ) return self._NeedsRetryAsyncNetworkStream(self, stream) class _NeedsRetryAsyncNetworkStream(AsyncNetworkStream): def __init__( self, backend: "NeedsRetryBackend", stream: AsyncNetworkStream ) -> None: self._backend = backend self._stream = stream async def read( self, max_bytes: int, timeout: typing.Optional[float] = None ) -> bytes: return await self._stream.read(max_bytes, timeout) async def write( self, buffer: bytes, timeout: typing.Optional[float] = None ) -> None: await self._stream.write(buffer, timeout) async def aclose(self) -> None: await self._stream.aclose() async def start_tls( self, ssl_context: ssl.SSLContext, server_hostname: typing.Optional[str] = None, timeout: typing.Optional[float] = None, ) -> "AsyncNetworkStream": if self._backend._start_tls_failures > 0: self._backend._start_tls_failures -= 1 raise ConnectError() stream = await self._stream.start_tls(ssl_context, server_hostname, timeout) return self._backend._NeedsRetryAsyncNetworkStream(self._backend, stream) def get_extra_info(self, info: str) -> typing.Any: return self._stream.get_extra_info(info) @pytest.mark.anyio async def test_connection_retries(): origin = Origin(b"https", b"example.com", 443) content = [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] network_backend = NeedsRetryBackend(content) async with AsyncHTTPConnection( origin=origin, network_backend=network_backend, retries=3 ) as conn: response = await conn.request("GET", "https://example.com/") assert response.status == 200 network_backend = NeedsRetryBackend(content) async with AsyncHTTPConnection( origin=origin, network_backend=network_backend, ) as conn: with pytest.raises(ConnectError): await conn.request("GET", "https://example.com/") @pytest.mark.anyio async def test_connection_retries_tls(): origin = Origin(b"https", b"example.com", 443) content = [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] network_backend = NeedsRetryBackend( content, connect_tcp_failures=0, start_tls_failures=2 ) async with AsyncHTTPConnection( origin=origin, network_backend=network_backend, retries=3 ) as conn: response = await conn.request("GET", "https://example.com/") assert response.status == 200 network_backend = NeedsRetryBackend( content, connect_tcp_failures=0, start_tls_failures=2 ) async with AsyncHTTPConnection( origin=origin, network_backend=network_backend, ) as conn: with pytest.raises(ConnectError): await conn.request("GET", "https://example.com/") 
@pytest.mark.anyio async def test_uds_connections(): # We're not actually testing Unix Domain Sockets here, because we're just # using a mock backend, but at least we're covering the UDS codepath # in `connection.py` which we may as well do. origin = Origin(b"https", b"example.com", 443) network_backend = AsyncMockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) async with AsyncHTTPConnection( origin=origin, network_backend=network_backend, uds="/mock/example" ) as conn: response = await conn.request("GET", "https://example.com/") assert response.status == 200 httpcore-1.0.2/tests/_async/test_connection_pool.py000066400000000000000000000670761452343067500226120ustar00rootroot00000000000000import logging import typing import hpack import hyperframe.frame import pytest import trio as concurrency import httpcore @pytest.mark.anyio async def test_connection_pool_with_keepalive(): """ By default HTTP/1.1 requests should be returned to the connection pool. """ network_backend = httpcore.AsyncMockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) async with httpcore.AsyncConnectionPool( network_backend=network_backend, ) as pool: # Sending an intial request, which once complete will return to the pool, IDLE. async with pool.stream("GET", "https://example.com/") as response: info = [repr(c) for c in pool.connections] assert info == [ "" ] await response.aread() assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in pool.connections] assert info == [ "" ] # Sending a second request to the same origin will reuse the existing IDLE connection. async with pool.stream("GET", "https://example.com/") as response: info = [repr(c) for c in pool.connections] assert info == [ "" ] await response.aread() assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in pool.connections] assert info == [ "" ] # Sending a request to a different origin will not reuse the existing IDLE connection. async with pool.stream("GET", "http://example.com/") as response: info = [repr(c) for c in pool.connections] assert info == [ "", "", ] await response.aread() assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in pool.connections] assert info == [ "", "", ] @pytest.mark.anyio async def test_connection_pool_with_close(): """ HTTP/1.1 requests that include a 'Connection: Close' header should not be returned to the connection pool. """ network_backend = httpcore.AsyncMockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) async with httpcore.AsyncConnectionPool(network_backend=network_backend) as pool: # Sending an intial request, which once complete will not return to the pool. async with pool.stream( "GET", "https://example.com/", headers={"Connection": "close"} ) as response: info = [repr(c) for c in pool.connections] assert info == [ "" ] await response.aread() assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in pool.connections] assert info == [] @pytest.mark.anyio async def test_connection_pool_with_http2(): """ Test a connection pool with HTTP/2 requests. 
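    The mock buffer plays the server side of the exchange: a SETTINGS frame
    to complete the connection preamble, then HEADERS/DATA frames for stream
    1 and stream 3 (client-initiated HTTP/2 streams always use odd IDs).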
""" network_backend = httpcore.AsyncMockBackend( buffer=[ hyperframe.frame.SettingsFrame().serialize(), hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=1, data=b"Hello, world!", flags=["END_STREAM"] ).serialize(), hyperframe.frame.HeadersFrame( stream_id=3, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=3, data=b"Hello, world!", flags=["END_STREAM"] ).serialize(), ], http2=True, ) async with httpcore.AsyncConnectionPool( network_backend=network_backend, ) as pool: # Sending an intial request, which once complete will return to the pool, IDLE. response = await pool.request("GET", "https://example.com/") assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in pool.connections] assert info == [ "" ] # Sending a second request to the same origin will reuse the existing IDLE connection. response = await pool.request("GET", "https://example.com/") assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in pool.connections] assert info == [ "" ] @pytest.mark.anyio async def test_connection_pool_with_http2_goaway(): """ Test a connection pool with HTTP/2 requests, that cleanly disconnects with a GoAway frame after the first request. """ network_backend = httpcore.AsyncMockBackend( buffer=[ hyperframe.frame.SettingsFrame().serialize(), hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=1, data=b"Hello, world!", flags=["END_STREAM"] ).serialize(), hyperframe.frame.GoAwayFrame( stream_id=0, error_code=0, last_stream_id=1 ).serialize(), b"", ], http2=True, ) async with httpcore.AsyncConnectionPool( network_backend=network_backend, ) as pool: # Sending an intial request, which once complete will return to the pool, IDLE. response = await pool.request("GET", "https://example.com/") assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in pool.connections] assert info == [ "" ] # Sending a second request to the same origin will require a new connection. response = await pool.request("GET", "https://example.com/") assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in pool.connections] assert info == [ "", "", ] @pytest.mark.anyio async def test_trace_request(): """ The 'trace' request extension allows for a callback function to inspect the internal events that occur while sending a request. 
""" network_backend = httpcore.AsyncMockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) called = [] async def trace(name, kwargs): called.append(name) async with httpcore.AsyncConnectionPool(network_backend=network_backend) as pool: await pool.request("GET", "https://example.com/", extensions={"trace": trace}) assert called == [ "connection.connect_tcp.started", "connection.connect_tcp.complete", "connection.start_tls.started", "connection.start_tls.complete", "http11.send_request_headers.started", "http11.send_request_headers.complete", "http11.send_request_body.started", "http11.send_request_body.complete", "http11.receive_response_headers.started", "http11.receive_response_headers.complete", "http11.receive_response_body.started", "http11.receive_response_body.complete", "http11.response_closed.started", "http11.response_closed.complete", ] @pytest.mark.anyio async def test_debug_request(caplog): """ The 'trace' request extension allows for a callback function to inspect the internal events that occur while sending a request. """ caplog.set_level(logging.DEBUG) network_backend = httpcore.AsyncMockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) async with httpcore.AsyncConnectionPool(network_backend=network_backend) as pool: await pool.request("GET", "http://example.com/") assert caplog.record_tuples == [ ( "httpcore.connection", logging.DEBUG, "connect_tcp.started host='example.com' port=80 local_address=None timeout=None socket_options=None", ), ( "httpcore.connection", logging.DEBUG, "connect_tcp.complete return_value=", ), ( "httpcore.http11", logging.DEBUG, "send_request_headers.started request=", ), ("httpcore.http11", logging.DEBUG, "send_request_headers.complete"), ( "httpcore.http11", logging.DEBUG, "send_request_body.started request=", ), ("httpcore.http11", logging.DEBUG, "send_request_body.complete"), ( "httpcore.http11", logging.DEBUG, "receive_response_headers.started request=", ), ( "httpcore.http11", logging.DEBUG, "receive_response_headers.complete return_value=" "(b'HTTP/1.1', 200, b'OK', [(b'Content-Type', b'plain/text'), (b'Content-Length', b'13')])", ), ( "httpcore.http11", logging.DEBUG, "receive_response_body.started request=", ), ("httpcore.http11", logging.DEBUG, "receive_response_body.complete"), ("httpcore.http11", logging.DEBUG, "response_closed.started"), ("httpcore.http11", logging.DEBUG, "response_closed.complete"), ("httpcore.connection", logging.DEBUG, "close.started"), ("httpcore.connection", logging.DEBUG, "close.complete"), ] @pytest.mark.anyio async def test_connection_pool_with_http_exception(): """ HTTP/1.1 requests that result in an exception during the connection should not be returned to the connection pool. """ network_backend = httpcore.AsyncMockBackend([b"Wait, this isn't valid HTTP!"]) called = [] async def trace(name, kwargs): called.append(name) async with httpcore.AsyncConnectionPool(network_backend=network_backend) as pool: # Sending an initial request, which once complete will not return to the pool. 
with pytest.raises(Exception): await pool.request( "GET", "https://example.com/", extensions={"trace": trace} ) info = [repr(c) for c in pool.connections] assert info == [] assert called == [ "connection.connect_tcp.started", "connection.connect_tcp.complete", "connection.start_tls.started", "connection.start_tls.complete", "http11.send_request_headers.started", "http11.send_request_headers.complete", "http11.send_request_body.started", "http11.send_request_body.complete", "http11.receive_response_headers.started", "http11.receive_response_headers.failed", "http11.response_closed.started", "http11.response_closed.complete", ] @pytest.mark.anyio async def test_connection_pool_with_connect_exception(): """ HTTP/1.1 requests that result in an exception during connection should not be returned to the connection pool. """ class FailedConnectBackend(httpcore.AsyncMockBackend): async def connect_tcp( self, host: str, port: int, timeout: typing.Optional[float] = None, local_address: typing.Optional[str] = None, socket_options: typing.Optional[ typing.Iterable[httpcore.SOCKET_OPTION] ] = None, ) -> httpcore.AsyncNetworkStream: raise httpcore.ConnectError("Could not connect") network_backend = FailedConnectBackend([]) called = [] async def trace(name, kwargs): called.append(name) async with httpcore.AsyncConnectionPool(network_backend=network_backend) as pool: # Sending an initial request, which once complete will not return to the pool. with pytest.raises(Exception): await pool.request( "GET", "https://example.com/", extensions={"trace": trace} ) info = [repr(c) for c in pool.connections] assert info == [] assert called == [ "connection.connect_tcp.started", "connection.connect_tcp.failed", ] @pytest.mark.anyio async def test_connection_pool_with_immediate_expiry(): """ Connection pools with keepalive_expiry=0.0 should immediately expire keep alive connections. """ network_backend = httpcore.AsyncMockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) async with httpcore.AsyncConnectionPool( keepalive_expiry=0.0, network_backend=network_backend, ) as pool: # Sending an intial request, which once complete will not return to the pool. async with pool.stream("GET", "https://example.com/") as response: info = [repr(c) for c in pool.connections] assert info == [ "" ] await response.aread() assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in pool.connections] assert info == [] @pytest.mark.anyio async def test_connection_pool_with_no_keepalive_connections_allowed(): """ When 'max_keepalive_connections=0' is used, IDLE connections should not be returned to the pool. """ network_backend = httpcore.AsyncMockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) async with httpcore.AsyncConnectionPool( max_keepalive_connections=0, network_backend=network_backend ) as pool: # Sending an intial request, which once complete will not return to the pool. async with pool.stream("GET", "https://example.com/") as response: info = [repr(c) for c in pool.connections] assert info == [ "" ] await response.aread() assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in pool.connections] assert info == [] @pytest.mark.trio async def test_connection_pool_concurrency(): """ HTTP/1.1 requests made in concurrency must not ever exceed the maximum number of allowable connection in the pool. 
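    With max_connections=1, every snapshot of pool.connections taken while a
    response is streaming should therefore contain exactly one entry.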
""" network_backend = httpcore.AsyncMockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) async def fetch(pool, domain, info_list): async with pool.stream("GET", f"http://{domain}/") as response: info = [repr(c) for c in pool.connections] info_list.append(info) await response.aread() async with httpcore.AsyncConnectionPool( max_connections=1, network_backend=network_backend ) as pool: info_list: typing.List[str] = [] async with concurrency.open_nursery() as nursery: for domain in ["a.com", "b.com", "c.com", "d.com", "e.com"]: nursery.start_soon(fetch, pool, domain, info_list) for item in info_list: # Check that each time we inspected the connection pool, only a # single connection was established at any one time. assert len(item) == 1 # Each connection was to a different host, and only sent a single # request on that connection. assert item[0] in [ "", "", "", "", "", ] @pytest.mark.trio async def test_connection_pool_concurrency_same_domain_closing(): """ HTTP/1.1 requests made in concurrency must not ever exceed the maximum number of allowable connection in the pool. """ network_backend = httpcore.AsyncMockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"Connection: close\r\n", b"\r\n", b"Hello, world!", ] ) async def fetch(pool, domain, info_list): async with pool.stream("GET", f"https://{domain}/") as response: info = [repr(c) for c in pool.connections] info_list.append(info) await response.aread() async with httpcore.AsyncConnectionPool( max_connections=1, network_backend=network_backend, http2=True ) as pool: info_list: typing.List[str] = [] async with concurrency.open_nursery() as nursery: for domain in ["a.com", "a.com", "a.com", "a.com", "a.com"]: nursery.start_soon(fetch, pool, domain, info_list) for item in info_list: # Check that each time we inspected the connection pool, only a # single connection was established at any one time. assert len(item) == 1 # Only a single request was sent on each connection. assert ( item[0] == "" ) @pytest.mark.trio async def test_connection_pool_concurrency_same_domain_keepalive(): """ HTTP/1.1 requests made in concurrency must not ever exceed the maximum number of allowable connection in the pool. """ network_backend = httpcore.AsyncMockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] * 5 ) async def fetch(pool, domain, info_list): async with pool.stream("GET", f"https://{domain}/") as response: info = [repr(c) for c in pool.connections] info_list.append(info) await response.aread() async with httpcore.AsyncConnectionPool( max_connections=1, network_backend=network_backend, http2=True ) as pool: info_list: typing.List[str] = [] async with concurrency.open_nursery() as nursery: for domain in ["a.com", "a.com", "a.com", "a.com", "a.com"]: nursery.start_soon(fetch, pool, domain, info_list) for item in info_list: # Check that each time we inspected the connection pool, only a # single connection was established at any one time. assert len(item) == 1 # The connection sent multiple requests. 
assert item[0] in [ "", "", "", "", "", ] @pytest.mark.anyio async def test_unsupported_protocol(): async with httpcore.AsyncConnectionPool() as pool: with pytest.raises(httpcore.UnsupportedProtocol): await pool.request("GET", "ftp://www.example.com/") with pytest.raises(httpcore.UnsupportedProtocol): await pool.request("GET", "://www.example.com/") @pytest.mark.anyio async def test_connection_pool_closed_while_request_in_flight(): """ Closing a connection pool while a request/response is still in-flight should raise an error. """ network_backend = httpcore.AsyncMockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) async with httpcore.AsyncConnectionPool( network_backend=network_backend, ) as pool: # Send a request, and then close the connection pool while the # response has not yet been streamed. async with pool.stream("GET", "https://example.com/") as response: await pool.aclose() with pytest.raises(httpcore.ReadError): await response.aread() @pytest.mark.anyio async def test_connection_pool_timeout(): """ Ensure that exceeding max_connections can cause a request to timeout. """ network_backend = httpcore.AsyncMockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) async with httpcore.AsyncConnectionPool( network_backend=network_backend, max_connections=1 ) as pool: # Send a request to a pool that is configured to only support a single # connection, and then ensure that a second concurrent request # fails with a timeout. async with pool.stream("GET", "https://example.com/"): with pytest.raises(httpcore.PoolTimeout): extensions = {"timeout": {"pool": 0.0001}} await pool.request("GET", "https://example.com/", extensions=extensions) @pytest.mark.anyio async def test_connection_pool_timeout_zero(): """ A pool timeout of 0 shouldn't raise a PoolTimeout if there's no need to wait on a new connection. """ network_backend = httpcore.AsyncMockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) # Use a pool timeout of zero. extensions = {"timeout": {"pool": 0}} # A connection pool configured to allow only one connection at a time. async with httpcore.AsyncConnectionPool( network_backend=network_backend, max_connections=1 ) as pool: # Two consecutive requests with a pool timeout of zero. # Both succeed without raising a timeout. response = await pool.request( "GET", "https://example.com/", extensions=extensions ) assert response.status == 200 assert response.content == b"Hello, world!" response = await pool.request( "GET", "https://example.com/", extensions=extensions ) assert response.status == 200 assert response.content == b"Hello, world!" # A connection pool configured to allow only one connection at a time. async with httpcore.AsyncConnectionPool( network_backend=network_backend, max_connections=1 ) as pool: # Two concurrent requests with a pool timeout of zero. # Only the first will succeed without raising a timeout. async with pool.stream( "GET", "https://example.com/", extensions=extensions ) as response: # The first response hasn't yet completed. with pytest.raises(httpcore.PoolTimeout): # So a pool timeout occurs. await pool.request("GET", "https://example.com/", extensions=extensions) # The first response now completes. 
await response.aread() assert response.status == 200 assert response.content == b"Hello, world!" @pytest.mark.anyio async def test_http11_upgrade_connection(): """ HTTP "101 Switching Protocols" indicates an upgraded connection. We should return the response, so that the network stream may be used for the upgraded connection. https://httpwg.org/specs/rfc9110.html#status.101 https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/101 """ network_backend = httpcore.AsyncMockBackend( [ b"HTTP/1.1 101 Switching Protocols\r\n", b"Connection: upgrade\r\n", b"Upgrade: custom\r\n", b"\r\n", b"...", ] ) async with httpcore.AsyncConnectionPool( network_backend=network_backend, max_connections=1 ) as pool: async with pool.stream( "GET", "wss://example.com/", headers={"Connection": "upgrade", "Upgrade": "custom"}, ) as response: assert response.status == 101 network_stream = response.extensions["network_stream"] content = await network_stream.read(max_bytes=1024) assert content == b"..." httpcore-1.0.2/tests/_async/test_http11.py000066400000000000000000000255301452343067500205300ustar00rootroot00000000000000import pytest import httpcore @pytest.mark.anyio async def test_http11_connection(): origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.AsyncMockStream( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) async with httpcore.AsyncHTTP11Connection( origin=origin, stream=stream, keepalive_expiry=5.0 ) as conn: response = await conn.request("GET", "https://example.com/") assert response.status == 200 assert response.content == b"Hello, world!" assert conn.is_idle() assert not conn.is_closed() assert conn.is_available() assert not conn.has_expired() assert ( repr(conn) == "" ) @pytest.mark.anyio async def test_http11_connection_unread_response(): """ If the client releases the response without reading it to termination, then the connection will not be reusable. """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.AsyncMockStream( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) async with httpcore.AsyncHTTP11Connection(origin=origin, stream=stream) as conn: async with conn.stream("GET", "https://example.com/") as response: assert response.status == 200 assert not conn.is_idle() assert conn.is_closed() assert not conn.is_available() assert not conn.has_expired() assert ( repr(conn) == "" ) @pytest.mark.anyio async def test_http11_connection_with_remote_protocol_error(): """ If a remote protocol error occurs, then no response will be returned, and the connection will not be reusable. """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.AsyncMockStream([b"Wait, this isn't valid HTTP!", b""]) async with httpcore.AsyncHTTP11Connection(origin=origin, stream=stream) as conn: with pytest.raises(httpcore.RemoteProtocolError): await conn.request("GET", "https://example.com/") assert not conn.is_idle() assert conn.is_closed() assert not conn.is_available() assert not conn.has_expired() assert ( repr(conn) == "" ) @pytest.mark.anyio async def test_http11_connection_with_incomplete_response(): """ We should be gracefully handling the case where the connection ends prematurely. 
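    Here the mock stream promises a Content-Length of 13 bytes but closes
    after only 10 ("Hello, wor"), so reading the body raises
    RemoteProtocolError rather than returning a truncated response.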
""" origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.AsyncMockStream( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, wor", ] ) async with httpcore.AsyncHTTP11Connection(origin=origin, stream=stream) as conn: with pytest.raises(httpcore.RemoteProtocolError): await conn.request("GET", "https://example.com/") assert not conn.is_idle() assert conn.is_closed() assert not conn.is_available() assert not conn.has_expired() assert ( repr(conn) == "" ) @pytest.mark.anyio async def test_http11_connection_with_local_protocol_error(): """ If a local protocol error occurs, then no response will be returned, and the connection will not be reusable. """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.AsyncMockStream( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) async with httpcore.AsyncHTTP11Connection(origin=origin, stream=stream) as conn: with pytest.raises(httpcore.LocalProtocolError) as exc_info: await conn.request("GET", "https://example.com/", headers={"Host": "\0"}) assert str(exc_info.value) == "Illegal header value b'\\x00'" assert not conn.is_idle() assert conn.is_closed() assert not conn.is_available() assert not conn.has_expired() assert ( repr(conn) == "" ) @pytest.mark.anyio async def test_http11_connection_handles_one_active_request(): """ Attempting to send a request while one is already in-flight will raise a ConnectionNotAvailable exception. """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.AsyncMockStream( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) async with httpcore.AsyncHTTP11Connection(origin=origin, stream=stream) as conn: async with conn.stream("GET", "https://example.com/"): with pytest.raises(httpcore.ConnectionNotAvailable): await conn.request("GET", "https://example.com/") @pytest.mark.anyio async def test_http11_connection_attempt_close(): """ A connection can only be closed when it is idle. """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.AsyncMockStream( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) async with httpcore.AsyncHTTP11Connection(origin=origin, stream=stream) as conn: async with conn.stream("GET", "https://example.com/") as response: await response.aread() assert response.status == 200 assert response.content == b"Hello, world!" @pytest.mark.anyio async def test_http11_request_to_incorrect_origin(): """ A connection can only send requests to whichever origin it is connected to. """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.AsyncMockStream([]) async with httpcore.AsyncHTTP11Connection(origin=origin, stream=stream) as conn: with pytest.raises(RuntimeError): await conn.request("GET", "https://other.com/") @pytest.mark.anyio async def test_http11_expect_continue(): """ HTTP "100 Continue" is an interim response. We simply ignore it and return the final response. 
https://httpwg.org/specs/rfc9110.html#status.100 https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/100 """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.AsyncMockStream( [ b"HTTP/1.1 100 Continue\r\n", b"\r\n", b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) async with httpcore.AsyncHTTP11Connection( origin=origin, stream=stream, keepalive_expiry=5.0 ) as conn: response = await conn.request( "GET", "https://example.com/", headers={"Expect": "continue"}, ) assert response.status == 200 assert response.content == b"Hello, world!" @pytest.mark.anyio async def test_http11_upgrade_connection(): """ HTTP "101 Switching Protocols" indicates an upgraded connection. We should return the response, so that the network stream may be used for the upgraded connection. https://httpwg.org/specs/rfc9110.html#status.101 https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/101 """ origin = httpcore.Origin(b"wss", b"example.com", 443) stream = httpcore.AsyncMockStream( [ b"HTTP/1.1 101 Switching Protocols\r\n", b"Connection: upgrade\r\n", b"Upgrade: custom\r\n", b"\r\n", b"...", ] ) async with httpcore.AsyncHTTP11Connection( origin=origin, stream=stream, keepalive_expiry=5.0 ) as conn: async with conn.stream( "GET", "wss://example.com/", headers={"Connection": "upgrade", "Upgrade": "custom"}, ) as response: assert response.status == 101 network_stream = response.extensions["network_stream"] content = await network_stream.read(max_bytes=1024) assert content == b"..." @pytest.mark.anyio async def test_http11_early_hints(): """ HTTP "103 Early Hints" is an interim response. We simply ignore it and return the final response. https://datatracker.ietf.org/doc/rfc8297/ """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.AsyncMockStream( [ b"HTTP/1.1 103 Early Hints\r\n", b"Link: ; rel=preload; as=style\r\n", b"Link: ; rel=preload; as=style\r\n", b"\r\n", b"HTTP/1.1 200 OK\r\n", b"Content-Type: text/html; charset=utf-8\r\n", b"Content-Length: 30\r\n", b"Link: ; rel=preload; as=style\r\n", b"Link: ; rel=preload; as=script\r\n", b"\r\n", b"Hello, world! ...", ] ) async with httpcore.AsyncHTTP11Connection( origin=origin, stream=stream, keepalive_expiry=5.0 ) as conn: response = await conn.request( "GET", "https://example.com/", headers={"Expect": "continue"}, ) assert response.status == 200 assert response.content == b"Hello, world! ..." @pytest.mark.anyio async def test_http11_header_sub_100kb(): """ A connection should be able to handle a http header size up to 100kB. 
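    The byte counts noted in the mock stream below total exactly 100kB
    (102,400 bytes). The precise cut-off comes from the parser configuration
    of the underlying h11 library, so treat it as an implementation detail.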
""" origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.AsyncMockStream( [ b"HTTP/1.1 200 OK\r\n", # 17 b"Content-Type: plain/text\r\n", # 43 b"Cookie: " + b"x" * (100 * 1024 - 72) + b"\r\n", # 102381 b"Content-Length: 0\r\n", # 102400 b"\r\n", b"", ] ) async with httpcore.AsyncHTTP11Connection( origin=origin, stream=stream, keepalive_expiry=5.0 ) as conn: response = await conn.request("GET", "https://example.com/") assert response.status == 200 assert response.content == b"" httpcore-1.0.2/tests/_async/test_http2.py000066400000000000000000000342511452343067500204500ustar00rootroot00000000000000import hpack import hyperframe.frame import pytest import httpcore @pytest.mark.anyio async def test_http2_connection(): origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.AsyncMockStream( [ hyperframe.frame.SettingsFrame().serialize(), hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=1, data=b"Hello, world!", flags=["END_STREAM"] ).serialize(), ] ) async with httpcore.AsyncHTTP2Connection( origin=origin, stream=stream, keepalive_expiry=5.0 ) as conn: response = await conn.request("GET", "https://example.com/") assert response.status == 200 assert response.content == b"Hello, world!" assert conn.is_idle() assert conn.is_available() assert not conn.is_closed() assert not conn.has_expired() assert ( conn.info() == "'https://example.com:443', HTTP/2, IDLE, Request Count: 1" ) assert ( repr(conn) == "" ) @pytest.mark.anyio async def test_http2_connection_closed(): origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.AsyncMockStream( [ hyperframe.frame.SettingsFrame().serialize(), hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=1, data=b"Hello, world!", flags=["END_STREAM"] ).serialize(), # Connection is closed after the first response hyperframe.frame.GoAwayFrame( stream_id=0, error_code=0, last_stream_id=1 ).serialize(), ] ) async with httpcore.AsyncHTTP2Connection( origin=origin, stream=stream, keepalive_expiry=5.0 ) as conn: await conn.request("GET", "https://example.com/") with pytest.raises(httpcore.ConnectionNotAvailable): await conn.request("GET", "https://example.com/") assert not conn.is_available() @pytest.mark.anyio async def test_http2_connection_post_request(): origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.AsyncMockStream( [ hyperframe.frame.SettingsFrame().serialize(), hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=1, data=b"Hello, world!", flags=["END_STREAM"] ).serialize(), ] ) async with httpcore.AsyncHTTP2Connection(origin=origin, stream=stream) as conn: response = await conn.request( "POST", "https://example.com/", headers={b"content-length": b"17"}, content=b'{"data": "upload"}', ) assert response.status == 200 assert response.content == b"Hello, world!" @pytest.mark.anyio async def test_http2_connection_with_remote_protocol_error(): """ If a remote protocol error occurs, then no response will be returned, and the connection will not be reusable. 
""" origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.AsyncMockStream([b"Wait, this isn't valid HTTP!", b""]) async with httpcore.AsyncHTTP2Connection(origin=origin, stream=stream) as conn: with pytest.raises(httpcore.RemoteProtocolError): await conn.request("GET", "https://example.com/") @pytest.mark.anyio async def test_http2_connection_with_rst_stream(): """ If a stream reset occurs, then no response will be returned, but the connection will remain reusable for other requests. """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.AsyncMockStream( [ hyperframe.frame.SettingsFrame().serialize(), hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), # Stream is closed midway through the first response... hyperframe.frame.RstStreamFrame(stream_id=1, error_code=8).serialize(), # ...Which doesn't prevent the second response. hyperframe.frame.HeadersFrame( stream_id=3, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=3, data=b"Hello, world!", flags=["END_STREAM"] ).serialize(), b"", ] ) async with httpcore.AsyncHTTP2Connection(origin=origin, stream=stream) as conn: with pytest.raises(httpcore.RemoteProtocolError): await conn.request("GET", "https://example.com/") response = await conn.request("GET", "https://example.com/") assert response.status == 200 @pytest.mark.anyio async def test_http2_connection_with_goaway(): """ If a GoAway frame occurs, then no response will be returned, and the connection will not be reusable for other requests. """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.AsyncMockStream( [ hyperframe.frame.SettingsFrame().serialize(), hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), # Connection is closed midway through the first response... hyperframe.frame.GoAwayFrame(stream_id=0, error_code=0).serialize(), # ...We'll never get to this second response. hyperframe.frame.HeadersFrame( stream_id=3, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=3, data=b"Hello, world!", flags=["END_STREAM"] ).serialize(), b"", ] ) async with httpcore.AsyncHTTP2Connection(origin=origin, stream=stream) as conn: # The initial request has been closed midway, with an unrecoverable error. with pytest.raises(httpcore.RemoteProtocolError): await conn.request("GET", "https://example.com/") # The second request can receive a graceful `ConnectionNotAvailable`, # and may be retried on a new connection. 
with pytest.raises(httpcore.ConnectionNotAvailable): await conn.request("GET", "https://example.com/") @pytest.mark.anyio async def test_http2_connection_with_flow_control(): origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.AsyncMockStream( [ hyperframe.frame.SettingsFrame().serialize(), # Available flow: 65,535 hyperframe.frame.WindowUpdateFrame( stream_id=0, window_increment=10_000 ).serialize(), hyperframe.frame.WindowUpdateFrame( stream_id=1, window_increment=10_000 ).serialize(), # Available flow: 75,535 hyperframe.frame.WindowUpdateFrame( stream_id=0, window_increment=10_000 ).serialize(), hyperframe.frame.WindowUpdateFrame( stream_id=1, window_increment=10_000 ).serialize(), # Available flow: 85,535 hyperframe.frame.WindowUpdateFrame( stream_id=0, window_increment=10_000 ).serialize(), hyperframe.frame.WindowUpdateFrame( stream_id=1, window_increment=10_000 ).serialize(), # Available flow: 95,535 hyperframe.frame.WindowUpdateFrame( stream_id=0, window_increment=10_000 ).serialize(), hyperframe.frame.WindowUpdateFrame( stream_id=1, window_increment=10_000 ).serialize(), # Available flow: 105,535 hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=1, data=b"100,000 bytes received", flags=["END_STREAM"] ).serialize(), ] ) async with httpcore.AsyncHTTP2Connection(origin=origin, stream=stream) as conn: response = await conn.request( "POST", "https://example.com/", content=b"x" * 100_000, ) assert response.status == 200 assert response.content == b"100,000 bytes received" @pytest.mark.anyio async def test_http2_connection_attempt_close(): """ A connection can only be closed when it is idle. """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.AsyncMockStream( [ hyperframe.frame.SettingsFrame().serialize(), hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=1, data=b"Hello, world!", flags=["END_STREAM"] ).serialize(), ] ) async with httpcore.AsyncHTTP2Connection(origin=origin, stream=stream) as conn: async with conn.stream("GET", "https://example.com/") as response: await response.aread() assert response.status == 200 assert response.content == b"Hello, world!" await conn.aclose() with pytest.raises(httpcore.ConnectionNotAvailable): await conn.request("GET", "https://example.com/") @pytest.mark.anyio async def test_http2_request_to_incorrect_origin(): """ A connection can only send requests to whichever origin it is connected to. """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.AsyncMockStream([]) async with httpcore.AsyncHTTP2Connection(origin=origin, stream=stream) as conn: with pytest.raises(RuntimeError): await conn.request("GET", "https://other.com/") @pytest.mark.anyio async def test_http2_remote_max_streams_update(): """ If the remote server updates the maximum concurrent streams value, we should be adjusting how many streams we will allow. 
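    The effective limit tracks the minimum of the local and remote
    MAX_CONCURRENT_STREAMS settings, so it is re-evaluated whenever a new
    SETTINGS frame arrives (first 1000, then 50, in the frames below).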
""" origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.AsyncMockStream( [ hyperframe.frame.SettingsFrame( settings={hyperframe.frame.SettingsFrame.MAX_CONCURRENT_STREAMS: 1000} ).serialize(), hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame(stream_id=1, data=b"Hello, world!").serialize(), hyperframe.frame.SettingsFrame( settings={hyperframe.frame.SettingsFrame.MAX_CONCURRENT_STREAMS: 50} ).serialize(), hyperframe.frame.DataFrame( stream_id=1, data=b"Hello, world...again!", flags=["END_STREAM"] ).serialize(), ] ) async with httpcore.AsyncHTTP2Connection(origin=origin, stream=stream) as conn: async with conn.stream("GET", "https://example.com/") as response: i = 0 async for chunk in response.aiter_stream(): if i == 0: assert chunk == b"Hello, world!" assert conn._h2_state.remote_settings.max_concurrent_streams == 1000 assert conn._max_streams == min( conn._h2_state.remote_settings.max_concurrent_streams, conn._h2_state.local_settings.max_concurrent_streams, ) elif i == 1: assert chunk == b"Hello, world...again!" assert conn._h2_state.remote_settings.max_concurrent_streams == 50 assert conn._max_streams == min( conn._h2_state.remote_settings.max_concurrent_streams, conn._h2_state.local_settings.max_concurrent_streams, ) i += 1 httpcore-1.0.2/tests/_async/test_http_proxy.py000066400000000000000000000221051452343067500216220ustar00rootroot00000000000000import ssl import typing import hpack import hyperframe.frame import pytest from httpcore import ( SOCKET_OPTION, AsyncHTTPProxy, AsyncMockBackend, AsyncMockStream, AsyncNetworkStream, Origin, ProxyError, ) @pytest.mark.anyio async def test_proxy_forwarding(): """ Send an HTTP request via a proxy. """ network_backend = AsyncMockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) async with AsyncHTTPProxy( proxy_url="http://localhost:8080/", max_connections=10, network_backend=network_backend, ) as proxy: # Sending an intial request, which once complete will return to the pool, IDLE. async with proxy.stream("GET", "http://example.com/") as response: info = [repr(c) for c in proxy.connections] assert info == [ "" ] await response.aread() assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in proxy.connections] assert info == [ "" ] assert proxy.connections[0].is_idle() assert proxy.connections[0].is_available() assert not proxy.connections[0].is_closed() # A connection on a forwarding proxy can only handle HTTP requests to the same origin. assert proxy.connections[0].can_handle_request( Origin(b"http", b"example.com", 80) ) assert not proxy.connections[0].can_handle_request( Origin(b"http", b"other.com", 80) ) assert not proxy.connections[0].can_handle_request( Origin(b"https", b"example.com", 443) ) assert not proxy.connections[0].can_handle_request( Origin(b"https", b"other.com", 443) ) @pytest.mark.anyio async def test_proxy_tunneling(): """ Send an HTTPS request via a proxy. 
""" network_backend = AsyncMockBackend( [ # The initial response to the proxy CONNECT b"HTTP/1.1 200 OK\r\n\r\n", # The actual response from the remote server b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) async with AsyncHTTPProxy( proxy_url="http://localhost:8080/", network_backend=network_backend, ) as proxy: # Sending an intial request, which once complete will return to the pool, IDLE. async with proxy.stream("GET", "https://example.com/") as response: info = [repr(c) for c in proxy.connections] assert info == [ "" ] await response.aread() assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in proxy.connections] assert info == [ "" ] assert proxy.connections[0].is_idle() assert proxy.connections[0].is_available() assert not proxy.connections[0].is_closed() # A connection on a tunneled proxy can only handle HTTPS requests to the same origin. assert not proxy.connections[0].can_handle_request( Origin(b"http", b"example.com", 80) ) assert not proxy.connections[0].can_handle_request( Origin(b"http", b"other.com", 80) ) assert proxy.connections[0].can_handle_request( Origin(b"https", b"example.com", 443) ) assert not proxy.connections[0].can_handle_request( Origin(b"https", b"other.com", 443) ) # We need to adapt the mock backend here slightly in order to deal # with the proxy case. We do not want the initial connection to the proxy # to indicate an HTTP/2 connection, but we do want it to indicate HTTP/2 # once the SSL upgrade has taken place. class HTTP1ThenHTTP2Stream(AsyncMockStream): async def start_tls( self, ssl_context: ssl.SSLContext, server_hostname: typing.Optional[str] = None, timeout: typing.Optional[float] = None, ) -> AsyncNetworkStream: self._http2 = True return self class HTTP1ThenHTTP2Backend(AsyncMockBackend): async def connect_tcp( self, host: str, port: int, timeout: typing.Optional[float] = None, local_address: typing.Optional[str] = None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> AsyncNetworkStream: return HTTP1ThenHTTP2Stream(list(self._buffer)) @pytest.mark.anyio async def test_proxy_tunneling_http2(): """ Send an HTTP/2 request via a proxy. """ network_backend = HTTP1ThenHTTP2Backend( [ # The initial response to the proxy CONNECT b"HTTP/1.1 200 OK\r\n\r\n", # The actual response from the remote server hyperframe.frame.SettingsFrame().serialize(), hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=1, data=b"Hello, world!", flags=["END_STREAM"] ).serialize(), ], ) async with AsyncHTTPProxy( proxy_url="http://localhost:8080/", network_backend=network_backend, http2=True, ) as proxy: # Sending an intial request, which once complete will return to the pool, IDLE. async with proxy.stream("GET", "https://example.com/") as response: info = [repr(c) for c in proxy.connections] assert info == [ "" ] await response.aread() assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in proxy.connections] assert info == [ "" ] assert proxy.connections[0].is_idle() assert proxy.connections[0].is_available() assert not proxy.connections[0].is_closed() # A connection on a tunneled proxy can only handle HTTPS requests to the same origin. 
assert not proxy.connections[0].can_handle_request( Origin(b"http", b"example.com", 80) ) assert not proxy.connections[0].can_handle_request( Origin(b"http", b"other.com", 80) ) assert proxy.connections[0].can_handle_request( Origin(b"https", b"example.com", 443) ) assert not proxy.connections[0].can_handle_request( Origin(b"https", b"other.com", 443) ) @pytest.mark.anyio async def test_proxy_tunneling_with_403(): """ Send an HTTPS request via a proxy. """ network_backend = AsyncMockBackend( [ b"HTTP/1.1 403 Permission Denied\r\n" b"\r\n", ] ) async with AsyncHTTPProxy( proxy_url="http://localhost:8080/", network_backend=network_backend, ) as proxy: with pytest.raises(ProxyError) as exc_info: await proxy.request("GET", "https://example.com/") assert str(exc_info.value) == "403 Permission Denied" assert not proxy.connections @pytest.mark.anyio async def test_proxy_tunneling_with_auth(): """ Send an authenticated HTTPS request via a proxy. """ network_backend = AsyncMockBackend( [ # The initial response to the proxy CONNECT b"HTTP/1.1 200 OK\r\n\r\n", # The actual response from the remote server b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) async with AsyncHTTPProxy( proxy_url="http://localhost:8080/", proxy_auth=("username", "password"), network_backend=network_backend, ) as proxy: response = await proxy.request("GET", "https://example.com/") assert response.status == 200 assert response.content == b"Hello, world!" # Dig into this private property as a cheap lazy way of # checking that the proxy header is set correctly. assert proxy._proxy_headers == [ # type: ignore (b"Proxy-Authorization", b"Basic dXNlcm5hbWU6cGFzc3dvcmQ=") ] httpcore-1.0.2/tests/_async/test_integration.py000066400000000000000000000032521452343067500217270ustar00rootroot00000000000000import ssl import pytest import httpcore @pytest.mark.anyio async def test_request(httpbin): async with httpcore.AsyncConnectionPool() as pool: response = await pool.request("GET", httpbin.url) assert response.status == 200 @pytest.mark.anyio async def test_ssl_request(httpbin_secure): ssl_context = ssl.create_default_context() ssl_context.check_hostname = False ssl_context.verify_mode = ssl.CERT_NONE async with httpcore.AsyncConnectionPool(ssl_context=ssl_context) as pool: response = await pool.request("GET", httpbin_secure.url) assert response.status == 200 @pytest.mark.anyio async def test_extra_info(httpbin_secure): ssl_context = ssl.create_default_context() ssl_context.check_hostname = False ssl_context.verify_mode = ssl.CERT_NONE async with httpcore.AsyncConnectionPool(ssl_context=ssl_context) as pool: async with pool.stream("GET", httpbin_secure.url) as response: assert response.status == 200 stream = response.extensions["network_stream"] ssl_object = stream.get_extra_info("ssl_object") assert ssl_object.version() == "TLSv1.3" local_addr = stream.get_extra_info("client_addr") assert local_addr[0] == "127.0.0.1" remote_addr = stream.get_extra_info("server_addr") assert "https://%s:%d" % remote_addr == httpbin_secure.url sock = stream.get_extra_info("socket") assert hasattr(sock, "family") assert hasattr(sock, "type") invalid = stream.get_extra_info("invalid") assert invalid is None stream.get_extra_info("is_readable") httpcore-1.0.2/tests/_async/test_socks_proxy.py000066400000000000000000000147541452343067500220000ustar00rootroot00000000000000import pytest import httpcore @pytest.mark.anyio async def test_socks5_request(): """ Send an HTTP request via a SOCKS proxy. 
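    The mock buffer replays the SOCKS5 handshake: a method-selection reply
    (version 0x05, method 0x00 for "no authentication"), then a CONNECT
    reply (version, reply code 0x00 for success, reserved byte, address
    type 0x01 for IPv4, the bound address and port), and finally the plain
    HTTP response.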
""" network_backend = httpcore.AsyncMockBackend( [ # The initial socks CONNECT # v5 NOAUTH b"\x05\x00", # v5 SUC RSV IP4 127 .0 .0 .1 :80 b"\x05\x00\x00\x01\xff\x00\x00\x01\x00\x50", # The actual response from the remote server b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) async with httpcore.AsyncSOCKSProxy( proxy_url="socks5://localhost:8080/", network_backend=network_backend, ) as proxy: # Sending an intial request, which once complete will return to the pool, IDLE. async with proxy.stream("GET", "https://example.com/") as response: info = [repr(c) for c in proxy.connections] assert info == [ "" ] await response.aread() assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in proxy.connections] assert info == [ "" ] assert proxy.connections[0].is_idle() assert proxy.connections[0].is_available() assert not proxy.connections[0].is_closed() # A connection on a tunneled proxy can only handle HTTPS requests to the same origin. assert not proxy.connections[0].can_handle_request( httpcore.Origin(b"http", b"example.com", 80) ) assert not proxy.connections[0].can_handle_request( httpcore.Origin(b"http", b"other.com", 80) ) assert proxy.connections[0].can_handle_request( httpcore.Origin(b"https", b"example.com", 443) ) assert not proxy.connections[0].can_handle_request( httpcore.Origin(b"https", b"other.com", 443) ) @pytest.mark.anyio async def test_authenticated_socks5_request(): """ Send an HTTP request via a SOCKS proxy. """ network_backend = httpcore.AsyncMockBackend( [ # The initial socks CONNECT # v5 USERNAME/PASSWORD b"\x05\x02", # v1 VALID USERNAME/PASSWORD b"\x01\x00", # v5 SUC RSV IP4 127 .0 .0 .1 :80 b"\x05\x00\x00\x01\xff\x00\x00\x01\x00\x50", # The actual response from the remote server b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) async with httpcore.AsyncSOCKSProxy( proxy_url="socks5://localhost:8080/", proxy_auth=(b"username", b"password"), network_backend=network_backend, ) as proxy: # Sending an intial request, which once complete will return to the pool, IDLE. async with proxy.stream("GET", "https://example.com/") as response: info = [repr(c) for c in proxy.connections] assert info == [ "" ] await response.aread() assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in proxy.connections] assert info == [ "" ] assert proxy.connections[0].is_idle() assert proxy.connections[0].is_available() assert not proxy.connections[0].is_closed() @pytest.mark.anyio async def test_socks5_request_connect_failed(): """ Attempt to send an HTTP request via a SOCKS proxy, resulting in a connect failure. """ network_backend = httpcore.AsyncMockBackend( [ # The initial socks CONNECT # v5 NOAUTH b"\x05\x00", # v5 NO RSV IP4 0 .0 .0 .0 :00 b"\x05\x05\x00\x01\x00\x00\x00\x00\x00\x00", ] ) async with httpcore.AsyncSOCKSProxy( proxy_url="socks5://localhost:8080/", network_backend=network_backend, ) as proxy: # Sending a request, which the proxy rejects with pytest.raises(httpcore.ProxyError) as exc_info: await proxy.request("GET", "https://example.com/") assert ( str(exc_info.value) == "Proxy Server could not connect: Connection refused." ) assert not proxy.connections @pytest.mark.anyio async def test_socks5_request_failed_to_provide_auth(): """ Attempt to send an HTTP request via an authenticated SOCKS proxy, without providing authentication credentials. 
""" network_backend = httpcore.AsyncMockBackend( [ # v5 USERNAME/PASSWORD b"\x05\x02", ] ) async with httpcore.AsyncSOCKSProxy( proxy_url="socks5://localhost:8080/", network_backend=network_backend, ) as proxy: # Sending a request, which the proxy rejects with pytest.raises(httpcore.ProxyError) as exc_info: await proxy.request("GET", "https://example.com/") assert ( str(exc_info.value) == "Requested NO AUTHENTICATION REQUIRED from proxy server, but got USERNAME/PASSWORD." ) assert not proxy.connections @pytest.mark.anyio async def test_socks5_request_incorrect_auth(): """ Attempt to send an HTTP request via an authenticated SOCKS proxy, wit incorrect authentication credentials. """ network_backend = httpcore.AsyncMockBackend( [ # v5 USERNAME/PASSWORD b"\x05\x02", # v1 INVALID USERNAME/PASSWORD b"\x01\x01", ] ) async with httpcore.AsyncSOCKSProxy( proxy_url="socks5://localhost:8080/", proxy_auth=(b"invalid", b"invalid"), network_backend=network_backend, ) as proxy: # Sending a request, which the proxy rejects with pytest.raises(httpcore.ProxyError) as exc_info: await proxy.request("GET", "https://example.com/") assert str(exc_info.value) == "Invalid username/password" assert not proxy.connections httpcore-1.0.2/tests/_sync/000077500000000000000000000000001452343067500156305ustar00rootroot00000000000000httpcore-1.0.2/tests/_sync/__init__.py000066400000000000000000000000001452343067500177270ustar00rootroot00000000000000httpcore-1.0.2/tests/_sync/test_connection.py000066400000000000000000000276341452343067500214140ustar00rootroot00000000000000import ssl import typing import hpack import hyperframe.frame import pytest from httpcore import ( SOCKET_OPTION, HTTPConnection, MockBackend, MockStream, NetworkStream, ConnectError, ConnectionNotAvailable, Origin, RemoteProtocolError, WriteError, ) def test_http_connection(): origin = Origin(b"https", b"example.com", 443) network_backend = MockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) with HTTPConnection( origin=origin, network_backend=network_backend, keepalive_expiry=5.0 ) as conn: assert not conn.is_idle() assert not conn.is_closed() assert not conn.is_available() assert not conn.has_expired() assert repr(conn) == "" with conn.stream("GET", "https://example.com/") as response: assert ( repr(conn) == "" ) response.read() assert response.status == 200 assert response.content == b"Hello, world!" assert conn.is_idle() assert not conn.is_closed() assert conn.is_available() assert not conn.has_expired() assert ( repr(conn) == "" ) def test_concurrent_requests_not_available_on_http11_connections(): """ Attempting to issue a request against an already active HTTP/1.1 connection will raise a `ConnectionNotAvailable` exception. """ origin = Origin(b"https", b"example.com", 443) network_backend = MockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) with HTTPConnection( origin=origin, network_backend=network_backend, keepalive_expiry=5.0 ) as conn: with conn.stream("GET", "https://example.com/"): with pytest.raises(ConnectionNotAvailable): conn.request("GET", "https://example.com/") @pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") def test_write_error_with_response_sent(): """ If a server half-closes the connection while the client is sending the request, it may still send a response. In this case the client should successfully read and return the response. 
See also the `test_write_error_without_response_sent` test above. """ class ErrorOnRequestTooLargeStream(MockStream): def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: super().__init__(buffer, http2) self.count = 0 def write( self, buffer: bytes, timeout: typing.Optional[float] = None ) -> None: self.count += len(buffer) if self.count > 1_000_000: raise WriteError() class ErrorOnRequestTooLarge(MockBackend): def connect_tcp( self, host: str, port: int, timeout: typing.Optional[float] = None, local_address: typing.Optional[str] = None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> MockStream: return ErrorOnRequestTooLargeStream(list(self._buffer), http2=self._http2) origin = Origin(b"https", b"example.com", 443) network_backend = ErrorOnRequestTooLarge( [ b"HTTP/1.1 413 Payload Too Large\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 37\r\n", b"\r\n", b"Request body exceeded 1,000,000 bytes", ] ) with HTTPConnection( origin=origin, network_backend=network_backend, keepalive_expiry=5.0 ) as conn: content = b"x" * 10_000_000 response = conn.request("POST", "https://example.com/", content=content) assert response.status == 413 assert response.content == b"Request body exceeded 1,000,000 bytes" @pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") def test_write_error_without_response_sent(): """ If a server fully closes the connection while the client is sending the request, then client should raise an error. See also the `test_write_error_with_response_sent` test above. """ class ErrorOnRequestTooLargeStream(MockStream): def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: super().__init__(buffer, http2) self.count = 0 def write( self, buffer: bytes, timeout: typing.Optional[float] = None ) -> None: self.count += len(buffer) if self.count > 1_000_000: raise WriteError() class ErrorOnRequestTooLarge(MockBackend): def connect_tcp( self, host: str, port: int, timeout: typing.Optional[float] = None, local_address: typing.Optional[str] = None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> MockStream: return ErrorOnRequestTooLargeStream(list(self._buffer), http2=self._http2) origin = Origin(b"https", b"example.com", 443) network_backend = ErrorOnRequestTooLarge([]) with HTTPConnection( origin=origin, network_backend=network_backend, keepalive_expiry=5.0 ) as conn: content = b"x" * 10_000_000 with pytest.raises(RemoteProtocolError) as exc_info: conn.request("POST", "https://example.com/", content=content) assert str(exc_info.value) == "Server disconnected without sending a response." @pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") def test_http2_connection(): origin = Origin(b"https", b"example.com", 443) network_backend = MockBackend( [ hyperframe.frame.SettingsFrame().serialize(), hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=1, data=b"Hello, world!", flags=["END_STREAM"] ).serialize(), ], http2=True, ) with HTTPConnection( origin=origin, network_backend=network_backend, http2=True ) as conn: response = conn.request("GET", "https://example.com/") assert response.status == 200 assert response.content == b"Hello, world!" 
assert response.extensions["http_version"] == b"HTTP/2" def test_request_to_incorrect_origin(): """ A connection can only send requests whichever origin it is connected to. """ origin = Origin(b"https", b"example.com", 443) network_backend = MockBackend([]) with HTTPConnection( origin=origin, network_backend=network_backend ) as conn: with pytest.raises(RuntimeError): conn.request("GET", "https://other.com/") class NeedsRetryBackend(MockBackend): def __init__( self, buffer: typing.List[bytes], http2: bool = False, connect_tcp_failures: int = 2, start_tls_failures: int = 0, ) -> None: self._connect_tcp_failures = connect_tcp_failures self._start_tls_failures = start_tls_failures super().__init__(buffer, http2) def connect_tcp( self, host: str, port: int, timeout: typing.Optional[float] = None, local_address: typing.Optional[str] = None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> NetworkStream: if self._connect_tcp_failures > 0: self._connect_tcp_failures -= 1 raise ConnectError() stream = super().connect_tcp( host, port, timeout=timeout, local_address=local_address ) return self._NeedsRetryAsyncNetworkStream(self, stream) class _NeedsRetryAsyncNetworkStream(NetworkStream): def __init__( self, backend: "NeedsRetryBackend", stream: NetworkStream ) -> None: self._backend = backend self._stream = stream def read( self, max_bytes: int, timeout: typing.Optional[float] = None ) -> bytes: return self._stream.read(max_bytes, timeout) def write( self, buffer: bytes, timeout: typing.Optional[float] = None ) -> None: self._stream.write(buffer, timeout) def close(self) -> None: self._stream.close() def start_tls( self, ssl_context: ssl.SSLContext, server_hostname: typing.Optional[str] = None, timeout: typing.Optional[float] = None, ) -> "NetworkStream": if self._backend._start_tls_failures > 0: self._backend._start_tls_failures -= 1 raise ConnectError() stream = self._stream.start_tls(ssl_context, server_hostname, timeout) return self._backend._NeedsRetryAsyncNetworkStream(self._backend, stream) def get_extra_info(self, info: str) -> typing.Any: return self._stream.get_extra_info(info) def test_connection_retries(): origin = Origin(b"https", b"example.com", 443) content = [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] network_backend = NeedsRetryBackend(content) with HTTPConnection( origin=origin, network_backend=network_backend, retries=3 ) as conn: response = conn.request("GET", "https://example.com/") assert response.status == 200 network_backend = NeedsRetryBackend(content) with HTTPConnection( origin=origin, network_backend=network_backend, ) as conn: with pytest.raises(ConnectError): conn.request("GET", "https://example.com/") def test_connection_retries_tls(): origin = Origin(b"https", b"example.com", 443) content = [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] network_backend = NeedsRetryBackend( content, connect_tcp_failures=0, start_tls_failures=2 ) with HTTPConnection( origin=origin, network_backend=network_backend, retries=3 ) as conn: response = conn.request("GET", "https://example.com/") assert response.status == 200 network_backend = NeedsRetryBackend( content, connect_tcp_failures=0, start_tls_failures=2 ) with HTTPConnection( origin=origin, network_backend=network_backend, ) as conn: with pytest.raises(ConnectError): conn.request("GET", "https://example.com/") def test_uds_connections(): # We're not actually testing 
Unix Domain Sockets here, because we're just # using a mock backend, but at least we're covering the UDS codepath # in `connection.py` which we may as well do. origin = Origin(b"https", b"example.com", 443) network_backend = MockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) with HTTPConnection( origin=origin, network_backend=network_backend, uds="/mock/example" ) as conn: response = conn.request("GET", "https://example.com/") assert response.status == 200 httpcore-1.0.2/tests/_sync/test_connection_pool.py000066400000000000000000000646451452343067500224500ustar00rootroot00000000000000import logging import typing import hpack import hyperframe.frame import pytest from tests import concurrency import httpcore def test_connection_pool_with_keepalive(): """ By default HTTP/1.1 requests should be returned to the connection pool. """ network_backend = httpcore.MockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) with httpcore.ConnectionPool( network_backend=network_backend, ) as pool: # Sending an intial request, which once complete will return to the pool, IDLE. with pool.stream("GET", "https://example.com/") as response: info = [repr(c) for c in pool.connections] assert info == [ "" ] response.read() assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in pool.connections] assert info == [ "" ] # Sending a second request to the same origin will reuse the existing IDLE connection. with pool.stream("GET", "https://example.com/") as response: info = [repr(c) for c in pool.connections] assert info == [ "" ] response.read() assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in pool.connections] assert info == [ "" ] # Sending a request to a different origin will not reuse the existing IDLE connection. with pool.stream("GET", "http://example.com/") as response: info = [repr(c) for c in pool.connections] assert info == [ "", "", ] response.read() assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in pool.connections] assert info == [ "", "", ] def test_connection_pool_with_close(): """ HTTP/1.1 requests that include a 'Connection: Close' header should not be returned to the connection pool. """ network_backend = httpcore.MockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) with httpcore.ConnectionPool(network_backend=network_backend) as pool: # Sending an intial request, which once complete will not return to the pool. with pool.stream( "GET", "https://example.com/", headers={"Connection": "close"} ) as response: info = [repr(c) for c in pool.connections] assert info == [ "" ] response.read() assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in pool.connections] assert info == [] def test_connection_pool_with_http2(): """ Test a connection pool with HTTP/2 requests. 
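
The mocked response buffer below is assembled from raw HTTP/2 frames. As a
rough sketch of the technique (mirroring the buffer used in this test):

    hyperframe.frame.SettingsFrame().serialize()   # server settings preamble
    hyperframe.frame.HeadersFrame(
        stream_id=1,               # odd stream IDs are client-initiated
        data=hpack.Encoder().encode([(b":status", b"200")]),
        flags=["END_HEADERS"],
    ).serialize()
    hyperframe.frame.DataFrame(
        stream_id=1, data=b"Hello, world!", flags=["END_STREAM"]
    ).serialize()

The second request reuses the same multiplexed connection, so its response
arrives on stream 3.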
""" network_backend = httpcore.MockBackend( buffer=[ hyperframe.frame.SettingsFrame().serialize(), hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=1, data=b"Hello, world!", flags=["END_STREAM"] ).serialize(), hyperframe.frame.HeadersFrame( stream_id=3, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=3, data=b"Hello, world!", flags=["END_STREAM"] ).serialize(), ], http2=True, ) with httpcore.ConnectionPool( network_backend=network_backend, ) as pool: # Sending an intial request, which once complete will return to the pool, IDLE. response = pool.request("GET", "https://example.com/") assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in pool.connections] assert info == [ "" ] # Sending a second request to the same origin will reuse the existing IDLE connection. response = pool.request("GET", "https://example.com/") assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in pool.connections] assert info == [ "" ] def test_connection_pool_with_http2_goaway(): """ Test a connection pool with HTTP/2 requests, that cleanly disconnects with a GoAway frame after the first request. """ network_backend = httpcore.MockBackend( buffer=[ hyperframe.frame.SettingsFrame().serialize(), hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=1, data=b"Hello, world!", flags=["END_STREAM"] ).serialize(), hyperframe.frame.GoAwayFrame( stream_id=0, error_code=0, last_stream_id=1 ).serialize(), b"", ], http2=True, ) with httpcore.ConnectionPool( network_backend=network_backend, ) as pool: # Sending an intial request, which once complete will return to the pool, IDLE. response = pool.request("GET", "https://example.com/") assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in pool.connections] assert info == [ "" ] # Sending a second request to the same origin will require a new connection. response = pool.request("GET", "https://example.com/") assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in pool.connections] assert info == [ "", "", ] def test_trace_request(): """ The 'trace' request extension allows for a callback function to inspect the internal events that occur while sending a request. 
""" network_backend = httpcore.MockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) called = [] def trace(name, kwargs): called.append(name) with httpcore.ConnectionPool(network_backend=network_backend) as pool: pool.request("GET", "https://example.com/", extensions={"trace": trace}) assert called == [ "connection.connect_tcp.started", "connection.connect_tcp.complete", "connection.start_tls.started", "connection.start_tls.complete", "http11.send_request_headers.started", "http11.send_request_headers.complete", "http11.send_request_body.started", "http11.send_request_body.complete", "http11.receive_response_headers.started", "http11.receive_response_headers.complete", "http11.receive_response_body.started", "http11.receive_response_body.complete", "http11.response_closed.started", "http11.response_closed.complete", ] def test_debug_request(caplog): """ The 'trace' request extension allows for a callback function to inspect the internal events that occur while sending a request. """ caplog.set_level(logging.DEBUG) network_backend = httpcore.MockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) with httpcore.ConnectionPool(network_backend=network_backend) as pool: pool.request("GET", "http://example.com/") assert caplog.record_tuples == [ ( "httpcore.connection", logging.DEBUG, "connect_tcp.started host='example.com' port=80 local_address=None timeout=None socket_options=None", ), ( "httpcore.connection", logging.DEBUG, "connect_tcp.complete return_value=", ), ( "httpcore.http11", logging.DEBUG, "send_request_headers.started request=", ), ("httpcore.http11", logging.DEBUG, "send_request_headers.complete"), ( "httpcore.http11", logging.DEBUG, "send_request_body.started request=", ), ("httpcore.http11", logging.DEBUG, "send_request_body.complete"), ( "httpcore.http11", logging.DEBUG, "receive_response_headers.started request=", ), ( "httpcore.http11", logging.DEBUG, "receive_response_headers.complete return_value=" "(b'HTTP/1.1', 200, b'OK', [(b'Content-Type', b'plain/text'), (b'Content-Length', b'13')])", ), ( "httpcore.http11", logging.DEBUG, "receive_response_body.started request=", ), ("httpcore.http11", logging.DEBUG, "receive_response_body.complete"), ("httpcore.http11", logging.DEBUG, "response_closed.started"), ("httpcore.http11", logging.DEBUG, "response_closed.complete"), ("httpcore.connection", logging.DEBUG, "close.started"), ("httpcore.connection", logging.DEBUG, "close.complete"), ] def test_connection_pool_with_http_exception(): """ HTTP/1.1 requests that result in an exception during the connection should not be returned to the connection pool. """ network_backend = httpcore.MockBackend([b"Wait, this isn't valid HTTP!"]) called = [] def trace(name, kwargs): called.append(name) with httpcore.ConnectionPool(network_backend=network_backend) as pool: # Sending an initial request, which once complete will not return to the pool. 
with pytest.raises(Exception): pool.request( "GET", "https://example.com/", extensions={"trace": trace} ) info = [repr(c) for c in pool.connections] assert info == [] assert called == [ "connection.connect_tcp.started", "connection.connect_tcp.complete", "connection.start_tls.started", "connection.start_tls.complete", "http11.send_request_headers.started", "http11.send_request_headers.complete", "http11.send_request_body.started", "http11.send_request_body.complete", "http11.receive_response_headers.started", "http11.receive_response_headers.failed", "http11.response_closed.started", "http11.response_closed.complete", ] def test_connection_pool_with_connect_exception(): """ HTTP/1.1 requests that result in an exception during connection should not be returned to the connection pool. """ class FailedConnectBackend(httpcore.MockBackend): def connect_tcp( self, host: str, port: int, timeout: typing.Optional[float] = None, local_address: typing.Optional[str] = None, socket_options: typing.Optional[ typing.Iterable[httpcore.SOCKET_OPTION] ] = None, ) -> httpcore.NetworkStream: raise httpcore.ConnectError("Could not connect") network_backend = FailedConnectBackend([]) called = [] def trace(name, kwargs): called.append(name) with httpcore.ConnectionPool(network_backend=network_backend) as pool: # Sending an initial request, which once complete will not return to the pool. with pytest.raises(Exception): pool.request( "GET", "https://example.com/", extensions={"trace": trace} ) info = [repr(c) for c in pool.connections] assert info == [] assert called == [ "connection.connect_tcp.started", "connection.connect_tcp.failed", ] def test_connection_pool_with_immediate_expiry(): """ Connection pools with keepalive_expiry=0.0 should immediately expire keep alive connections. """ network_backend = httpcore.MockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) with httpcore.ConnectionPool( keepalive_expiry=0.0, network_backend=network_backend, ) as pool: # Sending an intial request, which once complete will not return to the pool. with pool.stream("GET", "https://example.com/") as response: info = [repr(c) for c in pool.connections] assert info == [ "" ] response.read() assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in pool.connections] assert info == [] def test_connection_pool_with_no_keepalive_connections_allowed(): """ When 'max_keepalive_connections=0' is used, IDLE connections should not be returned to the pool. """ network_backend = httpcore.MockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) with httpcore.ConnectionPool( max_keepalive_connections=0, network_backend=network_backend ) as pool: # Sending an intial request, which once complete will not return to the pool. with pool.stream("GET", "https://example.com/") as response: info = [repr(c) for c in pool.connections] assert info == [ "" ] response.read() assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in pool.connections] assert info == [] def test_connection_pool_concurrency(): """ HTTP/1.1 requests made in concurrency must not ever exceed the maximum number of allowable connection in the pool. 
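
Each concurrent worker holds the pool's only connection while streaming, and
snapshots the pool state for later assertions, along these lines:

    def fetch(pool, domain, info_list):
        with pool.stream("GET", f"http://{domain}/") as response:
            info_list.append([repr(c) for c in pool.connections])
            response.read()

    with concurrency.open_nursery() as nursery:
        for domain in ["a.com", "b.com"]:
            nursery.start_soon(fetch, pool, domain, info_list)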
""" network_backend = httpcore.MockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) def fetch(pool, domain, info_list): with pool.stream("GET", f"http://{domain}/") as response: info = [repr(c) for c in pool.connections] info_list.append(info) response.read() with httpcore.ConnectionPool( max_connections=1, network_backend=network_backend ) as pool: info_list: typing.List[str] = [] with concurrency.open_nursery() as nursery: for domain in ["a.com", "b.com", "c.com", "d.com", "e.com"]: nursery.start_soon(fetch, pool, domain, info_list) for item in info_list: # Check that each time we inspected the connection pool, only a # single connection was established at any one time. assert len(item) == 1 # Each connection was to a different host, and only sent a single # request on that connection. assert item[0] in [ "", "", "", "", "", ] def test_connection_pool_concurrency_same_domain_closing(): """ HTTP/1.1 requests made in concurrency must not ever exceed the maximum number of allowable connection in the pool. """ network_backend = httpcore.MockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"Connection: close\r\n", b"\r\n", b"Hello, world!", ] ) def fetch(pool, domain, info_list): with pool.stream("GET", f"https://{domain}/") as response: info = [repr(c) for c in pool.connections] info_list.append(info) response.read() with httpcore.ConnectionPool( max_connections=1, network_backend=network_backend, http2=True ) as pool: info_list: typing.List[str] = [] with concurrency.open_nursery() as nursery: for domain in ["a.com", "a.com", "a.com", "a.com", "a.com"]: nursery.start_soon(fetch, pool, domain, info_list) for item in info_list: # Check that each time we inspected the connection pool, only a # single connection was established at any one time. assert len(item) == 1 # Only a single request was sent on each connection. assert ( item[0] == "" ) def test_connection_pool_concurrency_same_domain_keepalive(): """ HTTP/1.1 requests made in concurrency must not ever exceed the maximum number of allowable connection in the pool. """ network_backend = httpcore.MockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] * 5 ) def fetch(pool, domain, info_list): with pool.stream("GET", f"https://{domain}/") as response: info = [repr(c) for c in pool.connections] info_list.append(info) response.read() with httpcore.ConnectionPool( max_connections=1, network_backend=network_backend, http2=True ) as pool: info_list: typing.List[str] = [] with concurrency.open_nursery() as nursery: for domain in ["a.com", "a.com", "a.com", "a.com", "a.com"]: nursery.start_soon(fetch, pool, domain, info_list) for item in info_list: # Check that each time we inspected the connection pool, only a # single connection was established at any one time. assert len(item) == 1 # The connection sent multiple requests. assert item[0] in [ "", "", "", "", "", ] def test_unsupported_protocol(): with httpcore.ConnectionPool() as pool: with pytest.raises(httpcore.UnsupportedProtocol): pool.request("GET", "ftp://www.example.com/") with pytest.raises(httpcore.UnsupportedProtocol): pool.request("GET", "://www.example.com/") def test_connection_pool_closed_while_request_in_flight(): """ Closing a connection pool while a request/response is still in-flight should raise an error. 
""" network_backend = httpcore.MockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) with httpcore.ConnectionPool( network_backend=network_backend, ) as pool: # Send a request, and then close the connection pool while the # response has not yet been streamed. with pool.stream("GET", "https://example.com/") as response: pool.close() with pytest.raises(httpcore.ReadError): response.read() def test_connection_pool_timeout(): """ Ensure that exceeding max_connections can cause a request to timeout. """ network_backend = httpcore.MockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) with httpcore.ConnectionPool( network_backend=network_backend, max_connections=1 ) as pool: # Send a request to a pool that is configured to only support a single # connection, and then ensure that a second concurrent request # fails with a timeout. with pool.stream("GET", "https://example.com/"): with pytest.raises(httpcore.PoolTimeout): extensions = {"timeout": {"pool": 0.0001}} pool.request("GET", "https://example.com/", extensions=extensions) def test_connection_pool_timeout_zero(): """ A pool timeout of 0 shouldn't raise a PoolTimeout if there's no need to wait on a new connection. """ network_backend = httpcore.MockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) # Use a pool timeout of zero. extensions = {"timeout": {"pool": 0}} # A connection pool configured to allow only one connection at a time. with httpcore.ConnectionPool( network_backend=network_backend, max_connections=1 ) as pool: # Two consecutive requests with a pool timeout of zero. # Both succeed without raising a timeout. response = pool.request( "GET", "https://example.com/", extensions=extensions ) assert response.status == 200 assert response.content == b"Hello, world!" response = pool.request( "GET", "https://example.com/", extensions=extensions ) assert response.status == 200 assert response.content == b"Hello, world!" # A connection pool configured to allow only one connection at a time. with httpcore.ConnectionPool( network_backend=network_backend, max_connections=1 ) as pool: # Two concurrent requests with a pool timeout of zero. # Only the first will succeed without raising a timeout. with pool.stream( "GET", "https://example.com/", extensions=extensions ) as response: # The first response hasn't yet completed. with pytest.raises(httpcore.PoolTimeout): # So a pool timeout occurs. pool.request("GET", "https://example.com/", extensions=extensions) # The first response now completes. response.read() assert response.status == 200 assert response.content == b"Hello, world!" def test_http11_upgrade_connection(): """ HTTP "101 Switching Protocols" indicates an upgraded connection. We should return the response, so that the network stream may be used for the upgraded connection. 
https://httpwg.org/specs/rfc9110.html#status.101 https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/101 """ network_backend = httpcore.MockBackend( [ b"HTTP/1.1 101 Switching Protocols\r\n", b"Connection: upgrade\r\n", b"Upgrade: custom\r\n", b"\r\n", b"...", ] ) with httpcore.ConnectionPool( network_backend=network_backend, max_connections=1 ) as pool: with pool.stream( "GET", "wss://example.com/", headers={"Connection": "upgrade", "Upgrade": "custom"}, ) as response: assert response.status == 101 network_stream = response.extensions["network_stream"] content = network_stream.read(max_bytes=1024) assert content == b"..." httpcore-1.0.2/tests/_sync/test_http11.py000066400000000000000000000244041452343067500203660ustar00rootroot00000000000000import pytest import httpcore def test_http11_connection(): origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.MockStream( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) with httpcore.HTTP11Connection( origin=origin, stream=stream, keepalive_expiry=5.0 ) as conn: response = conn.request("GET", "https://example.com/") assert response.status == 200 assert response.content == b"Hello, world!" assert conn.is_idle() assert not conn.is_closed() assert conn.is_available() assert not conn.has_expired() assert ( repr(conn) == "" ) def test_http11_connection_unread_response(): """ If the client releases the response without reading it to termination, then the connection will not be reusable. """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.MockStream( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) with httpcore.HTTP11Connection(origin=origin, stream=stream) as conn: with conn.stream("GET", "https://example.com/") as response: assert response.status == 200 assert not conn.is_idle() assert conn.is_closed() assert not conn.is_available() assert not conn.has_expired() assert ( repr(conn) == "" ) def test_http11_connection_with_remote_protocol_error(): """ If a remote protocol error occurs, then no response will be returned, and the connection will not be reusable. """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.MockStream([b"Wait, this isn't valid HTTP!", b""]) with httpcore.HTTP11Connection(origin=origin, stream=stream) as conn: with pytest.raises(httpcore.RemoteProtocolError): conn.request("GET", "https://example.com/") assert not conn.is_idle() assert conn.is_closed() assert not conn.is_available() assert not conn.has_expired() assert ( repr(conn) == "" ) def test_http11_connection_with_incomplete_response(): """ We should be gracefully handling the case where the connection ends prematurely. """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.MockStream( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, wor", ] ) with httpcore.HTTP11Connection(origin=origin, stream=stream) as conn: with pytest.raises(httpcore.RemoteProtocolError): conn.request("GET", "https://example.com/") assert not conn.is_idle() assert conn.is_closed() assert not conn.is_available() assert not conn.has_expired() assert ( repr(conn) == "" ) def test_http11_connection_with_local_protocol_error(): """ If a local protocol error occurs, then no response will be returned, and the connection will not be reusable. 
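
For example, an illegal header value fails validation client-side, before
anything is sent over the wire:

    conn.request("GET", "https://example.com/", headers={"Host": "\0"})
    # raises httpcore.LocalProtocolError: Illegal header value b'\x00'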
""" origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.MockStream( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) with httpcore.HTTP11Connection(origin=origin, stream=stream) as conn: with pytest.raises(httpcore.LocalProtocolError) as exc_info: conn.request("GET", "https://example.com/", headers={"Host": "\0"}) assert str(exc_info.value) == "Illegal header value b'\\x00'" assert not conn.is_idle() assert conn.is_closed() assert not conn.is_available() assert not conn.has_expired() assert ( repr(conn) == "" ) def test_http11_connection_handles_one_active_request(): """ Attempting to send a request while one is already in-flight will raise a ConnectionNotAvailable exception. """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.MockStream( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) with httpcore.HTTP11Connection(origin=origin, stream=stream) as conn: with conn.stream("GET", "https://example.com/"): with pytest.raises(httpcore.ConnectionNotAvailable): conn.request("GET", "https://example.com/") def test_http11_connection_attempt_close(): """ A connection can only be closed when it is idle. """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.MockStream( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) with httpcore.HTTP11Connection(origin=origin, stream=stream) as conn: with conn.stream("GET", "https://example.com/") as response: response.read() assert response.status == 200 assert response.content == b"Hello, world!" def test_http11_request_to_incorrect_origin(): """ A connection can only send requests to whichever origin it is connected to. """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.MockStream([]) with httpcore.HTTP11Connection(origin=origin, stream=stream) as conn: with pytest.raises(RuntimeError): conn.request("GET", "https://other.com/") def test_http11_expect_continue(): """ HTTP "100 Continue" is an interim response. We simply ignore it and return the final response. https://httpwg.org/specs/rfc9110.html#status.100 https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/100 """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.MockStream( [ b"HTTP/1.1 100 Continue\r\n", b"\r\n", b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) with httpcore.HTTP11Connection( origin=origin, stream=stream, keepalive_expiry=5.0 ) as conn: response = conn.request( "GET", "https://example.com/", headers={"Expect": "continue"}, ) assert response.status == 200 assert response.content == b"Hello, world!" def test_http11_upgrade_connection(): """ HTTP "101 Switching Protocols" indicates an upgraded connection. We should return the response, so that the network stream may be used for the upgraded connection. 
https://httpwg.org/specs/rfc9110.html#status.101 https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/101 """ origin = httpcore.Origin(b"wss", b"example.com", 443) stream = httpcore.MockStream( [ b"HTTP/1.1 101 Switching Protocols\r\n", b"Connection: upgrade\r\n", b"Upgrade: custom\r\n", b"\r\n", b"...", ] ) with httpcore.HTTP11Connection( origin=origin, stream=stream, keepalive_expiry=5.0 ) as conn: with conn.stream( "GET", "wss://example.com/", headers={"Connection": "upgrade", "Upgrade": "custom"}, ) as response: assert response.status == 101 network_stream = response.extensions["network_stream"] content = network_stream.read(max_bytes=1024) assert content == b"..." def test_http11_early_hints(): """ HTTP "103 Early Hints" is an interim response. We simply ignore it and return the final response. https://datatracker.ietf.org/doc/rfc8297/ """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.MockStream( [ b"HTTP/1.1 103 Early Hints\r\n", b"Link: ; rel=preload; as=style\r\n", b"Link: ; rel=preload; as=style\r\n", b"\r\n", b"HTTP/1.1 200 OK\r\n", b"Content-Type: text/html; charset=utf-8\r\n", b"Content-Length: 30\r\n", b"Link: ; rel=preload; as=style\r\n", b"Link: ; rel=preload; as=script\r\n", b"\r\n", b"Hello, world! ...", ] ) with httpcore.HTTP11Connection( origin=origin, stream=stream, keepalive_expiry=5.0 ) as conn: response = conn.request( "GET", "https://example.com/", headers={"Expect": "continue"}, ) assert response.status == 200 assert response.content == b"Hello, world! ..." def test_http11_header_sub_100kb(): """ A connection should be able to handle a http header size up to 100kB. """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.MockStream( [ b"HTTP/1.1 200 OK\r\n", # 17 b"Content-Type: plain/text\r\n", # 43 b"Cookie: " + b"x" * (100 * 1024 - 72) + b"\r\n", # 102381 b"Content-Length: 0\r\n", # 102400 b"\r\n", b"", ] ) with httpcore.HTTP11Connection( origin=origin, stream=stream, keepalive_expiry=5.0 ) as conn: response = conn.request("GET", "https://example.com/") assert response.status == 200 assert response.content == b"" httpcore-1.0.2/tests/_sync/test_http2.py000066400000000000000000000332531452343067500203100ustar00rootroot00000000000000import hpack import hyperframe.frame import pytest import httpcore def test_http2_connection(): origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.MockStream( [ hyperframe.frame.SettingsFrame().serialize(), hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=1, data=b"Hello, world!", flags=["END_STREAM"] ).serialize(), ] ) with httpcore.HTTP2Connection( origin=origin, stream=stream, keepalive_expiry=5.0 ) as conn: response = conn.request("GET", "https://example.com/") assert response.status == 200 assert response.content == b"Hello, world!" 
assert conn.is_idle() assert conn.is_available() assert not conn.is_closed() assert not conn.has_expired() assert ( conn.info() == "'https://example.com:443', HTTP/2, IDLE, Request Count: 1" ) assert ( repr(conn) == "" ) def test_http2_connection_closed(): origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.MockStream( [ hyperframe.frame.SettingsFrame().serialize(), hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=1, data=b"Hello, world!", flags=["END_STREAM"] ).serialize(), # Connection is closed after the first response hyperframe.frame.GoAwayFrame( stream_id=0, error_code=0, last_stream_id=1 ).serialize(), ] ) with httpcore.HTTP2Connection( origin=origin, stream=stream, keepalive_expiry=5.0 ) as conn: conn.request("GET", "https://example.com/") with pytest.raises(httpcore.ConnectionNotAvailable): conn.request("GET", "https://example.com/") assert not conn.is_available() def test_http2_connection_post_request(): origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.MockStream( [ hyperframe.frame.SettingsFrame().serialize(), hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=1, data=b"Hello, world!", flags=["END_STREAM"] ).serialize(), ] ) with httpcore.HTTP2Connection(origin=origin, stream=stream) as conn: response = conn.request( "POST", "https://example.com/", headers={b"content-length": b"17"}, content=b'{"data": "upload"}', ) assert response.status == 200 assert response.content == b"Hello, world!" def test_http2_connection_with_remote_protocol_error(): """ If a remote protocol error occurs, then no response will be returned, and the connection will not be reusable. """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.MockStream([b"Wait, this isn't valid HTTP!", b""]) with httpcore.HTTP2Connection(origin=origin, stream=stream) as conn: with pytest.raises(httpcore.RemoteProtocolError): conn.request("GET", "https://example.com/") def test_http2_connection_with_rst_stream(): """ If a stream reset occurs, then no response will be returned, but the connection will remain reusable for other requests. """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.MockStream( [ hyperframe.frame.SettingsFrame().serialize(), hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), # Stream is closed midway through the first response... hyperframe.frame.RstStreamFrame(stream_id=1, error_code=8).serialize(), # ...Which doesn't prevent the second response. 
hyperframe.frame.HeadersFrame( stream_id=3, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=3, data=b"Hello, world!", flags=["END_STREAM"] ).serialize(), b"", ] ) with httpcore.HTTP2Connection(origin=origin, stream=stream) as conn: with pytest.raises(httpcore.RemoteProtocolError): conn.request("GET", "https://example.com/") response = conn.request("GET", "https://example.com/") assert response.status == 200 def test_http2_connection_with_goaway(): """ If a GoAway frame occurs, then no response will be returned, and the connection will not be reusable for other requests. """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.MockStream( [ hyperframe.frame.SettingsFrame().serialize(), hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), # Connection is closed midway through the first response... hyperframe.frame.GoAwayFrame(stream_id=0, error_code=0).serialize(), # ...We'll never get to this second response. hyperframe.frame.HeadersFrame( stream_id=3, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=3, data=b"Hello, world!", flags=["END_STREAM"] ).serialize(), b"", ] ) with httpcore.HTTP2Connection(origin=origin, stream=stream) as conn: # The initial request has been closed midway, with an unrecoverable error. with pytest.raises(httpcore.RemoteProtocolError): conn.request("GET", "https://example.com/") # The second request can receive a graceful `ConnectionNotAvailable`, # and may be retried on a new connection. with pytest.raises(httpcore.ConnectionNotAvailable): conn.request("GET", "https://example.com/") def test_http2_connection_with_flow_control(): origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.MockStream( [ hyperframe.frame.SettingsFrame().serialize(), # Available flow: 65,535 hyperframe.frame.WindowUpdateFrame( stream_id=0, window_increment=10_000 ).serialize(), hyperframe.frame.WindowUpdateFrame( stream_id=1, window_increment=10_000 ).serialize(), # Available flow: 75,535 hyperframe.frame.WindowUpdateFrame( stream_id=0, window_increment=10_000 ).serialize(), hyperframe.frame.WindowUpdateFrame( stream_id=1, window_increment=10_000 ).serialize(), # Available flow: 85,535 hyperframe.frame.WindowUpdateFrame( stream_id=0, window_increment=10_000 ).serialize(), hyperframe.frame.WindowUpdateFrame( stream_id=1, window_increment=10_000 ).serialize(), # Available flow: 95,535 hyperframe.frame.WindowUpdateFrame( stream_id=0, window_increment=10_000 ).serialize(), hyperframe.frame.WindowUpdateFrame( stream_id=1, window_increment=10_000 ).serialize(), # Available flow: 105,535 hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=1, data=b"100,000 bytes received", flags=["END_STREAM"] ).serialize(), ] ) with httpcore.HTTP2Connection(origin=origin, stream=stream) as conn: response = conn.request( "POST", "https://example.com/", content=b"x" * 100_000, ) assert response.status == 200 assert response.content == b"100,000 bytes received" def test_http2_connection_attempt_close(): """ A connection can only be closed when it is idle. 
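
Once the response has been fully read the connection is idle, so:

    conn.close()
    conn.request("GET", "https://example.com/")  # raises ConnectionNotAvailable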
""" origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.MockStream( [ hyperframe.frame.SettingsFrame().serialize(), hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=1, data=b"Hello, world!", flags=["END_STREAM"] ).serialize(), ] ) with httpcore.HTTP2Connection(origin=origin, stream=stream) as conn: with conn.stream("GET", "https://example.com/") as response: response.read() assert response.status == 200 assert response.content == b"Hello, world!" conn.close() with pytest.raises(httpcore.ConnectionNotAvailable): conn.request("GET", "https://example.com/") def test_http2_request_to_incorrect_origin(): """ A connection can only send requests to whichever origin it is connected to. """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.MockStream([]) with httpcore.HTTP2Connection(origin=origin, stream=stream) as conn: with pytest.raises(RuntimeError): conn.request("GET", "https://other.com/") def test_http2_remote_max_streams_update(): """ If the remote server updates the maximum concurrent streams value, we should be adjusting how many streams we will allow. """ origin = httpcore.Origin(b"https", b"example.com", 443) stream = httpcore.MockStream( [ hyperframe.frame.SettingsFrame( settings={hyperframe.frame.SettingsFrame.MAX_CONCURRENT_STREAMS: 1000} ).serialize(), hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame(stream_id=1, data=b"Hello, world!").serialize(), hyperframe.frame.SettingsFrame( settings={hyperframe.frame.SettingsFrame.MAX_CONCURRENT_STREAMS: 50} ).serialize(), hyperframe.frame.DataFrame( stream_id=1, data=b"Hello, world...again!", flags=["END_STREAM"] ).serialize(), ] ) with httpcore.HTTP2Connection(origin=origin, stream=stream) as conn: with conn.stream("GET", "https://example.com/") as response: i = 0 for chunk in response.iter_stream(): if i == 0: assert chunk == b"Hello, world!" assert conn._h2_state.remote_settings.max_concurrent_streams == 1000 assert conn._max_streams == min( conn._h2_state.remote_settings.max_concurrent_streams, conn._h2_state.local_settings.max_concurrent_streams, ) elif i == 1: assert chunk == b"Hello, world...again!" assert conn._h2_state.remote_settings.max_concurrent_streams == 50 assert conn._max_streams == min( conn._h2_state.remote_settings.max_concurrent_streams, conn._h2_state.local_settings.max_concurrent_streams, ) i += 1 httpcore-1.0.2/tests/_sync/test_http_proxy.py000066400000000000000000000213751452343067500214710ustar00rootroot00000000000000import ssl import typing import hpack import hyperframe.frame import pytest from httpcore import ( SOCKET_OPTION, HTTPProxy, MockBackend, MockStream, NetworkStream, Origin, ProxyError, ) def test_proxy_forwarding(): """ Send an HTTP request via a proxy. """ network_backend = MockBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) with HTTPProxy( proxy_url="http://localhost:8080/", max_connections=10, network_backend=network_backend, ) as proxy: # Sending an intial request, which once complete will return to the pool, IDLE. 
with proxy.stream("GET", "http://example.com/") as response: info = [repr(c) for c in proxy.connections] assert info == [ "" ] response.read() assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in proxy.connections] assert info == [ "" ] assert proxy.connections[0].is_idle() assert proxy.connections[0].is_available() assert not proxy.connections[0].is_closed() # A connection on a forwarding proxy can only handle HTTP requests to the same origin. assert proxy.connections[0].can_handle_request( Origin(b"http", b"example.com", 80) ) assert not proxy.connections[0].can_handle_request( Origin(b"http", b"other.com", 80) ) assert not proxy.connections[0].can_handle_request( Origin(b"https", b"example.com", 443) ) assert not proxy.connections[0].can_handle_request( Origin(b"https", b"other.com", 443) ) def test_proxy_tunneling(): """ Send an HTTPS request via a proxy. """ network_backend = MockBackend( [ # The initial response to the proxy CONNECT b"HTTP/1.1 200 OK\r\n\r\n", # The actual response from the remote server b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) with HTTPProxy( proxy_url="http://localhost:8080/", network_backend=network_backend, ) as proxy: # Sending an intial request, which once complete will return to the pool, IDLE. with proxy.stream("GET", "https://example.com/") as response: info = [repr(c) for c in proxy.connections] assert info == [ "" ] response.read() assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in proxy.connections] assert info == [ "" ] assert proxy.connections[0].is_idle() assert proxy.connections[0].is_available() assert not proxy.connections[0].is_closed() # A connection on a tunneled proxy can only handle HTTPS requests to the same origin. assert not proxy.connections[0].can_handle_request( Origin(b"http", b"example.com", 80) ) assert not proxy.connections[0].can_handle_request( Origin(b"http", b"other.com", 80) ) assert proxy.connections[0].can_handle_request( Origin(b"https", b"example.com", 443) ) assert not proxy.connections[0].can_handle_request( Origin(b"https", b"other.com", 443) ) # We need to adapt the mock backend here slightly in order to deal # with the proxy case. We do not want the initial connection to the proxy # to indicate an HTTP/2 connection, but we do want it to indicate HTTP/2 # once the SSL upgrade has taken place. class HTTP1ThenHTTP2Stream(MockStream): def start_tls( self, ssl_context: ssl.SSLContext, server_hostname: typing.Optional[str] = None, timeout: typing.Optional[float] = None, ) -> NetworkStream: self._http2 = True return self class HTTP1ThenHTTP2Backend(MockBackend): def connect_tcp( self, host: str, port: int, timeout: typing.Optional[float] = None, local_address: typing.Optional[str] = None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, ) -> NetworkStream: return HTTP1ThenHTTP2Stream(list(self._buffer)) def test_proxy_tunneling_http2(): """ Send an HTTP/2 request via a proxy. 
""" network_backend = HTTP1ThenHTTP2Backend( [ # The initial response to the proxy CONNECT b"HTTP/1.1 200 OK\r\n\r\n", # The actual response from the remote server hyperframe.frame.SettingsFrame().serialize(), hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=1, data=b"Hello, world!", flags=["END_STREAM"] ).serialize(), ], ) with HTTPProxy( proxy_url="http://localhost:8080/", network_backend=network_backend, http2=True, ) as proxy: # Sending an intial request, which once complete will return to the pool, IDLE. with proxy.stream("GET", "https://example.com/") as response: info = [repr(c) for c in proxy.connections] assert info == [ "" ] response.read() assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in proxy.connections] assert info == [ "" ] assert proxy.connections[0].is_idle() assert proxy.connections[0].is_available() assert not proxy.connections[0].is_closed() # A connection on a tunneled proxy can only handle HTTPS requests to the same origin. assert not proxy.connections[0].can_handle_request( Origin(b"http", b"example.com", 80) ) assert not proxy.connections[0].can_handle_request( Origin(b"http", b"other.com", 80) ) assert proxy.connections[0].can_handle_request( Origin(b"https", b"example.com", 443) ) assert not proxy.connections[0].can_handle_request( Origin(b"https", b"other.com", 443) ) def test_proxy_tunneling_with_403(): """ Send an HTTPS request via a proxy. """ network_backend = MockBackend( [ b"HTTP/1.1 403 Permission Denied\r\n" b"\r\n", ] ) with HTTPProxy( proxy_url="http://localhost:8080/", network_backend=network_backend, ) as proxy: with pytest.raises(ProxyError) as exc_info: proxy.request("GET", "https://example.com/") assert str(exc_info.value) == "403 Permission Denied" assert not proxy.connections def test_proxy_tunneling_with_auth(): """ Send an authenticated HTTPS request via a proxy. """ network_backend = MockBackend( [ # The initial response to the proxy CONNECT b"HTTP/1.1 200 OK\r\n\r\n", # The actual response from the remote server b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) with HTTPProxy( proxy_url="http://localhost:8080/", proxy_auth=("username", "password"), network_backend=network_backend, ) as proxy: response = proxy.request("GET", "https://example.com/") assert response.status == 200 assert response.content == b"Hello, world!" # Dig into this private property as a cheap lazy way of # checking that the proxy header is set correctly. 
assert proxy._proxy_headers == [ # type: ignore (b"Proxy-Authorization", b"Basic dXNlcm5hbWU6cGFzc3dvcmQ=") ] httpcore-1.0.2/tests/_sync/test_integration.py000066400000000000000000000030571452343067500215710ustar00rootroot00000000000000import ssl import pytest import httpcore def test_request(httpbin): with httpcore.ConnectionPool() as pool: response = pool.request("GET", httpbin.url) assert response.status == 200 def test_ssl_request(httpbin_secure): ssl_context = ssl.create_default_context() ssl_context.check_hostname = False ssl_context.verify_mode = ssl.CERT_NONE with httpcore.ConnectionPool(ssl_context=ssl_context) as pool: response = pool.request("GET", httpbin_secure.url) assert response.status == 200 def test_extra_info(httpbin_secure): ssl_context = ssl.create_default_context() ssl_context.check_hostname = False ssl_context.verify_mode = ssl.CERT_NONE with httpcore.ConnectionPool(ssl_context=ssl_context) as pool: with pool.stream("GET", httpbin_secure.url) as response: assert response.status == 200 stream = response.extensions["network_stream"] ssl_object = stream.get_extra_info("ssl_object") assert ssl_object.version() == "TLSv1.3" local_addr = stream.get_extra_info("client_addr") assert local_addr[0] == "127.0.0.1" remote_addr = stream.get_extra_info("server_addr") assert "https://%s:%d" % remote_addr == httpbin_secure.url sock = stream.get_extra_info("socket") assert hasattr(sock, "family") assert hasattr(sock, "type") invalid = stream.get_extra_info("invalid") assert invalid is None stream.get_extra_info("is_readable") httpcore-1.0.2/tests/_sync/test_socks_proxy.py000066400000000000000000000143441452343067500216320ustar00rootroot00000000000000import pytest import httpcore def test_socks5_request(): """ Send an HTTP request via a SOCKS proxy. """ network_backend = httpcore.MockBackend( [ # The initial socks CONNECT # v5 NOAUTH b"\x05\x00", # v5 SUC RSV IP4 127 .0 .0 .1 :80 b"\x05\x00\x00\x01\xff\x00\x00\x01\x00\x50", # The actual response from the remote server b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) with httpcore.SOCKSProxy( proxy_url="socks5://localhost:8080/", network_backend=network_backend, ) as proxy: # Sending an intial request, which once complete will return to the pool, IDLE. with proxy.stream("GET", "https://example.com/") as response: info = [repr(c) for c in proxy.connections] assert info == [ "" ] response.read() assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in proxy.connections] assert info == [ "" ] assert proxy.connections[0].is_idle() assert proxy.connections[0].is_available() assert not proxy.connections[0].is_closed() # A connection on a tunneled proxy can only handle HTTPS requests to the same origin. assert not proxy.connections[0].can_handle_request( httpcore.Origin(b"http", b"example.com", 80) ) assert not proxy.connections[0].can_handle_request( httpcore.Origin(b"http", b"other.com", 80) ) assert proxy.connections[0].can_handle_request( httpcore.Origin(b"https", b"example.com", 443) ) assert not proxy.connections[0].can_handle_request( httpcore.Origin(b"https", b"other.com", 443) ) def test_authenticated_socks5_request(): """ Send an HTTP request via a SOCKS proxy. 
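
The mocked server bytes follow the SOCKS5 handshake (RFC 1928, with the
username/password sub-negotiation from RFC 1929):

    b"\x05\x02"  # method selection: version 5, method 0x02 (username/password)
    b"\x01\x00"  # auth sub-negotiation: version 1, status 0x00 (success)

followed by a successful CONNECT reply and the proxied HTTP response.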
""" network_backend = httpcore.MockBackend( [ # The initial socks CONNECT # v5 USERNAME/PASSWORD b"\x05\x02", # v1 VALID USERNAME/PASSWORD b"\x01\x00", # v5 SUC RSV IP4 127 .0 .0 .1 :80 b"\x05\x00\x00\x01\xff\x00\x00\x01\x00\x50", # The actual response from the remote server b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 13\r\n", b"\r\n", b"Hello, world!", ] ) with httpcore.SOCKSProxy( proxy_url="socks5://localhost:8080/", proxy_auth=(b"username", b"password"), network_backend=network_backend, ) as proxy: # Sending an intial request, which once complete will return to the pool, IDLE. with proxy.stream("GET", "https://example.com/") as response: info = [repr(c) for c in proxy.connections] assert info == [ "" ] response.read() assert response.status == 200 assert response.content == b"Hello, world!" info = [repr(c) for c in proxy.connections] assert info == [ "" ] assert proxy.connections[0].is_idle() assert proxy.connections[0].is_available() assert not proxy.connections[0].is_closed() def test_socks5_request_connect_failed(): """ Attempt to send an HTTP request via a SOCKS proxy, resulting in a connect failure. """ network_backend = httpcore.MockBackend( [ # The initial socks CONNECT # v5 NOAUTH b"\x05\x00", # v5 NO RSV IP4 0 .0 .0 .0 :00 b"\x05\x05\x00\x01\x00\x00\x00\x00\x00\x00", ] ) with httpcore.SOCKSProxy( proxy_url="socks5://localhost:8080/", network_backend=network_backend, ) as proxy: # Sending a request, which the proxy rejects with pytest.raises(httpcore.ProxyError) as exc_info: proxy.request("GET", "https://example.com/") assert ( str(exc_info.value) == "Proxy Server could not connect: Connection refused." ) assert not proxy.connections def test_socks5_request_failed_to_provide_auth(): """ Attempt to send an HTTP request via an authenticated SOCKS proxy, without providing authentication credentials. """ network_backend = httpcore.MockBackend( [ # v5 USERNAME/PASSWORD b"\x05\x02", ] ) with httpcore.SOCKSProxy( proxy_url="socks5://localhost:8080/", network_backend=network_backend, ) as proxy: # Sending a request, which the proxy rejects with pytest.raises(httpcore.ProxyError) as exc_info: proxy.request("GET", "https://example.com/") assert ( str(exc_info.value) == "Requested NO AUTHENTICATION REQUIRED from proxy server, but got USERNAME/PASSWORD." ) assert not proxy.connections def test_socks5_request_incorrect_auth(): """ Attempt to send an HTTP request via an authenticated SOCKS proxy, wit incorrect authentication credentials. """ network_backend = httpcore.MockBackend( [ # v5 USERNAME/PASSWORD b"\x05\x02", # v1 INVALID USERNAME/PASSWORD b"\x01\x01", ] ) with httpcore.SOCKSProxy( proxy_url="socks5://localhost:8080/", proxy_auth=(b"invalid", b"invalid"), network_backend=network_backend, ) as proxy: # Sending a request, which the proxy rejects with pytest.raises(httpcore.ProxyError) as exc_info: proxy.request("GET", "https://example.com/") assert str(exc_info.value) == "Invalid username/password" assert not proxy.connections httpcore-1.0.2/tests/concurrency.py000066400000000000000000000022721452343067500174240ustar00rootroot00000000000000""" Some of our tests require branching of flow control. We'd like to have the same kind of test for both async and sync environments, and so we have functionality here that replicate's Trio's `open_nursery` API, but in a plain old multi-threaded context. We don't do any smarts around cancellations, or managing exceptions from childen, because we don't need that for our use-case. 
""" import threading from types import TracebackType from typing import Any, Callable, List, Optional, Type class Nursery: def __init__(self) -> None: self._threads: List[threading.Thread] = [] def __enter__(self) -> "Nursery": return self def __exit__( self, exc_type: Optional[Type[BaseException]] = None, exc_value: Optional[BaseException] = None, traceback: Optional[TracebackType] = None, ) -> None: for thread in self._threads: thread.start() for thread in self._threads: thread.join() def start_soon(self, func: Callable[..., object], *args: Any) -> None: thread = threading.Thread(target=func, args=args) self._threads.append(thread) def open_nursery() -> Nursery: return Nursery() httpcore-1.0.2/tests/test_api.py000066400000000000000000000010111452343067500166700ustar00rootroot00000000000000import json import httpcore def test_request(httpbin): response = httpcore.request("GET", httpbin.url) assert response.status == 200 def test_stream(httpbin): with httpcore.stream("GET", httpbin.url) as response: assert response.status == 200 def test_request_with_content(httpbin): url = f"{httpbin.url}/post" response = httpcore.request("POST", url, content=b'{"hello":"world"}') assert response.status == 200 assert json.loads(response.content)["json"] == {"hello": "world"} httpcore-1.0.2/tests/test_cancellations.py000066400000000000000000000165471452343067500207620ustar00rootroot00000000000000import typing import anyio import hpack import hyperframe import pytest import httpcore class SlowWriteStream(httpcore.AsyncNetworkStream): """ A stream that we can use to test cancellations during the request writing. """ async def write( self, buffer: bytes, timeout: typing.Optional[float] = None ) -> None: await anyio.sleep(999) async def aclose(self) -> None: await anyio.sleep(0) class HandshakeThenSlowWriteStream(httpcore.AsyncNetworkStream): """ A stream that we can use to test cancellations during the HTTP/2 request writing, after allowing the initial handshake to complete. """ def __init__(self) -> None: self._handshake_complete = False async def write( self, buffer: bytes, timeout: typing.Optional[float] = None ) -> None: if not self._handshake_complete: self._handshake_complete = True else: await anyio.sleep(999) async def aclose(self) -> None: await anyio.sleep(0) class SlowReadStream(httpcore.AsyncNetworkStream): """ A stream that we can use to test cancellations during the response reading. 
""" def __init__(self, buffer: typing.List[bytes]): self._buffer = buffer async def write(self, buffer, timeout=None): pass async def read( self, max_bytes: int, timeout: typing.Optional[float] = None ) -> bytes: if not self._buffer: await anyio.sleep(999) return self._buffer.pop(0) async def aclose(self): await anyio.sleep(0) class SlowWriteBackend(httpcore.AsyncNetworkBackend): async def connect_tcp( self, host: str, port: int, timeout: typing.Optional[float] = None, local_address: typing.Optional[str] = None, socket_options: typing.Optional[typing.Iterable[httpcore.SOCKET_OPTION]] = None, ) -> httpcore.AsyncNetworkStream: return SlowWriteStream() class SlowReadBackend(httpcore.AsyncNetworkBackend): def __init__(self, buffer: typing.List[bytes]): self._buffer = buffer async def connect_tcp( self, host: str, port: int, timeout: typing.Optional[float] = None, local_address: typing.Optional[str] = None, socket_options: typing.Optional[typing.Iterable[httpcore.SOCKET_OPTION]] = None, ) -> httpcore.AsyncNetworkStream: return SlowReadStream(self._buffer) @pytest.mark.anyio async def test_connection_pool_timeout_during_request(): """ An async timeout when writing an HTTP/1.1 response on the connection pool should leave the pool in a consistent state. In this case, that means the connection will become closed, and no longer remain in the pool. """ network_backend = SlowWriteBackend() async with httpcore.AsyncConnectionPool(network_backend=network_backend) as pool: with anyio.move_on_after(0.01): await pool.request("GET", "http://example.com") assert not pool.connections @pytest.mark.anyio async def test_connection_pool_timeout_during_response(): """ An async timeout when reading an HTTP/1.1 response on the connection pool should leave the pool in a consistent state. In this case, that means the connection will become closed, and no longer remain in the pool. """ network_backend = SlowReadBackend( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 1000\r\n", b"\r\n", b"Hello, world!...", ] ) async with httpcore.AsyncConnectionPool(network_backend=network_backend) as pool: with anyio.move_on_after(0.01): await pool.request("GET", "http://example.com") assert not pool.connections @pytest.mark.anyio async def test_h11_timeout_during_request(): """ An async timeout on an HTTP/1.1 during the request writing should leave the connection in a neatly closed state. """ origin = httpcore.Origin(b"http", b"example.com", 80) stream = SlowWriteStream() async with httpcore.AsyncHTTP11Connection(origin, stream) as conn: with anyio.move_on_after(0.01): await conn.request("GET", "http://example.com") assert conn.is_closed() @pytest.mark.anyio async def test_h11_timeout_during_response(): """ An async timeout on an HTTP/1.1 during the response reading should leave the connection in a neatly closed state. """ origin = httpcore.Origin(b"http", b"example.com", 80) stream = SlowReadStream( [ b"HTTP/1.1 200 OK\r\n", b"Content-Type: plain/text\r\n", b"Content-Length: 1000\r\n", b"\r\n", b"Hello, world!...", ] ) async with httpcore.AsyncHTTP11Connection(origin, stream) as conn: with anyio.move_on_after(0.01): await conn.request("GET", "http://example.com") assert conn.is_closed() @pytest.mark.xfail @pytest.mark.anyio async def test_h2_timeout_during_handshake(): """ An async timeout on an HTTP/2 during the initial handshake should leave the connection in a neatly closed state. 
""" origin = httpcore.Origin(b"http", b"example.com", 80) stream = SlowWriteStream() async with httpcore.AsyncHTTP2Connection(origin, stream) as conn: with anyio.move_on_after(0.01): await conn.request("GET", "http://example.com") assert conn.is_closed() @pytest.mark.xfail @pytest.mark.anyio async def test_h2_timeout_during_request(): """ An async timeout on an HTTP/2 during a request should leave the connection in a neatly idle state. The connection is not closed because it is multiplexed, and a timeout on one request does not require the entire connection be closed. """ origin = httpcore.Origin(b"http", b"example.com", 80) stream = HandshakeThenSlowWriteStream() async with httpcore.AsyncHTTP2Connection(origin, stream) as conn: with anyio.move_on_after(0.01): await conn.request("GET", "http://example.com") assert not conn.is_closed() assert conn.is_idle() @pytest.mark.xfail @pytest.mark.anyio async def test_h2_timeout_during_response(): """ An async timeout on an HTTP/2 during the response reading should leave the connection in a neatly idle state. The connection is not closed because it is multiplexed, and a timeout on one request does not require the entire connection be closed. """ origin = httpcore.Origin(b"http", b"example.com", 80) stream = SlowReadStream( [ hyperframe.frame.SettingsFrame().serialize(), hyperframe.frame.HeadersFrame( stream_id=1, data=hpack.Encoder().encode( [ (b":status", b"200"), (b"content-type", b"plain/text"), ] ), flags=["END_HEADERS"], ).serialize(), hyperframe.frame.DataFrame( stream_id=1, data=b"Hello, world!...", flags=[] ).serialize(), ] ) async with httpcore.AsyncHTTP2Connection(origin, stream) as conn: with anyio.move_on_after(0.01): await conn.request("GET", "http://example.com") assert not conn.is_closed() assert conn.is_idle() httpcore-1.0.2/tests/test_models.py000066400000000000000000000117771452343067500174260ustar00rootroot00000000000000import typing import pytest import httpcore # URL def test_url(): url = httpcore.URL("https://www.example.com/") assert url == httpcore.URL( scheme="https", host="www.example.com", port=None, target="/" ) assert bytes(url) == b"https://www.example.com/" def test_url_with_port(): url = httpcore.URL("https://www.example.com:443/") assert url == httpcore.URL( scheme="https", host="www.example.com", port=443, target="/" ) assert bytes(url) == b"https://www.example.com:443/" def test_url_with_invalid_argument(): with pytest.raises(TypeError) as exc_info: httpcore.URL(123) # type: ignore assert str(exc_info.value) == "url must be bytes or str, but got int." def test_url_cannot_include_unicode_strings(): """ URLs instantiated with strings outside of the plain ASCII range are disallowed, but the explicit style allows for these ambiguous cases to be precisely expressed. """ with pytest.raises(TypeError) as exc_info: httpcore.URL("https://www.example.com/☺") assert str(exc_info.value) == "url strings may not include unicode characters." 
httpcore.URL(scheme=b"https", host=b"www.example.com", target="/☺".encode("utf-8")) # Request def test_request(): request = httpcore.Request("GET", "https://www.example.com/") assert request.method == b"GET" assert request.url == httpcore.URL("https://www.example.com/") assert request.headers == [] assert request.extensions == {} assert repr(request) == "" assert ( repr(request.url) == "URL(scheme=b'https', host=b'www.example.com', port=None, target=b'/')" ) assert repr(request.stream) == "" def test_request_with_invalid_method(): with pytest.raises(TypeError) as exc_info: httpcore.Request(123, "https://www.example.com/") # type: ignore assert str(exc_info.value) == "method must be bytes or str, but got int." def test_request_with_invalid_url(): with pytest.raises(TypeError) as exc_info: httpcore.Request("GET", 123) # type: ignore assert str(exc_info.value) == "url must be a URL, bytes, or str, but got int." def test_request_with_invalid_headers(): with pytest.raises(TypeError) as exc_info: httpcore.Request("GET", "https://www.example.com/", headers=123) # type: ignore assert ( str(exc_info.value) == "headers must be a mapping or sequence of two-tuples, but got int." ) # Response def test_response(): response = httpcore.Response(200) assert response.status == 200 assert response.headers == [] assert response.extensions == {} assert repr(response) == "" assert repr(response.stream) == "" # Tests for reading and streaming sync byte streams... class ByteIterator: def __init__(self, chunks: typing.List[bytes]) -> None: self._chunks = chunks def __iter__(self) -> typing.Iterator[bytes]: for chunk in self._chunks: yield chunk def test_response_sync_read(): stream = ByteIterator([b"Hello, ", b"world!"]) response = httpcore.Response(200, content=stream) assert response.read() == b"Hello, world!" assert response.content == b"Hello, world!" def test_response_sync_streaming(): stream = ByteIterator([b"Hello, ", b"world!"]) response = httpcore.Response(200, content=stream) content = b"".join([chunk for chunk in response.iter_stream()]) assert content == b"Hello, world!" # We streamed the response rather than reading it, so .content is not available. with pytest.raises(RuntimeError): response.content # Once we've streamed the response, we can't access the stream again. with pytest.raises(RuntimeError): for _chunk in response.iter_stream(): pass # pragma: nocover # Tests for reading and streaming async byte streams... class AsyncByteIterator: def __init__(self, chunks: typing.List[bytes]) -> None: self._chunks = chunks async def __aiter__(self) -> typing.AsyncIterator[bytes]: for chunk in self._chunks: yield chunk @pytest.mark.trio async def test_response_async_read(): stream = AsyncByteIterator([b"Hello, ", b"world!"]) response = httpcore.Response(200, content=stream) assert await response.aread() == b"Hello, world!" assert response.content == b"Hello, world!" @pytest.mark.trio async def test_response_async_streaming(): stream = AsyncByteIterator([b"Hello, ", b"world!"]) response = httpcore.Response(200, content=stream) content = b"".join([chunk async for chunk in response.aiter_stream()]) assert content == b"Hello, world!" # We streamed the response rather than reading it, so .content is not available. with pytest.raises(RuntimeError): response.content # Once we've streamed the response, we can't access the stream again. 
    with pytest.raises(RuntimeError):
        async for chunk in response.aiter_stream():
            pass  # pragma: nocover

httpcore-1.0.2/unasync.py

#!venv/bin/python
import os
import re
import sys
from pprint import pprint

SUBS = [
    ('from .._backends.auto import AutoBackend',
     'from .._backends.sync import SyncBackend'),
    ('import trio as concurrency', 'from tests import concurrency'),
    ('AsyncIterator', 'Iterator'),
    ('Async([A-Z][A-Za-z0-9_]*)', r'\2'),
    ('async def', 'def'),
    ('async with', 'with'),
    ('async for', 'for'),
    ('await ', ''),
    ('handle_async_request', 'handle_request'),
    ('aclose', 'close'),
    ('aiter_stream', 'iter_stream'),
    ('aread', 'read'),
    ('asynccontextmanager', 'contextmanager'),
    ('__aenter__', '__enter__'),
    ('__aexit__', '__exit__'),
    ('__aiter__', '__iter__'),
    ('@pytest.mark.anyio', ''),
    ('@pytest.mark.trio', ''),
    ('AutoBackend', 'SyncBackend'),
]
COMPILED_SUBS = [
    (re.compile(r'(^|\b)' + regex + r'($|\b)'), repl)
    for regex, repl in SUBS
]

USED_SUBS = set()


def unasync_line(line):
    for index, (regex, repl) in enumerate(COMPILED_SUBS):
        old_line = line
        line = re.sub(regex, repl, line)
        if old_line != line:
            USED_SUBS.add(index)
    return line


def unasync_file(in_path, out_path):
    with open(in_path, "r") as in_file:
        with open(out_path, "w", newline="") as out_file:
            for line in in_file.readlines():
                line = unasync_line(line)
                out_file.write(line)


def unasync_file_check(in_path, out_path):
    with open(in_path, "r") as in_file:
        with open(out_path, "r") as out_file:
            for in_line, out_line in zip(in_file.readlines(), out_file.readlines()):
                expected = unasync_line(in_line)
                if out_line != expected:
                    print(f'unasync mismatch between {in_path!r} and {out_path!r}')
                    print(f'Async code: {in_line!r}')
                    print(f'Expected sync code: {expected!r}')
                    print(f'Actual sync code: {out_line!r}')
                    sys.exit(1)


def unasync_dir(in_dir, out_dir, check_only=False):
    for dirpath, dirnames, filenames in os.walk(in_dir):
        for filename in filenames:
            if not filename.endswith('.py'):
                continue
            rel_dir = os.path.relpath(dirpath, in_dir)
            in_path = os.path.normpath(os.path.join(in_dir, rel_dir, filename))
            out_path = os.path.normpath(os.path.join(out_dir, rel_dir, filename))
            print(in_path, '->', out_path)
            if check_only:
                unasync_file_check(in_path, out_path)
            else:
                unasync_file(in_path, out_path)


def main():
    check_only = '--check' in sys.argv
    unasync_dir("httpcore/_async", "httpcore/_sync", check_only=check_only)
    unasync_dir("tests/_async", "tests/_sync", check_only=check_only)

    if len(USED_SUBS) != len(SUBS):
        unused_subs = [SUBS[i] for i in range(len(SUBS)) if i not in USED_SUBS]
        print("These patterns were not used:")
        pprint(unused_subs)
        exit(1)


if __name__ == '__main__':
    main()
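
# A usage sketch (main() above wires up exactly these two modes):
#
#     $ python unasync.py          # regenerate httpcore/_sync and tests/_sync
#     $ python unasync.py --check  # verify the generated sync code is current;
#                                  # exits 1 and reports the first mismatch otherwise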