aiohttp-3.0.1/0000777000000000000000000000000013240305035011310 5ustar 00000000000000aiohttp-3.0.1/.appveyor.yml0000666000000000000000000000170713240304665013773 0ustar 00000000000000environment: PYPI_PASSWD: secure: u+K6dKi7+CXXVFEUG4V7zUyV3w7Ntg0Ork/RGVV0eSQ= matrix: - PYTHON: "C:\\Python35" - PYTHON: "C:\\Python35-x64" - PYTHON: "C:\\Python36" - PYTHON: "C:\\Python36-x64" install: - "tools/build.cmd %PYTHON%\\python.exe -m pip install -U wheel setuptools" - "tools/build.cmd %PYTHON%\\python.exe -m pip install -r requirements/ci.txt" build: false test_script: - "tools/build.cmd %PYTHON%\\python.exe setup.py test" after_test: - "tools/build.cmd %PYTHON%\\python.exe setup.py sdist bdist_wheel" artifacts: - path: dist\* deploy_script: - ps: >- if($env:appveyor_repo_tag -eq 'True') { Invoke-Expression "$env:PYTHON\\python.exe -m twine upload dist/* --username andrew.svetlov --password $env:PYPI_PASSWD" } #notifications: # - provider: Webhook # url: https://ci.appveyor.com/api/github/webhook?id=08c7793w1tp839fl # on_build_success: false # on_build_failure: True aiohttp-3.0.1/.github/0000777000000000000000000000000013240305035012650 5ustar 00000000000000aiohttp-3.0.1/.github/ISSUE_TEMPLATE.md0000666000000000000000000000136313240304665015370 0ustar 00000000000000## Long story short ## Expected behaviour ## Actual behaviour ## Steps to reproduce ## Your environment aiohttp-3.0.1/.github/PULL_REQUEST_TEMPLATE.md0000666000000000000000000000246413240304665016467 0ustar 00000000000000 ## Are there changes in behavior for the user? ## Related issue number ## Checklist - [ ] I think the code is well written - [ ] Unit tests for the changes exist - [ ] Documentation reflects the changes - [ ] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. 
- [ ] Add a new news fragment into the `CHANGES` folder * name it `.` for example (588.bugfix) * if you don't have an `issue_id` change it to the pr id after creating the pr * ensure type is one of the following: * `.feature`: Signifying a new feature. * `.bugfix`: Signifying a bug fix. * `.doc`: Signifying a documentation improvement. * `.removal`: Signifying a deprecation or removal of public API. * `.misc`: A ticket has been closed, but it is not of interest to users. * Make sure to use full sentences with correct case and punctuation, for example: "Fix issue with non-ascii contents in doctest text files." aiohttp-3.0.1/.gitignore0000666000000000000000000000077013240304665013314 0ustar 00000000000000*.swp *.bak *.egg *.egg-info *.eggs *.pyc *.pyd *.pyo *.so *.tar.gz *~ .DS_Store .Python .cache .coverage .coverage.* .idea .installed.cfg .noseids .tox .vimrc aiohttp/_frozenlist.c aiohttp/_frozenlist.html aiohttp/_websocket.c aiohttp/_websocket.html aiohttp/_http_parser.c aiohttp/_http_parser.html bin build htmlcov develop-eggs dist docs/_build/ eggs include/ lib/ man/ nosetests.xml parts pyvenv sources var/* venv virtualenv.py .install-deps .develop .gitconfig .flake .python-version .pytest_cacheaiohttp-3.0.1/.pyup.yml0000666000000000000000000000012213240304665013111 0ustar 00000000000000# Label PRs with `deps-update` label label_prs: deps-update schedule: every week aiohttp-3.0.1/.readthedocs.yml0000666000000000000000000000010213240304665014377 0ustar 00000000000000build: image: latest python: version: 3.6 pip_install: true aiohttp-3.0.1/.travis.yml0000666000000000000000000001731513240304665013440 0ustar 00000000000000sudo: false language: python python: - 3.5 - &mainstream_python 3.6 - 3.6-dev - nightly - &pypy3 pypy3.5-5.8.0 install: - &upgrade_python_toolset pip install --upgrade pip wheel setuptools - pip install -r requirements/ci.txt script: - make cov-ci-no-ext - make cov-ci-aio-debug - make cov-ci-run after_success: - codecov _helpers: - 
&_mainstream_python_base python: *mainstream_python - &_reset_steps env: [] before_install: skip install: skip script: skip after_success: [] - &_lint_base stage: &doc_stage_name docs, linting and pre-test checks <<: *_mainstream_python_base <<: *_reset_steps install: - *upgrade_python_toolset - pip install -U -r requirements/ci.txt - &_doc_base <<: *_lint_base install: - *upgrade_python_toolset - pip install -U -r requirements/ci.txt -r requirements/doc.txt -r requirements/doc-spelling.txt after_failure: cat docs/_build/spelling/output.txt addons: apt: packages: - libenchant-dev - &osx_python_base stage: &stage_test_osx_name test under OS X (last chance to fail before deploy available) os: osx language: generic python: *pypy3 env: - &env_pypy3 PYTHON_VERSION=pypy3.5-5.8.0 - &env_pyenv PYENV_ROOT="$HOME/.pyenv" - &env_path PATH="$PYENV_ROOT/bin:$PATH" before_install: - brew update - brew install readline xz - &ensure_pyenv_installed | if [ ! -f "$PYENV_ROOT/bin/pyenv" ] then rm -rf "$PYENV_ROOT" curl -L https://raw.githubusercontent.com/pyenv/pyenv-installer/master/bin/pyenv-installer | bash fi pyenv update eval "$(pyenv init -)" eval "$(pyenv virtualenv-init -)" - &install_python pyenv install --skip-existing --keep --verbose "$PYTHON_VERSION" - &switch_python pyenv shell "$PYTHON_VERSION" - &python_version python --version before_cache: - brew --cache - &generic_deploy_base stage: &deploy_stage_name deploy (PYPI upload itself runs only for tagged commits) <<: *_mainstream_python_base deploy: &deploy_step provider: pypi # `skip_cleanup: true` is required to preserve binary wheels, built # inside of manylinux1 docker container during `script` step above. 
skip_cleanup: true user: andrew.svetlov password: secure: ZQKbdPT9BlNqP5CTbWRQyeyig7Bpf7wsnYVQIQPOZc9Ec74A+dsbagstR1sPkAO+d+5PN0pZMovvmU7OQhSVPAnJ74nsN90/fL4ux3kqYecMbevv0rJg20hMXSSkwMEIpjUsMdMjJvZAcaKytGWmKL0qAlOJHhixd1pBbWyuIUE= # Although Travis CI instructs `setup.py` to build source distribution, # which is default value for distribution option (`distribution: sdist`), # it will also upload all wheels we've previously built in manylinux1 # docker container using `twine upload -r pypi dist/*` command. # Also since commit https://github.com/travis-ci/dpl/commit/90b5e39 # it is default that Travis PYPI provider has `skip_upload_docs: true` # set by default. # Besides above, we don't do cleanup of `dist/*`, because it's being done # by Travis CI PYPI deployment provider after upload, unconditionally. on: tags: true all_branches: true - &osx_pypi_deploy_base_1011 <<: *osx_python_base <<: *generic_deploy_base osx_image: xcode7.3 script: skip after_success: [] env: - &env_os1011_msg Build and deploy to PYPI of OS X 10.11 binary wheel - &env_py36 PYTHON_VERSION=3.6.3 - *env_pyenv - *env_path deploy: <<: *deploy_step skip_cleanup: false distributions: bdist_wheel - &osx_pypi_deploy_base_1012 <<: *osx_pypi_deploy_base_1011 osx_image: xcode8.1 env: - &env_os1012_msg Build and deploy to PYPI of OS X 10.12 binary wheel - *env_py36 - *env_pyenv - *env_path - &osx_pypi_deploy_base_1010 <<: *osx_pypi_deploy_base_1011 osx_image: xcode6.4 env: - &env_os1010_msg Build and deploy to PYPI of OS X 10.10 binary wheel - *env_py36 - *env_pyenv - *env_path # doesn't work on MacOSX out of the box -- the system has no Python installed # there's a workaround to use `language: generic` and install it, but it's slow os: linux jobs: fast_finish: true allow_failures: - python: nightly - python: *pypy3 include: - <<: *_doc_base env: - docs script: - towncrier --yes - make doc-spelling - <<: *_lint_base env: - flake8 script: - flake8 aiohttp examples tests demos - <<: *_lint_base env: - dist 
setup check install: - *upgrade_python_toolset - pip install -r requirements/doc.txt script: - python setup.py check --metadata --restructuredtext --strict --verbose - <<: *osx_python_base python: 3.5.3 env: - &env_py35 PYTHON_VERSION=3.5.3 - *env_pyenv - *env_path - <<: *osx_python_base python: *mainstream_python env: - *env_py36 - *env_pyenv - *env_path - <<: *osx_python_base python: *mainstream_python env: - PYTHON_VERSION=3.6-dev - *env_pyenv - *env_path - <<: *osx_python_base python: nightly env: - PYTHON_VERSION=3.7-dev - *env_pyenv - *env_path # pypy3.5-5.8.0 fails under OS X because it's unsupported # Build and deploy manylinux1 binary wheels and source distribution - <<: *generic_deploy_base <<: *_reset_steps env: Build and deploy to PYPI of manylinux1 binary wheels for all supported Pythons and source distribution dist: trusty group: edge services: - docker script: - ./tools/run_docker.sh "aiohttp" - pip install -r requirements/ci.txt # to compile *.c files by Cython deploy: provider: pypi # `skip_cleanup: true` is required to preserve binary wheels, built # inside of manylinux1 docker container during `script` step above. skip_cleanup: true user: andrew.svetlov password: secure: ZQKbdPT9BlNqP5CTbWRQyeyig7Bpf7wsnYVQIQPOZc9Ec74A+dsbagstR1sPkAO+d+5PN0pZMovvmU7OQhSVPAnJ74nsN90/fL4ux3kqYecMbevv0rJg20hMXSSkwMEIpjUsMdMjJvZAcaKytGWmKL0qAlOJHhixd1pBbWyuIUE= # Although Travis CI instructs `setup.py` to build source distribution, # which is default value for distribution option (`distribution: sdist`), # it will also upload all wheels we've previously built in manylinux1 # docker container using `twine upload -r pypi dist/*` command. # Also since commit https://github.com/travis-ci/dpl/commit/90b5e39 # it is default that Travis PYPI provider has `skip_upload_docs: true` # set by default. # Besides above, we don't do cleanup of `dist/*`, because it's being done # by Travis CI PYPI deployment provider after upload, unconditionally. 
on: tags: true all_branches: true # Build and deploy MacOS binary wheels for each OSX+Python combo possible # OS X 10.10, Python 3.5 - <<: *osx_pypi_deploy_base_1010 python: 3.5 env: - *env_os1010_msg - *env_py35 - *env_pyenv - *env_path # OS X 10.10, Python 3.6 - <<: *osx_pypi_deploy_base_1010 env: - *env_os1010_msg - *env_py36 - *env_pyenv - *env_path # OS X 10.11, Python 3.5 - <<: *osx_pypi_deploy_base_1011 python: 3.5 env: - *env_os1011_msg - *env_py35 - *env_pyenv - *env_path # OS X 10.11, Python 3.6 - <<: *osx_pypi_deploy_base_1011 env: - *env_os1011_msg - *env_py36 - *env_pyenv - *env_path # OS X 10.12, Python 3.5 - <<: *osx_pypi_deploy_base_1012 python: 3.5 env: - *env_os1012_msg - *env_py35 - *env_pyenv - *env_path # OS X 10.12, Python 3.6 - <<: *osx_pypi_deploy_base_1012 env: - *env_os1012_msg - *env_py36 - *env_pyenv - *env_path stages: - *doc_stage_name - test - name: *stage_test_osx_name if: type IN (api, cron) - name: *deploy_stage_name # This will prevent deploy unless it's a tagged commit: if: tag IS present cache: pip before_cache: - rm -f $HOME/.cache/pip/log/debug.log aiohttp-3.0.1/aiohttp/0000777000000000000000000000000013240305035012760 5ustar 00000000000000aiohttp-3.0.1/aiohttp/abc.py0000666000000000000000000000647613240304665014104 0ustar 00000000000000import asyncio from abc import ABC, abstractmethod from collections.abc import Iterable, Sized class AbstractRouter(ABC): def __init__(self): self._frozen = False def post_init(self, app): """Post init stage. Not an abstract method for sake of backward compatibility, but if the router wants to be aware of the application it can override this. 
""" @property def frozen(self): return self._frozen def freeze(self): """Freeze router.""" self._frozen = True @abstractmethod async def resolve(self, request): """Return MATCH_INFO for given request""" class AbstractMatchInfo(ABC): @abstractmethod async def handler(self, request): """Execute matched request handler""" @abstractmethod async def expect_handler(self, request): """Expect handler for 100-continue processing""" @property # pragma: no branch @abstractmethod def http_exception(self): """HTTPException instance raised on router's resolving, or None""" @abstractmethod # pragma: no branch def get_info(self): """Return a dict with additional info useful for introspection""" @property # pragma: no branch @abstractmethod def apps(self): """Stack of nested applications. Top level application is left-most element. """ @abstractmethod def add_app(self, app): """Add application to the nested apps stack.""" @abstractmethod def freeze(self): """Freeze the match info. The method is called after route resolution. After the call .add_app() is forbidden. 
""" class AbstractView(ABC): """Abstract class based view.""" def __init__(self, request): self._request = request @property def request(self): """Request instance.""" return self._request @abstractmethod def __await__(self): """Execute the view handler.""" class AbstractResolver(ABC): """Abstract DNS resolver.""" @abstractmethod async def resolve(self, hostname): """Return IP address for given hostname""" @abstractmethod async def close(self): """Release resolver""" class AbstractCookieJar(Sized, Iterable): """Abstract Cookie Jar.""" def __init__(self, *, loop=None): self._loop = loop or asyncio.get_event_loop() @abstractmethod def clear(self): """Clear all cookies.""" @abstractmethod def update_cookies(self, cookies, response_url=None): """Update cookies.""" @abstractmethod def filter_cookies(self, request_url): """Return the jar's cookies filtered by their attributes.""" class AbstractStreamWriter(ABC): """Abstract stream writer.""" @abstractmethod async def write(self, chunk): """Write chunk into stream.""" @abstractmethod async def write_eof(self, chunk=b''): """Write last chunk.""" @abstractmethod async def drain(self): """Flush the write buffer.""" class AbstractAccessLogger(ABC): """Abstract writer to access log.""" def __init__(self, logger, log_format): self.logger = logger self.log_format = log_format @abstractmethod def log(self, request, response, time): """Emit log to logger.""" aiohttp-3.0.1/aiohttp/client.py0000666000000000000000000007750013240304665014631 0ustar 00000000000000"""HTTP Client for asyncio.""" import asyncio import base64 import hashlib import json import os import sys import traceback import warnings from collections.abc import Coroutine from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr from yarl import URL from . import client_exceptions, client_reqrep from . import connector as connector_mod from . 
import hdrs, http, payload from .client_exceptions import * # noqa from .client_exceptions import (ClientError, ClientOSError, InvalidURL, ServerTimeoutError, WSServerHandshakeError) from .client_reqrep import * # noqa from .client_reqrep import ClientRequest, ClientResponse, _merge_ssl_params from .client_ws import ClientWebSocketResponse from .connector import * # noqa from .connector import TCPConnector from .cookiejar import CookieJar from .helpers import (PY_36, CeilTimeout, TimeoutHandle, proxies_from_env, sentinel, strip_auth_from_url) from .http import WS_KEY, WebSocketReader, WebSocketWriter from .http_websocket import WSHandshakeError, ws_ext_gen, ws_ext_parse from .streams import FlowControlDataQueue from .tcp_helpers import tcp_cork, tcp_nodelay from .tracing import Trace __all__ = (client_exceptions.__all__ + # noqa client_reqrep.__all__ + # noqa connector_mod.__all__ + # noqa ('ClientSession', 'ClientWebSocketResponse', 'request')) # 5 Minute default read and connect timeout DEFAULT_TIMEOUT = 5 * 60 class ClientSession: """First-class interface for making HTTP requests.""" ATTRS = frozenset([ '_source_traceback', '_connector', 'requote_redirect_url', '_loop', '_cookie_jar', '_connector_owner', '_default_auth', '_version', '_json_serialize', '_read_timeout', '_conn_timeout', '_raise_for_status', '_auto_decompress', '_trust_env', '_default_headers', '_skip_auto_headers', '_request_class', '_response_class', '_ws_response_class', '_trace_configs']) _source_traceback = None _connector = None requote_redirect_url = True def __init__(self, *, connector=None, loop=None, cookies=None, headers=None, skip_auto_headers=None, auth=None, json_serialize=json.dumps, request_class=ClientRequest, response_class=ClientResponse, ws_response_class=ClientWebSocketResponse, version=http.HttpVersion11, cookie_jar=None, connector_owner=True, raise_for_status=False, read_timeout=sentinel, conn_timeout=None, auto_decompress=True, trust_env=False, trace_configs=None): 
implicit_loop = False if loop is None: if connector is not None: loop = connector._loop else: implicit_loop = True loop = asyncio.get_event_loop() if connector is None: connector = TCPConnector(loop=loop) if connector._loop is not loop: raise RuntimeError( "Session and connector has to use same event loop") self._loop = loop if loop.get_debug(): self._source_traceback = traceback.extract_stack(sys._getframe(1)) if implicit_loop and not loop.is_running(): warnings.warn("Creating a client session outside of coroutine is " "a very dangerous idea", stacklevel=2) context = {'client_session': self, 'message': 'Creating a client session outside ' 'of coroutine'} if self._source_traceback is not None: context['source_traceback'] = self._source_traceback loop.call_exception_handler(context) if cookie_jar is None: cookie_jar = CookieJar(loop=loop) self._cookie_jar = cookie_jar if cookies is not None: self._cookie_jar.update_cookies(cookies) self._connector = connector self._connector_owner = connector_owner self._default_auth = auth self._version = version self._json_serialize = json_serialize self._read_timeout = (read_timeout if read_timeout is not sentinel else DEFAULT_TIMEOUT) self._conn_timeout = conn_timeout self._raise_for_status = raise_for_status self._auto_decompress = auto_decompress self._trust_env = trust_env # Convert to list of tuples if headers: headers = CIMultiDict(headers) else: headers = CIMultiDict() self._default_headers = headers if skip_auto_headers is not None: self._skip_auto_headers = frozenset([istr(i) for i in skip_auto_headers]) else: self._skip_auto_headers = frozenset() self._request_class = request_class self._response_class = response_class self._ws_response_class = ws_response_class self._trace_configs = trace_configs or [] for trace_config in self._trace_configs: trace_config.freeze() def __init_subclass__(cls): warnings.warn("Inheritance class {} from ClientSession " "is discouraged".format(cls.__name__), DeprecationWarning, stacklevel=2) 
def __setattr__(self, name, val): if name not in self.ATTRS: warnings.warn("Setting custom ClientSession.{} attribute " "is discouraged".format(name), DeprecationWarning, stacklevel=2) super().__setattr__(name, val) def __del__(self, _warnings=warnings): if not self.closed: if PY_36: kwargs = {'source': self} else: kwargs = {} _warnings.warn("Unclosed client session {!r}".format(self), ResourceWarning, **kwargs) context = {'client_session': self, 'message': 'Unclosed client session'} if self._source_traceback is not None: context['source_traceback'] = self._source_traceback self._loop.call_exception_handler(context) def request(self, method, url, **kwargs): """Perform HTTP request.""" return _RequestContextManager(self._request(method, url, **kwargs)) async def _request(self, method, url, *, params=None, data=None, json=None, headers=None, skip_auto_headers=None, auth=None, allow_redirects=True, max_redirects=10, compress=None, chunked=None, expect100=False, read_until_eof=True, proxy=None, proxy_auth=None, timeout=sentinel, verify_ssl=None, fingerprint=None, ssl_context=None, ssl=None, proxy_headers=None, trace_request_ctx=None): # NOTE: timeout clamps existing connect and read timeouts. We cannot # set the default to None because we need to detect if the user wants # to use the existing timeouts by setting timeout to None. 
if self.closed: raise RuntimeError('Session is closed') ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) if data is not None and json is not None: raise ValueError( 'data and json parameters can not be used at the same time') elif json is not None: data = payload.JsonPayload(json, dumps=self._json_serialize) if not isinstance(chunked, bool) and chunked is not None: warnings.warn( 'Chunk size is deprecated #1615', DeprecationWarning) redirects = 0 history = [] version = self._version # Merge with default headers and transform to CIMultiDict headers = self._prepare_headers(headers) proxy_headers = self._prepare_headers(proxy_headers) try: url = URL(url) except ValueError: raise InvalidURL(url) skip_headers = set(self._skip_auto_headers) if skip_auto_headers is not None: for i in skip_auto_headers: skip_headers.add(istr(i)) if proxy is not None: try: proxy = URL(proxy) except ValueError: raise InvalidURL(proxy) # timeout is cumulative for all request operations # (request, redirects, responses, data consuming) tm = TimeoutHandle( self._loop, timeout if timeout is not sentinel else self._read_timeout) handle = tm.start() traces = [ Trace( self, trace_config, trace_config.trace_config_ctx( trace_request_ctx=trace_request_ctx) ) for trace_config in self._trace_configs ] for trace in traces: await trace.send_request_start( method, url, headers ) timer = tm.timer() try: with timer: while True: url, auth_from_url = strip_auth_from_url(url) if auth and auth_from_url: raise ValueError("Cannot combine AUTH argument with " "credentials encoded in URL") if auth is None: auth = auth_from_url if auth is None: auth = self._default_auth # It would be confusing if we support explicit # Authorization header with auth argument if (headers is not None and auth is not None and hdrs.AUTHORIZATION in headers): raise ValueError("Cannot combine AUTHORIZATION header " "with AUTH argument or credentials " "encoded in URL") url = url.with_fragment(None) cookies = 
self._cookie_jar.filter_cookies(url) if proxy is not None: proxy = URL(proxy) elif self._trust_env: for scheme, proxy_info in proxies_from_env().items(): if scheme == url.scheme: proxy = proxy_info.proxy proxy_auth = proxy_info.proxy_auth break req = self._request_class( method, url, params=params, headers=headers, skip_auto_headers=skip_headers, data=data, cookies=cookies, auth=auth, version=version, compress=compress, chunked=chunked, expect100=expect100, loop=self._loop, response_class=self._response_class, proxy=proxy, proxy_auth=proxy_auth, timer=timer, session=self, auto_decompress=self._auto_decompress, ssl=ssl, proxy_headers=proxy_headers) # connection timeout try: with CeilTimeout(self._conn_timeout, loop=self._loop): conn = await self._connector.connect( req, traces=traces ) except asyncio.TimeoutError as exc: raise ServerTimeoutError( 'Connection timeout ' 'to host {0}'.format(url)) from exc tcp_nodelay(conn.transport, True) tcp_cork(conn.transport, False) try: resp = req.send(conn) try: await resp.start(conn, read_until_eof) except BaseException: resp.close() conn.close() raise except ClientError: raise except OSError as exc: raise ClientOSError(*exc.args) from exc self._cookie_jar.update_cookies(resp.cookies, resp.url) # redirects if resp.status in ( 301, 302, 303, 307, 308) and allow_redirects: for trace in traces: await trace.send_request_redirect( method, url, headers, resp ) redirects += 1 history.append(resp) if max_redirects and redirects >= max_redirects: resp.close() break else: resp.release() # For 301 and 302, mimic IE, now changed in RFC # https://github.com/kennethreitz/requests/pull/269 if (resp.status == 303 and resp.method != hdrs.METH_HEAD) \ or (resp.status in (301, 302) and resp.method == hdrs.METH_POST): method = hdrs.METH_GET data = None if headers.get(hdrs.CONTENT_LENGTH): headers.pop(hdrs.CONTENT_LENGTH) r_url = (resp.headers.get(hdrs.LOCATION) or resp.headers.get(hdrs.URI)) if r_url is None: # see 
github.com/aio-libs/aiohttp/issues/2022 break try: r_url = URL( r_url, encoded=not self.requote_redirect_url) except ValueError: raise InvalidURL(r_url) scheme = r_url.scheme if scheme not in ('http', 'https', ''): resp.close() raise ValueError( 'Can redirect only to http or https') elif not scheme: r_url = url.join(r_url) if url.origin() != r_url.origin(): auth = None headers.pop(hdrs.AUTHORIZATION, None) url = r_url params = None resp.release() continue break # check response status if self._raise_for_status: resp.raise_for_status() # register connection if handle is not None: if resp.connection is not None: resp.connection.add_callback(handle.cancel) else: handle.cancel() resp._history = tuple(history) for trace in traces: await trace.send_request_end( method, url, headers, resp ) return resp except BaseException as e: # cleanup timer tm.close() if handle: handle.cancel() handle = None for trace in traces: await trace.send_request_exception( method, url, headers, e ) raise def ws_connect(self, url, *, protocols=(), timeout=10.0, receive_timeout=None, autoclose=True, autoping=True, heartbeat=None, auth=None, origin=None, headers=None, proxy=None, proxy_auth=None, ssl=None, verify_ssl=None, fingerprint=None, ssl_context=None, proxy_headers=None, compress=0): """Initiate websocket connection.""" return _WSRequestContextManager( self._ws_connect(url, protocols=protocols, timeout=timeout, receive_timeout=receive_timeout, autoclose=autoclose, autoping=autoping, heartbeat=heartbeat, auth=auth, origin=origin, headers=headers, proxy=proxy, proxy_auth=proxy_auth, ssl=ssl, verify_ssl=verify_ssl, fingerprint=fingerprint, ssl_context=ssl_context, proxy_headers=proxy_headers, compress=compress)) async def _ws_connect(self, url, *, protocols=(), timeout=10.0, receive_timeout=None, autoclose=True, autoping=True, heartbeat=None, auth=None, origin=None, headers=None, proxy=None, proxy_auth=None, ssl=None, verify_ssl=None, fingerprint=None, ssl_context=None, proxy_headers=None, 
compress=0): if headers is None: headers = CIMultiDict() default_headers = { hdrs.UPGRADE: hdrs.WEBSOCKET, hdrs.CONNECTION: hdrs.UPGRADE, hdrs.SEC_WEBSOCKET_VERSION: '13', } for key, value in default_headers.items(): if key not in headers: headers[key] = value sec_key = base64.b64encode(os.urandom(16)) headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode() if protocols: headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ','.join(protocols) if origin is not None: headers[hdrs.ORIGIN] = origin if compress: extstr = ws_ext_gen(compress=compress) headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) # send request resp = await self.get(url, headers=headers, read_until_eof=False, auth=auth, proxy=proxy, proxy_auth=proxy_auth, ssl=ssl, proxy_headers=proxy_headers) try: # check handshake if resp.status != 101: raise WSServerHandshakeError( resp.request_info, resp.history, message='Invalid response status', code=resp.status, headers=resp.headers) if resp.headers.get(hdrs.UPGRADE, '').lower() != 'websocket': raise WSServerHandshakeError( resp.request_info, resp.history, message='Invalid upgrade header', code=resp.status, headers=resp.headers) if resp.headers.get(hdrs.CONNECTION, '').lower() != 'upgrade': raise WSServerHandshakeError( resp.request_info, resp.history, message='Invalid connection header', code=resp.status, headers=resp.headers) # key calculation key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, '') match = base64.b64encode( hashlib.sha1(sec_key + WS_KEY).digest()).decode() if key != match: raise WSServerHandshakeError( resp.request_info, resp.history, message='Invalid challenge response', code=resp.status, headers=resp.headers) # websocket protocol protocol = None if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers: resp_protocols = [ proto.strip() for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(',')] for proto in resp_protocols: if proto in protocols: protocol = proto break # websocket 
compress notakeover = False if compress: compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS) if compress_hdrs: try: compress, notakeover = ws_ext_parse(compress_hdrs) except WSHandshakeError as exc: raise WSServerHandshakeError( resp.request_info, resp.history, message=exc.args[0], code=resp.status, headers=resp.headers) else: compress = 0 notakeover = False proto = resp.connection.protocol transport = resp.connection.transport reader = FlowControlDataQueue( proto, limit=2 ** 16, loop=self._loop) proto.set_parser(WebSocketReader(reader), reader) tcp_nodelay(transport, True) writer = WebSocketWriter( proto, transport, use_mask=True, compress=compress, notakeover=notakeover) except BaseException: resp.close() raise else: return self._ws_response_class(reader, writer, protocol, resp, timeout, autoclose, autoping, self._loop, receive_timeout=receive_timeout, heartbeat=heartbeat, compress=compress, client_notakeover=notakeover) def _prepare_headers(self, headers): """ Add default headers and transform it to CIMultiDict """ # Convert headers to MultiDict result = CIMultiDict(self._default_headers) if headers: if not isinstance(headers, (MultiDictProxy, MultiDict)): headers = CIMultiDict(headers) added_names = set() for key, value in headers.items(): if key in added_names: result.add(key, value) else: result[key] = value added_names.add(key) return result def get(self, url, *, allow_redirects=True, **kwargs): """Perform HTTP GET request.""" return _RequestContextManager( self._request(hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs)) def options(self, url, *, allow_redirects=True, **kwargs): """Perform HTTP OPTIONS request.""" return _RequestContextManager( self._request(hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs)) def head(self, url, *, allow_redirects=False, **kwargs): """Perform HTTP HEAD request.""" return _RequestContextManager( self._request(hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs)) def 
post(self, url, *, data=None, **kwargs): """Perform HTTP POST request.""" return _RequestContextManager( self._request(hdrs.METH_POST, url, data=data, **kwargs)) def put(self, url, *, data=None, **kwargs): """Perform HTTP PUT request.""" return _RequestContextManager( self._request(hdrs.METH_PUT, url, data=data, **kwargs)) def patch(self, url, *, data=None, **kwargs): """Perform HTTP PATCH request.""" return _RequestContextManager( self._request(hdrs.METH_PATCH, url, data=data, **kwargs)) def delete(self, url, **kwargs): """Perform HTTP DELETE request.""" return _RequestContextManager( self._request(hdrs.METH_DELETE, url, **kwargs)) async def close(self): """Close underlying connector. Release all acquired resources. """ if not self.closed: if self._connector_owner: self._connector.close() self._connector = None @property def closed(self): """Is client session closed. A readonly property. """ return self._connector is None or self._connector.closed @property def connector(self): """Connector instance used for the session.""" return self._connector @property def cookie_jar(self): """The session cookies.""" return self._cookie_jar @property def version(self): """The session HTTP protocol version.""" return self._version @property def loop(self): """Session's loop.""" return self._loop def detach(self): """Detach connector from session without closing the former. Session is switched to closed state anyway. 
""" self._connector = None def __enter__(self): raise TypeError("Use async with instead") def __exit__(self, exc_type, exc_val, exc_tb): # __exit__ should exist in pair with __enter__ but never executed pass # pragma: no cover async def __aenter__(self): return self async def __aexit__(self, exc_type, exc_val, exc_tb): await self.close() class _BaseRequestContextManager(Coroutine): __slots__ = ('_coro', '_resp') def __init__(self, coro): self._coro = coro def send(self, arg): return self._coro.send(arg) def throw(self, arg): return self._coro.throw(arg) def close(self): return self._coro.close() def __await__(self): ret = self._coro.__await__() return ret def __iter__(self): return self.__await__() async def __aenter__(self): self._resp = await self._coro return self._resp class _RequestContextManager(_BaseRequestContextManager): async def __aexit__(self, exc_type, exc, tb): # We're basing behavior on the exception as it can be caused by # user code unrelated to the status of the connection. If you # would like to close a connection you must do that # explicitly. Otherwise connection error handling should kick in # and close/recycle the connection as required. 
self._resp.release() class _WSRequestContextManager(_BaseRequestContextManager): async def __aexit__(self, exc_type, exc, tb): await self._resp.close() class _SessionRequestContextManager: __slots__ = ('_coro', '_resp', '_session') def __init__(self, coro, session): self._coro = coro self._resp = None self._session = session async def __aenter__(self): self._resp = await self._coro return self._resp async def __aexit__(self, exc_type, exc_val, exc_tb): self._resp.close() await self._session.close() def request(method, url, *, params=None, data=None, json=None, headers=None, skip_auto_headers=None, cookies=None, auth=None, allow_redirects=True, max_redirects=10, version=http.HttpVersion11, compress=None, chunked=None, expect100=False, connector=None, loop=None, read_until_eof=True, proxy=None, proxy_auth=None): """Constructs and sends a request. Returns response object. method - HTTP method url - request url params - (optional) Dictionary or bytes to be sent in the query string of the new request data - (optional) Dictionary, bytes, or file-like object to send in the body of the request json - (optional) Any json compatibile python object headers - (optional) Dictionary of HTTP Headers to send with the request cookies - (optional) Dict object to send with the request auth - (optional) BasicAuth named tuple represent HTTP Basic Auth auth - aiohttp.helpers.BasicAuth allow_redirects - (optional) If set to False, do not follow redirects version - Request HTTP version. compress - Set to True if request has to be compressed with deflate encoding. chunked - Set to chunk size for chunked transfer encoding. expect100 - Expect 100-continue response from server. connector - BaseConnector sub-class instance to support connection pooling. read_until_eof - Read response until eof if response does not have Content-Length header. loop - Optional event loop. 
Usage:: >>> import aiohttp >>> resp = await aiohttp.request('GET', 'http://python.org/') >>> resp >>> data = await resp.read() """ connector_owner = False if connector is None: connector_owner = True connector = TCPConnector(loop=loop, force_close=True) session = ClientSession( loop=loop, cookies=cookies, version=version, connector=connector, connector_owner=connector_owner) return _SessionRequestContextManager( session._request(method, url, params=params, data=data, json=json, headers=headers, skip_auto_headers=skip_auto_headers, auth=auth, allow_redirects=allow_redirects, max_redirects=max_redirects, compress=compress, chunked=chunked, expect100=expect100, read_until_eof=read_until_eof, proxy=proxy, proxy_auth=proxy_auth,), session) aiohttp-3.0.1/aiohttp/client_exceptions.py0000666000000000000000000001243413240304665017065 0ustar 00000000000000"""HTTP related errors.""" import asyncio try: import ssl except ImportError: # pragma: no cover ssl = None __all__ = ( 'ClientError', 'ClientConnectionError', 'ClientOSError', 'ClientConnectorError', 'ClientProxyConnectionError', 'ClientSSLError', 'ClientConnectorSSLError', 'ClientConnectorCertificateError', 'ServerConnectionError', 'ServerTimeoutError', 'ServerDisconnectedError', 'ServerFingerprintMismatch', 'ClientResponseError', 'ClientHttpProxyError', 'WSServerHandshakeError', 'ContentTypeError', 'ClientPayloadError', 'InvalidURL') class ClientError(Exception): """Base class for client connection errors.""" class ClientResponseError(ClientError): """Connection error during reading response. 
request_info: instance of RequestInfo """ def __init__(self, request_info, history, *, code=0, message='', headers=None): self.request_info = request_info self.code = code self.message = message self.headers = headers self.history = history super().__init__("%s, message='%s'" % (code, message)) class ContentTypeError(ClientResponseError): """ContentType found is not valid.""" class WSServerHandshakeError(ClientResponseError): """websocket server handshake error.""" class ClientHttpProxyError(ClientResponseError): """HTTP proxy error. Raised in :class:`aiohttp.connector.TCPConnector` if proxy responds with status other than ``200 OK`` on ``CONNECT`` request. """ class ClientConnectionError(ClientError): """Base class for client socket errors.""" class ClientOSError(ClientConnectionError, OSError): """OSError error.""" class ClientConnectorError(ClientOSError): """Client connector error. Raised in :class:`aiohttp.connector.TCPConnector` if connection to proxy can not be established. """ def __init__(self, connection_key, os_error): self._conn_key = connection_key self._os_error = os_error super().__init__(os_error.errno, os_error.strerror) @property def os_error(self): return self._os_error @property def host(self): return self._conn_key.host @property def port(self): return self._conn_key.port @property def ssl(self): return self._conn_key.ssl def __str__(self): return ('Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} [{1}]' .format(self, self.strerror)) class ClientProxyConnectionError(ClientConnectorError): """Proxy connection error. Raised in :class:`aiohttp.connector.TCPConnector` if connection to proxy can not be established. 
""" class ServerConnectionError(ClientConnectionError): """Server connection errors.""" class ServerDisconnectedError(ServerConnectionError): """Server disconnected.""" def __init__(self, message=None): self.message = message class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError): """Server timeout error.""" class ServerFingerprintMismatch(ServerConnectionError): """SSL certificate does not match expected fingerprint.""" def __init__(self, expected, got, host, port): self.expected = expected self.got = got self.host = host self.port = port def __repr__(self): return '<{} expected={} got={} host={} port={}>'.format( self.__class__.__name__, self.expected, self.got, self.host, self.port) class ClientPayloadError(ClientError): """Response payload error.""" class InvalidURL(ClientError, ValueError): """Invalid URL. URL used for fetching is malformed, e.g. it doesn't contains host part.""" # Derive from ValueError for backward compatibility def __init__(self, url): super().__init__(url) @property def url(self): return self.args[0] def __repr__(self): return '<{} {}>'.format(self.__class__.__name__, self.url) class ClientSSLError(ClientConnectorError): """Base error for ssl.*Errors.""" if ssl is not None: certificate_errors = (ssl.CertificateError,) certificate_errors_bases = (ClientSSLError, ssl.CertificateError,) ssl_errors = (ssl.SSLError,) ssl_error_bases = (ClientSSLError, ssl.SSLError) else: # pragma: no cover certificate_errors = tuple() certificate_errors_bases = (ClientSSLError, ValueError,) ssl_errors = tuple() ssl_error_bases = (ClientSSLError,) class ClientConnectorSSLError(*ssl_error_bases): """Response ssl error.""" class ClientConnectorCertificateError(*certificate_errors_bases): """Response certificate error.""" def __init__(self, connection_key, certificate_error): self._conn_key = connection_key self._certificate_error = certificate_error @property def certificate_error(self): return self._certificate_error @property def host(self): 
return self._conn_key.host @property def port(self): return self._conn_key.port @property def ssl(self): return self._conn_key.ssl def __str__(self): return ('Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} ' '[{0.certificate_error.__class__.__name__}: ' '{0.certificate_error.args}]'.format(self)) aiohttp-3.0.1/aiohttp/client_proto.py0000666000000000000000000001336713240304665016055 0ustar 00000000000000import asyncio import asyncio.streams from contextlib import suppress from .client_exceptions import (ClientOSError, ClientPayloadError, ServerDisconnectedError) from .http import HttpResponseParser from .streams import EMPTY_PAYLOAD, DataQueue class ResponseHandler(DataQueue, asyncio.streams.FlowControlMixin): """Helper class to adapt between Protocol and StreamReader.""" def __init__(self, *, loop=None): asyncio.streams.FlowControlMixin.__init__(self, loop=loop) DataQueue.__init__(self, loop=loop) self.transport = None self._should_close = False self._message = None self._payload = None self._skip_payload = False self._payload_parser = None self._reading_paused = False self._timer = None self._tail = b'' self._upgraded = False self._parser = None @property def upgraded(self): return self._upgraded @property def should_close(self): if (self._payload is not None and not self._payload.is_eof() or self._upgraded): return True return (self._should_close or self._upgraded or self.exception() is not None or self._payload_parser is not None or len(self) or self._tail) def close(self): transport = self.transport if transport is not None: transport.close() self.transport = None self._payload = None return transport def is_connected(self): return self.transport is not None def connection_made(self, transport): self.transport = transport def connection_lost(self, exc): if self._payload_parser is not None: with suppress(Exception): self._payload_parser.feed_eof() try: uncompleted = self._parser.feed_eof() except Exception: uncompleted = None if self._payload is not None: 
self._payload.set_exception( ClientPayloadError('Response payload is not completed')) if not self.is_eof(): if isinstance(exc, OSError): exc = ClientOSError(*exc.args) if exc is None: exc = ServerDisconnectedError(uncompleted) # assigns self._should_close to True as side effect, # we do it anyway below self.set_exception(exc) self.transport = None self._should_close = True self._parser = None self._message = None self._payload = None self._payload_parser = None self._reading_paused = False super().connection_lost(exc) def eof_received(self): pass def pause_reading(self): if not self._reading_paused: try: self.transport.pause_reading() except (AttributeError, NotImplementedError, RuntimeError): pass self._reading_paused = True def resume_reading(self): if self._reading_paused: try: self.transport.resume_reading() except (AttributeError, NotImplementedError, RuntimeError): pass self._reading_paused = False def set_exception(self, exc): self._should_close = True super().set_exception(exc) def set_parser(self, parser, payload): self._payload = payload self._payload_parser = parser if self._tail: data, self._tail = self._tail, b'' self.data_received(data) def set_response_params(self, *, timer=None, skip_payload=False, read_until_eof=False, auto_decompress=True): self._skip_payload = skip_payload self._parser = HttpResponseParser( self, self._loop, timer=timer, payload_exception=ClientPayloadError, read_until_eof=read_until_eof, auto_decompress=auto_decompress) if self._tail: data, self._tail = self._tail, b'' self.data_received(data) def data_received(self, data): if not data: return # custom payload parser if self._payload_parser is not None: eof, tail = self._payload_parser.feed_data(data) if eof: self._payload = None self._payload_parser = None if tail: self.data_received(tail) return else: if self._upgraded or self._parser is None: # i.e. 
websocket connection, websocket parser is not set yet self._tail += data else: # parse http messages try: messages, upgraded, tail = self._parser.feed_data(data) except BaseException as exc: self.transport.close() # should_close is True after the call self.set_exception(exc) return self._upgraded = upgraded for message, payload in messages: if message.should_close: self._should_close = True self._message = message self._payload = payload if self._skip_payload or message.code in (204, 304): self.feed_data((message, EMPTY_PAYLOAD), 0) else: self.feed_data((message, payload), 0) if tail: if upgraded: self.data_received(tail) else: self._tail = tail aiohttp-3.0.1/aiohttp/client_reqrep.py0000666000000000000000000006770413240304665016214 0ustar 00000000000000import asyncio import codecs import io import json import sys import traceback import warnings from collections import namedtuple from hashlib import md5, sha1, sha256 from http.cookies import CookieError, Morsel, SimpleCookie from types import MappingProxyType import attr from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy from yarl import URL from . 
import hdrs, helpers, http, multipart, payload from .client_exceptions import (ClientConnectionError, ClientOSError, ClientResponseError, ContentTypeError, InvalidURL, ServerFingerprintMismatch) from .formdata import FormData from .helpers import PY_36, HeadersMixin, TimerNoop, noop, reify, set_result from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11, StreamWriter from .log import client_logger from .streams import StreamReader try: import ssl except ImportError: # pragma: no cover ssl = None try: import cchardet as chardet except ImportError: # pragma: no cover import chardet __all__ = ('ClientRequest', 'ClientResponse', 'RequestInfo', 'Fingerprint') @attr.s(frozen=True, slots=True) class ContentDisposition: type = attr.ib(type=str) parameters = attr.ib(type=MappingProxyType) filename = attr.ib(type=str) @attr.s(frozen=True, slots=True) class RequestInfo: url = attr.ib(type=URL) method = attr.ib(type=str) headers = attr.ib(type=CIMultiDictProxy) class Fingerprint: HASHFUNC_BY_DIGESTLEN = { 16: md5, 20: sha1, 32: sha256, } def __init__(self, fingerprint): digestlen = len(fingerprint) hashfunc = self.HASHFUNC_BY_DIGESTLEN.get(digestlen) if not hashfunc: raise ValueError('fingerprint has invalid length') elif hashfunc is md5 or hashfunc is sha1: raise ValueError('md5 and sha1 are insecure and ' 'not supported. 
Use sha256.') self._hashfunc = hashfunc self._fingerprint = fingerprint @property def fingerprint(self): return self._fingerprint def check(self, transport): if not transport.get_extra_info('sslcontext'): return sslobj = transport.get_extra_info('ssl_object') cert = sslobj.getpeercert(binary_form=True) got = self._hashfunc(cert).digest() if got != self._fingerprint: host, port, *_ = transport.get_extra_info('peername') raise ServerFingerprintMismatch(self._fingerprint, got, host, port) if ssl is not None: SSL_ALLOWED_TYPES = (ssl.SSLContext, bool, Fingerprint, type(None)) else: # pragma: no cover SSL_ALLOWED_TYPES = type(None) def _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint): if verify_ssl is not None and not verify_ssl: warnings.warn("verify_ssl is deprecated, use ssl=False instead", DeprecationWarning, stacklevel=3) if ssl is not None: raise ValueError("verify_ssl, ssl_context, fingerprint and ssl " "parameters are mutually exclusive") else: ssl = False if ssl_context is not None: warnings.warn("ssl_context is deprecated, use ssl=context instead", DeprecationWarning, stacklevel=3) if ssl is not None: raise ValueError("verify_ssl, ssl_context, fingerprint and ssl " "parameters are mutually exclusive") else: ssl = ssl_context if fingerprint is not None: warnings.warn("fingerprint is deprecated, " "use ssl=Fingerprint(fingerprint) instead", DeprecationWarning, stacklevel=3) if ssl is not None: raise ValueError("verify_ssl, ssl_context, fingerprint and ssl " "parameters are mutually exclusive") else: ssl = Fingerprint(fingerprint) if not isinstance(ssl, SSL_ALLOWED_TYPES): raise TypeError("ssl should be SSLContext, bool, Fingerprint or None, " "got {!r} instead.".format(ssl)) return ssl ConnectionKey = namedtuple('ConnectionKey', ['host', 'port', 'ssl']) class ClientRequest: GET_METHODS = { hdrs.METH_GET, hdrs.METH_HEAD, hdrs.METH_OPTIONS, hdrs.METH_TRACE, } POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT} ALL_METHODS = 
GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE}) DEFAULT_HEADERS = { hdrs.ACCEPT: '*/*', hdrs.ACCEPT_ENCODING: 'gzip, deflate', } body = b'' auth = None response = None response_class = None _writer = None # async task for streaming data _continue = None # waiter future for '100 Continue' response # N.B. # Adding __del__ method with self._writer closing doesn't make sense # because _writer is instance method, thus it keeps a reference to self. # Until writer has finished finalizer will not be called. def __init__(self, method, url, *, params=None, headers=None, skip_auto_headers=frozenset(), data=None, cookies=None, auth=None, version=http.HttpVersion11, compress=None, chunked=None, expect100=False, loop=None, response_class=None, proxy=None, proxy_auth=None, timer=None, session=None, auto_decompress=True, ssl=None, proxy_headers=None): if loop is None: loop = asyncio.get_event_loop() assert isinstance(url, URL), url assert isinstance(proxy, (URL, type(None))), proxy self._session = session if params: q = MultiDict(url.query) url2 = url.with_query(params) q.extend(url2.query) url = url.with_query(q) self.url = url.with_fragment(None) self.original_url = url self.method = method.upper() self.chunked = chunked self.compress = compress self.loop = loop self.length = None self.response_class = response_class or ClientResponse self._timer = timer if timer is not None else TimerNoop() self._auto_decompress = auto_decompress self._ssl = ssl if loop.get_debug(): self._source_traceback = traceback.extract_stack(sys._getframe(1)) self.update_version(version) self.update_host(url) self.update_headers(headers) self.update_auto_headers(skip_auto_headers) self.update_cookies(cookies) self.update_content_encoding(data) self.update_auth(auth) self.update_proxy(proxy, proxy_auth, proxy_headers) self.update_body_from_data(data) if data or self.method not in self.GET_METHODS: self.update_transfer_encoding() self.update_expect_continue(expect100) def is_ssl(self): return 
self.url.scheme in ('https', 'wss') @property def ssl(self): return self._ssl @property def connection_key(self): return ConnectionKey(self.host, self.port, self.is_ssl()) @property def host(self): return self.url.host @property def port(self): return self.url.port @property def request_info(self): return RequestInfo(self.url, self.method, self.headers) def update_host(self, url): """Update destination host, port and connection type (ssl).""" # get host/port if not url.host: raise InvalidURL(url) # basic auth info username, password = url.user, url.password if username: self.auth = helpers.BasicAuth(username, password or '') def update_version(self, version): """Convert request version to two elements tuple. parser HTTP version '1.1' => (1, 1) """ if isinstance(version, str): v = [l.strip() for l in version.split('.', 1)] try: version = int(v[0]), int(v[1]) except ValueError: raise ValueError( 'Can not parse http version number: {}' .format(version)) from None self.version = version def update_headers(self, headers): """Update request headers.""" self.headers = CIMultiDict() if headers: if isinstance(headers, (dict, MultiDictProxy, MultiDict)): headers = headers.items() for key, value in headers: self.headers.add(key, value) def update_auto_headers(self, skip_auto_headers): self.skip_auto_headers = CIMultiDict( (hdr, None) for hdr in sorted(skip_auto_headers)) used_headers = self.headers.copy() used_headers.extend(self.skip_auto_headers) for hdr, val in self.DEFAULT_HEADERS.items(): if hdr not in used_headers: self.headers.add(hdr, val) # add host if hdrs.HOST not in used_headers: netloc = self.url.raw_host if not self.url.is_default_port(): netloc += ':' + str(self.url.port) self.headers[hdrs.HOST] = netloc if hdrs.USER_AGENT not in used_headers: self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE def update_cookies(self, cookies): """Update request cookies header.""" if not cookies: return c = SimpleCookie() if hdrs.COOKIE in self.headers: 
c.load(self.headers.get(hdrs.COOKIE, '')) del self.headers[hdrs.COOKIE] for name, value in cookies.items(): if isinstance(value, Morsel): # Preserve coded_value mrsl_val = value.get(value.key, Morsel()) mrsl_val.set(value.key, value.value, value.coded_value) c[name] = mrsl_val else: c[name] = value self.headers[hdrs.COOKIE] = c.output(header='', sep=';').strip() def update_content_encoding(self, data): """Set request content encoding.""" if not data: return enc = self.headers.get(hdrs.CONTENT_ENCODING, '').lower() if enc: if self.compress: raise ValueError( 'compress can not be set ' 'if Content-Encoding header is set') elif self.compress: if not isinstance(self.compress, str): self.compress = 'deflate' self.headers[hdrs.CONTENT_ENCODING] = self.compress self.chunked = True # enable chunked, no need to deal with length def update_transfer_encoding(self): """Analyze transfer-encoding header.""" te = self.headers.get(hdrs.TRANSFER_ENCODING, '').lower() if 'chunked' in te: if self.chunked: raise ValueError( 'chunked can not be set ' 'if "Transfer-Encoding: chunked" header is set') elif self.chunked: if hdrs.CONTENT_LENGTH in self.headers: raise ValueError( 'chunked can not be set ' 'if Content-Length header is set') self.headers[hdrs.TRANSFER_ENCODING] = 'chunked' else: if hdrs.CONTENT_LENGTH not in self.headers: self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body)) def update_auth(self, auth): """Set basic auth.""" if auth is None: auth = self.auth if auth is None: return if not isinstance(auth, helpers.BasicAuth): raise TypeError('BasicAuth() tuple is required instead') self.headers[hdrs.AUTHORIZATION] = auth.encode() def update_body_from_data(self, body): if not body: return # FormData if isinstance(body, FormData): body = body() try: body = payload.PAYLOAD_REGISTRY.get(body, disposition=None) except payload.LookupError: body = FormData(body)() self.body = body # enable chunked encoding if needed if not self.chunked: if hdrs.CONTENT_LENGTH not in self.headers: 
size = body.size if size is None: self.chunked = True else: if hdrs.CONTENT_LENGTH not in self.headers: self.headers[hdrs.CONTENT_LENGTH] = str(size) # set content-type if (hdrs.CONTENT_TYPE not in self.headers and hdrs.CONTENT_TYPE not in self.skip_auto_headers): self.headers[hdrs.CONTENT_TYPE] = body.content_type # copy payload headers if body.headers: for (key, value) in body.headers.items(): if key not in self.headers: self.headers[key] = value def update_expect_continue(self, expect=False): if expect: self.headers[hdrs.EXPECT] = '100-continue' elif self.headers.get(hdrs.EXPECT, '').lower() == '100-continue': expect = True if expect: self._continue = self.loop.create_future() def update_proxy(self, proxy, proxy_auth, proxy_headers): if proxy and not proxy.scheme == 'http': raise ValueError("Only http proxies are supported") if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth): raise ValueError("proxy_auth must be None or BasicAuth() tuple") self.proxy = proxy self.proxy_auth = proxy_auth self.proxy_headers = proxy_headers def keep_alive(self): if self.version < HttpVersion10: # keep alive not supported at all return False if self.version == HttpVersion10: if self.headers.get(hdrs.CONNECTION) == 'keep-alive': return True else: # no headers means we close for Http 1.0 return False elif self.headers.get(hdrs.CONNECTION) == 'close': return False return True async def write_bytes(self, writer, conn): """Support coroutines that yields bytes objects.""" # 100 response if self._continue is not None: await writer.drain() await self._continue try: if isinstance(self.body, payload.Payload): await self.body.write(writer) else: if isinstance(self.body, (bytes, bytearray)): self.body = (self.body,) for chunk in self.body: writer.write(chunk) await writer.write_eof() except OSError as exc: new_exc = ClientOSError( exc.errno, 'Can not write request body for %s' % self.url) new_exc.__context__ = exc new_exc.__cause__ = exc conn.protocol.set_exception(new_exc) except 
asyncio.CancelledError as exc: if not conn.closed: conn.protocol.set_exception(exc) except Exception as exc: conn.protocol.set_exception(exc) finally: self._writer = None def send(self, conn): # Specify request target: # - CONNECT request must send authority form URI # - not CONNECT proxy must send absolute form URI # - most common is origin form URI if self.method == hdrs.METH_CONNECT: path = '{}:{}'.format(self.url.raw_host, self.url.port) elif self.proxy and not self.ssl: path = str(self.url) else: path = self.url.raw_path if self.url.raw_query_string: path += '?' + self.url.raw_query_string writer = StreamWriter(conn.protocol, conn.transport, self.loop) if self.compress: writer.enable_compression(self.compress) if self.chunked is not None: writer.enable_chunking() # set default content-type if (self.method in self.POST_METHODS and hdrs.CONTENT_TYPE not in self.skip_auto_headers and hdrs.CONTENT_TYPE not in self.headers): self.headers[hdrs.CONTENT_TYPE] = 'application/octet-stream' # set the connection header connection = self.headers.get(hdrs.CONNECTION) if not connection: if self.keep_alive(): if self.version == HttpVersion10: connection = 'keep-alive' else: if self.version == HttpVersion11: connection = 'close' if connection is not None: self.headers[hdrs.CONNECTION] = connection # status + headers status_line = '{0} {1} HTTP/{2[0]}.{2[1]}\r\n'.format( self.method, path, self.version) writer.write_headers(status_line, self.headers) self._writer = self.loop.create_task(self.write_bytes(writer, conn)) self.response = self.response_class( self.method, self.original_url, writer=self._writer, continue100=self._continue, timer=self._timer, request_info=self.request_info, auto_decompress=self._auto_decompress) self.response._post_init(self.loop, self._session) return self.response async def close(self): if self._writer is not None: try: await self._writer finally: self._writer = None def terminate(self): if self._writer is not None: if not self.loop.is_closed(): 
self._writer.cancel() self._writer = None class ClientResponse(HeadersMixin): # from the Status-Line of the response version = None # HTTP-Version status = None # Status-Code reason = None # Reason-Phrase content = None # Payload stream headers = None # Response headers, CIMultiDictProxy raw_headers = None # Response raw headers, a sequence of pairs _connection = None # current connection flow_control_class = StreamReader # reader flow control _reader = None # input stream _source_traceback = None # setted up by ClientRequest after ClientResponse object creation # post-init stage allows to not change ctor signature _loop = None _closed = True # to allow __del__ for non-initialized properly response _session = None def __init__(self, method, url, *, writer=None, continue100=None, timer=None, request_info=None, auto_decompress=True): assert isinstance(url, URL) self.method = method self.headers = None self.cookies = SimpleCookie() self._url = url self._content = None self._writer = writer self._continue = continue100 self._closed = True self._history = () self._request_info = request_info self._timer = timer if timer is not None else TimerNoop() self._auto_decompress = auto_decompress self._cache = {} # reqired for @reify method decorator @property def url(self): return self._url @property def url_obj(self): warnings.warn( "Deprecated, use .url #1654", DeprecationWarning, stacklevel=2) return self._url @property def host(self): return self._url.host @property def _headers(self): return self.headers @property def request_info(self): return self._request_info @reify def content_disposition(self): raw = self._headers.get(hdrs.CONTENT_DISPOSITION) if raw is None: return None disposition_type, params = multipart.parse_content_disposition(raw) params = MappingProxyType(params) filename = multipart.content_disposition_filename(params) return ContentDisposition(disposition_type, params, filename) def _post_init(self, loop, session): self._loop = loop self._session = session 
# store a reference to session #1985 if loop.get_debug(): self._source_traceback = traceback.extract_stack(sys._getframe(1)) def __del__(self, _warnings=warnings): if self._loop is None: return # not started if self._closed: return if self._connection is not None: self._connection.release() self._cleanup_writer() if self._loop.get_debug(): if PY_36: kwargs = {'source': self} else: kwargs = {} _warnings.warn("Unclosed response {!r}".format(self), ResourceWarning, **kwargs) context = {'client_response': self, 'message': 'Unclosed response'} if self._source_traceback: context['source_traceback'] = self._source_traceback self._loop.call_exception_handler(context) def __repr__(self): out = io.StringIO() ascii_encodable_url = str(self.url) if self.reason: ascii_encodable_reason = self.reason.encode('ascii', 'backslashreplace') \ .decode('ascii') else: ascii_encodable_reason = self.reason print(''.format( ascii_encodable_url, self.status, ascii_encodable_reason), file=out) print(self.headers, file=out) return out.getvalue() @property def connection(self): return self._connection @property def history(self): """A sequence of of responses, if redirects occurred.""" return self._history async def start(self, connection, read_until_eof=False): """Start response processing.""" self._closed = False self._protocol = connection.protocol self._connection = connection connection.protocol.set_response_params( timer=self._timer, skip_payload=self.method.lower() == 'head', read_until_eof=read_until_eof, auto_decompress=self._auto_decompress) with self._timer: while True: # read response try: (message, payload) = await self._protocol.read() except http.HttpProcessingError as exc: raise ClientResponseError( self.request_info, self.history, code=exc.code, message=exc.message, headers=exc.headers) from exc if (message.code < 100 or message.code > 199 or message.code == 101): break if self._continue is not None: set_result(self._continue, True) self._continue = None # payload eof handler 
payload.on_eof(self._response_eof) # response status self.version = message.version self.status = message.code self.reason = message.reason # headers self.headers = CIMultiDictProxy(message.headers) self.raw_headers = tuple(message.raw_headers) # payload self.content = payload # cookies for hdr in self.headers.getall(hdrs.SET_COOKIE, ()): try: self.cookies.load(hdr) except CookieError as exc: client_logger.warning( 'Can not load response cookies: %s', exc) return self def _response_eof(self): if self._closed: return if self._connection is not None: # websocket, protocol could be None because # connection could be detached if (self._connection.protocol is not None and self._connection.protocol.upgraded): return self._connection.release() self._connection = None self._closed = True self._cleanup_writer() @property def closed(self): return self._closed def close(self): if self._closed: return self._closed = True if self._loop is None or self._loop.is_closed(): return if self._connection is not None: self._connection.close() self._connection = None self._cleanup_writer() self._notify_content() def release(self): if self._closed: return noop() self._closed = True if self._connection is not None: self._connection.release() self._connection = None self._cleanup_writer() self._notify_content() return noop() def raise_for_status(self): if 400 <= self.status: raise ClientResponseError( self.request_info, self.history, code=self.status, message=self.reason, headers=self.headers) def _cleanup_writer(self): if self._writer is not None: self._writer.cancel() self._writer = None self._session = None def _notify_content(self): content = self.content if content and content.exception() is None and not content.is_eof(): content.set_exception( ClientConnectionError('Connection closed')) async def wait_for_close(self): if self._writer is not None: try: await self._writer finally: self._writer = None self.release() async def read(self): """Read response payload.""" if self._content is 
None: try: self._content = await self.content.read() except BaseException: self.close() raise return self._content def get_encoding(self): ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower() mimetype = helpers.parse_mimetype(ctype) encoding = mimetype.parameters.get('charset') if encoding: try: codecs.lookup(encoding) except LookupError: encoding = None if not encoding: if mimetype.type == 'application' and mimetype.subtype == 'json': # RFC 7159 states that the default encoding is UTF-8. encoding = 'utf-8' else: encoding = chardet.detect(self._content)['encoding'] if not encoding: encoding = 'utf-8' return encoding async def text(self, encoding=None, errors='strict'): """Read response payload and decode.""" if self._content is None: await self.read() if encoding is None: encoding = self.get_encoding() return self._content.decode(encoding, errors=errors) async def json(self, *, encoding=None, loads=json.loads, content_type='application/json'): """Read and decodes JSON response.""" if self._content is None: await self.read() if content_type: ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower() if content_type not in ctype: raise ContentTypeError( self.request_info, self.history, message=('Attempt to decode JSON with ' 'unexpected mimetype: %s' % ctype), headers=self.headers) stripped = self._content.strip() if not stripped: return None if encoding is None: encoding = self.get_encoding() return loads(stripped.decode(encoding)) async def __aenter__(self): return self async def __aexit__(self, exc_type, exc_val, exc_tb): # similar to _RequestContextManager, we do not need to check # for exceptions, response object can closes connection # is state is broken self.release() aiohttp-3.0.1/aiohttp/client_ws.py0000666000000000000000000002127613240304665015341 0ustar 00000000000000"""WebSocket client for asyncio.""" import asyncio import json import async_timeout from .client_exceptions import ClientError from .helpers import call_later, set_result from .http import 
(WS_CLOSED_MESSAGE, WS_CLOSING_MESSAGE, WebSocketError, WSMessage, WSMsgType) class ClientWebSocketResponse: def __init__(self, reader, writer, protocol, response, timeout, autoclose, autoping, loop, *, receive_timeout=None, heartbeat=None, compress=0, client_notakeover=False): self._response = response self._conn = response.connection self._writer = writer self._reader = reader self._protocol = protocol self._closed = False self._closing = False self._close_code = None self._timeout = timeout self._receive_timeout = receive_timeout self._autoclose = autoclose self._autoping = autoping self._heartbeat = heartbeat self._heartbeat_cb = None if heartbeat is not None: self._pong_heartbeat = heartbeat/2.0 self._pong_response_cb = None self._loop = loop self._waiting = None self._exception = None self._compress = compress self._client_notakeover = client_notakeover self._reset_heartbeat() def _cancel_heartbeat(self): if self._pong_response_cb is not None: self._pong_response_cb.cancel() self._pong_response_cb = None if self._heartbeat_cb is not None: self._heartbeat_cb.cancel() self._heartbeat_cb = None def _reset_heartbeat(self): self._cancel_heartbeat() if self._heartbeat is not None: self._heartbeat_cb = call_later( self._send_heartbeat, self._heartbeat, self._loop) def _send_heartbeat(self): if self._heartbeat is not None and not self._closed: self._writer.ping() if self._pong_response_cb is not None: self._pong_response_cb.cancel() self._pong_response_cb = call_later( self._pong_not_received, self._pong_heartbeat, self._loop) def _pong_not_received(self): if not self._closed: self._closed = True self._close_code = 1006 self._exception = asyncio.TimeoutError() self._response.close() @property def closed(self): return self._closed @property def close_code(self): return self._close_code @property def protocol(self): return self._protocol @property def compress(self): return self._compress @property def client_notakeover(self): return self._client_notakeover def 
get_extra_info(self, name, default=None): """extra info from connection transport""" try: return self._response.connection.transport.get_extra_info( name, default) except Exception: return default def exception(self): return self._exception async def ping(self, message='b'): await self._writer.ping(message) async def pong(self, message='b'): await self._writer.pong(message) async def send_str(self, data, compress=None): if not isinstance(data, str): raise TypeError('data argument must be str (%r)' % type(data)) await self._writer.send(data, binary=False, compress=compress) async def send_bytes(self, data, compress=None): if not isinstance(data, (bytes, bytearray, memoryview)): raise TypeError('data argument must be byte-ish (%r)' % type(data)) await self._writer.send(data, binary=True, compress=compress) async def send_json(self, data, compress=None, *, dumps=json.dumps): await self.send_str(dumps(data), compress=compress) async def close(self, *, code=1000, message=b''): # we need to break `receive()` cycle first, # `close()` may be called from different task if self._waiting is not None and not self._closed: self._reader.feed_data(WS_CLOSING_MESSAGE, 0) await self._waiting if not self._closed: self._cancel_heartbeat() self._closed = True try: self._writer.close(code, message) except asyncio.CancelledError: self._close_code = 1006 self._response.close() raise except Exception as exc: self._close_code = 1006 self._exception = exc self._response.close() return True if self._closing: self._response.close() return True while True: try: with async_timeout.timeout(self._timeout, loop=self._loop): msg = await self._reader.read() except asyncio.CancelledError: self._close_code = 1006 self._response.close() raise except Exception as exc: self._close_code = 1006 self._exception = exc self._response.close() return True if msg.type == WSMsgType.CLOSE: self._close_code = msg.data self._response.close() return True else: return False async def receive(self, timeout=None): while 
True: if self._waiting is not None: raise RuntimeError( 'Concurrent call to receive() is not allowed') if self._closed: return WS_CLOSED_MESSAGE elif self._closing: await self.close() return WS_CLOSED_MESSAGE try: self._waiting = self._loop.create_future() try: with async_timeout.timeout( timeout or self._receive_timeout, loop=self._loop): msg = await self._reader.read() self._reset_heartbeat() finally: waiter = self._waiting self._waiting = None set_result(waiter, True) except (asyncio.CancelledError, asyncio.TimeoutError): self._close_code = 1006 raise except ClientError: self._closed = True self._close_code = 1006 return WS_CLOSED_MESSAGE except WebSocketError as exc: self._close_code = exc.code await self.close(code=exc.code) return WSMessage(WSMsgType.ERROR, exc, None) except Exception as exc: self._exception = exc self._closing = True self._close_code = 1006 await self.close() return WSMessage(WSMsgType.ERROR, exc, None) if msg.type == WSMsgType.CLOSE: self._closing = True self._close_code = msg.data if not self._closed and self._autoclose: await self.close() elif msg.type == WSMsgType.CLOSING: self._closing = True elif msg.type == WSMsgType.PING and self._autoping: await self.pong(msg.data) continue elif msg.type == WSMsgType.PONG and self._autoping: continue return msg async def receive_str(self, *, timeout=None): msg = await self.receive(timeout) if msg.type != WSMsgType.TEXT: raise TypeError( "Received message {}:{!r} is not str".format(msg.type, msg.data)) return msg.data async def receive_bytes(self, *, timeout=None): msg = await self.receive(timeout) if msg.type != WSMsgType.BINARY: raise TypeError( "Received message {}:{!r} is not bytes".format(msg.type, msg.data)) return msg.data async def receive_json(self, *, loads=json.loads, timeout=None): data = await self.receive_str(timeout=timeout) return loads(data) def __aiter__(self): return self async def __anext__(self): msg = await self.receive() if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, 
WSMsgType.CLOSED): raise StopAsyncIteration # NOQA return msg aiohttp-3.0.1/aiohttp/connector.py0000666000000000000000000007750213240304665015347 0ustar 00000000000000import asyncio import functools import sys import traceback import warnings from collections import defaultdict from contextlib import suppress from http.cookies import SimpleCookie from itertools import cycle, islice from time import monotonic from . import hdrs, helpers from .client_exceptions import (ClientConnectionError, ClientConnectorCertificateError, ClientConnectorError, ClientConnectorSSLError, ClientHttpProxyError, ClientProxyConnectionError, ServerFingerprintMismatch, certificate_errors, ssl_errors) from .client_proto import ResponseHandler from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params from .helpers import PY_36, is_ip_address, noop, sentinel from .locks import EventResultOrError from .resolver import DefaultResolver try: import ssl except ImportError: # pragma: no cover ssl = None __all__ = ('BaseConnector', 'TCPConnector', 'UnixConnector') class Connection: _source_traceback = None _transport = None def __init__(self, connector, key, protocol, loop): self._key = key self._connector = connector self._loop = loop self._protocol = protocol self._callbacks = [] if loop.get_debug(): self._source_traceback = traceback.extract_stack(sys._getframe(1)) def __repr__(self): return 'Connection<{}>'.format(self._key) def __del__(self, _warnings=warnings): if self._protocol is not None: if PY_36: kwargs = {'source': self} else: kwargs = {} _warnings.warn('Unclosed connection {!r}'.format(self), ResourceWarning, **kwargs) if self._loop.is_closed(): return self._connector._release( self._key, self._protocol, should_close=True) context = {'client_connection': self, 'message': 'Unclosed connection'} if self._source_traceback is not None: context['source_traceback'] = self._source_traceback self._loop.call_exception_handler(context) @property def loop(self): return self._loop 
@property def transport(self): return self._protocol.transport @property def protocol(self): return self._protocol @property def writer(self): return self._protocol.writer def add_callback(self, callback): if callback is not None: self._callbacks.append(callback) def _notify_release(self): callbacks, self._callbacks = self._callbacks[:], [] for cb in callbacks: with suppress(Exception): cb() def close(self): self._notify_release() if self._protocol is not None: self._connector._release( self._key, self._protocol, should_close=True) self._protocol = None def release(self): self._notify_release() if self._protocol is not None: self._connector._release( self._key, self._protocol, should_close=self._protocol.should_close) self._protocol = None def detach(self): self._notify_release() if self._protocol is not None: self._connector._release_acquired(self._protocol) self._protocol = None @property def closed(self): return self._protocol is None or not self._protocol.is_connected() class _TransportPlaceholder: """ placeholder for BaseConnector.connect function """ def close(self): pass class BaseConnector: """Base connector class. keepalive_timeout - (optional) Keep-alive timeout. force_close - Set to True to force close and do reconnect after each request (and between redirects). limit - The total number of simultaneous connections. limit_per_host - Number of simultaneous connections to one host. enable_cleanup_closed - Enables clean-up closed ssl transports. Disabled by default. loop - Optional event loop. 
""" _closed = True # prevent AttributeError in __del__ if ctor was failed _source_traceback = None # abort transport after 2 seconds (cleanup broken connections) _cleanup_closed_period = 2.0 def __init__(self, *, keepalive_timeout=sentinel, force_close=False, limit=100, limit_per_host=0, enable_cleanup_closed=False, loop=None): if force_close: if keepalive_timeout is not None and \ keepalive_timeout is not sentinel: raise ValueError('keepalive_timeout cannot ' 'be set if force_close is True') else: if keepalive_timeout is sentinel: keepalive_timeout = 15.0 if loop is None: loop = asyncio.get_event_loop() self._closed = False if loop.get_debug(): self._source_traceback = traceback.extract_stack(sys._getframe(1)) self._conns = {} self._limit = limit self._limit_per_host = limit_per_host self._acquired = set() self._acquired_per_host = defaultdict(set) self._keepalive_timeout = keepalive_timeout self._force_close = force_close self._waiters = defaultdict(list) self._loop = loop self._factory = functools.partial(ResponseHandler, loop=loop) self.cookies = SimpleCookie() # start keep-alive connection cleanup task self._cleanup_handle = None # start cleanup closed transports task self._cleanup_closed_handle = None self._cleanup_closed_disabled = not enable_cleanup_closed self._cleanup_closed_transports = [] self._cleanup_closed() def __del__(self, _warnings=warnings): if self._closed: return if not self._conns: return conns = [repr(c) for c in self._conns.values()] self.close() if PY_36: kwargs = {'source': self} else: kwargs = {} _warnings.warn("Unclosed connector {!r}".format(self), ResourceWarning, **kwargs) context = {'connector': self, 'connections': conns, 'message': 'Unclosed connector'} if self._source_traceback is not None: context['source_traceback'] = self._source_traceback self._loop.call_exception_handler(context) def __enter__(self): return self def __exit__(self, *exc): self.close() @property def force_close(self): """Ultimately close connection on 
releasing if True.""" return self._force_close @property def limit(self): """The total number for simultaneous connections. If limit is 0 the connector has no limit. The default limit size is 100. """ return self._limit @property def limit_per_host(self): """The limit_per_host for simultaneous connections to the same endpoint. Endpoints are the same if they are have equal (host, port, is_ssl) triple. """ return self._limit_per_host def _cleanup(self): """Cleanup unused transports.""" if self._cleanup_handle: self._cleanup_handle.cancel() now = self._loop.time() timeout = self._keepalive_timeout if self._conns: connections = {} deadline = now - timeout for key, conns in self._conns.items(): alive = [] for proto, use_time in conns: if proto.is_connected(): if use_time - deadline < 0: transport = proto.close() if key[-1] and not self._cleanup_closed_disabled: self._cleanup_closed_transports.append( transport) else: alive.append((proto, use_time)) if alive: connections[key] = alive self._conns = connections if self._conns: self._cleanup_handle = helpers.weakref_handle( self, '_cleanup', timeout, self._loop) def _drop_acquired_per_host(self, key, val): acquired_per_host = self._acquired_per_host if key not in acquired_per_host: return conns = acquired_per_host[key] conns.remove(val) if not conns: del self._acquired_per_host[key] def _cleanup_closed(self): """Double confirmation for transport close. Some broken ssl servers may leave socket open without proper close. 
""" if self._cleanup_closed_handle: self._cleanup_closed_handle.cancel() for transport in self._cleanup_closed_transports: if transport is not None: transport.abort() self._cleanup_closed_transports = [] if not self._cleanup_closed_disabled: self._cleanup_closed_handle = helpers.weakref_handle( self, '_cleanup_closed', self._cleanup_closed_period, self._loop) def close(self): """Close all opened transports.""" if self._closed: return self._closed = True try: if self._loop.is_closed(): return noop() # cancel cleanup task if self._cleanup_handle: self._cleanup_handle.cancel() # cancel cleanup close task if self._cleanup_closed_handle: self._cleanup_closed_handle.cancel() for data in self._conns.values(): for proto, t0 in data: proto.close() for proto in self._acquired: proto.close() for transport in self._cleanup_closed_transports: if transport is not None: transport.abort() finally: self._conns.clear() self._acquired.clear() self._waiters.clear() self._cleanup_handle = None self._cleanup_closed_transports.clear() self._cleanup_closed_handle = None @property def closed(self): """Is connector closed. A readonly property. """ return self._closed async def connect(self, req, traces=None): """Get from pool or create new connection.""" key = req.connection_key if self._limit: # total calc available connections available = self._limit - len(self._acquired) # check limit per host if (self._limit_per_host and available > 0 and key in self._acquired_per_host): available = self._limit_per_host - len( self._acquired_per_host.get(key)) elif self._limit_per_host and key in self._acquired_per_host: # check limit per host available = self._limit_per_host - len( self._acquired_per_host.get(key)) else: available = 1 # Wait if there are no available connections. if available <= 0: fut = self._loop.create_future() # This connection will now count towards the limit. 
waiters = self._waiters[key] waiters.append(fut) if traces: for trace in traces: await trace.send_connection_queued_start() try: await fut finally: # remove a waiter even if it was cancelled waiters.remove(fut) if not waiters: del self._waiters[key] if traces: for trace in traces: await trace.send_connection_queued_end() proto = self._get(key) if proto is None: placeholder = _TransportPlaceholder() self._acquired.add(placeholder) self._acquired_per_host[key].add(placeholder) if traces: for trace in traces: await trace.send_connection_create_start() try: proto = await self._create_connection( req, traces=traces ) if self._closed: proto.close() raise ClientConnectionError("Connector is closed.") except BaseException: # signal to waiter if key in self._waiters: for waiter in self._waiters[key]: if not waiter.done(): waiter.set_result(None) break raise finally: if not self._closed: self._acquired.remove(placeholder) self._drop_acquired_per_host(key, placeholder) if traces: for trace in traces: await trace.send_connection_create_end() else: if traces: for trace in traces: await trace.send_connection_reuseconn() self._acquired.add(proto) self._acquired_per_host[key].add(proto) return Connection(self, key, proto, self._loop) def _get(self, key): try: conns = self._conns[key] except KeyError: return None t1 = self._loop.time() while conns: proto, t0 = conns.pop() if proto.is_connected(): if t1 - t0 > self._keepalive_timeout: transport = proto.close() # only for SSL transports if key[-1] and not self._cleanup_closed_disabled: self._cleanup_closed_transports.append(transport) else: if not conns: # The very last connection was reclaimed: drop the key del self._conns[key] return proto # No more connections: drop the key del self._conns[key] return None def _release_waiter(self): # always release only one waiter if self._limit: # if we have limit and we have available if self._limit - len(self._acquired) > 0: for key, waiters in self._waiters.items(): if waiters: if not 
waiters[0].done(): waiters[0].set_result(None) break elif self._limit_per_host: # if we have dont have limit but have limit per host # then release first available for key, waiters in self._waiters.items(): if waiters: if not waiters[0].done(): waiters[0].set_result(None) break def _release_acquired(self, key, proto): if self._closed: # acquired connection is already released on connector closing return try: self._acquired.remove(proto) self._drop_acquired_per_host(key, proto) except KeyError: # pragma: no cover # this may be result of undetermenistic order of objects # finalization due garbage collection. pass else: self._release_waiter() def _release(self, key, protocol, *, should_close=False): if self._closed: # acquired connection is already released on connector closing return self._release_acquired(key, protocol) if self._force_close: should_close = True if should_close or protocol.should_close: transport = protocol.close() if key[-1] and not self._cleanup_closed_disabled: self._cleanup_closed_transports.append(transport) else: conns = self._conns.get(key) if conns is None: conns = self._conns[key] = [] conns.append((protocol, self._loop.time())) if self._cleanup_handle is None: self._cleanup_handle = helpers.weakref_handle( self, '_cleanup', self._keepalive_timeout, self._loop) async def _create_connection(self, req, traces=None): raise NotImplementedError() class _DNSCacheTable: def __init__(self, ttl=None): self._addrs_rr = {} self._timestamps = {} self._ttl = ttl def __contains__(self, host): return host in self._addrs_rr def add(self, host, addrs): self._addrs_rr[host] = (cycle(addrs), len(addrs)) if self._ttl: self._timestamps[host] = monotonic() def remove(self, host): self._addrs_rr.pop(host, None) if self._ttl: self._timestamps.pop(host, None) def clear(self): self._addrs_rr.clear() self._timestamps.clear() def next_addrs(self, host): loop, length = self._addrs_rr[host] addrs = list(islice(loop, length)) # Consume one more element to shift internal 
state of `cycle` next(loop) return addrs def expired(self, host): if self._ttl is None: return False return self._timestamps[host] + self._ttl < monotonic() class TCPConnector(BaseConnector): """TCP connector. verify_ssl - Set to True to check ssl certifications. fingerprint - Pass the binary sha256 digest of the expected certificate in DER format to verify that the certificate the server presents matches. See also https://en.wikipedia.org/wiki/Transport_Layer_Security#Certificate_pinning resolver - Enable DNS lookups and use this resolver use_dns_cache - Use memory cache for DNS lookups. ttl_dns_cache - Max seconds having cached a DNS entry, None forever. family - socket address family local_addr - local tuple of (host, port) to bind socket to keepalive_timeout - (optional) Keep-alive timeout. force_close - Set to True to force close and do reconnect after each request (and between redirects). limit - The total number of simultaneous connections. limit_per_host - Number of simultaneous connections to one host. enable_cleanup_closed - Enables clean-up closed ssl transports. Disabled by default. loop - Optional event loop. 
""" def __init__(self, *, verify_ssl=True, fingerprint=None, use_dns_cache=True, ttl_dns_cache=10, family=0, ssl_context=None, ssl=None, local_addr=None, resolver=None, keepalive_timeout=sentinel, force_close=False, limit=100, limit_per_host=0, enable_cleanup_closed=False, loop=None): super().__init__(keepalive_timeout=keepalive_timeout, force_close=force_close, limit=limit, limit_per_host=limit_per_host, enable_cleanup_closed=enable_cleanup_closed, loop=loop) self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) if resolver is None: resolver = DefaultResolver(loop=self._loop) self._resolver = resolver self._use_dns_cache = use_dns_cache self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache) self._throttle_dns_events = {} self._family = family self._local_addr = local_addr def close(self): """Close all ongoing DNS calls.""" for ev in self._throttle_dns_events.values(): ev.cancel() super().close() @property def family(self): """Socket family like AF_INET.""" return self._family @property def use_dns_cache(self): """True if local DNS caching is enabled.""" return self._use_dns_cache def clear_dns_cache(self, host=None, port=None): """Remove specified host/port or clear all dns local cache.""" if host is not None and port is not None: self._cached_hosts.remove((host, port)) elif host is not None or port is not None: raise ValueError("either both host and port " "or none of them are allowed") else: self._cached_hosts.clear() async def _resolve_host(self, host, port, traces=None): if is_ip_address(host): return [{'hostname': host, 'host': host, 'port': port, 'family': self._family, 'proto': 0, 'flags': 0}] if not self._use_dns_cache: if traces: for trace in traces: await trace.send_dns_resolvehost_start(host) res = (await self._resolver.resolve( host, port, family=self._family)) if traces: for trace in traces: await trace.send_dns_resolvehost_end(host) return res key = (host, port) if (key in self._cached_hosts) and \ (not 
self._cached_hosts.expired(key)): if traces: for trace in traces: await trace.send_dns_cache_hit(host) return self._cached_hosts.next_addrs(key) if key in self._throttle_dns_events: if traces: for trace in traces: await trace.send_dns_cache_hit(host) await self._throttle_dns_events[key].wait() else: if traces: for trace in traces: await trace.send_dns_cache_miss(host) self._throttle_dns_events[key] = \ EventResultOrError(self._loop) try: if traces: for trace in traces: await trace.send_dns_resolvehost_start(host) addrs = await \ asyncio.shield(self._resolver.resolve(host, port, family=self._family), loop=self._loop) if traces: for trace in traces: await trace.send_dns_resolvehost_end(host) self._cached_hosts.add(key, addrs) self._throttle_dns_events[key].set() except BaseException as e: # any DNS exception, independently of the implementation # is set for the waiters to raise the same exception. self._throttle_dns_events[key].set(exc=e) raise finally: self._throttle_dns_events.pop(key) return self._cached_hosts.next_addrs(key) async def _create_connection(self, req, traces=None): """Create connection. Has same keyword arguments as BaseEventLoop.create_connection. """ if req.proxy: _, proto = await self._create_proxy_connection( req, traces=None ) else: _, proto = await self._create_direct_connection( req, traces=None ) return proto @staticmethod @functools.lru_cache(None) def _make_ssl_context(verified): if verified: return ssl.create_default_context() else: sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23) sslcontext.options |= ssl.OP_NO_SSLv2 sslcontext.options |= ssl.OP_NO_SSLv3 sslcontext.options |= ssl.OP_NO_COMPRESSION sslcontext.set_default_verify_paths() return sslcontext def _get_ssl_context(self, req): """Logic to get the correct SSL context 0. if req.ssl is false, return None 1. if ssl_context is specified in req, use it 2. if _ssl_context is specified in self, use it 3. otherwise: 1. 
if verify_ssl is not specified in req, use self.ssl_context (will generate a default context according to self.verify_ssl) 2. if verify_ssl is True in req, generate a default SSL context 3. if verify_ssl is False in req, generate a SSL context that won't verify """ if req.is_ssl(): if ssl is None: # pragma: no cover raise RuntimeError('SSL is not supported.') sslcontext = req.ssl if isinstance(sslcontext, ssl.SSLContext): return sslcontext if sslcontext is not None: # not verified or fingerprinted return self._make_ssl_context(False) sslcontext = self._ssl if isinstance(sslcontext, ssl.SSLContext): return sslcontext if sslcontext is not None: # not verified or fingerprinted return self._make_ssl_context(False) return self._make_ssl_context(True) else: return None def _get_fingerprint(self, req): ret = req.ssl if isinstance(ret, Fingerprint): return ret ret = self._ssl if isinstance(ret, Fingerprint): return ret return None async def _wrap_create_connection(self, *args, req, client_error=ClientConnectorError, **kwargs): try: return await self._loop.create_connection(*args, **kwargs) except certificate_errors as exc: raise ClientConnectorCertificateError( req.connection_key, exc) from exc except ssl_errors as exc: raise ClientConnectorSSLError(req.connection_key, exc) from exc except OSError as exc: raise client_error(req.connection_key, exc) from exc async def _create_direct_connection(self, req, *, client_error=ClientConnectorError, traces=None): sslcontext = self._get_ssl_context(req) fingerprint = self._get_fingerprint(req) try: hosts = await self._resolve_host( req.url.raw_host, req.port, traces=traces) except OSError as exc: # in case of proxy it is not ClientProxyConnectionError # it is problem of resolving proxy ip itself raise ClientConnectorError(req.connection_key, exc) from exc last_exc = None for hinfo in hosts: host = hinfo['host'] port = hinfo['port'] try: transp, proto = await self._wrap_create_connection( self._factory, host, port, ssl=sslcontext, 
family=hinfo['family'], proto=hinfo['proto'], flags=hinfo['flags'], server_hostname=hinfo['hostname'] if sslcontext else None, local_addr=self._local_addr, req=req, client_error=client_error) except ClientConnectorError as exc: last_exc = exc continue if req.is_ssl() and fingerprint: try: fingerprint.check(transp) except ServerFingerprintMismatch as exc: transp.close() if not self._cleanup_closed_disabled: self._cleanup_closed_transports.append(transp) last_exc = exc continue return transp, proto else: raise last_exc async def _create_proxy_connection(self, req, traces=None): headers = {} if req.proxy_headers is not None: headers = req.proxy_headers headers[hdrs.HOST] = req.headers[hdrs.HOST] proxy_req = ClientRequest( hdrs.METH_GET, req.proxy, headers=headers, auth=req.proxy_auth, loop=self._loop, ssl=req.ssl) # create connection to proxy server transport, proto = await self._create_direct_connection( proxy_req, client_error=ClientProxyConnectionError) auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None) if auth is not None: if not req.is_ssl(): req.headers[hdrs.PROXY_AUTHORIZATION] = auth else: proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth if req.is_ssl(): sslcontext = self._get_ssl_context(req) # For HTTPS requests over HTTP proxy # we must notify proxy to tunnel connection # so we send CONNECT command: # CONNECT www.python.org:443 HTTP/1.1 # Host: www.python.org # # next we must do TLS handshake and so on # to do this we must wrap raw socket into secure one # asyncio handles this perfectly proxy_req.method = hdrs.METH_CONNECT proxy_req.url = req.url key = (req.host, req.port, req.ssl) conn = Connection(self, key, proto, self._loop) proxy_resp = proxy_req.send(conn) try: resp = await proxy_resp.start(conn, True) except BaseException: proxy_resp.close() conn.close() raise else: conn._protocol = None conn._transport = None try: if resp.status != 200: raise ClientHttpProxyError( proxy_resp.request_info, resp.history, code=resp.status, message=resp.reason, 
headers=resp.headers) rawsock = transport.get_extra_info('socket', default=None) if rawsock is None: raise RuntimeError( "Transport does not expose socket instance") # Duplicate the socket, so now we can close proxy transport rawsock = rawsock.dup() finally: transport.close() transport, proto = await self._wrap_create_connection( self._factory, ssl=sslcontext, sock=rawsock, server_hostname=req.host, req=req) finally: proxy_resp.close() return transport, proto class UnixConnector(BaseConnector): """Unix socket connector. path - Unix socket path. keepalive_timeout - (optional) Keep-alive timeout. force_close - Set to True to force close and do reconnect after each request (and between redirects). limit - The total number of simultaneous connections. limit_per_host - Number of simultaneous connections to one host. loop - Optional event loop. """ def __init__(self, path, force_close=False, keepalive_timeout=sentinel, limit=100, limit_per_host=0, loop=None): super().__init__(force_close=force_close, keepalive_timeout=keepalive_timeout, limit=limit, limit_per_host=limit_per_host, loop=loop) self._path = path @property def path(self): """Path to unix socket.""" return self._path async def _create_connection(self, req, traces=None): try: _, proto = await self._loop.create_unix_connection( self._factory, self._path) except OSError as exc: raise ClientConnectorError(req.connection_key, exc) from exc return proto aiohttp-3.0.1/aiohttp/cookiejar.py0000666000000000000000000002377213240304665015323 0ustar 00000000000000import datetime import pathlib import pickle import re from collections import defaultdict from collections.abc import Mapping from http.cookies import Morsel, SimpleCookie from math import ceil from yarl import URL from .abc import AbstractCookieJar from .helpers import is_ip_address __all__ = ('CookieJar', 'DummyCookieJar') class CookieJar(AbstractCookieJar): """Implements cookie storage adhering to RFC 6265.""" DATE_TOKENS_RE = re.compile( 
r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*" r"(?P[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)") DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})") DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})") DATE_MONTH_RE = re.compile("(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" "(aug)|(sep)|(oct)|(nov)|(dec)", re.I) DATE_YEAR_RE = re.compile(r"(\d{2,4})") MAX_TIME = 2051215261.0 # so far in future (2035-01-01) def __init__(self, *, unsafe=False, loop=None): super().__init__(loop=loop) self._cookies = defaultdict(SimpleCookie) self._host_only_cookies = set() self._unsafe = unsafe self._next_expiration = ceil(self._loop.time()) self._expirations = {} def save(self, file_path): file_path = pathlib.Path(file_path) with file_path.open(mode='wb') as f: pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL) def load(self, file_path): file_path = pathlib.Path(file_path) with file_path.open(mode='rb') as f: self._cookies = pickle.load(f) def clear(self): self._cookies.clear() self._host_only_cookies.clear() self._next_expiration = ceil(self._loop.time()) self._expirations.clear() def __iter__(self): self._do_expiration() for val in self._cookies.values(): yield from val.values() def __len__(self): return sum(1 for i in self) def _do_expiration(self): now = self._loop.time() if self._next_expiration > now: return if not self._expirations: return next_expiration = self.MAX_TIME to_del = [] cookies = self._cookies expirations = self._expirations for (domain, name), when in expirations.items(): if when <= now: cookies[domain].pop(name, None) to_del.append((domain, name)) self._host_only_cookies.discard((domain, name)) else: next_expiration = min(next_expiration, when) for key in to_del: del expirations[key] self._next_expiration = ceil(next_expiration) def _expire_cookie(self, when, domain, name): self._next_expiration = min(self._next_expiration, when) self._expirations[(domain, name)] = when def update_cookies(self, cookies, response_url=URL()): """Update cookies.""" hostname 
= response_url.raw_host if not self._unsafe and is_ip_address(hostname): # Don't accept cookies from IPs return if isinstance(cookies, Mapping): cookies = cookies.items() for name, cookie in cookies: if not isinstance(cookie, Morsel): tmp = SimpleCookie() tmp[name] = cookie cookie = tmp[name] domain = cookie["domain"] # ignore domains with trailing dots if domain.endswith('.'): domain = "" del cookie["domain"] if not domain and hostname is not None: # Set the cookie's domain to the response hostname # and set its host-only-flag self._host_only_cookies.add((hostname, name)) domain = cookie["domain"] = hostname if domain.startswith("."): # Remove leading dot domain = domain[1:] cookie["domain"] = domain if hostname and not self._is_domain_match(domain, hostname): # Setting cookies for different domains is not allowed continue path = cookie["path"] if not path or not path.startswith("/"): # Set the cookie's path to the response path path = response_url.path if not path.startswith("/"): path = "/" else: # Cut everything from the last slash to the end path = "/" + path[1:path.rfind("/")] cookie["path"] = path max_age = cookie["max-age"] if max_age: try: delta_seconds = int(max_age) self._expire_cookie(self._loop.time() + delta_seconds, domain, name) except ValueError: cookie["max-age"] = "" else: expires = cookie["expires"] if expires: expire_time = self._parse_date(expires) if expire_time: self._expire_cookie(expire_time.timestamp(), domain, name) else: cookie["expires"] = "" self._cookies[domain][name] = cookie self._do_expiration() def filter_cookies(self, request_url=URL()): """Returns this jar's cookies filtered by their attributes.""" self._do_expiration() request_url = URL(request_url) filtered = SimpleCookie() hostname = request_url.raw_host or "" is_not_secure = request_url.scheme not in ("https", "wss") for cookie in self: name = cookie.key domain = cookie["domain"] # Send shared cookies if not domain: filtered[name] = cookie.value continue if not self._unsafe 
and is_ip_address(hostname): continue if (domain, name) in self._host_only_cookies: if domain != hostname: continue elif not self._is_domain_match(domain, hostname): continue if not self._is_path_match(request_url.path, cookie["path"]): continue if is_not_secure and cookie["secure"]: continue # It's critical we use the Morsel so the coded_value # (based on cookie version) is preserved mrsl_val = cookie.get(cookie.key, Morsel()) mrsl_val.set(cookie.key, cookie.value, cookie.coded_value) filtered[name] = mrsl_val return filtered @staticmethod def _is_domain_match(domain, hostname): """Implements domain matching adhering to RFC 6265.""" if hostname == domain: return True if not hostname.endswith(domain): return False non_matching = hostname[:-len(domain)] if not non_matching.endswith("."): return False return not is_ip_address(hostname) @staticmethod def _is_path_match(req_path, cookie_path): """Implements path matching adhering to RFC 6265.""" if not req_path.startswith("/"): req_path = "/" if req_path == cookie_path: return True if not req_path.startswith(cookie_path): return False if cookie_path.endswith("/"): return True non_matching = req_path[len(cookie_path):] return non_matching.startswith("/") @classmethod def _parse_date(cls, date_str): """Implements date string parsing adhering to RFC 6265.""" if not date_str: return found_time = False found_day = False found_month = False found_year = False hour = minute = second = 0 day = 0 month = 0 year = 0 for token_match in cls.DATE_TOKENS_RE.finditer(date_str): token = token_match.group("token") if not found_time: time_match = cls.DATE_HMS_TIME_RE.match(token) if time_match: found_time = True hour, minute, second = [ int(s) for s in time_match.groups()] continue if not found_day: day_match = cls.DATE_DAY_OF_MONTH_RE.match(token) if day_match: found_day = True day = int(day_match.group()) continue if not found_month: month_match = cls.DATE_MONTH_RE.match(token) if month_match: found_month = True month = 
month_match.lastindex continue if not found_year: year_match = cls.DATE_YEAR_RE.match(token) if year_match: found_year = True year = int(year_match.group()) if 70 <= year <= 99: year += 1900 elif 0 <= year <= 69: year += 2000 if False in (found_day, found_month, found_year, found_time): return if not 1 <= day <= 31: return if year < 1601 or hour > 23 or minute > 59 or second > 59: return return datetime.datetime(year, month, day, hour, minute, second, tzinfo=datetime.timezone.utc) class DummyCookieJar(AbstractCookieJar): """Implements a dummy cookie storage. It can be used with the ClientSession when no cookie processing is needed. """ def __init__(self, *, loop=None): super().__init__(loop=loop) def __iter__(self): while False: yield None def __len__(self): return 0 def clear(self): pass def update_cookies(self, cookies, response_url=None): pass def filter_cookies(self, request_url): return None aiohttp-3.0.1/aiohttp/formdata.py0000666000000000000000000001234013240304665015137 0ustar 00000000000000import io from urllib.parse import urlencode from multidict import MultiDict, MultiDictProxy from . 
import hdrs, multipart, payload from .helpers import guess_filename __all__ = ('FormData',) class FormData: """Helper class for multipart/form-data and application/x-www-form-urlencoded body generation.""" def __init__(self, fields=(), quote_fields=True, charset=None): self._writer = multipart.MultipartWriter('form-data') self._fields = [] self._is_multipart = False self._quote_fields = quote_fields self._charset = charset if isinstance(fields, dict): fields = list(fields.items()) elif not isinstance(fields, (list, tuple)): fields = (fields,) self.add_fields(*fields) @property def is_multipart(self): return self._is_multipart def add_field(self, name, value, *, content_type=None, filename=None, content_transfer_encoding=None): if isinstance(value, io.IOBase): self._is_multipart = True elif isinstance(value, (bytes, bytearray, memoryview)): if filename is None and content_transfer_encoding is None: filename = name type_options = MultiDict({'name': name}) if filename is not None and not isinstance(filename, str): raise TypeError('filename must be an instance of str. ' 'Got: %s' % filename) if filename is None and isinstance(value, io.IOBase): filename = guess_filename(value, name) if filename is not None: type_options['filename'] = filename self._is_multipart = True headers = {} if content_type is not None: if not isinstance(content_type, str): raise TypeError('content_type must be an instance of str. ' 'Got: %s' % content_type) headers[hdrs.CONTENT_TYPE] = content_type self._is_multipart = True if content_transfer_encoding is not None: if not isinstance(content_transfer_encoding, str): raise TypeError('content_transfer_encoding must be an instance' ' of str. 
Got: %s' % content_transfer_encoding) headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding self._is_multipart = True self._fields.append((type_options, headers, value)) def add_fields(self, *fields): to_add = list(fields) while to_add: rec = to_add.pop(0) if isinstance(rec, io.IOBase): k = guess_filename(rec, 'unknown') self.add_field(k, rec) elif isinstance(rec, (MultiDictProxy, MultiDict)): to_add.extend(rec.items()) elif isinstance(rec, (list, tuple)) and len(rec) == 2: k, fp = rec self.add_field(k, fp) else: raise TypeError('Only io.IOBase, multidict and (name, file) ' 'pairs allowed, use .add_field() for passing ' 'more complex parameters, got {!r}' .format(rec)) def _gen_form_urlencoded(self): # form data (x-www-form-urlencoded) data = [] for type_options, _, value in self._fields: data.append((type_options['name'], value)) charset = self._charset if self._charset is not None else 'utf-8' if charset == 'utf-8': content_type = 'application/x-www-form-urlencoded' else: content_type = ('application/x-www-form-urlencoded; ' 'charset=%s' % charset) return payload.BytesPayload( urlencode(data, doseq=True, encoding=charset).encode(), content_type=content_type) def _gen_form_data(self): """Encode a list of fields using the multipart/form-data MIME format""" for dispparams, headers, value in self._fields: try: if hdrs.CONTENT_TYPE in headers: part = payload.get_payload( value, content_type=headers[hdrs.CONTENT_TYPE], headers=headers, encoding=self._charset) else: part = payload.get_payload( value, headers=headers, encoding=self._charset) except Exception as exc: raise TypeError( 'Can not serialize value type: %r\n ' 'headers: %r\n value: %r' % ( type(value), headers, value)) from exc if dispparams: part.set_content_disposition( 'form-data', quote_fields=self._quote_fields, **dispparams ) # FIXME cgi.FieldStorage doesn't likes body parts with # Content-Length which were sent via chunked transfer encoding part.headers.popall(hdrs.CONTENT_LENGTH, None) 
self._writer.append_payload(part) return self._writer def __call__(self): if self._is_multipart: return self._gen_form_data() else: return self._gen_form_urlencoded() aiohttp-3.0.1/aiohttp/frozenlist.py0000666000000000000000000000335513240304665015547 0ustar 00000000000000from collections.abc import MutableSequence from functools import total_ordering from .helpers import NO_EXTENSIONS if not NO_EXTENSIONS: try: from aiohttp._frozenlist import FrozenList except ImportError: # pragma: no cover FrozenList = None @total_ordering class PyFrozenList(MutableSequence): __slots__ = ('_frozen', '_items') def __init__(self, items=None): self._frozen = False if items is not None: items = list(items) else: items = [] self._items = items @property def frozen(self): return self._frozen def freeze(self): self._frozen = True def __getitem__(self, index): return self._items[index] def __setitem__(self, index, value): if self._frozen: raise RuntimeError("Cannot modify frozen list.") self._items[index] = value def __delitem__(self, index): if self._frozen: raise RuntimeError("Cannot modify frozen list.") del self._items[index] def __len__(self): return self._items.__len__() def __iter__(self): return self._items.__iter__() def __reversed__(self): return self._items.__reversed__() def __eq__(self, other): return list(self) == other def __le__(self, other): return list(self) <= other def insert(self, pos, item): if self._frozen: raise RuntimeError("Cannot modify frozen list.") self._items.insert(pos, item) def __repr__(self): return ''.format(self._frozen, self._items) if NO_EXTENSIONS or FrozenList is None: FrozenList = PyFrozenList aiohttp-3.0.1/aiohttp/hdrs.py0000666000000000000000000000643713240304665014314 0ustar 00000000000000"""HTTP Headers constants.""" from multidict import istr METH_ANY = '*' METH_CONNECT = 'CONNECT' METH_HEAD = 'HEAD' METH_GET = 'GET' METH_DELETE = 'DELETE' METH_OPTIONS = 'OPTIONS' METH_PATCH = 'PATCH' METH_POST = 'POST' METH_PUT = 'PUT' METH_TRACE = 'TRACE' 
METH_ALL = {METH_CONNECT, METH_HEAD, METH_GET, METH_DELETE, METH_OPTIONS, METH_PATCH, METH_POST, METH_PUT, METH_TRACE} ACCEPT = istr('ACCEPT') ACCEPT_CHARSET = istr('ACCEPT-CHARSET') ACCEPT_ENCODING = istr('ACCEPT-ENCODING') ACCEPT_LANGUAGE = istr('ACCEPT-LANGUAGE') ACCEPT_RANGES = istr('ACCEPT-RANGES') ACCESS_CONTROL_MAX_AGE = istr('ACCESS-CONTROL-MAX-AGE') ACCESS_CONTROL_ALLOW_CREDENTIALS = istr('ACCESS-CONTROL-ALLOW-CREDENTIALS') ACCESS_CONTROL_ALLOW_HEADERS = istr('ACCESS-CONTROL-ALLOW-HEADERS') ACCESS_CONTROL_ALLOW_METHODS = istr('ACCESS-CONTROL-ALLOW-METHODS') ACCESS_CONTROL_ALLOW_ORIGIN = istr('ACCESS-CONTROL-ALLOW-ORIGIN') ACCESS_CONTROL_EXPOSE_HEADERS = istr('ACCESS-CONTROL-EXPOSE-HEADERS') ACCESS_CONTROL_REQUEST_HEADERS = istr('ACCESS-CONTROL-REQUEST-HEADERS') ACCESS_CONTROL_REQUEST_METHOD = istr('ACCESS-CONTROL-REQUEST-METHOD') AGE = istr('AGE') ALLOW = istr('ALLOW') AUTHORIZATION = istr('AUTHORIZATION') CACHE_CONTROL = istr('CACHE-CONTROL') CONNECTION = istr('CONNECTION') CONTENT_DISPOSITION = istr('CONTENT-DISPOSITION') CONTENT_ENCODING = istr('CONTENT-ENCODING') CONTENT_LANGUAGE = istr('CONTENT-LANGUAGE') CONTENT_LENGTH = istr('CONTENT-LENGTH') CONTENT_LOCATION = istr('CONTENT-LOCATION') CONTENT_MD5 = istr('CONTENT-MD5') CONTENT_RANGE = istr('CONTENT-RANGE') CONTENT_TRANSFER_ENCODING = istr('CONTENT-TRANSFER-ENCODING') CONTENT_TYPE = istr('CONTENT-TYPE') COOKIE = istr('COOKIE') DATE = istr('DATE') DESTINATION = istr('DESTINATION') DIGEST = istr('DIGEST') ETAG = istr('ETAG') EXPECT = istr('EXPECT') EXPIRES = istr('EXPIRES') FORWARDED = istr('FORWARDED') FROM = istr('FROM') HOST = istr('HOST') IF_MATCH = istr('IF-MATCH') IF_MODIFIED_SINCE = istr('IF-MODIFIED-SINCE') IF_NONE_MATCH = istr('IF-NONE-MATCH') IF_RANGE = istr('IF-RANGE') IF_UNMODIFIED_SINCE = istr('IF-UNMODIFIED-SINCE') KEEP_ALIVE = istr('KEEP-ALIVE') LAST_EVENT_ID = istr('LAST-EVENT-ID') LAST_MODIFIED = istr('LAST-MODIFIED') LINK = istr('LINK') LOCATION = istr('LOCATION') MAX_FORWARDS = 
istr('MAX-FORWARDS') ORIGIN = istr('ORIGIN') PRAGMA = istr('PRAGMA') PROXY_AUTHENTICATE = istr('PROXY_AUTHENTICATE') PROXY_AUTHORIZATION = istr('PROXY-AUTHORIZATION') RANGE = istr('RANGE') REFERER = istr('REFERER') RETRY_AFTER = istr('RETRY-AFTER') SEC_WEBSOCKET_ACCEPT = istr('SEC-WEBSOCKET-ACCEPT') SEC_WEBSOCKET_VERSION = istr('SEC-WEBSOCKET-VERSION') SEC_WEBSOCKET_PROTOCOL = istr('SEC-WEBSOCKET-PROTOCOL') SEC_WEBSOCKET_EXTENSIONS = istr('SEC-WEBSOCKET-EXTENSIONS') SEC_WEBSOCKET_KEY = istr('SEC-WEBSOCKET-KEY') SEC_WEBSOCKET_KEY1 = istr('SEC-WEBSOCKET-KEY1') SERVER = istr('SERVER') SET_COOKIE = istr('SET-COOKIE') TE = istr('TE') TRAILER = istr('TRAILER') TRANSFER_ENCODING = istr('TRANSFER-ENCODING') UPGRADE = istr('UPGRADE') WEBSOCKET = istr('WEBSOCKET') URI = istr('URI') USER_AGENT = istr('USER-AGENT') VARY = istr('VARY') VIA = istr('VIA') WANT_DIGEST = istr('WANT-DIGEST') WARNING = istr('WARNING') WWW_AUTHENTICATE = istr('WWW-AUTHENTICATE') X_FORWARDED_FOR = istr('X-FORWARDED-FOR') X_FORWARDED_HOST = istr('X-FORWARDED-HOST') X_FORWARDED_PROTO = istr('X-FORWARDED-PROTO') aiohttp-3.0.1/aiohttp/helpers.py0000666000000000000000000005430213240304665015010 0ustar 00000000000000"""Various helper functions""" import asyncio import base64 import binascii import cgi import datetime import functools import inspect import netrc import os import re import sys import time import weakref from collections import namedtuple from contextlib import suppress from math import ceil from pathlib import Path from urllib.parse import quote from urllib.request import getproxies import async_timeout import attr from multidict import MultiDict from yarl import URL from . 
import hdrs from .abc import AbstractAccessLogger from .log import client_logger __all__ = ('BasicAuth',) PY_36 = sys.version_info >= (3, 6) if sys.version_info < (3, 7): import idna_ssl idna_ssl.patch_match_hostname() sentinel = object() NO_EXTENSIONS = bool(os.environ.get('AIOHTTP_NO_EXTENSIONS')) CHAR = set(chr(i) for i in range(0, 128)) CTL = set(chr(i) for i in range(0, 32)) | {chr(127), } SEPARATORS = {'(', ')', '<', '>', '@', ',', ';', ':', '\\', '"', '/', '[', ']', '?', '=', '{', '}', ' ', chr(9)} TOKEN = CHAR ^ CTL ^ SEPARATORS coroutines = asyncio.coroutines old_debug = coroutines._DEBUG coroutines._DEBUG = False @asyncio.coroutine def noop(*args, **kwargs): return coroutines._DEBUG = old_debug class BasicAuth(namedtuple('BasicAuth', ['login', 'password', 'encoding'])): """Http basic authentication helper.""" def __new__(cls, login, password='', encoding='latin1'): if login is None: raise ValueError('None is not allowed as login value') if password is None: raise ValueError('None is not allowed as password value') if ':' in login: raise ValueError( 'A ":" is not allowed in login (RFC 1945#section-11.1)') return super().__new__(cls, login, password, encoding) @classmethod def decode(cls, auth_header, encoding='latin1'): """Create a BasicAuth object from an Authorization HTTP header.""" split = auth_header.strip().split(' ') if len(split) == 2: if split[0].strip().lower() != 'basic': raise ValueError('Unknown authorization method %s' % split[0]) to_decode = split[1] else: raise ValueError('Could not parse authorization header.') try: username, _, password = base64.b64decode( to_decode.encode('ascii') ).decode(encoding).partition(':') except binascii.Error: raise ValueError('Invalid base64 encoding.') return cls(username, password, encoding=encoding) @classmethod def from_url(cls, url, *, encoding='latin1'): """Create BasicAuth from url.""" if not isinstance(url, URL): raise TypeError("url should be yarl.URL instance") if url.user is None: return None return 
cls(url.user, url.password or '', encoding=encoding) def encode(self): """Encode credentials.""" creds = ('%s:%s' % (self.login, self.password)).encode(self.encoding) return 'Basic %s' % base64.b64encode(creds).decode(self.encoding) def strip_auth_from_url(url): auth = BasicAuth.from_url(url) if auth is None: return url, None else: return url.with_user(None), auth def netrc_from_env(): netrc_obj = None netrc_path = os.environ.get('NETRC') try: if netrc_path is not None: netrc_path = Path(netrc_path) else: home_dir = Path.home() if os.name == 'nt': # pragma: no cover netrc_path = home_dir.joinpath('_netrc') else: netrc_path = home_dir.joinpath('.netrc') if netrc_path and netrc_path.is_file(): try: netrc_obj = netrc.netrc(str(netrc_path)) except (netrc.NetrcParseError, OSError) as e: client_logger.warning(".netrc file parses fail: %s", e) if netrc_obj is None: client_logger.warning("could't find .netrc file") except RuntimeError as e: # pragma: no cover """ handle error raised by pathlib """ client_logger.warning("could't find .netrc file: %s", e) return netrc_obj @attr.s(frozen=True, slots=True) class ProxyInfo: proxy = attr.ib(type=str) proxy_auth = attr.ib(type=BasicAuth) def proxies_from_env(): proxy_urls = {k: URL(v) for k, v in getproxies().items() if k in ('http', 'https')} netrc_obj = netrc_from_env() stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()} ret = {} for proto, val in stripped.items(): proxy, auth = val if proxy.scheme == 'https': client_logger.warning( "HTTPS proxies %s are not supported, ignoring", proxy) continue if netrc_obj and auth is None: auth_from_netrc = netrc_obj.authenticators(proxy.host) if auth_from_netrc is not None: # auth_from_netrc is a (`user`, `account`, `password`) tuple, # `user` and `account` both can be username, # if `user` is None, use `account` *logins, password = auth_from_netrc auth = BasicAuth(logins[0] if logins[0] else logins[-1], password) ret[proto] = ProxyInfo(proxy, auth) return ret def 
current_task(loop=None): if loop is None: loop = asyncio.get_event_loop() task = asyncio.Task.current_task(loop=loop) if task is None: if hasattr(loop, 'current_task'): task = loop.current_task() return task def isasyncgenfunction(obj): if hasattr(inspect, 'isasyncgenfunction'): return inspect.isasyncgenfunction(obj) return False @attr.s(frozen=True, slots=True) class MimeType: type = attr.ib(type=str) subtype = attr.ib(type=str) suffix = attr.ib(type=str) parameters = attr.ib(type=MultiDict) def parse_mimetype(mimetype): """Parses a MIME type into its components. mimetype is a MIME type string. Returns a MimeType object. Example: >>> parse_mimetype('text/html; charset=utf-8') MimeType(type='text', subtype='html', suffix='', parameters={'charset': 'utf-8'}) """ if not mimetype: return MimeType(type='', subtype='', suffix='', parameters={}) parts = mimetype.split(';') params = [] for item in parts[1:]: if not item: continue key, value = item.split('=', 1) if '=' in item else (item, '') params.append((key.lower().strip(), value.strip(' "'))) params = MultiDict(params) fulltype = parts[0].strip().lower() if fulltype == '*': fulltype = '*/*' mtype, stype = fulltype.split('/', 1) \ if '/' in fulltype else (fulltype, '') stype, suffix = stype.split('+', 1) if '+' in stype else (stype, '') return MimeType(type=mtype, subtype=stype, suffix=suffix, parameters=params) def guess_filename(obj, default=None): name = getattr(obj, 'name', None) if name and isinstance(name, str) and name[0] != '<' and name[-1] != '>': return Path(name).name return default def content_disposition_header(disptype, quote_fields=True, **params): """Sets ``Content-Disposition`` header. disptype is a disposition type: inline, attachment, form-data. Should be valid extension token (see RFC 2183) params is a dict with disposition params. 
""" if not disptype or not (TOKEN > set(disptype)): raise ValueError('bad content disposition type {!r}' ''.format(disptype)) value = disptype if params: lparams = [] for key, val in params.items(): if not key or not (TOKEN > set(key)): raise ValueError('bad content disposition parameter' ' {!r}={!r}'.format(key, val)) qval = quote(val, '') if quote_fields else val lparams.append((key, '"%s"' % qval)) if key == 'filename': lparams.append(('filename*', "utf-8''" + qval)) sparams = '; '.join('='.join(pair) for pair in lparams) value = '; '.join((value, sparams)) return value class AccessLogger(AbstractAccessLogger): """Helper object to log access. Usage: log = logging.getLogger("spam") log_format = "%a %{User-Agent}i" access_logger = AccessLogger(log, log_format) access_logger.log(request, response, time) Format: %% The percent sign %a Remote IP-address (IP-address of proxy if using reverse proxy) %t Time when the request was started to process %P The process ID of the child that serviced the request %r First line of request %s Response status code %b Size of response in bytes, including HTTP headers %T Time taken to serve the request, in seconds %Tf Time taken to serve the request, in seconds with floating fraction in .06f format %D Time taken to serve the request, in microseconds %{FOO}i request.headers['FOO'] %{FOO}o response.headers['FOO'] %{FOO}e os.environ['FOO'] """ LOG_FORMAT_MAP = { 'a': 'remote_address', 't': 'request_start_time', 'P': 'process_id', 'r': 'first_request_line', 's': 'response_status', 'b': 'response_size', 'T': 'request_time', 'Tf': 'request_time_frac', 'D': 'request_time_micro', 'i': 'request_header', 'o': 'response_header', } LOG_FORMAT = '%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"' FORMAT_RE = re.compile(r'%(\{([A-Za-z0-9\-_]+)\}([ioe])|[atPrsbOD]|Tf?)') CLEANUP_RE = re.compile(r'(%[^s])') _FORMAT_CACHE = {} KeyMethod = namedtuple('KeyMethod', 'key method') def __init__(self, logger, log_format=LOG_FORMAT): """Initialise the logger. 
logger is a logger object to be used for logging. log_format is an string with apache compatible log format description. """ super().__init__(logger, log_format=log_format) _compiled_format = AccessLogger._FORMAT_CACHE.get(log_format) if not _compiled_format: _compiled_format = self.compile_format(log_format) AccessLogger._FORMAT_CACHE[log_format] = _compiled_format self._log_format, self._methods = _compiled_format def compile_format(self, log_format): """Translate log_format into form usable by modulo formatting All known atoms will be replaced with %s Also methods for formatting of those atoms will be added to _methods in apropriate order For example we have log_format = "%a %t" This format will be translated to "%s %s" Also contents of _methods will be [self._format_a, self._format_t] These method will be called and results will be passed to translated string format. Each _format_* method receive 'args' which is list of arguments given to self.log Exceptions are _format_e, _format_i and _format_o methods which also receive key name (by functools.partial) """ # list of (key, method) tuples, we don't use an OrderedDict as users # can repeat the same key more than once methods = list() for atom in self.FORMAT_RE.findall(log_format): if atom[1] == '': format_key = self.LOG_FORMAT_MAP[atom[0]] m = getattr(AccessLogger, '_format_%s' % atom[0]) else: format_key = (self.LOG_FORMAT_MAP[atom[2]], atom[1]) m = getattr(AccessLogger, '_format_%s' % atom[2]) m = functools.partial(m, atom[1]) methods.append(self.KeyMethod(format_key, m)) log_format = self.FORMAT_RE.sub(r'%s', log_format) log_format = self.CLEANUP_RE.sub(r'%\1', log_format) return log_format, methods @staticmethod def _format_i(key, request, response, time): if request is None: return '(no headers)' # suboptimal, make istr(key) once return request.headers.get(key, '-') @staticmethod def _format_o(key, request, response, time): # suboptimal, make istr(key) once return response.headers.get(key, '-') 
@staticmethod def _format_a(request, response, time): if request is None: return '-' ip = request.remote return ip if ip is not None else '-' @staticmethod def _format_t(request, response, time): now = datetime.datetime.utcnow() start_time = now - datetime.timedelta(seconds=time) return start_time.strftime('[%d/%b/%Y:%H:%M:%S +0000]') @staticmethod def _format_P(request, response, time): return "<%s>" % os.getpid() @staticmethod def _format_r(request, response, time): if request is None: return '-' return '%s %s HTTP/%s.%s' % tuple((request.method, request.path_qs) + request.version) @staticmethod def _format_s(request, response, time): return response.status @staticmethod def _format_b(request, response, time): return response.body_length @staticmethod def _format_T(request, response, time): return round(time) @staticmethod def _format_Tf(request, response, time): return '%06f' % time @staticmethod def _format_D(request, response, time): return round(time * 1000000) def _format_line(self, request, response, time): return ((key, method(request, response, time)) for key, method in self._methods) def log(self, request, response, time): try: fmt_info = self._format_line(request, response, time) values = list() extra = dict() for key, value in fmt_info: values.append(value) if key.__class__ is str: extra[key] = value else: k1, k2 = key dct = extra.get(k1, {}) dct[k2] = value extra[k1] = dct self.logger.info(self._log_format % tuple(values), extra=extra) except Exception: self.logger.exception("Error in logging") class reify: """Use as a class method decorator. It operates almost exactly like the Python `@property` decorator, but it puts the result of the method it decorates into the instance dict after the first call, effectively replacing the function it decorates with an instance variable. It is, in Python parlance, a data descriptor. 
""" def __init__(self, wrapped): self.wrapped = wrapped try: self.__doc__ = wrapped.__doc__ except Exception: # pragma: no cover self.__doc__ = "" self.name = wrapped.__name__ def __get__(self, inst, owner, _sentinel=sentinel): try: try: return inst._cache[self.name] except KeyError: val = self.wrapped(inst) inst._cache[self.name] = val return val except AttributeError: if inst is None: return self raise def __set__(self, inst, value): raise AttributeError("reified property is read-only") _ipv4_pattern = (r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}' r'(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$') _ipv6_pattern = ( r'^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}' r'(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)' r'((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})' r'(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}' r'(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}' r'[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)' r'(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}' r':|:(:[A-F0-9]{1,4}){7})$') _ipv4_regex = re.compile(_ipv4_pattern) _ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE) _ipv4_regexb = re.compile(_ipv4_pattern.encode('ascii')) _ipv6_regexb = re.compile(_ipv6_pattern.encode('ascii'), flags=re.IGNORECASE) def is_ip_address(host): if host is None: return False if isinstance(host, str): if _ipv4_regex.match(host) or _ipv6_regex.match(host): return True else: return False elif isinstance(host, (bytes, bytearray, memoryview)): if _ipv4_regexb.match(host) or _ipv6_regexb.match(host): return True else: return False else: raise TypeError("{} [{}] is not a str or bytes" .format(host, type(host))) _cached_current_datetime = None _cached_formatted_datetime = None def rfc822_formatted_time(): global _cached_current_datetime global _cached_formatted_datetime now = int(time.time()) if now != _cached_current_datetime: # Weekday and month names for HTTP date/time 
formatting; # always English! # Tuples are constants stored in codeobject! _weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun") _monthname = ("", # Dummy so we can use 1-based month numbers "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec") year, month, day, hh, mm, ss, wd, y, z = time.gmtime(now) _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % ( _weekdayname[wd], day, _monthname[month], year, hh, mm, ss ) _cached_current_datetime = now return _cached_formatted_datetime def _weakref_handle(info): ref, name = info ob = ref() if ob is not None: with suppress(Exception): getattr(ob, name)() def weakref_handle(ob, name, timeout, loop, ceil_timeout=True): if timeout is not None and timeout > 0: when = loop.time() + timeout if ceil_timeout: when = ceil(when) return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name)) def call_later(cb, timeout, loop): if timeout is not None and timeout > 0: when = ceil(loop.time() + timeout) return loop.call_at(when, cb) class TimeoutHandle: """ Timeout handle """ def __init__(self, loop, timeout): self._timeout = timeout self._loop = loop self._callbacks = [] def register(self, callback, *args, **kwargs): self._callbacks.append((callback, args, kwargs)) def close(self): self._callbacks.clear() def start(self): if self._timeout is not None and self._timeout > 0: at = ceil(self._loop.time() + self._timeout) return self._loop.call_at(at, self.__call__) def timer(self): if self._timeout is not None and self._timeout > 0: timer = TimerContext(self._loop) self.register(timer.timeout) else: timer = TimerNoop() return timer def __call__(self): for cb, args, kwargs in self._callbacks: with suppress(Exception): cb(*args, **kwargs) self._callbacks.clear() class TimerNoop: def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): return False class TimerContext: """ Low resolution timeout context manager """ def __init__(self, loop): self._loop = loop 
self._tasks = [] self._cancelled = False def __enter__(self): task = current_task(loop=self._loop) if task is None: raise RuntimeError('Timeout context manager should be used ' 'inside a task') if self._cancelled: task.cancel() raise asyncio.TimeoutError from None self._tasks.append(task) return self def __exit__(self, exc_type, exc_val, exc_tb): if self._tasks: self._tasks.pop() if exc_type is asyncio.CancelledError and self._cancelled: raise asyncio.TimeoutError from None def timeout(self): if not self._cancelled: for task in set(self._tasks): task.cancel() self._cancelled = True class CeilTimeout(async_timeout.timeout): def __enter__(self): if self._timeout is not None: self._task = current_task(loop=self._loop) if self._task is None: raise RuntimeError( 'Timeout context manager should be used inside a task') self._cancel_handler = self._loop.call_at( ceil(self._loop.time() + self._timeout), self._cancel_task) return self class HeadersMixin: ATTRS = frozenset([ '_content_type', '_content_dict', '_stored_content_type']) _content_type = None _content_dict = None _stored_content_type = sentinel def _parse_content_type(self, raw): self._stored_content_type = raw if raw is None: # default value according to RFC 2616 self._content_type = 'application/octet-stream' self._content_dict = {} else: self._content_type, self._content_dict = cgi.parse_header(raw) @property def content_type(self, *, _CONTENT_TYPE=hdrs.CONTENT_TYPE): """The value of content part for Content-Type HTTP header.""" raw = self._headers.get(_CONTENT_TYPE) if self._stored_content_type != raw: self._parse_content_type(raw) return self._content_type @property def charset(self, *, _CONTENT_TYPE=hdrs.CONTENT_TYPE): """The value of charset part for Content-Type HTTP header.""" raw = self._headers.get(_CONTENT_TYPE) if self._stored_content_type != raw: self._parse_content_type(raw) return self._content_dict.get('charset') @property def content_length(self, *, _CONTENT_LENGTH=hdrs.CONTENT_LENGTH): """The 
value of Content-Length HTTP header.""" content_length = self._headers.get(_CONTENT_LENGTH) if content_length: return int(content_length) def set_result(fut, result): if not fut.done(): fut.set_result(result) def set_exception(fut, exc): if not fut.done(): fut.set_exception(exc) aiohttp-3.0.1/aiohttp/http.py0000666000000000000000000000241613240304665014324 0ustar 00000000000000import http.server import sys from . import __version__ from .http_exceptions import HttpProcessingError from .http_parser import (HttpParser, HttpRequestParser, HttpResponseParser, RawRequestMessage, RawResponseMessage) from .http_websocket import (WS_CLOSED_MESSAGE, WS_CLOSING_MESSAGE, WS_KEY, WebSocketError, WebSocketReader, WebSocketWriter, WSCloseCode, WSMessage, WSMsgType, ws_ext_gen, ws_ext_parse) from .http_writer import (HttpVersion, HttpVersion10, HttpVersion11, StreamWriter) __all__ = ( 'HttpProcessingError', 'RESPONSES', 'SERVER_SOFTWARE', # .http_writer 'StreamWriter', 'HttpVersion', 'HttpVersion10', 'HttpVersion11', # .http_parser 'HttpParser', 'HttpRequestParser', 'HttpResponseParser', 'RawRequestMessage', 'RawResponseMessage', # .http_websocket 'WS_CLOSED_MESSAGE', 'WS_CLOSING_MESSAGE', 'WS_KEY', 'WebSocketReader', 'WebSocketWriter', 'ws_ext_gen', 'ws_ext_parse', 'WSMessage', 'WebSocketError', 'WSMsgType', 'WSCloseCode', ) SERVER_SOFTWARE = 'Python/{0[0]}.{0[1]} aiohttp/{1}'.format( sys.version_info, __version__) RESPONSES = http.server.BaseHTTPRequestHandler.responses aiohttp-3.0.1/aiohttp/http_exceptions.py0000666000000000000000000000366513240304665016574 0ustar 00000000000000"""Low-level http related exceptions.""" __all__ = ('HttpProcessingError',) class HttpProcessingError(Exception): """HTTP error. Shortcut for raising HTTP errors with custom code, message and headers. code: HTTP Error code. message: (optional) Error message. 
headers: (optional) Headers to be sent in response, a list of pairs """ code = 0 message = '' headers = None def __init__(self, *, code=None, message='', headers=None): if code is not None: self.code = code self.headers = headers self.message = message super().__init__("%s, message='%s'" % (self.code, message)) class BadHttpMessage(HttpProcessingError): code = 400 message = 'Bad Request' def __init__(self, message, *, headers=None): super().__init__(message=message, headers=headers) class HttpBadRequest(BadHttpMessage): code = 400 message = 'Bad Request' class PayloadEncodingError(BadHttpMessage): """Base class for payload errors""" class ContentEncodingError(PayloadEncodingError): """Content encoding error.""" class TransferEncodingError(PayloadEncodingError): """transfer encoding error.""" class ContentLengthError(PayloadEncodingError): """Not enough data for satisfy content length header.""" class LineTooLong(BadHttpMessage): def __init__(self, line, limit='Unknown'): super().__init__( "Got more than %s bytes when reading %s." % (limit, line)) class InvalidHeader(BadHttpMessage): def __init__(self, hdr): if isinstance(hdr, bytes): hdr = hdr.decode('utf-8', 'surrogateescape') super().__init__('Invalid HTTP Header: {}'.format(hdr)) self.hdr = hdr class BadStatusLine(BadHttpMessage): def __init__(self, line=''): if not line: line = repr(line) self.args = line, self.line = line class InvalidURLError(BadHttpMessage): pass aiohttp-3.0.1/aiohttp/http_parser.py0000666000000000000000000005742713240304665015714 0ustar 00000000000000import collections import re import string import zlib from enum import IntEnum from multidict import CIMultiDict from yarl import URL from . 
import hdrs from .helpers import NO_EXTENSIONS from .http_exceptions import (BadStatusLine, ContentEncodingError, ContentLengthError, InvalidHeader, LineTooLong, TransferEncodingError) from .http_writer import HttpVersion, HttpVersion10 from .log import internal_logger from .streams import EMPTY_PAYLOAD, StreamReader try: import brotli HAS_BROTLI = True except ImportError: # pragma: no cover HAS_BROTLI = False __all__ = ( 'HttpParser', 'HttpRequestParser', 'HttpResponseParser', 'RawRequestMessage', 'RawResponseMessage') ASCIISET = set(string.printable) METHRE = re.compile('[A-Z0-9$-_.]+') VERSRE = re.compile(r'HTTP/(\d+).(\d+)') HDRRE = re.compile(rb'[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]') RawRequestMessage = collections.namedtuple( 'RawRequestMessage', ['method', 'path', 'version', 'headers', 'raw_headers', 'should_close', 'compression', 'upgrade', 'chunked', 'url']) RawResponseMessage = collections.namedtuple( 'RawResponseMessage', ['version', 'code', 'reason', 'headers', 'raw_headers', 'should_close', 'compression', 'upgrade', 'chunked']) class ParseState(IntEnum): PARSE_NONE = 0 PARSE_LENGTH = 1 PARSE_CHUNKED = 2 PARSE_UNTIL_EOF = 3 class ChunkState(IntEnum): PARSE_CHUNKED_SIZE = 0 PARSE_CHUNKED_CHUNK = 1 PARSE_CHUNKED_CHUNK_EOF = 2 PARSE_MAYBE_TRAILERS = 3 PARSE_TRAILERS = 4 class HttpParser: def __init__(self, protocol=None, loop=None, max_line_size=8190, max_headers=32768, max_field_size=8190, timer=None, code=None, method=None, readall=False, payload_exception=None, response_with_body=True, read_until_eof=False, auto_decompress=True): self.protocol = protocol self.loop = loop self.max_line_size = max_line_size self.max_headers = max_headers self.max_field_size = max_field_size self.timer = timer self.code = code self.method = method self.readall = readall self.payload_exception = payload_exception self.response_with_body = response_with_body self.read_until_eof = read_until_eof self._lines = [] self._tail = b'' self._upgraded = False self._payload = None 
self._payload_parser = None self._auto_decompress = auto_decompress def feed_eof(self): if self._payload_parser is not None: self._payload_parser.feed_eof() self._payload_parser = None else: # try to extract partial message if self._tail: self._lines.append(self._tail) if self._lines: if self._lines[-1] != '\r\n': self._lines.append('') try: return self.parse_message(self._lines) except Exception: return None def feed_data(self, data, SEP=b'\r\n', EMPTY=b'', CONTENT_LENGTH=hdrs.CONTENT_LENGTH, METH_CONNECT=hdrs.METH_CONNECT, SEC_WEBSOCKET_KEY1=hdrs.SEC_WEBSOCKET_KEY1): messages = [] if self._tail: data, self._tail = self._tail + data, b'' data_len = len(data) start_pos = 0 loop = self.loop while start_pos < data_len: # read HTTP message (request/response line + headers), \r\n\r\n # and split by lines if self._payload_parser is None and not self._upgraded: pos = data.find(SEP, start_pos) # consume \r\n if pos == start_pos and not self._lines: start_pos = pos + 2 continue if pos >= start_pos: # line found self._lines.append(data[start_pos:pos]) start_pos = pos + 2 # \r\n\r\n found if self._lines[-1] == EMPTY: try: msg = self.parse_message(self._lines) finally: self._lines.clear() # payload length length = msg.headers.get(CONTENT_LENGTH) if length is not None: try: length = int(length) except ValueError: raise InvalidHeader(CONTENT_LENGTH) if length < 0: raise InvalidHeader(CONTENT_LENGTH) # do not support old websocket spec if SEC_WEBSOCKET_KEY1 in msg.headers: raise InvalidHeader(SEC_WEBSOCKET_KEY1) self._upgraded = msg.upgrade method = getattr(msg, 'method', self.method) # calculate payload if ((length is not None and length > 0) or msg.chunked and not msg.upgrade): payload = StreamReader( self.protocol, timer=self.timer, loop=loop) payload_parser = HttpPayloadParser( payload, length=length, chunked=msg.chunked, method=method, compression=msg.compression, code=self.code, readall=self.readall, response_with_body=self.response_with_body, 
auto_decompress=self._auto_decompress) if not payload_parser.done: self._payload_parser = payload_parser elif method == METH_CONNECT: payload = StreamReader( self.protocol, timer=self.timer, loop=loop) self._upgraded = True self._payload_parser = HttpPayloadParser( payload, method=msg.method, compression=msg.compression, readall=True, auto_decompress=self._auto_decompress) else: if (getattr(msg, 'code', 100) >= 199 and length is None and self.read_until_eof): payload = StreamReader( self.protocol, timer=self.timer, loop=loop) payload_parser = HttpPayloadParser( payload, length=length, chunked=msg.chunked, method=method, compression=msg.compression, code=self.code, readall=True, response_with_body=self.response_with_body, auto_decompress=self._auto_decompress) if not payload_parser.done: self._payload_parser = payload_parser else: payload = EMPTY_PAYLOAD messages.append((msg, payload)) else: self._tail = data[start_pos:] data = EMPTY break # no parser, just store elif self._payload_parser is None and self._upgraded: assert not self._lines break # feed payload elif data and start_pos < data_len: assert not self._lines try: eof, data = self._payload_parser.feed_data( data[start_pos:]) except BaseException as exc: if self.payload_exception is not None: self._payload_parser.payload.set_exception( self.payload_exception(str(exc))) else: self._payload_parser.payload.set_exception(exc) eof = True data = b'' if eof: start_pos = 0 data_len = len(data) self._payload_parser = None continue else: break if data and start_pos < data_len: data = data[start_pos:] else: data = EMPTY return messages, self._upgraded, data def parse_headers(self, lines): """Parses RFC 5322 headers from a stream. Line continuations are supported. Returns list of header name and value pairs. Header name is in upper case. """ headers = CIMultiDict() raw_headers = [] lines_idx = 1 line = lines[1] line_count = len(lines) while line: header_length = len(line) # Parse initial header name : value pair. 
try: bname, bvalue = line.split(b':', 1) except ValueError: raise InvalidHeader(line) from None bname = bname.strip(b' \t') if HDRRE.search(bname): raise InvalidHeader(bname) # next line lines_idx += 1 line = lines[lines_idx] # consume continuation lines continuation = line and line[0] in (32, 9) # (' ', '\t') if continuation: bvalue = [bvalue] while continuation: header_length += len(line) if header_length > self.max_field_size: raise LineTooLong( 'request header field {}'.format( bname.decode("utf8", "xmlcharrefreplace")), self.max_field_size) bvalue.append(line) # next line lines_idx += 1 if lines_idx < line_count: line = lines[lines_idx] if line: continuation = line[0] in (32, 9) # (' ', '\t') else: line = b'' break bvalue = b''.join(bvalue) else: if header_length > self.max_field_size: raise LineTooLong( 'request header field {}'.format( bname.decode("utf8", "xmlcharrefreplace")), self.max_field_size) bvalue = bvalue.strip() name = bname.decode('utf-8', 'surrogateescape') value = bvalue.decode('utf-8', 'surrogateescape') headers.add(name, value) raw_headers.append((bname, bvalue)) close_conn = None encoding = None upgrade = False chunked = False raw_headers = tuple(raw_headers) # keep-alive conn = headers.get(hdrs.CONNECTION) if conn: v = conn.lower() if v == 'close': close_conn = True elif v == 'keep-alive': close_conn = False elif v == 'upgrade': upgrade = True # encoding enc = headers.get(hdrs.CONTENT_ENCODING) if enc: enc = enc.lower() if enc in ('gzip', 'deflate', 'br'): encoding = enc # chunking te = headers.get(hdrs.TRANSFER_ENCODING) if te and 'chunked' in te.lower(): chunked = True return headers, raw_headers, close_conn, encoding, upgrade, chunked class HttpRequestParserPy(HttpParser): """Read request status line. Exception .http_exceptions.BadStatusLine could be raised in case of any errors in status line. Returns RawRequestMessage. 
""" def parse_message(self, lines): if len(lines[0]) > self.max_line_size: raise LineTooLong( 'Status line is too long', self.max_line_size) # request line line = lines[0].decode('utf-8', 'surrogateescape') try: method, path, version = line.split(None, 2) except ValueError: raise BadStatusLine(line) from None # method method = method.upper() if not METHRE.match(method): raise BadStatusLine(method) # version try: if version.startswith('HTTP/'): n1, n2 = version[5:].split('.', 1) version = HttpVersion(int(n1), int(n2)) else: raise BadStatusLine(version) except Exception: raise BadStatusLine(version) # read headers headers, raw_headers, \ close, compression, upgrade, chunked = self.parse_headers(lines) if close is None: # then the headers weren't set in the request if version <= HttpVersion10: # HTTP 1.0 must asks to not close close = True else: # HTTP 1.1 must ask to close. close = False return RawRequestMessage( method, path, version, headers, raw_headers, close, compression, upgrade, chunked, URL(path)) class HttpResponseParserPy(HttpParser): """Read response status line and headers. BadStatusLine could be raised in case of any errors in status line. 
Returns RawResponseMessage""" def parse_message(self, lines): if len(lines[0]) > self.max_line_size: raise LineTooLong( 'Status line is too long', self.max_line_size) line = lines[0].decode('utf-8', 'surrogateescape') try: version, status = line.split(None, 1) except ValueError: raise BadStatusLine(line) from None else: try: status, reason = status.split(None, 1) except ValueError: reason = '' # version match = VERSRE.match(version) if match is None: raise BadStatusLine(line) version = HttpVersion(int(match.group(1)), int(match.group(2))) # The status code is a three-digit number try: status = int(status) except ValueError: raise BadStatusLine(line) from None if status > 999: raise BadStatusLine(line) # read headers headers, raw_headers, \ close, compression, upgrade, chunked = self.parse_headers(lines) if close is None: close = version <= HttpVersion10 return RawResponseMessage( version, status, reason.strip(), headers, raw_headers, close, compression, upgrade, chunked) class HttpPayloadParser: def __init__(self, payload, length=None, chunked=False, compression=None, code=None, method=None, readall=False, response_with_body=True, auto_decompress=True): self.payload = payload self._length = 0 self._type = ParseState.PARSE_NONE self._chunk = ChunkState.PARSE_CHUNKED_SIZE self._chunk_size = 0 self._chunk_tail = b'' self._auto_decompress = auto_decompress self.done = False # payload decompression wrapper if response_with_body and compression and self._auto_decompress: payload = DeflateBuffer(payload, compression) # payload parser if not response_with_body: # don't parse payload if it's not expected to be received self._type = ParseState.PARSE_NONE payload.feed_eof() self.done = True elif chunked: self._type = ParseState.PARSE_CHUNKED elif length is not None: self._type = ParseState.PARSE_LENGTH self._length = length if self._length == 0: payload.feed_eof() self.done = True else: if readall and code != 204: self._type = ParseState.PARSE_UNTIL_EOF elif method in ('PUT', 
'POST'): internal_logger.warning( # pragma: no cover 'Content-Length or Transfer-Encoding header is required') self._type = ParseState.PARSE_NONE payload.feed_eof() self.done = True self.payload = payload def feed_eof(self): if self._type == ParseState.PARSE_UNTIL_EOF: self.payload.feed_eof() elif self._type == ParseState.PARSE_LENGTH: raise ContentLengthError( "Not enough data for satisfy content length header.") elif self._type == ParseState.PARSE_CHUNKED: raise TransferEncodingError( "Not enough data for satisfy transfer length header.") def feed_data(self, chunk, SEP=b'\r\n', CHUNK_EXT=b';'): # Read specified amount of bytes if self._type == ParseState.PARSE_LENGTH: required = self._length chunk_len = len(chunk) if required >= chunk_len: self._length = required - chunk_len self.payload.feed_data(chunk, chunk_len) if self._length == 0: self.payload.feed_eof() return True, b'' else: self._length = 0 self.payload.feed_data(chunk[:required], required) self.payload.feed_eof() return True, chunk[required:] # Chunked transfer encoding parser elif self._type == ParseState.PARSE_CHUNKED: if self._chunk_tail: chunk = self._chunk_tail + chunk self._chunk_tail = b'' while chunk: # read next chunk size if self._chunk == ChunkState.PARSE_CHUNKED_SIZE: pos = chunk.find(SEP) if pos >= 0: i = chunk.find(CHUNK_EXT, 0, pos) if i >= 0: size = chunk[:i] # strip chunk-extensions else: size = chunk[:pos] try: size = int(bytes(size), 16) except ValueError: exc = TransferEncodingError(chunk[:pos]) self.payload.set_exception(exc) raise exc from None chunk = chunk[pos+2:] if size == 0: # eof marker self._chunk = ChunkState.PARSE_MAYBE_TRAILERS else: self._chunk = ChunkState.PARSE_CHUNKED_CHUNK self._chunk_size = size self.payload.begin_http_chunk_receiving() else: self._chunk_tail = chunk return False, None # read chunk and feed buffer if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK: required = self._chunk_size chunk_len = len(chunk) if required > chunk_len: self._chunk_size = required 
- chunk_len self.payload.feed_data(chunk, chunk_len) return False, None else: self._chunk_size = 0 self.payload.feed_data(chunk[:required], required) chunk = chunk[required:] self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF self.payload.end_http_chunk_receiving() # toss the CRLF at the end of the chunk if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF: if chunk[:2] == SEP: chunk = chunk[2:] self._chunk = ChunkState.PARSE_CHUNKED_SIZE else: self._chunk_tail = chunk return False, None # if stream does not contain trailer, after 0\r\n # we should get another \r\n otherwise # trailers needs to be skiped until \r\n\r\n if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS: if chunk[:2] == SEP: # end of stream self.payload.feed_eof() return True, chunk[2:] else: self._chunk = ChunkState.PARSE_TRAILERS # read and discard trailer up to the CRLF terminator if self._chunk == ChunkState.PARSE_TRAILERS: pos = chunk.find(SEP) if pos >= 0: chunk = chunk[pos+2:] self._chunk = ChunkState.PARSE_MAYBE_TRAILERS else: self._chunk_tail = chunk return False, None # Read all bytes until eof elif self._type == ParseState.PARSE_UNTIL_EOF: self.payload.feed_data(chunk, len(chunk)) return False, None class DeflateBuffer: """DeflateStream decompress stream and feed data into specified stream.""" def __init__(self, out, encoding): self.out = out self.size = 0 self.encoding = encoding self._started_decoding = False if encoding == 'br': if not HAS_BROTLI: # pragma: no cover raise ContentEncodingError( 'Can not decode content-encoding: brotli (br). 
' 'Please install `brotlipy`') self.decompressor = brotli.Decompressor() else: zlib_mode = (16 + zlib.MAX_WBITS if encoding == 'gzip' else -zlib.MAX_WBITS) self.decompressor = zlib.decompressobj(wbits=zlib_mode) def set_exception(self, exc): self.out.set_exception(exc) def feed_data(self, chunk, size): self.size += size try: chunk = self.decompressor.decompress(chunk) except Exception: if not self._started_decoding and self.encoding == 'deflate': self.decompressor = zlib.decompressobj() try: chunk = self.decompressor.decompress(chunk) except Exception: raise ContentEncodingError( 'Can not decode content-encoding: %s' % self.encoding) else: raise ContentEncodingError( 'Can not decode content-encoding: %s' % self.encoding) if chunk: self._started_decoding = True self.out.feed_data(chunk, len(chunk)) def feed_eof(self): chunk = self.decompressor.flush() if chunk or self.size > 0: self.out.feed_data(chunk, len(chunk)) if self.encoding != 'br' and not self.decompressor.eof: raise ContentEncodingError('deflate') self.out.feed_eof() def begin_http_chunk_receiving(self): self.out.begin_http_chunk_receiving() def end_http_chunk_receiving(self): self.out.end_http_chunk_receiving() HttpRequestParser = HttpRequestParserPy HttpResponseParser = HttpResponseParserPy try: from ._http_parser import HttpRequestParserC, HttpResponseParserC if not NO_EXTENSIONS: # pragma: no cover HttpRequestParser = HttpRequestParserC HttpResponseParser = HttpResponseParserC except ImportError: # pragma: no cover pass aiohttp-3.0.1/aiohttp/http_websocket.py0000666000000000000000000005276113240304665016402 0ustar 00000000000000"""WebSocket protocol versions 13 and 8.""" import collections import json import random import re import sys import zlib from enum import IntEnum from struct import Struct from .helpers import NO_EXTENSIONS, noop from .log import ws_logger __all__ = ('WS_CLOSED_MESSAGE', 'WS_CLOSING_MESSAGE', 'WS_KEY', 'WebSocketReader', 'WebSocketWriter', 'WSMessage', 'WebSocketError', 
'WSMsgType', 'WSCloseCode') class WSCloseCode(IntEnum): OK = 1000 GOING_AWAY = 1001 PROTOCOL_ERROR = 1002 UNSUPPORTED_DATA = 1003 INVALID_TEXT = 1007 POLICY_VIOLATION = 1008 MESSAGE_TOO_BIG = 1009 MANDATORY_EXTENSION = 1010 INTERNAL_ERROR = 1011 SERVICE_RESTART = 1012 TRY_AGAIN_LATER = 1013 ALLOWED_CLOSE_CODES = {int(i) for i in WSCloseCode} class WSMsgType(IntEnum): # websocket spec types CONTINUATION = 0x0 TEXT = 0x1 BINARY = 0x2 PING = 0x9 PONG = 0xa CLOSE = 0x8 # aiohttp specific types CLOSING = 0x100 CLOSED = 0x101 ERROR = 0x102 text = TEXT binary = BINARY ping = PING pong = PONG close = CLOSE closing = CLOSING closed = CLOSED error = ERROR WS_KEY = b'258EAFA5-E914-47DA-95CA-C5AB0DC85B11' UNPACK_LEN2 = Struct('!H').unpack_from UNPACK_LEN3 = Struct('!Q').unpack_from UNPACK_CLOSE_CODE = Struct('!H').unpack PACK_LEN1 = Struct('!BB').pack PACK_LEN2 = Struct('!BBH').pack PACK_LEN3 = Struct('!BBQ').pack PACK_CLOSE_CODE = Struct('!H').pack MSG_SIZE = 2 ** 14 DEFAULT_LIMIT = 2 ** 16 _WSMessageBase = collections.namedtuple('_WSMessageBase', ['type', 'data', 'extra']) class WSMessage(_WSMessageBase): def json(self, *, loads=json.loads): """Return parsed JSON data. .. versionadded:: 0.22 """ return loads(self.data) WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None) WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None) class WebSocketError(Exception): """WebSocket protocol parser error.""" def __init__(self, code, message): self.code = code super().__init__(message) class WSHandshakeError(Exception): """WebSocket protocol handshake error.""" native_byteorder = sys.byteorder # Used by _websocket_mask_python _XOR_TABLE = [bytes(a ^ b for a in range(256)) for b in range(256)] def _websocket_mask_python(mask, data): """Websocket masking function. `mask` is a `bytes` object of length 4; `data` is a `bytearray` object of any length. The contents of `data` are masked with `mask`, as specified in section 5.3 of RFC 6455. 
Note that this function mutates the `data` argument. This pure-python implementation may be replaced by an optimized version when available. """ assert isinstance(data, bytearray), data assert len(mask) == 4, mask if data: a, b, c, d = (_XOR_TABLE[n] for n in mask) data[::4] = data[::4].translate(a) data[1::4] = data[1::4].translate(b) data[2::4] = data[2::4].translate(c) data[3::4] = data[3::4].translate(d) if NO_EXTENSIONS: _websocket_mask = _websocket_mask_python else: try: from ._websocket import _websocket_mask_cython _websocket_mask = _websocket_mask_cython except ImportError: # pragma: no cover _websocket_mask = _websocket_mask_python _WS_DEFLATE_TRAILING = bytes([0x00, 0x00, 0xff, 0xff]) _WS_EXT_RE = re.compile(r'^(?:;\s*(?:' r'(server_no_context_takeover)|' r'(client_no_context_takeover)|' r'(server_max_window_bits(?:=(\d+))?)|' r'(client_max_window_bits(?:=(\d+))?)))*$') _WS_EXT_RE_SPLIT = re.compile(r'permessage-deflate([^,]+)?') def ws_ext_parse(extstr, isserver=False): if not extstr: return 0, False compress = 0 notakeover = False for ext in _WS_EXT_RE_SPLIT.finditer(extstr): defext = ext.group(1) # Return compress = 15 when get `permessage-deflate` if not defext: compress = 15 break match = _WS_EXT_RE.match(defext) if match: compress = 15 if isserver: # Server never fail to detect compress handshake. 
# Server does not need to send max wbit to client if match.group(4): compress = int(match.group(4)) # Group3 must match if group4 matches # Compress wbit 8 does not support in zlib # If compress level not support, # CONTINUE to next extension if compress > 15 or compress < 9: compress = 0 continue if match.group(1): notakeover = True # Ignore regex group 5 & 6 for client_max_window_bits break else: if match.group(6): compress = int(match.group(6)) # Group5 must match if group6 matches # Compress wbit 8 does not support in zlib # If compress level not support, # FAIL the parse progress if compress > 15 or compress < 9: raise WSHandshakeError('Invalid window size') if match.group(2): notakeover = True # Ignore regex group 5 & 6 for client_max_window_bits break # Return Fail if client side and not match elif not isserver: raise WSHandshakeError('Extension for deflate not supported' + ext.group(1)) return compress, notakeover def ws_ext_gen(compress=15, isserver=False, server_notakeover=False): # client_notakeover=False not used for server # compress wbit 8 does not support in zlib if compress < 9 or compress > 15: raise ValueError('Compress wbits must between 9 and 15, ' 'zlib does not support wbits=8') enabledext = ['permessage-deflate'] if not isserver: enabledext.append('client_max_window_bits') if compress < 15: enabledext.append('server_max_window_bits=' + str(compress)) if server_notakeover: enabledext.append('server_no_context_takeover') # if client_notakeover: # enabledext.append('client_no_context_takeover') return '; '.join(enabledext) class WSParserState(IntEnum): READ_HEADER = 1 READ_PAYLOAD_LENGTH = 2 READ_PAYLOAD_MASK = 3 READ_PAYLOAD = 4 class WebSocketReader: def __init__(self, queue, compress=True): self.queue = queue self._exc = None self._partial = [] self._state = WSParserState.READ_HEADER self._opcode = None self._frame_fin = False self._frame_opcode = None self._frame_payload = bytearray() self._tail = b'' self._has_mask = False self._frame_mask 
= None self._payload_length = 0 self._payload_length_flag = 0 self._compressed = None self._decompressobj = None self._compress = compress def feed_eof(self): self.queue.feed_eof() def feed_data(self, data): if self._exc: return True, data try: return self._feed_data(data) except Exception as exc: self._exc = exc self.queue.set_exception(exc) return True, b'' def _feed_data(self, data): for fin, opcode, payload, compressed in self.parse_frame(data): if compressed and not self._decompressobj: self._decompressobj = zlib.decompressobj(wbits=-zlib.MAX_WBITS) if opcode == WSMsgType.CLOSE: if len(payload) >= 2: close_code = UNPACK_CLOSE_CODE(payload[:2])[0] if (close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES): raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, 'Invalid close code: {}'.format(close_code)) try: close_message = payload[2:].decode('utf-8') except UnicodeDecodeError as exc: raise WebSocketError( WSCloseCode.INVALID_TEXT, 'Invalid UTF-8 text message') from exc msg = WSMessage(WSMsgType.CLOSE, close_code, close_message) elif payload: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, 'Invalid close frame: {} {} {!r}'.format( fin, opcode, payload)) else: msg = WSMessage(WSMsgType.CLOSE, 0, '') self.queue.feed_data(msg, 0) elif opcode == WSMsgType.PING: self.queue.feed_data( WSMessage(WSMsgType.PING, payload, ''), len(payload)) elif opcode == WSMsgType.PONG: self.queue.feed_data( WSMessage(WSMsgType.PONG, payload, ''), len(payload)) elif opcode not in ( WSMsgType.TEXT, WSMsgType.BINARY) and self._opcode is None: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, "Unexpected opcode={!r}".format(opcode)) else: # load text/binary if not fin: # got partial frame payload if opcode != WSMsgType.CONTINUATION: self._opcode = opcode self._partial.append(payload) else: # previous frame was non finished # we should get continuation opcode if self._partial: if opcode != WSMsgType.CONTINUATION: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, 'The opcode in non-fin 
frame is expected ' 'to be zero, got {!r}'.format(opcode)) if opcode == WSMsgType.CONTINUATION: opcode = self._opcode self._opcode = None self._partial.append(payload) payload_merged = b''.join(self._partial) # Decompress process must to be done after all packets # received. if compressed: payload_merged = self._decompressobj.decompress( payload_merged + _WS_DEFLATE_TRAILING) self._partial.clear() if opcode == WSMsgType.TEXT: try: text = payload_merged.decode('utf-8') self.queue.feed_data( WSMessage(WSMsgType.TEXT, text, ''), len(text)) except UnicodeDecodeError as exc: raise WebSocketError( WSCloseCode.INVALID_TEXT, 'Invalid UTF-8 text message') from exc else: self.queue.feed_data( WSMessage(WSMsgType.BINARY, payload_merged, ''), len(payload_merged)) return False, b'' def parse_frame(self, buf): """Return the next frame from the socket.""" frames = [] if self._tail: buf, self._tail = self._tail + buf, b'' start_pos = 0 buf_length = len(buf) while True: # read header if self._state == WSParserState.READ_HEADER: if buf_length - start_pos >= 2: data = buf[start_pos:start_pos+2] start_pos += 2 first_byte, second_byte = data fin = (first_byte >> 7) & 1 rsv1 = (first_byte >> 6) & 1 rsv2 = (first_byte >> 5) & 1 rsv3 = (first_byte >> 4) & 1 opcode = first_byte & 0xf # frame-fin = %x0 ; more frames of this message follow # / %x1 ; final frame of this message # frame-rsv1 = %x0 ; # 1 bit, MUST be 0 unless negotiated otherwise # frame-rsv2 = %x0 ; # 1 bit, MUST be 0 unless negotiated otherwise # frame-rsv3 = %x0 ; # 1 bit, MUST be 0 unless negotiated otherwise # # Remove rsv1 from this test for deflate development if rsv2 or rsv3 or (rsv1 and not self._compress): raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, 'Received frame with non-zero reserved bits') if opcode > 0x7 and fin == 0: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, 'Received fragmented control frame') has_mask = (second_byte >> 7) & 1 length = second_byte & 0x7f # Control frames MUST have a payload # 
length of 125 bytes or less if opcode > 0x7 and length > 125: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, 'Control frame payload cannot be ' 'larger than 125 bytes') # Set compress status if last package is FIN # OR set compress status if this is first fragment # Raise error if not first fragment with rsv1 = 0x1 if self._frame_fin or self._compressed is None: self._compressed = True if rsv1 else False elif rsv1: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, 'Received frame with non-zero reserved bits') self._frame_fin = fin self._frame_opcode = opcode self._has_mask = has_mask self._payload_length_flag = length self._state = WSParserState.READ_PAYLOAD_LENGTH else: break # read payload length if self._state == WSParserState.READ_PAYLOAD_LENGTH: length = self._payload_length_flag if length == 126: if buf_length - start_pos >= 2: data = buf[start_pos:start_pos+2] start_pos += 2 length = UNPACK_LEN2(data)[0] self._payload_length = length self._state = ( WSParserState.READ_PAYLOAD_MASK if self._has_mask else WSParserState.READ_PAYLOAD) else: break elif length > 126: if buf_length - start_pos >= 8: data = buf[start_pos:start_pos+8] start_pos += 8 length = UNPACK_LEN3(data)[0] self._payload_length = length self._state = ( WSParserState.READ_PAYLOAD_MASK if self._has_mask else WSParserState.READ_PAYLOAD) else: break else: self._payload_length = length self._state = ( WSParserState.READ_PAYLOAD_MASK if self._has_mask else WSParserState.READ_PAYLOAD) # read payload mask if self._state == WSParserState.READ_PAYLOAD_MASK: if buf_length - start_pos >= 4: self._frame_mask = buf[start_pos:start_pos+4] start_pos += 4 self._state = WSParserState.READ_PAYLOAD else: break if self._state == WSParserState.READ_PAYLOAD: length = self._payload_length payload = self._frame_payload chunk_len = buf_length - start_pos if length >= chunk_len: self._payload_length = length - chunk_len payload.extend(buf[start_pos:]) start_pos = buf_length else: self._payload_length = 0 
payload.extend(buf[start_pos:start_pos+length]) start_pos = start_pos + length if self._payload_length == 0: if self._has_mask: _websocket_mask(self._frame_mask, payload) frames.append(( self._frame_fin, self._frame_opcode, payload, self._compressed)) self._frame_payload = bytearray() self._state = WSParserState.READ_HEADER else: break self._tail = buf[start_pos:] return frames class WebSocketWriter: def __init__(self, protocol, transport, *, use_mask=False, limit=DEFAULT_LIMIT, random=random.Random(), compress=0, notakeover=False): self.protocol = protocol self.transport = transport self.use_mask = use_mask self.randrange = random.randrange self.compress = compress self.notakeover = notakeover self._closing = False self._limit = limit self._output_size = 0 self._compressobj = None def _send_frame(self, message, opcode, compress=None): """Send a frame over the websocket with message as its payload.""" if self._closing: ws_logger.warning('websocket connection is closing.') rsv = 0 # Only compress larger packets (disabled) # Does small packet needs to be compressed? 
# if self.compress and opcode < 8 and len(message) > 124: if (compress or self.compress) and opcode < 8: if compress: # Do not set self._compress if compressing is for this frame compressobj = zlib.compressobj(wbits=-compress) else: # self.compress if not self._compressobj: self._compressobj = zlib.compressobj(wbits=-self.compress) compressobj = self._compressobj message = compressobj.compress(message) message = message + compressobj.flush( zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH) if message.endswith(_WS_DEFLATE_TRAILING): message = message[:-4] rsv = rsv | 0x40 msg_length = len(message) use_mask = self.use_mask if use_mask: mask_bit = 0x80 else: mask_bit = 0 if msg_length < 126: header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit) elif msg_length < (1 << 16): header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length) else: header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length) if use_mask: mask = self.randrange(0, 0xffffffff) mask = mask.to_bytes(4, 'big') message = bytearray(message) _websocket_mask(mask, message) self.transport.write(header + mask + message) self._output_size += len(header) + len(mask) + len(message) else: if len(message) > MSG_SIZE: self.transport.write(header) self.transport.write(message) else: self.transport.write(header + message) self._output_size += len(header) + len(message) if self._output_size > self._limit: self._output_size = 0 return self.protocol._drain_helper() return noop() def pong(self, message=b''): """Send pong message.""" if isinstance(message, str): message = message.encode('utf-8') return self._send_frame(message, WSMsgType.PONG) def ping(self, message=b''): """Send ping message.""" if isinstance(message, str): message = message.encode('utf-8') return self._send_frame(message, WSMsgType.PING) def send(self, message, binary=False, compress=None): """Send a frame over the websocket with message as its payload.""" if isinstance(message, str): message = 
class StreamWriter(AbstractStreamWriter):
    """Write an HTTP message body to an asyncio transport.

    Supports optional chunked transfer encoding and zlib/gzip
    compression, tracks buffered/total byte counts, and cooperates with
    the protocol's flow control through drain().
    """

    def __init__(self, protocol, transport, loop):
        self._protocol = protocol
        self._transport = transport

        self.loop = loop
        self.length = None         # remaining Content-Length budget, or None
        self.chunked = False       # whether chunked transfer encoding is on
        self.buffer_size = 0       # bytes written since the last drain
        self.output_size = 0       # total bytes ever written by this writer

        self._eof = False
        self._compress = None      # zlib compressor once compression enabled
        self._drain_waiter = None

    @property
    def transport(self):
        return self._transport

    @property
    def protocol(self):
        return self._protocol

    def enable_chunking(self):
        """Switch this writer to chunked transfer encoding."""
        self.chunked = True

    def enable_compression(self, encoding='deflate'):
        """Compress the body with 'gzip' or raw 'deflate'."""
        # wbits of 16+MAX_WBITS produces a gzip wrapper; a negative value
        # produces a raw deflate stream without zlib header/trailer.
        zlib_mode = (16 + zlib.MAX_WBITS
                     if encoding == 'gzip' else -zlib.MAX_WBITS)
        self._compress = zlib.compressobj(wbits=zlib_mode)

    def _write(self, chunk):
        # Account for the bytes before attempting the transport write.
        size = len(chunk)
        self.buffer_size += size
        self.output_size += size

        if self._transport is None or self._transport.is_closing():
            raise asyncio.CancelledError('Cannot write to closing transport')
        self._transport.write(chunk)

    def write(self, chunk, *, drain=True, LIMIT=64*1024):
        """Writes chunk of data to a stream.

        write_eof() indicates end of stream.
        writer can't be used after write_eof() method being called.
        write() return drain future.
        """
        if self._compress is not None:
            chunk = self._compress.compress(chunk)
            if not chunk:
                # Compressor buffered everything; nothing to send yet.
                return noop()

        if self.length is not None:
            chunk_len = len(chunk)
            if self.length >= chunk_len:
                self.length = self.length - chunk_len
            else:
                # Truncate to the declared Content-Length; extra bytes
                # are silently dropped.
                chunk = chunk[:self.length]
                self.length = 0
                if not chunk:
                    return noop()

        if chunk:
            if self.chunked:
                # Frame the data as a single HTTP chunk: size line + CRLF.
                chunk_len = ('%x\r\n' % len(chunk)).encode('ascii')
                chunk = chunk_len + chunk + b'\r\n'

            self._write(chunk)

            if self.buffer_size > LIMIT and drain:
                self.buffer_size = 0
                return self.drain()

        return noop()

    def write_headers(self, status_line, headers, SEP=': ', END='\r\n'):
        """Write request/response status and headers."""
        # status + headers
        headers = status_line + ''.join(
            [k + SEP + v + END for k, v in headers.items()])
        headers = headers.encode('utf-8') + b'\r\n'
        self._write(headers)

    async def write_eof(self, chunk=b''):
        """Finish the body: flush compression, emit the terminating chunk."""
        if self._eof:
            return

        if self._compress:
            if chunk:
                chunk = self._compress.compress(chunk)

            # Flush whatever the compressor still holds.
            chunk = chunk + self._compress.flush()
            if chunk and self.chunked:
                chunk_len = ('%x\r\n' % len(chunk)).encode('ascii')
                chunk = chunk_len + chunk + b'\r\n0\r\n\r\n'
        else:
            if self.chunked:
                if chunk:
                    chunk_len = ('%x\r\n' % len(chunk)).encode('ascii')
                    chunk = chunk_len + chunk + b'\r\n0\r\n\r\n'
                else:
                    # No trailing data: just the zero-length final chunk.
                    chunk = b'0\r\n\r\n'

        if chunk:
            self._write(chunk)

        await self.drain()

        self._eof = True
        self._transport = None

    async def drain(self):
        """Flush the write buffer.

        The intended use is to write

          await w.write(data)
          await w.drain()
        """
        if self._protocol.transport is not None:
            await self._protocol._drain_helper()
""" def __init__(self, loop): self._loop = loop self._exc = None self._event = asyncio.Event(loop=loop) self._waiters = collections.deque() def set(self, exc=None): self._exc = exc self._event.set() async def wait(self): waiter = self._loop.create_task(self._event.wait()) self._waiters.append(waiter) try: val = await waiter finally: self._waiters.remove(waiter) if self._exc is not None: raise self._exc return val def cancel(self): """ Cancel all waiters """ for waiter in self._waiters: waiter.cancel() aiohttp-3.0.1/aiohttp/log.py0000666000000000000000000000050613240304665014124 0ustar 00000000000000import logging access_logger = logging.getLogger('aiohttp.access') client_logger = logging.getLogger('aiohttp.client') internal_logger = logging.getLogger('aiohttp.internal') server_logger = logging.getLogger('aiohttp.server') web_logger = logging.getLogger('aiohttp.web') ws_logger = logging.getLogger('aiohttp.websocket') aiohttp-3.0.1/aiohttp/multipart.py0000666000000000000000000007172113240304665015373 0ustar 00000000000000import base64 import binascii import json import re import uuid import warnings import zlib from collections import Mapping, Sequence, deque from urllib.parse import parse_qsl, unquote, urlencode from multidict import CIMultiDict from .hdrs import (CONTENT_DISPOSITION, CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TRANSFER_ENCODING, CONTENT_TYPE) from .helpers import CHAR, TOKEN, parse_mimetype, reify from .http import HttpParser from .payload import (BytesPayload, LookupError, Payload, StringPayload, get_payload, payload_type) __all__ = ('MultipartReader', 'MultipartWriter', 'BodyPartReader', 'BadContentDispositionHeader', 'BadContentDispositionParam', 'parse_content_disposition', 'content_disposition_filename') class BadContentDispositionHeader(RuntimeWarning): pass class BadContentDispositionParam(RuntimeWarning): pass def parse_content_disposition(header): def is_token(string): return string and TOKEN >= set(string) def is_quoted(string): return 
string[0] == string[-1] == '"' def is_rfc5987(string): return is_token(string) and string.count("'") == 2 def is_extended_param(string): return string.endswith('*') def is_continuous_param(string): pos = string.find('*') + 1 if not pos: return False substring = string[pos:-1] if string.endswith('*') else string[pos:] return substring.isdigit() def unescape(text, *, chars=''.join(map(re.escape, CHAR))): return re.sub('\\\\([{}])'.format(chars), '\\1', text) if not header: return None, {} disptype, *parts = header.split(';') if not is_token(disptype): warnings.warn(BadContentDispositionHeader(header)) return None, {} params = {} while parts: item = parts.pop(0) if '=' not in item: warnings.warn(BadContentDispositionHeader(header)) return None, {} key, value = item.split('=', 1) key = key.lower().strip() value = value.lstrip() if key in params: warnings.warn(BadContentDispositionHeader(header)) return None, {} if not is_token(key): warnings.warn(BadContentDispositionParam(item)) continue elif is_continuous_param(key): if is_quoted(value): value = unescape(value[1:-1]) elif not is_token(value): warnings.warn(BadContentDispositionParam(item)) continue elif is_extended_param(key): if is_rfc5987(value): encoding, _, value = value.split("'", 2) encoding = encoding or 'utf-8' else: warnings.warn(BadContentDispositionParam(item)) continue try: value = unquote(value, encoding, 'strict') except UnicodeDecodeError: # pragma: nocover warnings.warn(BadContentDispositionParam(item)) continue else: failed = True if is_quoted(value): failed = False value = unescape(value[1:-1].lstrip('\\/')) elif is_token(value): failed = False elif parts: # maybe just ; in filename, in any case this is just # one case fix, for proper fix we need to redesign parser _value = '%s;%s' % (value, parts[0]) if is_quoted(_value): parts.pop(0) value = unescape(_value[1:-1].lstrip('\\/')) failed = False if failed: warnings.warn(BadContentDispositionHeader(header)) return None, {} params[key] = value return 
def content_disposition_filename(params, name='filename'):
    """Return the value of *name* from parsed Content-Disposition params.

    Prefers the RFC 5987 extended form (``filename*``), then the plain
    form, then RFC 2231 numbered continuations (``filename*0``,
    ``filename*1``, ...), decoding percent-encoded continuation values
    when an encoding prefix is present.  Returns None when nothing
    usable is found.
    """
    ext_name = '%s*' % name
    if not params:
        return None
    if ext_name in params:
        # Extended parameters were already decoded by the header parser.
        return params[ext_name]
    if name in params:
        return params[name]

    # Collect continuation segments in numeric order: name*0, name*1, ...
    pieces = []
    candidates = sorted(
        (key, value) for key, value in params.items()
        if key.startswith(ext_name))
    for num, (key, value) in enumerate(candidates):
        _, tail = key.split('*', 1)
        if tail.endswith('*'):
            tail = tail[:-1]
        if tail != str(num):
            # Numbering gap: ignore everything from here on.
            break
        pieces.append(value)
    if not pieces:
        return None

    joined = ''.join(pieces)
    if "'" not in joined:
        return joined
    # RFC 5987 layout: encoding'language'percent-encoded-value
    encoding, _, joined = joined.split("'", 2)
    return unquote(joined, encoding or 'utf-8', 'strict')
None: raise StopAsyncIteration # NOQA return part async def next(self): item = await self.read() if not item: return None return item async def read(self, *, decode=False): """Reads body part data. decode: Decodes data following by encoding method from Content-Encoding header. If it missed data remains untouched """ if self._at_eof: return b'' data = bytearray() while not self._at_eof: data.extend((await self.read_chunk(self.chunk_size))) if decode: return self.decode(data) return data async def read_chunk(self, size=chunk_size): """Reads body part content chunk of the specified size. size: chunk size """ if self._at_eof: return b'' if self._length: chunk = await self._read_chunk_from_length(size) else: chunk = await self._read_chunk_from_stream(size) self._read_bytes += len(chunk) if self._read_bytes == self._length: self._at_eof = True if self._at_eof: clrf = await self._content.readline() assert b'\r\n' == clrf, \ 'reader did not read all the data or it is malformed' return chunk async def _read_chunk_from_length(self, size): # Reads body part content chunk of the specified size. # The body part must has Content-Length header with proper value. assert self._length is not None, \ 'Content-Length required for chunked read' chunk_size = min(size, self._length - self._read_bytes) chunk = await self._content.read(chunk_size) return chunk async def _read_chunk_from_stream(self, size): # Reads content chunk of body part with unknown length. # The Content-Length header for body part is not necessary. 
    async def readline(self):
        """Read one line of the body part.

        Returns b'' once this part's boundary line has been reached; the
        boundary itself is pushed back for the multipart reader.
        """
        if self._at_eof:
            return b''

        if self._unread:
            # A line pushed back by a previous call takes priority.
            line = self._unread.popleft()
        else:
            line = await self._content.readline()

        if line.startswith(self._boundary):
            # the very last boundary may not come with \r\n,
            # so set single rules for everyone
            sline = line.rstrip(b'\r\n')
            boundary = self._boundary
            last_boundary = self._boundary + b'--'
            # ensure that we read exactly the boundary, not something alike
            if sline == boundary or sline == last_boundary:
                self._at_eof = True
                self._unread.append(line)
                return b''
        else:
            # Peek one line ahead: if it is the boundary, the CRLF ending
            # the current line belongs to the boundary delimiter, not the
            # payload, so trim it and push the boundary line back.
            next_line = await self._content.readline()
            if next_line.startswith(self._boundary):
                line = line[:-2]  # strip CRLF but only once
            self._unread.append(next_line)

        return line
https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send # NOQA encoding = encoding or self.get_charset(default='utf-8') return data.decode(encoding) async def json(self, *, encoding=None): """Like read(), but assumes that body parts contains JSON data.""" data = await self.read(decode=True) if not data: return None encoding = encoding or self.get_charset(default='utf-8') return json.loads(data.decode(encoding)) async def form(self, *, encoding=None): """Like read(), but assumes that body parts contains form urlencoded data. """ data = await self.read(decode=True) if not data: return None encoding = encoding or self.get_charset(default='utf-8') return parse_qsl(data.rstrip().decode(encoding), keep_blank_values=True, encoding=encoding) def at_eof(self): """Returns True if the boundary was reached or False otherwise.""" return self._at_eof def decode(self, data): """Decodes data according the specified Content-Encoding or Content-Transfer-Encoding headers value. """ if CONTENT_TRANSFER_ENCODING in self.headers: data = self._decode_content_transfer(data) if CONTENT_ENCODING in self.headers: return self._decode_content(data) return data def _decode_content(self, data): encoding = self.headers[CONTENT_ENCODING].lower() if encoding == 'deflate': return zlib.decompress(data, -zlib.MAX_WBITS) elif encoding == 'gzip': return zlib.decompress(data, 16 + zlib.MAX_WBITS) elif encoding == 'identity': return data else: raise RuntimeError('unknown content encoding: {}'.format(encoding)) def _decode_content_transfer(self, data): encoding = self.headers[CONTENT_TRANSFER_ENCODING].lower() if encoding == 'base64': return base64.b64decode(data) elif encoding == 'quoted-printable': return binascii.a2b_qp(data) elif encoding in ('binary', '8bit', '7bit'): return data else: raise RuntimeError('unknown content transfer encoding: {}' ''.format(encoding)) def get_charset(self, default=None): """Returns charset parameter from Content-Type header or default.""" ctype = 
@payload_type(BodyPartReader)
class BodyPartReaderPayload(Payload):
    """Adapter that lets a BodyPartReader be sent as an outgoing payload."""

    def __init__(self, value, *args, **kwargs):
        super().__init__(value, *args, **kwargs)

        params = {}
        if value.name is not None:
            params['name'] = value.name
        if value.filename is not None:
            # Bug fix: the original stored value.name here, silently
            # replacing the part's real filename with its field name.
            params['filename'] = value.filename

        if params:
            self.set_content_disposition('attachment', **params)

    async def write(self, writer):
        """Stream the wrapped body part to *writer* in 64 KiB chunks."""
        field = self._value
        chunk = await field.read_chunk(size=2**16)
        while chunk:
            # NOTE(review): writer.write is not awaited here, matching the
            # original code — confirm the writer's write() is synchronous.
            writer.write(field.decode(chunk))
            chunk = await field.read_chunk(size=2**16)
part_reader_cls = BodyPartReader def __init__(self, headers, content): self.headers = headers self._boundary = ('--' + self._get_boundary()).encode() self._content = content self._last_part = None self._at_eof = False self._at_bof = True self._unread = [] def __aiter__(self): return self async def __anext__(self): part = await self.next() if part is None: raise StopAsyncIteration # NOQA return part @classmethod def from_response(cls, response): """Constructs reader instance from HTTP response. :param response: :class:`~aiohttp.client.ClientResponse` instance """ obj = cls.response_wrapper_cls(response, cls(response.headers, response.content)) return obj def at_eof(self): """Returns True if the final boundary was reached or False otherwise. """ return self._at_eof async def next(self): """Emits the next multipart body part.""" # So, if we're at BOF, we need to skip till the boundary. if self._at_eof: return await self._maybe_release_last_part() if self._at_bof: await self._read_until_first_boundary() self._at_bof = False else: await self._read_boundary() if self._at_eof: # we just read the last boundary, nothing to do there return self._last_part = await self.fetch_next_part() return self._last_part async def release(self): """Reads all the body parts to the void till the final boundary.""" while not self._at_eof: item = await self.next() if item is None: break await item.release() async def fetch_next_part(self): """Returns the next body part reader.""" headers = await self._read_headers() return self._get_part_reader(headers) def _get_part_reader(self, headers): """Dispatches the response by the `Content-Type` header, returning suitable reader instance. 
:param dict headers: Response headers """ ctype = headers.get(CONTENT_TYPE, '') mimetype = parse_mimetype(ctype) if mimetype.type == 'multipart': if self.multipart_reader_cls is None: return type(self)(headers, self._content) return self.multipart_reader_cls(headers, self._content) else: return self.part_reader_cls(self._boundary, headers, self._content) def _get_boundary(self): mimetype = parse_mimetype(self.headers[CONTENT_TYPE]) assert mimetype.type == 'multipart', ( 'multipart/* content type expected' ) if 'boundary' not in mimetype.parameters: raise ValueError('boundary missed for Content-Type: %s' % self.headers[CONTENT_TYPE]) boundary = mimetype.parameters['boundary'] if len(boundary) > 70: raise ValueError('boundary %r is too long (70 chars max)' % boundary) return boundary async def _readline(self): if self._unread: return self._unread.pop() return await self._content.readline() async def _read_until_first_boundary(self): while True: chunk = await self._readline() if chunk == b'': raise ValueError("Could not find starting boundary %r" % (self._boundary)) chunk = chunk.rstrip() if chunk == self._boundary: return elif chunk == self._boundary + b'--': self._at_eof = True return async def _read_boundary(self): chunk = (await self._readline()).rstrip() if chunk == self._boundary: pass elif chunk == self._boundary + b'--': self._at_eof = True epilogue = await self._readline() next_line = await self._readline() # the epilogue is expected and then either the end of input or the # parent multipart boundary, if the parent boundary is found then # it should be marked as unread and handed to the parent for # processing if next_line[:2] == b'--': self._unread.append(next_line) # otherwise the request is likely missing an epilogue and both # lines should be passed to the parent for processing # (this handles the old behavior gracefully) else: self._unread.extend([next_line, epilogue]) else: raise ValueError('Invalid boundary %r, expected %r' % (chunk, self._boundary)) 
async def _read_headers(self): lines = [b''] while True: chunk = await self._content.readline() chunk = chunk.strip() lines.append(chunk) if not chunk: break parser = HttpParser() headers, *_ = parser.parse_headers(lines) return headers async def _maybe_release_last_part(self): """Ensures that the last read body part is read completely.""" if self._last_part is not None: if not self._last_part.at_eof(): await self._last_part.release() self._unread.extend(self._last_part._unread) self._last_part = None class MultipartWriter(Payload): """Multipart body writer.""" def __init__(self, subtype='mixed', boundary=None): boundary = boundary if boundary is not None else uuid.uuid4().hex # The underlying Payload API demands a str (utf-8), not bytes, # so we need to ensure we don't lose anything during conversion. # As a result, require the boundary to be ASCII only. # In both situations. try: self._boundary = boundary.encode('ascii') except UnicodeEncodeError: raise ValueError('boundary should contain ASCII only chars') \ from None ctype = ('multipart/{}; boundary={}' .format(subtype, self._boundary_value)) super().__init__(None, content_type=ctype) self._parts = [] self._headers = CIMultiDict() self._headers[CONTENT_TYPE] = self.content_type def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): pass def __iter__(self): return iter(self._parts) def __len__(self): return len(self._parts) _valid_tchar_regex = re.compile(br"\A[!#$%&'*+\-.^_`|~\w]+\Z") _invalid_qdtext_char_regex = re.compile(br"[\x00-\x08\x0A-\x1F\x7F]") @property def _boundary_value(self): """Wrap boundary parameter value in quotes, if necessary. Reads self.boundary and returns a unicode sting. """ # Refer to RFCs 7231, 7230, 5234. 
# # parameter = token "=" ( token / quoted-string ) # token = 1*tchar # quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE # qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text # obs-text = %x80-FF # quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text ) # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" # / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" # / DIGIT / ALPHA # ; any VCHAR, except delimiters # VCHAR = %x21-7E value = self._boundary if re.match(self._valid_tchar_regex, value): return value.decode('ascii') # cannot fail if re.search(self._invalid_qdtext_char_regex, value): raise ValueError("boundary value contains invalid characters") # escape %x5C and %x22 quoted_value_content = value.replace(b'\\', b'\\\\') quoted_value_content = quoted_value_content.replace(b'"', b'\\"') return '"' + quoted_value_content.decode('ascii') + '"' @property def boundary(self): return self._boundary.decode('ascii') def append(self, obj, headers=None): if headers is None: headers = CIMultiDict() if isinstance(obj, Payload): if obj.headers is not None: obj.headers.update(headers) else: obj._headers = headers self.append_payload(obj) else: try: self.append_payload(get_payload(obj, headers=headers)) except LookupError: raise TypeError def append_payload(self, payload): """Adds a new body part to multipart writer.""" # content-type if CONTENT_TYPE not in payload.headers: payload.headers[CONTENT_TYPE] = payload.content_type # compression encoding = payload.headers.get(CONTENT_ENCODING, '').lower() if encoding and encoding not in ('deflate', 'gzip', 'identity'): raise RuntimeError('unknown content encoding: {}'.format(encoding)) if encoding == 'identity': encoding = None # te encoding te_encoding = payload.headers.get( CONTENT_TRANSFER_ENCODING, '').lower() if te_encoding not in ('', 'base64', 'quoted-printable', 'binary'): raise RuntimeError('unknown content transfer encoding: {}' ''.format(te_encoding)) if te_encoding == 'binary': te_encoding = None # size size = 
payload.size if size is not None and not (encoding or te_encoding): payload.headers[CONTENT_LENGTH] = str(size) # render headers headers = ''.join( [k + ': ' + v + '\r\n' for k, v in payload.headers.items()] ).encode('utf-8') + b'\r\n' self._parts.append((payload, headers, encoding, te_encoding)) def append_json(self, obj, headers=None): """Helper to append JSON part.""" if headers is None: headers = CIMultiDict() data = json.dumps(obj).encode('utf-8') self.append_payload( BytesPayload( data, headers=headers, content_type='application/json')) def append_form(self, obj, headers=None): """Helper to append form urlencoded part.""" assert isinstance(obj, (Sequence, Mapping)) if headers is None: headers = CIMultiDict() if isinstance(obj, Mapping): obj = list(obj.items()) data = urlencode(obj, doseq=True) return self.append_payload( StringPayload(data, headers=headers, content_type='application/x-www-form-urlencoded')) @property def size(self): """Size of the payload.""" if not self._parts: return 0 total = 0 for part, headers, encoding, te_encoding in self._parts: if encoding or te_encoding or part.size is None: return None total += ( 2 + len(self._boundary) + 2 + # b'--'+self._boundary+b'\r\n' part.size + len(headers) + 2 # b'\r\n' ) total += 2 + len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n' return total async def write(self, writer): """Write body.""" if not self._parts: return for part, headers, encoding, te_encoding in self._parts: await writer.write(b'--' + self._boundary + b'\r\n') await writer.write(headers) if encoding or te_encoding: w = MultipartPayloadWriter(writer) if encoding: w.enable_compression(encoding) if te_encoding: w.enable_encoding(te_encoding) await part.write(w) await w.write_eof() else: await part.write(writer) await writer.write(b'\r\n') await writer.write(b'--' + self._boundary + b'--\r\n') class MultipartPayloadWriter: def __init__(self, writer): self._writer = writer self._encoding = None self._compress = None def 
enable_encoding(self, encoding): if encoding == 'base64': self._encoding = encoding self._encoding_buffer = bytearray() elif encoding == 'quoted-printable': self._encoding = 'quoted-printable' def enable_compression(self, encoding='deflate'): zlib_mode = (16 + zlib.MAX_WBITS if encoding == 'gzip' else -zlib.MAX_WBITS) self._compress = zlib.compressobj(wbits=zlib_mode) async def write_eof(self): if self._compress is not None: chunk = self._compress.flush() if chunk: self._compress = None await self.write(chunk) if self._encoding == 'base64': if self._encoding_buffer: await self._writer.write(base64.b64encode( self._encoding_buffer)) async def write(self, chunk): if self._compress is not None: if chunk: chunk = self._compress.compress(chunk) if not chunk: return if self._encoding == 'base64': self._encoding_buffer.extend(chunk) if self._encoding_buffer: buffer = self._encoding_buffer div, mod = divmod(len(buffer), 3) enc_chunk, self._encoding_buffer = ( buffer[:div * 3], buffer[div * 3:]) if enc_chunk: enc_chunk = base64.b64encode(enc_chunk) await self._writer.write(enc_chunk) elif self._encoding == 'quoted-printable': await self._writer.write(binascii.b2a_qp(chunk)) else: await self._writer.write(chunk) aiohttp-3.0.1/aiohttp/payload.py0000666000000000000000000002031613240304665014775 0ustar 00000000000000import io import json import mimetypes import os import warnings from abc import ABC, abstractmethod from multidict import CIMultiDict from . 
class PayloadRegistry:
    """Maps python types to Payload factories.

    note: we need zope.interface for more efficient adapter search
    """

    def __init__(self):
        self._registry = []

    def get(self, data, *args, **kwargs):
        """Return *data* unchanged if it is already a Payload; otherwise
        build one with the first factory registered for its type."""
        if isinstance(data, Payload):
            return data
        for factory, registered_type in self._registry:
            if isinstance(data, registered_type):
                return factory(data, *args, **kwargs)
        raise LookupError()

    def register(self, factory, type):
        """Register *factory* for instances of *type*."""
        self._registry.append((factory, type))
self._encoding @property def content_type(self): """Content type""" if self._content_type is not None: return self._content_type elif self._filename is not None: mime = mimetypes.guess_type(self._filename)[0] return 'application/octet-stream' if mime is None else mime else: return Payload._content_type def set_content_disposition(self, disptype, quote_fields=True, **params): """Sets ``Content-Disposition`` header.""" if self._headers is None: self._headers = CIMultiDict() self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header( disptype, quote_fields=quote_fields, **params) @abstractmethod async def write(self, writer): """Write payload. writer is an AbstractStreamWriter instance: """ class BytesPayload(Payload): def __init__(self, value, *args, **kwargs): assert isinstance(value, (bytes, bytearray, memoryview)), \ "value argument must be byte-ish (%r)" % type(value) if 'content_type' not in kwargs: kwargs['content_type'] = 'application/octet-stream' super().__init__(value, *args, **kwargs) self._size = len(value) if self._size > TOO_LARGE_BYTES_BODY: if PY_36: kwargs = {'source': self} else: kwargs = {} warnings.warn("Sending a large body directly with raw bytes might" " lock the event loop. 
class StringPayload(BytesPayload):
    """Payload for str values; encodes to bytes with a consistent charset."""

    def __init__(self, value, *args, encoding=None, content_type=None,
                 **kwargs):
        # Derive whichever of encoding/content_type is missing so that the
        # two always agree.
        if encoding is None and content_type is None:
            encoding = 'utf-8'
            content_type = 'text/plain; charset=utf-8'
        elif encoding is None:
            mimetype = parse_mimetype(content_type)
            encoding = mimetype.parameters.get('charset', 'utf-8')
        elif content_type is None:
            content_type = 'text/plain; charset=%s' % encoding

        super().__init__(
            value.encode(encoding),
            encoding=encoding, content_type=content_type, *args, **kwargs)
class BytesIOPayload(IOBasePayload):
    """Payload over an io.BytesIO; size is the number of unread bytes."""

    @property
    def size(self):
        # Remember the cursor, measure to EOF, then restore the cursor.
        current = self._value.tell()
        total = self._value.seek(0, os.SEEK_END)
        self._value.seek(current)
        return total - current
As a simple case, you can upload data from file:: @aiohttp.streamer async def file_sender(writer, file_name=None): with open(file_name, 'rb') as f: chunk = f.read(2**16) while chunk: await writer.write(chunk) chunk = f.read(2**16) Then you can use `file_sender` like this: async with session.post('http://httpbin.org/post', data=file_sender(file_name='huge_file')) as resp: print(await resp.text()) ..note:: Coroutine must accept `writer` as first argument """ import asyncio from .payload import Payload, payload_type __all__ = ('streamer',) class _stream_wrapper: def __init__(self, coro, args, kwargs): self.coro = asyncio.coroutine(coro) self.args = args self.kwargs = kwargs async def __call__(self, writer): await self.coro(writer, *self.args, **self.kwargs) class streamer: def __init__(self, coro): self.coro = coro def __call__(self, *args, **kwargs): return _stream_wrapper(self.coro, args, kwargs) @payload_type(_stream_wrapper) class StreamWrapperPayload(Payload): async def write(self, writer): await self._value(writer) @payload_type(streamer) class StreamPayload(StreamWrapperPayload): def __init__(self, value, *args, **kwargs): super().__init__(value(), *args, **kwargs) async def write(self, writer): await self._value(writer) aiohttp-3.0.1/aiohttp/pytest_plugin.py0000666000000000000000000002305513240304665016255 0ustar 00000000000000import asyncio import collections import contextlib import warnings import pytest from aiohttp.helpers import isasyncgenfunction from aiohttp.web import Application from .test_utils import (BaseTestServer, RawTestServer, TestClient, TestServer, loop_context, setup_test_loop, teardown_test_loop) from .test_utils import unused_port as _unused_port try: import uvloop except ImportError: # pragma: no cover uvloop = None try: import tokio except ImportError: # pragma: no cover tokio = None def pytest_addoption(parser): parser.addoption( '--aiohttp-fast', action='store_true', default=False, help='run tests faster by disabling extra checks') 
parser.addoption( '--aiohttp-loop', action='store', default='pyloop', help='run tests with specific loop: pyloop, uvloop, tokio or all') parser.addoption( '--aiohttp-enable-loop-debug', action='store_true', default=False, help='enable event loop debug mode') def pytest_fixture_setup(fixturedef): """ Allow fixtures to be coroutines. Run coroutine fixtures in an event loop. """ func = fixturedef.func if isasyncgenfunction(func): # async generator fixture is_async_gen = True elif asyncio.iscoroutinefunction(func): # regular async fixture is_async_gen = False else: # not an async fixture, nothing to do return strip_request = False if 'request' not in fixturedef.argnames: fixturedef.argnames += ('request',) strip_request = True def wrapper(*args, **kwargs): request = kwargs['request'] if strip_request: del kwargs['request'] # if neither the fixture nor the test use the 'loop' fixture, # 'getfixturevalue' will fail because the test is not parameterized # (this can be removed someday if 'loop' is no longer parameterized) if 'loop' not in request.fixturenames: raise Exception( "Asynchronous fixtures must depend on the 'loop' fixture or " "be used in tests depending from it." 
) _loop = request.getfixturevalue('loop') if is_async_gen: # for async generators, we need to advance the generator once, # then advance it again in a finalizer gen = func(*args, **kwargs) def finalizer(): try: return _loop.run_until_complete(gen.__anext__()) except StopAsyncIteration: # NOQA pass request.addfinalizer(finalizer) return _loop.run_until_complete(gen.__anext__()) else: return _loop.run_until_complete(func(*args, **kwargs)) fixturedef.func = wrapper @pytest.fixture def fast(request): """--fast config option""" return request.config.getoption('--aiohttp-fast') @pytest.fixture def loop_debug(request): """--enable-loop-debug config option""" return request.config.getoption('--aiohttp-enable-loop-debug') @contextlib.contextmanager def _runtime_warning_context(): """ Context manager which checks for RuntimeWarnings, specifically to avoid "coroutine 'X' was never awaited" warnings being missed. If RuntimeWarnings occur in the context a RuntimeError is raised. """ with warnings.catch_warnings(record=True) as _warnings: yield rw = ['{w.filename}:{w.lineno}:{w.message}'.format(w=w) for w in _warnings if w.category == RuntimeWarning] if rw: raise RuntimeError('{} Runtime Warning{},\n{}'.format( len(rw), '' if len(rw) == 1 else 's', '\n'.join(rw) )) @contextlib.contextmanager def _passthrough_loop_context(loop, fast=False): """ setups and tears down a loop unless one is passed in via the loop argument when it's passed straight through. """ if loop: # loop already exists, pass it straight through yield loop else: # this shadows loop_context's standard behavior loop = setup_test_loop() yield loop teardown_test_loop(loop, fast=fast) def pytest_pycollect_makeitem(collector, name, obj): """ Fix pytest collecting for coroutines. """ if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj): return list(collector._genfunctions(name, obj)) def pytest_pyfunc_call(pyfuncitem): """ Run coroutines in an event loop instead of a normal function call. 
""" fast = pyfuncitem.config.getoption("--aiohttp-fast") if asyncio.iscoroutinefunction(pyfuncitem.function): existing_loop = pyfuncitem.funcargs.get('loop', None) with _runtime_warning_context(): with _passthrough_loop_context(existing_loop, fast=fast) as _loop: testargs = {arg: pyfuncitem.funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames} _loop.run_until_complete(pyfuncitem.obj(**testargs)) return True def pytest_generate_tests(metafunc): if 'loop_factory' not in metafunc.fixturenames: return loops = metafunc.config.option.aiohttp_loop avail_factories = {'pyloop': asyncio.new_event_loop} if uvloop is not None: # pragma: no cover avail_factories['uvloop'] = uvloop.new_event_loop if tokio is not None: # pragma: no cover avail_factories['tokio'] = tokio.new_event_loop if loops == 'all': loops = 'pyloop,uvloop?,tokio?' factories = {} for name in loops.split(','): required = not name.endswith('?') name = name.strip(' ?') if name not in avail_factories: # pragma: no cover if required: raise ValueError( "Unknown loop '%s', available loops: %s" % ( name, list(factories.keys()))) else: continue factories[name] = avail_factories[name] metafunc.parametrize("loop_factory", list(factories.values()), ids=list(factories.keys())) @pytest.fixture def loop(loop_factory, fast, loop_debug): """Return an instance of the event loop.""" with loop_context(loop_factory, fast=fast) as _loop: if loop_debug: _loop.set_debug(True) # pragma: no cover yield _loop asyncio.set_event_loop(None) @pytest.fixture def unused_port(aiohttp_unused_port): # pragma: no cover warnings.warn("Deprecated, use aiohttp_unused_port fixture instead", DeprecationWarning) return aiohttp_unused_port @pytest.fixture def aiohttp_unused_port(): """Return a port that is unused on the current host.""" return _unused_port @pytest.fixture def aiohttp_server(loop): """Factory to create a TestServer instance, given an app. 
aiohttp_server(app, **kwargs) """ servers = [] async def go(app, *, port=None, **kwargs): server = TestServer(app, port=port) await server.start_server(loop=loop, **kwargs) servers.append(server) return server yield go async def finalize(): while servers: await servers.pop().close() loop.run_until_complete(finalize()) @pytest.fixture def test_server(aiohttp_server): # pragma: no cover warnings.warn("Deprecated, use aiohttp_server fixture instead", DeprecationWarning) return aiohttp_server @pytest.fixture def aiohttp_raw_server(loop): """Factory to create a RawTestServer instance, given a web handler. aiohttp_raw_server(handler, **kwargs) """ servers = [] async def go(handler, *, port=None, **kwargs): server = RawTestServer(handler, port=port) await server.start_server(loop=loop, **kwargs) servers.append(server) return server yield go async def finalize(): while servers: await servers.pop().close() loop.run_until_complete(finalize()) @pytest.fixture def raw_test_server(aiohttp_raw_server): # pragma: no cover warnings.warn("Deprecated, use aiohttp_raw_server fixture instead", DeprecationWarning) return aiohttp_raw_server @pytest.fixture def aiohttp_client(loop): """Factory to create a TestClient instance. 
aiohttp_client(app, **kwargs) aiohttp_client(server, **kwargs) aiohttp_client(raw_server, **kwargs) """ clients = [] async def go(__param, *args, server_kwargs=None, **kwargs): if isinstance(__param, collections.Callable) and \ not isinstance(__param, (Application, BaseTestServer)): __param = __param(loop, *args, **kwargs) kwargs = {} else: assert not args, "args should be empty" if isinstance(__param, Application): server_kwargs = server_kwargs or {} server = TestServer(__param, loop=loop, **server_kwargs) client = TestClient(server, loop=loop, **kwargs) elif isinstance(__param, BaseTestServer): client = TestClient(__param, loop=loop, **kwargs) else: raise ValueError("Unknown argument type: %r" % type(__param)) await client.start_server() clients.append(client) return client yield go async def finalize(): while clients: await clients.pop().close() loop.run_until_complete(finalize()) @pytest.fixture def test_client(aiohttp_client): # pragma: no cover warnings.warn("Deprecated, use aiohttp_client fixture instead", DeprecationWarning) return aiohttp_client aiohttp-3.0.1/aiohttp/resolver.py0000666000000000000000000000636013240304665015210 0ustar 00000000000000import asyncio import socket from .abc import AbstractResolver __all__ = ('ThreadedResolver', 'AsyncResolver', 'DefaultResolver') try: import aiodns # aiodns_default = hasattr(aiodns.DNSResolver, 'gethostbyname') except ImportError: # pragma: no cover aiodns = None aiodns_default = False class ThreadedResolver(AbstractResolver): """Use Executor for synchronous getaddrinfo() calls, which defaults to concurrent.futures.ThreadPoolExecutor. 
""" def __init__(self, loop=None): if loop is None: loop = asyncio.get_event_loop() self._loop = loop async def resolve(self, host, port=0, family=socket.AF_INET): infos = await self._loop.getaddrinfo( host, port, type=socket.SOCK_STREAM, family=family) hosts = [] for family, _, proto, _, address in infos: hosts.append( {'hostname': host, 'host': address[0], 'port': address[1], 'family': family, 'proto': proto, 'flags': socket.AI_NUMERICHOST}) return hosts async def close(self): pass class AsyncResolver(AbstractResolver): """Use the `aiodns` package to make asynchronous DNS lookups""" def __init__(self, loop=None, *args, **kwargs): if loop is None: loop = asyncio.get_event_loop() if aiodns is None: raise RuntimeError("Resolver requires aiodns library") self._loop = loop self._resolver = aiodns.DNSResolver(*args, loop=loop, **kwargs) if not hasattr(self._resolver, 'gethostbyname'): # aiodns 1.1 is not available, fallback to DNSResolver.query self.resolve = self._resolve_with_query async def resolve(self, host, port=0, family=socket.AF_INET): try: resp = await self._resolver.gethostbyname(host, family) except aiodns.error.DNSError as exc: msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" raise OSError(msg) from exc hosts = [] for address in resp.addresses: hosts.append( {'hostname': host, 'host': address, 'port': port, 'family': family, 'proto': 0, 'flags': socket.AI_NUMERICHOST}) if not hosts: raise OSError("DNS lookup failed") return hosts async def _resolve_with_query(self, host, port=0, family=socket.AF_INET): if family == socket.AF_INET6: qtype = 'AAAA' else: qtype = 'A' try: resp = await self._resolver.query(host, qtype) except aiodns.error.DNSError as exc: msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" raise OSError(msg) from exc hosts = [] for rr in resp: hosts.append( {'hostname': host, 'host': rr.host, 'port': port, 'family': family, 'proto': 0, 'flags': socket.AI_NUMERICHOST}) if not hosts: raise OSError("DNS lookup 
failed") return hosts async def close(self): return self._resolver.cancel() DefaultResolver = AsyncResolver if aiodns_default else ThreadedResolver aiohttp-3.0.1/aiohttp/signals.py0000666000000000000000000000164513240304665015010 0ustar 00000000000000from aiohttp.frozenlist import FrozenList __all__ = ('Signal',) class Signal(FrozenList): """Coroutine-based signal implementation. To connect a callback to a signal, use any list method. Signals are fired using the send() coroutine, which takes named arguments. """ __slots__ = ('_owner',) def __init__(self, owner): super().__init__() self._owner = owner def __repr__(self): return ''.format(self._owner, self.frozen, list(self)) async def send(self, *args, **kwargs): """ Sends data to all registered receivers. """ if not self.frozen: raise RuntimeError("Cannot send non-frozen signal.") for receiver in self: await receiver(*args, **kwargs) aiohttp-3.0.1/aiohttp/streams.py0000666000000000000000000004047713240304665015034 0ustar 00000000000000import asyncio import collections from .helpers import set_exception, set_result from .log import internal_logger __all__ = ( 'EMPTY_PAYLOAD', 'EofStream', 'StreamReader', 'DataQueue', 'FlowControlDataQueue') DEFAULT_LIMIT = 2 ** 16 class EofStream(Exception): """eof stream indication.""" class AsyncStreamIterator: def __init__(self, read_func): self.read_func = read_func def __aiter__(self): return self async def __anext__(self): try: rv = await self.read_func() except EofStream: raise StopAsyncIteration # NOQA if rv == b'': raise StopAsyncIteration # NOQA return rv class ChunkTupleAsyncStreamIterator(AsyncStreamIterator): async def __anext__(self): rv = await self.read_func() if rv == (b'', False): raise StopAsyncIteration # NOQA return rv class AsyncStreamReaderMixin: def __aiter__(self): return AsyncStreamIterator(self.readline) def iter_chunked(self, n): """Returns an asynchronous iterator that yields chunks of size n. 
Python-3.5 available for Python 3.5+ only """ return AsyncStreamIterator(lambda: self.read(n)) def iter_any(self): """Returns an asynchronous iterator that yields all the available data as soon as it is received Python-3.5 available for Python 3.5+ only """ return AsyncStreamIterator(self.readany) def iter_chunks(self): """Returns an asynchronous iterator that yields chunks of data as they are received by the server. The yielded objects are tuples of (bytes, bool) as returned by the StreamReader.readchunk method. Python-3.5 available for Python 3.5+ only """ return ChunkTupleAsyncStreamIterator(self.readchunk) class StreamReader(AsyncStreamReaderMixin): """An enhancement of asyncio.StreamReader. Supports asynchronous iteration by line, chunk or as available:: async for line in reader: ... async for chunk in reader.iter_chunked(1024): ... async for slice in reader.iter_any(): ... """ total_bytes = 0 def __init__(self, protocol, *, limit=DEFAULT_LIMIT, timer=None, loop=None): self._protocol = protocol self._low_water = limit self._high_water = limit * 2 if loop is None: loop = asyncio.get_event_loop() self._loop = loop self._size = 0 self._cursor = 0 self._http_chunk_splits = None self._buffer = collections.deque() self._buffer_offset = 0 self._eof = False self._waiter = None self._eof_waiter = None self._exception = None self._timer = timer self._eof_callbacks = [] def __repr__(self): info = [self.__class__.__name__] if self._size: info.append('%d bytes' % self._size) if self._eof: info.append('eof') if self._low_water != DEFAULT_LIMIT: info.append('low=%d high=%d' % (self._low_water, self._high_water)) if self._waiter: info.append('w=%r' % self._waiter) if self._exception: info.append('e=%r' % self._exception) return '<%s>' % ' '.join(info) def exception(self): return self._exception def set_exception(self, exc): self._exception = exc self._eof_callbacks.clear() waiter = self._waiter if waiter is not None: self._waiter = None set_exception(waiter, exc) waiter = 
self._eof_waiter if waiter is not None: set_exception(waiter, exc) self._eof_waiter = None def on_eof(self, callback): if self._eof: try: callback() except Exception: internal_logger.exception('Exception in eof callback') else: self._eof_callbacks.append(callback) def feed_eof(self): self._eof = True waiter = self._waiter if waiter is not None: self._waiter = None set_result(waiter, True) waiter = self._eof_waiter if waiter is not None: self._eof_waiter = None set_result(waiter, True) for cb in self._eof_callbacks: try: cb() except Exception: internal_logger.exception('Exception in eof callback') self._eof_callbacks.clear() def is_eof(self): """Return True if 'feed_eof' was called.""" return self._eof def at_eof(self): """Return True if the buffer is empty and 'feed_eof' was called.""" return self._eof and not self._buffer async def wait_eof(self): if self._eof: return assert self._eof_waiter is None self._eof_waiter = self._loop.create_future() try: await self._eof_waiter finally: self._eof_waiter = None def unread_data(self, data): """ rollback reading some data from stream, inserting it to buffer head. 
""" if not data: return if self._buffer_offset: self._buffer[0] = self._buffer[0][self._buffer_offset:] self._buffer_offset = 0 self._size += len(data) self._cursor -= len(data) self._buffer.appendleft(data) self._eof_counter = 0 # TODO: size is ignored, remove the param later def feed_data(self, data, size=0): assert not self._eof, 'feed_data after feed_eof' if not data: return self._size += len(data) self._buffer.append(data) self.total_bytes += len(data) waiter = self._waiter if waiter is not None: self._waiter = None set_result(waiter, False) if (self._size > self._high_water and not self._protocol._reading_paused): self._protocol.pause_reading() def begin_http_chunk_receiving(self): if self._http_chunk_splits is None: self._http_chunk_splits = [] def end_http_chunk_receiving(self): if self._http_chunk_splits is None: raise RuntimeError("Called end_chunk_receiving without calling " "begin_chunk_receiving first") if not self._http_chunk_splits or \ self._http_chunk_splits[-1] != self.total_bytes: self._http_chunk_splits.append(self.total_bytes) async def _wait(self, func_name): # StreamReader uses a future to link the protocol feed_data() method # to a read coroutine. Running two read coroutines at the same time # would have an unexpected behaviour. It would not possible to know # which coroutine would get the next data. if self._waiter is not None: raise RuntimeError('%s() called while another coroutine is ' 'already waiting for incoming data' % func_name) waiter = self._waiter = self._loop.create_future() try: if self._timer: with self._timer: await waiter else: await waiter finally: self._waiter = None async def readline(self): if self._exception is not None: raise self._exception line = [] line_size = 0 not_enough = True while not_enough: while self._buffer and not_enough: offset = self._buffer_offset ichar = self._buffer[0].find(b'\n', offset) + 1 # Read from current offset to found b'\n' or to the end. 
data = self._read_nowait_chunk(ichar - offset if ichar else -1) line.append(data) line_size += len(data) if ichar: not_enough = False if line_size > self._high_water: raise ValueError('Line is too long') if self._eof: break if not_enough: await self._wait('readline') return b''.join(line) async def read(self, n=-1): if self._exception is not None: raise self._exception # migration problem; with DataQueue you have to catch # EofStream exception, so common way is to run payload.read() inside # infinite loop. what can cause real infinite loop with StreamReader # lets keep this code one major release. if __debug__: if self._eof and not self._buffer: self._eof_counter = getattr(self, '_eof_counter', 0) + 1 if self._eof_counter > 5: internal_logger.warning( 'Multiple access to StreamReader in eof state, ' 'might be infinite loop.', stack_info=True) if not n: return b'' if n < 0: # This used to just loop creating a new waiter hoping to # collect everything in self._buffer, but that would # deadlock if the subprocess sends more than self.limit # bytes. So just call self.readany() until EOF. blocks = [] while True: block = await self.readany() if not block: break blocks.append(block) return b''.join(blocks) if not self._buffer and not self._eof: await self._wait('read') return self._read_nowait(n) async def readany(self): if self._exception is not None: raise self._exception if not self._buffer and not self._eof: await self._wait('readany') return self._read_nowait(-1) async def readchunk(self): """Returns a tuple of (data, end_of_http_chunk). When chunked transfer encoding is used, end_of_http_chunk is a boolean indicating if the end of the data corresponds to the end of a HTTP chunk , otherwise it is always False. 
""" if self._exception is not None: raise self._exception if not self._buffer and not self._eof: if (self._http_chunk_splits and self._cursor == self._http_chunk_splits[0]): # end of http chunk without available data self._http_chunk_splits = self._http_chunk_splits[1:] return (b"", True) await self._wait('readchunk') if not self._buffer: # end of file return (b"", False) elif self._http_chunk_splits is not None: while self._http_chunk_splits: pos = self._http_chunk_splits[0] self._http_chunk_splits = self._http_chunk_splits[1:] if pos > self._cursor: return (self._read_nowait(pos-self._cursor), True) return (self._read_nowait(-1), False) else: return (self._read_nowait_chunk(-1), False) async def readexactly(self, n): if self._exception is not None: raise self._exception blocks = [] while n > 0: block = await self.read(n) if not block: partial = b''.join(blocks) raise asyncio.streams.IncompleteReadError( partial, len(partial) + n) blocks.append(block) n -= len(block) return b''.join(blocks) def read_nowait(self, n=-1): # default was changed to be consistent with .read(-1) # # I believe the most users don't know about the method and # they are not affected. 
if self._exception is not None: raise self._exception if self._waiter and not self._waiter.done(): raise RuntimeError( 'Called while some coroutine is waiting for incoming data.') return self._read_nowait(n) def _read_nowait_chunk(self, n): first_buffer = self._buffer[0] offset = self._buffer_offset if n != -1 and len(first_buffer) - offset > n: data = first_buffer[offset:offset + n] self._buffer_offset += n elif offset: self._buffer.popleft() data = first_buffer[offset:] self._buffer_offset = 0 else: data = self._buffer.popleft() self._size -= len(data) self._cursor += len(data) if self._size < self._low_water and self._protocol._reading_paused: self._protocol.resume_reading() return data def _read_nowait(self, n): chunks = [] while self._buffer: chunk = self._read_nowait_chunk(n) chunks.append(chunk) if n != -1: n -= len(chunk) if n == 0: break return b''.join(chunks) if chunks else b'' class EmptyStreamReader(AsyncStreamReaderMixin): def exception(self): return None def set_exception(self, exc): pass def on_eof(self, callback): try: callback() except Exception: internal_logger.exception('Exception in eof callback') def feed_eof(self): pass def is_eof(self): return True def at_eof(self): return True async def wait_eof(self): return def feed_data(self, data): pass async def readline(self): return b'' async def read(self, n=-1): return b'' async def readany(self): return b'' async def readchunk(self): return (b'', False) async def readexactly(self, n): raise asyncio.streams.IncompleteReadError(b'', n) def read_nowait(self): return b'' EMPTY_PAYLOAD = EmptyStreamReader() class DataQueue: """DataQueue is a general-purpose blocking queue with one reader.""" def __init__(self, *, loop=None): self._loop = loop self._eof = False self._waiter = None self._exception = None self._size = 0 self._buffer = collections.deque() def __len__(self): return len(self._buffer) def is_eof(self): return self._eof def at_eof(self): return self._eof and not self._buffer def 
exception(self): return self._exception def set_exception(self, exc): self._eof = True self._exception = exc waiter = self._waiter if waiter is not None: set_exception(waiter, exc) self._waiter = None def feed_data(self, data, size=0): self._size += size self._buffer.append((data, size)) waiter = self._waiter if waiter is not None: self._waiter = None set_result(waiter, True) def feed_eof(self): self._eof = True waiter = self._waiter if waiter is not None: self._waiter = None set_result(waiter, False) async def read(self): if not self._buffer and not self._eof: assert not self._waiter self._waiter = self._loop.create_future() try: await self._waiter except (asyncio.CancelledError, asyncio.TimeoutError): self._waiter = None raise if self._buffer: data, size = self._buffer.popleft() self._size -= size return data else: if self._exception is not None: raise self._exception else: raise EofStream def __aiter__(self): return AsyncStreamIterator(self.read) class FlowControlDataQueue(DataQueue): """FlowControlDataQueue resumes and pauses an underlying stream. 
It is a destination for parsed data.""" def __init__(self, protocol, *, limit=DEFAULT_LIMIT, loop=None): super().__init__(loop=loop) self._protocol = protocol self._limit = limit * 2 def feed_data(self, data, size): super().feed_data(data, size) if self._size > self._limit and not self._protocol._reading_paused: self._protocol.pause_reading() async def read(self): try: return await super().read() finally: if self._size < self._limit and self._protocol._reading_paused: self._protocol.resume_reading() aiohttp-3.0.1/aiohttp/tcp_helpers.py0000666000000000000000000000260113240304665015651 0ustar 00000000000000"""Helper methods to tune a TCP connection""" import socket from contextlib import suppress __all__ = ('tcp_keepalive', 'tcp_nodelay', 'tcp_cork') if hasattr(socket, 'TCP_CORK'): # pragma: no cover CORK = socket.TCP_CORK elif hasattr(socket, 'TCP_NOPUSH'): # pragma: no cover CORK = socket.TCP_NOPUSH else: # pragma: no cover CORK = None if hasattr(socket, 'SO_KEEPALIVE'): def tcp_keepalive(transport): sock = transport.get_extra_info('socket') if sock is not None: sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) else: def tcp_keepalive(transport): # pragma: no cover pass def tcp_nodelay(transport, value): sock = transport.get_extra_info('socket') if sock is None: return if sock.family not in (socket.AF_INET, socket.AF_INET6): return value = bool(value) # socket may be closed already, on windows OSError get raised with suppress(OSError): sock.setsockopt( socket.IPPROTO_TCP, socket.TCP_NODELAY, value) def tcp_cork(transport, value): sock = transport.get_extra_info('socket') if CORK is None: return if sock is None: return if sock.family not in (socket.AF_INET, socket.AF_INET6): return value = bool(value) with suppress(OSError): sock.setsockopt( socket.IPPROTO_TCP, CORK, value) aiohttp-3.0.1/aiohttp/test_utils.py0000666000000000000000000003737613240304665015561 0ustar 00000000000000"""Utilities shared by tests.""" import asyncio import contextlib import 
functools import gc import socket import sys import unittest from abc import ABC, abstractmethod from unittest import mock from multidict import CIMultiDict from yarl import URL import aiohttp from aiohttp.client import _RequestContextManager, _WSRequestContextManager from . import ClientSession, hdrs from .helpers import sentinel from .http import HttpVersion, RawRequestMessage from .signals import Signal from .web import (AppRunner, Request, Server, ServerRunner, TCPSite, UrlMappingMatchInfo) def unused_port(): """Return a port that is unused on the current host.""" with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: s.bind(('127.0.0.1', 0)) return s.getsockname()[1] class BaseTestServer(ABC): def __init__(self, *, scheme=sentinel, loop=None, host='127.0.0.1', port=None, skip_url_asserts=False, **kwargs): self._loop = loop self.runner = None self._root = None self.host = host self.port = port self._closed = False self.scheme = scheme self.skip_url_asserts = skip_url_asserts async def start_server(self, loop=None, **kwargs): if self.runner: return self._loop = loop self._ssl = kwargs.pop('ssl', None) self.runner = await self._make_runner(**kwargs) await self.runner.setup() if not self.port: self.port = unused_port() site = TCPSite(self.runner, host=self.host, port=self.port, ssl_context=self._ssl) await site.start() if self.scheme is sentinel: if self._ssl: scheme = 'https' else: scheme = 'http' self.scheme = scheme self._root = URL('{}://{}:{}'.format(self.scheme, self.host, self.port)) @abstractmethod # pragma: no cover async def _make_runner(self, **kwargs): pass def make_url(self, path): url = URL(path) if not self.skip_url_asserts: assert not url.is_absolute() return self._root.join(url) else: return URL(str(self._root) + path) @property def started(self): return self.runner is not None @property def closed(self): return self._closed @property def handler(self): # for backward compatibility # web.Server instance return self.runner.server async def 
close(self): """Close all fixtures created by the test client. After that point, the TestClient is no longer usable. This is an idempotent function: running close multiple times will not have any additional effects. close is also run when the object is garbage collected, and on exit when used as a context manager. """ if self.started and not self.closed: await self.runner.cleanup() self._root = None self.port = None self._closed = True def __enter__(self): raise TypeError("Use async with instead") def __exit__(self, exc_type, exc_value, traceback): # __exit__ should exist in pair with __enter__ but never executed pass # pragma: no cover async def __aenter__(self): await self.start_server(loop=self._loop) return self async def __aexit__(self, exc_type, exc_value, traceback): await self.close() class TestServer(BaseTestServer): def __init__(self, app, *, scheme=sentinel, host='127.0.0.1', port=None, **kwargs): self.app = app super().__init__(scheme=scheme, host=host, port=port, **kwargs) async def _make_runner(self, **kwargs): return AppRunner(self.app, **kwargs) class RawTestServer(BaseTestServer): def __init__(self, handler, *, scheme=sentinel, host='127.0.0.1', port=None, **kwargs): self._handler = handler super().__init__(scheme=scheme, host=host, port=port, **kwargs) async def _make_runner(self, debug=True, **kwargs): srv = Server( self._handler, loop=self._loop, debug=True, **kwargs) return ServerRunner(srv, debug=debug, **kwargs) class TestClient: """ A test client implementation. To write functional tests for aiohttp based servers. 
""" def __init__(self, server, *, cookie_jar=None, loop=None, **kwargs): if not isinstance(server, BaseTestServer): raise TypeError("server must be web.Application TestServer " "instance, found type: %r" % type(server)) self._server = server self._loop = loop if cookie_jar is None: cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop) self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs) self._closed = False self._responses = [] self._websockets = [] async def start_server(self): await self._server.start_server(loop=self._loop) @property def host(self): return self._server.host @property def port(self): return self._server.port @property def server(self): return self._server @property def session(self): """An internal aiohttp.ClientSession. Unlike the methods on the TestClient, client session requests do not automatically include the host in the url queried, and will require an absolute path to the resource. """ return self._session def make_url(self, path): return self._server.make_url(path) async def request(self, method, path, *args, **kwargs): """Routes a request to tested http server. The interface is identical to asyncio.ClientSession.request, except the loop kwarg is overridden by the instance used by the test server. 
""" resp = await self._session.request( method, self.make_url(path), *args, **kwargs ) # save it to close later self._responses.append(resp) return resp def get(self, path, *args, **kwargs): """Perform an HTTP GET request.""" return _RequestContextManager( self.request(hdrs.METH_GET, path, *args, **kwargs) ) def post(self, path, *args, **kwargs): """Perform an HTTP POST request.""" return _RequestContextManager( self.request(hdrs.METH_POST, path, *args, **kwargs) ) def options(self, path, *args, **kwargs): """Perform an HTTP OPTIONS request.""" return _RequestContextManager( self.request(hdrs.METH_OPTIONS, path, *args, **kwargs) ) def head(self, path, *args, **kwargs): """Perform an HTTP HEAD request.""" return _RequestContextManager( self.request(hdrs.METH_HEAD, path, *args, **kwargs) ) def put(self, path, *args, **kwargs): """Perform an HTTP PUT request.""" return _RequestContextManager( self.request(hdrs.METH_PUT, path, *args, **kwargs) ) def patch(self, path, *args, **kwargs): """Perform an HTTP PATCH request.""" return _RequestContextManager( self.request(hdrs.METH_PATCH, path, *args, **kwargs) ) def delete(self, path, *args, **kwargs): """Perform an HTTP PATCH request.""" return _RequestContextManager( self.request(hdrs.METH_DELETE, path, *args, **kwargs) ) def ws_connect(self, path, *args, **kwargs): """Initiate websocket connection. The api corresponds to aiohttp.ClientSession.ws_connect. """ return _WSRequestContextManager( self._ws_connect(path, *args, **kwargs) ) async def _ws_connect(self, path, *args, **kwargs): ws = await self._session.ws_connect( self.make_url(path), *args, **kwargs) self._websockets.append(ws) return ws async def close(self): """Close all fixtures created by the test client. After that point, the TestClient is no longer usable. This is an idempotent function: running close multiple times will not have any additional effects. close is also run on exit when used as a(n) (asynchronous) context manager. 
""" if not self._closed: for resp in self._responses: resp.close() for ws in self._websockets: await ws.close() await self._session.close() await self._server.close() self._closed = True def __enter__(self): raise TypeError("Use async with instead") def __exit__(self, exc_type, exc_value, traceback): # __exit__ should exist in pair with __enter__ but never executed pass # pragma: no cover async def __aenter__(self): await self.start_server() return self async def __aexit__(self, exc_type, exc_value, traceback): await self.close() class AioHTTPTestCase(unittest.TestCase): """A base class to allow for unittest web applications using aiohttp. Provides the following: * self.client (aiohttp.test_utils.TestClient): an aiohttp test client. * self.loop (asyncio.BaseEventLoop): the event loop in which the application and server are running. * self.app (aiohttp.web.Application): the application returned by self.get_application() Note that the TestClient's methods are asynchronous: you have to execute function on the test client using asynchronous methods. """ async def get_application(self): """ This method should be overridden to return the aiohttp.web.Application object to test. """ return self.get_app() def get_app(self): """Obsolete method used to constructing web application. 
Use .get_application() coroutine instead """ raise RuntimeError("Did you forget to define get_application()?") def setUp(self): self.loop = setup_test_loop() self.app = self.loop.run_until_complete(self.get_application()) self.server = self.loop.run_until_complete(self.get_server(self.app)) self.client = self.loop.run_until_complete( self.get_client(self.server)) self.loop.run_until_complete(self.client.start_server()) self.loop.run_until_complete(self.setUpAsync()) async def setUpAsync(self): pass def tearDown(self): self.loop.run_until_complete(self.tearDownAsync()) self.loop.run_until_complete(self.client.close()) teardown_test_loop(self.loop) async def tearDownAsync(self): pass async def get_server(self, app): """Return a TestServer instance.""" return TestServer(app, loop=self.loop) async def get_client(self, server): """Return a TestClient instance.""" return TestClient(server, loop=self.loop) def unittest_run_loop(func, *args, **kwargs): """A decorator dedicated to use with asynchronous methods of an AioHTTPTestCase. Handles executing an asynchronous function, using the self.loop of the AioHTTPTestCase. """ @functools.wraps(func, *args, **kwargs) def new_func(self, *inner_args, **inner_kwargs): return self.loop.run_until_complete( func(self, *inner_args, **inner_kwargs)) return new_func @contextlib.contextmanager def loop_context(loop_factory=asyncio.new_event_loop, fast=False): """A contextmanager that creates an event_loop, for test purposes. Handles the creation and cleanup of a test loop. """ loop = setup_test_loop(loop_factory) yield loop teardown_test_loop(loop, fast=fast) def setup_test_loop(loop_factory=asyncio.new_event_loop): """Create and return an asyncio.BaseEventLoop instance. The caller should also call teardown_test_loop, once they are done with the loop. 
""" loop = loop_factory() asyncio.set_event_loop(None) if sys.platform != "win32": policy = asyncio.get_event_loop_policy() watcher = asyncio.SafeChildWatcher() watcher.attach_loop(loop) with contextlib.suppress(NotImplementedError): policy.set_child_watcher(watcher) return loop def teardown_test_loop(loop, fast=False): """Teardown and cleanup an event_loop created by setup_test_loop. """ closed = loop.is_closed() if not closed: loop.call_soon(loop.stop) loop.run_forever() loop.close() if not fast: gc.collect() asyncio.set_event_loop(None) def _create_app_mock(): app = mock.Mock() app._debug = False app.on_response_prepare = Signal(app) app.on_response_prepare.freeze() return app def _create_transport(sslcontext=None): transport = mock.Mock() def get_extra_info(key): if key == 'sslcontext': return sslcontext else: return None transport.get_extra_info.side_effect = get_extra_info return transport def make_mocked_request(method, path, headers=None, *, match_info=sentinel, version=HttpVersion(1, 1), closing=False, app=None, writer=sentinel, payload_writer=sentinel, protocol=sentinel, transport=sentinel, payload=sentinel, sslcontext=None, client_max_size=1024**2, loop=...): """Creates mocked web.Request testing purposes. Useful in unit tests, when spinning full web server is overkill or specific conditions and errors are hard to trigger. 
""" task = mock.Mock() if loop is ...: loop = mock.Mock() loop.create_future.return_value = () if version < HttpVersion(1, 1): closing = True if headers: headers = CIMultiDict(headers) raw_hdrs = tuple( (k.encode('utf-8'), v.encode('utf-8')) for k, v in headers.items()) else: headers = CIMultiDict() raw_hdrs = () chunked = 'chunked' in headers.get(hdrs.TRANSFER_ENCODING, '').lower() message = RawRequestMessage( method, path, version, headers, raw_hdrs, closing, False, False, chunked, URL(path)) if app is None: app = _create_app_mock() if protocol is sentinel: protocol = mock.Mock() if transport is sentinel: transport = _create_transport(sslcontext) if writer is sentinel: writer = mock.Mock() writer.transport = transport if payload_writer is sentinel: payload_writer = mock.Mock() payload_writer.write = make_mocked_coro(None) payload_writer.write_eof = make_mocked_coro(None) payload_writer.drain = make_mocked_coro(None) protocol.transport = transport protocol.writer = writer if payload is sentinel: payload = mock.Mock() req = Request(message, payload, protocol, payload_writer, task, loop, client_max_size=client_max_size) match_info = UrlMappingMatchInfo( {} if match_info is sentinel else match_info, mock.Mock()) match_info.add_app(app) req._match_info = match_info return req def make_mocked_coro(return_value=sentinel, raise_exception=sentinel): """Creates a coroutine mock.""" @asyncio.coroutine def mock_coro(*args, **kwargs): if raise_exception is not sentinel: raise raise_exception return return_value return mock.Mock(wraps=mock_coro) aiohttp-3.0.1/aiohttp/tracing.py0000666000000000000000000002250613240304665014776 0ustar 00000000000000from types import SimpleNamespace import attr from multidict import CIMultiDict from yarl import URL from .client_reqrep import ClientResponse from .signals import Signal __all__ = ( 'TraceConfig', 'TraceRequestStartParams', 'TraceRequestEndParams', 'TraceRequestExceptionParams', 'TraceConnectionQueuedStartParams', 
'TraceConnectionQueuedEndParams', 'TraceConnectionCreateStartParams', 'TraceConnectionCreateEndParams', 'TraceConnectionReuseconnParams', 'TraceDnsResolveHostStartParams', 'TraceDnsResolveHostEndParams', 'TraceDnsCacheHitParams', 'TraceDnsCacheMissParams', 'TraceRequestRedirectParams' ) class TraceConfig: """First-class used to trace requests launched via ClientSession objects.""" def __init__(self, trace_config_ctx_factory=SimpleNamespace): self._on_request_start = Signal(self) self._on_request_end = Signal(self) self._on_request_exception = Signal(self) self._on_request_redirect = Signal(self) self._on_connection_queued_start = Signal(self) self._on_connection_queued_end = Signal(self) self._on_connection_create_start = Signal(self) self._on_connection_create_end = Signal(self) self._on_connection_reuseconn = Signal(self) self._on_dns_resolvehost_start = Signal(self) self._on_dns_resolvehost_end = Signal(self) self._on_dns_cache_hit = Signal(self) self._on_dns_cache_miss = Signal(self) self._trace_config_ctx_factory = trace_config_ctx_factory def trace_config_ctx(self, trace_request_ctx=None): """ Return a new trace_config_ctx instance """ return self._trace_config_ctx_factory( trace_request_ctx=trace_request_ctx) def freeze(self): self._on_request_start.freeze() self._on_request_end.freeze() self._on_request_exception.freeze() self._on_request_redirect.freeze() self._on_connection_queued_start.freeze() self._on_connection_queued_end.freeze() self._on_connection_create_start.freeze() self._on_connection_create_end.freeze() self._on_connection_reuseconn.freeze() self._on_dns_resolvehost_start.freeze() self._on_dns_resolvehost_end.freeze() self._on_dns_cache_hit.freeze() self._on_dns_cache_miss.freeze() @property def on_request_start(self): return self._on_request_start @property def on_request_end(self): return self._on_request_end @property def on_request_exception(self): return self._on_request_exception @property def on_request_redirect(self): return 
self._on_request_redirect @property def on_connection_queued_start(self): return self._on_connection_queued_start @property def on_connection_queued_end(self): return self._on_connection_queued_end @property def on_connection_create_start(self): return self._on_connection_create_start @property def on_connection_create_end(self): return self._on_connection_create_end @property def on_connection_reuseconn(self): return self._on_connection_reuseconn @property def on_dns_resolvehost_start(self): return self._on_dns_resolvehost_start @property def on_dns_resolvehost_end(self): return self._on_dns_resolvehost_end @property def on_dns_cache_hit(self): return self._on_dns_cache_hit @property def on_dns_cache_miss(self): return self._on_dns_cache_miss @attr.s(frozen=True, slots=True) class TraceRequestStartParams: """ Parameters sent by the `on_request_start` signal""" method = attr.ib(type=str) url = attr.ib(type=URL) headers = attr.ib(type=CIMultiDict) @attr.s(frozen=True, slots=True) class TraceRequestEndParams: """ Parameters sent by the `on_request_end` signal""" method = attr.ib(type=str) url = attr.ib(type=URL) headers = attr.ib(type=CIMultiDict) response = attr.ib(type=ClientResponse) @attr.s(frozen=True, slots=True) class TraceRequestExceptionParams: """ Parameters sent by the `on_request_exception` signal""" method = attr.ib(type=str) url = attr.ib(type=URL) headers = attr.ib(type=CIMultiDict) exception = attr.ib(type=Exception) @attr.s(frozen=True, slots=True) class TraceRequestRedirectParams: """ Parameters sent by the `on_request_redirect` signal""" method = attr.ib(type=str) url = attr.ib(type=URL) headers = attr.ib(type=CIMultiDict) response = attr.ib(type=ClientResponse) @attr.s(frozen=True, slots=True) class TraceConnectionQueuedStartParams: """ Parameters sent by the `on_connection_queued_start` signal""" @attr.s(frozen=True, slots=True) class TraceConnectionQueuedEndParams: """ Parameters sent by the `on_connection_queued_end` signal""" 
@attr.s(frozen=True, slots=True) class TraceConnectionCreateStartParams: """ Parameters sent by the `on_connection_create_start` signal""" @attr.s(frozen=True, slots=True) class TraceConnectionCreateEndParams: """ Parameters sent by the `on_connection_create_end` signal""" @attr.s(frozen=True, slots=True) class TraceConnectionReuseconnParams: """ Parameters sent by the `on_connection_reuseconn` signal""" @attr.s(frozen=True, slots=True) class TraceDnsResolveHostStartParams: """ Parameters sent by the `on_dns_resolvehost_start` signal""" host = attr.ib(type=str) @attr.s(frozen=True, slots=True) class TraceDnsResolveHostEndParams: """ Parameters sent by the `on_dns_resolvehost_end` signal""" host = attr.ib(type=str) @attr.s(frozen=True, slots=True) class TraceDnsCacheHitParams: """ Parameters sent by the `on_dns_cache_hit` signal""" host = attr.ib(type=str) @attr.s(frozen=True, slots=True) class TraceDnsCacheMissParams: """ Parameters sent by the `on_dns_cache_miss` signal""" host = attr.ib(type=str) class Trace: """ Internal class used to keep together the main dependencies used at the moment of send a signal.""" def __init__(self, session, trace_config, trace_config_ctx): self._trace_config = trace_config self._trace_config_ctx = trace_config_ctx self._session = session async def send_request_start(self, method, url, headers): return await self._trace_config.on_request_start.send( self._session, self._trace_config_ctx, TraceRequestStartParams(method, url, headers) ) async def send_request_end(self, method, url, headers, response): return await self._trace_config.on_request_end.send( self._session, self._trace_config_ctx, TraceRequestEndParams(method, url, headers, response) ) async def send_request_exception(self, method, url, headers, exception): return await self._trace_config.on_request_exception.send( self._session, self._trace_config_ctx, TraceRequestExceptionParams(method, url, headers, exception) ) async def send_request_redirect(self, method, url, headers, 
response): return await self._trace_config._on_request_redirect.send( self._session, self._trace_config_ctx, TraceRequestRedirectParams(method, url, headers, response) ) async def send_connection_queued_start(self): return await self._trace_config.on_connection_queued_start.send( self._session, self._trace_config_ctx, TraceConnectionQueuedStartParams() ) async def send_connection_queued_end(self): return await self._trace_config.on_connection_queued_end.send( self._session, self._trace_config_ctx, TraceConnectionQueuedEndParams() ) async def send_connection_create_start(self): return await self._trace_config.on_connection_create_start.send( self._session, self._trace_config_ctx, TraceConnectionCreateStartParams() ) async def send_connection_create_end(self): return await self._trace_config.on_connection_create_end.send( self._session, self._trace_config_ctx, TraceConnectionCreateEndParams() ) async def send_connection_reuseconn(self): return await self._trace_config.on_connection_reuseconn.send( self._session, self._trace_config_ctx, TraceConnectionReuseconnParams() ) async def send_dns_resolvehost_start(self, host): return await self._trace_config.on_dns_resolvehost_start.send( self._session, self._trace_config_ctx, TraceDnsResolveHostStartParams(host) ) async def send_dns_resolvehost_end(self, host): return await self._trace_config.on_dns_resolvehost_end.send( self._session, self._trace_config_ctx, TraceDnsResolveHostEndParams(host) ) async def send_dns_cache_hit(self, host): return await self._trace_config.on_dns_cache_hit.send( self._session, self._trace_config_ctx, TraceDnsCacheHitParams(host) ) async def send_dns_cache_miss(self, host): return await self._trace_config.on_dns_cache_miss.send( self._session, self._trace_config_ctx, TraceDnsCacheMissParams(host) ) aiohttp-3.0.1/aiohttp/web.py0000666000000000000000000001603213240304665014121 0ustar 00000000000000import asyncio import socket import sys from argparse import ArgumentParser from collections import 
Iterable from importlib import import_module from . import (helpers, web_app, web_exceptions, web_fileresponse, web_middlewares, web_protocol, web_request, web_response, web_runner, web_server, web_urldispatcher, web_ws) from .log import access_logger from .web_app import Application # noqa from .web_exceptions import * # noqa from .web_fileresponse import * # noqa from .web_middlewares import * # noqa from .web_protocol import * # noqa from .web_request import * # noqa from .web_response import * # noqa from .web_runner import * # noqa from .web_runner import AppRunner, GracefulExit, SockSite, TCPSite, UnixSite from .web_server import * # noqa from .web_urldispatcher import * # noqa from .web_ws import * # noqa __all__ = (web_protocol.__all__ + web_app.__all__ + web_fileresponse.__all__ + web_request.__all__ + web_response.__all__ + web_exceptions.__all__ + web_urldispatcher.__all__ + web_ws.__all__ + web_server.__all__ + web_runner.__all__ + web_middlewares.__all__ + ('run_app',)) def run_app(app, *, host=None, port=None, path=None, sock=None, shutdown_timeout=60.0, ssl_context=None, print=print, backlog=128, access_log_class=helpers.AccessLogger, access_log_format=helpers.AccessLogger.LOG_FORMAT, access_log=access_logger, handle_signals=True, reuse_address=None, reuse_port=None): """Run an app locally""" loop = asyncio.get_event_loop() runner = AppRunner(app, handle_signals=handle_signals, access_log_class=access_log_class, access_log_format=access_log_format, access_log=access_log) loop.run_until_complete(runner.setup()) sites = [] try: if host is not None: if isinstance(host, (str, bytes, bytearray, memoryview)): sites.append(TCPSite(runner, host, port, shutdown_timeout=shutdown_timeout, ssl_context=ssl_context, backlog=backlog, reuse_address=reuse_address, reuse_port=reuse_port)) else: for h in host: sites.append(TCPSite(runner, h, port, shutdown_timeout=shutdown_timeout, ssl_context=ssl_context, backlog=backlog, reuse_address=reuse_address, 
reuse_port=reuse_port)) elif path is None and sock is None or port is not None: sites.append(TCPSite(runner, port=port, shutdown_timeout=shutdown_timeout, ssl_context=ssl_context, backlog=backlog, reuse_address=reuse_address, reuse_port=reuse_port)) if path is not None: if isinstance(path, (str, bytes, bytearray, memoryview)): sites.append(UnixSite(runner, path, shutdown_timeout=shutdown_timeout, ssl_context=ssl_context, backlog=backlog)) else: for p in path: sites.append(UnixSite(runner, p, shutdown_timeout=shutdown_timeout, ssl_context=ssl_context, backlog=backlog)) if sock is not None: if not isinstance(sock, Iterable): sites.append(SockSite(runner, sock, shutdown_timeout=shutdown_timeout, ssl_context=ssl_context, backlog=backlog)) else: for s in sock: sites.append(SockSite(runner, s, shutdown_timeout=shutdown_timeout, ssl_context=ssl_context, backlog=backlog)) for site in sites: loop.run_until_complete(site.start()) try: if print: # pragma: no branch names = sorted(str(s.name) for s in runner.sites) print("======== Running on {} ========\n" "(Press CTRL+C to quit)".format(', '.join(names))) loop.run_forever() except (GracefulExit, KeyboardInterrupt): # pragma: no cover pass finally: loop.run_until_complete(runner.cleanup()) if hasattr(loop, 'shutdown_asyncgens'): loop.run_until_complete(loop.shutdown_asyncgens()) loop.close() def main(argv): arg_parser = ArgumentParser( description="aiohttp.web Application server", prog="aiohttp.web" ) arg_parser.add_argument( "entry_func", help=("Callable returning the `aiohttp.web.Application` instance to " "run. Should be specified in the 'module:function' syntax."), metavar="entry-func" ) arg_parser.add_argument( "-H", "--hostname", help="TCP/IP hostname to serve on (default: %(default)r)", default="localhost" ) arg_parser.add_argument( "-P", "--port", help="TCP/IP port to serve on (default: %(default)r)", type=int, default="8080" ) arg_parser.add_argument( "-U", "--path", help="Unix file system path to serve on. 
Specifying a path will cause " "hostname and port arguments to be ignored.", ) args, extra_argv = arg_parser.parse_known_args(argv) # Import logic mod_str, _, func_str = args.entry_func.partition(":") if not func_str or not mod_str: arg_parser.error( "'entry-func' not in 'module:function' syntax" ) if mod_str.startswith("."): arg_parser.error("relative module names not supported") try: module = import_module(mod_str) except ImportError as ex: arg_parser.error("unable to import %s: %s" % (mod_str, ex)) try: func = getattr(module, func_str) except AttributeError: arg_parser.error("module %r has no attribute %r" % (mod_str, func_str)) # Compatibility logic if args.path is not None and not hasattr(socket, 'AF_UNIX'): arg_parser.error("file system paths not supported by your operating" " environment") app = func(extra_argv) run_app(app, host=args.hostname, port=args.port, path=args.path) arg_parser.exit(message="Stopped\n") if __name__ == "__main__": # pragma: no branch main(sys.argv[1:]) # pragma: no cover aiohttp-3.0.1/aiohttp/web_app.py0000666000000000000000000002360713240304665014767 0ustar 00000000000000import asyncio import warnings from collections import MutableMapping from functools import partial from . 
import hdrs from .abc import AbstractAccessLogger, AbstractMatchInfo, AbstractRouter from .frozenlist import FrozenList from .helpers import AccessLogger from .log import web_logger from .signals import Signal from .web_middlewares import _fix_request_current_app from .web_request import Request from .web_response import StreamResponse from .web_server import Server from .web_urldispatcher import PrefixedSubAppResource, UrlDispatcher __all__ = ('Application',) class Application(MutableMapping): ATTRS = frozenset([ 'logger', '_debug', '_router', '_loop', '_handler_args', '_middlewares', '_middlewares_handlers', '_run_middlewares', '_state', '_frozen', '_subapps', '_on_response_prepare', '_on_startup', '_on_shutdown', '_on_cleanup', '_client_max_size']) def __init__(self, *, logger=web_logger, router=None, middlewares=(), handler_args=None, client_max_size=1024**2, loop=None, debug=...): if router is None: router = UrlDispatcher() assert isinstance(router, AbstractRouter), router if loop is not None: warnings.warn("loop argument is deprecated", DeprecationWarning, stacklevel=2) self._debug = debug self._router = router self._loop = loop self._handler_args = handler_args self.logger = logger self._middlewares = FrozenList(middlewares) self._middlewares_handlers = None # initialized on freezing self._run_middlewares = None # initialized on freezing self._state = {} self._frozen = False self._subapps = [] self._on_response_prepare = Signal(self) self._on_startup = Signal(self) self._on_shutdown = Signal(self) self._on_cleanup = Signal(self) self._client_max_size = client_max_size def __init_subclass__(cls): warnings.warn("Inheritance class {} from web.Application " "is discouraged".format(cls.__name__), DeprecationWarning, stacklevel=2) def __setattr__(self, name, val): if name not in self.ATTRS: warnings.warn("Setting custom web.Application.{} attribute " "is discouraged".format(name), DeprecationWarning, stacklevel=2) super().__setattr__(name, val) # MutableMapping 
API def __eq__(self, other): return self is other def __getitem__(self, key): return self._state[key] def _check_frozen(self): if self._frozen: warnings.warn("Changing state of started or joined " "application is deprecated", DeprecationWarning, stacklevel=3) def __setitem__(self, key, value): self._check_frozen() self._state[key] = value def __delitem__(self, key): self._check_frozen() del self._state[key] def __len__(self): return len(self._state) def __iter__(self): return iter(self._state) ######## @property def loop(self): return self._loop def _set_loop(self, loop): if loop is None: loop = asyncio.get_event_loop() if self._loop is not None and self._loop is not loop: raise RuntimeError( "web.Application instance initialized with different loop") self._loop = loop # set loop debug if self._debug is ...: self._debug = loop.get_debug() # set loop to sub applications for subapp in self._subapps: subapp._set_loop(loop) @property def frozen(self): return self._frozen def freeze(self): if self._frozen: return self._frozen = True self._middlewares.freeze() self._router.freeze() self._on_response_prepare.freeze() self._on_startup.freeze() self._on_shutdown.freeze() self._on_cleanup.freeze() self._middlewares_handlers = tuple(self._prepare_middleware()) # If current app and any subapp do not have middlewares avoid run all # of the code footprint that it implies, which have a middleware # hardcoded per app that sets up the current_app attribute. If no # middlewares are configured the handler will receive the proper # current_app without needing all of this code. 
self._run_middlewares = True if self.middlewares else False for subapp in self._subapps: subapp.freeze() self._run_middlewares =\ self._run_middlewares or subapp._run_middlewares @property def debug(self): return self._debug def _reg_subapp_signals(self, subapp): def reg_handler(signame): subsig = getattr(subapp, signame) async def handler(app): await subsig.send(subapp) appsig = getattr(self, signame) appsig.append(handler) reg_handler('on_startup') reg_handler('on_shutdown') reg_handler('on_cleanup') def add_subapp(self, prefix, subapp): if self.frozen: raise RuntimeError( "Cannot add sub application to frozen application") if subapp.frozen: raise RuntimeError("Cannot add frozen application") if prefix.endswith('/'): prefix = prefix[:-1] if prefix in ('', '/'): raise ValueError("Prefix cannot be empty") resource = PrefixedSubAppResource(prefix, subapp) self.router.register_resource(resource) self._reg_subapp_signals(subapp) self._subapps.append(subapp) subapp.freeze() if self._loop is not None: subapp._set_loop(self._loop) return resource @property def on_response_prepare(self): return self._on_response_prepare @property def on_startup(self): return self._on_startup @property def on_shutdown(self): return self._on_shutdown @property def on_cleanup(self): return self._on_cleanup @property def router(self): return self._router @property def middlewares(self): return self._middlewares def make_handler(self, *, loop=None, access_log_class=AccessLogger, **kwargs): if not issubclass(access_log_class, AbstractAccessLogger): raise TypeError( 'access_log_class must be subclass of ' 'aiohttp.abc.AbstractAccessLogger, got {}'.format( access_log_class)) self._set_loop(loop) self.freeze() kwargs['debug'] = self.debug if self._handler_args: for k, v in self._handler_args.items(): kwargs[k] = v return Server(self._handle, request_factory=self._make_request, access_log_class=access_log_class, loop=self.loop, **kwargs) async def startup(self): """Causes on_startup signal Should 
be called in the event loop along with the request handler. """ await self.on_startup.send(self) async def shutdown(self): """Causes on_shutdown signal Should be called before cleanup() """ await self.on_shutdown.send(self) async def cleanup(self): """Causes on_cleanup signal Should be called after shutdown() """ await self.on_cleanup.send(self) def _make_request(self, message, payload, protocol, writer, task, _cls=Request): return _cls( message, payload, protocol, writer, task, self._loop, client_max_size=self._client_max_size) def _prepare_middleware(self): for m in reversed(self._middlewares): if getattr(m, '__middleware_version__', None) == 1: yield m, True else: warnings.warn('old-style middleware "{!r}" deprecated, ' 'see #2252'.format(m), DeprecationWarning, stacklevel=2) yield m, False yield _fix_request_current_app(self), True async def _handle(self, request): match_info = await self._router.resolve(request) assert isinstance(match_info, AbstractMatchInfo), match_info match_info.add_app(self) if __debug__: match_info.freeze() resp = None request._match_info = match_info expect = request.headers.get(hdrs.EXPECT) if expect: resp = await match_info.expect_handler(request) await request.writer.drain() if resp is None: handler = match_info.handler if self._run_middlewares: for app in match_info.apps[::-1]: for m, new_style in app._middlewares_handlers: if new_style: handler = partial(m, handler=handler) else: handler = await m(app, handler) resp = await handler(request) assert isinstance(resp, StreamResponse), \ ("Handler {!r} should return response instance, " "got {!r} [middlewares {!r}]").format( match_info.handler, type(resp), [middleware for app in match_info.apps for middleware in app.middlewares]) return resp def __call__(self): """gunicorn compatibility""" return self def __repr__(self): return "".format(id(self)) aiohttp-3.0.1/aiohttp/web_exceptions.py0000666000000000000000000002060613240304665016364 0ustar 00000000000000from .web_response import 
Response __all__ = ( 'HTTPException', 'HTTPError', 'HTTPRedirection', 'HTTPSuccessful', 'HTTPOk', 'HTTPCreated', 'HTTPAccepted', 'HTTPNonAuthoritativeInformation', 'HTTPNoContent', 'HTTPResetContent', 'HTTPPartialContent', 'HTTPMultipleChoices', 'HTTPMovedPermanently', 'HTTPFound', 'HTTPSeeOther', 'HTTPNotModified', 'HTTPUseProxy', 'HTTPTemporaryRedirect', 'HTTPPermanentRedirect', 'HTTPClientError', 'HTTPBadRequest', 'HTTPUnauthorized', 'HTTPPaymentRequired', 'HTTPForbidden', 'HTTPNotFound', 'HTTPMethodNotAllowed', 'HTTPNotAcceptable', 'HTTPProxyAuthenticationRequired', 'HTTPRequestTimeout', 'HTTPConflict', 'HTTPGone', 'HTTPLengthRequired', 'HTTPPreconditionFailed', 'HTTPRequestEntityTooLarge', 'HTTPRequestURITooLong', 'HTTPUnsupportedMediaType', 'HTTPRequestRangeNotSatisfiable', 'HTTPExpectationFailed', 'HTTPMisdirectedRequest', 'HTTPUnprocessableEntity', 'HTTPFailedDependency', 'HTTPUpgradeRequired', 'HTTPPreconditionRequired', 'HTTPTooManyRequests', 'HTTPRequestHeaderFieldsTooLarge', 'HTTPUnavailableForLegalReasons', 'HTTPServerError', 'HTTPInternalServerError', 'HTTPNotImplemented', 'HTTPBadGateway', 'HTTPServiceUnavailable', 'HTTPGatewayTimeout', 'HTTPVersionNotSupported', 'HTTPVariantAlsoNegotiates', 'HTTPInsufficientStorage', 'HTTPNotExtended', 'HTTPNetworkAuthenticationRequired', ) ############################################################ # HTTP Exceptions ############################################################ class HTTPException(Response, Exception): # You should set in subclasses: # status = 200 status_code = None empty_body = False def __init__(self, *, headers=None, reason=None, body=None, text=None, content_type=None): Response.__init__(self, status=self.status_code, headers=headers, reason=reason, body=body, text=text, content_type=content_type) Exception.__init__(self, self.reason) if self.body is None and not self.empty_body: self.text = "{}: {}".format(self.status, self.reason) class HTTPError(HTTPException): """Base class for exceptions 
with status codes in the 400s and 500s.""" class HTTPRedirection(HTTPException): """Base class for exceptions with status codes in the 300s.""" class HTTPSuccessful(HTTPException): """Base class for exceptions with status codes in the 200s.""" class HTTPOk(HTTPSuccessful): status_code = 200 class HTTPCreated(HTTPSuccessful): status_code = 201 class HTTPAccepted(HTTPSuccessful): status_code = 202 class HTTPNonAuthoritativeInformation(HTTPSuccessful): status_code = 203 class HTTPNoContent(HTTPSuccessful): status_code = 204 empty_body = True class HTTPResetContent(HTTPSuccessful): status_code = 205 empty_body = True class HTTPPartialContent(HTTPSuccessful): status_code = 206 ############################################################ # 3xx redirection ############################################################ class _HTTPMove(HTTPRedirection): def __init__(self, location, *, headers=None, reason=None, body=None, text=None, content_type=None): if not location: raise ValueError("HTTP redirects need a location to redirect to.") super().__init__(headers=headers, reason=reason, body=body, text=text, content_type=content_type) self.headers['Location'] = str(location) self.location = location class HTTPMultipleChoices(_HTTPMove): status_code = 300 class HTTPMovedPermanently(_HTTPMove): status_code = 301 class HTTPFound(_HTTPMove): status_code = 302 # This one is safe after a POST (the redirected location will be # retrieved with GET): class HTTPSeeOther(_HTTPMove): status_code = 303 class HTTPNotModified(HTTPRedirection): # FIXME: this should include a date or etag header status_code = 304 empty_body = True class HTTPUseProxy(_HTTPMove): # Not a move, but looks a little like one status_code = 305 class HTTPTemporaryRedirect(_HTTPMove): status_code = 307 class HTTPPermanentRedirect(_HTTPMove): status_code = 308 ############################################################ # 4xx client error ############################################################ class 
HTTPClientError(HTTPError): pass class HTTPBadRequest(HTTPClientError): status_code = 400 class HTTPUnauthorized(HTTPClientError): status_code = 401 class HTTPPaymentRequired(HTTPClientError): status_code = 402 class HTTPForbidden(HTTPClientError): status_code = 403 class HTTPNotFound(HTTPClientError): status_code = 404 class HTTPMethodNotAllowed(HTTPClientError): status_code = 405 def __init__(self, method, allowed_methods, *, headers=None, reason=None, body=None, text=None, content_type=None): allow = ','.join(sorted(allowed_methods)) super().__init__(headers=headers, reason=reason, body=body, text=text, content_type=content_type) self.headers['Allow'] = allow self.allowed_methods = allowed_methods self.method = method.upper() class HTTPNotAcceptable(HTTPClientError): status_code = 406 class HTTPProxyAuthenticationRequired(HTTPClientError): status_code = 407 class HTTPRequestTimeout(HTTPClientError): status_code = 408 class HTTPConflict(HTTPClientError): status_code = 409 class HTTPGone(HTTPClientError): status_code = 410 class HTTPLengthRequired(HTTPClientError): status_code = 411 class HTTPPreconditionFailed(HTTPClientError): status_code = 412 class HTTPRequestEntityTooLarge(HTTPClientError): status_code = 413 class HTTPRequestURITooLong(HTTPClientError): status_code = 414 class HTTPUnsupportedMediaType(HTTPClientError): status_code = 415 class HTTPRequestRangeNotSatisfiable(HTTPClientError): status_code = 416 class HTTPExpectationFailed(HTTPClientError): status_code = 417 class HTTPMisdirectedRequest(HTTPClientError): status_code = 421 class HTTPUnprocessableEntity(HTTPClientError): status_code = 422 class HTTPFailedDependency(HTTPClientError): status_code = 424 class HTTPUpgradeRequired(HTTPClientError): status_code = 426 class HTTPPreconditionRequired(HTTPClientError): status_code = 428 class HTTPTooManyRequests(HTTPClientError): status_code = 429 class HTTPRequestHeaderFieldsTooLarge(HTTPClientError): status_code = 431 class 
HTTPUnavailableForLegalReasons(HTTPClientError): status_code = 451 def __init__(self, link, *, headers=None, reason=None, body=None, text=None, content_type=None): super().__init__(headers=headers, reason=reason, body=body, text=text, content_type=content_type) self.headers['Link'] = '<%s>; rel="blocked-by"' % link self.link = link ############################################################ # 5xx Server Error ############################################################ # Response status codes beginning with the digit "5" indicate cases in # which the server is aware that it has erred or is incapable of # performing the request. Except when responding to a HEAD request, the # server SHOULD include an entity containing an explanation of the error # situation, and whether it is a temporary or permanent condition. User # agents SHOULD display any included entity to the user. These response # codes are applicable to any request method. class HTTPServerError(HTTPError): pass class HTTPInternalServerError(HTTPServerError): status_code = 500 class HTTPNotImplemented(HTTPServerError): status_code = 501 class HTTPBadGateway(HTTPServerError): status_code = 502 class HTTPServiceUnavailable(HTTPServerError): status_code = 503 class HTTPGatewayTimeout(HTTPServerError): status_code = 504 class HTTPVersionNotSupported(HTTPServerError): status_code = 505 class HTTPVariantAlsoNegotiates(HTTPServerError): status_code = 506 class HTTPInsufficientStorage(HTTPServerError): status_code = 507 class HTTPNotExtended(HTTPServerError): status_code = 510 class HTTPNetworkAuthenticationRequired(HTTPServerError): status_code = 511 aiohttp-3.0.1/aiohttp/web_fileresponse.py0000666000000000000000000001701713240304665016703 0ustar 00000000000000import mimetypes import os import pathlib from . 
import hdrs from .helpers import set_exception, set_result from .http_writer import StreamWriter from .log import server_logger from .web_exceptions import (HTTPNotModified, HTTPOk, HTTPPartialContent, HTTPRequestRangeNotSatisfiable) from .web_response import StreamResponse __all__ = ('FileResponse',) NOSENDFILE = bool(os.environ.get("AIOHTTP_NOSENDFILE")) class SendfileStreamWriter(StreamWriter): def __init__(self, *args, **kwargs): self._sendfile_buffer = [] super().__init__(*args, **kwargs) def _write(self, chunk): # we overwrite StreamWriter._write, so nothing can be appended to # _buffer, and nothing is written to the transport directly by the # parent class self.output_size += len(chunk) self._sendfile_buffer.append(chunk) def _sendfile_cb(self, fut, out_fd, in_fd, offset, count, loop, registered): if registered: loop.remove_writer(out_fd) if fut.cancelled(): return try: n = os.sendfile(out_fd, in_fd, offset, count) if n == 0: # EOF reached n = count except (BlockingIOError, InterruptedError): n = 0 except Exception as exc: set_exception(fut, exc) return if n < count: loop.add_writer(out_fd, self._sendfile_cb, fut, out_fd, in_fd, offset + n, count - n, loop, True) else: set_result(fut, None) async def sendfile(self, fobj, count): out_socket = self.transport.get_extra_info('socket').dup() out_socket.setblocking(False) out_fd = out_socket.fileno() in_fd = fobj.fileno() offset = fobj.tell() loop = self.loop data = b''.join(self._sendfile_buffer) try: await loop.sock_sendall(out_socket, data) fut = loop.create_future() self._sendfile_cb(fut, out_fd, in_fd, offset, count, loop, False) await fut except Exception: server_logger.debug('Socket error') self.transport.close() finally: out_socket.close() self.output_size += count await super().write_eof() async def write_eof(self, chunk=b''): pass class FileResponse(StreamResponse): """A response object can be used to send files.""" def __init__(self, path, chunk_size=256*1024, *args, **kwargs): super().__init__(*args, 
**kwargs) if isinstance(path, str): path = pathlib.Path(path) self._path = path self._chunk_size = chunk_size async def _sendfile_system(self, request, fobj, count): # Write count bytes of fobj to resp using # the os.sendfile system call. # # For details check # https://github.com/KeepSafe/aiohttp/issues/1177 # See https://github.com/KeepSafe/aiohttp/issues/958 for details # # request should be a aiohttp.web.Request instance. # fobj should be an open file object. # count should be an integer > 0. transport = request.transport if (transport.get_extra_info("sslcontext") or transport.get_extra_info("socket") is None): writer = await self._sendfile_fallback(request, fobj, count) else: writer = SendfileStreamWriter( request.protocol, transport, request.loop ) request._payload_writer = writer await super().prepare(request) await writer.sendfile(fobj, count) return writer async def _sendfile_fallback(self, request, fobj, count): # Mimic the _sendfile_system() method, but without using the # os.sendfile() system call. This should be used on systems # that don't support the os.sendfile(). # To avoid blocking the event loop & to keep memory usage low, # fobj is transferred in chunks controlled by the # constructor's chunk_size argument. 
writer = (await super().prepare(request)) chunk_size = self._chunk_size chunk = fobj.read(chunk_size) while True: await writer.write(chunk) count = count - chunk_size if count <= 0: break chunk = fobj.read(min(chunk_size, count)) await writer.drain() return writer if hasattr(os, "sendfile") and not NOSENDFILE: # pragma: no cover _sendfile = _sendfile_system else: # pragma: no cover _sendfile = _sendfile_fallback async def prepare(self, request): filepath = self._path gzip = False if 'gzip' in request.headers.get(hdrs.ACCEPT_ENCODING, ''): gzip_path = filepath.with_name(filepath.name + '.gz') if gzip_path.is_file(): filepath = gzip_path gzip = True st = filepath.stat() modsince = request.if_modified_since if modsince is not None and st.st_mtime <= modsince.timestamp(): self.set_status(HTTPNotModified.status_code) self._length_check = False return await super().prepare(request) if hdrs.CONTENT_TYPE not in self.headers: ct, encoding = mimetypes.guess_type(str(filepath)) if not ct: ct = 'application/octet-stream' should_set_ct = True else: encoding = 'gzip' if gzip else None should_set_ct = False status = HTTPOk.status_code file_size = st.st_size count = file_size try: rng = request.http_range start = rng.start end = rng.stop except ValueError: self.set_status(HTTPRequestRangeNotSatisfiable.status_code) return await super().prepare(request) # If a range request has been made, convert start, end slice notation # into file pointer offset and count if start is not None or end is not None: if start is None and end < 0: # return tail of file start = file_size + end count = -end else: count = (end or file_size) - start if start + count > file_size: # rfc7233:If the last-byte-pos value is # absent, or if the value is greater than or equal to # the current length of the representation data, # the byte range is interpreted as the remainder # of the representation (i.e., the server replaces the # value of last-byte-pos with a value that is one less than # the current length of 
the selected representation). count = file_size - start if start >= file_size: count = 0 if count != file_size: status = HTTPPartialContent.status_code self.set_status(status) if should_set_ct: self.content_type = ct if encoding: self.headers[hdrs.CONTENT_ENCODING] = encoding if gzip: self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING self.last_modified = st.st_mtime self.content_length = count if count: with filepath.open('rb') as fobj: if start: fobj.seek(start) return await self._sendfile(request, fobj, count) return await super().prepare(request) aiohttp-3.0.1/aiohttp/web_middlewares.py0000666000000000000000000000514313240304665016502 0ustar 00000000000000import re from aiohttp.web_exceptions import HTTPMovedPermanently from aiohttp.web_urldispatcher import SystemRoute __all__ = ( 'middleware', 'normalize_path_middleware', ) async def _check_request_resolves(request, path): alt_request = request.clone(rel_url=path) match_info = await request.app.router.resolve(alt_request) alt_request._match_info = match_info if not isinstance(match_info.route, SystemRoute): return True, alt_request return False, request def middleware(f): f.__middleware_version__ = 1 return f def normalize_path_middleware( *, append_slash=True, merge_slashes=True, redirect_class=HTTPMovedPermanently): """ Middleware that normalizes the path of a request. By normalizing it means: - Add a trailing slash to the path. - Double slashes are replaced by one. The middleware returns as soon as it finds a path that resolves correctly. The order if all enable is 1) merge_slashes, 2) append_slash and 3) both merge_slashes and append_slash. If the path resolves with at least one of those conditions, it will redirect to the new path. If append_slash is True append slash when needed. If a resource is defined with trailing slash and the request comes without it, it will append it automatically. If merge_slashes is True, merge multiple consecutive slashes in the path into one. 
""" @middleware async def impl(request, handler): if isinstance(request.match_info.route, SystemRoute): paths_to_check = [] if '?' in request.raw_path: path, query = request.raw_path.split('?', 1) query = '?' + query else: query = '' path = request.raw_path if merge_slashes: paths_to_check.append(re.sub('//+', '/', path)) if append_slash and not request.path.endswith('/'): paths_to_check.append(path + '/') if merge_slashes and append_slash: paths_to_check.append( re.sub('//+', '/', path + '/')) for path in paths_to_check: resolves, request = await _check_request_resolves( request, path) if resolves: raise redirect_class(request.path + query) return await handler(request) return impl def _fix_request_current_app(app): @middleware async def impl(request, handler): with request.match_info.set_current_app(app): return await handler(request) return impl aiohttp-3.0.1/aiohttp/web_protocol.py0000666000000000000000000004250213240304665016043 0ustar 00000000000000import asyncio import asyncio.streams import http.server import traceback import warnings from collections import deque from contextlib import suppress from html import escape as html_escape import yarl from . import helpers, http from .helpers import CeilTimeout from .http import HttpProcessingError, HttpRequestParser, StreamWriter from .log import access_logger, server_logger from .streams import EMPTY_PAYLOAD from .tcp_helpers import tcp_cork, tcp_keepalive, tcp_nodelay from .web_exceptions import HTTPException from .web_request import BaseRequest from .web_response import Response __all__ = ('RequestHandler', 'RequestPayloadError') ERROR = http.RawRequestMessage( 'UNKNOWN', '/', http.HttpVersion10, {}, {}, True, False, False, False, yarl.URL('/')) class RequestPayloadError(Exception): """Payload parsing error.""" class RequestHandler(asyncio.streams.FlowControlMixin, asyncio.Protocol): """HTTP protocol implementation. RequestHandler handles incoming HTTP request. 
It reads request line, request headers and request payload and calls handle_request() method. By default it always returns with 404 response. RequestHandler handles errors in incoming request, like bad status line, bad headers or incomplete payload. If any error occurs, connection gets closed. :param keepalive_timeout: number of seconds before closing keep-alive connection :type keepalive_timeout: int or None :param bool tcp_keepalive: TCP keep-alive is on, default is on :param bool debug: enable debug mode :param logger: custom logger object :type logger: aiohttp.log.server_logger :param access_log_class: custom class for access_logger :type access_log_class: aiohttp.abc.AbstractAccessLogger :param access_log: custom logging object :type access_log: aiohttp.log.server_logger :param str access_log_format: access log format string :param loop: Optional event loop :param int max_line_size: Optional maximum header line size :param int max_field_size: Optional maximum header field size :param int max_headers: Optional maximum header size """ _request_count = 0 _keepalive = False # keep transport open KEEPALIVE_RESCHEDULE_DELAY = 1 def __init__(self, manager, *, loop=None, keepalive_timeout=75, # NGINX default value is 75 secs tcp_keepalive=True, logger=server_logger, access_log_class=helpers.AccessLogger, access_log=access_logger, access_log_format=helpers.AccessLogger.LOG_FORMAT, debug=False, max_line_size=8190, max_headers=32768, max_field_size=8190, lingering_time=10.0): super().__init__(loop=loop) self._loop = loop if loop is not None else asyncio.get_event_loop() self._manager = manager self._request_handler = manager.request_handler self._request_factory = manager.request_factory self._tcp_keepalive = tcp_keepalive self._keepalive_time = None self._keepalive_handle = None self._keepalive_timeout = keepalive_timeout self._lingering_time = float(lingering_time) self._messages = deque() self._message_tail = b'' self._waiter = None self._error_handler = None 
self._task_handler = self._loop.create_task(self.start()) self._upgrade = False self._payload_parser = None self._request_parser = HttpRequestParser( self, loop, max_line_size=max_line_size, max_field_size=max_field_size, max_headers=max_headers, payload_exception=RequestPayloadError) self.transport = None self._reading_paused = False self.logger = logger self.debug = debug self.access_log = access_log if access_log: self.access_logger = access_log_class( access_log, access_log_format) else: self.access_logger = None self._close = False self._force_close = False def __repr__(self): return "<{} {}>".format( self.__class__.__name__, 'connected' if self.transport is not None else 'disconnected') @property def keepalive_timeout(self): return self._keepalive_timeout async def shutdown(self, timeout=15.0): """Worker process is about to exit, we need cleanup everything and stop accepting requests. It is especially important for keep-alive connections.""" self._force_close = True if self._keepalive_handle is not None: self._keepalive_handle.cancel() if self._waiter: self._waiter.cancel() # wait for handlers with suppress(asyncio.CancelledError, asyncio.TimeoutError): with CeilTimeout(timeout, loop=self._loop): if self._error_handler and not self._error_handler.done(): await self._error_handler if self._task_handler and not self._task_handler.done(): await self._task_handler # force-close non-idle handler if self._task_handler: self._task_handler.cancel() if self.transport is not None: self.transport.close() self.transport = None def connection_made(self, transport): super().connection_made(transport) self.transport = transport if self._tcp_keepalive: tcp_keepalive(transport) tcp_cork(transport, False) tcp_nodelay(transport, True) self._manager.connection_made(self, transport) def connection_lost(self, exc): self._manager.connection_lost(self, exc) super().connection_lost(exc) self._manager = None self._force_close = True self._request_factory = None self._request_handler = 
None self._request_parser = None self.transport = None if self._keepalive_handle is not None: self._keepalive_handle.cancel() if self._task_handler: self._task_handler.cancel() if self._error_handler is not None: self._error_handler.cancel() self._task_handler = None if self._payload_parser is not None: self._payload_parser.feed_eof() self._payload_parser = None def set_parser(self, parser): assert self._payload_parser is None self._payload_parser = parser if self._message_tail: self._payload_parser.feed_data(self._message_tail) self._message_tail = b'' def eof_received(self): pass def data_received(self, data): if self._force_close or self._close: return # parse http messages if self._payload_parser is None and not self._upgrade: try: messages, upgraded, tail = self._request_parser.feed_data(data) except HttpProcessingError as exc: # something happened during parsing self._error_handler = self._loop.create_task( self.handle_parse_error( StreamWriter(self, self.transport, self._loop), 400, exc, exc.message)) self.close() except Exception as exc: # 500: internal error self._error_handler = self._loop.create_task( self.handle_parse_error( StreamWriter(self, self.transport, self._loop), 500, exc)) self.close() else: for (msg, payload) in messages: self._request_count += 1 self._messages.append((msg, payload)) if self._waiter: self._waiter.set_result(None) self._upgraded = upgraded if upgraded and tail: self._message_tail = tail # no parser, just store elif self._payload_parser is None and self._upgrade and data: self._message_tail += data # feed payload elif data: eof, tail = self._payload_parser.feed_data(data) if eof: self.close() def keep_alive(self, val): """Set keep-alive connection mode. :param bool val: new state. 
""" self._keepalive = val if self._keepalive_handle: self._keepalive_handle.cancel() self._keepalive_handle = None def close(self): """Stop accepting new pipelinig messages and close connection when handlers done processing messages""" self._close = True if self._waiter: self._waiter.cancel() def force_close(self, send_last_heartbeat=False): """Force close connection""" self._force_close = True if self._waiter: self._waiter.cancel() if self.transport is not None: if send_last_heartbeat: self.transport.write(b"\r\n") self.transport.close() self.transport = None def log_access(self, request, response, time): if self.access_logger is not None: self.access_logger.log(request, response, time) def log_debug(self, *args, **kw): if self.debug: self.logger.debug(*args, **kw) def log_exception(self, *args, **kw): self.logger.exception(*args, **kw) def _process_keepalive(self): if self._force_close or not self._keepalive: return next = self._keepalive_time + self._keepalive_timeout # handler in idle state if self._waiter: if self._loop.time() > next: self.force_close(send_last_heartbeat=True) return # not all request handlers are done, # reschedule itself to next second self._keepalive_handle = self._loop.call_later( self.KEEPALIVE_RESCHEDULE_DELAY, self._process_keepalive) def pause_reading(self): if not self._reading_paused: try: self.transport.pause_reading() except (AttributeError, NotImplementedError, RuntimeError): pass self._reading_paused = True def resume_reading(self): if self._reading_paused: try: self.transport.resume_reading() except (AttributeError, NotImplementedError, RuntimeError): pass self._reading_paused = False async def start(self): """Process incoming request. It reads request line, request headers and request payload, then calls handle_request() method. Subclass has to override handle_request(). start() handles various exceptions in request or response handling. Connection is being closed always unless keep_alive(True) specified. 
""" loop = self._loop handler = self._task_handler manager = self._manager keepalive_timeout = self._keepalive_timeout while not self._force_close: if not self._messages: try: # wait for next request self._waiter = loop.create_future() await self._waiter except asyncio.CancelledError: break finally: self._waiter = None message, payload = self._messages.popleft() if self.access_log: now = loop.time() manager.requests_count += 1 writer = StreamWriter(self, self.transport, loop) request = self._request_factory( message, payload, self, writer, handler) try: try: resp = await self._request_handler(request) except HTTPException as exc: resp = exc except asyncio.CancelledError: self.log_debug('Ignored premature client disconnection') break except asyncio.TimeoutError: self.log_debug('Request handler timed out.') resp = self.handle_error(request, 504) except Exception as exc: resp = self.handle_error(request, 500, exc) else: # Deprecation warning (See #2415) if isinstance(resp, HTTPException): warnings.warn( "returning HTTPException object is deprecated " "(#2415) and will be removed, " "please raise the exception instead", DeprecationWarning) await resp.prepare(request) await resp.write_eof() # notify server about keep-alive self._keepalive = resp.keep_alive # log access if self.access_log: self.log_access(request, resp, loop.time() - now) # check payload if not payload.is_eof(): lingering_time = self._lingering_time if not self._force_close and lingering_time: self.log_debug( 'Start lingering close timer for %s sec.', lingering_time) now = loop.time() end_t = now + lingering_time with suppress( asyncio.TimeoutError, asyncio.CancelledError): while not payload.is_eof() and now < end_t: timeout = min(end_t - now, lingering_time) with CeilTimeout(timeout, loop=loop): # read and ignore await payload.readany() now = loop.time() # if payload still uncompleted if not payload.is_eof() and not self._force_close: self.log_debug('Uncompleted request.') self.close() except 
asyncio.CancelledError: self.log_debug('Ignored premature client disconnection ') break except RuntimeError as exc: if self.debug: self.log_exception( 'Unhandled runtime exception', exc_info=exc) self.force_close() except Exception as exc: self.log_exception('Unhandled exception', exc_info=exc) self.force_close() finally: if self.transport is None: self.log_debug('Ignored premature client disconnection.') elif not self._force_close: if self._keepalive and not self._close: # start keep-alive timer if keepalive_timeout is not None: now = self._loop.time() self._keepalive_time = now if self._keepalive_handle is None: self._keepalive_handle = loop.call_at( now + keepalive_timeout, self._process_keepalive) else: break # remove handler, close transport if no handlers left if not self._force_close: self._task_handler = None if self.transport is not None and self._error_handler is None: self.transport.close() def handle_error(self, request, status=500, exc=None, message=None): """Handle errors. Returns HTTP response with specific status code. Logs additional information. It always closes current connection.""" self.log_exception("Error handling request", exc_info=exc) if status == 500: msg = "

500 Internal Server Error

" if self.debug: with suppress(Exception): tb = traceback.format_exc() tb = html_escape(tb) msg += '

Traceback:

\n
'
                    msg += tb
                    msg += '
' else: msg += "Server got itself in trouble" msg = ("500 Internal Server Error" "" + msg + "") else: msg = message resp = Response(status=status, text=msg, content_type='text/html') resp.force_close() # some data already got sent, connection is broken if request.writer.output_size > 0 or self.transport is None: self.force_close() return resp async def handle_parse_error(self, writer, status, exc=None, message=None): request = BaseRequest( ERROR, EMPTY_PAYLOAD, self, writer, None, self._loop) resp = self.handle_error(request, status, exc, message) await resp.prepare(request) await resp.write_eof() if self.transport is not None: self.transport.close() self._error_handler = None aiohttp-3.0.1/aiohttp/web_request.py0000666000000000000000000005066313240304665015701 0ustar 00000000000000import asyncio import collections import datetime import io import json import re import socket import string import tempfile import types import warnings from email.utils import parsedate from http.cookies import SimpleCookie from types import MappingProxyType from urllib.parse import parse_qsl import attr from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy from yarl import URL from . 
import hdrs, multipart from .helpers import HeadersMixin, reify, sentinel from .streams import EmptyStreamReader from .web_exceptions import HTTPRequestEntityTooLarge __all__ = ('BaseRequest', 'FileField', 'Request') @attr.s(frozen=True, slots=True) class FileField: name = attr.ib(type=str) filename = attr.ib(type=str) file = attr.ib(type=io.BufferedReader) content_type = attr.ib(type=str) headers = attr.ib(type=CIMultiDictProxy) _TCHAR = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-" # '-' at the end to prevent interpretation as range in a char class _TOKEN = r'[{tchar}]+'.format(tchar=_TCHAR) _QDTEXT = r'[{}]'.format( r''.join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F)))) # qdtext includes 0x5C to escape 0x5D ('\]') # qdtext excludes obs-text (because obsoleted, and encoding not specified) _QUOTED_PAIR = r'\\[\t !-~]' _QUOTED_STRING = r'"(?:{quoted_pair}|{qdtext})*"'.format( qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR) _FORWARDED_PAIR = ( r'({token})=({token}|{quoted_string})'.format( token=_TOKEN, quoted_string=_QUOTED_STRING)) _QUOTED_PAIR_REPLACE_RE = re.compile(r'\\([\t !-~])') # same pattern as _QUOTED_PAIR but contains a capture group _FORWARDED_PAIR_RE = re.compile(_FORWARDED_PAIR) ############################################################ # HTTP Request ############################################################ class BaseRequest(collections.MutableMapping, HeadersMixin): POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT, hdrs.METH_TRACE, hdrs.METH_DELETE} ATTRS = HeadersMixin.ATTRS | frozenset([ '_message', '_protocol', '_payload_writer', '_payload', '_headers', '_method', '_version', '_rel_url', '_post', '_read_bytes', '_state', '_cache', '_task', '_client_max_size', '_loop']) def __init__(self, message, payload, protocol, payload_writer, task, loop, *, client_max_size=1024**2, state=None, scheme=None, host=None, remote=None): if state is None: state = {} self._message = message self._protocol = protocol 
self._payload_writer = payload_writer self._payload = payload self._headers = message.headers self._method = message.method self._version = message.version self._rel_url = message.url self._post = None self._read_bytes = None self._state = state self._cache = {} self._task = task self._client_max_size = client_max_size self._loop = loop if scheme is not None: self._cache['scheme'] = scheme if host is not None: self._cache['host'] = host if remote is not None: self._cache['remote'] = remote def clone(self, *, method=sentinel, rel_url=sentinel, headers=sentinel, scheme=sentinel, host=sentinel, remote=sentinel): """Clone itself with replacement some attributes. Creates and returns a new instance of Request object. If no parameters are given, an exact copy is returned. If a parameter is not passed, it will reuse the one from the current request object. """ if self._read_bytes: raise RuntimeError("Cannot clone request " "after reading it's content") dct = {} if method is not sentinel: dct['method'] = method if rel_url is not sentinel: rel_url = URL(rel_url) dct['url'] = rel_url dct['path'] = str(rel_url) if headers is not sentinel: dct['headers'] = CIMultiDict(headers) dct['raw_headers'] = tuple((k.encode('utf-8'), v.encode('utf-8')) for k, v in headers.items()) message = self._message._replace(**dct) kwargs = {} if scheme is not sentinel: kwargs['scheme'] = scheme if host is not sentinel: kwargs['host'] = host if remote is not sentinel: kwargs['remote'] = remote return self.__class__( message, self._payload, self._protocol, self._payload_writer, self._task, self._loop, client_max_size=self._client_max_size, state=self._state.copy(), **kwargs) @property def task(self): return self._task @property def protocol(self): return self._protocol @property def transport(self): if self._protocol is None: return None return self._protocol.transport @property def writer(self): return self._payload_writer @property def message(self): return self._message @property def rel_url(self): 
return self._rel_url @property def loop(self): return self._loop # MutableMapping API def __getitem__(self, key): return self._state[key] def __setitem__(self, key, value): self._state[key] = value def __delitem__(self, key): del self._state[key] def __len__(self): return len(self._state) def __iter__(self): return iter(self._state) ######## @property def secure(self): """A bool indicating if the request is handled with SSL.""" return self.scheme == 'https' @reify def forwarded(self): """A tuple containing all parsed Forwarded header(s). Makes an effort to parse Forwarded headers as specified by RFC 7239: - It adds one (immutable) dictionary per Forwarded 'field-value', ie per proxy. The element corresponds to the data in the Forwarded field-value added by the first proxy encountered by the client. Each subsequent item corresponds to those added by later proxies. - It checks that every value has valid syntax in general as specified in section 4: either a 'token' or a 'quoted-string'. - It un-escapes found escape sequences. - It does NOT validate 'by' and 'for' contents as specified in section 6. - It does NOT validate 'host' contents (Host ABNF). - It does NOT validate 'proto' contents for valid URI scheme names. 
Returns a tuple containing one or more immutable dicts """ elems = [] for field_value in self._message.headers.getall(hdrs.FORWARDED, ()): length = len(field_value) pos = 0 need_separator = False elem = {} elems.append(types.MappingProxyType(elem)) while 0 <= pos < length: match = _FORWARDED_PAIR_RE.match(field_value, pos) if match is not None: # got a valid forwarded-pair if need_separator: # bad syntax here, skip to next comma pos = field_value.find(',', pos) else: (name, value) = match.groups() if value[0] == '"': # quoted string: remove quotes and unescape value = _QUOTED_PAIR_REPLACE_RE.sub(r'\1', value[1:-1]) elem[name.lower()] = value pos += len(match.group(0)) need_separator = True elif field_value[pos] == ',': # next forwarded-element need_separator = False elem = {} elems.append(types.MappingProxyType(elem)) pos += 1 elif field_value[pos] == ';': # next forwarded-pair need_separator = False pos += 1 elif field_value[pos] in ' \t': # Allow whitespace even between forwarded-pairs, though # RFC 7239 doesn't. This simplifies code and is in line # with Postel's law. pos += 1 else: # bad syntax here, skip to next comma pos = field_value.find(',', pos) return tuple(elems) @reify def scheme(self): """A string representing the scheme of the request. Hostname is resolved in this order: - overridden value by .clone(scheme=new_scheme) call. - type of connection to peer: HTTPS if socket is SSL, HTTP otherwise. 'http' or 'https'. """ if self.transport.get_extra_info('sslcontext'): return 'https' else: return 'http' @property def method(self): """Read only property for getting HTTP method. The value is upper-cased str like 'GET', 'POST', 'PUT' etc. """ return self._method @property def version(self): """Read only property for getting HTTP version of request. Returns aiohttp.protocol.HttpVersion instance. """ return self._version @reify def host(self): """Hostname of the request. Hostname is resolved in this order: - overridden value by .clone(host=new_host) call. 
- HOST HTTP header - socket.getfqdn() value """ host = self._message.headers.get(hdrs.HOST) if host is not None: return host else: return socket.getfqdn() @reify def remote(self): """Remote IP of client initiated HTTP request. The IP is resolved in this order: - overridden value by .clone(remote=new_remote) call. - peername of opened socket """ if self.transport is None: return None peername = self.transport.get_extra_info('peername') if isinstance(peername, (list, tuple)): return peername[0] else: return peername @reify def url(self): url = URL.build(scheme=self.scheme, host=self.host) return url.join(self._rel_url) @property def path(self): """The URL including *PATH INFO* without the host or scheme. E.g., ``/app/blog`` """ return self._rel_url.path @reify def path_qs(self): """The URL including PATH_INFO and the query string. E.g, /app/blog?id=10 """ return str(self._rel_url) @property def raw_path(self): """ The URL including raw *PATH INFO* without the host or scheme. Warning, the path is unquoted and may contains non valid URL characters E.g., ``/my%2Fpath%7Cwith%21some%25strange%24characters`` """ return self._message.path @property def query(self): """A multidict with all the variables in the query string.""" return self._rel_url.query @property def query_string(self): """The query string in the URL. E.g., id=10 """ return self._rel_url.query_string @property def headers(self): """A case-insensitive multidict proxy with all headers.""" return self._headers @property def raw_headers(self): """A sequence of pars for all headers.""" return self._message.raw_headers @reify def if_modified_since(self, _IF_MODIFIED_SINCE=hdrs.IF_MODIFIED_SINCE): """The value of If-Modified-Since HTTP header, or None. This header is represented as a `datetime` object. 
""" httpdate = self.headers.get(_IF_MODIFIED_SINCE) if httpdate is not None: timetuple = parsedate(httpdate) if timetuple is not None: return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc) return None @property def keep_alive(self): """Is keepalive enabled by client?""" return not self._message.should_close @reify def cookies(self): """Return request cookies. A read-only dictionary-like object. """ raw = self.headers.get(hdrs.COOKIE, '') parsed = SimpleCookie(raw) return MappingProxyType( {key: val.value for key, val in parsed.items()}) @property def http_range(self, *, _RANGE=hdrs.RANGE): """The content of Range HTTP header. Return a slice instance. """ rng = self._headers.get(_RANGE) start, end = None, None if rng is not None: try: pattern = r'^bytes=(\d*)-(\d*)$' start, end = re.findall(pattern, rng)[0] except IndexError: # pattern was not found in header raise ValueError("range not in acceptible format") end = int(end) if end else None start = int(start) if start else None if start is None and end is not None: # end with no start is to return tail of content end = -end if start is not None and end is not None: # end is inclusive in range header, exclusive for slice end += 1 if start >= end: raise ValueError('start cannot be after end') if start is end is None: # No valid range supplied raise ValueError('No start or end of range specified') return slice(start, end, 1) @property def content(self): """Return raw payload stream.""" return self._payload @property def has_body(self): """Return True if request's HTTP BODY can be read, False otherwise.""" warnings.warn( "Deprecated, use .can_read_body #2005", DeprecationWarning, stacklevel=2) return not self._payload.at_eof() @property def can_read_body(self): """Return True if request's HTTP BODY can be read, False otherwise.""" return not self._payload.at_eof() @property def body_exists(self): """Return True if request has HTTP BODY, False otherwise.""" return type(self._payload) is not 
EmptyStreamReader async def release(self): """Release request. Eat unread part of HTTP BODY if present. """ while not self._payload.at_eof(): await self._payload.readany() async def read(self): """Read request body if present. Returns bytes object with full request content. """ if self._read_bytes is None: body = bytearray() while True: chunk = await self._payload.readany() body.extend(chunk) if self._client_max_size \ and len(body) >= self._client_max_size: raise HTTPRequestEntityTooLarge if not chunk: break self._read_bytes = bytes(body) return self._read_bytes async def text(self): """Return BODY as text using encoding from .charset.""" bytes_body = await self.read() encoding = self.charset or 'utf-8' return bytes_body.decode(encoding) async def json(self, *, loads=json.loads): """Return BODY as JSON.""" body = await self.text() return loads(body) async def multipart(self, *, reader=multipart.MultipartReader): """Return async iterator to process BODY as multipart.""" return reader(self._headers, self._payload) async def post(self): """Return POST parameters.""" if self._post is not None: return self._post if self._method not in self.POST_METHODS: self._post = MultiDictProxy(MultiDict()) return self._post content_type = self.content_type if (content_type not in ('', 'application/x-www-form-urlencoded', 'multipart/form-data')): self._post = MultiDictProxy(MultiDict()) return self._post out = MultiDict() if content_type == 'multipart/form-data': multipart = await self.multipart() field = await multipart.next() while field is not None: size = 0 max_size = self._client_max_size content_type = field.headers.get(hdrs.CONTENT_TYPE) if field.filename: # store file in temp file tmp = tempfile.TemporaryFile() chunk = await field.read_chunk(size=2**16) while chunk: chunk = field.decode(chunk) tmp.write(chunk) size += len(chunk) if 0 < max_size < size: raise ValueError( 'Maximum request body size exceeded') chunk = await field.read_chunk(size=2**16) tmp.seek(0) ff = 
FileField(field.name, field.filename, tmp, content_type, field.headers) out.add(field.name, ff) else: value = await field.read(decode=True) if content_type is None or \ content_type.startswith('text/'): charset = field.get_charset(default='utf-8') value = value.decode(charset) out.add(field.name, value) size += len(value) if 0 < max_size < size: raise ValueError( 'Maximum request body size exceeded') field = await multipart.next() else: data = await self.read() if data: charset = self.charset or 'utf-8' out.extend( parse_qsl( data.rstrip().decode(charset), keep_blank_values=True, encoding=charset)) self._post = MultiDictProxy(out) return self._post def __repr__(self): ascii_encodable_path = self.path.encode('ascii', 'backslashreplace') \ .decode('ascii') return "<{} {} {} >".format(self.__class__.__name__, self._method, ascii_encodable_path) @asyncio.coroutine def _prepare_hook(self, response): return yield # pragma: no cover class Request(BaseRequest): ATTRS = BaseRequest.ATTRS | frozenset(['_match_info']) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) # matchdict, route_name, handler # or information about traversal lookup self._match_info = None # initialized after route resolving def __setattr__(self, name, val): if name not in self.ATTRS: warnings.warn("Setting custom {}.{} attribute " "is discouraged".format(self.__class__.__name__, name), DeprecationWarning, stacklevel=2) super().__setattr__(name, val) def clone(self, *, method=sentinel, rel_url=sentinel, headers=sentinel, scheme=sentinel, host=sentinel, remote=sentinel): ret = super().clone(method=method, rel_url=rel_url, headers=headers, scheme=scheme, host=host, remote=remote) ret._match_info = self._match_info return ret @property def match_info(self): """Result of route resolving.""" return self._match_info @property def app(self): """Application instance.""" return self._match_info.current_app async def _prepare_hook(self, response): match_info = self._match_info if match_info 
is None: return for app in match_info.apps: await app.on_response_prepare.send(self, response) aiohttp-3.0.1/aiohttp/web_response.py0000666000000000000000000005174413240304665016050 0ustar 00000000000000import collections import datetime import enum import json import math import time import warnings import zlib from email.utils import parsedate from http.cookies import SimpleCookie from multidict import CIMultiDict, CIMultiDictProxy from . import hdrs, payload from .helpers import HeadersMixin, rfc822_formatted_time, sentinel from .http import RESPONSES, SERVER_SOFTWARE, HttpVersion10, HttpVersion11 __all__ = ('ContentCoding', 'StreamResponse', 'Response', 'json_response') class ContentCoding(enum.Enum): # The content codings that we have support for. # # Additional registered codings are listed at: # https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding deflate = 'deflate' gzip = 'gzip' identity = 'identity' ############################################################ # HTTP Response classes ############################################################ class StreamResponse(collections.MutableMapping, HeadersMixin): _length_check = True def __init__(self, *, status=200, reason=None, headers=None): self._body = None self._keep_alive = None self._chunked = False self._compression = False self._compression_force = False self._cookies = SimpleCookie() self._req = None self._payload_writer = None self._eof_sent = False self._body_length = 0 self._state = {} if headers is not None: self._headers = CIMultiDict(headers) else: self._headers = CIMultiDict() self.set_status(status, reason) @property def prepared(self): return self._payload_writer is not None @property def task(self): return getattr(self._req, 'task', None) @property def status(self): return self._status @property def chunked(self): return self._chunked @property def compression(self): return self._compression @property def reason(self): return self._reason def set_status(self, 
status, reason=None, _RESPONSES=RESPONSES): assert not self.prepared, \ 'Cannot change the response status code after ' \ 'the headers have been sent' self._status = int(status) if reason is None: try: reason = _RESPONSES[self._status][0] except Exception: reason = '' self._reason = reason @property def keep_alive(self): return self._keep_alive def force_close(self): self._keep_alive = False @property def body_length(self): return self._body_length @property def output_length(self): warnings.warn('output_length is deprecated', DeprecationWarning) return self._payload_writer.buffer_size def enable_chunked_encoding(self, chunk_size=None): """Enables automatic chunked transfer encoding.""" self._chunked = True if hdrs.CONTENT_LENGTH in self._headers: raise RuntimeError("You can't enable chunked encoding when " "a content length is set") if chunk_size is not None: warnings.warn('Chunk size is deprecated #1615', DeprecationWarning) def enable_compression(self, force=None): """Enables response compression encoding.""" # Backwards compatibility for when force was a bool <0.17. if type(force) == bool: force = ContentCoding.deflate if force else ContentCoding.identity elif force is not None: assert isinstance(force, ContentCoding), ("force should one of " "None, bool or " "ContentEncoding") self._compression = True self._compression_force = force @property def headers(self): return self._headers @property def cookies(self): return self._cookies def set_cookie(self, name, value, *, expires=None, domain=None, max_age=None, path='/', secure=None, httponly=None, version=None): """Set or update response cookie. Sets new cookie or updates existent with new value. Also updates only those params which are not None. 
""" old = self._cookies.get(name) if old is not None and old.coded_value == '': # deleted cookie self._cookies.pop(name, None) self._cookies[name] = value c = self._cookies[name] if expires is not None: c['expires'] = expires elif c.get('expires') == 'Thu, 01 Jan 1970 00:00:00 GMT': del c['expires'] if domain is not None: c['domain'] = domain if max_age is not None: c['max-age'] = max_age elif 'max-age' in c: del c['max-age'] c['path'] = path if secure is not None: c['secure'] = secure if httponly is not None: c['httponly'] = httponly if version is not None: c['version'] = version def del_cookie(self, name, *, domain=None, path='/'): """Delete cookie. Creates new empty expired cookie. """ # TODO: do we need domain/path here? self._cookies.pop(name, None) self.set_cookie(name, '', max_age=0, expires="Thu, 01 Jan 1970 00:00:00 GMT", domain=domain, path=path) @property def content_length(self): # Just a placeholder for adding setter return super().content_length @content_length.setter def content_length(self, value): if value is not None: value = int(value) if self._chunked: raise RuntimeError("You can't set content length when " "chunked encoding is enable") self._headers[hdrs.CONTENT_LENGTH] = str(value) else: self._headers.pop(hdrs.CONTENT_LENGTH, None) @property def content_type(self): # Just a placeholder for adding setter return super().content_type @content_type.setter def content_type(self, value): self.content_type # read header values if needed self._content_type = str(value) self._generate_content_type_header() @property def charset(self): # Just a placeholder for adding setter return super().charset @charset.setter def charset(self, value): ctype = self.content_type # read header values if needed if ctype == 'application/octet-stream': raise RuntimeError("Setting charset for application/octet-stream " "doesn't make sense, setup content_type first") if value is None: self._content_dict.pop('charset', None) else: self._content_dict['charset'] = 
str(value).lower() self._generate_content_type_header() @property def last_modified(self, _LAST_MODIFIED=hdrs.LAST_MODIFIED): """The value of Last-Modified HTTP header, or None. This header is represented as a `datetime` object. """ httpdate = self.headers.get(_LAST_MODIFIED) if httpdate is not None: timetuple = parsedate(httpdate) if timetuple is not None: return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc) return None @last_modified.setter def last_modified(self, value): if value is None: self.headers.pop(hdrs.LAST_MODIFIED, None) elif isinstance(value, (int, float)): self.headers[hdrs.LAST_MODIFIED] = time.strftime( "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value))) elif isinstance(value, datetime.datetime): self.headers[hdrs.LAST_MODIFIED] = time.strftime( "%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple()) elif isinstance(value, str): self.headers[hdrs.LAST_MODIFIED] = value def _generate_content_type_header(self, CONTENT_TYPE=hdrs.CONTENT_TYPE): params = '; '.join("%s=%s" % i for i in self._content_dict.items()) if params: ctype = self._content_type + '; ' + params else: ctype = self._content_type self.headers[CONTENT_TYPE] = ctype def _do_start_compression(self, coding): if coding != ContentCoding.identity: self.headers[hdrs.CONTENT_ENCODING] = coding.value self._payload_writer.enable_compression(coding.value) # Compressed payload may have different content length, # remove the header self._headers.popall(hdrs.CONTENT_LENGTH, None) def _start_compression(self, request): if self._compression_force: self._do_start_compression(self._compression_force) else: accept_encoding = request.headers.get( hdrs.ACCEPT_ENCODING, '').lower() for coding in ContentCoding: if coding.value in accept_encoding: self._do_start_compression(coding) return async def prepare(self, request): if self._eof_sent: return if self._payload_writer is not None: return self._payload_writer await request._prepare_hook(self) return self._start(request) def _start(self, 
request, HttpVersion10=HttpVersion10, HttpVersion11=HttpVersion11, CONNECTION=hdrs.CONNECTION, DATE=hdrs.DATE, SERVER=hdrs.SERVER, CONTENT_TYPE=hdrs.CONTENT_TYPE, CONTENT_LENGTH=hdrs.CONTENT_LENGTH, SET_COOKIE=hdrs.SET_COOKIE, SERVER_SOFTWARE=SERVER_SOFTWARE, TRANSFER_ENCODING=hdrs.TRANSFER_ENCODING): self._req = request keep_alive = self._keep_alive if keep_alive is None: keep_alive = request.keep_alive self._keep_alive = keep_alive version = request.version writer = self._payload_writer = request._payload_writer headers = self._headers for cookie in self._cookies.values(): value = cookie.output(header='')[1:] headers.add(SET_COOKIE, value) if self._compression: self._start_compression(request) if self._chunked: if version != HttpVersion11: raise RuntimeError( "Using chunked encoding is forbidden " "for HTTP/{0.major}.{0.minor}".format(request.version)) writer.enable_chunking() headers[TRANSFER_ENCODING] = 'chunked' if CONTENT_LENGTH in headers: del headers[CONTENT_LENGTH] elif self._length_check: writer.length = self.content_length if writer.length is None: if version >= HttpVersion11: writer.enable_chunking() headers[TRANSFER_ENCODING] = 'chunked' if CONTENT_LENGTH in headers: del headers[CONTENT_LENGTH] else: keep_alive = False headers.setdefault(CONTENT_TYPE, 'application/octet-stream') headers.setdefault(DATE, rfc822_formatted_time()) headers.setdefault(SERVER, SERVER_SOFTWARE) # connection header if CONNECTION not in headers: if keep_alive: if version == HttpVersion10: headers[CONNECTION] = 'keep-alive' else: if version == HttpVersion11: headers[CONNECTION] = 'close' # status line status_line = 'HTTP/{}.{} {} {}\r\n'.format( version[0], version[1], self._status, self._reason) writer.write_headers(status_line, headers) return writer async def write(self, data): assert isinstance(data, (bytes, bytearray, memoryview)), \ "data argument must be byte-ish (%r)" % type(data) if self._eof_sent: raise RuntimeError("Cannot call write() after write_eof()") if 
self._payload_writer is None: raise RuntimeError("Cannot call write() before prepare()") await self._payload_writer.write(data) async def drain(self): assert not self._eof_sent, "EOF has already been sent" assert self._payload_writer is not None, \ "Response has not been started" warnings.warn("drain method is deprecated, use await resp.write()", DeprecationWarning, stacklevel=2) await self._payload_writer.drain() async def write_eof(self, data=b''): assert isinstance(data, (bytes, bytearray, memoryview)), \ "data argument must be byte-ish (%r)" % type(data) if self._eof_sent: return assert self._payload_writer is not None, \ "Response has not been started" await self._payload_writer.write_eof(data) self._eof_sent = True self._req = None self._body_length = self._payload_writer.output_size self._payload_writer = None def __repr__(self): if self._eof_sent: info = "eof" elif self.prepared: info = "{} {} ".format(self._req.method, self._req.path) else: info = "not prepared" return "<{} {} {}>".format(self.__class__.__name__, self.reason, info) def __getitem__(self, key): return self._state[key] def __setitem__(self, key, value): self._state[key] = value def __delitem__(self, key): del self._state[key] def __len__(self): return len(self._state) def __iter__(self): return iter(self._state) def __hash__(self): return hash(id(self)) class Response(StreamResponse): def __init__(self, *, body=None, status=200, reason=None, text=None, headers=None, content_type=None, charset=None): if body is not None and text is not None: raise ValueError("body and text are not allowed together") if headers is None: headers = CIMultiDict() elif not isinstance(headers, (CIMultiDict, CIMultiDictProxy)): headers = CIMultiDict(headers) if content_type is not None and "charset" in content_type: raise ValueError("charset must not be in content_type " "argument") if text is not None: if hdrs.CONTENT_TYPE in headers: if content_type or charset: raise ValueError("passing both Content-Type header and 
" "content_type or charset params " "is forbidden") else: # fast path for filling headers if not isinstance(text, str): raise TypeError("text argument must be str (%r)" % type(text)) if content_type is None: content_type = 'text/plain' if charset is None: charset = 'utf-8' headers[hdrs.CONTENT_TYPE] = ( content_type + '; charset=' + charset) body = text.encode(charset) text = None else: if hdrs.CONTENT_TYPE in headers: if content_type is not None or charset is not None: raise ValueError("passing both Content-Type header and " "content_type or charset params " "is forbidden") else: if content_type is not None: if charset is not None: content_type += '; charset=' + charset headers[hdrs.CONTENT_TYPE] = content_type super().__init__(status=status, reason=reason, headers=headers) if text is not None: self.text = text else: self.body = body self._compressed_body = None @property def body(self): return self._body @body.setter def body(self, body, CONTENT_TYPE=hdrs.CONTENT_TYPE, CONTENT_LENGTH=hdrs.CONTENT_LENGTH): if body is None: self._body = None self._body_payload = False elif isinstance(body, (bytes, bytearray)): self._body = body self._body_payload = False else: try: self._body = body = payload.PAYLOAD_REGISTRY.get(body) except payload.LookupError: raise ValueError('Unsupported body type %r' % type(body)) self._body_payload = True headers = self._headers # set content-length header if needed if not self._chunked and CONTENT_LENGTH not in headers: size = body.size if size is not None: headers[CONTENT_LENGTH] = str(size) # set content-type if CONTENT_TYPE not in headers: headers[CONTENT_TYPE] = body.content_type # copy payload headers if body.headers: for (key, value) in body.headers.items(): if key not in headers: headers[key] = value self._compressed_body = None @property def text(self): if self._body is None: return None return self._body.decode(self.charset or 'utf-8') @text.setter def text(self, text): assert text is None or isinstance(text, str), \ "text argument 
must be str (%r)" % type(text) if self.content_type == 'application/octet-stream': self.content_type = 'text/plain' if self.charset is None: self.charset = 'utf-8' self._body = text.encode(self.charset) self._body_payload = False self._compressed_body = None @property def content_length(self): if self._chunked: return None if hdrs.CONTENT_LENGTH in self.headers: return super().content_length if self._compressed_body is not None: # Return length of the compressed body return len(self._compressed_body) elif self._body_payload: # A payload without content length, or a compressed payload return None elif self._body is not None: return len(self._body) else: return 0 @content_length.setter def content_length(self, value): raise RuntimeError("Content length is set automatically") async def write_eof(self): if self._eof_sent: return if self._compressed_body is not None: body = self._compressed_body else: body = self._body if body is not None: if (self._req._method == hdrs.METH_HEAD or self._status in [204, 304]): await super().write_eof() elif self._body_payload: await body.write(self._payload_writer) await super().write_eof() else: await super().write_eof(body) else: await super().write_eof() def _start(self, request): if not self._chunked and hdrs.CONTENT_LENGTH not in self._headers: if not self._body_payload: if self._body is not None: self._headers[hdrs.CONTENT_LENGTH] = str(len(self._body)) else: self._headers[hdrs.CONTENT_LENGTH] = '0' return super()._start(request) def _do_start_compression(self, coding): if self._body_payload or self._chunked: return super()._do_start_compression(coding) if coding != ContentCoding.identity: # Instead of using _payload_writer.enable_compression, # compress the whole body zlib_mode = (16 + zlib.MAX_WBITS if coding.value == 'gzip' else -zlib.MAX_WBITS) compressobj = zlib.compressobj(wbits=zlib_mode) self._compressed_body = compressobj.compress(self._body) +\ compressobj.flush() self._headers[hdrs.CONTENT_ENCODING] = coding.value 
self._headers[hdrs.CONTENT_LENGTH] = \ str(len(self._compressed_body)) def json_response(data=sentinel, *, text=None, body=None, status=200, reason=None, headers=None, content_type='application/json', dumps=json.dumps): if data is not sentinel: if text or body: raise ValueError( "only one of data, text, or body should be specified" ) else: text = dumps(data) return Response(text=text, body=body, status=status, reason=reason, headers=headers, content_type=content_type) aiohttp-3.0.1/aiohttp/web_runner.py0000666000000000000000000002001613240304665015507 0ustar 00000000000000import asyncio import signal import socket from abc import ABC, abstractmethod from yarl import URL __all__ = ('TCPSite', 'UnixSite', 'SockSite', 'BaseRunner', 'AppRunner', 'ServerRunner', 'GracefulExit') class GracefulExit(SystemExit): code = 1 def _raise_graceful_exit(): raise GracefulExit() class BaseSite(ABC): __slots__ = ('_runner', '_shutdown_timeout', '_ssl_context', '_backlog', '_server') def __init__(self, runner, *, shutdown_timeout=60.0, ssl_context=None, backlog=128): if runner.server is None: raise RuntimeError("Call runner.setup() before making a site") self._runner = runner self._shutdown_timeout = shutdown_timeout self._ssl_context = ssl_context self._backlog = backlog self._server = None @property @abstractmethod def name(self): pass # pragma: no cover @abstractmethod async def start(self): self._runner._reg_site(self) async def stop(self): self._runner._check_site(self) if self._server is None: self._runner._unreg_site(self) return # not started yet self._server.close() await self._server.wait_closed() await self._runner.shutdown() await self._runner.server.shutdown(self._shutdown_timeout) self._runner._unreg_site(self) class TCPSite(BaseSite): __slots__ = ('_host', '_port') def __init__(self, runner, host=None, port=None, *, shutdown_timeout=60.0, ssl_context=None, backlog=128, reuse_address=None, reuse_port=None): super().__init__(runner, shutdown_timeout=shutdown_timeout, 
ssl_context=ssl_context, backlog=backlog) if host is None: host = "0.0.0.0" self._host = host if port is None: port = 8443 if self._ssl_context else 8080 self._port = port self._reuse_address = reuse_address self._reuse_port = reuse_port @property def name(self): scheme = 'https' if self._ssl_context else 'http' return str(URL.build(scheme=scheme, host=self._host, port=self._port)) async def start(self): await super().start() loop = asyncio.get_event_loop() self._server = await loop.create_server( self._runner.server, self._host, self._port, ssl=self._ssl_context, backlog=self._backlog, reuse_address=self._reuse_address, reuse_port=self._reuse_port) class UnixSite(BaseSite): __slots__ = ('_path', ) def __init__(self, runner, path, *, shutdown_timeout=60.0, ssl_context=None, backlog=128): super().__init__(runner, shutdown_timeout=shutdown_timeout, ssl_context=ssl_context, backlog=backlog) self._path = path @property def name(self): scheme = 'https' if self._ssl_context else 'http' return '{}://unix:{}:'.format(scheme, self._path) async def start(self): await super().start() loop = asyncio.get_event_loop() self._server = await loop.create_unix_server( self._runner.server, self._path, ssl=self._ssl_context, backlog=self._backlog) class SockSite(BaseSite): __slots__ = ('_sock', '_name') def __init__(self, runner, sock, *, shutdown_timeout=60.0, ssl_context=None, backlog=128): super().__init__(runner, shutdown_timeout=shutdown_timeout, ssl_context=ssl_context, backlog=backlog) self._sock = sock scheme = 'https' if self._ssl_context else 'http' if hasattr(socket, 'AF_UNIX') and sock.family == socket.AF_UNIX: name = '{}://unix:{}:'.format(scheme, sock.getsockname()) else: host, port = sock.getsockname()[:2] name = str(URL.build(scheme=scheme, host=host, port=port)) self._name = name @property def name(self): return self._name async def start(self): await super().start() loop = asyncio.get_event_loop() self._server = await loop.create_server( self._runner.server, 
            sock=self._sock,
            ssl=self._ssl_context, backlog=self._backlog)


class BaseRunner(ABC):
    """Base class for runners: owns the low-level server object and the
    set of sites (listening endpoints) attached to it."""

    __slots__ = ('_handle_signals', '_kwargs', '_server', '_sites')

    def __init__(self, *, handle_signals=False, **kwargs):
        # handle_signals: install SIGINT/SIGTERM handlers during setup().
        # kwargs: forwarded to the subclass's _make_server().
        self._handle_signals = handle_signals
        self._kwargs = kwargs
        self._server = None
        self._sites = set()

    @property
    def server(self):
        return self._server

    @property
    def sites(self):
        # Return a copy so callers cannot mutate the internal registry.
        return set(self._sites)

    async def setup(self):
        """Create the low-level server; optionally hook exit signals."""
        loop = asyncio.get_event_loop()
        if self._handle_signals:
            try:
                loop.add_signal_handler(signal.SIGINT, _raise_graceful_exit)
                loop.add_signal_handler(signal.SIGTERM, _raise_graceful_exit)
            except NotImplementedError:  # pragma: no cover
                # add_signal_handler is not implemented on Windows
                pass
        self._server = await self._make_server()

    @abstractmethod
    async def shutdown(self):
        pass  # pragma: no cover

    async def cleanup(self):
        """Stop all sites, dispose the server, detach signal handlers."""
        loop = asyncio.get_event_loop()
        if self._server is None:
            # no started yet, do nothing
            return
        # The loop over sites is intentional, an exception on gather()
        # leaves self._sites in unpredictable state.
        # The loop guaranties that a site is either deleted on success or
        # still present on failure
        for site in list(self._sites):
            await site.stop()
        await self._cleanup_server()
        self._server = None
        if self._handle_signals:
            try:
                loop.remove_signal_handler(signal.SIGINT)
                loop.remove_signal_handler(signal.SIGTERM)
            except NotImplementedError:  # pragma: no cover
                # remove_signal_handler is not implemented on Windows
                pass

    @abstractmethod
    async def _make_server(self):
        pass  # pragma: no cover

    @abstractmethod
    async def _cleanup_server(self):
        pass  # pragma: no cover

    def _reg_site(self, site):
        # Called by BaseSite.start() to register a newly started site.
        if site in self._sites:
            raise RuntimeError("Site {} is already registered in runner {}"
                               .format(site, self))
        self._sites.add(site)

    def _check_site(self, site):
        # Sanity check used by BaseSite.stop().
        if site not in self._sites:
            raise RuntimeError("Site {} is not registered in runner {}"
                               .format(site, self))

    def _unreg_site(self, site):
        if site not in self._sites:
            raise RuntimeError("Site {} is not registered in runner {}"
                               .format(site, self))
        self._sites.remove(site)


class ServerRunner(BaseRunner):
    """Low-level web server runner"""

    __slots__ = ('_web_server',)

    def __init__(self, web_server, *, handle_signals=False, **kwargs):
        super().__init__(handle_signals=handle_signals, **kwargs)
        self._web_server = web_server

    async def shutdown(self):
        # The bare web server has no application-level shutdown work.
        pass

    async def _make_server(self):
        return self._web_server

    async def _cleanup_server(self):
        pass


class AppRunner(BaseRunner):
    """Web Application runner"""

    __slots__ = ('_app',)

    def __init__(self, app, *, handle_signals=False, **kwargs):
        super().__init__(handle_signals=handle_signals, **kwargs)
        self._app = app

    @property
    def app(self):
        return self._app

    async def shutdown(self):
        await self._app.shutdown()

    async def _make_server(self):
        # Bind the app to the running loop, freeze it, run startup
        # callbacks, then hand back the protocol-factory handler.
        loop = asyncio.get_event_loop()
        self._app._set_loop(loop)
        self._app.on_startup.freeze()
        await self._app.startup()
        self._app.freeze()
        return self._app.make_handler(loop=loop, **self._kwargs)

    async def _cleanup_server(self):
        await self._app.cleanup()
aiohttp-3.0.1/aiohttp/web_server.py0000666000000000000000000000245513240304665015513 0ustar 00000000000000"""Low level HTTP server.""" import asyncio from .web_protocol import RequestHandler from .web_request import BaseRequest __all__ = ('Server',) class Server: def __init__(self, handler, *, request_factory=None, loop=None, **kwargs): if loop is None: loop = asyncio.get_event_loop() self._loop = loop self._connections = {} self._kwargs = kwargs self.requests_count = 0 self.request_handler = handler self.request_factory = request_factory or self._make_request @property def connections(self): return list(self._connections.keys()) def connection_made(self, handler, transport): self._connections[handler] = transport def connection_lost(self, handler, exc=None): if handler in self._connections: del self._connections[handler] def _make_request(self, message, payload, protocol, writer, task): return BaseRequest( message, payload, protocol, writer, task, self._loop) async def shutdown(self, timeout=None): coros = [conn.shutdown(timeout) for conn in self._connections] await asyncio.gather(*coros, loop=self._loop) self._connections.clear() def __call__(self): return RequestHandler(self, loop=self._loop, **self._kwargs) aiohttp-3.0.1/aiohttp/web_urldispatcher.py0000666000000000000000000010055213240304665017053 0ustar 00000000000000import abc import asyncio import base64 import collections import hashlib import inspect import keyword import os import re import warnings from collections.abc import Container, Iterable, Sequence, Sized from contextlib import contextmanager from functools import wraps from pathlib import Path from types import MappingProxyType import attr from yarl import URL from . 
import hdrs from .abc import AbstractMatchInfo, AbstractRouter, AbstractView from .http import HttpVersion11 from .web_exceptions import (HTTPExpectationFailed, HTTPForbidden, HTTPMethodNotAllowed, HTTPNotFound) from .web_fileresponse import FileResponse from .web_response import Response __all__ = ('UrlDispatcher', 'UrlMappingMatchInfo', 'AbstractResource', 'Resource', 'PlainResource', 'DynamicResource', 'AbstractRoute', 'ResourceRoute', 'StaticResource', 'View', 'RouteDef', 'RouteTableDef', 'head', 'get', 'post', 'patch', 'put', 'delete', 'route', 'view') HTTP_METHOD_RE = re.compile(r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$") ROUTE_RE = re.compile(r'(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})') PATH_SEP = re.escape('/') @attr.s(frozen=True, repr=False, slots=True) class RouteDef: method = attr.ib(type=str) path = attr.ib(type=str) handler = attr.ib() kwargs = attr.ib() def __repr__(self): info = [] for name, value in sorted(self.kwargs.items()): info.append(", {}={!r}".format(name, value)) return (" {handler.__name__!r}" "{info}>".format(method=self.method, path=self.path, handler=self.handler, info=''.join(info))) def register(self, router): if self.method in hdrs.METH_ALL: reg = getattr(router, 'add_'+self.method.lower()) reg(self.path, self.handler, **self.kwargs) else: router.add_route(self.method, self.path, self.handler, **self.kwargs) class AbstractResource(Sized, Iterable): def __init__(self, *, name=None): self._name = name @property def name(self): return self._name @abc.abstractmethod # pragma: no branch def url_for(self, **kwargs): """Construct url for resource with additional params.""" @abc.abstractmethod # pragma: no branch async def resolve(self, request): """Resolve resource Return (UrlMappingMatchInfo, allowed_methods) pair.""" @abc.abstractmethod def add_prefix(self, prefix): """Add a prefix to processed URLs. Required for subapplications support. 
""" @abc.abstractmethod def get_info(self): """Return a dict with additional info useful for introspection""" def freeze(self): pass @abc.abstractmethod def raw_match(self, path): """Perform a raw match against path""" class AbstractRoute(abc.ABC): def __init__(self, method, handler, *, expect_handler=None, resource=None): if expect_handler is None: expect_handler = _default_expect_handler assert asyncio.iscoroutinefunction(expect_handler), \ 'Coroutine is expected, got {!r}'.format(expect_handler) method = method.upper() if not HTTP_METHOD_RE.match(method): raise ValueError("{} is not allowed HTTP method".format(method)) assert callable(handler), handler if asyncio.iscoroutinefunction(handler): pass elif inspect.isgeneratorfunction(handler): warnings.warn("Bare generators are deprecated, " "use @coroutine wrapper", DeprecationWarning) elif (isinstance(handler, type) and issubclass(handler, AbstractView)): pass else: warnings.warn("Bare functions are deprecated, " "use async ones", DeprecationWarning) @wraps(handler) async def handler_wrapper(*args, **kwargs): result = old_handler(*args, **kwargs) if asyncio.iscoroutine(result): result = await result return result old_handler = handler handler = handler_wrapper self._method = method self._handler = handler self._expect_handler = expect_handler self._resource = resource @property def method(self): return self._method @property def handler(self): return self._handler @property @abc.abstractmethod def name(self): """Optional route's name, always equals to resource's name.""" @property def resource(self): return self._resource @abc.abstractmethod def get_info(self): """Return a dict with additional info useful for introspection""" @abc.abstractmethod # pragma: no branch def url_for(self, *args, **kwargs): """Construct url for route with additional params.""" async def handle_expect_header(self, request): return await self._expect_handler(request) class UrlMappingMatchInfo(dict, AbstractMatchInfo): def __init__(self, 
match_dict, route): super().__init__(match_dict) self._route = route self._apps = () self._current_app = None self._frozen = False @property def handler(self): return self._route.handler @property def route(self): return self._route @property def expect_handler(self): return self._route.handle_expect_header @property def http_exception(self): return None def get_info(self): return self._route.get_info() @property def apps(self): return self._apps def add_app(self, app): if self._frozen: raise RuntimeError("Cannot change apps stack after .freeze() call") if self._current_app is None: self._current_app = app self._apps = (app,) + self._apps @property def current_app(self): return self._current_app @contextmanager def set_current_app(self, app): assert app in self._apps, ( "Expected one of the following apps {!r}, got {!r}" .format(self._apps, app)) prev = self._current_app self._current_app = app try: yield finally: self._current_app = prev def freeze(self): self._frozen = True def __repr__(self): return "".format(super().__repr__(), self._route) class MatchInfoError(UrlMappingMatchInfo): def __init__(self, http_exception): self._exception = http_exception super().__init__({}, SystemRoute(self._exception)) @property def http_exception(self): return self._exception def __repr__(self): return "".format(self._exception.status, self._exception.reason) async def _default_expect_handler(request): """Default handler for Expect header. Just send "100 Continue" to client. 
raise HTTPExpectationFailed if value of header is not "100-continue" """ expect = request.headers.get(hdrs.EXPECT) if request.version == HttpVersion11: if expect.lower() == "100-continue": request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n", drain=False) else: raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect) class Resource(AbstractResource): def __init__(self, *, name=None): super().__init__(name=name) self._routes = [] def add_route(self, method, handler, *, expect_handler=None): for route_obj in self._routes: if route_obj.method == method or route_obj.method == hdrs.METH_ANY: raise RuntimeError("Added route will never be executed, " "method {route.method} is already " "registered".format(route=route_obj)) route_obj = ResourceRoute(method, handler, self, expect_handler=expect_handler) self.register_route(route_obj) return route_obj def register_route(self, route): assert isinstance(route, ResourceRoute), \ 'Instance of Route class is required, got {!r}'.format(route) self._routes.append(route) async def resolve(self, request): allowed_methods = set() match_dict = self._match(request.rel_url.raw_path) if match_dict is None: return None, allowed_methods for route_obj in self._routes: route_method = route_obj.method allowed_methods.add(route_method) if (route_method == request.method or route_method == hdrs.METH_ANY): return (UrlMappingMatchInfo(match_dict, route_obj), allowed_methods) else: return None, allowed_methods def __len__(self): return len(self._routes) def __iter__(self): return iter(self._routes) # TODO: implement all abstract methods class PlainResource(Resource): def __init__(self, path, *, name=None): super().__init__(name=name) assert not path or path.startswith('/') self._path = path def freeze(self): if not self._path: self._path = '/' def add_prefix(self, prefix): assert prefix.startswith('/') assert not prefix.endswith('/') assert len(prefix) > 1 self._path = prefix + self._path def _match(self, path): # string comparison is about 
10 times faster than regexp matching if self._path == path: return {} else: return None def raw_match(self, path): return self._path == path def get_info(self): return {'path': self._path} def url_for(self): return URL.build(path=self._path, encoded=True) def __repr__(self): name = "'" + self.name + "' " if self.name is not None else "" return "[_a-zA-Z][_a-zA-Z0-9]*)\}') DYN_WITH_RE = re.compile( r'\{(?P[_a-zA-Z][_a-zA-Z0-9]*):(?P.+)\}') GOOD = r'[^{}/]+' def __init__(self, path, *, name=None): super().__init__(name=name) pattern = '' formatter = '' for part in ROUTE_RE.split(path): match = self.DYN.fullmatch(part) if match: pattern += '(?P<{}>{})'.format(match.group('var'), self.GOOD) formatter += '{' + match.group('var') + '}' continue match = self.DYN_WITH_RE.fullmatch(part) if match: pattern += '(?P<{var}>{re})'.format(**match.groupdict()) formatter += '{' + match.group('var') + '}' continue if '{' in part or '}' in part: raise ValueError("Invalid path '{}'['{}']".format(path, part)) path = URL.build(path=part).raw_path formatter += path pattern += re.escape(path) try: compiled = re.compile(pattern) except re.error as exc: raise ValueError( "Bad pattern '{}': {}".format(pattern, exc)) from None assert compiled.pattern.startswith(PATH_SEP) assert formatter.startswith('/') self._pattern = compiled self._formatter = formatter def add_prefix(self, prefix): assert prefix.startswith('/') assert not prefix.endswith('/') assert len(prefix) > 1 self._pattern = re.compile(re.escape(prefix)+self._pattern.pattern) self._formatter = prefix + self._formatter def _match(self, path): match = self._pattern.fullmatch(path) if match is None: return None else: return {key: URL.build(path=value, encoded=True).path for key, value in match.groupdict().items()} def raw_match(self, path): return self._formatter == path def get_info(self): return {'formatter': self._formatter, 'pattern': self._pattern} def url_for(self, **parts): url = self._formatter.format_map({k: 
URL.build(path=v).raw_path for k, v in parts.items()}) return URL.build(path=url) def __repr__(self): name = "'" + self.name + "' " if self.name is not None else "" return (" 1 self._prefix = prefix + self._prefix def raw_match(self, prefix): return False # TODO: impl missing abstract methods class StaticResource(PrefixResource): VERSION_KEY = 'v' def __init__(self, prefix, directory, *, name=None, expect_handler=None, chunk_size=256 * 1024, show_index=False, follow_symlinks=False, append_version=False): super().__init__(prefix, name=name) try: directory = Path(directory) if str(directory).startswith('~'): directory = Path(os.path.expanduser(str(directory))) directory = directory.resolve() if not directory.is_dir(): raise ValueError('Not a directory') except (FileNotFoundError, ValueError) as error: raise ValueError( "No directory exists at '{}'".format(directory)) from error self._directory = directory self._show_index = show_index self._chunk_size = chunk_size self._follow_symlinks = follow_symlinks self._expect_handler = expect_handler self._append_version = append_version self._routes = {'GET': ResourceRoute('GET', self._handle, self, expect_handler=expect_handler), 'HEAD': ResourceRoute('HEAD', self._handle, self, expect_handler=expect_handler)} def url_for(self, *, filename, append_version=None): if append_version is None: append_version = self._append_version if isinstance(filename, Path): filename = str(filename) while filename.startswith('/'): filename = filename[1:] filename = '/' + filename # filename is not encoded url = URL.build(path=self._prefix + filename) if append_version is True: try: if filename.startswith('/'): filename = filename[1:] filepath = self._directory.joinpath(filename).resolve() if not self._follow_symlinks: filepath.relative_to(self._directory) except (ValueError, FileNotFoundError): # ValueError for case when path point to symlink # with follow_symlinks is False return url # relatively safe if filepath.is_file(): # TODO cache file 
content # with file watcher for cache invalidation with open(str(filepath), mode='rb') as f: file_bytes = f.read() h = self._get_file_hash(file_bytes) url = url.with_query({self.VERSION_KEY: h}) return url return url @staticmethod def _get_file_hash(byte_array): m = hashlib.sha256() # todo sha256 can be configurable param m.update(byte_array) b64 = base64.urlsafe_b64encode(m.digest()) return b64.decode('ascii') def get_info(self): return {'directory': self._directory, 'prefix': self._prefix} def set_options_route(self, handler): if 'OPTIONS' in self._routes: raise RuntimeError('OPTIONS route was set already') self._routes['OPTIONS'] = ResourceRoute( 'OPTIONS', handler, self, expect_handler=self._expect_handler) async def resolve(self, request): path = request.rel_url.raw_path method = request.method allowed_methods = set(self._routes) if not path.startswith(self._prefix): return None, set() if method not in allowed_methods: return None, allowed_methods match_dict = {'filename': URL.build(path=path[len(self._prefix)+1:], encoded=True).path} return (UrlMappingMatchInfo(match_dict, self._routes[method]), allowed_methods) def __len__(self): return len(self._routes) def __iter__(self): return iter(self._routes.values()) async def _handle(self, request): filename = request.match_info['filename'] try: filepath = self._directory.joinpath(filename).resolve() if not self._follow_symlinks: filepath.relative_to(self._directory) except (ValueError, FileNotFoundError) as error: # relatively safe raise HTTPNotFound() from error except Exception as error: # perm error or other kind! 
request.app.logger.exception(error) raise HTTPNotFound() from error # on opening a dir, load it's contents if allowed if filepath.is_dir(): if self._show_index: try: ret = Response(text=self._directory_as_html(filepath), content_type="text/html") except PermissionError: raise HTTPForbidden() else: raise HTTPForbidden() elif filepath.is_file(): ret = FileResponse(filepath, chunk_size=self._chunk_size) else: raise HTTPNotFound return ret def _directory_as_html(self, filepath): # returns directory's index as html # sanity check assert filepath.is_dir() relative_path_to_dir = filepath.relative_to(self._directory).as_posix() index_of = "Index of /{}".format(relative_path_to_dir) h1 = "

{}

".format(index_of) index_list = [] dir_index = filepath.iterdir() for _file in sorted(dir_index): # show file url as relative to static path rel_path = _file.relative_to(self._directory).as_posix() file_url = self._prefix + '/' + rel_path # if file is a directory, add '/' to the end of the name if _file.is_dir(): file_name = "{}/".format(_file.name) else: file_name = _file.name index_list.append( '
  • {name}
  • '.format(url=file_url, name=file_name) ) ul = "
      \n{}\n
    ".format('\n'.join(index_list)) body = "\n{}\n{}\n".format(h1, ul) head_str = "\n{}\n".format(index_of) html = "\n{}\n{}\n".format(head_str, body) return html def __repr__(self): name = "'" + self.name + "'" if self.name is not None else "" return " {directory!r}".format( name=name, path=self._prefix, directory=self._directory) class PrefixedSubAppResource(PrefixResource): def __init__(self, prefix, app): super().__init__(prefix) self._app = app for resource in app.router.resources(): resource.add_prefix(prefix) def add_prefix(self, prefix): super().add_prefix(prefix) for resource in self._app.router.resources(): resource.add_prefix(prefix) def url_for(self, *args, **kwargs): raise RuntimeError(".url_for() is not supported " "by sub-application root") def get_info(self): return {'app': self._app, 'prefix': self._prefix} async def resolve(self, request): if not request.url.raw_path.startswith(self._prefix): return None, set() match_info = await self._app.router.resolve(request) match_info.add_app(self._app) if isinstance(match_info.http_exception, HTTPMethodNotAllowed): methods = match_info.http_exception.allowed_methods else: methods = set() return match_info, methods def __len__(self): return len(self._app.router.routes()) def __iter__(self): return iter(self._app.router.routes()) def __repr__(self): return " {app!r}>".format( prefix=self._prefix, app=self._app) class ResourceRoute(AbstractRoute): """A route with resource""" def __init__(self, method, handler, resource, *, expect_handler=None): super().__init__(method, handler, expect_handler=expect_handler, resource=resource) def __repr__(self): return " {handler!r}".format( method=self.method, resource=self._resource, handler=self.handler) @property def name(self): return self._resource.name def url_for(self, *args, **kwargs): """Construct url for route with additional params.""" return self._resource.url_for(*args, **kwargs) def get_info(self): return self._resource.get_info() class 
SystemRoute(AbstractRoute): def __init__(self, http_exception): super().__init__(hdrs.METH_ANY, self._handler) self._http_exception = http_exception def url_for(self, *args, **kwargs): raise RuntimeError(".url_for() is not allowed for SystemRoute") @property def name(self): return None def get_info(self): return {'http_exception': self._http_exception} async def _handler(self, request): raise self._http_exception @property def status(self): return self._http_exception.status @property def reason(self): return self._http_exception.reason def __repr__(self): return "".format(self=self) class View(AbstractView): async def _iter(self): if self.request.method not in hdrs.METH_ALL: self._raise_allowed_methods() method = getattr(self, self.request.method.lower(), None) if method is None: self._raise_allowed_methods() resp = await method() return resp def __await__(self): return self._iter().__await__() def _raise_allowed_methods(self): allowed_methods = { m for m in hdrs.METH_ALL if hasattr(self, m.lower())} raise HTTPMethodNotAllowed(self.request.method, allowed_methods) class ResourcesView(Sized, Iterable, Container): def __init__(self, resources): self._resources = resources def __len__(self): return len(self._resources) def __iter__(self): yield from self._resources def __contains__(self, resource): return resource in self._resources class RoutesView(Sized, Iterable, Container): def __init__(self, resources): self._routes = [] for resource in resources: for route_obj in resource: self._routes.append(route_obj) def __len__(self): return len(self._routes) def __iter__(self): yield from self._routes def __contains__(self, route_obj): return route_obj in self._routes class UrlDispatcher(AbstractRouter, collections.abc.Mapping): NAME_SPLIT_RE = re.compile(r'[.:-]') def __init__(self): super().__init__() self._resources = [] self._named_resources = {} async def resolve(self, request): method = request.method allowed_methods = set() for resource in self._resources: 
match_dict, allowed = await resource.resolve(request) if match_dict is not None: return match_dict else: allowed_methods |= allowed else: if allowed_methods: return MatchInfoError(HTTPMethodNotAllowed(method, allowed_methods)) else: return MatchInfoError(HTTPNotFound()) def __iter__(self): return iter(self._named_resources) def __len__(self): return len(self._named_resources) def __contains__(self, name): return name in self._named_resources def __getitem__(self, name): return self._named_resources[name] def resources(self): return ResourcesView(self._resources) def routes(self): return RoutesView(self._resources) def named_resources(self): return MappingProxyType(self._named_resources) def register_resource(self, resource): assert isinstance(resource, AbstractResource), \ 'Instance of AbstractResource class is required, got {!r}'.format( resource) if self.frozen: raise RuntimeError( "Cannot register a resource into frozen router.") name = resource.name if name is not None: parts = self.NAME_SPLIT_RE.split(name) for part in parts: if not part.isidentifier() or keyword.iskeyword(part): raise ValueError('Incorrect route name {!r}, ' 'the name should be a sequence of ' 'python identifiers separated ' 'by dash, dot or column'.format(name)) if name in self._named_resources: raise ValueError('Duplicate {!r}, ' 'already handled by {!r}' .format(name, self._named_resources[name])) self._named_resources[name] = resource self._resources.append(resource) def add_resource(self, path, *, name=None): if path and not path.startswith('/'): raise ValueError("path should be started with / or be empty") # Reuse last added resource if path and name are the same if self._resources: resource = self._resources[-1] if resource.name == name and resource.raw_match(path): return resource if not ('{' in path or '}' in path or ROUTE_RE.search(path)): url = URL.build(path=path) resource = PlainResource(url.raw_path, name=name) self.register_resource(resource) return resource resource = 
DynamicResource(path, name=name) self.register_resource(resource) return resource def add_route(self, method, path, handler, *, name=None, expect_handler=None): resource = self.add_resource(path, name=name) return resource.add_route(method, handler, expect_handler=expect_handler) def add_static(self, prefix, path, *, name=None, expect_handler=None, chunk_size=256 * 1024, show_index=False, follow_symlinks=False, append_version=False): """Add static files view. prefix - url prefix path - folder with files """ assert prefix.startswith('/') if prefix.endswith('/'): prefix = prefix[:-1] resource = StaticResource(prefix, path, name=name, expect_handler=expect_handler, chunk_size=chunk_size, show_index=show_index, follow_symlinks=follow_symlinks, append_version=append_version) self.register_resource(resource) return resource def add_head(self, path, handler, **kwargs): """ Shortcut for add_route with method HEAD """ return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs) def add_get(self, path, handler, *, name=None, allow_head=True, **kwargs): """ Shortcut for add_route with method GET, if allow_head is true another route is added allowing head requests to the same endpoint """ resource = self.add_resource(path, name=name) if allow_head: resource.add_route(hdrs.METH_HEAD, handler, **kwargs) return resource.add_route(hdrs.METH_GET, handler, **kwargs) def add_post(self, path, handler, **kwargs): """ Shortcut for add_route with method POST """ return self.add_route(hdrs.METH_POST, path, handler, **kwargs) def add_put(self, path, handler, **kwargs): """ Shortcut for add_route with method PUT """ return self.add_route(hdrs.METH_PUT, path, handler, **kwargs) def add_patch(self, path, handler, **kwargs): """ Shortcut for add_route with method PATCH """ return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs) def add_delete(self, path, handler, **kwargs): """ Shortcut for add_route with method DELETE """ return self.add_route(hdrs.METH_DELETE, path, handler, 
**kwargs) def add_view(self, path, handler, **kwargs): """ Shortcut for add_route with ANY methods for a class-based view """ return self.add_route(hdrs.METH_ANY, path, handler, **kwargs) def freeze(self): super().freeze() for resource in self._resources: resource.freeze() def add_routes(self, routes): """Append routes to route table. Parameter should be a sequence of RouteDef objects. """ for route_obj in routes: route_obj.register(self) def route(method, path, handler, **kwargs): return RouteDef(method, path, handler, kwargs) def head(path, handler, **kwargs): return route(hdrs.METH_HEAD, path, handler, **kwargs) def get(path, handler, *, name=None, allow_head=True, **kwargs): return route(hdrs.METH_GET, path, handler, name=name, allow_head=allow_head, **kwargs) def post(path, handler, **kwargs): return route(hdrs.METH_POST, path, handler, **kwargs) def put(path, handler, **kwargs): return route(hdrs.METH_PUT, path, handler, **kwargs) def patch(path, handler, **kwargs): return route(hdrs.METH_PATCH, path, handler, **kwargs) def delete(path, handler, **kwargs): return route(hdrs.METH_DELETE, path, handler, **kwargs) def view(path, handler, **kwargs): return route(hdrs.METH_ANY, path, handler, **kwargs) class RouteTableDef(Sequence): """Route definition table""" def __init__(self): self._items = [] def __repr__(self): return "".format(len(self._items)) def __getitem__(self, index): return self._items[index] def __iter__(self): return iter(self._items) def __len__(self): return len(self._items) def __contains__(self, item): return item in self._items def route(self, method, path, **kwargs): def inner(handler): self._items.append(RouteDef(method, path, handler, kwargs)) return handler return inner def head(self, path, **kwargs): return self.route(hdrs.METH_HEAD, path, **kwargs) def get(self, path, **kwargs): return self.route(hdrs.METH_GET, path, **kwargs) def post(self, path, **kwargs): return self.route(hdrs.METH_POST, path, **kwargs) def put(self, path, **kwargs): 
return self.route(hdrs.METH_PUT, path, **kwargs) def patch(self, path, **kwargs): return self.route(hdrs.METH_PATCH, path, **kwargs) def delete(self, path, **kwargs): return self.route(hdrs.METH_DELETE, path, **kwargs) def view(self, path, **kwargs): return self.route(hdrs.METH_ANY, path, **kwargs) aiohttp-3.0.1/aiohttp/web_ws.py0000666000000000000000000003453213240304665014637 0ustar 00000000000000import asyncio import base64 import binascii import hashlib import json import async_timeout import attr from multidict import CIMultiDict from . import hdrs from .helpers import call_later, set_result from .http import (WS_CLOSED_MESSAGE, WS_CLOSING_MESSAGE, WS_KEY, WebSocketError, WebSocketReader, WebSocketWriter, WSMessage, WSMsgType, ws_ext_gen, ws_ext_parse) from .log import ws_logger from .streams import FlowControlDataQueue from .web_exceptions import HTTPBadRequest, HTTPException, HTTPMethodNotAllowed from .web_response import StreamResponse __all__ = ('WebSocketResponse', 'WebSocketReady', 'WSMsgType',) THRESHOLD_CONNLOST_ACCESS = 5 @attr.s(frozen=True, slots=True) class WebSocketReady: ok = attr.ib(type=bool) protocol = attr.ib(type=str) def __bool__(self): return self.ok class WebSocketResponse(StreamResponse): def __init__(self, *, timeout=10.0, receive_timeout=None, autoclose=True, autoping=True, heartbeat=None, protocols=(), compress=True): super().__init__(status=101) self._protocols = protocols self._ws_protocol = None self._writer = None self._reader = None self._closed = False self._closing = False self._conn_lost = 0 self._close_code = None self._loop = None self._waiting = None self._exception = None self._timeout = timeout self._receive_timeout = receive_timeout self._autoclose = autoclose self._autoping = autoping self._heartbeat = heartbeat self._heartbeat_cb = None if heartbeat is not None: self._pong_heartbeat = heartbeat/2.0 self._pong_response_cb = None self._compress = compress def _cancel_heartbeat(self): if self._pong_response_cb is not 
None: self._pong_response_cb.cancel() self._pong_response_cb = None if self._heartbeat_cb is not None: self._heartbeat_cb.cancel() self._heartbeat_cb = None def _reset_heartbeat(self): self._cancel_heartbeat() if self._heartbeat is not None: self._heartbeat_cb = call_later( self._send_heartbeat, self._heartbeat, self._loop) def _send_heartbeat(self): if self._heartbeat is not None and not self._closed: self._writer.ping() if self._pong_response_cb is not None: self._pong_response_cb.cancel() self._pong_response_cb = call_later( self._pong_not_received, self._pong_heartbeat, self._loop) def _pong_not_received(self): if self._req is not None and self._req.transport is not None: self._closed = True self._close_code = 1006 self._exception = asyncio.TimeoutError() self._req.transport.close() async def prepare(self, request): # make pre-check to don't hide it by do_handshake() exceptions if self._payload_writer is not None: return self._payload_writer protocol, writer = self._pre_start(request) payload_writer = await super().prepare(request) self._post_start(request, protocol, writer) await payload_writer.drain() return payload_writer def _handshake(self, request): headers = request.headers if request.method != hdrs.METH_GET: raise HTTPMethodNotAllowed(request.method, [hdrs.METH_GET]) if 'websocket' != headers.get(hdrs.UPGRADE, '').lower().strip(): raise HTTPBadRequest( text=('No WebSocket UPGRADE hdr: {}\n Can ' '"Upgrade" only to "WebSocket".') .format(headers.get(hdrs.UPGRADE))) if 'upgrade' not in headers.get(hdrs.CONNECTION, '').lower(): raise HTTPBadRequest( text='No CONNECTION upgrade hdr: {}'.format( headers.get(hdrs.CONNECTION))) # find common sub-protocol between client and server protocol = None if hdrs.SEC_WEBSOCKET_PROTOCOL in headers: req_protocols = [str(proto.strip()) for proto in headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(',')] for proto in req_protocols: if proto in self._protocols: protocol = proto break else: # No overlap found: Return no protocol as 
per spec ws_logger.warning( 'Client protocols %r don’t overlap server-known ones %r', req_protocols, self._protocols) # check supported version version = headers.get(hdrs.SEC_WEBSOCKET_VERSION, '') if version not in ('13', '8', '7'): raise HTTPBadRequest( text='Unsupported version: {}'.format(version)) # check client handshake for validity key = headers.get(hdrs.SEC_WEBSOCKET_KEY) try: if not key or len(base64.b64decode(key)) != 16: raise HTTPBadRequest( text='Handshake error: {!r}'.format(key)) except binascii.Error: raise HTTPBadRequest( text='Handshake error: {!r}'.format(key)) from None accept_val = base64.b64encode( hashlib.sha1(key.encode() + WS_KEY).digest()).decode() response_headers = CIMultiDict({hdrs.UPGRADE: 'websocket', hdrs.CONNECTION: 'upgrade', hdrs.TRANSFER_ENCODING: 'chunked', hdrs.SEC_WEBSOCKET_ACCEPT: accept_val}) notakeover = False compress = self._compress if compress: extensions = headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS) # Server side always get return with no exception. 
# If something happened, just drop compress extension compress, notakeover = ws_ext_parse(extensions, isserver=True) if compress: enabledext = ws_ext_gen(compress=compress, isserver=True, server_notakeover=notakeover) response_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = enabledext if protocol: response_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = protocol return (response_headers, protocol, compress, notakeover) def _pre_start(self, request): self._loop = request.loop headers, protocol, compress, notakeover = self._handshake( request) self._reset_heartbeat() self.set_status(101) self.headers.update(headers) self.force_close() self._compress = compress writer = WebSocketWriter(request._protocol, request._protocol.transport, compress=compress, notakeover=notakeover) return protocol, writer def _post_start(self, request, protocol, writer): self._ws_protocol = protocol self._writer = writer self._reader = FlowControlDataQueue( request._protocol, limit=2 ** 16, loop=self._loop) request.protocol.set_parser(WebSocketReader( self._reader, compress=self._compress)) # disable HTTP keepalive for WebSocket request.protocol.keep_alive(False) def can_prepare(self, request): if self._writer is not None: raise RuntimeError('Already started') try: _, protocol, _, _ = self._handshake(request) except HTTPException: return WebSocketReady(False, None) else: return WebSocketReady(True, protocol) @property def closed(self): return self._closed @property def close_code(self): return self._close_code @property def ws_protocol(self): return self._ws_protocol @property def compress(self): return self._compress def exception(self): return self._exception async def ping(self, message='b'): if self._writer is None: raise RuntimeError('Call .prepare() first') await self._writer.ping(message) async def pong(self, message='b'): # unsolicited pong if self._writer is None: raise RuntimeError('Call .prepare() first') await self._writer.pong(message) async def send_str(self, data, compress=None): if 
self._writer is None: raise RuntimeError('Call .prepare() first') if not isinstance(data, str): raise TypeError('data argument must be str (%r)' % type(data)) await self._writer.send(data, binary=False, compress=compress) async def send_bytes(self, data, compress=None): if self._writer is None: raise RuntimeError('Call .prepare() first') if not isinstance(data, (bytes, bytearray, memoryview)): raise TypeError('data argument must be byte-ish (%r)' % type(data)) await self._writer.send(data, binary=True, compress=compress) async def send_json(self, data, compress=None, *, dumps=json.dumps): await self.send_str(dumps(data), compress=compress) async def write_eof(self): if self._eof_sent: return if self._payload_writer is None: raise RuntimeError("Response has not been started") await self.close() self._eof_sent = True async def close(self, *, code=1000, message=b''): if self._writer is None: raise RuntimeError('Call .prepare() first') self._cancel_heartbeat() # we need to break `receive()` cycle first, # `close()` may be called from different task if self._waiting is not None and not self._closed: self._reader.feed_data(WS_CLOSING_MESSAGE, 0) await self._waiting if not self._closed: self._closed = True try: self._writer.close(code, message) await self._payload_writer.drain() except (asyncio.CancelledError, asyncio.TimeoutError): self._close_code = 1006 raise except Exception as exc: self._close_code = 1006 self._exception = exc return True if self._closing: return True try: with async_timeout.timeout(self._timeout, loop=self._loop): msg = await self._reader.read() except asyncio.CancelledError: self._close_code = 1006 raise except Exception as exc: self._close_code = 1006 self._exception = exc return True if msg.type == WSMsgType.CLOSE: self._close_code = msg.data return True self._close_code = 1006 self._exception = asyncio.TimeoutError() return True else: return False async def receive(self, timeout=None): if self._reader is None: raise RuntimeError('Call .prepare() 
first') while True: if self._waiting is not None: raise RuntimeError( 'Concurrent call to receive() is not allowed') if self._closed: self._conn_lost += 1 if self._conn_lost >= THRESHOLD_CONNLOST_ACCESS: raise RuntimeError('WebSocket connection is closed.') return WS_CLOSED_MESSAGE elif self._closing: return WS_CLOSING_MESSAGE try: self._waiting = self._loop.create_future() try: with async_timeout.timeout( timeout or self._receive_timeout, loop=self._loop): msg = await self._reader.read() self._reset_heartbeat() finally: waiter = self._waiting set_result(waiter, True) self._waiting = None except (asyncio.CancelledError, asyncio.TimeoutError): self._close_code = 1006 raise except WebSocketError as exc: self._close_code = exc.code await self.close(code=exc.code) return WSMessage(WSMsgType.ERROR, exc, None) except Exception as exc: self._exception = exc self._closing = True self._close_code = 1006 await self.close() return WSMessage(WSMsgType.ERROR, exc, None) if msg.type == WSMsgType.CLOSE: self._closing = True self._close_code = msg.data if not self._closed and self._autoclose: await self.close() elif msg.type == WSMsgType.CLOSING: self._closing = True elif msg.type == WSMsgType.PING and self._autoping: await self.pong(msg.data) continue elif msg.type == WSMsgType.PONG and self._autoping: continue return msg async def receive_str(self, *, timeout=None): msg = await self.receive(timeout) if msg.type != WSMsgType.TEXT: raise TypeError( "Received message {}:{!r} is not WSMsgType.TEXT".format( msg.type, msg.data)) return msg.data async def receive_bytes(self, *, timeout=None): msg = await self.receive(timeout) if msg.type != WSMsgType.BINARY: raise TypeError( "Received message {}:{!r} is not bytes".format(msg.type, msg.data)) return msg.data async def receive_json(self, *, loads=json.loads, timeout=None): data = await self.receive_str(timeout=timeout) return loads(data) async def write(self, data): raise RuntimeError("Cannot call .write() for websocket") def 
__aiter__(self): return self async def __anext__(self): msg = await self.receive() if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED): raise StopAsyncIteration # NOQA return msg aiohttp-3.0.1/aiohttp/worker.py0000666000000000000000000001565713240304665014671 0ustar 00000000000000"""Async gunicorn worker for aiohttp.web""" import asyncio import os import re import signal import sys from contextlib import suppress from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat from gunicorn.workers import base from aiohttp import web from .helpers import AccessLogger, set_result try: import ssl except ImportError: # pragma: no cover ssl = None __all__ = ('GunicornWebWorker', 'GunicornUVLoopWebWorker', 'GunicornTokioWebWorker') class GunicornWebWorker(base.Worker): DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default def __init__(self, *args, **kw): # pragma: no cover super().__init__(*args, **kw) self._runner = None self._task = None self.exit_code = 0 self._notify_waiter = None def init_process(self): # create new event_loop after fork asyncio.get_event_loop().close() self.loop = asyncio.new_event_loop() asyncio.set_event_loop(self.loop) super().init_process() def run(self): access_log = self.log.access_log if self.cfg.accesslog else None params = dict( logger=self.log, keepalive_timeout=self.cfg.keepalive, access_log=access_log, access_log_format=self._get_valid_log_format( self.cfg.access_log_format)) self._runner = web.AppRunner(self.wsgi, **params) self.loop.run_until_complete(self._runner.setup()) self._task = self.loop.create_task(self._run()) with suppress(Exception): # ignore all finalization problems self.loop.run_until_complete(self._task) if hasattr(self.loop, 'shutdown_asyncgens'): self.loop.run_until_complete(self.loop.shutdown_asyncgens()) self.loop.close() sys.exit(self.exit_code) async def _run(self): ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl 
else None for sock in self.sockets: site = web.SockSite( self._runner, sock, ssl_context=ctx, shutdown_timeout=self.cfg.graceful_timeout / 100 * 95) await site.start() # If our parent changed then we shut down. pid = os.getpid() try: while self.alive: self.notify() cnt = self._runner.server.requests_count if self.cfg.max_requests and cnt > self.cfg.max_requests: self.alive = False self.log.info("Max requests, shutting down: %s", self) elif pid == os.getpid() and self.ppid != os.getppid(): self.alive = False self.log.info("Parent changed, shutting down: %s", self) else: await self._wait_next_notify() except BaseException: pass await self._runner.cleanup() def _wait_next_notify(self): self._notify_waiter_done() self._notify_waiter = waiter = self.loop.create_future() self.loop.call_later(1.0, self._notify_waiter_done, waiter) return waiter def _notify_waiter_done(self, waiter=None): if waiter is None: waiter = self._notify_waiter if waiter is not None: set_result(waiter, True) if waiter is self._notify_waiter: self._notify_waiter = None def init_signals(self): # Set up signals through the event loop API. 
self.loop.add_signal_handler(signal.SIGQUIT, self.handle_quit, signal.SIGQUIT, None) self.loop.add_signal_handler(signal.SIGTERM, self.handle_exit, signal.SIGTERM, None) self.loop.add_signal_handler(signal.SIGINT, self.handle_quit, signal.SIGINT, None) self.loop.add_signal_handler(signal.SIGWINCH, self.handle_winch, signal.SIGWINCH, None) self.loop.add_signal_handler(signal.SIGUSR1, self.handle_usr1, signal.SIGUSR1, None) self.loop.add_signal_handler(signal.SIGABRT, self.handle_abort, signal.SIGABRT, None) # Don't let SIGTERM and SIGUSR1 disturb active requests # by interrupting system calls signal.siginterrupt(signal.SIGTERM, False) signal.siginterrupt(signal.SIGUSR1, False) def handle_quit(self, sig, frame): self.alive = False # worker_int callback self.cfg.worker_int(self) # wakeup closing process self._notify_waiter_done() def handle_abort(self, sig, frame): self.alive = False self.exit_code = 1 self.cfg.worker_abort(self) sys.exit(1) @staticmethod def _create_ssl_context(cfg): """ Creates SSLContext instance for usage in asyncio.create_server. See ssl.SSLSocket.__init__ for more details. """ if ssl is None: # pragma: no cover raise RuntimeError('SSL is not supported.') ctx = ssl.SSLContext(cfg.ssl_version) ctx.load_cert_chain(cfg.certfile, cfg.keyfile) ctx.verify_mode = cfg.cert_reqs if cfg.ca_certs: ctx.load_verify_locations(cfg.ca_certs) if cfg.ciphers: ctx.set_ciphers(cfg.ciphers) return ctx def _get_valid_log_format(self, source_format): if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT: return self.DEFAULT_AIOHTTP_LOG_FORMAT elif re.search(r'%\([^\)]+\)', source_format): raise ValueError( "Gunicorn's style options in form of `%(name)s` are not " "supported for the log formatting. 
Please use aiohttp's " "format specification to configure access log formatting: " "http://docs.aiohttp.org/en/stable/logging.html" "#format-specification" ) else: return source_format class GunicornUVLoopWebWorker(GunicornWebWorker): def init_process(self): import uvloop # Close any existing event loop before setting a # new policy. asyncio.get_event_loop().close() # Setup uvloop policy, so that every # asyncio.get_event_loop() will create an instance # of uvloop event loop. asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) super().init_process() class GunicornTokioWebWorker(GunicornWebWorker): def init_process(self): # pragma: no cover import tokio # Close any existing event loop before setting a # new policy. asyncio.get_event_loop().close() # Setup tokio policy, so that every # asyncio.get_event_loop() will create an instance # of tokio event loop. asyncio.set_event_loop_policy(tokio.EventLoopPolicy()) super().init_process() aiohttp-3.0.1/aiohttp/_cparser.pxd0000666000000000000000000000756713240304665015322 0ustar 00000000000000from libc.stdint cimport uint16_t, uint32_t, uint64_t cdef extern from "../vendor/http-parser/http_parser.h": ctypedef int (*http_data_cb) (http_parser*, const char *at, size_t length) except -1 ctypedef int (*http_cb) (http_parser*) except -1 struct http_parser: unsigned int type unsigned int flags unsigned int state unsigned int header_state unsigned int index uint32_t nread uint64_t content_length unsigned short http_major unsigned short http_minor unsigned int status_code unsigned int method unsigned int http_errno unsigned int upgrade void *data struct http_parser_settings: http_cb on_message_begin http_data_cb on_url http_data_cb on_status http_data_cb on_header_field http_data_cb on_header_value http_cb on_headers_complete http_data_cb on_body http_cb on_message_complete http_cb on_chunk_header http_cb on_chunk_complete enum http_parser_type: HTTP_REQUEST, HTTP_RESPONSE, HTTP_BOTH enum http_errno: HPE_OK, 
HPE_CB_message_begin, HPE_CB_url, HPE_CB_header_field, HPE_CB_header_value, HPE_CB_headers_complete, HPE_CB_body, HPE_CB_message_complete, HPE_CB_status, HPE_CB_chunk_header, HPE_CB_chunk_complete, HPE_INVALID_EOF_STATE, HPE_HEADER_OVERFLOW, HPE_CLOSED_CONNECTION, HPE_INVALID_VERSION, HPE_INVALID_STATUS, HPE_INVALID_METHOD, HPE_INVALID_URL, HPE_INVALID_HOST, HPE_INVALID_PORT, HPE_INVALID_PATH, HPE_INVALID_QUERY_STRING, HPE_INVALID_FRAGMENT, HPE_LF_EXPECTED, HPE_INVALID_HEADER_TOKEN, HPE_INVALID_CONTENT_LENGTH, HPE_INVALID_CHUNK_SIZE, HPE_INVALID_CONSTANT, HPE_INVALID_INTERNAL_STATE, HPE_STRICT, HPE_PAUSED, HPE_UNKNOWN enum flags: F_CHUNKED, F_CONNECTION_KEEP_ALIVE, F_CONNECTION_CLOSE, F_CONNECTION_UPGRADE, F_TRAILING, F_UPGRADE, F_SKIPBODY, F_CONTENTLENGTH enum http_method: DELETE, GET, HEAD, POST, PUT, CONNECT, OPTIONS, TRACE, COPY, LOCK, MKCOL, MOVE, PROPFIND, PROPPATCH, SEARCH, UNLOCK, BIND, REBIND, UNBIND, ACL, REPORT, MKACTIVITY, CHECKOUT, MERGE, MSEARCH, NOTIFY, SUBSCRIBE, UNSUBSCRIBE, PATCH, PURGE, MKCALENDAR, LINK, UNLINK void http_parser_init(http_parser *parser, http_parser_type type) size_t http_parser_execute(http_parser *parser, const http_parser_settings *settings, const char *data, size_t len) int http_should_keep_alive(const http_parser *parser) void http_parser_settings_init(http_parser_settings *settings) const char *http_errno_name(http_errno err) const char *http_errno_description(http_errno err) const char *http_method_str(http_method m) # URL Parser enum http_parser_url_fields: UF_SCHEMA = 0, UF_HOST = 1, UF_PORT = 2, UF_PATH = 3, UF_QUERY = 4, UF_FRAGMENT = 5, UF_USERINFO = 6, UF_MAX = 7 struct http_parser_url_field_data: uint16_t off uint16_t len struct http_parser_url: uint16_t field_set uint16_t port http_parser_url_field_data[UF_MAX] field_data void http_parser_url_init(http_parser_url *u) int http_parser_parse_url(const char *buf, size_t buflen, int is_connect, http_parser_url *u) 
aiohttp-3.0.1/aiohttp/_frozenlist.c0000666000000000000000000103751513240304735015504 0ustar 00000000000000/* Generated by Cython 0.27.3 */ /* BEGIN: Cython Metadata { "distutils": { "name": "aiohttp._frozenlist", "sources": [ "aiohttp/_frozenlist.pyx" ] }, "module_name": "aiohttp._frozenlist" } END: Cython Metadata */ #define PY_SSIZE_T_CLEAN #include "Python.h" #ifndef Py_PYTHON_H #error Python headers needed to compile C extensions, please install development version of Python. #elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) #error Cython requires Python 2.6+ or Python 3.3+. #else #define CYTHON_ABI "0_27_3" #define CYTHON_FUTURE_DIVISION 0 #include #ifndef offsetof #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) #endif #if !defined(WIN32) && !defined(MS_WINDOWS) #ifndef __stdcall #define __stdcall #endif #ifndef __cdecl #define __cdecl #endif #ifndef __fastcall #define __fastcall #endif #endif #ifndef DL_IMPORT #define DL_IMPORT(t) t #endif #ifndef DL_EXPORT #define DL_EXPORT(t) t #endif #define __PYX_COMMA , #ifndef HAVE_LONG_LONG #if PY_VERSION_HEX >= 0x02070000 #define HAVE_LONG_LONG #endif #endif #ifndef PY_LONG_LONG #define PY_LONG_LONG LONG_LONG #endif #ifndef Py_HUGE_VAL #define Py_HUGE_VAL HUGE_VAL #endif #ifdef PYPY_VERSION #define CYTHON_COMPILING_IN_PYPY 1 #define CYTHON_COMPILING_IN_PYSTON 0 #define CYTHON_COMPILING_IN_CPYTHON 0 #undef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 0 #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #if PY_VERSION_HEX < 0x03050000 #undef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 0 #elif !defined(CYTHON_USE_ASYNC_SLOTS) #define CYTHON_USE_ASYNC_SLOTS 1 #endif #undef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 0 #undef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 0 #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #undef CYTHON_USE_PYLONG_INTERNALS #define 
CYTHON_USE_PYLONG_INTERNALS 0 #undef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 1 #undef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 0 #undef CYTHON_UNPACK_METHODS #define CYTHON_UNPACK_METHODS 0 #undef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 0 #undef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 0 #undef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT 0 #undef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE 0 #elif defined(PYSTON_VERSION) #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_PYSTON 1 #define CYTHON_COMPILING_IN_CPYTHON 0 #ifndef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 1 #endif #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #undef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 0 #undef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 0 #ifndef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 1 #endif #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #undef CYTHON_USE_PYLONG_INTERNALS #define CYTHON_USE_PYLONG_INTERNALS 0 #ifndef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 0 #endif #ifndef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 1 #endif #ifndef CYTHON_UNPACK_METHODS #define CYTHON_UNPACK_METHODS 1 #endif #undef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 0 #undef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 0 #undef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT 0 #undef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE 0 #else #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_PYSTON 0 #define CYTHON_COMPILING_IN_CPYTHON 1 #ifndef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 1 #endif #if PY_VERSION_HEX < 0x02070000 #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) #define CYTHON_USE_PYTYPE_LOOKUP 1 #endif #if 
PY_MAJOR_VERSION < 3 #undef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 0 #elif !defined(CYTHON_USE_ASYNC_SLOTS) #define CYTHON_USE_ASYNC_SLOTS 1 #endif #if PY_VERSION_HEX < 0x02070000 #undef CYTHON_USE_PYLONG_INTERNALS #define CYTHON_USE_PYLONG_INTERNALS 0 #elif !defined(CYTHON_USE_PYLONG_INTERNALS) #define CYTHON_USE_PYLONG_INTERNALS 1 #endif #ifndef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 1 #endif #ifndef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 1 #endif #if PY_VERSION_HEX < 0x030300F0 #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #elif !defined(CYTHON_USE_UNICODE_WRITER) #define CYTHON_USE_UNICODE_WRITER 1 #endif #ifndef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 0 #endif #ifndef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 1 #endif #ifndef CYTHON_UNPACK_METHODS #define CYTHON_UNPACK_METHODS 1 #endif #ifndef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 1 #endif #ifndef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 1 #endif #ifndef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT (0 && PY_VERSION_HEX >= 0x03050000) #endif #ifndef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) #endif #endif #if !defined(CYTHON_FAST_PYCCALL) #define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) #endif #if CYTHON_USE_PYLONG_INTERNALS #include "longintrepr.h" #undef SHIFT #undef BASE #undef MASK #endif #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) #define Py_OptimizeFlag 0 #endif #define __PYX_BUILD_PY_SSIZE_T "n" #define CYTHON_FORMAT_SSIZE_T "z" #if PY_MAJOR_VERSION < 3 #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) #define __Pyx_DefaultClassType PyClass_Type #else 
#define __Pyx_BUILTIN_MODULE_NAME "builtins" #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) #define __Pyx_DefaultClassType PyType_Type #endif #ifndef Py_TPFLAGS_CHECKTYPES #define Py_TPFLAGS_CHECKTYPES 0 #endif #ifndef Py_TPFLAGS_HAVE_INDEX #define Py_TPFLAGS_HAVE_INDEX 0 #endif #ifndef Py_TPFLAGS_HAVE_NEWBUFFER #define Py_TPFLAGS_HAVE_NEWBUFFER 0 #endif #ifndef Py_TPFLAGS_HAVE_FINALIZE #define Py_TPFLAGS_HAVE_FINALIZE 0 #endif #if PY_VERSION_HEX < 0x030700A0 || !defined(METH_FASTCALL) #ifndef METH_FASTCALL #define METH_FASTCALL 0x80 #endif typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject **args, Py_ssize_t nargs); typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject **args, Py_ssize_t nargs, PyObject *kwnames); #else #define __Pyx_PyCFunctionFast _PyCFunctionFast #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords #endif #if CYTHON_FAST_PYCCALL #define __Pyx_PyFastCFunction_Check(func)\ ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS))))) #else #define __Pyx_PyFastCFunction_Check(func) 0 #endif #if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 #define __Pyx_PyThreadState_Current PyThreadState_GET() #elif PY_VERSION_HEX >= 0x03060000 #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() #elif PY_VERSION_HEX >= 0x03000000 #define __Pyx_PyThreadState_Current PyThreadState_GET() #else #define __Pyx_PyThreadState_Current _PyThreadState_Current #endif #if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) #define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? 
PyDict_New() : _PyDict_NewPresized(n)) #else #define __Pyx_PyDict_NewPresized(n) PyDict_New() #endif #if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) #else #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) #endif #if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) #define CYTHON_PEP393_ENABLED 1 #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ 0 : _PyUnicode_Ready((PyObject *)(op))) #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) #else #define CYTHON_PEP393_ENABLED 0 #define PyUnicode_1BYTE_KIND 1 #define PyUnicode_2BYTE_KIND 2 #define PyUnicode_4BYTE_KIND 4 #define __Pyx_PyUnicode_READY(op) (0) #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 
65535 : 1114111) #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) #endif #if CYTHON_COMPILING_IN_PYPY #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) #else #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) #endif #if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) #endif #if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) #endif #if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) #endif #if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) #define PyObject_Malloc(s) PyMem_Malloc(s) #define PyObject_Free(p) PyMem_Free(p) #define PyObject_Realloc(p) PyMem_Realloc(p) #endif #if CYTHON_COMPILING_IN_PYSTON #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) #else #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) #endif #define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) #define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? 
PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) #else #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) #endif #if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) #define PyObject_ASCII(o) PyObject_Repr(o) #endif #if PY_MAJOR_VERSION >= 3 #define PyBaseString_Type PyUnicode_Type #define PyStringObject PyUnicodeObject #define PyString_Type PyUnicode_Type #define PyString_Check PyUnicode_Check #define PyString_CheckExact PyUnicode_CheckExact #endif #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) #else #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) #endif #ifndef PySet_CheckExact #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) #endif #define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) #if PY_MAJOR_VERSION >= 3 #define PyIntObject PyLongObject #define PyInt_Type PyLong_Type #define PyInt_Check(op) PyLong_Check(op) #define PyInt_CheckExact(op) PyLong_CheckExact(op) #define PyInt_FromString PyLong_FromString #define PyInt_FromUnicode PyLong_FromUnicode #define PyInt_FromLong PyLong_FromLong #define PyInt_FromSize_t PyLong_FromSize_t #define PyInt_FromSsize_t PyLong_FromSsize_t #define PyInt_AsLong PyLong_AsLong #define PyInt_AS_LONG PyLong_AS_LONG #define PyInt_AsSsize_t PyLong_AsSsize_t #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask #define PyNumber_Int PyNumber_Long #endif #if PY_MAJOR_VERSION >= 3 #define PyBoolObject PyLongObject #endif #if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY #ifndef PyUnicode_InternFromString #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) #endif #endif #if PY_VERSION_HEX < 
0x030200A4 typedef long Py_hash_t; #define __Pyx_PyInt_FromHash_t PyInt_FromLong #define __Pyx_PyInt_AsHash_t PyInt_AsLong #else #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t #endif #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : PyInstanceMethod_New(func)) #else #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) #endif #ifndef __has_attribute #define __has_attribute(x) 0 #endif #ifndef __has_cpp_attribute #define __has_cpp_attribute(x) 0 #endif #if CYTHON_USE_ASYNC_SLOTS #if PY_VERSION_HEX >= 0x030500B1 #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) #else #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) #endif #else #define __Pyx_PyType_AsAsync(obj) NULL #endif #ifndef __Pyx_PyAsyncMethodsStruct typedef struct { unaryfunc am_await; unaryfunc am_aiter; unaryfunc am_anext; } __Pyx_PyAsyncMethodsStruct; #endif #ifndef CYTHON_RESTRICT #if defined(__GNUC__) #define CYTHON_RESTRICT __restrict__ #elif defined(_MSC_VER) && _MSC_VER >= 1400 #define CYTHON_RESTRICT __restrict #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L #define CYTHON_RESTRICT restrict #else #define CYTHON_RESTRICT #endif #endif #ifndef CYTHON_UNUSED # if defined(__GNUC__) # if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) # define CYTHON_UNUSED __attribute__ ((__unused__)) # else # define CYTHON_UNUSED # endif # elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) # define CYTHON_UNUSED __attribute__ ((__unused__)) # else # define CYTHON_UNUSED # endif #endif #ifndef CYTHON_MAYBE_UNUSED_VAR # if defined(__cplusplus) template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } # else # define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) # endif #endif #ifndef CYTHON_NCP_UNUSED # if 
CYTHON_COMPILING_IN_CPYTHON # define CYTHON_NCP_UNUSED # else # define CYTHON_NCP_UNUSED CYTHON_UNUSED # endif #endif #define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) #ifdef _MSC_VER #ifndef _MSC_STDINT_H_ #if _MSC_VER < 1300 typedef unsigned char uint8_t; typedef unsigned int uint32_t; #else typedef unsigned __int8 uint8_t; typedef unsigned __int32 uint32_t; #endif #endif #else #include #endif #ifndef CYTHON_FALLTHROUGH #if defined(__cplusplus) && __cplusplus >= 201103L #if __has_cpp_attribute(fallthrough) #define CYTHON_FALLTHROUGH [[fallthrough]] #elif __has_cpp_attribute(clang::fallthrough) #define CYTHON_FALLTHROUGH [[clang::fallthrough]] #elif __has_cpp_attribute(gnu::fallthrough) #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] #endif #endif #ifndef CYTHON_FALLTHROUGH #if __has_attribute(fallthrough) #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) #else #define CYTHON_FALLTHROUGH #endif #endif #if defined(__clang__ ) && defined(__apple_build_version__) #if __apple_build_version__ < 7000000 #undef CYTHON_FALLTHROUGH #define CYTHON_FALLTHROUGH #endif #endif #endif #ifndef CYTHON_INLINE #if defined(__clang__) #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) #elif defined(__GNUC__) #define CYTHON_INLINE __inline__ #elif defined(_MSC_VER) #define CYTHON_INLINE __inline #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L #define CYTHON_INLINE inline #else #define CYTHON_INLINE #endif #endif #if defined(WIN32) || defined(MS_WINDOWS) #define _USE_MATH_DEFINES #endif #include #ifdef NAN #define __PYX_NAN() ((float) NAN) #else static CYTHON_INLINE float __PYX_NAN() { float value; memset(&value, 0xFF, sizeof(value)); return value; } #endif #if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) #define __Pyx_truncl trunc #else #define __Pyx_truncl truncl #endif #define __PYX_ERR(f_index, lineno, Ln_error) \ { \ __pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto 
Ln_error; \ } #ifndef __PYX_EXTERN_C #ifdef __cplusplus #define __PYX_EXTERN_C extern "C" #else #define __PYX_EXTERN_C extern #endif #endif #define __PYX_HAVE__aiohttp___frozenlist #define __PYX_HAVE_API__aiohttp___frozenlist #ifdef _OPENMP #include #endif /* _OPENMP */ #if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) #define CYTHON_WITHOUT_ASSERTIONS #endif typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; #define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 #define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 0 #define __PYX_DEFAULT_STRING_ENCODING "" #define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString #define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize #define __Pyx_uchar_cast(c) ((unsigned char)c) #define __Pyx_long_cast(x) ((long)x) #define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ (sizeof(type) < sizeof(Py_ssize_t)) ||\ (sizeof(type) > sizeof(Py_ssize_t) &&\ likely(v < (type)PY_SSIZE_T_MAX ||\ v == (type)PY_SSIZE_T_MAX) &&\ (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ v == (type)PY_SSIZE_T_MIN))) ||\ (sizeof(type) == sizeof(Py_ssize_t) &&\ (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ v == (type)PY_SSIZE_T_MAX))) ) #if defined (__cplusplus) && __cplusplus >= 201103L #include #define __Pyx_sst_abs(value) std::abs(value) #elif SIZEOF_INT >= SIZEOF_SIZE_T #define __Pyx_sst_abs(value) abs(value) #elif SIZEOF_LONG >= SIZEOF_SIZE_T #define __Pyx_sst_abs(value) labs(value) #elif defined (_MSC_VER) #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L #define __Pyx_sst_abs(value) llabs(value) #elif defined (__GNUC__) #define __Pyx_sst_abs(value) __builtin_llabs(value) #else #define __Pyx_sst_abs(value) ((value<0) ? 
-value : value) #endif static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); #define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) #define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) #define __Pyx_PyBytes_FromString PyBytes_FromString #define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); #if PY_MAJOR_VERSION < 3 #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize #else #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize #endif #define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) #define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) #define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) #define __Pyx_PyByteArray_FromCString(s) 
__Pyx_PyByteArray_FromString((const char*)s) #define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) #define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { const Py_UNICODE *u_end = u; while (*u_end++) ; return (size_t)(u_end - u - 1); } #define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) #define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode #define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode #define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) #define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) #define __Pyx_PyBool_FromLong(b) ((b) ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False)) static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); #define __Pyx_PySequence_Tuple(obj)\ (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); #if CYTHON_ASSUME_SAFE_MACROS #define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) #else #define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) #endif #define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) #else #define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) #endif #define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? 
__Pyx_NewRef(x) : PyNumber_Float(x)) #if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII static int __Pyx_sys_getdefaultencoding_not_ascii; static int __Pyx_init_sys_getdefaultencoding_params(void) { PyObject* sys; PyObject* default_encoding = NULL; PyObject* ascii_chars_u = NULL; PyObject* ascii_chars_b = NULL; const char* default_encoding_c; sys = PyImport_ImportModule("sys"); if (!sys) goto bad; default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); Py_DECREF(sys); if (!default_encoding) goto bad; default_encoding_c = PyBytes_AsString(default_encoding); if (!default_encoding_c) goto bad; if (strcmp(default_encoding_c, "ascii") == 0) { __Pyx_sys_getdefaultencoding_not_ascii = 0; } else { char ascii_chars[128]; int c; for (c = 0; c < 128; c++) { ascii_chars[c] = c; } __Pyx_sys_getdefaultencoding_not_ascii = 1; ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); if (!ascii_chars_u) goto bad; ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { PyErr_Format( PyExc_ValueError, "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", default_encoding_c); goto bad; } Py_DECREF(ascii_chars_u); Py_DECREF(ascii_chars_b); } Py_DECREF(default_encoding); return 0; bad: Py_XDECREF(default_encoding); Py_XDECREF(ascii_chars_u); Py_XDECREF(ascii_chars_b); return -1; } #endif #if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 #define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) #else #define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) #if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT static char* __PYX_DEFAULT_STRING_ENCODING; static int __Pyx_init_sys_getdefaultencoding_params(void) { PyObject* sys; 
PyObject* default_encoding = NULL; char* default_encoding_c; sys = PyImport_ImportModule("sys"); if (!sys) goto bad; default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); Py_DECREF(sys); if (!default_encoding) goto bad; default_encoding_c = PyBytes_AsString(default_encoding); if (!default_encoding_c) goto bad; __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c)); if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); Py_DECREF(default_encoding); return 0; bad: Py_XDECREF(default_encoding); return -1; } #endif #endif /* Test for GCC > 2.95 */ #if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) #define likely(x) __builtin_expect(!!(x), 1) #define unlikely(x) __builtin_expect(!!(x), 0) #else /* !__GNUC__ or GCC < 2.95 */ #define likely(x) (x) #define unlikely(x) (x) #endif /* __GNUC__ */ static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } static PyObject *__pyx_m = NULL; static PyObject *__pyx_d; static PyObject *__pyx_b; static PyObject *__pyx_cython_runtime; static PyObject *__pyx_empty_tuple; static PyObject *__pyx_empty_bytes; static PyObject *__pyx_empty_unicode; static int __pyx_lineno; static int __pyx_clineno = 0; static const char * __pyx_cfilenm= __FILE__; static const char *__pyx_filename; static const char *__pyx_f[] = { "aiohttp\\_frozenlist.pyx", "stringsource", }; /*--- Type declarations ---*/ struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList; /* "aiohttp/_frozenlist.pyx":4 * * * cdef class FrozenList: # <<<<<<<<<<<<<< * * cdef readonly bint frozen */ struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList { PyObject_HEAD struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *__pyx_vtab; int frozen; PyObject *_items; }; struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList { PyObject *(*_check_frozen)(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *); PyObject 
*(*_fast_len)(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *); }; static struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *__pyx_vtabptr_7aiohttp_11_frozenlist_FrozenList; static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_11_frozenlist_10FrozenList__fast_len(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *); /* --- Runtime support code (head) --- */ /* Refnanny.proto */ #ifndef CYTHON_REFNANNY #define CYTHON_REFNANNY 0 #endif #if CYTHON_REFNANNY typedef struct { void (*INCREF)(void*, PyObject*, int); void (*DECREF)(void*, PyObject*, int); void (*GOTREF)(void*, PyObject*, int); void (*GIVEREF)(void*, PyObject*, int); void* (*SetupContext)(const char*, int, const char*); void (*FinishContext)(void**); } __Pyx_RefNannyAPIStruct; static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; #ifdef WITH_THREAD #define __Pyx_RefNannySetupContext(name, acquire_gil)\ if (acquire_gil) {\ PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ PyGILState_Release(__pyx_gilstate_save);\ } else {\ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ } #else #define __Pyx_RefNannySetupContext(name, acquire_gil)\ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) #endif #define __Pyx_RefNannyFinishContext()\ __Pyx_RefNanny->FinishContext(&__pyx_refnanny) #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) #define 
__Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) #else #define __Pyx_RefNannyDeclarations #define __Pyx_RefNannySetupContext(name, acquire_gil) #define __Pyx_RefNannyFinishContext() #define __Pyx_INCREF(r) Py_INCREF(r) #define __Pyx_DECREF(r) Py_DECREF(r) #define __Pyx_GOTREF(r) #define __Pyx_GIVEREF(r) #define __Pyx_XINCREF(r) Py_XINCREF(r) #define __Pyx_XDECREF(r) Py_XDECREF(r) #define __Pyx_XGOTREF(r) #define __Pyx_XGIVEREF(r) #endif #define __Pyx_XDECREF_SET(r, v) do {\ PyObject *tmp = (PyObject *) r;\ r = v; __Pyx_XDECREF(tmp);\ } while (0) #define __Pyx_DECREF_SET(r, v) do {\ PyObject *tmp = (PyObject *) r;\ r = v; __Pyx_DECREF(tmp);\ } while (0) #define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) #define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) /* PyObjectGetAttrStr.proto */ #if CYTHON_USE_TYPE_SLOTS static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { PyTypeObject* tp = Py_TYPE(obj); if (likely(tp->tp_getattro)) return tp->tp_getattro(obj, attr_name); #if PY_MAJOR_VERSION < 3 if (likely(tp->tp_getattr)) return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); #endif return PyObject_GetAttr(obj, attr_name); } #else #define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) #endif /* GetBuiltinName.proto */ static PyObject *__Pyx_GetBuiltinName(PyObject *name); /* RaiseDoubleKeywords.proto */ static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); /* ParseKeywords.proto */ static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ const char* function_name); /* RaiseArgTupleInvalid.proto */ static void 
__Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); /* PyObjectCall.proto */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); #else #define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) #endif /* PyThreadStateGet.proto */ #if CYTHON_FAST_THREAD_STATE #define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; #define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; #define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type #else #define __Pyx_PyThreadState_declare #define __Pyx_PyThreadState_assign #define __Pyx_PyErr_Occurred() PyErr_Occurred() #endif /* PyErrFetchRestore.proto */ #if CYTHON_FAST_THREAD_STATE #define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) #define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) #define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) #define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) #define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); #if CYTHON_COMPILING_IN_CPYTHON #define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) #else #define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) #endif #else #define __Pyx_PyErr_Clear() PyErr_Clear() #define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) #define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) #define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) #define 
__Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) #define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) #define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) #define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) #endif /* RaiseException.proto */ static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); /* PyCFunctionFastCall.proto */ #if CYTHON_FAST_PYCCALL static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); #else #define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) #endif /* PyFunctionFastCall.proto */ #if CYTHON_FAST_PYCALL #define __Pyx_PyFunction_FastCall(func, args, nargs)\ __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) #if 1 || PY_VERSION_HEX < 0x030600B1 static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs); #else #define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) #endif #endif /* PyObjectCallMethO.proto */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); #endif /* PyObjectCallOneArg.proto */ static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); /* PyObjectCallNoArg.proto */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); #else #define __Pyx_PyObject_CallNoArg(func) __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL) #endif /* PyIntBinop.proto */ #if !CYTHON_COMPILING_IN_PYPY static PyObject* __Pyx_PyInt_EqObjC(PyObject *op1, PyObject *op2, long intval, int inplace); #else #define __Pyx_PyInt_EqObjC(op1, op2, intval, inplace)\ PyObject_RichCompare(op1, op2, Py_EQ) #endif /* PySequenceContains.proto */ static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, 
PyObject* seq, int eq) { int result = PySequence_Contains(seq, item); return unlikely(result < 0) ? result : (result == (eq == Py_EQ)); } /* PyObjectCallMethod1.proto */ static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg); static PyObject* __Pyx__PyObject_CallMethod1(PyObject* method, PyObject* arg); /* pop_index.proto */ static PyObject* __Pyx__PyObject_PopNewIndex(PyObject* L, PyObject* py_ix); static PyObject* __Pyx__PyObject_PopIndex(PyObject* L, PyObject* py_ix); #if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS static PyObject* __Pyx__PyList_PopIndex(PyObject* L, PyObject* py_ix, Py_ssize_t ix); #define __Pyx_PyObject_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) (\ (likely(PyList_CheckExact(L) && __Pyx_fits_Py_ssize_t(ix, type, is_signed))) ?\ __Pyx__PyList_PopIndex(L, py_ix, ix) : (\ (unlikely(py_ix == Py_None)) ? __Pyx__PyObject_PopNewIndex(L, to_py_func(ix)) :\ __Pyx__PyObject_PopIndex(L, py_ix))) #define __Pyx_PyList_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) (\ __Pyx_fits_Py_ssize_t(ix, type, is_signed) ?\ __Pyx__PyList_PopIndex(L, py_ix, ix) : (\ (unlikely(py_ix == Py_None)) ? __Pyx__PyObject_PopNewIndex(L, to_py_func(ix)) :\ __Pyx__PyObject_PopIndex(L, py_ix))) #else #define __Pyx_PyList_PopIndex(L, py_ix, ix, is_signed, type, to_py_func)\ __Pyx_PyObject_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) #define __Pyx_PyObject_PopIndex(L, py_ix, ix, is_signed, type, to_py_func) (\ (unlikely(py_ix == Py_None)) ? 
__Pyx__PyObject_PopNewIndex(L, to_py_func(ix)) :\ __Pyx__PyObject_PopIndex(L, py_ix)) #endif /* ListAppend.proto */ #if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS static CYTHON_INLINE int __Pyx_PyList_Append(PyObject* list, PyObject* x) { PyListObject* L = (PyListObject*) list; Py_ssize_t len = Py_SIZE(list); if (likely(L->allocated > len) & likely(len > (L->allocated >> 1))) { Py_INCREF(x); PyList_SET_ITEM(list, len, x); Py_SIZE(list) = len+1; return 0; } return PyList_Append(list, x); } #else #define __Pyx_PyList_Append(L,x) PyList_Append(L,x) #endif /* PyErrExceptionMatches.proto */ #if CYTHON_FAST_THREAD_STATE #define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); #else #define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) #endif /* GetAttr.proto */ static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *); /* GetAttr3.proto */ static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); /* GetModuleGlobalName.proto */ static CYTHON_INLINE PyObject *__Pyx_GetModuleGlobalName(PyObject *name); /* Import.proto */ static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); /* ImportFrom.proto */ static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); /* GetItemInt.proto */ #define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ (is_list ? 
(PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ __Pyx_GetItemInt_Generic(o, to_py_func(i)))) #define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, int wraparound, int boundscheck); #define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, int wraparound, int boundscheck); static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, int wraparound, int boundscheck); /* HasAttr.proto */ static CYTHON_INLINE int __Pyx_HasAttr(PyObject *, PyObject *); /* SetVTable.proto */ static int __Pyx_SetVtable(PyObject *dict, void *vtable); /* SetupReduce.proto */ static int __Pyx_setup_reduce(PyObject* type_obj); /* CLineInTraceback.proto */ #ifdef CYTHON_CLINE_IN_TRACEBACK #define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? 
c_line : 0) #else static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); #endif /* CodeObjectCache.proto */ typedef struct { PyCodeObject* code_object; int code_line; } __Pyx_CodeObjectCacheEntry; struct __Pyx_CodeObjectCache { int count; int max_count; __Pyx_CodeObjectCacheEntry* entries; }; static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); static PyCodeObject *__pyx_find_code_object(int code_line); static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); /* AddTraceback.proto */ static void __Pyx_AddTraceback(const char *funcname, int c_line, int py_line, const char *filename); /* CIntToPy.proto */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value); /* CIntToPy.proto */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); /* CIntFromPy.proto */ static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); /* CIntFromPy.proto */ static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); /* FastTypeChecks.proto */ #if CYTHON_COMPILING_IN_CPYTHON #define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); #else #define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) #define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) #define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) #endif /* CheckBinaryVersion.proto */ static int __Pyx_check_binary_version(void); /* InitStrings.proto */ static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); static 
PyObject *__pyx_f_7aiohttp_11_frozenlist_10FrozenList__check_frozen(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto*/ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_11_frozenlist_10FrozenList__fast_len(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto*/ /* Module declarations from 'aiohttp._frozenlist' */ static PyTypeObject *__pyx_ptype_7aiohttp_11_frozenlist_FrozenList = 0; static PyObject *__pyx_f_7aiohttp_11_frozenlist___pyx_unpickle_FrozenList__set_state(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *, PyObject *); /*proto*/ #define __Pyx_MODULE_NAME "aiohttp._frozenlist" extern int __pyx_module_is_main_aiohttp___frozenlist; int __pyx_module_is_main_aiohttp___frozenlist = 0; /* Implementation of 'aiohttp._frozenlist' */ static PyObject *__pyx_builtin_RuntimeError; static const char __pyx_k_new[] = "__new__"; static const char __pyx_k_pop[] = "pop"; static const char __pyx_k_pos[] = "pos"; static const char __pyx_k_dict[] = "__dict__"; static const char __pyx_k_item[] = "item"; static const char __pyx_k_iter[] = "__iter__"; static const char __pyx_k_main[] = "__main__"; static const char __pyx_k_name[] = "__name__"; static const char __pyx_k_test[] = "__test__"; static const char __pyx_k_clear[] = "clear"; static const char __pyx_k_count[] = "count"; static const char __pyx_k_index[] = "index"; static const char __pyx_k_items[] = "items"; static const char __pyx_k_format[] = "format"; static const char __pyx_k_import[] = "__import__"; static const char __pyx_k_pickle[] = "pickle"; static const char __pyx_k_reduce[] = "__reduce__"; static const char __pyx_k_remove[] = "remove"; static const char __pyx_k_update[] = "update"; static const char __pyx_k_getstate[] = "__getstate__"; static const char __pyx_k_pyx_type[] = "__pyx_type"; static const char __pyx_k_register[] = "register"; static const char __pyx_k_reversed[] = "__reversed__"; static const char __pyx_k_setstate[] = "__setstate__"; static const 
char __pyx_k_pyx_state[] = "__pyx_state"; static const char __pyx_k_reduce_ex[] = "__reduce_ex__"; static const char __pyx_k_pyx_result[] = "__pyx_result"; static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__"; static const char __pyx_k_PickleError[] = "PickleError"; static const char __pyx_k_RuntimeError[] = "RuntimeError"; static const char __pyx_k_pyx_checksum[] = "__pyx_checksum"; static const char __pyx_k_stringsource[] = "stringsource"; static const char __pyx_k_reduce_cython[] = "__reduce_cython__"; static const char __pyx_k_MutableSequence[] = "MutableSequence"; static const char __pyx_k_collections_abc[] = "collections.abc"; static const char __pyx_k_pyx_PickleError[] = "__pyx_PickleError"; static const char __pyx_k_setstate_cython[] = "__setstate_cython__"; static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; static const char __pyx_k_FrozenList_frozen_r[] = ""; static const char __pyx_k_aiohttp__frozenlist[] = "aiohttp._frozenlist"; static const char __pyx_k_pyx_unpickle_FrozenList[] = "__pyx_unpickle_FrozenList"; static const char __pyx_k_Cannot_modify_frozen_list[] = "Cannot modify frozen list."; static const char __pyx_k_Incompatible_checksums_s_vs_0x94[] = "Incompatible checksums (%s vs 0x949a143 = (_items, frozen))"; static PyObject *__pyx_kp_s_Cannot_modify_frozen_list; static PyObject *__pyx_kp_s_FrozenList_frozen_r; static PyObject *__pyx_kp_s_Incompatible_checksums_s_vs_0x94; static PyObject *__pyx_n_s_MutableSequence; static PyObject *__pyx_n_s_PickleError; static PyObject *__pyx_n_s_RuntimeError; static PyObject *__pyx_n_s_aiohttp__frozenlist; static PyObject *__pyx_n_s_clear; static PyObject *__pyx_n_s_cline_in_traceback; static PyObject *__pyx_n_s_collections_abc; static PyObject *__pyx_n_s_count; static PyObject *__pyx_n_s_dict; static PyObject *__pyx_n_s_format; static PyObject *__pyx_n_s_getstate; static PyObject *__pyx_n_s_import; static PyObject *__pyx_n_s_index; static PyObject *__pyx_n_s_item; static PyObject 
*__pyx_n_s_items; static PyObject *__pyx_n_s_iter; static PyObject *__pyx_n_s_main; static PyObject *__pyx_n_s_name; static PyObject *__pyx_n_s_new; static PyObject *__pyx_n_s_pickle; static PyObject *__pyx_n_s_pop; static PyObject *__pyx_n_s_pos; static PyObject *__pyx_n_s_pyx_PickleError; static PyObject *__pyx_n_s_pyx_checksum; static PyObject *__pyx_n_s_pyx_result; static PyObject *__pyx_n_s_pyx_state; static PyObject *__pyx_n_s_pyx_type; static PyObject *__pyx_n_s_pyx_unpickle_FrozenList; static PyObject *__pyx_n_s_pyx_vtable; static PyObject *__pyx_n_s_reduce; static PyObject *__pyx_n_s_reduce_cython; static PyObject *__pyx_n_s_reduce_ex; static PyObject *__pyx_n_s_register; static PyObject *__pyx_n_s_remove; static PyObject *__pyx_n_s_reversed; static PyObject *__pyx_n_s_setstate; static PyObject *__pyx_n_s_setstate_cython; static PyObject *__pyx_kp_s_stringsource; static PyObject *__pyx_n_s_test; static PyObject *__pyx_n_s_update; static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList___init__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_items); /* proto */ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_2freeze(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_4__getitem__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_index); /* proto */ static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_6__setitem__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_index, PyObject *__pyx_v_value); /* proto */ static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_8__delitem__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_index); /* proto */ static Py_ssize_t __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_10__len__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ static PyObject 
*__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_12__iter__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_14__reversed__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_16__richcmp__(PyObject *__pyx_v_self, PyObject *__pyx_v_other, PyObject *__pyx_v_op); /* proto */ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_18insert(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_pos, PyObject *__pyx_v_item); /* proto */ static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_20__contains__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item); /* proto */ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_22__iadd__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_items); /* proto */ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_24index(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item); /* proto */ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_26remove(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item); /* proto */ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_28clear(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_30extend(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_items); /* proto */ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_32reverse(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_34pop(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_index); /* proto */ 
static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_36append(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item); /* proto */ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_38count(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item); /* proto */ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_40__repr__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_6frozen___get__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_42__reduce_cython__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self); /* proto */ static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_44__setstate_cython__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ static PyObject *__pyx_pf_7aiohttp_11_frozenlist___pyx_unpickle_FrozenList(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ static PyObject *__pyx_tp_new_7aiohttp_11_frozenlist_FrozenList(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ static PyObject *__pyx_int_0; static PyObject *__pyx_int_1; static PyObject *__pyx_int_2; static PyObject *__pyx_int_3; static PyObject *__pyx_int_4; static PyObject *__pyx_int_5; static PyObject *__pyx_int_155820355; static PyObject *__pyx_int_neg_1; static PyObject *__pyx_tuple_; static PyObject *__pyx_tuple__2; static PyObject *__pyx_codeobj__3; /* "aiohttp/_frozenlist.pyx":9 * cdef list _items * * def __init__(self, items=None): # <<<<<<<<<<<<<< * self.frozen = False * if items is not None: */ /* Python wrapper */ static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject 
*__pyx_kwds); /*proto*/ static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_items = 0; int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_items,0}; PyObject* values[1] = {0}; values[0] = ((PyObject *)Py_None); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (kw_args > 0) { PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_items); if (value) { values[0] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 9, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_items = values[0]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("__init__", 0, 0, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 9, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return -1; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList___init__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), __pyx_v_items); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList___init__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_items) { 
int __pyx_r; __Pyx_RefNannyDeclarations int __pyx_t_1; int __pyx_t_2; PyObject *__pyx_t_3 = NULL; __Pyx_RefNannySetupContext("__init__", 0); __Pyx_INCREF(__pyx_v_items); /* "aiohttp/_frozenlist.pyx":10 * * def __init__(self, items=None): * self.frozen = False # <<<<<<<<<<<<<< * if items is not None: * items = list(items) */ __pyx_v_self->frozen = 0; /* "aiohttp/_frozenlist.pyx":11 * def __init__(self, items=None): * self.frozen = False * if items is not None: # <<<<<<<<<<<<<< * items = list(items) * else: */ __pyx_t_1 = (__pyx_v_items != Py_None); __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { /* "aiohttp/_frozenlist.pyx":12 * self.frozen = False * if items is not None: * items = list(items) # <<<<<<<<<<<<<< * else: * items = [] */ __pyx_t_3 = PySequence_List(__pyx_v_items); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 12, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF_SET(__pyx_v_items, __pyx_t_3); __pyx_t_3 = 0; /* "aiohttp/_frozenlist.pyx":11 * def __init__(self, items=None): * self.frozen = False * if items is not None: # <<<<<<<<<<<<<< * items = list(items) * else: */ goto __pyx_L3; } /* "aiohttp/_frozenlist.pyx":14 * items = list(items) * else: * items = [] # <<<<<<<<<<<<<< * self._items = items * */ /*else*/ { __pyx_t_3 = PyList_New(0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 14, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF_SET(__pyx_v_items, __pyx_t_3); __pyx_t_3 = 0; } __pyx_L3:; /* "aiohttp/_frozenlist.pyx":15 * else: * items = [] * self._items = items # <<<<<<<<<<<<<< * * cdef object _check_frozen(self): */ if (!(likely(PyList_CheckExact(__pyx_v_items))||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "list", Py_TYPE(__pyx_v_items)->tp_name), 0))) __PYX_ERR(0, 15, __pyx_L1_error) __pyx_t_3 = __pyx_v_items; __Pyx_INCREF(__pyx_t_3); __Pyx_GIVEREF(__pyx_t_3); __Pyx_GOTREF(__pyx_v_self->_items); __Pyx_DECREF(__pyx_v_self->_items); __pyx_v_self->_items = ((PyObject*)__pyx_t_3); __pyx_t_3 = 0; /* "aiohttp/_frozenlist.pyx":9 * cdef list 
_items * * def __init__(self, items=None): # <<<<<<<<<<<<<< * self.frozen = False * if items is not None: */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_XDECREF(__pyx_v_items); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_frozenlist.pyx":17 * self._items = items * * cdef object _check_frozen(self): # <<<<<<<<<<<<<< * if self.frozen: * raise RuntimeError("Cannot modify frozen list.") */ static PyObject *__pyx_f_7aiohttp_11_frozenlist_10FrozenList__check_frozen(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations int __pyx_t_1; PyObject *__pyx_t_2 = NULL; __Pyx_RefNannySetupContext("_check_frozen", 0); /* "aiohttp/_frozenlist.pyx":18 * * cdef object _check_frozen(self): * if self.frozen: # <<<<<<<<<<<<<< * raise RuntimeError("Cannot modify frozen list.") * */ __pyx_t_1 = (__pyx_v_self->frozen != 0); if (__pyx_t_1) { /* "aiohttp/_frozenlist.pyx":19 * cdef object _check_frozen(self): * if self.frozen: * raise RuntimeError("Cannot modify frozen list.") # <<<<<<<<<<<<<< * * cdef inline object _fast_len(self): */ __pyx_t_2 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 19, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_Raise(__pyx_t_2, 0, 0, 0); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __PYX_ERR(0, 19, __pyx_L1_error) /* "aiohttp/_frozenlist.pyx":18 * * cdef object _check_frozen(self): * if self.frozen: # <<<<<<<<<<<<<< * raise RuntimeError("Cannot modify frozen list.") * */ } /* "aiohttp/_frozenlist.pyx":17 * self._items = items * * cdef object _check_frozen(self): # <<<<<<<<<<<<<< * if self.frozen: * raise RuntimeError("Cannot modify frozen list.") */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; 
__pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList._check_frozen", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_frozenlist.pyx":21 * raise RuntimeError("Cannot modify frozen list.") * * cdef inline object _fast_len(self): # <<<<<<<<<<<<<< * return len(self._items) * */ static CYTHON_INLINE PyObject *__pyx_f_7aiohttp_11_frozenlist_10FrozenList__fast_len(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; Py_ssize_t __pyx_t_2; __Pyx_RefNannySetupContext("_fast_len", 0); /* "aiohttp/_frozenlist.pyx":22 * * cdef inline object _fast_len(self): * return len(self._items) # <<<<<<<<<<<<<< * * def freeze(self): */ __Pyx_XDECREF(__pyx_r); __pyx_t_1 = __pyx_v_self->_items; __Pyx_INCREF(__pyx_t_1); if (unlikely(__pyx_t_1 == Py_None)) { PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); __PYX_ERR(0, 22, __pyx_L1_error) } __pyx_t_2 = PyList_GET_SIZE(__pyx_t_1); if (unlikely(__pyx_t_2 == ((Py_ssize_t)-1))) __PYX_ERR(0, 22, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_1 = PyInt_FromSsize_t(__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 22, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0; /* "aiohttp/_frozenlist.pyx":21 * raise RuntimeError("Cannot modify frozen list.") * * cdef inline object _fast_len(self): # <<<<<<<<<<<<<< * return len(self._items) * */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList._fast_len", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_frozenlist.pyx":24 * return len(self._items) * * def freeze(self): # <<<<<<<<<<<<<< * self.frozen = True * 
*/ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_3freeze(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_3freeze(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("freeze (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_2freeze(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_2freeze(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("freeze", 0); /* "aiohttp/_frozenlist.pyx":25 * * def freeze(self): * self.frozen = True # <<<<<<<<<<<<<< * * def __getitem__(self, index): */ __pyx_v_self->frozen = 1; /* "aiohttp/_frozenlist.pyx":24 * return len(self._items) * * def freeze(self): # <<<<<<<<<<<<<< * self.frozen = True * */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_frozenlist.pyx":27 * self.frozen = True * * def __getitem__(self, index): # <<<<<<<<<<<<<< * return self._items[index] * */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_5__getitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_index); /*proto*/ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_5__getitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_index) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__getitem__ (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_4__getitem__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_index)); /* function exit code */ __Pyx_RefNannyFinishContext(); 
return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_4__getitem__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_index) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("__getitem__", 0); /* "aiohttp/_frozenlist.pyx":28 * * def __getitem__(self, index): * return self._items[index] # <<<<<<<<<<<<<< * * def __setitem__(self, index, value): */ __Pyx_XDECREF(__pyx_r); if (unlikely(__pyx_v_self->_items == Py_None)) { PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); __PYX_ERR(0, 28, __pyx_L1_error) } __pyx_t_1 = PyObject_GetItem(__pyx_v_self->_items, __pyx_v_index); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 28, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0; /* "aiohttp/_frozenlist.pyx":27 * self.frozen = True * * def __getitem__(self, index): # <<<<<<<<<<<<<< * return self._items[index] * */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__getitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_frozenlist.pyx":30 * return self._items[index] * * def __setitem__(self, index, value): # <<<<<<<<<<<<<< * self._check_frozen() * self._items[index] = value */ /* Python wrapper */ static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_7__setitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_index, PyObject *__pyx_v_value); /*proto*/ static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_7__setitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_index, PyObject *__pyx_v_value) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__setitem__ (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_6__setitem__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), 
((PyObject *)__pyx_v_index), ((PyObject *)__pyx_v_value)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_6__setitem__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_index, PyObject *__pyx_v_value) { int __pyx_r; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("__setitem__", 0); /* "aiohttp/_frozenlist.pyx":31 * * def __setitem__(self, index, value): * self._check_frozen() # <<<<<<<<<<<<<< * self._items[index] = value * */ __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 31, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_frozenlist.pyx":32 * def __setitem__(self, index, value): * self._check_frozen() * self._items[index] = value # <<<<<<<<<<<<<< * * def __delitem__(self, index): */ if (unlikely(__pyx_v_self->_items == Py_None)) { PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); __PYX_ERR(0, 32, __pyx_L1_error) } if (unlikely(PyObject_SetItem(__pyx_v_self->_items, __pyx_v_index, __pyx_v_value) < 0)) __PYX_ERR(0, 32, __pyx_L1_error) /* "aiohttp/_frozenlist.pyx":30 * return self._items[index] * * def __setitem__(self, index, value): # <<<<<<<<<<<<<< * self._check_frozen() * self._items[index] = value */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__setitem__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_frozenlist.pyx":34 * self._items[index] = value * * def __delitem__(self, index): # <<<<<<<<<<<<<< * self._check_frozen() * del self._items[index] */ /* Python wrapper */ static int 
__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_9__delitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_index); /*proto*/ static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_9__delitem__(PyObject *__pyx_v_self, PyObject *__pyx_v_index) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__delitem__ (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_8__delitem__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_index)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_8__delitem__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_index) { int __pyx_r; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("__delitem__", 0); /* "aiohttp/_frozenlist.pyx":35 * * def __delitem__(self, index): * self._check_frozen() # <<<<<<<<<<<<<< * del self._items[index] * */ __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 35, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_frozenlist.pyx":36 * def __delitem__(self, index): * self._check_frozen() * del self._items[index] # <<<<<<<<<<<<<< * * def __len__(self): */ if (unlikely(__pyx_v_self->_items == Py_None)) { PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); __PYX_ERR(0, 36, __pyx_L1_error) } if (unlikely(PyObject_DelItem(__pyx_v_self->_items, __pyx_v_index) < 0)) __PYX_ERR(0, 36, __pyx_L1_error) /* "aiohttp/_frozenlist.pyx":34 * self._items[index] = value * * def __delitem__(self, index): # <<<<<<<<<<<<<< * self._check_frozen() * del self._items[index] */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__delitem__", 
__pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_frozenlist.pyx":38 * del self._items[index] * * def __len__(self): # <<<<<<<<<<<<<< * return self._fast_len() * */ /* Python wrapper */ static Py_ssize_t __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_11__len__(PyObject *__pyx_v_self); /*proto*/ static Py_ssize_t __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_11__len__(PyObject *__pyx_v_self) { Py_ssize_t __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__len__ (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_10__len__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static Py_ssize_t __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_10__len__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { Py_ssize_t __pyx_r; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; Py_ssize_t __pyx_t_2; __Pyx_RefNannySetupContext("__len__", 0); /* "aiohttp/_frozenlist.pyx":39 * * def __len__(self): * return self._fast_len() # <<<<<<<<<<<<<< * * def __iter__(self): */ __pyx_t_1 = __pyx_f_7aiohttp_11_frozenlist_10FrozenList__fast_len(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 39, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = __Pyx_PyIndex_AsSsize_t(__pyx_t_1); if (unlikely((__pyx_t_2 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 39, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_r = __pyx_t_2; goto __pyx_L0; /* "aiohttp/_frozenlist.pyx":38 * del self._items[index] * * def __len__(self): # <<<<<<<<<<<<<< * return self._fast_len() * */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__len__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_frozenlist.pyx":41 * return 
self._fast_len() * * def __iter__(self): # <<<<<<<<<<<<<< * return self._items.__iter__() * */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_13__iter__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_13__iter__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__iter__ (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_12__iter__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_12__iter__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; __Pyx_RefNannySetupContext("__iter__", 0); /* "aiohttp/_frozenlist.pyx":42 * * def __iter__(self): * return self._items.__iter__() # <<<<<<<<<<<<<< * * def __reversed__(self): */ __Pyx_XDECREF(__pyx_r); __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_items, __pyx_n_s_iter); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 42, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); } } if (__pyx_t_3) { __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 42, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; } else { __pyx_t_1 = __Pyx_PyObject_CallNoArg(__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 42, __pyx_L1_error) } __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0; /* 
"aiohttp/_frozenlist.pyx":41 * return self._fast_len() * * def __iter__(self): # <<<<<<<<<<<<<< * return self._items.__iter__() * */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__iter__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_frozenlist.pyx":44 * return self._items.__iter__() * * def __reversed__(self): # <<<<<<<<<<<<<< * return self._items.__reversed__() * */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_15__reversed__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_15__reversed__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__reversed__ (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_14__reversed__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_14__reversed__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; __Pyx_RefNannySetupContext("__reversed__", 0); /* "aiohttp/_frozenlist.pyx":45 * * def __reversed__(self): * return self._items.__reversed__() # <<<<<<<<<<<<<< * * def __richcmp__(self, other, op): */ __Pyx_XDECREF(__pyx_r); __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_items, __pyx_n_s_reversed); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 45, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { __pyx_t_3 
= PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); } } if (__pyx_t_3) { __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 45, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; } else { __pyx_t_1 = __Pyx_PyObject_CallNoArg(__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 45, __pyx_L1_error) } __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0; /* "aiohttp/_frozenlist.pyx":44 * return self._items.__iter__() * * def __reversed__(self): # <<<<<<<<<<<<<< * return self._items.__reversed__() * */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__reversed__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_frozenlist.pyx":47 * return self._items.__reversed__() * * def __richcmp__(self, other, op): # <<<<<<<<<<<<<< * if op == 0: # < * return list(self) < other */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_17__richcmp__(PyObject *__pyx_v_self, PyObject *__pyx_v_other, int __pyx_arg_op); /*proto*/ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_17__richcmp__(PyObject *__pyx_v_self, PyObject *__pyx_v_other, int __pyx_arg_op) { PyObject *__pyx_v_op = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__richcmp__ (wrapper)", 0); __pyx_v_op = __Pyx_PyInt_From_int(__pyx_arg_op); if (unlikely(!__pyx_v_op)) __PYX_ERR(0, 47, __pyx_L3_error) __Pyx_GOTREF(__pyx_v_op); goto __pyx_L4_argument_unpacking_done; __pyx_L3_error:; __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__richcmp__", __pyx_clineno, 
__pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_16__richcmp__(((PyObject *)__pyx_v_self), ((PyObject *)__pyx_v_other), ((PyObject *)__pyx_v_op)); /* function exit code */ __Pyx_XDECREF(__pyx_v_op); __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_16__richcmp__(PyObject *__pyx_v_self, PyObject *__pyx_v_other, PyObject *__pyx_v_op) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; int __pyx_t_2; PyObject *__pyx_t_3 = NULL; __Pyx_RefNannySetupContext("__richcmp__", 0); /* "aiohttp/_frozenlist.pyx":48 * * def __richcmp__(self, other, op): * if op == 0: # < # <<<<<<<<<<<<<< * return list(self) < other * if op == 1: # <= */ __pyx_t_1 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_0, 0, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 48, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 48, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; if (__pyx_t_2) { /* "aiohttp/_frozenlist.pyx":49 * def __richcmp__(self, other, op): * if op == 0: # < * return list(self) < other # <<<<<<<<<<<<<< * if op == 1: # <= * return list(self) <= other */ __Pyx_XDECREF(__pyx_r); __pyx_t_1 = PySequence_List(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 49, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_3 = PyObject_RichCompare(__pyx_t_1, __pyx_v_other, Py_LT); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 49, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0; /* "aiohttp/_frozenlist.pyx":48 * * def __richcmp__(self, other, op): * if op == 0: # < # <<<<<<<<<<<<<< * return list(self) < other * if op == 1: # <= */ } /* "aiohttp/_frozenlist.pyx":50 * if op == 0: # < * return list(self) < other * if op == 1: # <= # <<<<<<<<<<<<<< * 
return list(self) <= other * if op == 2: # == */ __pyx_t_3 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_1, 1, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 50, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 50, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; if (__pyx_t_2) { /* "aiohttp/_frozenlist.pyx":51 * return list(self) < other * if op == 1: # <= * return list(self) <= other # <<<<<<<<<<<<<< * if op == 2: # == * return list(self) == other */ __Pyx_XDECREF(__pyx_r); __pyx_t_3 = PySequence_List(__pyx_v_self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 51, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_1 = PyObject_RichCompare(__pyx_t_3, __pyx_v_other, Py_LE); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 51, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0; /* "aiohttp/_frozenlist.pyx":50 * if op == 0: # < * return list(self) < other * if op == 1: # <= # <<<<<<<<<<<<<< * return list(self) <= other * if op == 2: # == */ } /* "aiohttp/_frozenlist.pyx":52 * if op == 1: # <= * return list(self) <= other * if op == 2: # == # <<<<<<<<<<<<<< * return list(self) == other * if op == 3: # != */ __pyx_t_1 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_2, 2, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 52, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 52, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; if (__pyx_t_2) { /* "aiohttp/_frozenlist.pyx":53 * return list(self) <= other * if op == 2: # == * return list(self) == other # <<<<<<<<<<<<<< * if op == 3: # != * return list(self) != other */ __Pyx_XDECREF(__pyx_r); __pyx_t_1 = PySequence_List(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 53, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_3 = PyObject_RichCompare(__pyx_t_1, __pyx_v_other, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); 
if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 53, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0; /* "aiohttp/_frozenlist.pyx":52 * if op == 1: # <= * return list(self) <= other * if op == 2: # == # <<<<<<<<<<<<<< * return list(self) == other * if op == 3: # != */ } /* "aiohttp/_frozenlist.pyx":54 * if op == 2: # == * return list(self) == other * if op == 3: # != # <<<<<<<<<<<<<< * return list(self) != other * if op == 4: # > */ __pyx_t_3 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_3, 3, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 54, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 54, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; if (__pyx_t_2) { /* "aiohttp/_frozenlist.pyx":55 * return list(self) == other * if op == 3: # != * return list(self) != other # <<<<<<<<<<<<<< * if op == 4: # > * return list(self) > other */ __Pyx_XDECREF(__pyx_r); __pyx_t_3 = PySequence_List(__pyx_v_self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 55, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_1 = PyObject_RichCompare(__pyx_t_3, __pyx_v_other, Py_NE); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 55, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0; /* "aiohttp/_frozenlist.pyx":54 * if op == 2: # == * return list(self) == other * if op == 3: # != # <<<<<<<<<<<<<< * return list(self) != other * if op == 4: # > */ } /* "aiohttp/_frozenlist.pyx":56 * if op == 3: # != * return list(self) != other * if op == 4: # > # <<<<<<<<<<<<<< * return list(self) > other * if op == 5: # => */ __pyx_t_1 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_4, 4, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 56, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 56, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; if 
(__pyx_t_2) { /* "aiohttp/_frozenlist.pyx":57 * return list(self) != other * if op == 4: # > * return list(self) > other # <<<<<<<<<<<<<< * if op == 5: # => * return list(self) >= other */ __Pyx_XDECREF(__pyx_r); __pyx_t_1 = PySequence_List(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 57, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_3 = PyObject_RichCompare(__pyx_t_1, __pyx_v_other, Py_GT); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 57, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0; /* "aiohttp/_frozenlist.pyx":56 * if op == 3: # != * return list(self) != other * if op == 4: # > # <<<<<<<<<<<<<< * return list(self) > other * if op == 5: # => */ } /* "aiohttp/_frozenlist.pyx":58 * if op == 4: # > * return list(self) > other * if op == 5: # => # <<<<<<<<<<<<<< * return list(self) >= other * */ __pyx_t_3 = __Pyx_PyInt_EqObjC(__pyx_v_op, __pyx_int_5, 5, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 58, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 58, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; if (__pyx_t_2) { /* "aiohttp/_frozenlist.pyx":59 * return list(self) > other * if op == 5: # => * return list(self) >= other # <<<<<<<<<<<<<< * * def insert(self, pos, item): */ __Pyx_XDECREF(__pyx_r); __pyx_t_3 = PySequence_List(__pyx_v_self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 59, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_1 = PyObject_RichCompare(__pyx_t_3, __pyx_v_other, Py_GE); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 59, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0; /* "aiohttp/_frozenlist.pyx":58 * if op == 4: # > * return list(self) > other * if op == 5: # => # <<<<<<<<<<<<<< * return list(self) >= other * */ } /* "aiohttp/_frozenlist.pyx":47 * return self._items.__reversed__() * * def 
__richcmp__(self, other, op): # <<<<<<<<<<<<<< * if op == 0: # < * return list(self) < other */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__richcmp__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_frozenlist.pyx":61 * return list(self) >= other * * def insert(self, pos, item): # <<<<<<<<<<<<<< * self._check_frozen() * self._items.insert(pos, item) */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_19insert(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_19insert(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_pos = 0; PyObject *__pyx_v_item = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("insert (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pos,&__pyx_n_s_item,0}; PyObject* values[2] = {0,0}; if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_pos)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_item)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("insert", 1, 2, 2, 1); __PYX_ERR(0, 61, __pyx_L3_error) } } if (unlikely(kw_args > 0)) { if 
(unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "insert") < 0)) __PYX_ERR(0, 61, __pyx_L3_error) } } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { goto __pyx_L5_argtuple_error; } else { values[0] = PyTuple_GET_ITEM(__pyx_args, 0); values[1] = PyTuple_GET_ITEM(__pyx_args, 1); } __pyx_v_pos = values[0]; __pyx_v_item = values[1]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("insert", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 61, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.insert", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_18insert(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), __pyx_v_pos, __pyx_v_item); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_18insert(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_pos, PyObject *__pyx_v_item) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; Py_ssize_t __pyx_t_2; int __pyx_t_3; __Pyx_RefNannySetupContext("insert", 0); /* "aiohttp/_frozenlist.pyx":62 * * def insert(self, pos, item): * self._check_frozen() # <<<<<<<<<<<<<< * self._items.insert(pos, item) * */ __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 62, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_frozenlist.pyx":63 * def insert(self, pos, item): * self._check_frozen() * self._items.insert(pos, item) # <<<<<<<<<<<<<< * * def __contains__(self, item): */ if (unlikely(__pyx_v_self->_items == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' 
object has no attribute '%.30s'", "insert"); __PYX_ERR(0, 63, __pyx_L1_error) } __pyx_t_2 = __Pyx_PyIndex_AsSsize_t(__pyx_v_pos); if (unlikely((__pyx_t_2 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 63, __pyx_L1_error) __pyx_t_3 = PyList_Insert(__pyx_v_self->_items, __pyx_t_2, __pyx_v_item); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(0, 63, __pyx_L1_error) /* "aiohttp/_frozenlist.pyx":61 * return list(self) >= other * * def insert(self, pos, item): # <<<<<<<<<<<<<< * self._check_frozen() * self._items.insert(pos, item) */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.insert", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_frozenlist.pyx":65 * self._items.insert(pos, item) * * def __contains__(self, item): # <<<<<<<<<<<<<< * return item in self._items * */ /* Python wrapper */ static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_21__contains__(PyObject *__pyx_v_self, PyObject *__pyx_v_item); /*proto*/ static int __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_21__contains__(PyObject *__pyx_v_self, PyObject *__pyx_v_item) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__contains__ (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_20__contains__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_item)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_20__contains__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item) { int __pyx_r; __Pyx_RefNannyDeclarations int __pyx_t_1; __Pyx_RefNannySetupContext("__contains__", 0); /* "aiohttp/_frozenlist.pyx":66 * * def __contains__(self, item): * return item in self._items # 
<<<<<<<<<<<<<< * * def __iadd__(self, items): */ __pyx_t_1 = (__Pyx_PySequence_ContainsTF(__pyx_v_item, __pyx_v_self->_items, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 66, __pyx_L1_error) __pyx_r = __pyx_t_1; goto __pyx_L0; /* "aiohttp/_frozenlist.pyx":65 * self._items.insert(pos, item) * * def __contains__(self, item): # <<<<<<<<<<<<<< * return item in self._items * */ /* function exit code */ __pyx_L1_error:; __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__contains__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_frozenlist.pyx":68 * return item in self._items * * def __iadd__(self, items): # <<<<<<<<<<<<<< * self._check_frozen() * self._items += list(items) */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_23__iadd__(PyObject *__pyx_v_self, PyObject *__pyx_v_items); /*proto*/ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_23__iadd__(PyObject *__pyx_v_self, PyObject *__pyx_v_items) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__iadd__ (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_22__iadd__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_items)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_22__iadd__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_items) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; __Pyx_RefNannySetupContext("__iadd__", 0); /* "aiohttp/_frozenlist.pyx":69 * * def __iadd__(self, items): * self._check_frozen() # <<<<<<<<<<<<<< * self._items += list(items) * return self */ __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if 
(unlikely(!__pyx_t_1)) __PYX_ERR(0, 69, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_frozenlist.pyx":70 * def __iadd__(self, items): * self._check_frozen() * self._items += list(items) # <<<<<<<<<<<<<< * return self * */ __pyx_t_1 = PySequence_List(__pyx_v_items); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 70, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = PyNumber_InPlaceAdd(__pyx_v_self->_items, __pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 70, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_GIVEREF(__pyx_t_2); __Pyx_GOTREF(__pyx_v_self->_items); __Pyx_DECREF(__pyx_v_self->_items); __pyx_v_self->_items = ((PyObject*)__pyx_t_2); __pyx_t_2 = 0; /* "aiohttp/_frozenlist.pyx":71 * self._check_frozen() * self._items += list(items) * return self # <<<<<<<<<<<<<< * * def index(self, item): */ __Pyx_XDECREF(__pyx_r); __Pyx_INCREF(((PyObject *)__pyx_v_self)); __pyx_r = ((PyObject *)__pyx_v_self); goto __pyx_L0; /* "aiohttp/_frozenlist.pyx":68 * return item in self._items * * def __iadd__(self, items): # <<<<<<<<<<<<<< * self._check_frozen() * self._items += list(items) */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__iadd__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_frozenlist.pyx":73 * return self * * def index(self, item): # <<<<<<<<<<<<<< * return self._items.index(item) * */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_25index(PyObject *__pyx_v_self, PyObject *__pyx_v_item); /*proto*/ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_25index(PyObject *__pyx_v_self, PyObject *__pyx_v_item) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("index (wrapper)", 0); __pyx_r = 
__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_24index(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_item)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_24index(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; PyObject *__pyx_t_4 = NULL; __Pyx_RefNannySetupContext("index", 0); /* "aiohttp/_frozenlist.pyx":74 * * def index(self, item): * return self._items.index(item) # <<<<<<<<<<<<<< * * def remove(self, item): */ __Pyx_XDECREF(__pyx_r); __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_items, __pyx_n_s_index); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 74, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); } } if (!__pyx_t_3) { __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v_item); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 74, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); } else { #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[2] = {__pyx_t_3, __pyx_v_item}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 74, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_GOTREF(__pyx_t_1); } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[2] = {__pyx_t_3, __pyx_v_item}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 74, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_3); 
__pyx_t_3 = 0; __Pyx_GOTREF(__pyx_t_1); } else #endif { __pyx_t_4 = PyTuple_New(1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 74, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); __pyx_t_3 = NULL; __Pyx_INCREF(__pyx_v_item); __Pyx_GIVEREF(__pyx_v_item); PyTuple_SET_ITEM(__pyx_t_4, 0+1, __pyx_v_item); __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_4, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 74, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; } } __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0; /* "aiohttp/_frozenlist.pyx":73 * return self * * def index(self, item): # <<<<<<<<<<<<<< * return self._items.index(item) * */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.index", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_frozenlist.pyx":76 * return self._items.index(item) * * def remove(self, item): # <<<<<<<<<<<<<< * self._check_frozen() * self._items.remove(item) */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_27remove(PyObject *__pyx_v_self, PyObject *__pyx_v_item); /*proto*/ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_27remove(PyObject *__pyx_v_self, PyObject *__pyx_v_item) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("remove (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_26remove(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_item)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_26remove(struct 
__pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; PyObject *__pyx_t_4 = NULL; __Pyx_RefNannySetupContext("remove", 0); /* "aiohttp/_frozenlist.pyx":77 * * def remove(self, item): * self._check_frozen() # <<<<<<<<<<<<<< * self._items.remove(item) * */ __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 77, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_frozenlist.pyx":78 * def remove(self, item): * self._check_frozen() * self._items.remove(item) # <<<<<<<<<<<<<< * * def clear(self): */ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_items, __pyx_n_s_remove); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 78, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); } } if (!__pyx_t_3) { __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v_item); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 78, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); } else { #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[2] = {__pyx_t_3, __pyx_v_item}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 78, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_GOTREF(__pyx_t_1); } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[2] = {__pyx_t_3, __pyx_v_item}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if 
(unlikely(!__pyx_t_1)) __PYX_ERR(0, 78, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_GOTREF(__pyx_t_1); } else #endif { __pyx_t_4 = PyTuple_New(1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 78, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); __pyx_t_3 = NULL; __Pyx_INCREF(__pyx_v_item); __Pyx_GIVEREF(__pyx_v_item); PyTuple_SET_ITEM(__pyx_t_4, 0+1, __pyx_v_item); __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_4, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 78, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; } } __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_frozenlist.pyx":76 * return self._items.index(item) * * def remove(self, item): # <<<<<<<<<<<<<< * self._check_frozen() * self._items.remove(item) */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.remove", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_frozenlist.pyx":80 * self._items.remove(item) * * def clear(self): # <<<<<<<<<<<<<< * self._check_frozen() * self._items.clear() */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_29clear(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_29clear(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("clear (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_28clear(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); /* function exit code */ 
__Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_28clear(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; __Pyx_RefNannySetupContext("clear", 0); /* "aiohttp/_frozenlist.pyx":81 * * def clear(self): * self._check_frozen() # <<<<<<<<<<<<<< * self._items.clear() * */ __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 81, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_frozenlist.pyx":82 * def clear(self): * self._check_frozen() * self._items.clear() # <<<<<<<<<<<<<< * * def extend(self, items): */ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_items, __pyx_n_s_clear); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 82, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); } } if (__pyx_t_3) { __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 82, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; } else { __pyx_t_1 = __Pyx_PyObject_CallNoArg(__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 82, __pyx_L1_error) } __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_frozenlist.pyx":80 * self._items.remove(item) * * def clear(self): # <<<<<<<<<<<<<< * self._check_frozen() * self._items.clear() */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; 
__Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.clear", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_frozenlist.pyx":84 * self._items.clear() * * def extend(self, items): # <<<<<<<<<<<<<< * self._check_frozen() * self._items += list(items) */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_31extend(PyObject *__pyx_v_self, PyObject *__pyx_v_items); /*proto*/ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_31extend(PyObject *__pyx_v_self, PyObject *__pyx_v_items) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("extend (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_30extend(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_items)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_30extend(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_items) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; __Pyx_RefNannySetupContext("extend", 0); /* "aiohttp/_frozenlist.pyx":85 * * def extend(self, items): * self._check_frozen() # <<<<<<<<<<<<<< * self._items += list(items) * */ __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 85, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_frozenlist.pyx":86 * def extend(self, items): * self._check_frozen() * self._items += list(items) # <<<<<<<<<<<<<< * * def reverse(self): */ __pyx_t_1 = PySequence_List(__pyx_v_items); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 86, 
__pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = PyNumber_InPlaceAdd(__pyx_v_self->_items, __pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 86, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_GIVEREF(__pyx_t_2); __Pyx_GOTREF(__pyx_v_self->_items); __Pyx_DECREF(__pyx_v_self->_items); __pyx_v_self->_items = ((PyObject*)__pyx_t_2); __pyx_t_2 = 0; /* "aiohttp/_frozenlist.pyx":84 * self._items.clear() * * def extend(self, items): # <<<<<<<<<<<<<< * self._check_frozen() * self._items += list(items) */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.extend", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_frozenlist.pyx":88 * self._items += list(items) * * def reverse(self): # <<<<<<<<<<<<<< * self._check_frozen() * self._items.reverse() */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_33reverse(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_33reverse(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("reverse (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_32reverse(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_32reverse(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; int __pyx_t_2; __Pyx_RefNannySetupContext("reverse", 0); /* "aiohttp/_frozenlist.pyx":89 * * def reverse(self): * 
self._check_frozen() # <<<<<<<<<<<<<< * self._items.reverse() * */ __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 89, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_frozenlist.pyx":90 * def reverse(self): * self._check_frozen() * self._items.reverse() # <<<<<<<<<<<<<< * * def pop(self, index=-1): */ if (unlikely(__pyx_v_self->_items == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "reverse"); __PYX_ERR(0, 90, __pyx_L1_error) } __pyx_t_2 = PyList_Reverse(__pyx_v_self->_items); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 90, __pyx_L1_error) /* "aiohttp/_frozenlist.pyx":88 * self._items += list(items) * * def reverse(self): # <<<<<<<<<<<<<< * self._check_frozen() * self._items.reverse() */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.reverse", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_frozenlist.pyx":92 * self._items.reverse() * * def pop(self, index=-1): # <<<<<<<<<<<<<< * self._check_frozen() * return self._items.pop(index) */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_35pop(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_35pop(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_index = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("pop (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_index,0}; PyObject* values[1] = {0}; values[0] = ((PyObject *)__pyx_int_neg_1); if 
(unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (kw_args > 0) { PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_index); if (value) { values[0] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "pop") < 0)) __PYX_ERR(0, 92, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_index = values[0]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("pop", 0, 0, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 92, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.pop", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_34pop(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), __pyx_v_index); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_34pop(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_index) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; Py_ssize_t __pyx_t_2; __Pyx_RefNannySetupContext("pop", 0); /* "aiohttp/_frozenlist.pyx":93 * * def pop(self, index=-1): * self._check_frozen() # <<<<<<<<<<<<<< * return self._items.pop(index) * */ __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if 
(unlikely(!__pyx_t_1)) __PYX_ERR(0, 93, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_frozenlist.pyx":94 * def pop(self, index=-1): * self._check_frozen() * return self._items.pop(index) # <<<<<<<<<<<<<< * * def append(self, item): */ __Pyx_XDECREF(__pyx_r); if (unlikely(__pyx_v_self->_items == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "pop"); __PYX_ERR(0, 94, __pyx_L1_error) } __pyx_t_2 = __Pyx_PyIndex_AsSsize_t(__pyx_v_index); if (unlikely((__pyx_t_2 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 94, __pyx_L1_error) __pyx_t_1 = __Pyx_PyList_PopIndex(__pyx_v_self->_items, __pyx_v_index, __pyx_t_2, 1, Py_ssize_t, PyInt_FromSsize_t); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 94, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0; /* "aiohttp/_frozenlist.pyx":92 * self._items.reverse() * * def pop(self, index=-1): # <<<<<<<<<<<<<< * self._check_frozen() * return self._items.pop(index) */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.pop", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_frozenlist.pyx":96 * return self._items.pop(index) * * def append(self, item): # <<<<<<<<<<<<<< * self._check_frozen() * return self._items.append(item) */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_37append(PyObject *__pyx_v_self, PyObject *__pyx_v_item); /*proto*/ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_37append(PyObject *__pyx_v_self, PyObject *__pyx_v_item) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("append (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_36append(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), 
((PyObject *)__pyx_v_item)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_36append(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; int __pyx_t_2; __Pyx_RefNannySetupContext("append", 0); /* "aiohttp/_frozenlist.pyx":97 * * def append(self, item): * self._check_frozen() # <<<<<<<<<<<<<< * return self._items.append(item) * */ __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self->__pyx_vtab)->_check_frozen(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 97, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_frozenlist.pyx":98 * def append(self, item): * self._check_frozen() * return self._items.append(item) # <<<<<<<<<<<<<< * * def count(self, item): */ __Pyx_XDECREF(__pyx_r); if (unlikely(__pyx_v_self->_items == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); __PYX_ERR(0, 98, __pyx_L1_error) } __pyx_t_2 = __Pyx_PyList_Append(__pyx_v_self->_items, __pyx_v_item); if (unlikely(__pyx_t_2 == ((int)-1))) __PYX_ERR(0, 98, __pyx_L1_error) __pyx_t_1 = __Pyx_Owned_Py_None(__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 98, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0; /* "aiohttp/_frozenlist.pyx":96 * return self._items.pop(index) * * def append(self, item): # <<<<<<<<<<<<<< * self._check_frozen() * return self._items.append(item) */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.append", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_frozenlist.pyx":100 * return self._items.append(item) * * def 
count(self, item): # <<<<<<<<<<<<<< * return self._items.count(item) * */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_39count(PyObject *__pyx_v_self, PyObject *__pyx_v_item); /*proto*/ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_39count(PyObject *__pyx_v_self, PyObject *__pyx_v_item) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("count (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_38count(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v_item)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_38count(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v_item) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; PyObject *__pyx_t_4 = NULL; __Pyx_RefNannySetupContext("count", 0); /* "aiohttp/_frozenlist.pyx":101 * * def count(self, item): * return self._items.count(item) # <<<<<<<<<<<<<< * * def __repr__(self): */ __Pyx_XDECREF(__pyx_r); __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_items, __pyx_n_s_count); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 101, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); } } if (!__pyx_t_3) { __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v_item); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 101, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); } else { #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[2] = {__pyx_t_3, __pyx_v_item}; __pyx_t_1 = 
__Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 101, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_GOTREF(__pyx_t_1); } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[2] = {__pyx_t_3, __pyx_v_item}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 101, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_GOTREF(__pyx_t_1); } else #endif { __pyx_t_4 = PyTuple_New(1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 101, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); __pyx_t_3 = NULL; __Pyx_INCREF(__pyx_v_item); __Pyx_GIVEREF(__pyx_v_item); PyTuple_SET_ITEM(__pyx_t_4, 0+1, __pyx_v_item); __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_4, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 101, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; } } __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0; /* "aiohttp/_frozenlist.pyx":100 * return self._items.append(item) * * def count(self, item): # <<<<<<<<<<<<<< * return self._items.count(item) * */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.count", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_frozenlist.pyx":103 * return self._items.count(item) * * def __repr__(self): # <<<<<<<<<<<<<< * return ''.format(self.frozen, * self._items) */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_41__repr__(PyObject *__pyx_v_self); /*proto*/ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_41__repr__(PyObject 
*__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__repr__ (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_40__repr__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_40__repr__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; PyObject *__pyx_t_4 = NULL; int __pyx_t_5; PyObject *__pyx_t_6 = NULL; __Pyx_RefNannySetupContext("__repr__", 0); /* "aiohttp/_frozenlist.pyx":104 * * def __repr__(self): * return ''.format(self.frozen, # <<<<<<<<<<<<<< * self._items) * */ __Pyx_XDECREF(__pyx_r); __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_kp_s_FrozenList_frozen_r, __pyx_n_s_format); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 104, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = __Pyx_PyBool_FromLong(__pyx_v_self->frozen); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 104, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); /* "aiohttp/_frozenlist.pyx":105 * def __repr__(self): * return ''.format(self.frozen, * self._items) # <<<<<<<<<<<<<< * * */ __pyx_t_4 = NULL; __pyx_t_5 = 0; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_4)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_4); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); __pyx_t_5 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_t_3, __pyx_v_self->_items}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 104, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_GOTREF(__pyx_t_1); 
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_t_3, __pyx_v_self->_items}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 104, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; } else #endif { __pyx_t_6 = PyTuple_New(2+__pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 104, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); if (__pyx_t_4) { __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_4); __pyx_t_4 = NULL; } __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_5, __pyx_t_3); __Pyx_INCREF(__pyx_v_self->_items); __Pyx_GIVEREF(__pyx_v_self->_items); PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_5, __pyx_v_self->_items); __pyx_t_3 = 0; __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_6, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 104, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; } __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0; /* "aiohttp/_frozenlist.pyx":103 * return self._items.count(item) * * def __repr__(self): # <<<<<<<<<<<<<< * return ''.format(self.frozen, * self._items) */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_6); __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__repr__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_frozenlist.pyx":6 * cdef class FrozenList: * * cdef readonly bint frozen # <<<<<<<<<<<<<< * cdef list _items * */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_6frozen_1__get__(PyObject *__pyx_v_self); 
/*proto*/ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_6frozen_1__get__(PyObject *__pyx_v_self) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_6frozen___get__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_6frozen___get__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("__get__", 0); __Pyx_XDECREF(__pyx_r); __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->frozen); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 6, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0; /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.frozen.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "(tree fragment)":1 * def __reduce_cython__(self): # <<<<<<<<<<<<<< * cdef bint use_setstate * state = (self._items, self.frozen) */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_43__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_43__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_42__reduce_cython__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject 
*__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_42__reduce_cython__(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self) { int __pyx_v_use_setstate; PyObject *__pyx_v_state = NULL; PyObject *__pyx_v__dict = NULL; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; int __pyx_t_3; int __pyx_t_4; PyObject *__pyx_t_5 = NULL; __Pyx_RefNannySetupContext("__reduce_cython__", 0); /* "(tree fragment)":3 * def __reduce_cython__(self): * cdef bint use_setstate * state = (self._items, self.frozen) # <<<<<<<<<<<<<< * _dict = getattr(self, '__dict__', None) * if _dict is not None: */ __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->frozen); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 3, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 3, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_INCREF(__pyx_v_self->_items); __Pyx_GIVEREF(__pyx_v_self->_items); PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_self->_items); __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_t_1); __pyx_t_1 = 0; __pyx_v_state = ((PyObject*)__pyx_t_2); __pyx_t_2 = 0; /* "(tree fragment)":4 * cdef bint use_setstate * state = (self._items, self.frozen) * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< * if _dict is not None: * state += (_dict,) */ __pyx_t_2 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 4, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_v__dict = __pyx_t_2; __pyx_t_2 = 0; /* "(tree fragment)":5 * state = (self._items, self.frozen) * _dict = getattr(self, '__dict__', None) * if _dict is not None: # <<<<<<<<<<<<<< * state += (_dict,) * use_setstate = True */ __pyx_t_3 = (__pyx_v__dict != Py_None); __pyx_t_4 = (__pyx_t_3 != 0); if (__pyx_t_4) { /* "(tree fragment)":6 * _dict = getattr(self, '__dict__', None) * if _dict is not None: * state += (_dict,) # <<<<<<<<<<<<<< * use_setstate = True * else: */ 
__pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 6, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_INCREF(__pyx_v__dict); __Pyx_GIVEREF(__pyx_v__dict); PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v__dict); __pyx_t_1 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_1)); __pyx_t_1 = 0; /* "(tree fragment)":7 * if _dict is not None: * state += (_dict,) * use_setstate = True # <<<<<<<<<<<<<< * else: * use_setstate = self._items is not None */ __pyx_v_use_setstate = 1; /* "(tree fragment)":5 * state = (self._items, self.frozen) * _dict = getattr(self, '__dict__', None) * if _dict is not None: # <<<<<<<<<<<<<< * state += (_dict,) * use_setstate = True */ goto __pyx_L3; } /* "(tree fragment)":9 * use_setstate = True * else: * use_setstate = self._items is not None # <<<<<<<<<<<<<< * if use_setstate: * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, None), state */ /*else*/ { __pyx_t_4 = (__pyx_v_self->_items != ((PyObject*)Py_None)); __pyx_v_use_setstate = __pyx_t_4; } __pyx_L3:; /* "(tree fragment)":10 * else: * use_setstate = self._items is not None * if use_setstate: # <<<<<<<<<<<<<< * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, None), state * else: */ __pyx_t_4 = (__pyx_v_use_setstate != 0); if (__pyx_t_4) { /* "(tree fragment)":11 * use_setstate = self._items is not None * if use_setstate: * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, None), state # <<<<<<<<<<<<<< * else: * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, state) */ __Pyx_XDECREF(__pyx_r); __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_pyx_unpickle_FrozenList); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 11, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 11, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); 
__Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); __Pyx_INCREF(__pyx_int_155820355); __Pyx_GIVEREF(__pyx_int_155820355); PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_int_155820355); __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); PyTuple_SET_ITEM(__pyx_t_2, 2, Py_None); __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 11, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_1); __Pyx_GIVEREF(__pyx_t_2); PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_2); __Pyx_INCREF(__pyx_v_state); __Pyx_GIVEREF(__pyx_v_state); PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_v_state); __pyx_t_1 = 0; __pyx_t_2 = 0; __pyx_r = __pyx_t_5; __pyx_t_5 = 0; goto __pyx_L0; /* "(tree fragment)":10 * else: * use_setstate = self._items is not None * if use_setstate: # <<<<<<<<<<<<<< * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, None), state * else: */ } /* "(tree fragment)":13 * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, None), state * else: * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, state) # <<<<<<<<<<<<<< * def __setstate_cython__(self, __pyx_state): * __pyx_unpickle_FrozenList__set_state(self, __pyx_state) */ /*else*/ { __Pyx_XDECREF(__pyx_r); __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_pyx_unpickle_FrozenList); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 13, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 13, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); __Pyx_INCREF(__pyx_int_155820355); __Pyx_GIVEREF(__pyx_int_155820355); PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_int_155820355); 
__Pyx_INCREF(__pyx_v_state); __Pyx_GIVEREF(__pyx_v_state); PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_v_state); __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_5); __Pyx_GIVEREF(__pyx_t_2); PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_2); __pyx_t_5 = 0; __pyx_t_2 = 0; __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0; } /* "(tree fragment)":1 * def __reduce_cython__(self): # <<<<<<<<<<<<<< * cdef bint use_setstate * state = (self._items, self.frozen) */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_5); __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XDECREF(__pyx_v_state); __Pyx_XDECREF(__pyx_v__dict); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "(tree fragment)":14 * else: * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, state) * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< * __pyx_unpickle_FrozenList__set_state(self, __pyx_state) */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_45__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_45__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_11_frozenlist_10FrozenList_44__setstate_cython__(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_11_frozenlist_10FrozenList_44__setstate_cython__(struct 
__pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v_self, PyObject *__pyx_v___pyx_state) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("__setstate_cython__", 0); /* "(tree fragment)":15 * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, state) * def __setstate_cython__(self, __pyx_state): * __pyx_unpickle_FrozenList__set_state(self, __pyx_state) # <<<<<<<<<<<<<< */ if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 15, __pyx_L1_error) __pyx_t_1 = __pyx_f_7aiohttp_11_frozenlist___pyx_unpickle_FrozenList__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "(tree fragment)":14 * else: * return __pyx_unpickle_FrozenList, (type(self), 0x949a143, state) * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< * __pyx_unpickle_FrozenList__set_state(self, __pyx_state) */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("aiohttp._frozenlist.FrozenList.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "(tree fragment)":1 * def __pyx_unpickle_FrozenList(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< * if __pyx_checksum != 0x949a143: * from pickle import PickleError as __pyx_PickleError */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_11_frozenlist_1__pyx_unpickle_FrozenList(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static PyMethodDef __pyx_mdef_7aiohttp_11_frozenlist_1__pyx_unpickle_FrozenList = {"__pyx_unpickle_FrozenList", 
(PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_1__pyx_unpickle_FrozenList, METH_VARARGS|METH_KEYWORDS, 0}; static PyObject *__pyx_pw_7aiohttp_11_frozenlist_1__pyx_unpickle_FrozenList(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v___pyx_type = 0; long __pyx_v___pyx_checksum; PyObject *__pyx_v___pyx_state = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__pyx_unpickle_FrozenList (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; PyObject* values[3] = {0,0,0}; if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_FrozenList", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 2: if (likely((values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_FrozenList", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle_FrozenList") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) } } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { goto __pyx_L5_argtuple_error; } else { values[0] = PyTuple_GET_ITEM(__pyx_args, 0); values[1] = 
PyTuple_GET_ITEM(__pyx_args, 1); values[2] = PyTuple_GET_ITEM(__pyx_args, 2); } __pyx_v___pyx_type = values[0]; __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) __pyx_v___pyx_state = values[2]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_FrozenList", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 1, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("aiohttp._frozenlist.__pyx_unpickle_FrozenList", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_7aiohttp_11_frozenlist___pyx_unpickle_FrozenList(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_11_frozenlist___pyx_unpickle_FrozenList(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { PyObject *__pyx_v___pyx_PickleError = NULL; PyObject *__pyx_v___pyx_result = NULL; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations int __pyx_t_1; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; PyObject *__pyx_t_4 = NULL; PyObject *__pyx_t_5 = NULL; PyObject *__pyx_t_6 = NULL; int __pyx_t_7; __Pyx_RefNannySetupContext("__pyx_unpickle_FrozenList", 0); /* "(tree fragment)":2 * def __pyx_unpickle_FrozenList(__pyx_type, long __pyx_checksum, __pyx_state): * if __pyx_checksum != 0x949a143: # <<<<<<<<<<<<<< * from pickle import PickleError as __pyx_PickleError * raise __pyx_PickleError("Incompatible checksums (%s vs 0x949a143 = (_items, frozen))" % __pyx_checksum) */ __pyx_t_1 = ((__pyx_v___pyx_checksum != 0x949a143) != 0); if (__pyx_t_1) { /* "(tree fragment)":3 * def __pyx_unpickle_FrozenList(__pyx_type, long __pyx_checksum, __pyx_state): * if 
__pyx_checksum != 0x949a143: * from pickle import PickleError as __pyx_PickleError # <<<<<<<<<<<<<< * raise __pyx_PickleError("Incompatible checksums (%s vs 0x949a143 = (_items, frozen))" % __pyx_checksum) * __pyx_result = FrozenList.__new__(__pyx_type) */ __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 3, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_INCREF(__pyx_n_s_PickleError); __Pyx_GIVEREF(__pyx_n_s_PickleError); PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_PickleError); __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_2, -1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 3, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 3, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_INCREF(__pyx_t_2); __pyx_v___pyx_PickleError = __pyx_t_2; __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; /* "(tree fragment)":4 * if __pyx_checksum != 0x949a143: * from pickle import PickleError as __pyx_PickleError * raise __pyx_PickleError("Incompatible checksums (%s vs 0x949a143 = (_items, frozen))" % __pyx_checksum) # <<<<<<<<<<<<<< * __pyx_result = FrozenList.__new__(__pyx_type) * if __pyx_state is not None: */ __pyx_t_2 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 4, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_4 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_s_vs_0x94, __pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 4, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __Pyx_INCREF(__pyx_v___pyx_PickleError); __pyx_t_2 = __pyx_v___pyx_PickleError; __pyx_t_5 = NULL; if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_5)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_5); __Pyx_INCREF(function); 
__Pyx_DECREF_SET(__pyx_t_2, function); } } if (!__pyx_t_5) { __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 4, __pyx_L1_error) __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_GOTREF(__pyx_t_3); } else { #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_t_4}; __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 4, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_t_4}; __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 4, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; } else #endif { __pyx_t_6 = PyTuple_New(1+1); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 4, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_5); __pyx_t_5 = NULL; __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_6, 0+1, __pyx_t_4); __pyx_t_4 = 0; __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_6, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 4, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; } } __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(1, 4, __pyx_L1_error) /* "(tree fragment)":2 * def __pyx_unpickle_FrozenList(__pyx_type, long __pyx_checksum, __pyx_state): * if __pyx_checksum != 0x949a143: # <<<<<<<<<<<<<< * from pickle import PickleError as __pyx_PickleError * raise __pyx_PickleError("Incompatible checksums (%s vs 0x949a143 = (_items, frozen))" % __pyx_checksum) */ } /* "(tree fragment)":5 * from pickle import PickleError as __pyx_PickleError * 
raise __pyx_PickleError("Incompatible checksums (%s vs 0x949a143 = (_items, frozen))" % __pyx_checksum) * __pyx_result = FrozenList.__new__(__pyx_type) # <<<<<<<<<<<<<< * if __pyx_state is not None: * __pyx_unpickle_FrozenList__set_state( __pyx_result, __pyx_state) */ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_7aiohttp_11_frozenlist_FrozenList), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_6 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_6)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_6); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); } } if (!__pyx_t_6) { __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v___pyx_type); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); } else { #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[2] = {__pyx_t_6, __pyx_v___pyx_type}; __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_GOTREF(__pyx_t_3); } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[2] = {__pyx_t_6, __pyx_v___pyx_type}; __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_GOTREF(__pyx_t_3); } else #endif { __pyx_t_4 = PyTuple_New(1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 5, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_6); __pyx_t_6 = NULL; __Pyx_INCREF(__pyx_v___pyx_type); __Pyx_GIVEREF(__pyx_v___pyx_type); PyTuple_SET_ITEM(__pyx_t_4, 0+1, __pyx_v___pyx_type); __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_4, NULL); if 
(unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; } } __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_v___pyx_result = __pyx_t_3; __pyx_t_3 = 0; /* "(tree fragment)":6 * raise __pyx_PickleError("Incompatible checksums (%s vs 0x949a143 = (_items, frozen))" % __pyx_checksum) * __pyx_result = FrozenList.__new__(__pyx_type) * if __pyx_state is not None: # <<<<<<<<<<<<<< * __pyx_unpickle_FrozenList__set_state( __pyx_result, __pyx_state) * return __pyx_result */ __pyx_t_1 = (__pyx_v___pyx_state != Py_None); __pyx_t_7 = (__pyx_t_1 != 0); if (__pyx_t_7) { /* "(tree fragment)":7 * __pyx_result = FrozenList.__new__(__pyx_type) * if __pyx_state is not None: * __pyx_unpickle_FrozenList__set_state( __pyx_result, __pyx_state) # <<<<<<<<<<<<<< * return __pyx_result * cdef __pyx_unpickle_FrozenList__set_state(FrozenList __pyx_result, tuple __pyx_state): */ if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 7, __pyx_L1_error) __pyx_t_3 = __pyx_f_7aiohttp_11_frozenlist___pyx_unpickle_FrozenList__set_state(((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)__pyx_v___pyx_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; /* "(tree fragment)":6 * raise __pyx_PickleError("Incompatible checksums (%s vs 0x949a143 = (_items, frozen))" % __pyx_checksum) * __pyx_result = FrozenList.__new__(__pyx_type) * if __pyx_state is not None: # <<<<<<<<<<<<<< * __pyx_unpickle_FrozenList__set_state( __pyx_result, __pyx_state) * return __pyx_result */ } /* "(tree fragment)":8 * if __pyx_state is not None: * __pyx_unpickle_FrozenList__set_state( __pyx_result, __pyx_state) * return __pyx_result # <<<<<<<<<<<<<< * cdef 
__pyx_unpickle_FrozenList__set_state(FrozenList __pyx_result, tuple __pyx_state): * __pyx_result._items = __pyx_state[0]; __pyx_result.frozen = __pyx_state[1] */ __Pyx_XDECREF(__pyx_r); __Pyx_INCREF(__pyx_v___pyx_result); __pyx_r = __pyx_v___pyx_result; goto __pyx_L0; /* "(tree fragment)":1 * def __pyx_unpickle_FrozenList(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< * if __pyx_checksum != 0x949a143: * from pickle import PickleError as __pyx_PickleError */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_5); __Pyx_XDECREF(__pyx_t_6); __Pyx_AddTraceback("aiohttp._frozenlist.__pyx_unpickle_FrozenList", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XDECREF(__pyx_v___pyx_PickleError); __Pyx_XDECREF(__pyx_v___pyx_result); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "(tree fragment)":9 * __pyx_unpickle_FrozenList__set_state( __pyx_result, __pyx_state) * return __pyx_result * cdef __pyx_unpickle_FrozenList__set_state(FrozenList __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< * __pyx_result._items = __pyx_state[0]; __pyx_result.frozen = __pyx_state[1] * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): */ static PyObject *__pyx_f_7aiohttp_11_frozenlist___pyx_unpickle_FrozenList__set_state(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *__pyx_v___pyx_result, PyObject *__pyx_v___pyx_state) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; int __pyx_t_2; Py_ssize_t __pyx_t_3; int __pyx_t_4; int __pyx_t_5; PyObject *__pyx_t_6 = NULL; PyObject *__pyx_t_7 = NULL; PyObject *__pyx_t_8 = NULL; PyObject *__pyx_t_9 = NULL; __Pyx_RefNannySetupContext("__pyx_unpickle_FrozenList__set_state", 0); /* "(tree fragment)":10 * return __pyx_result * cdef __pyx_unpickle_FrozenList__set_state(FrozenList __pyx_result, tuple __pyx_state): * __pyx_result._items = 
__pyx_state[0]; __pyx_result.frozen = __pyx_state[1] # <<<<<<<<<<<<<< * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): * __pyx_result.__dict__.update(__pyx_state[2]) */ if (unlikely(__pyx_v___pyx_state == Py_None)) { PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); __PYX_ERR(1, 10, __pyx_L1_error) } __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 10, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); if (!(likely(PyList_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "list", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(1, 10, __pyx_L1_error) __Pyx_GIVEREF(__pyx_t_1); __Pyx_GOTREF(__pyx_v___pyx_result->_items); __Pyx_DECREF(__pyx_v___pyx_result->_items); __pyx_v___pyx_result->_items = ((PyObject*)__pyx_t_1); __pyx_t_1 = 0; if (unlikely(__pyx_v___pyx_state == Py_None)) { PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); __PYX_ERR(1, 10, __pyx_L1_error) } __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 10, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 10, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_v___pyx_result->frozen = __pyx_t_2; /* "(tree fragment)":11 * cdef __pyx_unpickle_FrozenList__set_state(FrozenList __pyx_result, tuple __pyx_state): * __pyx_result._items = __pyx_state[0]; __pyx_result.frozen = __pyx_state[1] * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< * __pyx_result.__dict__.update(__pyx_state[2]) */ if (unlikely(__pyx_v___pyx_state == Py_None)) { PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); __PYX_ERR(1, 11, __pyx_L1_error) } __pyx_t_3 = 
PyTuple_GET_SIZE(__pyx_v___pyx_state); if (unlikely(__pyx_t_3 == ((Py_ssize_t)-1))) __PYX_ERR(1, 11, __pyx_L1_error) __pyx_t_4 = ((__pyx_t_3 > 2) != 0); if (__pyx_t_4) { } else { __pyx_t_2 = __pyx_t_4; goto __pyx_L4_bool_binop_done; } __pyx_t_4 = __Pyx_HasAttr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(1, 11, __pyx_L1_error) __pyx_t_5 = (__pyx_t_4 != 0); __pyx_t_2 = __pyx_t_5; __pyx_L4_bool_binop_done:; if (__pyx_t_2) { /* "(tree fragment)":12 * __pyx_result._items = __pyx_state[0]; __pyx_result.frozen = __pyx_state[1] * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): * __pyx_result.__dict__.update(__pyx_state[2]) # <<<<<<<<<<<<<< */ __pyx_t_6 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v___pyx_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 12, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_update); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 12, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; if (unlikely(__pyx_v___pyx_state == Py_None)) { PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); __PYX_ERR(1, 12, __pyx_L1_error) } __pyx_t_6 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 12, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_8 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_7); if (likely(__pyx_t_8)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); __Pyx_INCREF(__pyx_t_8); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_7, function); } } if (!__pyx_t_8) { __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_t_6); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_GOTREF(__pyx_t_1); } else { #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_7)) { 
PyObject *__pyx_temp[2] = {__pyx_t_8, __pyx_t_6}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) { PyObject *__pyx_temp[2] = {__pyx_t_8, __pyx_t_6}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; } else #endif { __pyx_t_9 = PyTuple_New(1+1); if (unlikely(!__pyx_t_9)) __PYX_ERR(1, 12, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_9); __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_t_8); __pyx_t_8 = NULL; __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_9, 0+1, __pyx_t_6); __pyx_t_6 = 0; __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_9, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; } } __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "(tree fragment)":11 * cdef __pyx_unpickle_FrozenList__set_state(FrozenList __pyx_result, tuple __pyx_state): * __pyx_result._items = __pyx_state[0]; __pyx_result.frozen = __pyx_state[1] * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): # <<<<<<<<<<<<<< * __pyx_result.__dict__.update(__pyx_state[2]) */ } /* "(tree fragment)":9 * __pyx_unpickle_FrozenList__set_state( __pyx_result, __pyx_state) * return __pyx_result * cdef __pyx_unpickle_FrozenList__set_state(FrozenList __pyx_result, tuple __pyx_state): # <<<<<<<<<<<<<< * __pyx_result._items = __pyx_state[0]; __pyx_result.frozen = __pyx_state[1] * if len(__pyx_state) > 2 and hasattr(__pyx_result, '__dict__'): */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; 
__pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_7); __Pyx_XDECREF(__pyx_t_8); __Pyx_XDECREF(__pyx_t_9); __Pyx_AddTraceback("aiohttp._frozenlist.__pyx_unpickle_FrozenList__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } static struct __pyx_vtabstruct_7aiohttp_11_frozenlist_FrozenList __pyx_vtable_7aiohttp_11_frozenlist_FrozenList; static PyObject *__pyx_tp_new_7aiohttp_11_frozenlist_FrozenList(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *p; PyObject *o; if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { o = (*t->tp_alloc)(t, 0); } else { o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); } if (unlikely(!o)) return 0; p = ((struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)o); p->__pyx_vtab = __pyx_vtabptr_7aiohttp_11_frozenlist_FrozenList; p->_items = ((PyObject*)Py_None); Py_INCREF(Py_None); return o; } static void __pyx_tp_dealloc_7aiohttp_11_frozenlist_FrozenList(PyObject *o) { struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *p = (struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)o; #if CYTHON_USE_TP_FINALIZE if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { if (PyObject_CallFinalizerFromDealloc(o)) return; } #endif PyObject_GC_UnTrack(o); Py_CLEAR(p->_items); (*Py_TYPE(o)->tp_free)(o); } static int __pyx_tp_traverse_7aiohttp_11_frozenlist_FrozenList(PyObject *o, visitproc v, void *a) { int e; struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *p = (struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *)o; if (p->_items) { e = (*v)(p->_items, a); if (e) return e; } return 0; } static int __pyx_tp_clear_7aiohttp_11_frozenlist_FrozenList(PyObject *o) { PyObject* tmp; struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *p = (struct 
__pyx_obj_7aiohttp_11_frozenlist_FrozenList *)o; tmp = ((PyObject*)p->_items); p->_items = ((PyObject*)Py_None); Py_INCREF(Py_None); Py_XDECREF(tmp); return 0; } static PyObject *__pyx_sq_item_7aiohttp_11_frozenlist_FrozenList(PyObject *o, Py_ssize_t i) { PyObject *r; PyObject *x = PyInt_FromSsize_t(i); if(!x) return 0; r = Py_TYPE(o)->tp_as_mapping->mp_subscript(o, x); Py_DECREF(x); return r; } static int __pyx_mp_ass_subscript_7aiohttp_11_frozenlist_FrozenList(PyObject *o, PyObject *i, PyObject *v) { if (v) { return __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_7__setitem__(o, i, v); } else { return __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_9__delitem__(o, i); } } static PyObject *__pyx_getprop_7aiohttp_11_frozenlist_10FrozenList_frozen(PyObject *o, CYTHON_UNUSED void *x) { return __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_6frozen_1__get__(o); } static PyMethodDef __pyx_methods_7aiohttp_11_frozenlist_FrozenList[] = { {"freeze", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_3freeze, METH_NOARGS, 0}, {"__reversed__", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_15__reversed__, METH_NOARGS, 0}, {"insert", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_19insert, METH_VARARGS|METH_KEYWORDS, 0}, {"index", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_25index, METH_O, 0}, {"remove", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_27remove, METH_O, 0}, {"clear", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_29clear, METH_NOARGS, 0}, {"extend", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_31extend, METH_O, 0}, {"reverse", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_33reverse, METH_NOARGS, 0}, {"pop", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_35pop, METH_VARARGS|METH_KEYWORDS, 0}, {"append", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_37append, METH_O, 0}, {"count", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_39count, METH_O, 0}, 
{"__reduce_cython__", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_43__reduce_cython__, METH_NOARGS, 0}, {"__setstate_cython__", (PyCFunction)__pyx_pw_7aiohttp_11_frozenlist_10FrozenList_45__setstate_cython__, METH_O, 0}, {0, 0, 0, 0} }; static struct PyGetSetDef __pyx_getsets_7aiohttp_11_frozenlist_FrozenList[] = { {(char *)"frozen", __pyx_getprop_7aiohttp_11_frozenlist_10FrozenList_frozen, 0, (char *)0, 0}, {0, 0, 0, 0, 0} }; static PyNumberMethods __pyx_tp_as_number_FrozenList = { 0, /*nb_add*/ 0, /*nb_subtract*/ 0, /*nb_multiply*/ #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) 0, /*nb_divide*/ #endif 0, /*nb_remainder*/ 0, /*nb_divmod*/ 0, /*nb_power*/ 0, /*nb_negative*/ 0, /*nb_positive*/ 0, /*nb_absolute*/ 0, /*nb_nonzero*/ 0, /*nb_invert*/ 0, /*nb_lshift*/ 0, /*nb_rshift*/ 0, /*nb_and*/ 0, /*nb_xor*/ 0, /*nb_or*/ #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) 0, /*nb_coerce*/ #endif 0, /*nb_int*/ #if PY_MAJOR_VERSION < 3 0, /*nb_long*/ #else 0, /*reserved*/ #endif 0, /*nb_float*/ #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) 0, /*nb_oct*/ #endif #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) 0, /*nb_hex*/ #endif __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_23__iadd__, /*nb_inplace_add*/ 0, /*nb_inplace_subtract*/ 0, /*nb_inplace_multiply*/ #if PY_MAJOR_VERSION < 3 || (CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x03050000) 0, /*nb_inplace_divide*/ #endif 0, /*nb_inplace_remainder*/ 0, /*nb_inplace_power*/ 0, /*nb_inplace_lshift*/ 0, /*nb_inplace_rshift*/ 0, /*nb_inplace_and*/ 0, /*nb_inplace_xor*/ 0, /*nb_inplace_or*/ 0, /*nb_floor_divide*/ 0, /*nb_true_divide*/ 0, /*nb_inplace_floor_divide*/ 0, /*nb_inplace_true_divide*/ 0, /*nb_index*/ #if PY_VERSION_HEX >= 0x03050000 0, /*nb_matrix_multiply*/ #endif #if PY_VERSION_HEX >= 0x03050000 0, /*nb_inplace_matrix_multiply*/ #endif }; static 
PySequenceMethods __pyx_tp_as_sequence_FrozenList = { __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_11__len__, /*sq_length*/ 0, /*sq_concat*/ 0, /*sq_repeat*/ __pyx_sq_item_7aiohttp_11_frozenlist_FrozenList, /*sq_item*/ 0, /*sq_slice*/ 0, /*sq_ass_item*/ 0, /*sq_ass_slice*/ __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_21__contains__, /*sq_contains*/ 0, /*sq_inplace_concat*/ 0, /*sq_inplace_repeat*/ }; static PyMappingMethods __pyx_tp_as_mapping_FrozenList = { __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_11__len__, /*mp_length*/ __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_5__getitem__, /*mp_subscript*/ __pyx_mp_ass_subscript_7aiohttp_11_frozenlist_FrozenList, /*mp_ass_subscript*/ }; static PyTypeObject __pyx_type_7aiohttp_11_frozenlist_FrozenList = { PyVarObject_HEAD_INIT(0, 0) "aiohttp._frozenlist.FrozenList", /*tp_name*/ sizeof(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList), /*tp_basicsize*/ 0, /*tp_itemsize*/ __pyx_tp_dealloc_7aiohttp_11_frozenlist_FrozenList, /*tp_dealloc*/ 0, /*tp_print*/ 0, /*tp_getattr*/ 0, /*tp_setattr*/ #if PY_MAJOR_VERSION < 3 0, /*tp_compare*/ #endif #if PY_MAJOR_VERSION >= 3 0, /*tp_as_async*/ #endif __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_41__repr__, /*tp_repr*/ &__pyx_tp_as_number_FrozenList, /*tp_as_number*/ &__pyx_tp_as_sequence_FrozenList, /*tp_as_sequence*/ &__pyx_tp_as_mapping_FrozenList, /*tp_as_mapping*/ 0, /*tp_hash*/ 0, /*tp_call*/ 0, /*tp_str*/ 0, /*tp_getattro*/ 0, /*tp_setattro*/ 0, /*tp_as_buffer*/ Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ 0, /*tp_doc*/ __pyx_tp_traverse_7aiohttp_11_frozenlist_FrozenList, /*tp_traverse*/ __pyx_tp_clear_7aiohttp_11_frozenlist_FrozenList, /*tp_clear*/ __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_17__richcmp__, /*tp_richcompare*/ 0, /*tp_weaklistoffset*/ __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_13__iter__, /*tp_iter*/ 0, /*tp_iternext*/ 
__pyx_methods_7aiohttp_11_frozenlist_FrozenList, /*tp_methods*/ 0, /*tp_members*/ __pyx_getsets_7aiohttp_11_frozenlist_FrozenList, /*tp_getset*/ 0, /*tp_base*/ 0, /*tp_dict*/ 0, /*tp_descr_get*/ 0, /*tp_descr_set*/ 0, /*tp_dictoffset*/ __pyx_pw_7aiohttp_11_frozenlist_10FrozenList_1__init__, /*tp_init*/ 0, /*tp_alloc*/ __pyx_tp_new_7aiohttp_11_frozenlist_FrozenList, /*tp_new*/ 0, /*tp_free*/ 0, /*tp_is_gc*/ 0, /*tp_bases*/ 0, /*tp_mro*/ 0, /*tp_cache*/ 0, /*tp_subclasses*/ 0, /*tp_weaklist*/ 0, /*tp_del*/ 0, /*tp_version_tag*/ #if PY_VERSION_HEX >= 0x030400a1 0, /*tp_finalize*/ #endif }; static PyMethodDef __pyx_methods[] = { {0, 0, 0, 0} }; #if PY_MAJOR_VERSION >= 3 #if CYTHON_PEP489_MULTI_PHASE_INIT static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ static int __pyx_pymod_exec__frozenlist(PyObject* module); /*proto*/ static PyModuleDef_Slot __pyx_moduledef_slots[] = { {Py_mod_create, (void*)__pyx_pymod_create}, {Py_mod_exec, (void*)__pyx_pymod_exec__frozenlist}, {0, NULL} }; #endif static struct PyModuleDef __pyx_moduledef = { PyModuleDef_HEAD_INIT, "_frozenlist", 0, /* m_doc */ #if CYTHON_PEP489_MULTI_PHASE_INIT 0, /* m_size */ #else -1, /* m_size */ #endif __pyx_methods /* m_methods */, #if CYTHON_PEP489_MULTI_PHASE_INIT __pyx_moduledef_slots, /* m_slots */ #else NULL, /* m_reload */ #endif NULL, /* m_traverse */ NULL, /* m_clear */ NULL /* m_free */ }; #endif static __Pyx_StringTabEntry __pyx_string_tab[] = { {&__pyx_kp_s_Cannot_modify_frozen_list, __pyx_k_Cannot_modify_frozen_list, sizeof(__pyx_k_Cannot_modify_frozen_list), 0, 0, 1, 0}, {&__pyx_kp_s_FrozenList_frozen_r, __pyx_k_FrozenList_frozen_r, sizeof(__pyx_k_FrozenList_frozen_r), 0, 0, 1, 0}, {&__pyx_kp_s_Incompatible_checksums_s_vs_0x94, __pyx_k_Incompatible_checksums_s_vs_0x94, sizeof(__pyx_k_Incompatible_checksums_s_vs_0x94), 0, 0, 1, 0}, {&__pyx_n_s_MutableSequence, __pyx_k_MutableSequence, sizeof(__pyx_k_MutableSequence), 0, 0, 1, 1}, {&__pyx_n_s_PickleError, 
__pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1}, {&__pyx_n_s_RuntimeError, __pyx_k_RuntimeError, sizeof(__pyx_k_RuntimeError), 0, 0, 1, 1}, {&__pyx_n_s_aiohttp__frozenlist, __pyx_k_aiohttp__frozenlist, sizeof(__pyx_k_aiohttp__frozenlist), 0, 0, 1, 1}, {&__pyx_n_s_clear, __pyx_k_clear, sizeof(__pyx_k_clear), 0, 0, 1, 1}, {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, {&__pyx_n_s_collections_abc, __pyx_k_collections_abc, sizeof(__pyx_k_collections_abc), 0, 0, 1, 1}, {&__pyx_n_s_count, __pyx_k_count, sizeof(__pyx_k_count), 0, 0, 1, 1}, {&__pyx_n_s_dict, __pyx_k_dict, sizeof(__pyx_k_dict), 0, 0, 1, 1}, {&__pyx_n_s_format, __pyx_k_format, sizeof(__pyx_k_format), 0, 0, 1, 1}, {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, {&__pyx_n_s_index, __pyx_k_index, sizeof(__pyx_k_index), 0, 0, 1, 1}, {&__pyx_n_s_item, __pyx_k_item, sizeof(__pyx_k_item), 0, 0, 1, 1}, {&__pyx_n_s_items, __pyx_k_items, sizeof(__pyx_k_items), 0, 0, 1, 1}, {&__pyx_n_s_iter, __pyx_k_iter, sizeof(__pyx_k_iter), 0, 0, 1, 1}, {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, {&__pyx_n_s_new, __pyx_k_new, sizeof(__pyx_k_new), 0, 0, 1, 1}, {&__pyx_n_s_pickle, __pyx_k_pickle, sizeof(__pyx_k_pickle), 0, 0, 1, 1}, {&__pyx_n_s_pop, __pyx_k_pop, sizeof(__pyx_k_pop), 0, 0, 1, 1}, {&__pyx_n_s_pos, __pyx_k_pos, sizeof(__pyx_k_pos), 0, 0, 1, 1}, {&__pyx_n_s_pyx_PickleError, __pyx_k_pyx_PickleError, sizeof(__pyx_k_pyx_PickleError), 0, 0, 1, 1}, {&__pyx_n_s_pyx_checksum, __pyx_k_pyx_checksum, sizeof(__pyx_k_pyx_checksum), 0, 0, 1, 1}, {&__pyx_n_s_pyx_result, __pyx_k_pyx_result, sizeof(__pyx_k_pyx_result), 0, 0, 1, 1}, {&__pyx_n_s_pyx_state, __pyx_k_pyx_state, sizeof(__pyx_k_pyx_state), 0, 0, 1, 1}, {&__pyx_n_s_pyx_type, __pyx_k_pyx_type, 
sizeof(__pyx_k_pyx_type), 0, 0, 1, 1}, {&__pyx_n_s_pyx_unpickle_FrozenList, __pyx_k_pyx_unpickle_FrozenList, sizeof(__pyx_k_pyx_unpickle_FrozenList), 0, 0, 1, 1}, {&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1}, {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1}, {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1}, {&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1}, {&__pyx_n_s_register, __pyx_k_register, sizeof(__pyx_k_register), 0, 0, 1, 1}, {&__pyx_n_s_remove, __pyx_k_remove, sizeof(__pyx_k_remove), 0, 0, 1, 1}, {&__pyx_n_s_reversed, __pyx_k_reversed, sizeof(__pyx_k_reversed), 0, 0, 1, 1}, {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1}, {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1}, {&__pyx_kp_s_stringsource, __pyx_k_stringsource, sizeof(__pyx_k_stringsource), 0, 0, 1, 0}, {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, {&__pyx_n_s_update, __pyx_k_update, sizeof(__pyx_k_update), 0, 0, 1, 1}, {0, 0, 0, 0, 0, 0, 0} }; static int __Pyx_InitCachedBuiltins(void) { __pyx_builtin_RuntimeError = __Pyx_GetBuiltinName(__pyx_n_s_RuntimeError); if (!__pyx_builtin_RuntimeError) __PYX_ERR(0, 19, __pyx_L1_error) return 0; __pyx_L1_error:; return -1; } static int __Pyx_InitCachedConstants(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); /* "aiohttp/_frozenlist.pyx":19 * cdef object _check_frozen(self): * if self.frozen: * raise RuntimeError("Cannot modify frozen list.") # <<<<<<<<<<<<<< * * cdef inline object _fast_len(self): */ __pyx_tuple_ = PyTuple_Pack(1, __pyx_kp_s_Cannot_modify_frozen_list); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 19, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple_); __Pyx_GIVEREF(__pyx_tuple_); /* "(tree fragment)":1 * def __pyx_unpickle_FrozenList(__pyx_type, long __pyx_checksum, 
__pyx_state): # <<<<<<<<<<<<<< * if __pyx_checksum != 0x949a143: * from pickle import PickleError as __pyx_PickleError */ __pyx_tuple__2 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_pyx_PickleError, __pyx_n_s_pyx_result); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(1, 1, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__2); __Pyx_GIVEREF(__pyx_tuple__2); __pyx_codeobj__3 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__2, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_FrozenList, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__3)) __PYX_ERR(1, 1, __pyx_L1_error) __Pyx_RefNannyFinishContext(); return 0; __pyx_L1_error:; __Pyx_RefNannyFinishContext(); return -1; } static int __Pyx_InitGlobals(void) { if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); __pyx_int_0 = PyInt_FromLong(0); if (unlikely(!__pyx_int_0)) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_int_1 = PyInt_FromLong(1); if (unlikely(!__pyx_int_1)) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_int_2 = PyInt_FromLong(2); if (unlikely(!__pyx_int_2)) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_int_3 = PyInt_FromLong(3); if (unlikely(!__pyx_int_3)) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_int_4 = PyInt_FromLong(4); if (unlikely(!__pyx_int_4)) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_int_5 = PyInt_FromLong(5); if (unlikely(!__pyx_int_5)) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_int_155820355 = PyInt_FromLong(155820355L); if (unlikely(!__pyx_int_155820355)) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_int_neg_1 = PyInt_FromLong(-1); if (unlikely(!__pyx_int_neg_1)) __PYX_ERR(0, 1, __pyx_L1_error) return 0; __pyx_L1_error:; return -1; } #if PY_MAJOR_VERSION < 3 PyMODINIT_FUNC init_frozenlist(void); /*proto*/ PyMODINIT_FUNC init_frozenlist(void) #else PyMODINIT_FUNC PyInit__frozenlist(void); /*proto*/ PyMODINIT_FUNC PyInit__frozenlist(void) #if 
CYTHON_PEP489_MULTI_PHASE_INIT { return PyModuleDef_Init(&__pyx_moduledef); } static int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name) { PyObject *value = PyObject_GetAttrString(spec, from_name); int result = 0; if (likely(value)) { result = PyDict_SetItemString(moddict, to_name, value); Py_DECREF(value); } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { PyErr_Clear(); } else { result = -1; } return result; } static PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { PyObject *module = NULL, *moddict, *modname; if (__pyx_m) return __Pyx_NewRef(__pyx_m); modname = PyObject_GetAttrString(spec, "name"); if (unlikely(!modname)) goto bad; module = PyModule_NewObject(modname); Py_DECREF(modname); if (unlikely(!module)) goto bad; moddict = PyModule_GetDict(module); if (unlikely(!moddict)) goto bad; if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__") < 0)) goto bad; if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__") < 0)) goto bad; if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__") < 0)) goto bad; if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__") < 0)) goto bad; return module; bad: Py_XDECREF(module); return NULL; } static int __pyx_pymod_exec__frozenlist(PyObject *__pyx_pyinit_module) #endif #endif { PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; PyObject *__pyx_t_4 = NULL; __Pyx_RefNannyDeclarations #if CYTHON_PEP489_MULTI_PHASE_INIT if (__pyx_m && __pyx_m == __pyx_pyinit_module) return 0; #endif #if CYTHON_REFNANNY __Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); if (!__Pyx_RefNanny) { PyErr_Clear(); __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); if (!__Pyx_RefNanny) Py_FatalError("failed to import 'refnanny' module"); } #endif __Pyx_RefNannySetupContext("PyMODINIT_FUNC 
PyInit__frozenlist(void)", 0); if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) #ifdef __Pyx_CyFunction_USED if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_FusedFunction_USED if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_Coroutine_USED if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_Generator_USED if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_AsyncGen_USED if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_StopAsyncIteration_USED if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif /*--- Library function declarations ---*/ /*--- Threads initialization code ---*/ #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS #ifdef WITH_THREAD /* Python build with threading support? 
*/ PyEval_InitThreads(); #endif #endif /*--- Module creation code ---*/ #if CYTHON_PEP489_MULTI_PHASE_INIT __pyx_m = __pyx_pyinit_module; Py_INCREF(__pyx_m); #else #if PY_MAJOR_VERSION < 3 __pyx_m = Py_InitModule4("_frozenlist", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); #else __pyx_m = PyModule_Create(&__pyx_moduledef); #endif if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) #endif __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) Py_INCREF(__pyx_d); __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) #if CYTHON_COMPILING_IN_PYPY Py_INCREF(__pyx_b); #endif if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); /*--- Initialize various global constants etc. ---*/ if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif if (__pyx_module_is_main_aiohttp___frozenlist) { if (PyObject_SetAttrString(__pyx_m, "__name__", __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) } #if PY_MAJOR_VERSION >= 3 { PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) if (!PyDict_GetItemString(modules, "aiohttp._frozenlist")) { if (unlikely(PyDict_SetItemString(modules, "aiohttp._frozenlist", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error) } } #endif /*--- Builtin init code ---*/ if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) /*--- Constants init code ---*/ if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) /*--- Global init code ---*/ /*--- Variable export code ---*/ /*--- Function 
export code ---*/ /*--- Type init code ---*/ __pyx_vtabptr_7aiohttp_11_frozenlist_FrozenList = &__pyx_vtable_7aiohttp_11_frozenlist_FrozenList; __pyx_vtable_7aiohttp_11_frozenlist_FrozenList._check_frozen = (PyObject *(*)(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *))__pyx_f_7aiohttp_11_frozenlist_10FrozenList__check_frozen; __pyx_vtable_7aiohttp_11_frozenlist_FrozenList._fast_len = (PyObject *(*)(struct __pyx_obj_7aiohttp_11_frozenlist_FrozenList *))__pyx_f_7aiohttp_11_frozenlist_10FrozenList__fast_len; if (PyType_Ready(&__pyx_type_7aiohttp_11_frozenlist_FrozenList) < 0) __PYX_ERR(0, 4, __pyx_L1_error) __pyx_type_7aiohttp_11_frozenlist_FrozenList.tp_print = 0; if (__Pyx_SetVtable(__pyx_type_7aiohttp_11_frozenlist_FrozenList.tp_dict, __pyx_vtabptr_7aiohttp_11_frozenlist_FrozenList) < 0) __PYX_ERR(0, 4, __pyx_L1_error) if (PyObject_SetAttrString(__pyx_m, "FrozenList", (PyObject *)&__pyx_type_7aiohttp_11_frozenlist_FrozenList) < 0) __PYX_ERR(0, 4, __pyx_L1_error) if (__Pyx_setup_reduce((PyObject*)&__pyx_type_7aiohttp_11_frozenlist_FrozenList) < 0) __PYX_ERR(0, 4, __pyx_L1_error) __pyx_ptype_7aiohttp_11_frozenlist_FrozenList = &__pyx_type_7aiohttp_11_frozenlist_FrozenList; /*--- Type import code ---*/ /*--- Variable import code ---*/ /*--- Function import code ---*/ /*--- Execution code ---*/ #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif /* "aiohttp/_frozenlist.pyx":1 * from collections.abc import MutableSequence # <<<<<<<<<<<<<< * * */ __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_INCREF(__pyx_n_s_MutableSequence); __Pyx_GIVEREF(__pyx_n_s_MutableSequence); PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_MutableSequence); __pyx_t_2 = __Pyx_Import(__pyx_n_s_collections_abc, __pyx_t_1, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; 
__pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_MutableSequence); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); if (PyDict_SetItem(__pyx_d, __pyx_n_s_MutableSequence, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; /* "aiohttp/_frozenlist.pyx":108 * * * MutableSequence.register(FrozenList) # <<<<<<<<<<<<<< */ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_MutableSequence); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 108, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_register); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 108, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_1 = NULL; if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_3); if (likely(__pyx_t_1)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); __Pyx_INCREF(__pyx_t_1); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_3, function); } } if (!__pyx_t_1) { __pyx_t_2 = __Pyx_PyObject_CallOneArg(__pyx_t_3, ((PyObject *)__pyx_ptype_7aiohttp_11_frozenlist_FrozenList)); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 108, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); } else { #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_3)) { PyObject *__pyx_temp[2] = {__pyx_t_1, ((PyObject *)__pyx_ptype_7aiohttp_11_frozenlist_FrozenList)}; __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 108, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_GOTREF(__pyx_t_2); } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { PyObject *__pyx_temp[2] = {__pyx_t_1, ((PyObject *)__pyx_ptype_7aiohttp_11_frozenlist_FrozenList)}; __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 108, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_1); 
__pyx_t_1 = 0; __Pyx_GOTREF(__pyx_t_2); } else #endif { __pyx_t_4 = PyTuple_New(1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 108, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_1); __pyx_t_1 = NULL; __Pyx_INCREF(((PyObject *)__pyx_ptype_7aiohttp_11_frozenlist_FrozenList)); __Pyx_GIVEREF(((PyObject *)__pyx_ptype_7aiohttp_11_frozenlist_FrozenList)); PyTuple_SET_ITEM(__pyx_t_4, 0+1, ((PyObject *)__pyx_ptype_7aiohttp_11_frozenlist_FrozenList)); __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_4, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 108, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; } } __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; /* "(tree fragment)":1 * def __pyx_unpickle_FrozenList(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< * if __pyx_checksum != 0x949a143: * from pickle import PickleError as __pyx_PickleError */ __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_7aiohttp_11_frozenlist_1__pyx_unpickle_FrozenList, NULL, __pyx_n_s_aiohttp__frozenlist); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 1, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_FrozenList, __pyx_t_2) < 0) __PYX_ERR(1, 1, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; /* "aiohttp/_frozenlist.pyx":1 * from collections.abc import MutableSequence # <<<<<<<<<<<<<< * * */ __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_2) < 0) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; /*--- Wrapped vars code ---*/ goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); if (__pyx_m) { if (__pyx_d) { __Pyx_AddTraceback("init aiohttp._frozenlist", 0, __pyx_lineno, __pyx_filename); } 
Py_DECREF(__pyx_m); __pyx_m = 0; } else if (!PyErr_Occurred()) { PyErr_SetString(PyExc_ImportError, "init aiohttp._frozenlist"); } __pyx_L0:; __Pyx_RefNannyFinishContext(); #if CYTHON_PEP489_MULTI_PHASE_INIT return (__pyx_m != NULL) ? 0 : -1; #elif PY_MAJOR_VERSION >= 3 return __pyx_m; #else return; #endif } /* --- Runtime support code --- */ /* Refnanny */ #if CYTHON_REFNANNY static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { PyObject *m = NULL, *p = NULL; void *r = NULL; m = PyImport_ImportModule((char *)modname); if (!m) goto end; p = PyObject_GetAttrString(m, (char *)"RefNannyAPI"); if (!p) goto end; r = PyLong_AsVoidPtr(p); end: Py_XDECREF(p); Py_XDECREF(m); return (__Pyx_RefNannyAPIStruct *)r; } #endif /* GetBuiltinName */ static PyObject *__Pyx_GetBuiltinName(PyObject *name) { PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); if (unlikely(!result)) { PyErr_Format(PyExc_NameError, #if PY_MAJOR_VERSION >= 3 "name '%U' is not defined", name); #else "name '%.200s' is not defined", PyString_AS_STRING(name)); #endif } return result; } /* RaiseDoubleKeywords */ static void __Pyx_RaiseDoubleKeywordsError( const char* func_name, PyObject* kw_name) { PyErr_Format(PyExc_TypeError, #if PY_MAJOR_VERSION >= 3 "%s() got multiple values for keyword argument '%U'", func_name, kw_name); #else "%s() got multiple values for keyword argument '%s'", func_name, PyString_AsString(kw_name)); #endif } /* ParseKeywords */ static int __Pyx_ParseOptionalKeywords( PyObject *kwds, PyObject **argnames[], PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, const char* function_name) { PyObject *key = 0, *value = 0; Py_ssize_t pos = 0; PyObject*** name; PyObject*** first_kw_arg = argnames + num_pos_args; while (PyDict_Next(kwds, &pos, &key, &value)) { name = first_kw_arg; while (*name && (**name != key)) name++; if (*name) { values[name-argnames] = value; continue; } name = first_kw_arg; #if PY_MAJOR_VERSION < 3 if 
(likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) { while (*name) { if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) && _PyString_Eq(**name, key)) { values[name-argnames] = value; break; } name++; } if (*name) continue; else { PyObject*** argname = argnames; while (argname != first_kw_arg) { if ((**argname == key) || ( (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) && _PyString_Eq(**argname, key))) { goto arg_passed_twice; } argname++; } } } else #endif if (likely(PyUnicode_Check(key))) { while (*name) { int cmp = (**name == key) ? 0 : #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 : #endif PyUnicode_Compare(**name, key); if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; if (cmp == 0) { values[name-argnames] = value; break; } name++; } if (*name) continue; else { PyObject*** argname = argnames; while (argname != first_kw_arg) { int cmp = (**argname == key) ? 0 : #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 
1 : #endif PyUnicode_Compare(**argname, key); if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; if (cmp == 0) goto arg_passed_twice; argname++; } } } else goto invalid_keyword_type; if (kwds2) { if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; } else { goto invalid_keyword; } } return 0; arg_passed_twice: __Pyx_RaiseDoubleKeywordsError(function_name, key); goto bad; invalid_keyword_type: PyErr_Format(PyExc_TypeError, "%.200s() keywords must be strings", function_name); goto bad; invalid_keyword: PyErr_Format(PyExc_TypeError, #if PY_MAJOR_VERSION < 3 "%.200s() got an unexpected keyword argument '%.200s'", function_name, PyString_AsString(key)); #else "%s() got an unexpected keyword argument '%U'", function_name, key); #endif bad: return -1; } /* RaiseArgTupleInvalid */ static void __Pyx_RaiseArgtupleInvalid( const char* func_name, int exact, Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found) { Py_ssize_t num_expected; const char *more_or_less; if (num_found < num_min) { num_expected = num_min; more_or_less = "at least"; } else { num_expected = num_max; more_or_less = "at most"; } if (exact) { more_or_less = "exactly"; } PyErr_Format(PyExc_TypeError, "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", func_name, more_or_less, num_expected, (num_expected == 1) ? 
"" : "s", num_found); } /* PyObjectCall */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { PyObject *result; ternaryfunc call = func->ob_type->tp_call; if (unlikely(!call)) return PyObject_Call(func, arg, kw); if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) return NULL; result = (*call)(func, arg, kw); Py_LeaveRecursiveCall(); if (unlikely(!result) && unlikely(!PyErr_Occurred())) { PyErr_SetString( PyExc_SystemError, "NULL result without error in PyObject_Call"); } return result; } #endif /* PyErrFetchRestore */ #if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { PyObject *tmp_type, *tmp_value, *tmp_tb; tmp_type = tstate->curexc_type; tmp_value = tstate->curexc_value; tmp_tb = tstate->curexc_traceback; tstate->curexc_type = type; tstate->curexc_value = value; tstate->curexc_traceback = tb; Py_XDECREF(tmp_type); Py_XDECREF(tmp_value); Py_XDECREF(tmp_tb); } static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { *type = tstate->curexc_type; *value = tstate->curexc_value; *tb = tstate->curexc_traceback; tstate->curexc_type = 0; tstate->curexc_value = 0; tstate->curexc_traceback = 0; } #endif /* RaiseException */ #if PY_MAJOR_VERSION < 3 static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, CYTHON_UNUSED PyObject *cause) { __Pyx_PyThreadState_declare Py_XINCREF(type); if (!value || value == Py_None) value = NULL; else Py_INCREF(value); if (!tb || tb == Py_None) tb = NULL; else { Py_INCREF(tb); if (!PyTraceBack_Check(tb)) { PyErr_SetString(PyExc_TypeError, "raise: arg 3 must be a traceback or None"); goto raise_error; } } if (PyType_Check(type)) { #if CYTHON_COMPILING_IN_PYPY if (!value) { Py_INCREF(Py_None); value = Py_None; } #endif PyErr_NormalizeException(&type, &value, &tb); 
} else { if (value) { PyErr_SetString(PyExc_TypeError, "instance exception may not have a separate value"); goto raise_error; } value = type; type = (PyObject*) Py_TYPE(type); Py_INCREF(type); if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { PyErr_SetString(PyExc_TypeError, "raise: exception class must be a subclass of BaseException"); goto raise_error; } } __Pyx_PyThreadState_assign __Pyx_ErrRestore(type, value, tb); return; raise_error: Py_XDECREF(value); Py_XDECREF(type); Py_XDECREF(tb); return; } #else static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { PyObject* owned_instance = NULL; if (tb == Py_None) { tb = 0; } else if (tb && !PyTraceBack_Check(tb)) { PyErr_SetString(PyExc_TypeError, "raise: arg 3 must be a traceback or None"); goto bad; } if (value == Py_None) value = 0; if (PyExceptionInstance_Check(type)) { if (value) { PyErr_SetString(PyExc_TypeError, "instance exception may not have a separate value"); goto bad; } value = type; type = (PyObject*) Py_TYPE(value); } else if (PyExceptionClass_Check(type)) { PyObject *instance_class = NULL; if (value && PyExceptionInstance_Check(value)) { instance_class = (PyObject*) Py_TYPE(value); if (instance_class != type) { int is_subclass = PyObject_IsSubclass(instance_class, type); if (!is_subclass) { instance_class = NULL; } else if (unlikely(is_subclass == -1)) { goto bad; } else { type = instance_class; } } } if (!instance_class) { PyObject *args; if (!value) args = PyTuple_New(0); else if (PyTuple_Check(value)) { Py_INCREF(value); args = value; } else args = PyTuple_Pack(1, value); if (!args) goto bad; owned_instance = PyObject_Call(type, args, NULL); Py_DECREF(args); if (!owned_instance) goto bad; value = owned_instance; if (!PyExceptionInstance_Check(value)) { PyErr_Format(PyExc_TypeError, "calling %R should have returned an instance of " "BaseException, not %R", type, Py_TYPE(value)); goto bad; } } } else { 
PyErr_SetString(PyExc_TypeError, "raise: exception class must be a subclass of BaseException"); goto bad; } if (cause) { PyObject *fixed_cause; if (cause == Py_None) { fixed_cause = NULL; } else if (PyExceptionClass_Check(cause)) { fixed_cause = PyObject_CallObject(cause, NULL); if (fixed_cause == NULL) goto bad; } else if (PyExceptionInstance_Check(cause)) { fixed_cause = cause; Py_INCREF(fixed_cause); } else { PyErr_SetString(PyExc_TypeError, "exception causes must derive from " "BaseException"); goto bad; } PyException_SetCause(value, fixed_cause); } PyErr_SetObject(type, value); if (tb) { #if CYTHON_COMPILING_IN_PYPY PyObject *tmp_type, *tmp_value, *tmp_tb; PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); Py_INCREF(tb); PyErr_Restore(tmp_type, tmp_value, tb); Py_XDECREF(tmp_tb); #else PyThreadState *tstate = __Pyx_PyThreadState_Current; PyObject* tmp_tb = tstate->curexc_traceback; if (tb != tmp_tb) { Py_INCREF(tb); tstate->curexc_traceback = tb; Py_XDECREF(tmp_tb); } #endif } bad: Py_XDECREF(owned_instance); return; } #endif /* PyCFunctionFastCall */ #if CYTHON_FAST_PYCCALL static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { PyCFunctionObject *func = (PyCFunctionObject*)func_obj; PyCFunction meth = PyCFunction_GET_FUNCTION(func); PyObject *self = PyCFunction_GET_SELF(func); int flags = PyCFunction_GET_FLAGS(func); assert(PyCFunction_Check(func)); assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS))); assert(nargs >= 0); assert(nargs == 0 || args != NULL); /* _PyCFunction_FastCallDict() must not be called with an exception set, because it may clear it (directly or indirectly) and so the caller loses its exception */ assert(!PyErr_Occurred()); if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { return (*((__Pyx_PyCFunctionFastWithKeywords)meth)) (self, args, nargs, NULL); } else { return (*((__Pyx_PyCFunctionFast)meth)) (self, args, nargs); } } 
#endif /* PyFunctionFastCall */ #if CYTHON_FAST_PYCALL #include "frameobject.h" static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, PyObject *globals) { PyFrameObject *f; PyThreadState *tstate = __Pyx_PyThreadState_Current; PyObject **fastlocals; Py_ssize_t i; PyObject *result; assert(globals != NULL); /* XXX Perhaps we should create a specialized PyFrame_New() that doesn't take locals, but does take builtins without sanity checking them. */ assert(tstate != NULL); f = PyFrame_New(tstate, co, globals, NULL); if (f == NULL) { return NULL; } fastlocals = f->f_localsplus; for (i = 0; i < na; i++) { Py_INCREF(*args); fastlocals[i] = *args++; } result = PyEval_EvalFrameEx(f,0); ++tstate->recursion_depth; Py_DECREF(f); --tstate->recursion_depth; return result; } #if 1 || PY_VERSION_HEX < 0x030600B1 static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs) { PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); PyObject *globals = PyFunction_GET_GLOBALS(func); PyObject *argdefs = PyFunction_GET_DEFAULTS(func); PyObject *closure; #if PY_MAJOR_VERSION >= 3 PyObject *kwdefs; #endif PyObject *kwtuple, **k; PyObject **d; Py_ssize_t nd; Py_ssize_t nk; PyObject *result; assert(kwargs == NULL || PyDict_Check(kwargs)); nk = kwargs ? 
PyDict_Size(kwargs) : 0; if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { return NULL; } if ( #if PY_MAJOR_VERSION >= 3 co->co_kwonlyargcount == 0 && #endif likely(kwargs == NULL || nk == 0) && co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { if (argdefs == NULL && co->co_argcount == nargs) { result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); goto done; } else if (nargs == 0 && argdefs != NULL && co->co_argcount == Py_SIZE(argdefs)) { /* function called with no arguments, but all parameters have a default value: use default values as arguments .*/ args = &PyTuple_GET_ITEM(argdefs, 0); result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); goto done; } } if (kwargs != NULL) { Py_ssize_t pos, i; kwtuple = PyTuple_New(2 * nk); if (kwtuple == NULL) { result = NULL; goto done; } k = &PyTuple_GET_ITEM(kwtuple, 0); pos = i = 0; while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { Py_INCREF(k[i]); Py_INCREF(k[i+1]); i += 2; } nk = i / 2; } else { kwtuple = NULL; k = NULL; } closure = PyFunction_GET_CLOSURE(func); #if PY_MAJOR_VERSION >= 3 kwdefs = PyFunction_GET_KW_DEFAULTS(func); #endif if (argdefs != NULL) { d = &PyTuple_GET_ITEM(argdefs, 0); nd = Py_SIZE(argdefs); } else { d = NULL; nd = 0; } #if PY_MAJOR_VERSION >= 3 result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, args, nargs, k, (int)nk, d, (int)nd, kwdefs, closure); #else result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, args, nargs, k, (int)nk, d, (int)nd, closure); #endif Py_XDECREF(kwtuple); done: Py_LeaveRecursiveCall(); return result; } #endif #endif /* PyObjectCallMethO */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { PyObject *self, *result; PyCFunction cfunc; cfunc = PyCFunction_GET_FUNCTION(func); self = PyCFunction_GET_SELF(func); if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) return NULL; result = 
cfunc(self, arg); Py_LeaveRecursiveCall(); if (unlikely(!result) && unlikely(!PyErr_Occurred())) { PyErr_SetString( PyExc_SystemError, "NULL result without error in PyObject_Call"); } return result; } #endif /* PyObjectCallOneArg */ #if CYTHON_COMPILING_IN_CPYTHON static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { PyObject *result; PyObject *args = PyTuple_New(1); if (unlikely(!args)) return NULL; Py_INCREF(arg); PyTuple_SET_ITEM(args, 0, arg); result = __Pyx_PyObject_Call(func, args, NULL); Py_DECREF(args); return result; } static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { #if CYTHON_FAST_PYCALL if (PyFunction_Check(func)) { return __Pyx_PyFunction_FastCall(func, &arg, 1); } #endif if (likely(PyCFunction_Check(func))) { if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { return __Pyx_PyObject_CallMethO(func, arg); #if CYTHON_FAST_PYCCALL } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) { return __Pyx_PyCFunction_FastCall(func, &arg, 1); #endif } } return __Pyx__PyObject_CallOneArg(func, arg); } #else static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { PyObject *result; PyObject *args = PyTuple_Pack(1, arg); if (unlikely(!args)) return NULL; result = __Pyx_PyObject_Call(func, args, NULL); Py_DECREF(args); return result; } #endif /* PyObjectCallNoArg */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { #if CYTHON_FAST_PYCALL if (PyFunction_Check(func)) { return __Pyx_PyFunction_FastCall(func, NULL, 0); } #endif #ifdef __Pyx_CyFunction_USED if (likely(PyCFunction_Check(func) || __Pyx_TypeCheck(func, __pyx_CyFunctionType))) { #else if (likely(PyCFunction_Check(func))) { #endif if (likely(PyCFunction_GET_FLAGS(func) & METH_NOARGS)) { return __Pyx_PyObject_CallMethO(func, NULL); } } return __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL); } #endif /* PyIntBinop */ #if !CYTHON_COMPILING_IN_PYPY static 
PyObject* __Pyx_PyInt_EqObjC(PyObject *op1, PyObject *op2, CYTHON_UNUSED long intval, CYTHON_UNUSED int inplace) { if (op1 == op2) { Py_RETURN_TRUE; } #if PY_MAJOR_VERSION < 3 if (likely(PyInt_CheckExact(op1))) { const long b = intval; long a = PyInt_AS_LONG(op1); if (a == b) { Py_RETURN_TRUE; } else { Py_RETURN_FALSE; } } #endif #if CYTHON_USE_PYLONG_INTERNALS if (likely(PyLong_CheckExact(op1))) { const long b = intval; long a; const digit* digits = ((PyLongObject*)op1)->ob_digit; const Py_ssize_t size = Py_SIZE(op1); if (likely(__Pyx_sst_abs(size) <= 1)) { a = likely(size) ? digits[0] : 0; if (size == -1) a = -a; } else { switch (size) { case -2: if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { a = -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); break; } case 2: if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { a = (long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); break; } case -3: if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { a = -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); break; } case 3: if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { a = (long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); break; } case -4: if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { a = -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); break; } case 4: if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { a = (long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); break; } #if PyLong_SHIFT < 30 && PyLong_SHIFT != 15 default: return PyLong_Type.tp_richcompare(op1, op2, Py_EQ); #else default: Py_RETURN_FALSE; #endif } } if 
(a == b) { Py_RETURN_TRUE; } else { Py_RETURN_FALSE; } } #endif if (PyFloat_CheckExact(op1)) { const long b = intval; double a = PyFloat_AS_DOUBLE(op1); if ((double)a == (double)b) { Py_RETURN_TRUE; } else { Py_RETURN_FALSE; } } return PyObject_RichCompare(op1, op2, Py_EQ); } #endif /* PyObjectCallMethod1 */ static PyObject* __Pyx__PyObject_CallMethod1(PyObject* method, PyObject* arg) { PyObject *result = NULL; #if CYTHON_UNPACK_METHODS if (likely(PyMethod_Check(method))) { PyObject *self = PyMethod_GET_SELF(method); if (likely(self)) { PyObject *args; PyObject *function = PyMethod_GET_FUNCTION(method); #if CYTHON_FAST_PYCALL if (PyFunction_Check(function)) { PyObject *args[2] = {self, arg}; result = __Pyx_PyFunction_FastCall(function, args, 2); goto done; } #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(function)) { PyObject *args[2] = {self, arg}; result = __Pyx_PyCFunction_FastCall(function, args, 2); goto done; } #endif args = PyTuple_New(2); if (unlikely(!args)) goto done; Py_INCREF(self); PyTuple_SET_ITEM(args, 0, self); Py_INCREF(arg); PyTuple_SET_ITEM(args, 1, arg); Py_INCREF(function); result = __Pyx_PyObject_Call(function, args, NULL); Py_DECREF(args); Py_DECREF(function); return result; } } #endif result = __Pyx_PyObject_CallOneArg(method, arg); goto done; done: return result; } static PyObject* __Pyx_PyObject_CallMethod1(PyObject* obj, PyObject* method_name, PyObject* arg) { PyObject *method, *result = NULL; method = __Pyx_PyObject_GetAttrStr(obj, method_name); if (unlikely(!method)) goto done; result = __Pyx__PyObject_CallMethod1(method, arg); done: Py_XDECREF(method); return result; } /* pop_index */ static PyObject* __Pyx__PyObject_PopNewIndex(PyObject* L, PyObject* py_ix) { PyObject *r; if (unlikely(!py_ix)) return NULL; r = __Pyx__PyObject_PopIndex(L, py_ix); Py_DECREF(py_ix); return r; } static PyObject* __Pyx__PyObject_PopIndex(PyObject* L, PyObject* py_ix) { return __Pyx_PyObject_CallMethod1(L, __pyx_n_s_pop, py_ix); } #if 
CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS static PyObject* __Pyx__PyList_PopIndex(PyObject* L, PyObject* py_ix, Py_ssize_t ix) { Py_ssize_t size = PyList_GET_SIZE(L); if (likely(size > (((PyListObject*)L)->allocated >> 1))) { Py_ssize_t cix = ix; if (cix < 0) { cix += size; } if (likely(0 <= cix && cix < size)) { PyObject* v = PyList_GET_ITEM(L, cix); Py_SIZE(L) -= 1; size -= 1; memmove(&PyList_GET_ITEM(L, cix), &PyList_GET_ITEM(L, cix+1), (size_t)(size-cix)*sizeof(PyObject*)); return v; } } if (py_ix == Py_None) { return __Pyx__PyObject_PopNewIndex(L, PyInt_FromSsize_t(ix)); } else { return __Pyx__PyObject_PopIndex(L, py_ix); } } #endif /* PyErrExceptionMatches */ #if CYTHON_FAST_THREAD_STATE static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { Py_ssize_t i, n; n = PyTuple_GET_SIZE(tuple); #if PY_MAJOR_VERSION >= 3 for (i=0; icurexc_type; if (exc_type == err) return 1; if (unlikely(!exc_type)) return 0; if (unlikely(PyTuple_Check(err))) return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); return __Pyx_PyErr_GivenExceptionMatches(exc_type, err); } #endif /* GetAttr */ static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) { #if CYTHON_USE_TYPE_SLOTS #if PY_MAJOR_VERSION >= 3 if (likely(PyUnicode_Check(n))) #else if (likely(PyString_Check(n))) #endif return __Pyx_PyObject_GetAttrStr(o, n); #endif return PyObject_GetAttr(o, n); } /* GetAttr3 */ static PyObject *__Pyx_GetAttr3Default(PyObject *d) { __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign if (unlikely(!__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) return NULL; __Pyx_PyErr_Clear(); Py_INCREF(d); return d; } static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) { PyObject *r = __Pyx_GetAttr(o, n); return (likely(r)) ? 
r : __Pyx_GetAttr3Default(d); } /* GetModuleGlobalName */ static CYTHON_INLINE PyObject *__Pyx_GetModuleGlobalName(PyObject *name) { PyObject *result; #if !CYTHON_AVOID_BORROWED_REFS result = PyDict_GetItem(__pyx_d, name); if (likely(result)) { Py_INCREF(result); } else { #else result = PyObject_GetItem(__pyx_d, name); if (!result) { PyErr_Clear(); #endif result = __Pyx_GetBuiltinName(name); } return result; } /* Import */ static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { PyObject *empty_list = 0; PyObject *module = 0; PyObject *global_dict = 0; PyObject *empty_dict = 0; PyObject *list; #if PY_MAJOR_VERSION < 3 PyObject *py_import; py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); if (!py_import) goto bad; #endif if (from_list) list = from_list; else { empty_list = PyList_New(0); if (!empty_list) goto bad; list = empty_list; } global_dict = PyModule_GetDict(__pyx_m); if (!global_dict) goto bad; empty_dict = PyDict_New(); if (!empty_dict) goto bad; { #if PY_MAJOR_VERSION >= 3 if (level == -1) { if (strchr(__Pyx_MODULE_NAME, '.')) { module = PyImport_ImportModuleLevelObject( name, global_dict, empty_dict, list, 1); if (!module) { if (!PyErr_ExceptionMatches(PyExc_ImportError)) goto bad; PyErr_Clear(); } } level = 0; } #endif if (!module) { #if PY_MAJOR_VERSION < 3 PyObject *py_level = PyInt_FromLong(level); if (!py_level) goto bad; module = PyObject_CallFunctionObjArgs(py_import, name, global_dict, empty_dict, list, py_level, NULL); Py_DECREF(py_level); #else module = PyImport_ImportModuleLevelObject( name, global_dict, empty_dict, list, level); #endif } } bad: #if PY_MAJOR_VERSION < 3 Py_XDECREF(py_import); #endif Py_XDECREF(empty_list); Py_XDECREF(empty_dict); return module; } /* ImportFrom */ static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { 
PyErr_Format(PyExc_ImportError, #if PY_MAJOR_VERSION < 3 "cannot import name %.230s", PyString_AS_STRING(name)); #else "cannot import name %S", name); #endif } return value; } /* GetItemInt */ static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { PyObject *r; if (!j) return NULL; r = PyObject_GetItem(o, j); Py_DECREF(j); return r; } static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, CYTHON_NCP_UNUSED int wraparound, CYTHON_NCP_UNUSED int boundscheck) { #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS Py_ssize_t wrapped_i = i; if (wraparound & unlikely(i < 0)) { wrapped_i += PyList_GET_SIZE(o); } if ((!boundscheck) || likely((0 <= wrapped_i) & (wrapped_i < PyList_GET_SIZE(o)))) { PyObject *r = PyList_GET_ITEM(o, wrapped_i); Py_INCREF(r); return r; } return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); #else return PySequence_GetItem(o, i); #endif } static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, CYTHON_NCP_UNUSED int wraparound, CYTHON_NCP_UNUSED int boundscheck) { #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS Py_ssize_t wrapped_i = i; if (wraparound & unlikely(i < 0)) { wrapped_i += PyTuple_GET_SIZE(o); } if ((!boundscheck) || likely((0 <= wrapped_i) & (wrapped_i < PyTuple_GET_SIZE(o)))) { PyObject *r = PyTuple_GET_ITEM(o, wrapped_i); Py_INCREF(r); return r; } return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); #else return PySequence_GetItem(o, i); #endif } static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, CYTHON_NCP_UNUSED int wraparound, CYTHON_NCP_UNUSED int boundscheck) { #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS if (is_list || PyList_CheckExact(o)) { Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? 
i : i + PyList_GET_SIZE(o); if ((!boundscheck) || (likely((n >= 0) & (n < PyList_GET_SIZE(o))))) { PyObject *r = PyList_GET_ITEM(o, n); Py_INCREF(r); return r; } } else if (PyTuple_CheckExact(o)) { Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyTuple_GET_SIZE(o); if ((!boundscheck) || likely((n >= 0) & (n < PyTuple_GET_SIZE(o)))) { PyObject *r = PyTuple_GET_ITEM(o, n); Py_INCREF(r); return r; } } else { PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence; if (likely(m && m->sq_item)) { if (wraparound && unlikely(i < 0) && likely(m->sq_length)) { Py_ssize_t l = m->sq_length(o); if (likely(l >= 0)) { i += l; } else { if (!PyErr_ExceptionMatches(PyExc_OverflowError)) return NULL; PyErr_Clear(); } } return m->sq_item(o, i); } } #else if (is_list || PySequence_Check(o)) { return PySequence_GetItem(o, i); } #endif return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); } /* HasAttr */ static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) { PyObject *r; if (unlikely(!__Pyx_PyBaseString_Check(n))) { PyErr_SetString(PyExc_TypeError, "hasattr(): attribute name must be string"); return -1; } r = __Pyx_GetAttr(o, n); if (unlikely(!r)) { PyErr_Clear(); return 0; } else { Py_DECREF(r); return 1; } } /* SetVTable */ static int __Pyx_SetVtable(PyObject *dict, void *vtable) { #if PY_VERSION_HEX >= 0x02070000 PyObject *ob = PyCapsule_New(vtable, 0, 0); #else PyObject *ob = PyCObject_FromVoidPtr(vtable, 0); #endif if (!ob) goto bad; if (PyDict_SetItem(dict, __pyx_n_s_pyx_vtable, ob) < 0) goto bad; Py_DECREF(ob); return 0; bad: Py_XDECREF(ob); return -1; } /* SetupReduce */ static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { int ret; PyObject *name_attr; name_attr = __Pyx_PyObject_GetAttrStr(meth, __pyx_n_s_name); if (likely(name_attr)) { ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); } else { ret = -1; } if (unlikely(ret < 0)) { PyErr_Clear(); ret = 0; } Py_XDECREF(name_attr); return ret; } static int __Pyx_setup_reduce(PyObject* 
type_obj) { int ret = 0; PyObject *object_reduce = NULL; PyObject *object_reduce_ex = NULL; PyObject *reduce = NULL; PyObject *reduce_ex = NULL; PyObject *reduce_cython = NULL; PyObject *setstate = NULL; PyObject *setstate_cython = NULL; #if CYTHON_USE_PYTYPE_LOOKUP if (_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto GOOD; #else if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto GOOD; #endif #if CYTHON_USE_PYTYPE_LOOKUP object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto BAD; #else object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto BAD; #endif reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto BAD; if (reduce_ex == object_reduce_ex) { #if CYTHON_USE_PYTYPE_LOOKUP object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto BAD; #else object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto BAD; #endif reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto BAD; if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) { reduce_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_cython); if (unlikely(!reduce_cython)) goto BAD; ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto BAD; ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto BAD; setstate = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate); if (!setstate) PyErr_Clear(); if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) { setstate_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate_cython); if (unlikely(!setstate_cython)) goto BAD; ret = 
PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto BAD; ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto BAD; } PyType_Modified((PyTypeObject*)type_obj); } } goto GOOD; BAD: if (!PyErr_Occurred()) PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name); ret = -1; GOOD: #if !CYTHON_USE_PYTYPE_LOOKUP Py_XDECREF(object_reduce); Py_XDECREF(object_reduce_ex); #endif Py_XDECREF(reduce); Py_XDECREF(reduce_ex); Py_XDECREF(reduce_cython); Py_XDECREF(setstate); Py_XDECREF(setstate_cython); return ret; } /* CLineInTraceback */ #ifndef CYTHON_CLINE_IN_TRACEBACK static int __Pyx_CLineForTraceback(CYTHON_UNUSED PyThreadState *tstate, int c_line) { PyObject *use_cline; PyObject *ptype, *pvalue, *ptraceback; #if CYTHON_COMPILING_IN_CPYTHON PyObject **cython_runtime_dict; #endif __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); #if CYTHON_COMPILING_IN_CPYTHON cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); if (likely(cython_runtime_dict)) { use_cline = PyDict_GetItem(*cython_runtime_dict, __pyx_n_s_cline_in_traceback); } else #endif { PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); if (use_cline_obj) { use_cline = PyObject_Not(use_cline_obj) ? 
Py_False : Py_True; Py_DECREF(use_cline_obj); } else { PyErr_Clear(); use_cline = NULL; } } if (!use_cline) { c_line = 0; PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); } else if (PyObject_Not(use_cline) != 0) { c_line = 0; } __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); return c_line; } #endif /* CodeObjectCache */ static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { int start = 0, mid = 0, end = count - 1; if (end >= 0 && code_line > entries[end].code_line) { return count; } while (start < end) { mid = start + (end - start) / 2; if (code_line < entries[mid].code_line) { end = mid; } else if (code_line > entries[mid].code_line) { start = mid + 1; } else { return mid; } } if (code_line <= entries[mid].code_line) { return mid; } else { return mid + 1; } } static PyCodeObject *__pyx_find_code_object(int code_line) { PyCodeObject* code_object; int pos; if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { return NULL; } pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { return NULL; } code_object = __pyx_code_cache.entries[pos].code_object; Py_INCREF(code_object); return code_object; } static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { int pos, i; __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; if (unlikely(!code_line)) { return; } if (unlikely(!entries)) { entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); if (likely(entries)) { __pyx_code_cache.entries = entries; __pyx_code_cache.max_count = 64; __pyx_code_cache.count = 1; entries[0].code_line = code_line; entries[0].code_object = code_object; Py_INCREF(code_object); } return; } pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); 
if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { PyCodeObject* tmp = entries[pos].code_object; entries[pos].code_object = code_object; Py_DECREF(tmp); return; } if (__pyx_code_cache.count == __pyx_code_cache.max_count) { int new_max = __pyx_code_cache.max_count + 64; entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( __pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry)); if (unlikely(!entries)) { return; } __pyx_code_cache.entries = entries; __pyx_code_cache.max_count = new_max; } for (i=__pyx_code_cache.count; i>pos; i--) { entries[i] = entries[i-1]; } entries[pos].code_line = code_line; entries[pos].code_object = code_object; __pyx_code_cache.count++; Py_INCREF(code_object); } /* AddTraceback */ #include "compile.h" #include "frameobject.h" #include "traceback.h" static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( const char *funcname, int c_line, int py_line, const char *filename) { PyCodeObject *py_code = 0; PyObject *py_srcfile = 0; PyObject *py_funcname = 0; #if PY_MAJOR_VERSION < 3 py_srcfile = PyString_FromString(filename); #else py_srcfile = PyUnicode_FromString(filename); #endif if (!py_srcfile) goto bad; if (c_line) { #if PY_MAJOR_VERSION < 3 py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); #else py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); #endif } else { #if PY_MAJOR_VERSION < 3 py_funcname = PyString_FromString(funcname); #else py_funcname = PyUnicode_FromString(funcname); #endif } if (!py_funcname) goto bad; py_code = __Pyx_PyCode_New( 0, 0, 0, 0, 0, __pyx_empty_bytes, /*PyObject *code,*/ __pyx_empty_tuple, /*PyObject *consts,*/ __pyx_empty_tuple, /*PyObject *names,*/ __pyx_empty_tuple, /*PyObject *varnames,*/ __pyx_empty_tuple, /*PyObject *freevars,*/ __pyx_empty_tuple, /*PyObject *cellvars,*/ py_srcfile, /*PyObject *filename,*/ py_funcname, /*PyObject *name,*/ py_line, __pyx_empty_bytes 
/*PyObject *lnotab*/ ); Py_DECREF(py_srcfile); Py_DECREF(py_funcname); return py_code; bad: Py_XDECREF(py_srcfile); Py_XDECREF(py_funcname); return NULL; } static void __Pyx_AddTraceback(const char *funcname, int c_line, int py_line, const char *filename) { PyCodeObject *py_code = 0; PyFrameObject *py_frame = 0; PyThreadState *tstate = __Pyx_PyThreadState_Current; if (c_line) { c_line = __Pyx_CLineForTraceback(tstate, c_line); } py_code = __pyx_find_code_object(c_line ? -c_line : py_line); if (!py_code) { py_code = __Pyx_CreateCodeObjectForTraceback( funcname, c_line, py_line, filename); if (!py_code) goto bad; __pyx_insert_code_object(c_line ? -c_line : py_line, py_code); } py_frame = PyFrame_New( tstate, /*PyThreadState *tstate,*/ py_code, /*PyCodeObject *code,*/ __pyx_d, /*PyObject *globals,*/ 0 /*PyObject *locals*/ ); if (!py_frame) goto bad; __Pyx_PyFrame_SetLineNumber(py_frame, py_line); PyTraceBack_Here(py_frame); bad: Py_XDECREF(py_code); Py_XDECREF(py_frame); } /* CIntToPy */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value) { const int neg_one = (int) -1, const_zero = (int) 0; const int is_unsigned = neg_one > const_zero; if (is_unsigned) { if (sizeof(int) < sizeof(long)) { return PyInt_FromLong((long) value); } else if (sizeof(int) <= sizeof(unsigned long)) { return PyLong_FromUnsignedLong((unsigned long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); #endif } } else { if (sizeof(int) <= sizeof(long)) { return PyInt_FromLong((long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { return PyLong_FromLongLong((PY_LONG_LONG) value); #endif } } { int one = 1; int little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&value; return _PyLong_FromByteArray(bytes, sizeof(int), little, !is_unsigned); } } /* CIntFromPyVerify */ #define __PYX_VERIFY_RETURN_INT(target_type, func_type, 
func_value)\ __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) #define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) #define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ {\ func_type value = func_value;\ if (sizeof(target_type) < sizeof(func_type)) {\ if (unlikely(value != (func_type) (target_type) value)) {\ func_type zero = 0;\ if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ return (target_type) -1;\ if (is_unsigned && unlikely(value < zero))\ goto raise_neg_overflow;\ else\ goto raise_overflow;\ }\ }\ return (target_type) value;\ } /* CIntToPy */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { const long neg_one = (long) -1, const_zero = (long) 0; const int is_unsigned = neg_one > const_zero; if (is_unsigned) { if (sizeof(long) < sizeof(long)) { return PyInt_FromLong((long) value); } else if (sizeof(long) <= sizeof(unsigned long)) { return PyLong_FromUnsignedLong((unsigned long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); #endif } } else { if (sizeof(long) <= sizeof(long)) { return PyInt_FromLong((long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { return PyLong_FromLongLong((PY_LONG_LONG) value); #endif } } { int one = 1; int little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&value; return _PyLong_FromByteArray(bytes, sizeof(long), little, !is_unsigned); } } /* CIntFromPy */ static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { const long neg_one = (long) -1, const_zero = (long) 0; const int is_unsigned = neg_one > const_zero; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_Check(x))) { if (sizeof(long) < sizeof(long)) { __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) } else { long val = PyInt_AS_LONG(x); if (is_unsigned && 
unlikely(val < 0)) { goto raise_neg_overflow; } return (long) val; } } else #endif if (likely(PyLong_Check(x))) { if (is_unsigned) { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)x)->ob_digit; switch (Py_SIZE(x)) { case 0: return (long) 0; case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) case 2: if (8 * sizeof(long) > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); } } break; case 3: if (8 * sizeof(long) > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); } } break; case 4: if (8 * sizeof(long) > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); } } break; } #endif #if CYTHON_COMPILING_IN_CPYTHON if (unlikely(Py_SIZE(x) < 0)) { goto raise_neg_overflow; } #else { int result = PyObject_RichCompareBool(x, Py_False, Py_LT); if (unlikely(result < 0)) return (long) -1; if (unlikely(result == 1)) goto raise_neg_overflow; } #endif if (sizeof(long) <= sizeof(unsigned long)) { 
__PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) #ifdef HAVE_LONG_LONG } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) #endif } } else { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)x)->ob_digit; switch (Py_SIZE(x)) { case 0: return (long) 0; case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) case -2: if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case 2: if (8 * sizeof(long) > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case -3: if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case 3: if (8 * sizeof(long) > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned 
long)digits[0]))) } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case -4: if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case 4: if (8 * sizeof(long) > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; } #endif if (sizeof(long) <= sizeof(long)) { __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) #ifdef HAVE_LONG_LONG } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) #endif } } { #if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) PyErr_SetString(PyExc_RuntimeError, "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); #else long val; PyObject *v = __Pyx_PyNumber_IntOrLong(x); #if PY_MAJOR_VERSION < 3 if (likely(v) && !PyLong_Check(v)) { PyObject *tmp = v; v = PyNumber_Long(tmp); Py_DECREF(tmp); } #endif if (likely(v)) { int one = 1; int is_little = (int)*(unsigned char *)&one; unsigned char 
*bytes = (unsigned char *)&val; int ret = _PyLong_AsByteArray((PyLongObject *)v, bytes, sizeof(val), is_little, !is_unsigned); Py_DECREF(v); if (likely(!ret)) return val; } #endif return (long) -1; } } else { long val; PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); if (!tmp) return (long) -1; val = __Pyx_PyInt_As_long(tmp); Py_DECREF(tmp); return val; } raise_overflow: PyErr_SetString(PyExc_OverflowError, "value too large to convert to long"); return (long) -1; raise_neg_overflow: PyErr_SetString(PyExc_OverflowError, "can't convert negative value to long"); return (long) -1; } /* CIntFromPy */ static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { const int neg_one = (int) -1, const_zero = (int) 0; const int is_unsigned = neg_one > const_zero; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_Check(x))) { if (sizeof(int) < sizeof(long)) { __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) } else { long val = PyInt_AS_LONG(x); if (is_unsigned && unlikely(val < 0)) { goto raise_neg_overflow; } return (int) val; } } else #endif if (likely(PyLong_Check(x))) { if (is_unsigned) { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)x)->ob_digit; switch (Py_SIZE(x)) { case 0: return (int) 0; case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) case 2: if (8 * sizeof(int) > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); } } break; case 3: if (8 * sizeof(int) > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { return (int) (((((((int)digits[2]) << PyLong_SHIFT) | 
(int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); } } break; case 4: if (8 * sizeof(int) > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); } } break; } #endif #if CYTHON_COMPILING_IN_CPYTHON if (unlikely(Py_SIZE(x) < 0)) { goto raise_neg_overflow; } #else { int result = PyObject_RichCompareBool(x, Py_False, Py_LT); if (unlikely(result < 0)) return (int) -1; if (unlikely(result == 1)) goto raise_neg_overflow; } #endif if (sizeof(int) <= sizeof(unsigned long)) { __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) #ifdef HAVE_LONG_LONG } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) #endif } } else { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)x)->ob_digit; switch (Py_SIZE(x)) { case 0: return (int) 0; case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) case -2: if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case 2: if (8 * sizeof(int) > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | 
(unsigned long)digits[0]))) } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case -3: if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case 3: if (8 * sizeof(int) > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case -4: if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case 4: if (8 * sizeof(int) > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { return (int) 
((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; } #endif if (sizeof(int) <= sizeof(long)) { __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) #ifdef HAVE_LONG_LONG } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) #endif } } { #if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) PyErr_SetString(PyExc_RuntimeError, "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); #else int val; PyObject *v = __Pyx_PyNumber_IntOrLong(x); #if PY_MAJOR_VERSION < 3 if (likely(v) && !PyLong_Check(v)) { PyObject *tmp = v; v = PyNumber_Long(tmp); Py_DECREF(tmp); } #endif if (likely(v)) { int one = 1; int is_little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&val; int ret = _PyLong_AsByteArray((PyLongObject *)v, bytes, sizeof(val), is_little, !is_unsigned); Py_DECREF(v); if (likely(!ret)) return val; } #endif return (int) -1; } } else { int val; PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); if (!tmp) return (int) -1; val = __Pyx_PyInt_As_int(tmp); Py_DECREF(tmp); return val; } raise_overflow: PyErr_SetString(PyExc_OverflowError, "value too large to convert to int"); return (int) -1; raise_neg_overflow: PyErr_SetString(PyExc_OverflowError, "can't convert negative value to int"); return (int) -1; } /* FastTypeChecks */ #if CYTHON_COMPILING_IN_CPYTHON static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { while (a) { a = a->tp_base; if (a == b) return 1; } return b == &PyBaseObject_Type; } static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { PyObject *mro; if (a == b) return 1; mro = a->tp_mro; if (likely(mro)) { Py_ssize_t i, n; n = PyTuple_GET_SIZE(mro); for (i = 0; i < n; i++) { if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) return 1; } return 0; } return __Pyx_InBases(a, b); } #if PY_MAJOR_VERSION == 2 static int 
__Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { PyObject *exception, *value, *tb; int res; __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __Pyx_ErrFetch(&exception, &value, &tb); res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0; if (unlikely(res == -1)) { PyErr_WriteUnraisable(err); res = 0; } if (!res) { res = PyObject_IsSubclass(err, exc_type2); if (unlikely(res == -1)) { PyErr_WriteUnraisable(err); res = 0; } } __Pyx_ErrRestore(exception, value, tb); return res; } #else static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; if (!res) { res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); } return res; } #endif static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject* exc_type) { if (likely(err == exc_type)) return 1; if (likely(PyExceptionClass_Check(err))) { return __Pyx_inner_PyErr_GivenExceptionMatches2(err, NULL, exc_type); } return PyErr_GivenExceptionMatches(err, exc_type); } static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *exc_type1, PyObject *exc_type2) { if (likely(err == exc_type1 || err == exc_type2)) return 1; if (likely(PyExceptionClass_Check(err))) { return __Pyx_inner_PyErr_GivenExceptionMatches2(err, exc_type1, exc_type2); } return (PyErr_GivenExceptionMatches(err, exc_type1) || PyErr_GivenExceptionMatches(err, exc_type2)); } #endif /* CheckBinaryVersion */ static int __Pyx_check_binary_version(void) { char ctversion[4], rtversion[4]; PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION); PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion()); if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) { char message[200]; PyOS_snprintf(message, sizeof(message), "compiletime version %s of module '%.100s' " "does not match 
runtime version %s", ctversion, __Pyx_MODULE_NAME, rtversion); return PyErr_WarnEx(NULL, message, 1); } return 0; } /* InitStrings */ static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { while (t->p) { #if PY_MAJOR_VERSION < 3 if (t->is_unicode) { *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); } else if (t->intern) { *t->p = PyString_InternFromString(t->s); } else { *t->p = PyString_FromStringAndSize(t->s, t->n - 1); } #else if (t->is_unicode | t->is_str) { if (t->intern) { *t->p = PyUnicode_InternFromString(t->s); } else if (t->encoding) { *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); } else { *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); } } else { *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); } #endif if (!*t->p) return -1; if (PyObject_Hash(*t->p) == -1) PyErr_Clear(); ++t; } return 0; } static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); } static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { Py_ssize_t ignore; return __Pyx_PyObject_AsStringAndSize(o, &ignore); } #if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT #if !CYTHON_PEP393_ENABLED static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { char* defenc_c; PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); if (!defenc) return NULL; defenc_c = PyBytes_AS_STRING(defenc); #if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII { char* end = defenc_c + PyBytes_GET_SIZE(defenc); char* c; for (c = defenc_c; c < end; c++) { if ((unsigned char) (*c) >= 128) { PyUnicode_AsASCIIString(o); return NULL; } } } #endif *length = PyBytes_GET_SIZE(defenc); return defenc_c; } #else static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; #if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII if 
(likely(PyUnicode_IS_ASCII(o))) { *length = PyUnicode_GET_LENGTH(o); return PyUnicode_AsUTF8(o); } else { PyUnicode_AsASCIIString(o); return NULL; } #else return PyUnicode_AsUTF8AndSize(o, length); #endif } #endif #endif static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { #if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT if ( #if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII __Pyx_sys_getdefaultencoding_not_ascii && #endif PyUnicode_Check(o)) { return __Pyx_PyUnicode_AsStringAndSize(o, length); } else #endif #if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) if (PyByteArray_Check(o)) { *length = PyByteArray_GET_SIZE(o); return PyByteArray_AS_STRING(o); } else #endif { char* result; int r = PyBytes_AsStringAndSize(o, &result, length); if (unlikely(r < 0)) { return NULL; } else { return result; } } } static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { int is_true = x == Py_True; if (is_true | (x == Py_False) | (x == Py_None)) return is_true; else return PyObject_IsTrue(x); } static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { #if PY_MAJOR_VERSION >= 3 if (PyLong_Check(result)) { if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, "__int__ returned non-int (type %.200s). 
" "The ability to return an instance of a strict subclass of int " "is deprecated, and may be removed in a future version of Python.", Py_TYPE(result)->tp_name)) { Py_DECREF(result); return NULL; } return result; } #endif PyErr_Format(PyExc_TypeError, "__%.4s__ returned non-%.4s (type %.200s)", type_name, type_name, Py_TYPE(result)->tp_name); Py_DECREF(result); return NULL; } static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { #if CYTHON_USE_TYPE_SLOTS PyNumberMethods *m; #endif const char *name = NULL; PyObject *res = NULL; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_Check(x) || PyLong_Check(x))) #else if (likely(PyLong_Check(x))) #endif return __Pyx_NewRef(x); #if CYTHON_USE_TYPE_SLOTS m = Py_TYPE(x)->tp_as_number; #if PY_MAJOR_VERSION < 3 if (m && m->nb_int) { name = "int"; res = m->nb_int(x); } else if (m && m->nb_long) { name = "long"; res = m->nb_long(x); } #else if (likely(m && m->nb_int)) { name = "int"; res = m->nb_int(x); } #endif #else if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { res = PyNumber_Int(x); } #endif if (likely(res)) { #if PY_MAJOR_VERSION < 3 if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { #else if (unlikely(!PyLong_CheckExact(res))) { #endif return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); } } else if (!PyErr_Occurred()) { PyErr_SetString(PyExc_TypeError, "an integer is required"); } return res; } static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { Py_ssize_t ival; PyObject *x; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_CheckExact(b))) { if (sizeof(Py_ssize_t) >= sizeof(long)) return PyInt_AS_LONG(b); else return PyInt_AsSsize_t(x); } #endif if (likely(PyLong_CheckExact(b))) { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)b)->ob_digit; const Py_ssize_t size = Py_SIZE(b); if (likely(__Pyx_sst_abs(size) <= 1)) { ival = likely(size) ? 
digits[0] : 0; if (size == -1) ival = -ival; return ival; } else { switch (size) { case 2: if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case -2: if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case 3: if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case -3: if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case 4: if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case -4: if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; } } #endif return PyLong_AsSsize_t(b); } x = PyNumber_Index(b); if (!x) return -1; ival = PyInt_AsSsize_t(x); Py_DECREF(x); return ival; } static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { return PyInt_FromSize_t(ival); } #endif /* Py_PYTHON_H */ aiohttp-3.0.1/aiohttp/_frozenlist.pyx0000666000000000000000000000505513240304665016075 0ustar 00000000000000from collections.abc import MutableSequence cdef class FrozenList: cdef readonly bint frozen cdef list _items def __init__(self, items=None): self.frozen = False if items is not None: items = list(items) else: items = [] self._items = items cdef object _check_frozen(self): if self.frozen: raise RuntimeError("Cannot modify frozen list.") cdef inline object _fast_len(self): return len(self._items) 
def freeze(self): self.frozen = True def __getitem__(self, index): return self._items[index] def __setitem__(self, index, value): self._check_frozen() self._items[index] = value def __delitem__(self, index): self._check_frozen() del self._items[index] def __len__(self): return self._fast_len() def __iter__(self): return self._items.__iter__() def __reversed__(self): return self._items.__reversed__() def __richcmp__(self, other, op): if op == 0: # < return list(self) < other if op == 1: # <= return list(self) <= other if op == 2: # == return list(self) == other if op == 3: # != return list(self) != other if op == 4: # > return list(self) > other if op == 5: # => return list(self) >= other def insert(self, pos, item): self._check_frozen() self._items.insert(pos, item) def __contains__(self, item): return item in self._items def __iadd__(self, items): self._check_frozen() self._items += list(items) return self def index(self, item): return self._items.index(item) def remove(self, item): self._check_frozen() self._items.remove(item) def clear(self): self._check_frozen() self._items.clear() def extend(self, items): self._check_frozen() self._items += list(items) def reverse(self): self._check_frozen() self._items.reverse() def pop(self, index=-1): self._check_frozen() return self._items.pop(index) def append(self, item): self._check_frozen() return self._items.append(item) def count(self, item): return self._items.count(item) def __repr__(self): return ''.format(self.frozen, self._items) MutableSequence.register(FrozenList) aiohttp-3.0.1/aiohttp/_http_parser.c0000666000000000000000000221626613240304736015644 0ustar 00000000000000/* Generated by Cython 0.27.3 */ /* BEGIN: Cython Metadata { "distutils": { "define_macros": [ [ "HTTP_PARSER_STRICT", 0 ] ], "depends": [], "name": "aiohttp._http_parser", "sources": [ "aiohttp/_http_parser.pyx", "vendor/http-parser/http_parser.c" ] }, "module_name": "aiohttp._http_parser" } END: Cython Metadata */ #define PY_SSIZE_T_CLEAN 
#include "Python.h" #ifndef Py_PYTHON_H #error Python headers needed to compile C extensions, please install development version of Python. #elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) #error Cython requires Python 2.6+ or Python 3.3+. #else #define CYTHON_ABI "0_27_3" #define CYTHON_FUTURE_DIVISION 1 #include #ifndef offsetof #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) #endif #if !defined(WIN32) && !defined(MS_WINDOWS) #ifndef __stdcall #define __stdcall #endif #ifndef __cdecl #define __cdecl #endif #ifndef __fastcall #define __fastcall #endif #endif #ifndef DL_IMPORT #define DL_IMPORT(t) t #endif #ifndef DL_EXPORT #define DL_EXPORT(t) t #endif #define __PYX_COMMA , #ifndef HAVE_LONG_LONG #if PY_VERSION_HEX >= 0x02070000 #define HAVE_LONG_LONG #endif #endif #ifndef PY_LONG_LONG #define PY_LONG_LONG LONG_LONG #endif #ifndef Py_HUGE_VAL #define Py_HUGE_VAL HUGE_VAL #endif #ifdef PYPY_VERSION #define CYTHON_COMPILING_IN_PYPY 1 #define CYTHON_COMPILING_IN_PYSTON 0 #define CYTHON_COMPILING_IN_CPYTHON 0 #undef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 0 #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #if PY_VERSION_HEX < 0x03050000 #undef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 0 #elif !defined(CYTHON_USE_ASYNC_SLOTS) #define CYTHON_USE_ASYNC_SLOTS 1 #endif #undef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 0 #undef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 0 #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #undef CYTHON_USE_PYLONG_INTERNALS #define CYTHON_USE_PYLONG_INTERNALS 0 #undef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 1 #undef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 0 #undef CYTHON_UNPACK_METHODS #define CYTHON_UNPACK_METHODS 0 #undef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 0 #undef CYTHON_FAST_PYCALL #define 
CYTHON_FAST_PYCALL 0 #undef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT 0 #undef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE 0 #elif defined(PYSTON_VERSION) #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_PYSTON 1 #define CYTHON_COMPILING_IN_CPYTHON 0 #ifndef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 1 #endif #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #undef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 0 #undef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 0 #ifndef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 1 #endif #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #undef CYTHON_USE_PYLONG_INTERNALS #define CYTHON_USE_PYLONG_INTERNALS 0 #ifndef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 0 #endif #ifndef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 1 #endif #ifndef CYTHON_UNPACK_METHODS #define CYTHON_UNPACK_METHODS 1 #endif #undef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 0 #undef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 0 #undef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT 0 #undef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE 0 #else #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_PYSTON 0 #define CYTHON_COMPILING_IN_CPYTHON 1 #ifndef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 1 #endif #if PY_VERSION_HEX < 0x02070000 #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) #define CYTHON_USE_PYTYPE_LOOKUP 1 #endif #if PY_MAJOR_VERSION < 3 #undef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 0 #elif !defined(CYTHON_USE_ASYNC_SLOTS) #define CYTHON_USE_ASYNC_SLOTS 1 #endif #if PY_VERSION_HEX < 0x02070000 #undef CYTHON_USE_PYLONG_INTERNALS #define CYTHON_USE_PYLONG_INTERNALS 0 #elif !defined(CYTHON_USE_PYLONG_INTERNALS) #define 
CYTHON_USE_PYLONG_INTERNALS 1 #endif #ifndef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 1 #endif #ifndef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 1 #endif #if PY_VERSION_HEX < 0x030300F0 #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #elif !defined(CYTHON_USE_UNICODE_WRITER) #define CYTHON_USE_UNICODE_WRITER 1 #endif #ifndef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 0 #endif #ifndef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 1 #endif #ifndef CYTHON_UNPACK_METHODS #define CYTHON_UNPACK_METHODS 1 #endif #ifndef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 1 #endif #ifndef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 1 #endif #ifndef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT (0 && PY_VERSION_HEX >= 0x03050000) #endif #ifndef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) #endif #endif #if !defined(CYTHON_FAST_PYCCALL) #define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) #endif #if CYTHON_USE_PYLONG_INTERNALS #include "longintrepr.h" #undef SHIFT #undef BASE #undef MASK #endif #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) #define Py_OptimizeFlag 0 #endif #define __PYX_BUILD_PY_SSIZE_T "n" #define CYTHON_FORMAT_SSIZE_T "z" #if PY_MAJOR_VERSION < 3 #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) #define __Pyx_DefaultClassType PyClass_Type #else #define __Pyx_BUILTIN_MODULE_NAME "builtins" #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) #define __Pyx_DefaultClassType PyType_Type #endif #ifndef Py_TPFLAGS_CHECKTYPES #define Py_TPFLAGS_CHECKTYPES 0 
#endif #ifndef Py_TPFLAGS_HAVE_INDEX #define Py_TPFLAGS_HAVE_INDEX 0 #endif #ifndef Py_TPFLAGS_HAVE_NEWBUFFER #define Py_TPFLAGS_HAVE_NEWBUFFER 0 #endif #ifndef Py_TPFLAGS_HAVE_FINALIZE #define Py_TPFLAGS_HAVE_FINALIZE 0 #endif #if PY_VERSION_HEX < 0x030700A0 || !defined(METH_FASTCALL) #ifndef METH_FASTCALL #define METH_FASTCALL 0x80 #endif typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject **args, Py_ssize_t nargs); typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject **args, Py_ssize_t nargs, PyObject *kwnames); #else #define __Pyx_PyCFunctionFast _PyCFunctionFast #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords #endif #if CYTHON_FAST_PYCCALL #define __Pyx_PyFastCFunction_Check(func)\ ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS))))) #else #define __Pyx_PyFastCFunction_Check(func) 0 #endif #if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 #define __Pyx_PyThreadState_Current PyThreadState_GET() #elif PY_VERSION_HEX >= 0x03060000 #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() #elif PY_VERSION_HEX >= 0x03000000 #define __Pyx_PyThreadState_Current PyThreadState_GET() #else #define __Pyx_PyThreadState_Current _PyThreadState_Current #endif #if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) #define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? 
PyDict_New() : _PyDict_NewPresized(n)) #else #define __Pyx_PyDict_NewPresized(n) PyDict_New() #endif #if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) #else #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) #endif #if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) #define CYTHON_PEP393_ENABLED 1 #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ 0 : _PyUnicode_Ready((PyObject *)(op))) #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) #else #define CYTHON_PEP393_ENABLED 0 #define PyUnicode_1BYTE_KIND 1 #define PyUnicode_2BYTE_KIND 2 #define PyUnicode_4BYTE_KIND 4 #define __Pyx_PyUnicode_READY(op) (0) #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 
65535 : 1114111) #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) #endif #if CYTHON_COMPILING_IN_PYPY #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) #else #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) #endif #if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) #endif #if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) #endif #if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) #endif #if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) #define PyObject_Malloc(s) PyMem_Malloc(s) #define PyObject_Free(p) PyMem_Free(p) #define PyObject_Realloc(p) PyMem_Realloc(p) #endif #if CYTHON_COMPILING_IN_PYSTON #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) #else #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) #endif #define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) #define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? 
PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) #else #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) #endif #if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) #define PyObject_ASCII(o) PyObject_Repr(o) #endif #if PY_MAJOR_VERSION >= 3 #define PyBaseString_Type PyUnicode_Type #define PyStringObject PyUnicodeObject #define PyString_Type PyUnicode_Type #define PyString_Check PyUnicode_Check #define PyString_CheckExact PyUnicode_CheckExact #endif #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) #else #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) #endif #ifndef PySet_CheckExact #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) #endif #define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) #if PY_MAJOR_VERSION >= 3 #define PyIntObject PyLongObject #define PyInt_Type PyLong_Type #define PyInt_Check(op) PyLong_Check(op) #define PyInt_CheckExact(op) PyLong_CheckExact(op) #define PyInt_FromString PyLong_FromString #define PyInt_FromUnicode PyLong_FromUnicode #define PyInt_FromLong PyLong_FromLong #define PyInt_FromSize_t PyLong_FromSize_t #define PyInt_FromSsize_t PyLong_FromSsize_t #define PyInt_AsLong PyLong_AsLong #define PyInt_AS_LONG PyLong_AS_LONG #define PyInt_AsSsize_t PyLong_AsSsize_t #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask #define PyNumber_Int PyNumber_Long #endif #if PY_MAJOR_VERSION >= 3 #define PyBoolObject PyLongObject #endif #if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY #ifndef PyUnicode_InternFromString #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) #endif #endif #if PY_VERSION_HEX < 
0x030200A4 typedef long Py_hash_t; #define __Pyx_PyInt_FromHash_t PyInt_FromLong #define __Pyx_PyInt_AsHash_t PyInt_AsLong #else #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t #endif #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : PyInstanceMethod_New(func)) #else #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) #endif #ifndef __has_attribute #define __has_attribute(x) 0 #endif #ifndef __has_cpp_attribute #define __has_cpp_attribute(x) 0 #endif #if CYTHON_USE_ASYNC_SLOTS #if PY_VERSION_HEX >= 0x030500B1 #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) #else #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) #endif #else #define __Pyx_PyType_AsAsync(obj) NULL #endif #ifndef __Pyx_PyAsyncMethodsStruct typedef struct { unaryfunc am_await; unaryfunc am_aiter; unaryfunc am_anext; } __Pyx_PyAsyncMethodsStruct; #endif #ifndef CYTHON_RESTRICT #if defined(__GNUC__) #define CYTHON_RESTRICT __restrict__ #elif defined(_MSC_VER) && _MSC_VER >= 1400 #define CYTHON_RESTRICT __restrict #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L #define CYTHON_RESTRICT restrict #else #define CYTHON_RESTRICT #endif #endif #ifndef CYTHON_UNUSED # if defined(__GNUC__) # if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) # define CYTHON_UNUSED __attribute__ ((__unused__)) # else # define CYTHON_UNUSED # endif # elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) # define CYTHON_UNUSED __attribute__ ((__unused__)) # else # define CYTHON_UNUSED # endif #endif #ifndef CYTHON_MAYBE_UNUSED_VAR # if defined(__cplusplus) template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } # else # define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) # endif #endif #ifndef CYTHON_NCP_UNUSED # if 
CYTHON_COMPILING_IN_CPYTHON # define CYTHON_NCP_UNUSED # else # define CYTHON_NCP_UNUSED CYTHON_UNUSED # endif #endif #define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) #ifdef _MSC_VER #ifndef _MSC_STDINT_H_ #if _MSC_VER < 1300 typedef unsigned char uint8_t; typedef unsigned int uint32_t; #else typedef unsigned __int8 uint8_t; typedef unsigned __int32 uint32_t; #endif #endif #else #include #endif #ifndef CYTHON_FALLTHROUGH #if defined(__cplusplus) && __cplusplus >= 201103L #if __has_cpp_attribute(fallthrough) #define CYTHON_FALLTHROUGH [[fallthrough]] #elif __has_cpp_attribute(clang::fallthrough) #define CYTHON_FALLTHROUGH [[clang::fallthrough]] #elif __has_cpp_attribute(gnu::fallthrough) #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] #endif #endif #ifndef CYTHON_FALLTHROUGH #if __has_attribute(fallthrough) #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) #else #define CYTHON_FALLTHROUGH #endif #endif #if defined(__clang__ ) && defined(__apple_build_version__) #if __apple_build_version__ < 7000000 #undef CYTHON_FALLTHROUGH #define CYTHON_FALLTHROUGH #endif #endif #endif #ifndef CYTHON_INLINE #if defined(__clang__) #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) #elif defined(__GNUC__) #define CYTHON_INLINE __inline__ #elif defined(_MSC_VER) #define CYTHON_INLINE __inline #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L #define CYTHON_INLINE inline #else #define CYTHON_INLINE #endif #endif #if defined(WIN32) || defined(MS_WINDOWS) #define _USE_MATH_DEFINES #endif #include #ifdef NAN #define __PYX_NAN() ((float) NAN) #else static CYTHON_INLINE float __PYX_NAN() { float value; memset(&value, 0xFF, sizeof(value)); return value; } #endif #if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) #define __Pyx_truncl trunc #else #define __Pyx_truncl truncl #endif #define __PYX_ERR(f_index, lineno, Ln_error) \ { \ __pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto 
Ln_error; \ } #ifndef __PYX_EXTERN_C #ifdef __cplusplus #define __PYX_EXTERN_C extern "C" #else #define __PYX_EXTERN_C extern #endif #endif #define __PYX_HAVE__aiohttp___http_parser #define __PYX_HAVE_API__aiohttp___http_parser #include #include #include "pythread.h" #include #include "../vendor/http-parser/http_parser.h" #ifdef _OPENMP #include #endif /* _OPENMP */ #if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) #define CYTHON_WITHOUT_ASSERTIONS #endif typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; #define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 #define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 0 #define __PYX_DEFAULT_STRING_ENCODING "" #define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString #define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize #define __Pyx_uchar_cast(c) ((unsigned char)c) #define __Pyx_long_cast(x) ((long)x) #define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ (sizeof(type) < sizeof(Py_ssize_t)) ||\ (sizeof(type) > sizeof(Py_ssize_t) &&\ likely(v < (type)PY_SSIZE_T_MAX ||\ v == (type)PY_SSIZE_T_MAX) &&\ (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ v == (type)PY_SSIZE_T_MIN))) ||\ (sizeof(type) == sizeof(Py_ssize_t) &&\ (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ v == (type)PY_SSIZE_T_MAX))) ) #if defined (__cplusplus) && __cplusplus >= 201103L #include #define __Pyx_sst_abs(value) std::abs(value) #elif SIZEOF_INT >= SIZEOF_SIZE_T #define __Pyx_sst_abs(value) abs(value) #elif SIZEOF_LONG >= SIZEOF_SIZE_T #define __Pyx_sst_abs(value) labs(value) #elif defined (_MSC_VER) #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L #define __Pyx_sst_abs(value) llabs(value) #elif defined (__GNUC__) #define __Pyx_sst_abs(value) __builtin_llabs(value) #else #define __Pyx_sst_abs(value) ((value<0) ? 
-value : value) #endif static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); #define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) #define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) #define __Pyx_PyBytes_FromString PyBytes_FromString #define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); #if PY_MAJOR_VERSION < 3 #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize #else #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize #endif #define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) #define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) #define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) #define __Pyx_PyByteArray_FromCString(s) 
__Pyx_PyByteArray_FromString((const char*)s) #define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) #define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { const Py_UNICODE *u_end = u; while (*u_end++) ; return (size_t)(u_end - u - 1); } #define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) #define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode #define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode #define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) #define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) #define __Pyx_PyBool_FromLong(b) ((b) ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False)) static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); #define __Pyx_PySequence_Tuple(obj)\ (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); #if CYTHON_ASSUME_SAFE_MACROS #define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) #else #define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) #endif #define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) #else #define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) #endif #define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? 
__Pyx_NewRef(x) : PyNumber_Float(x)) #if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII static int __Pyx_sys_getdefaultencoding_not_ascii; static int __Pyx_init_sys_getdefaultencoding_params(void) { PyObject* sys; PyObject* default_encoding = NULL; PyObject* ascii_chars_u = NULL; PyObject* ascii_chars_b = NULL; const char* default_encoding_c; sys = PyImport_ImportModule("sys"); if (!sys) goto bad; default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); Py_DECREF(sys); if (!default_encoding) goto bad; default_encoding_c = PyBytes_AsString(default_encoding); if (!default_encoding_c) goto bad; if (strcmp(default_encoding_c, "ascii") == 0) { __Pyx_sys_getdefaultencoding_not_ascii = 0; } else { char ascii_chars[128]; int c; for (c = 0; c < 128; c++) { ascii_chars[c] = c; } __Pyx_sys_getdefaultencoding_not_ascii = 1; ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); if (!ascii_chars_u) goto bad; ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { PyErr_Format( PyExc_ValueError, "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", default_encoding_c); goto bad; } Py_DECREF(ascii_chars_u); Py_DECREF(ascii_chars_b); } Py_DECREF(default_encoding); return 0; bad: Py_XDECREF(default_encoding); Py_XDECREF(ascii_chars_u); Py_XDECREF(ascii_chars_b); return -1; } #endif #if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 #define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) #else #define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) #if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT static char* __PYX_DEFAULT_STRING_ENCODING; static int __Pyx_init_sys_getdefaultencoding_params(void) { PyObject* sys; 
PyObject* default_encoding = NULL; char* default_encoding_c; sys = PyImport_ImportModule("sys"); if (!sys) goto bad; default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); Py_DECREF(sys); if (!default_encoding) goto bad; default_encoding_c = PyBytes_AsString(default_encoding); if (!default_encoding_c) goto bad; __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c)); if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); Py_DECREF(default_encoding); return 0; bad: Py_XDECREF(default_encoding); return -1; } #endif #endif /* Test for GCC > 2.95 */ #if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) #define likely(x) __builtin_expect(!!(x), 1) #define unlikely(x) __builtin_expect(!!(x), 0) #else /* !__GNUC__ or GCC < 2.95 */ #define likely(x) (x) #define unlikely(x) (x) #endif /* __GNUC__ */ static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } static PyObject *__pyx_m = NULL; static PyObject *__pyx_d; static PyObject *__pyx_b; static PyObject *__pyx_cython_runtime; static PyObject *__pyx_empty_tuple; static PyObject *__pyx_empty_bytes; static PyObject *__pyx_empty_unicode; static int __pyx_lineno; static int __pyx_clineno = 0; static const char * __pyx_cfilenm= __FILE__; static const char *__pyx_filename; static const char *__pyx_f[] = { "aiohttp\\_http_parser.pyx", "stringsource", "type.pxd", "bool.pxd", "complex.pxd", }; /*--- Type declarations ---*/ struct __pyx_obj_7aiohttp_12_http_parser_HttpParser; struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParserC; struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParserC; struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init; struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__on_headers_complete; /* "aiohttp/_http_parser.pyx":81 * PyMem_Free(self._csettings) * * cdef _init(self, cparser.http_parser_type mode, # <<<<<<<<<<<<<< * 
object protocol, object loop, object timer=None, * size_t max_line_size=8190, size_t max_headers=32768, */ struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init { int __pyx_n; PyObject *timer; size_t max_line_size; size_t max_headers; size_t max_field_size; PyObject *payload_exception; PyObject *response_with_body; PyObject *auto_decompress; }; /* "aiohttp/_http_parser.pyx":161 * self._raw_header_value += raw_val * * cdef _on_headers_complete(self, # <<<<<<<<<<<<<< * ENCODING='utf-8', * ENCODING_ERR='surrogateescape', */ struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__on_headers_complete { int __pyx_n; PyObject *ENCODING; PyObject *ENCODING_ERR; PyObject *CONTENT_ENCODING; PyObject *SEC_WEBSOCKET_KEY1; PyObject *SUPPORTED; }; /* "aiohttp/_http_parser.pyx":29 * * @cython.internal * cdef class HttpParser: # <<<<<<<<<<<<<< * * cdef: */ struct __pyx_obj_7aiohttp_12_http_parser_HttpParser { PyObject_HEAD struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *__pyx_vtab; struct http_parser *_cparser; struct http_parser_settings *_csettings; PyObject *_header_name; PyObject *_header_value; PyObject *_raw_header_name; PyObject *_raw_header_value; PyObject *_protocol; PyObject *_loop; PyObject *_timer; size_t _max_line_size; size_t _max_field_size; size_t _max_headers; int _response_with_body; int _started; PyObject *_url; PyObject *_buf; PyObject *_path; PyObject *_reason; PyObject *_headers; PyObject *_raw_headers; int _upgraded; PyObject *_messages; PyObject *_payload; int _payload_error; PyObject *_payload_exception; PyObject *_last_error; int _auto_decompress; Py_buffer py_buf; }; /* "aiohttp/_http_parser.pyx":309 * * * cdef class HttpRequestParserC(HttpParser): # <<<<<<<<<<<<<< * * def __init__(self, protocol, loop, timer=None, */ struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParserC { struct __pyx_obj_7aiohttp_12_http_parser_HttpParser __pyx_base; }; /* "aiohttp/_http_parser.pyx":336 * * * cdef class HttpResponseParserC(HttpParser): 
# <<<<<<<<<<<<<< * * def __init__(self, protocol, loop, timer=None, */ struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParserC { struct __pyx_obj_7aiohttp_12_http_parser_HttpParser __pyx_base; }; /* "aiohttp/_http_parser.pyx":29 * * @cython.internal * cdef class HttpParser: # <<<<<<<<<<<<<< * * cdef: */ struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser { PyObject *(*_init)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *, enum http_parser_type, PyObject *, PyObject *, struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init *__pyx_optional_args); PyObject *(*_process_header)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *); PyObject *(*_on_header_field)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *, PyObject *, PyObject *); PyObject *(*_on_header_value)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *, PyObject *, PyObject *); PyObject *(*_on_headers_complete)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *, struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__on_headers_complete *__pyx_optional_args); PyObject *(*_on_message_complete)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *); PyObject *(*_on_chunk_header)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *); PyObject *(*_on_chunk_complete)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *); PyObject *(*_on_status_complete)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *); }; static struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *__pyx_vtabptr_7aiohttp_12_http_parser_HttpParser; /* "aiohttp/_http_parser.pyx":309 * * * cdef class HttpRequestParserC(HttpParser): # <<<<<<<<<<<<<< * * def __init__(self, protocol, loop, timer=None, */ struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpRequestParserC { struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser __pyx_base; }; static struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpRequestParserC *__pyx_vtabptr_7aiohttp_12_http_parser_HttpRequestParserC; /* 
"aiohttp/_http_parser.pyx":336 * * * cdef class HttpResponseParserC(HttpParser): # <<<<<<<<<<<<<< * * def __init__(self, protocol, loop, timer=None, */ struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpResponseParserC { struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser __pyx_base; }; static struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpResponseParserC *__pyx_vtabptr_7aiohttp_12_http_parser_HttpResponseParserC; /* --- Runtime support code (head) --- */ /* Refnanny.proto */ #ifndef CYTHON_REFNANNY #define CYTHON_REFNANNY 0 #endif #if CYTHON_REFNANNY typedef struct { void (*INCREF)(void*, PyObject*, int); void (*DECREF)(void*, PyObject*, int); void (*GOTREF)(void*, PyObject*, int); void (*GIVEREF)(void*, PyObject*, int); void* (*SetupContext)(const char*, int, const char*); void (*FinishContext)(void**); } __Pyx_RefNannyAPIStruct; static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; #ifdef WITH_THREAD #define __Pyx_RefNannySetupContext(name, acquire_gil)\ if (acquire_gil) {\ PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ PyGILState_Release(__pyx_gilstate_save);\ } else {\ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ } #else #define __Pyx_RefNannySetupContext(name, acquire_gil)\ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) #endif #define __Pyx_RefNannyFinishContext()\ __Pyx_RefNanny->FinishContext(&__pyx_refnanny) #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), 
__LINE__) #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) #else #define __Pyx_RefNannyDeclarations #define __Pyx_RefNannySetupContext(name, acquire_gil) #define __Pyx_RefNannyFinishContext() #define __Pyx_INCREF(r) Py_INCREF(r) #define __Pyx_DECREF(r) Py_DECREF(r) #define __Pyx_GOTREF(r) #define __Pyx_GIVEREF(r) #define __Pyx_XINCREF(r) Py_XINCREF(r) #define __Pyx_XDECREF(r) Py_XDECREF(r) #define __Pyx_XGOTREF(r) #define __Pyx_XGIVEREF(r) #endif #define __Pyx_XDECREF_SET(r, v) do {\ PyObject *tmp = (PyObject *) r;\ r = v; __Pyx_XDECREF(tmp);\ } while (0) #define __Pyx_DECREF_SET(r, v) do {\ PyObject *tmp = (PyObject *) r;\ r = v; __Pyx_DECREF(tmp);\ } while (0) #define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) #define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) /* PyObjectGetAttrStr.proto */ #if CYTHON_USE_TYPE_SLOTS static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { PyTypeObject* tp = Py_TYPE(obj); if (likely(tp->tp_getattro)) return tp->tp_getattro(obj, attr_name); #if PY_MAJOR_VERSION < 3 if (likely(tp->tp_getattr)) return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); #endif return PyObject_GetAttr(obj, attr_name); } #else #define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) #endif /* GetBuiltinName.proto */ static PyObject *__Pyx_GetBuiltinName(PyObject *name); /* RaiseArgTupleInvalid.proto */ static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); /* KeywordStringCheck.proto */ static int __Pyx_CheckKeywordStrings(PyObject *kwdict, const char* 
function_name, int kw_allowed); /* PyObjectCall.proto */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); #else #define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) #endif /* ListAppend.proto */ #if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS static CYTHON_INLINE int __Pyx_PyList_Append(PyObject* list, PyObject* x) { PyListObject* L = (PyListObject*) list; Py_ssize_t len = Py_SIZE(list); if (likely(L->allocated > len) & likely(len > (L->allocated >> 1))) { Py_INCREF(x); PyList_SET_ITEM(list, len, x); Py_SIZE(list) = len+1; return 0; } return PyList_Append(list, x); } #else #define __Pyx_PyList_Append(L,x) PyList_Append(L,x) #endif /* GetModuleGlobalName.proto */ static CYTHON_INLINE PyObject *__Pyx_GetModuleGlobalName(PyObject *name); /* PyCFunctionFastCall.proto */ #if CYTHON_FAST_PYCCALL static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); #else #define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) #endif /* PyFunctionFastCall.proto */ #if CYTHON_FAST_PYCALL #define __Pyx_PyFunction_FastCall(func, args, nargs)\ __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) #if 1 || PY_VERSION_HEX < 0x030600B1 static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs); #else #define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) #endif #endif /* PyObjectCallMethO.proto */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); #endif /* PyObjectCallOneArg.proto */ static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); /* PySequenceContains.proto */ static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) { int result = 
PySequence_Contains(seq, item); return unlikely(result < 0) ? result : (result == (eq == Py_EQ)); } /* PyThreadStateGet.proto */ #if CYTHON_FAST_THREAD_STATE #define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; #define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; #define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type #else #define __Pyx_PyThreadState_declare #define __Pyx_PyThreadState_assign #define __Pyx_PyErr_Occurred() PyErr_Occurred() #endif /* PyErrFetchRestore.proto */ #if CYTHON_FAST_THREAD_STATE #define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) #define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) #define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) #define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) #define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); #if CYTHON_COMPILING_IN_CPYTHON #define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) #else #define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) #endif #else #define __Pyx_PyErr_Clear() PyErr_Clear() #define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) #define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) #define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) #define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) #define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) #define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) #define __Pyx_ErrFetch(type, value, tb) 
PyErr_Fetch(type, value, tb) #endif /* RaiseException.proto */ static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); /* PyObjectCallNoArg.proto */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); #else #define __Pyx_PyObject_CallNoArg(func) __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL) #endif /* decode_c_string_utf16.proto */ static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16(const char *s, Py_ssize_t size, const char *errors) { int byteorder = 0; return PyUnicode_DecodeUTF16(s, size, errors, &byteorder); } static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16LE(const char *s, Py_ssize_t size, const char *errors) { int byteorder = -1; return PyUnicode_DecodeUTF16(s, size, errors, &byteorder); } static CYTHON_INLINE PyObject *__Pyx_PyUnicode_DecodeUTF16BE(const char *s, Py_ssize_t size, const char *errors) { int byteorder = 1; return PyUnicode_DecodeUTF16(s, size, errors, &byteorder); } /* decode_c_bytes.proto */ static CYTHON_INLINE PyObject* __Pyx_decode_c_bytes( const char* cstring, Py_ssize_t length, Py_ssize_t start, Py_ssize_t stop, const char* encoding, const char* errors, PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)); /* decode_bytes.proto */ static CYTHON_INLINE PyObject* __Pyx_decode_bytes( PyObject* string, Py_ssize_t start, Py_ssize_t stop, const char* encoding, const char* errors, PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) { return __Pyx_decode_c_bytes( PyBytes_AS_STRING(string), PyBytes_GET_SIZE(string), start, stop, encoding, errors, decode_func); } /* GetItemInt.proto */ #define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ (is_list ? 
(PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ __Pyx_GetItemInt_Generic(o, to_py_func(i)))) #define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, int wraparound, int boundscheck); #define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, int wraparound, int boundscheck); static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, int wraparound, int boundscheck); /* SliceObject.proto */ static CYTHON_INLINE PyObject* __Pyx_PyObject_GetSlice( PyObject* obj, Py_ssize_t cstart, Py_ssize_t cstop, PyObject** py_start, PyObject** py_stop, PyObject** py_slice, int has_cstart, int has_cstop, int wraparound); /* RaiseDoubleKeywords.proto */ static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); /* ParseKeywords.proto */ static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ const char* function_name); /* decode_bytearray.proto */ static CYTHON_INLINE PyObject* __Pyx_decode_bytearray( PyObject* string, Py_ssize_t start, Py_ssize_t stop, const char* encoding, const char* errors, PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) { return 
__Pyx_decode_c_bytes( PyByteArray_AS_STRING(string), PyByteArray_GET_SIZE(string), start, stop, encoding, errors, decode_func); } /* GetException.proto */ #if CYTHON_FAST_THREAD_STATE #define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb) static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); #else static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); #endif /* SwapException.proto */ #if CYTHON_FAST_THREAD_STATE #define __Pyx_ExceptionSwap(type, value, tb) __Pyx__ExceptionSwap(__pyx_tstate, type, value, tb) static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); #else static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb); #endif /* SaveResetException.proto */ #if CYTHON_FAST_THREAD_STATE #define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb) static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); #define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb) static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); #else #define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb) #define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb) #endif /* PyErrExceptionMatches.proto */ #if CYTHON_FAST_THREAD_STATE #define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); #else #define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) #endif /* IncludeStringH.proto */ #include /* decode_c_string.proto */ static CYTHON_INLINE PyObject* __Pyx_decode_c_string( const 
char* cstring, Py_ssize_t start, Py_ssize_t stop, const char* encoding, const char* errors, PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)); /* RaiseTooManyValuesToUnpack.proto */ static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected); /* RaiseNeedMoreValuesToUnpack.proto */ static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index); /* IterFinish.proto */ static CYTHON_INLINE int __Pyx_IterFinish(void); /* UnpackItemEndCheck.proto */ static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected); /* SetVTable.proto */ static int __Pyx_SetVtable(PyObject *dict, void *vtable); /* SetupReduce.proto */ static int __Pyx_setup_reduce(PyObject* type_obj); /* Import.proto */ static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); /* ImportFrom.proto */ static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); /* CLineInTraceback.proto */ #ifdef CYTHON_CLINE_IN_TRACEBACK #define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? 
c_line : 0) #else static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); #endif /* CodeObjectCache.proto */ typedef struct { PyCodeObject* code_object; int code_line; } __Pyx_CodeObjectCacheEntry; struct __Pyx_CodeObjectCache { int count; int max_count; __Pyx_CodeObjectCacheEntry* entries; }; static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); static PyCodeObject *__pyx_find_code_object(int code_line); static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); /* AddTraceback.proto */ static void __Pyx_AddTraceback(const char *funcname, int c_line, int py_line, const char *filename); /* CIntToPy.proto */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value); /* CIntToPy.proto */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_int(unsigned int value); /* CIntToPy.proto */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_short(unsigned short value); /* CIntToPy.proto */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); /* CIntToPy.proto */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_uint16_t(uint16_t value); /* CIntFromPy.proto */ static CYTHON_INLINE size_t __Pyx_PyInt_As_size_t(PyObject *); /* CIntFromPy.proto */ static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); /* CIntFromPy.proto */ static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); /* FastTypeChecks.proto */ #if CYTHON_COMPILING_IN_CPYTHON #define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); #else #define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) #define 
__Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) #define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) #endif /* CheckBinaryVersion.proto */ static int __Pyx_check_binary_version(void); /* PyIdentifierFromString.proto */ #if !defined(__Pyx_PyIdentifier_FromString) #if PY_MAJOR_VERSION < 3 #define __Pyx_PyIdentifier_FromString(s) PyString_FromString(s) #else #define __Pyx_PyIdentifier_FromString(s) PyUnicode_FromString(s) #endif #endif /* ModuleImport.proto */ static PyObject *__Pyx_ImportModule(const char *name); /* TypeImport.proto */ static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, size_t size, int strict); /* InitStrings.proto */ static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, enum http_parser_type __pyx_v_mode, PyObject *__pyx_v_protocol, PyObject *__pyx_v_loop, struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init *__pyx_optional_args); /* proto*/ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__process_header(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self); /* proto*/ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_field(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_field, PyObject *__pyx_v_raw_field); /* proto*/ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_value(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_val, PyObject *__pyx_v_raw_val); /* proto*/ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__on_headers_complete 
*__pyx_optional_args); /* proto*/ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_message_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self); /* proto*/ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_chunk_header(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self); /* proto*/ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_chunk_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self); /* proto*/ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_status_complete(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self); /* proto*/ static PyObject *__pyx_f_7aiohttp_12_http_parser_18HttpRequestParserC__on_status_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParserC *__pyx_v_self); /* proto*/ static PyObject *__pyx_f_7aiohttp_12_http_parser_19HttpResponseParserC__on_status_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParserC *__pyx_v_self); /* proto*/ /* Module declarations from 'cpython.mem' */ /* Module declarations from 'cpython.version' */ /* Module declarations from '__builtin__' */ /* Module declarations from 'cpython.type' */ static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0; /* Module declarations from 'libc.string' */ /* Module declarations from 'libc.stdio' */ /* Module declarations from 'cpython.object' */ /* Module declarations from 'cpython.ref' */ /* Module declarations from 'cpython.exc' */ /* Module declarations from 'cpython.module' */ /* Module declarations from 'cpython.tuple' */ /* Module declarations from 'cpython.list' */ /* Module declarations from 'cpython.sequence' */ /* Module declarations from 'cpython.mapping' */ /* Module declarations from 'cpython.iterator' */ /* Module declarations from 'cpython.number' */ /* Module declarations from 'cpython.int' */ /* Module declarations from '__builtin__' */ /* Module declarations from 'cpython.bool' */ static 
PyTypeObject *__pyx_ptype_7cpython_4bool_bool = 0; /* Module declarations from 'cpython.long' */ /* Module declarations from 'cpython.float' */ /* Module declarations from '__builtin__' */ /* Module declarations from 'cpython.complex' */ static PyTypeObject *__pyx_ptype_7cpython_7complex_complex = 0; /* Module declarations from 'cpython.string' */ /* Module declarations from 'cpython.unicode' */ /* Module declarations from 'cpython.dict' */ /* Module declarations from 'cpython.instance' */ /* Module declarations from 'cpython.function' */ /* Module declarations from 'cpython.method' */ /* Module declarations from 'cpython.weakref' */ /* Module declarations from 'cpython.getargs' */ /* Module declarations from 'cpython.pythread' */ /* Module declarations from 'cpython.pystate' */ /* Module declarations from 'cpython.cobject' */ /* Module declarations from 'cpython.oldbuffer' */ /* Module declarations from 'cpython.set' */ /* Module declarations from 'cpython.buffer' */ /* Module declarations from 'cpython.bytes' */ /* Module declarations from 'cpython.pycapsule' */ /* Module declarations from 'cpython' */ /* Module declarations from 'cython' */ /* Module declarations from 'aiohttp' */ /* Module declarations from 'libc.stdint' */ /* Module declarations from 'aiohttp._cparser' */ /* Module declarations from 'aiohttp._http_parser' */ static PyTypeObject *__pyx_ptype_7aiohttp_12_http_parser_HttpParser = 0; static PyTypeObject *__pyx_ptype_7aiohttp_12_http_parser_HttpRequestParserC = 0; static PyTypeObject *__pyx_ptype_7aiohttp_12_http_parser_HttpResponseParserC = 0; static int __pyx_f_7aiohttp_12_http_parser_cb_on_message_begin(struct http_parser *); /*proto*/ static int __pyx_f_7aiohttp_12_http_parser_cb_on_url(struct http_parser *, char const *, size_t); /*proto*/ static int __pyx_f_7aiohttp_12_http_parser_cb_on_status(struct http_parser *, char const *, size_t); /*proto*/ static int __pyx_f_7aiohttp_12_http_parser_cb_on_header_field(struct http_parser *, char const 
*, size_t); /*proto*/ static int __pyx_f_7aiohttp_12_http_parser_cb_on_header_value(struct http_parser *, char const *, size_t); /*proto*/ static int __pyx_f_7aiohttp_12_http_parser_cb_on_headers_complete(struct http_parser *); /*proto*/ static int __pyx_f_7aiohttp_12_http_parser_cb_on_body(struct http_parser *, char const *, size_t); /*proto*/ static int __pyx_f_7aiohttp_12_http_parser_cb_on_message_complete(struct http_parser *); /*proto*/ static int __pyx_f_7aiohttp_12_http_parser_cb_on_chunk_header(struct http_parser *); /*proto*/ static int __pyx_f_7aiohttp_12_http_parser_cb_on_chunk_complete(struct http_parser *); /*proto*/ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(enum http_errno); /*proto*/ #define __Pyx_MODULE_NAME "aiohttp._http_parser" extern int __pyx_module_is_main_aiohttp___http_parser; int __pyx_module_is_main_aiohttp___http_parser = 0; /* Implementation of 'aiohttp._http_parser' */ static PyObject *__pyx_builtin_MemoryError; static PyObject *__pyx_builtin_TypeError; static PyObject *__pyx_builtin_BaseException; static const char __pyx_k__6[] = ""; static const char __pyx_k_br[] = "br"; static const char __pyx_k_ln[] = "ln"; static const char __pyx_k_URL[] = "URL"; static const char __pyx_k__13[] = ":"; static const char __pyx_k_all[] = "__all__"; static const char __pyx_k_get[] = "get"; static const char __pyx_k_off[] = "off"; static const char __pyx_k_res[] = "res"; static const char __pyx_k_sep[] = "sep"; static const char __pyx_k_url[] = "url"; static const char __pyx_k_gzip[] = "gzip"; static const char __pyx_k_hdrs[] = "hdrs"; static const char __pyx_k_host[] = "host"; static const char __pyx_k_loop[] = "loop"; static const char __pyx_k_main[] = "__main__"; static const char __pyx_k_name[] = "__name__"; static const char __pyx_k_path[] = "path"; static const char __pyx_k_port[] = "port"; static const char __pyx_k_test[] = "__test__"; static const char __pyx_k_user[] = "user"; static const char __pyx_k_yarl[] = 
"yarl"; static const char __pyx_k_build[] = "build"; static const char __pyx_k_clear[] = "clear"; static const char __pyx_k_lower[] = "lower"; static const char __pyx_k_query[] = "query"; static const char __pyx_k_timer[] = "timer"; static const char __pyx_k_utf_8[] = "utf-8"; static const char __pyx_k_decode[] = "decode"; static const char __pyx_k_extend[] = "extend"; static const char __pyx_k_format[] = "format"; static const char __pyx_k_import[] = "__import__"; static const char __pyx_k_length[] = "length"; static const char __pyx_k_parsed[] = "parsed"; static const char __pyx_k_py_buf[] = "py_buf"; static const char __pyx_k_reduce[] = "__reduce__"; static const char __pyx_k_result[] = "result"; static const char __pyx_k_schema[] = "schema"; static const char __pyx_k_scheme[] = "scheme"; static const char __pyx_k_aiohttp[] = "aiohttp"; static const char __pyx_k_deflate[] = "deflate"; static const char __pyx_k_streams[] = "streams"; static const char __pyx_k_buf_data[] = "buf_data"; static const char __pyx_k_feed_eof[] = "feed_eof"; static const char __pyx_k_fragment[] = "fragment"; static const char __pyx_k_getstate[] = "__getstate__"; static const char __pyx_k_password[] = "password"; static const char __pyx_k_protocol[] = "protocol"; static const char __pyx_k_setstate[] = "__setstate__"; static const char __pyx_k_userinfo[] = "userinfo"; static const char __pyx_k_TypeError[] = "TypeError"; static const char __pyx_k_feed_data[] = "feed_data"; static const char __pyx_k_multidict[] = "multidict"; static const char __pyx_k_parse_url[] = "_parse_url"; static const char __pyx_k_partition[] = "partition"; static const char __pyx_k_reduce_ex[] = "__reduce_ex__"; static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__"; static const char __pyx_k_CIMultiDict[] = "CIMultiDict"; static const char __pyx_k_HttpVersion[] = "HttpVersion"; static const char __pyx_k_LineTooLong[] = "LineTooLong"; static const char __pyx_k_MemoryError[] = "MemoryError"; static const char 
__pyx_k_http_parser[] = "http_parser"; static const char __pyx_k_http_writer[] = "http_writer"; static const char __pyx_k_max_headers[] = "max_headers"; static const char __pyx_k_parse_url_2[] = "parse_url"; static const char __pyx_k_StreamReader[] = "StreamReader"; static const char __pyx_k_http_version[] = "http_version"; static const char __pyx_k_BadStatusLine[] = "BadStatusLine"; static const char __pyx_k_BaseException[] = "BaseException"; static const char __pyx_k_DeflateBuffer[] = "DeflateBuffer"; static const char __pyx_k_EMPTY_PAYLOAD[] = "EMPTY_PAYLOAD"; static const char __pyx_k_HttpVersion10[] = "HttpVersion10"; static const char __pyx_k_HttpVersion11[] = "HttpVersion11"; static const char __pyx_k_InvalidHeader[] = "InvalidHeader"; static const char __pyx_k_invalid_url_r[] = "invalid url {!r}"; static const char __pyx_k_max_line_size[] = "max_line_size"; static const char __pyx_k_reduce_cython[] = "__reduce_cython__"; static const char __pyx_k_set_exception[] = "set_exception"; static const char __pyx_k_BadHttpMessage[] = "BadHttpMessage"; static const char __pyx_k_max_field_size[] = "max_field_size"; static const char __pyx_k_read_until_eof[] = "read_until_eof"; static const char __pyx_k_InvalidURLError[] = "InvalidURLError"; static const char __pyx_k_auto_decompress[] = "auto_decompress"; static const char __pyx_k_http_exceptions[] = "http_exceptions"; static const char __pyx_k_setstate_cython[] = "__setstate_cython__"; static const char __pyx_k_surrogateescape[] = "surrogateescape"; static const char __pyx_k_CONTENT_ENCODING[] = "CONTENT_ENCODING"; static const char __pyx_k_RawRequestMessage[] = "RawRequestMessage"; static const char __pyx_k_payload_exception[] = "payload_exception"; static const char __pyx_k_ContentLengthError[] = "ContentLengthError"; static const char __pyx_k_HttpRequestParserC[] = "HttpRequestParserC"; static const char __pyx_k_RawResponseMessage[] = "RawResponseMessage"; static const char __pyx_k_SEC_WEBSOCKET_KEY1[] = 
"SEC_WEBSOCKET_KEY1"; static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; static const char __pyx_k_response_with_body[] = "response_with_body"; static const char __pyx_k_HttpResponseMessageC[] = "HttpResponseMessageC"; static const char __pyx_k_PayloadEncodingError[] = "PayloadEncodingError"; static const char __pyx_k_aiohttp__http_parser[] = "aiohttp._http_parser"; static const char __pyx_k_TransferEncodingError[] = "TransferEncodingError"; static const char __pyx_k_Header_name_is_too_long[] = "Header name is too long"; static const char __pyx_k_Status_line_is_too_long[] = "Status line is too long"; static const char __pyx_k_Header_value_is_too_long[] = "Header value is too long"; static const char __pyx_k_aiohttp__http_parser_pyx[] = "aiohttp\\_http_parser.pyx"; static const char __pyx_k_end_http_chunk_receiving[] = "end_http_chunk_receiving"; static const char __pyx_k_begin_http_chunk_receiving[] = "begin_http_chunk_receiving"; static const char __pyx_k_Not_enough_data_for_satisfy_cont[] = "Not enough data for satisfy content length header."; static const char __pyx_k_Not_enough_data_for_satisfy_tran[] = "Not enough data for satisfy transfer length header."; static const char __pyx_k_no_default___reduce___due_to_non[] = "no default __reduce__ due to non-trivial __cinit__"; static PyObject *__pyx_n_s_BadHttpMessage; static PyObject *__pyx_n_s_BadStatusLine; static PyObject *__pyx_n_s_BaseException; static PyObject *__pyx_n_s_CIMultiDict; static PyObject *__pyx_n_s_CONTENT_ENCODING; static PyObject *__pyx_n_s_ContentLengthError; static PyObject *__pyx_n_s_DeflateBuffer; static PyObject *__pyx_n_s_EMPTY_PAYLOAD; static PyObject *__pyx_kp_u_Header_name_is_too_long; static PyObject *__pyx_kp_u_Header_value_is_too_long; static PyObject *__pyx_n_u_HttpRequestParserC; static PyObject *__pyx_n_u_HttpResponseMessageC; static PyObject *__pyx_n_s_HttpVersion; static PyObject *__pyx_n_s_HttpVersion10; static PyObject *__pyx_n_s_HttpVersion11; static 
PyObject *__pyx_n_s_InvalidHeader; static PyObject *__pyx_n_s_InvalidURLError; static PyObject *__pyx_n_s_LineTooLong; static PyObject *__pyx_n_s_MemoryError; static PyObject *__pyx_kp_u_Not_enough_data_for_satisfy_cont; static PyObject *__pyx_kp_u_Not_enough_data_for_satisfy_tran; static PyObject *__pyx_n_s_PayloadEncodingError; static PyObject *__pyx_n_s_RawRequestMessage; static PyObject *__pyx_n_s_RawResponseMessage; static PyObject *__pyx_n_s_SEC_WEBSOCKET_KEY1; static PyObject *__pyx_kp_u_Status_line_is_too_long; static PyObject *__pyx_n_s_StreamReader; static PyObject *__pyx_n_s_TransferEncodingError; static PyObject *__pyx_n_s_TypeError; static PyObject *__pyx_n_s_URL; static PyObject *__pyx_kp_u__13; static PyObject *__pyx_kp_b__6; static PyObject *__pyx_kp_u__6; static PyObject *__pyx_n_s_aiohttp; static PyObject *__pyx_n_s_aiohttp__http_parser; static PyObject *__pyx_kp_s_aiohttp__http_parser_pyx; static PyObject *__pyx_n_s_all; static PyObject *__pyx_n_s_auto_decompress; static PyObject *__pyx_n_s_begin_http_chunk_receiving; static PyObject *__pyx_n_u_br; static PyObject *__pyx_n_s_buf_data; static PyObject *__pyx_n_s_build; static PyObject *__pyx_n_s_clear; static PyObject *__pyx_n_s_cline_in_traceback; static PyObject *__pyx_n_s_decode; static PyObject *__pyx_n_u_deflate; static PyObject *__pyx_n_s_end_http_chunk_receiving; static PyObject *__pyx_n_s_extend; static PyObject *__pyx_n_s_feed_data; static PyObject *__pyx_n_s_feed_eof; static PyObject *__pyx_n_s_format; static PyObject *__pyx_n_s_fragment; static PyObject *__pyx_n_s_get; static PyObject *__pyx_n_s_getstate; static PyObject *__pyx_n_u_gzip; static PyObject *__pyx_n_s_hdrs; static PyObject *__pyx_n_s_host; static PyObject *__pyx_n_s_http_exceptions; static PyObject *__pyx_n_s_http_parser; static PyObject *__pyx_n_s_http_version; static PyObject *__pyx_n_s_http_writer; static PyObject *__pyx_n_s_import; static PyObject *__pyx_kp_u_invalid_url_r; static PyObject *__pyx_n_s_length; static 
PyObject *__pyx_n_s_ln; static PyObject *__pyx_n_s_loop; static PyObject *__pyx_n_s_lower; static PyObject *__pyx_n_s_main; static PyObject *__pyx_n_s_max_field_size; static PyObject *__pyx_n_s_max_headers; static PyObject *__pyx_n_s_max_line_size; static PyObject *__pyx_n_s_multidict; static PyObject *__pyx_n_s_name; static PyObject *__pyx_kp_s_no_default___reduce___due_to_non; static PyObject *__pyx_n_s_off; static PyObject *__pyx_n_s_parse_url; static PyObject *__pyx_n_s_parse_url_2; static PyObject *__pyx_n_u_parse_url_2; static PyObject *__pyx_n_s_parsed; static PyObject *__pyx_n_s_partition; static PyObject *__pyx_n_s_password; static PyObject *__pyx_n_s_path; static PyObject *__pyx_n_s_payload_exception; static PyObject *__pyx_n_s_port; static PyObject *__pyx_n_s_protocol; static PyObject *__pyx_n_s_py_buf; static PyObject *__pyx_n_s_pyx_vtable; static PyObject *__pyx_n_s_query; static PyObject *__pyx_n_s_read_until_eof; static PyObject *__pyx_n_s_reduce; static PyObject *__pyx_n_s_reduce_cython; static PyObject *__pyx_n_s_reduce_ex; static PyObject *__pyx_n_s_res; static PyObject *__pyx_n_s_response_with_body; static PyObject *__pyx_n_s_result; static PyObject *__pyx_n_s_schema; static PyObject *__pyx_n_s_scheme; static PyObject *__pyx_n_s_sep; static PyObject *__pyx_n_s_set_exception; static PyObject *__pyx_n_s_setstate; static PyObject *__pyx_n_s_setstate_cython; static PyObject *__pyx_n_s_streams; static PyObject *__pyx_n_u_surrogateescape; static PyObject *__pyx_n_s_test; static PyObject *__pyx_n_s_timer; static PyObject *__pyx_n_s_url; static PyObject *__pyx_n_s_user; static PyObject *__pyx_n_s_userinfo; static PyObject *__pyx_kp_u_utf_8; static PyObject *__pyx_n_s_yarl; static int __pyx_pf_7aiohttp_12_http_parser_10HttpParser___cinit__(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self); /* proto */ static void __pyx_pf_7aiohttp_12_http_parser_10HttpParser_2__dealloc__(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self); 
/* proto */ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_4http_version(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self); /* proto */ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_eof(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self); /* proto */ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_8feed_data(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_data); /* proto */ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_10__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self); /* proto */ static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_12__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */ static int __pyx_pf_7aiohttp_12_http_parser_18HttpRequestParserC___init__(struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParserC *__pyx_v_self, PyObject *__pyx_v_protocol, PyObject *__pyx_v_loop, PyObject *__pyx_v_timer, size_t __pyx_v_max_line_size, size_t __pyx_v_max_headers, size_t __pyx_v_max_field_size, PyObject *__pyx_v_payload_exception, PyObject *__pyx_v_response_with_body, CYTHON_UNUSED PyObject *__pyx_v_read_until_eof); /* proto */ static PyObject *__pyx_pf_7aiohttp_12_http_parser_18HttpRequestParserC_2__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParserC *__pyx_v_self); /* proto */ static PyObject *__pyx_pf_7aiohttp_12_http_parser_18HttpRequestParserC_4__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParserC *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */ static int __pyx_pf_7aiohttp_12_http_parser_19HttpResponseParserC___init__(struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParserC *__pyx_v_self, PyObject *__pyx_v_protocol, PyObject *__pyx_v_loop, PyObject 
*__pyx_v_timer, size_t __pyx_v_max_line_size, size_t __pyx_v_max_headers, size_t __pyx_v_max_field_size, PyObject *__pyx_v_payload_exception, PyObject *__pyx_v_response_with_body, CYTHON_UNUSED PyObject *__pyx_v_read_until_eof, PyObject *__pyx_v_auto_decompress); /* proto */ static PyObject *__pyx_pf_7aiohttp_12_http_parser_19HttpResponseParserC_2__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParserC *__pyx_v_self); /* proto */ static PyObject *__pyx_pf_7aiohttp_12_http_parser_19HttpResponseParserC_4__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParserC *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */ static PyObject *__pyx_pf_7aiohttp_12_http_parser_parse_url(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_url); /* proto */ static PyObject *__pyx_pf_7aiohttp_12_http_parser_2_parse_url(CYTHON_UNUSED PyObject *__pyx_self, char *__pyx_v_buf_data, size_t __pyx_v_length); /* proto */ static PyObject *__pyx_tp_new_7aiohttp_12_http_parser_HttpParser(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ static PyObject *__pyx_tp_new_7aiohttp_12_http_parser_HttpRequestParserC(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ static PyObject *__pyx_tp_new_7aiohttp_12_http_parser_HttpResponseParserC(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ static PyObject *__pyx_k_; static PyObject *__pyx_k__2; static PyObject *__pyx_tuple__3; static PyObject *__pyx_tuple__4; static PyObject *__pyx_tuple__5; static PyObject *__pyx_tuple__7; static PyObject *__pyx_tuple__8; static PyObject *__pyx_tuple__9; static PyObject *__pyx_tuple__10; static PyObject *__pyx_tuple__11; static PyObject *__pyx_tuple__12; static PyObject *__pyx_tuple__14; static PyObject *__pyx_tuple__15; static PyObject *__pyx_tuple__16; static PyObject *__pyx_tuple__18; static PyObject *__pyx_codeobj__17; static PyObject *__pyx_codeobj__19; /* "aiohttp/_http_parser.pyx":66 * Py_buffer py_buf * * def 
__cinit__(self): # <<<<<<<<<<<<<< * self._cparser = \ * PyMem_Malloc(sizeof(cparser.http_parser)) */ /* Python wrapper */ static int __pyx_pw_7aiohttp_12_http_parser_10HttpParser_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static int __pyx_pw_7aiohttp_12_http_parser_10HttpParser_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__cinit__ (wrapper)", 0); if (unlikely(PyTuple_GET_SIZE(__pyx_args) > 0)) { __Pyx_RaiseArgtupleInvalid("__cinit__", 1, 0, 0, PyTuple_GET_SIZE(__pyx_args)); return -1;} if (unlikely(__pyx_kwds) && unlikely(PyDict_Size(__pyx_kwds) > 0) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__cinit__", 0))) return -1; __pyx_r = __pyx_pf_7aiohttp_12_http_parser_10HttpParser___cinit__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_pf_7aiohttp_12_http_parser_10HttpParser___cinit__(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { int __pyx_r; __Pyx_RefNannyDeclarations int __pyx_t_1; __Pyx_RefNannySetupContext("__cinit__", 0); /* "aiohttp/_http_parser.pyx":67 * * def __cinit__(self): * self._cparser = \ # <<<<<<<<<<<<<< * PyMem_Malloc(sizeof(cparser.http_parser)) * if self._cparser is NULL: */ __pyx_v_self->_cparser = ((struct http_parser *)PyMem_Malloc((sizeof(struct http_parser)))); /* "aiohttp/_http_parser.pyx":69 * self._cparser = \ * PyMem_Malloc(sizeof(cparser.http_parser)) * if self._cparser is NULL: # <<<<<<<<<<<<<< * raise MemoryError() * */ __pyx_t_1 = ((__pyx_v_self->_cparser == NULL) != 0); if (__pyx_t_1) { /* "aiohttp/_http_parser.pyx":70 * PyMem_Malloc(sizeof(cparser.http_parser)) * if self._cparser is NULL: * raise MemoryError() # <<<<<<<<<<<<<< * * self._csettings = \ */ PyErr_NoMemory(); __PYX_ERR(0, 70, __pyx_L1_error) /* "aiohttp/_http_parser.pyx":69 * 
self._cparser = \ * PyMem_Malloc(sizeof(cparser.http_parser)) * if self._cparser is NULL: # <<<<<<<<<<<<<< * raise MemoryError() * */ } /* "aiohttp/_http_parser.pyx":72 * raise MemoryError() * * self._csettings = \ # <<<<<<<<<<<<<< * PyMem_Malloc(sizeof(cparser.http_parser_settings)) * if self._csettings is NULL: */ __pyx_v_self->_csettings = ((struct http_parser_settings *)PyMem_Malloc((sizeof(struct http_parser_settings)))); /* "aiohttp/_http_parser.pyx":74 * self._csettings = \ * PyMem_Malloc(sizeof(cparser.http_parser_settings)) * if self._csettings is NULL: # <<<<<<<<<<<<<< * raise MemoryError() * */ __pyx_t_1 = ((__pyx_v_self->_csettings == NULL) != 0); if (__pyx_t_1) { /* "aiohttp/_http_parser.pyx":75 * PyMem_Malloc(sizeof(cparser.http_parser_settings)) * if self._csettings is NULL: * raise MemoryError() # <<<<<<<<<<<<<< * * def __dealloc__(self): */ PyErr_NoMemory(); __PYX_ERR(0, 75, __pyx_L1_error) /* "aiohttp/_http_parser.pyx":74 * self._csettings = \ * PyMem_Malloc(sizeof(cparser.http_parser_settings)) * if self._csettings is NULL: # <<<<<<<<<<<<<< * raise MemoryError() * */ } /* "aiohttp/_http_parser.pyx":66 * Py_buffer py_buf * * def __cinit__(self): # <<<<<<<<<<<<<< * self._cparser = \ * PyMem_Malloc(sizeof(cparser.http_parser)) */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_AddTraceback("aiohttp._http_parser.HttpParser.__cinit__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":77 * raise MemoryError() * * def __dealloc__(self): # <<<<<<<<<<<<<< * PyMem_Free(self._cparser) * PyMem_Free(self._csettings) */ /* Python wrapper */ static void __pyx_pw_7aiohttp_12_http_parser_10HttpParser_3__dealloc__(PyObject *__pyx_v_self); /*proto*/ static void __pyx_pw_7aiohttp_12_http_parser_10HttpParser_3__dealloc__(PyObject *__pyx_v_self) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__dealloc__ (wrapper)", 0); 
__pyx_pf_7aiohttp_12_http_parser_10HttpParser_2__dealloc__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); } static void __pyx_pf_7aiohttp_12_http_parser_10HttpParser_2__dealloc__(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__dealloc__", 0); /* "aiohttp/_http_parser.pyx":78 * * def __dealloc__(self): * PyMem_Free(self._cparser) # <<<<<<<<<<<<<< * PyMem_Free(self._csettings) * */ PyMem_Free(__pyx_v_self->_cparser); /* "aiohttp/_http_parser.pyx":79 * def __dealloc__(self): * PyMem_Free(self._cparser) * PyMem_Free(self._csettings) # <<<<<<<<<<<<<< * * cdef _init(self, cparser.http_parser_type mode, */ PyMem_Free(__pyx_v_self->_csettings); /* "aiohttp/_http_parser.pyx":77 * raise MemoryError() * * def __dealloc__(self): # <<<<<<<<<<<<<< * PyMem_Free(self._cparser) * PyMem_Free(self._csettings) */ /* function exit code */ __Pyx_RefNannyFinishContext(); } /* "aiohttp/_http_parser.pyx":81 * PyMem_Free(self._csettings) * * cdef _init(self, cparser.http_parser_type mode, # <<<<<<<<<<<<<< * object protocol, object loop, object timer=None, * size_t max_line_size=8190, size_t max_headers=32768, */ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__init(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, enum http_parser_type __pyx_v_mode, PyObject *__pyx_v_protocol, PyObject *__pyx_v_loop, struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init *__pyx_optional_args) { /* "aiohttp/_http_parser.pyx":82 * * cdef _init(self, cparser.http_parser_type mode, * object protocol, object loop, object timer=None, # <<<<<<<<<<<<<< * size_t max_line_size=8190, size_t max_headers=32768, * size_t max_field_size=8190, payload_exception=None, */ PyObject *__pyx_v_timer = ((PyObject *)Py_None); size_t __pyx_v_max_line_size = ((size_t)0x1FFE); size_t __pyx_v_max_headers = ((size_t)0x8000); size_t 
__pyx_v_max_field_size = ((size_t)0x1FFE); /* "aiohttp/_http_parser.pyx":84 * object protocol, object loop, object timer=None, * size_t max_line_size=8190, size_t max_headers=32768, * size_t max_field_size=8190, payload_exception=None, # <<<<<<<<<<<<<< * response_with_body=True, auto_decompress=True): * cparser.http_parser_init(self._cparser, mode) */ PyObject *__pyx_v_payload_exception = ((PyObject *)Py_None); /* "aiohttp/_http_parser.pyx":85 * size_t max_line_size=8190, size_t max_headers=32768, * size_t max_field_size=8190, payload_exception=None, * response_with_body=True, auto_decompress=True): # <<<<<<<<<<<<<< * cparser.http_parser_init(self._cparser, mode) * self._cparser.data = self */ PyObject *__pyx_v_response_with_body = ((PyObject *)Py_True); PyObject *__pyx_v_auto_decompress = ((PyObject *)Py_True); PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; int __pyx_t_2; __Pyx_RefNannySetupContext("_init", 0); if (__pyx_optional_args) { if (__pyx_optional_args->__pyx_n > 0) { __pyx_v_timer = __pyx_optional_args->timer; if (__pyx_optional_args->__pyx_n > 1) { __pyx_v_max_line_size = __pyx_optional_args->max_line_size; if (__pyx_optional_args->__pyx_n > 2) { __pyx_v_max_headers = __pyx_optional_args->max_headers; if (__pyx_optional_args->__pyx_n > 3) { __pyx_v_max_field_size = __pyx_optional_args->max_field_size; if (__pyx_optional_args->__pyx_n > 4) { __pyx_v_payload_exception = __pyx_optional_args->payload_exception; if (__pyx_optional_args->__pyx_n > 5) { __pyx_v_response_with_body = __pyx_optional_args->response_with_body; if (__pyx_optional_args->__pyx_n > 6) { __pyx_v_auto_decompress = __pyx_optional_args->auto_decompress; } } } } } } } } /* "aiohttp/_http_parser.pyx":86 * size_t max_field_size=8190, payload_exception=None, * response_with_body=True, auto_decompress=True): * cparser.http_parser_init(self._cparser, mode) # <<<<<<<<<<<<<< * self._cparser.data = self * self._cparser.content_length = 0 */ 
http_parser_init(__pyx_v_self->_cparser, __pyx_v_mode); /* "aiohttp/_http_parser.pyx":87 * response_with_body=True, auto_decompress=True): * cparser.http_parser_init(self._cparser, mode) * self._cparser.data = self # <<<<<<<<<<<<<< * self._cparser.content_length = 0 * */ __pyx_v_self->_cparser->data = ((void *)__pyx_v_self); /* "aiohttp/_http_parser.pyx":88 * cparser.http_parser_init(self._cparser, mode) * self._cparser.data = self * self._cparser.content_length = 0 # <<<<<<<<<<<<<< * * cparser.http_parser_settings_init(self._csettings) */ __pyx_v_self->_cparser->content_length = 0; /* "aiohttp/_http_parser.pyx":90 * self._cparser.content_length = 0 * * cparser.http_parser_settings_init(self._csettings) # <<<<<<<<<<<<<< * * self._protocol = protocol */ http_parser_settings_init(__pyx_v_self->_csettings); /* "aiohttp/_http_parser.pyx":92 * cparser.http_parser_settings_init(self._csettings) * * self._protocol = protocol # <<<<<<<<<<<<<< * self._loop = loop * self._timer = timer */ __Pyx_INCREF(__pyx_v_protocol); __Pyx_GIVEREF(__pyx_v_protocol); __Pyx_GOTREF(__pyx_v_self->_protocol); __Pyx_DECREF(__pyx_v_self->_protocol); __pyx_v_self->_protocol = __pyx_v_protocol; /* "aiohttp/_http_parser.pyx":93 * * self._protocol = protocol * self._loop = loop # <<<<<<<<<<<<<< * self._timer = timer * */ __Pyx_INCREF(__pyx_v_loop); __Pyx_GIVEREF(__pyx_v_loop); __Pyx_GOTREF(__pyx_v_self->_loop); __Pyx_DECREF(__pyx_v_self->_loop); __pyx_v_self->_loop = __pyx_v_loop; /* "aiohttp/_http_parser.pyx":94 * self._protocol = protocol * self._loop = loop * self._timer = timer # <<<<<<<<<<<<<< * * self._buf = bytearray() */ __Pyx_INCREF(__pyx_v_timer); __Pyx_GIVEREF(__pyx_v_timer); __Pyx_GOTREF(__pyx_v_self->_timer); __Pyx_DECREF(__pyx_v_self->_timer); __pyx_v_self->_timer = __pyx_v_timer; /* "aiohttp/_http_parser.pyx":96 * self._timer = timer * * self._buf = bytearray() # <<<<<<<<<<<<<< * self._payload = None * self._payload_error = 0 */ __pyx_t_1 = __Pyx_PyObject_Call(((PyObject 
*)(&PyByteArray_Type)), __pyx_empty_tuple, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 96, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GIVEREF(__pyx_t_1); __Pyx_GOTREF(__pyx_v_self->_buf); __Pyx_DECREF(__pyx_v_self->_buf); __pyx_v_self->_buf = ((PyObject*)__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":97 * * self._buf = bytearray() * self._payload = None # <<<<<<<<<<<<<< * self._payload_error = 0 * self._payload_exception = payload_exception */ __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); __Pyx_GOTREF(__pyx_v_self->_payload); __Pyx_DECREF(__pyx_v_self->_payload); __pyx_v_self->_payload = Py_None; /* "aiohttp/_http_parser.pyx":98 * self._buf = bytearray() * self._payload = None * self._payload_error = 0 # <<<<<<<<<<<<<< * self._payload_exception = payload_exception * self._messages = [] */ __pyx_v_self->_payload_error = 0; /* "aiohttp/_http_parser.pyx":99 * self._payload = None * self._payload_error = 0 * self._payload_exception = payload_exception # <<<<<<<<<<<<<< * self._messages = [] * */ __Pyx_INCREF(__pyx_v_payload_exception); __Pyx_GIVEREF(__pyx_v_payload_exception); __Pyx_GOTREF(__pyx_v_self->_payload_exception); __Pyx_DECREF(__pyx_v_self->_payload_exception); __pyx_v_self->_payload_exception = __pyx_v_payload_exception; /* "aiohttp/_http_parser.pyx":100 * self._payload_error = 0 * self._payload_exception = payload_exception * self._messages = [] # <<<<<<<<<<<<<< * * self._header_name = None */ __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 100, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GIVEREF(__pyx_t_1); __Pyx_GOTREF(__pyx_v_self->_messages); __Pyx_DECREF(__pyx_v_self->_messages); __pyx_v_self->_messages = ((PyObject*)__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":102 * self._messages = [] * * self._header_name = None # <<<<<<<<<<<<<< * self._header_value = None * self._raw_header_name = None */ __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); __Pyx_GOTREF(__pyx_v_self->_header_name); 
__Pyx_DECREF(__pyx_v_self->_header_name); __pyx_v_self->_header_name = ((PyObject*)Py_None); /* "aiohttp/_http_parser.pyx":103 * * self._header_name = None * self._header_value = None # <<<<<<<<<<<<<< * self._raw_header_name = None * self._raw_header_value = None */ __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); __Pyx_GOTREF(__pyx_v_self->_header_value); __Pyx_DECREF(__pyx_v_self->_header_value); __pyx_v_self->_header_value = ((PyObject*)Py_None); /* "aiohttp/_http_parser.pyx":104 * self._header_name = None * self._header_value = None * self._raw_header_name = None # <<<<<<<<<<<<<< * self._raw_header_value = None * */ __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); __Pyx_GOTREF(__pyx_v_self->_raw_header_name); __Pyx_DECREF(__pyx_v_self->_raw_header_name); __pyx_v_self->_raw_header_name = ((PyObject*)Py_None); /* "aiohttp/_http_parser.pyx":105 * self._header_value = None * self._raw_header_name = None * self._raw_header_value = None # <<<<<<<<<<<<<< * * self._max_line_size = max_line_size */ __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); __Pyx_GOTREF(__pyx_v_self->_raw_header_value); __Pyx_DECREF(__pyx_v_self->_raw_header_value); __pyx_v_self->_raw_header_value = ((PyObject*)Py_None); /* "aiohttp/_http_parser.pyx":107 * self._raw_header_value = None * * self._max_line_size = max_line_size # <<<<<<<<<<<<<< * self._max_headers = max_headers * self._max_field_size = max_field_size */ __pyx_v_self->_max_line_size = __pyx_v_max_line_size; /* "aiohttp/_http_parser.pyx":108 * * self._max_line_size = max_line_size * self._max_headers = max_headers # <<<<<<<<<<<<<< * self._max_field_size = max_field_size * self._response_with_body = response_with_body */ __pyx_v_self->_max_headers = __pyx_v_max_headers; /* "aiohttp/_http_parser.pyx":109 * self._max_line_size = max_line_size * self._max_headers = max_headers * self._max_field_size = max_field_size # <<<<<<<<<<<<<< * self._response_with_body = response_with_body * self._upgraded = False */ __pyx_v_self->_max_field_size = 
__pyx_v_max_field_size; /* "aiohttp/_http_parser.pyx":110 * self._max_headers = max_headers * self._max_field_size = max_field_size * self._response_with_body = response_with_body # <<<<<<<<<<<<<< * self._upgraded = False * self._auto_decompress = auto_decompress */ __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_v_response_with_body); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 110, __pyx_L1_error) __pyx_v_self->_response_with_body = __pyx_t_2; /* "aiohttp/_http_parser.pyx":111 * self._max_field_size = max_field_size * self._response_with_body = response_with_body * self._upgraded = False # <<<<<<<<<<<<<< * self._auto_decompress = auto_decompress * */ __pyx_v_self->_upgraded = 0; /* "aiohttp/_http_parser.pyx":112 * self._response_with_body = response_with_body * self._upgraded = False * self._auto_decompress = auto_decompress # <<<<<<<<<<<<<< * * self._csettings.on_url = cb_on_url */ __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_v_auto_decompress); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 112, __pyx_L1_error) __pyx_v_self->_auto_decompress = __pyx_t_2; /* "aiohttp/_http_parser.pyx":114 * self._auto_decompress = auto_decompress * * self._csettings.on_url = cb_on_url # <<<<<<<<<<<<<< * self._csettings.on_status = cb_on_status * self._csettings.on_header_field = cb_on_header_field */ __pyx_v_self->_csettings->on_url = __pyx_f_7aiohttp_12_http_parser_cb_on_url; /* "aiohttp/_http_parser.pyx":115 * * self._csettings.on_url = cb_on_url * self._csettings.on_status = cb_on_status # <<<<<<<<<<<<<< * self._csettings.on_header_field = cb_on_header_field * self._csettings.on_header_value = cb_on_header_value */ __pyx_v_self->_csettings->on_status = __pyx_f_7aiohttp_12_http_parser_cb_on_status; /* "aiohttp/_http_parser.pyx":116 * self._csettings.on_url = cb_on_url * self._csettings.on_status = cb_on_status * self._csettings.on_header_field = cb_on_header_field # <<<<<<<<<<<<<< * self._csettings.on_header_value = cb_on_header_value * 
self._csettings.on_headers_complete = cb_on_headers_complete */ __pyx_v_self->_csettings->on_header_field = __pyx_f_7aiohttp_12_http_parser_cb_on_header_field; /* "aiohttp/_http_parser.pyx":117 * self._csettings.on_status = cb_on_status * self._csettings.on_header_field = cb_on_header_field * self._csettings.on_header_value = cb_on_header_value # <<<<<<<<<<<<<< * self._csettings.on_headers_complete = cb_on_headers_complete * self._csettings.on_body = cb_on_body */ __pyx_v_self->_csettings->on_header_value = __pyx_f_7aiohttp_12_http_parser_cb_on_header_value; /* "aiohttp/_http_parser.pyx":118 * self._csettings.on_header_field = cb_on_header_field * self._csettings.on_header_value = cb_on_header_value * self._csettings.on_headers_complete = cb_on_headers_complete # <<<<<<<<<<<<<< * self._csettings.on_body = cb_on_body * self._csettings.on_message_begin = cb_on_message_begin */ __pyx_v_self->_csettings->on_headers_complete = __pyx_f_7aiohttp_12_http_parser_cb_on_headers_complete; /* "aiohttp/_http_parser.pyx":119 * self._csettings.on_header_value = cb_on_header_value * self._csettings.on_headers_complete = cb_on_headers_complete * self._csettings.on_body = cb_on_body # <<<<<<<<<<<<<< * self._csettings.on_message_begin = cb_on_message_begin * self._csettings.on_message_complete = cb_on_message_complete */ __pyx_v_self->_csettings->on_body = __pyx_f_7aiohttp_12_http_parser_cb_on_body; /* "aiohttp/_http_parser.pyx":120 * self._csettings.on_headers_complete = cb_on_headers_complete * self._csettings.on_body = cb_on_body * self._csettings.on_message_begin = cb_on_message_begin # <<<<<<<<<<<<<< * self._csettings.on_message_complete = cb_on_message_complete * self._csettings.on_chunk_header = cb_on_chunk_header */ __pyx_v_self->_csettings->on_message_begin = __pyx_f_7aiohttp_12_http_parser_cb_on_message_begin; /* "aiohttp/_http_parser.pyx":121 * self._csettings.on_body = cb_on_body * self._csettings.on_message_begin = cb_on_message_begin * self._csettings.on_message_complete 
= cb_on_message_complete # <<<<<<<<<<<<<< * self._csettings.on_chunk_header = cb_on_chunk_header * self._csettings.on_chunk_complete = cb_on_chunk_complete */ __pyx_v_self->_csettings->on_message_complete = __pyx_f_7aiohttp_12_http_parser_cb_on_message_complete; /* "aiohttp/_http_parser.pyx":122 * self._csettings.on_message_begin = cb_on_message_begin * self._csettings.on_message_complete = cb_on_message_complete * self._csettings.on_chunk_header = cb_on_chunk_header # <<<<<<<<<<<<<< * self._csettings.on_chunk_complete = cb_on_chunk_complete * */ __pyx_v_self->_csettings->on_chunk_header = __pyx_f_7aiohttp_12_http_parser_cb_on_chunk_header; /* "aiohttp/_http_parser.pyx":123 * self._csettings.on_message_complete = cb_on_message_complete * self._csettings.on_chunk_header = cb_on_chunk_header * self._csettings.on_chunk_complete = cb_on_chunk_complete # <<<<<<<<<<<<<< * * self._last_error = None */ __pyx_v_self->_csettings->on_chunk_complete = __pyx_f_7aiohttp_12_http_parser_cb_on_chunk_complete; /* "aiohttp/_http_parser.pyx":125 * self._csettings.on_chunk_complete = cb_on_chunk_complete * * self._last_error = None # <<<<<<<<<<<<<< * * cdef _process_header(self): */ __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); __Pyx_GOTREF(__pyx_v_self->_last_error); __Pyx_DECREF(__pyx_v_self->_last_error); __pyx_v_self->_last_error = Py_None; /* "aiohttp/_http_parser.pyx":81 * PyMem_Free(self._csettings) * * cdef _init(self, cparser.http_parser_type mode, # <<<<<<<<<<<<<< * object protocol, object loop, object timer=None, * size_t max_line_size=8190, size_t max_headers=32768, */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._init", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":127 * self._last_error = None * * cdef 
_process_header(self): # <<<<<<<<<<<<<< * if self._header_name is not None: * name = self._header_name */ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__process_header(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { PyObject *__pyx_v_name = NULL; PyObject *__pyx_v_value = NULL; PyObject *__pyx_v_raw_name = NULL; PyObject *__pyx_v_raw_value = NULL; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations int __pyx_t_1; int __pyx_t_2; PyObject *__pyx_t_3 = NULL; int __pyx_t_4; __Pyx_RefNannySetupContext("_process_header", 0); /* "aiohttp/_http_parser.pyx":128 * * cdef _process_header(self): * if self._header_name is not None: # <<<<<<<<<<<<<< * name = self._header_name * value = self._header_value */ __pyx_t_1 = (__pyx_v_self->_header_name != ((PyObject*)Py_None)); __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { /* "aiohttp/_http_parser.pyx":129 * cdef _process_header(self): * if self._header_name is not None: * name = self._header_name # <<<<<<<<<<<<<< * value = self._header_value * */ __pyx_t_3 = __pyx_v_self->_header_name; __Pyx_INCREF(__pyx_t_3); __pyx_v_name = ((PyObject*)__pyx_t_3); __pyx_t_3 = 0; /* "aiohttp/_http_parser.pyx":130 * if self._header_name is not None: * name = self._header_name * value = self._header_value # <<<<<<<<<<<<<< * * self._header_name = self._header_value = None */ __pyx_t_3 = __pyx_v_self->_header_value; __Pyx_INCREF(__pyx_t_3); __pyx_v_value = ((PyObject*)__pyx_t_3); __pyx_t_3 = 0; /* "aiohttp/_http_parser.pyx":132 * value = self._header_value * * self._header_name = self._header_value = None # <<<<<<<<<<<<<< * self._headers.append((name, value)) * */ __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); __Pyx_GOTREF(__pyx_v_self->_header_name); __Pyx_DECREF(__pyx_v_self->_header_name); __pyx_v_self->_header_name = ((PyObject*)Py_None); __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); __Pyx_GOTREF(__pyx_v_self->_header_value); __Pyx_DECREF(__pyx_v_self->_header_value); __pyx_v_self->_header_value = 
((PyObject*)Py_None); /* "aiohttp/_http_parser.pyx":133 * * self._header_name = self._header_value = None * self._headers.append((name, value)) # <<<<<<<<<<<<<< * * raw_name = self._raw_header_name */ if (unlikely(__pyx_v_self->_headers == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); __PYX_ERR(0, 133, __pyx_L1_error) } __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 133, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_INCREF(__pyx_v_name); __Pyx_GIVEREF(__pyx_v_name); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_name); __Pyx_INCREF(__pyx_v_value); __Pyx_GIVEREF(__pyx_v_value); PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_value); __pyx_t_4 = __Pyx_PyList_Append(__pyx_v_self->_headers, __pyx_t_3); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(0, 133, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; /* "aiohttp/_http_parser.pyx":135 * self._headers.append((name, value)) * * raw_name = self._raw_header_name # <<<<<<<<<<<<<< * raw_value = self._raw_header_value * */ __pyx_t_3 = __pyx_v_self->_raw_header_name; __Pyx_INCREF(__pyx_t_3); __pyx_v_raw_name = ((PyObject*)__pyx_t_3); __pyx_t_3 = 0; /* "aiohttp/_http_parser.pyx":136 * * raw_name = self._raw_header_name * raw_value = self._raw_header_value # <<<<<<<<<<<<<< * * self._raw_header_name = self._raw_header_value = None */ __pyx_t_3 = __pyx_v_self->_raw_header_value; __Pyx_INCREF(__pyx_t_3); __pyx_v_raw_value = ((PyObject*)__pyx_t_3); __pyx_t_3 = 0; /* "aiohttp/_http_parser.pyx":138 * raw_value = self._raw_header_value * * self._raw_header_name = self._raw_header_value = None # <<<<<<<<<<<<<< * self._raw_headers.append((raw_name, raw_value)) * */ __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); __Pyx_GOTREF(__pyx_v_self->_raw_header_name); __Pyx_DECREF(__pyx_v_self->_raw_header_name); __pyx_v_self->_raw_header_name = ((PyObject*)Py_None); __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); __Pyx_GOTREF(__pyx_v_self->_raw_header_value); 
__Pyx_DECREF(__pyx_v_self->_raw_header_value); __pyx_v_self->_raw_header_value = ((PyObject*)Py_None); /* "aiohttp/_http_parser.pyx":139 * * self._raw_header_name = self._raw_header_value = None * self._raw_headers.append((raw_name, raw_value)) # <<<<<<<<<<<<<< * * cdef _on_header_field(self, str field, bytes raw_field): */ if (unlikely(__pyx_v_self->_raw_headers == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); __PYX_ERR(0, 139, __pyx_L1_error) } __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 139, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_INCREF(__pyx_v_raw_name); __Pyx_GIVEREF(__pyx_v_raw_name); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_raw_name); __Pyx_INCREF(__pyx_v_raw_value); __Pyx_GIVEREF(__pyx_v_raw_value); PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_raw_value); __pyx_t_4 = __Pyx_PyList_Append(__pyx_v_self->_raw_headers, __pyx_t_3); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(0, 139, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; /* "aiohttp/_http_parser.pyx":128 * * cdef _process_header(self): * if self._header_name is not None: # <<<<<<<<<<<<<< * name = self._header_name * value = self._header_value */ } /* "aiohttp/_http_parser.pyx":127 * self._last_error = None * * cdef _process_header(self): # <<<<<<<<<<<<<< * if self._header_name is not None: * name = self._header_name */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._process_header", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XDECREF(__pyx_v_name); __Pyx_XDECREF(__pyx_v_value); __Pyx_XDECREF(__pyx_v_raw_name); __Pyx_XDECREF(__pyx_v_raw_value); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":141 * self._raw_headers.append((raw_name, raw_value)) * * cdef _on_header_field(self, str field, bytes 
raw_field): # <<<<<<<<<<<<<< * if self._header_value is not None: * self._process_header() */ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_field(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_field, PyObject *__pyx_v_raw_field) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations int __pyx_t_1; int __pyx_t_2; PyObject *__pyx_t_3 = NULL; __Pyx_RefNannySetupContext("_on_header_field", 0); /* "aiohttp/_http_parser.pyx":142 * * cdef _on_header_field(self, str field, bytes raw_field): * if self._header_value is not None: # <<<<<<<<<<<<<< * self._process_header() * self._header_value = None */ __pyx_t_1 = (__pyx_v_self->_header_value != ((PyObject*)Py_None)); __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { /* "aiohttp/_http_parser.pyx":143 * cdef _on_header_field(self, str field, bytes raw_field): * if self._header_value is not None: * self._process_header() # <<<<<<<<<<<<<< * self._header_value = None * */ __pyx_t_3 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self->__pyx_vtab)->_process_header(__pyx_v_self); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 143, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; /* "aiohttp/_http_parser.pyx":144 * if self._header_value is not None: * self._process_header() * self._header_value = None # <<<<<<<<<<<<<< * * if self._header_name is None: */ __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); __Pyx_GOTREF(__pyx_v_self->_header_value); __Pyx_DECREF(__pyx_v_self->_header_value); __pyx_v_self->_header_value = ((PyObject*)Py_None); /* "aiohttp/_http_parser.pyx":142 * * cdef _on_header_field(self, str field, bytes raw_field): * if self._header_value is not None: # <<<<<<<<<<<<<< * self._process_header() * self._header_value = None */ } /* "aiohttp/_http_parser.pyx":146 * self._header_value = None * * if self._header_name is None: # <<<<<<<<<<<<<< * self._header_name = field * self._raw_header_name = raw_field */ __pyx_t_2 
= (__pyx_v_self->_header_name == ((PyObject*)Py_None)); __pyx_t_1 = (__pyx_t_2 != 0); if (__pyx_t_1) { /* "aiohttp/_http_parser.pyx":147 * * if self._header_name is None: * self._header_name = field # <<<<<<<<<<<<<< * self._raw_header_name = raw_field * else: */ __Pyx_INCREF(__pyx_v_field); __Pyx_GIVEREF(__pyx_v_field); __Pyx_GOTREF(__pyx_v_self->_header_name); __Pyx_DECREF(__pyx_v_self->_header_name); __pyx_v_self->_header_name = __pyx_v_field; /* "aiohttp/_http_parser.pyx":148 * if self._header_name is None: * self._header_name = field * self._raw_header_name = raw_field # <<<<<<<<<<<<<< * else: * self._header_name += field */ __Pyx_INCREF(__pyx_v_raw_field); __Pyx_GIVEREF(__pyx_v_raw_field); __Pyx_GOTREF(__pyx_v_self->_raw_header_name); __Pyx_DECREF(__pyx_v_self->_raw_header_name); __pyx_v_self->_raw_header_name = __pyx_v_raw_field; /* "aiohttp/_http_parser.pyx":146 * self._header_value = None * * if self._header_name is None: # <<<<<<<<<<<<<< * self._header_name = field * self._raw_header_name = raw_field */ goto __pyx_L4; } /* "aiohttp/_http_parser.pyx":150 * self._raw_header_name = raw_field * else: * self._header_name += field # <<<<<<<<<<<<<< * self._raw_header_name += raw_field * */ /*else*/ { __pyx_t_3 = __Pyx_PyUnicode_ConcatSafe(__pyx_v_self->_header_name, __pyx_v_field); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 150, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_GIVEREF(__pyx_t_3); __Pyx_GOTREF(__pyx_v_self->_header_name); __Pyx_DECREF(__pyx_v_self->_header_name); __pyx_v_self->_header_name = ((PyObject*)__pyx_t_3); __pyx_t_3 = 0; /* "aiohttp/_http_parser.pyx":151 * else: * self._header_name += field * self._raw_header_name += raw_field # <<<<<<<<<<<<<< * * cdef _on_header_value(self, str val, bytes raw_val): */ __pyx_t_3 = PyNumber_InPlaceAdd(__pyx_v_self->_raw_header_name, __pyx_v_raw_field); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 151, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_GIVEREF(__pyx_t_3); __Pyx_GOTREF(__pyx_v_self->_raw_header_name); 
__Pyx_DECREF(__pyx_v_self->_raw_header_name); __pyx_v_self->_raw_header_name = ((PyObject*)__pyx_t_3); __pyx_t_3 = 0; } __pyx_L4:; /* "aiohttp/_http_parser.pyx":141 * self._raw_headers.append((raw_name, raw_value)) * * cdef _on_header_field(self, str field, bytes raw_field): # <<<<<<<<<<<<<< * if self._header_value is not None: * self._process_header() */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._on_header_field", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":153 * self._raw_header_name += raw_field * * cdef _on_header_value(self, str val, bytes raw_val): # <<<<<<<<<<<<<< * if self._header_value is None: * self._header_value = val */ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_value(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_val, PyObject *__pyx_v_raw_val) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations int __pyx_t_1; int __pyx_t_2; PyObject *__pyx_t_3 = NULL; __Pyx_RefNannySetupContext("_on_header_value", 0); /* "aiohttp/_http_parser.pyx":154 * * cdef _on_header_value(self, str val, bytes raw_val): * if self._header_value is None: # <<<<<<<<<<<<<< * self._header_value = val * self._raw_header_value = raw_val */ __pyx_t_1 = (__pyx_v_self->_header_value == ((PyObject*)Py_None)); __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { /* "aiohttp/_http_parser.pyx":155 * cdef _on_header_value(self, str val, bytes raw_val): * if self._header_value is None: * self._header_value = val # <<<<<<<<<<<<<< * self._raw_header_value = raw_val * else: */ __Pyx_INCREF(__pyx_v_val); __Pyx_GIVEREF(__pyx_v_val); __Pyx_GOTREF(__pyx_v_self->_header_value); __Pyx_DECREF(__pyx_v_self->_header_value); __pyx_v_self->_header_value = __pyx_v_val; /* 
"aiohttp/_http_parser.pyx":156 * if self._header_value is None: * self._header_value = val * self._raw_header_value = raw_val # <<<<<<<<<<<<<< * else: * self._header_value += val */ __Pyx_INCREF(__pyx_v_raw_val); __Pyx_GIVEREF(__pyx_v_raw_val); __Pyx_GOTREF(__pyx_v_self->_raw_header_value); __Pyx_DECREF(__pyx_v_self->_raw_header_value); __pyx_v_self->_raw_header_value = __pyx_v_raw_val; /* "aiohttp/_http_parser.pyx":154 * * cdef _on_header_value(self, str val, bytes raw_val): * if self._header_value is None: # <<<<<<<<<<<<<< * self._header_value = val * self._raw_header_value = raw_val */ goto __pyx_L3; } /* "aiohttp/_http_parser.pyx":158 * self._raw_header_value = raw_val * else: * self._header_value += val # <<<<<<<<<<<<<< * self._raw_header_value += raw_val * */ /*else*/ { __pyx_t_3 = __Pyx_PyUnicode_ConcatSafe(__pyx_v_self->_header_value, __pyx_v_val); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 158, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_GIVEREF(__pyx_t_3); __Pyx_GOTREF(__pyx_v_self->_header_value); __Pyx_DECREF(__pyx_v_self->_header_value); __pyx_v_self->_header_value = ((PyObject*)__pyx_t_3); __pyx_t_3 = 0; /* "aiohttp/_http_parser.pyx":159 * else: * self._header_value += val * self._raw_header_value += raw_val # <<<<<<<<<<<<<< * * cdef _on_headers_complete(self, */ __pyx_t_3 = PyNumber_InPlaceAdd(__pyx_v_self->_raw_header_value, __pyx_v_raw_val); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 159, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_GIVEREF(__pyx_t_3); __Pyx_GOTREF(__pyx_v_self->_raw_header_value); __Pyx_DECREF(__pyx_v_self->_raw_header_value); __pyx_v_self->_raw_header_value = ((PyObject*)__pyx_t_3); __pyx_t_3 = 0; } __pyx_L3:; /* "aiohttp/_http_parser.pyx":153 * self._raw_header_name += raw_field * * cdef _on_header_value(self, str val, bytes raw_val): # <<<<<<<<<<<<<< * if self._header_value is None: * self._header_value = val */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; 
__Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._on_header_value", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":161 * self._raw_header_value += raw_val * * cdef _on_headers_complete(self, # <<<<<<<<<<<<<< * ENCODING='utf-8', * ENCODING_ERR='surrogateescape', */ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__on_headers_complete *__pyx_optional_args) { PyObject *__pyx_v_ENCODING = ((PyObject *)__pyx_kp_u_utf_8); PyObject *__pyx_v_ENCODING_ERR = ((PyObject *)__pyx_n_u_surrogateescape); PyObject *__pyx_v_CONTENT_ENCODING = __pyx_k_; PyObject *__pyx_v_SEC_WEBSOCKET_KEY1 = __pyx_k__2; /* "aiohttp/_http_parser.pyx":166 * CONTENT_ENCODING=hdrs.CONTENT_ENCODING, * SEC_WEBSOCKET_KEY1=hdrs.SEC_WEBSOCKET_KEY1, * SUPPORTED=('gzip', 'deflate', 'br')): # <<<<<<<<<<<<<< * self._process_header() * */ PyObject *__pyx_v_SUPPORTED = ((PyObject *)__pyx_tuple__3); char const *__pyx_v_method; int __pyx_v_should_close; PyObject *__pyx_v_upgrade = NULL; PyObject *__pyx_v_chunked = NULL; PyObject *__pyx_v_raw_headers = NULL; PyObject *__pyx_v_headers = NULL; PyObject *__pyx_v_encoding = NULL; PyObject *__pyx_v_enc = NULL; PyObject *__pyx_v_msg = NULL; PyObject *__pyx_v_payload = NULL; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; int __pyx_t_2; PyObject *__pyx_t_3 = NULL; PyObject *__pyx_t_4 = NULL; PyObject *__pyx_t_5 = NULL; int __pyx_t_6; PyObject *__pyx_t_7 = NULL; int __pyx_t_8; PyObject *__pyx_t_9 = NULL; PyObject *__pyx_t_10 = NULL; int __pyx_t_11; int __pyx_t_12; __Pyx_RefNannySetupContext("_on_headers_complete", 0); if (__pyx_optional_args) { if (__pyx_optional_args->__pyx_n > 0) { __pyx_v_ENCODING = __pyx_optional_args->ENCODING; 
if (__pyx_optional_args->__pyx_n > 1) { __pyx_v_ENCODING_ERR = __pyx_optional_args->ENCODING_ERR; if (__pyx_optional_args->__pyx_n > 2) { __pyx_v_CONTENT_ENCODING = __pyx_optional_args->CONTENT_ENCODING; if (__pyx_optional_args->__pyx_n > 3) { __pyx_v_SEC_WEBSOCKET_KEY1 = __pyx_optional_args->SEC_WEBSOCKET_KEY1; if (__pyx_optional_args->__pyx_n > 4) { __pyx_v_SUPPORTED = __pyx_optional_args->SUPPORTED; } } } } } } /* "aiohttp/_http_parser.pyx":167 * SEC_WEBSOCKET_KEY1=hdrs.SEC_WEBSOCKET_KEY1, * SUPPORTED=('gzip', 'deflate', 'br')): * self._process_header() # <<<<<<<<<<<<<< * * method = cparser.http_method_str( self._cparser.method) */ __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self->__pyx_vtab)->_process_header(__pyx_v_self); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 167, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":169 * self._process_header() * * method = cparser.http_method_str( self._cparser.method) # <<<<<<<<<<<<<< * should_close = not bool(cparser.http_should_keep_alive(self._cparser)) * upgrade = bool(self._cparser.upgrade) */ __pyx_v_method = http_method_str(((enum http_method)__pyx_v_self->_cparser->method)); /* "aiohttp/_http_parser.pyx":170 * * method = cparser.http_method_str( self._cparser.method) * should_close = not bool(cparser.http_should_keep_alive(self._cparser)) # <<<<<<<<<<<<<< * upgrade = bool(self._cparser.upgrade) * chunked = bool(self._cparser.flags & cparser.F_CHUNKED) */ __pyx_t_1 = __Pyx_PyInt_From_int(http_should_keep_alive(__pyx_v_self->_cparser)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 170, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 170, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_v_should_close = (!((!(!__pyx_t_2)) != 0)); /* "aiohttp/_http_parser.pyx":171 * method = cparser.http_method_str( self._cparser.method) * should_close = 
not bool(cparser.http_should_keep_alive(self._cparser)) * upgrade = bool(self._cparser.upgrade) # <<<<<<<<<<<<<< * chunked = bool(self._cparser.flags & cparser.F_CHUNKED) * */ __pyx_t_1 = __Pyx_PyInt_From_unsigned_int(__pyx_v_self->_cparser->upgrade); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 171, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 171, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_1 = __Pyx_PyBool_FromLong((!(!__pyx_t_2))); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 171, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_v_upgrade = __pyx_t_1; __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":172 * should_close = not bool(cparser.http_should_keep_alive(self._cparser)) * upgrade = bool(self._cparser.upgrade) * chunked = bool(self._cparser.flags & cparser.F_CHUNKED) # <<<<<<<<<<<<<< * * raw_headers = tuple(self._raw_headers) */ __pyx_t_1 = __Pyx_PyInt_From_unsigned_int((__pyx_v_self->_cparser->flags & F_CHUNKED)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 172, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 172, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_1 = __Pyx_PyBool_FromLong((!(!__pyx_t_2))); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 172, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_v_chunked = __pyx_t_1; __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":174 * chunked = bool(self._cparser.flags & cparser.F_CHUNKED) * * raw_headers = tuple(self._raw_headers) # <<<<<<<<<<<<<< * headers = CIMultiDict(self._headers) * */ if (unlikely(__pyx_v_self->_raw_headers == Py_None)) { PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); __PYX_ERR(0, 174, __pyx_L1_error) } __pyx_t_1 = PyList_AsTuple(__pyx_v_self->_raw_headers); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 174, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_v_raw_headers = ((PyObject*)__pyx_t_1); __pyx_t_1 = 0; 
/* "aiohttp/_http_parser.pyx":175 * * raw_headers = tuple(self._raw_headers) * headers = CIMultiDict(self._headers) # <<<<<<<<<<<<<< * * if upgrade or self._cparser.method == 5: # cparser.CONNECT: */ __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_CIMultiDict); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 175, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = NULL; if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); if (likely(__pyx_t_4)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); __Pyx_INCREF(__pyx_t_4); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_3, function); } } if (!__pyx_t_4) { __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_self->_headers); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 175, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); } else { #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_3)) { PyObject *__pyx_temp[2] = {__pyx_t_4, __pyx_v_self->_headers}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 175, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_GOTREF(__pyx_t_1); } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { PyObject *__pyx_temp[2] = {__pyx_t_4, __pyx_v_self->_headers}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 175, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_GOTREF(__pyx_t_1); } else #endif { __pyx_t_5 = PyTuple_New(1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 175, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_4); __pyx_t_4 = NULL; __Pyx_INCREF(__pyx_v_self->_headers); __Pyx_GIVEREF(__pyx_v_self->_headers); PyTuple_SET_ITEM(__pyx_t_5, 0+1, __pyx_v_self->_headers); __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 175, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); 
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; } } __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_v_headers = __pyx_t_1; __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":177 * headers = CIMultiDict(self._headers) * * if upgrade or self._cparser.method == 5: # cparser.CONNECT: # <<<<<<<<<<<<<< * self._upgraded = True * */ __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_v_upgrade); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 177, __pyx_L1_error) if (!__pyx_t_6) { } else { __pyx_t_2 = __pyx_t_6; goto __pyx_L4_bool_binop_done; } __pyx_t_6 = ((__pyx_v_self->_cparser->method == 5) != 0); __pyx_t_2 = __pyx_t_6; __pyx_L4_bool_binop_done:; if (__pyx_t_2) { /* "aiohttp/_http_parser.pyx":178 * * if upgrade or self._cparser.method == 5: # cparser.CONNECT: * self._upgraded = True # <<<<<<<<<<<<<< * * # do not support old websocket spec */ __pyx_v_self->_upgraded = 1; /* "aiohttp/_http_parser.pyx":177 * headers = CIMultiDict(self._headers) * * if upgrade or self._cparser.method == 5: # cparser.CONNECT: # <<<<<<<<<<<<<< * self._upgraded = True * */ } /* "aiohttp/_http_parser.pyx":181 * * # do not support old websocket spec * if SEC_WEBSOCKET_KEY1 in headers: # <<<<<<<<<<<<<< * raise InvalidHeader(SEC_WEBSOCKET_KEY1) * */ __pyx_t_2 = (__Pyx_PySequence_ContainsTF(__pyx_v_SEC_WEBSOCKET_KEY1, __pyx_v_headers, Py_EQ)); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 181, __pyx_L1_error) __pyx_t_6 = (__pyx_t_2 != 0); if (__pyx_t_6) { /* "aiohttp/_http_parser.pyx":182 * # do not support old websocket spec * if SEC_WEBSOCKET_KEY1 in headers: * raise InvalidHeader(SEC_WEBSOCKET_KEY1) # <<<<<<<<<<<<<< * * encoding = None */ __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_InvalidHeader); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 182, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_5 = NULL; if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_3); if (likely(__pyx_t_5)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); __Pyx_INCREF(__pyx_t_5); 
__Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_3, function); } } if (!__pyx_t_5) { __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_SEC_WEBSOCKET_KEY1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 182, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); } else { #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_3)) { PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_v_SEC_WEBSOCKET_KEY1}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 182, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_GOTREF(__pyx_t_1); } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_v_SEC_WEBSOCKET_KEY1}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 182, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_GOTREF(__pyx_t_1); } else #endif { __pyx_t_4 = PyTuple_New(1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 182, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_5); __pyx_t_5 = NULL; __Pyx_INCREF(__pyx_v_SEC_WEBSOCKET_KEY1); __Pyx_GIVEREF(__pyx_v_SEC_WEBSOCKET_KEY1); PyTuple_SET_ITEM(__pyx_t_4, 0+1, __pyx_v_SEC_WEBSOCKET_KEY1); __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_4, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 182, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; } } __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_Raise(__pyx_t_1, 0, 0, 0); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __PYX_ERR(0, 182, __pyx_L1_error) /* "aiohttp/_http_parser.pyx":181 * * # do not support old websocket spec * if SEC_WEBSOCKET_KEY1 in headers: # <<<<<<<<<<<<<< * raise InvalidHeader(SEC_WEBSOCKET_KEY1) * */ } /* "aiohttp/_http_parser.pyx":184 * raise InvalidHeader(SEC_WEBSOCKET_KEY1) * * encoding = None # <<<<<<<<<<<<<< * enc = headers.get(CONTENT_ENCODING) * if enc: */ __Pyx_INCREF(Py_None); 
__pyx_v_encoding = Py_None; /* "aiohttp/_http_parser.pyx":185 * * encoding = None * enc = headers.get(CONTENT_ENCODING) # <<<<<<<<<<<<<< * if enc: * enc = enc.lower() */ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_headers, __pyx_n_s_get); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 185, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); if (likely(__pyx_t_4)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); __Pyx_INCREF(__pyx_t_4); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_3, function); } } if (!__pyx_t_4) { __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_CONTENT_ENCODING); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 185, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); } else { #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_3)) { PyObject *__pyx_temp[2] = {__pyx_t_4, __pyx_v_CONTENT_ENCODING}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 185, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_GOTREF(__pyx_t_1); } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { PyObject *__pyx_temp[2] = {__pyx_t_4, __pyx_v_CONTENT_ENCODING}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 185, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_GOTREF(__pyx_t_1); } else #endif { __pyx_t_5 = PyTuple_New(1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 185, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_4); __pyx_t_4 = NULL; __Pyx_INCREF(__pyx_v_CONTENT_ENCODING); __Pyx_GIVEREF(__pyx_v_CONTENT_ENCODING); PyTuple_SET_ITEM(__pyx_t_5, 0+1, __pyx_v_CONTENT_ENCODING); __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 185, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); 
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; } } __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_v_enc = __pyx_t_1; __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":186 * encoding = None * enc = headers.get(CONTENT_ENCODING) * if enc: # <<<<<<<<<<<<<< * enc = enc.lower() * if enc in SUPPORTED: */ __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_v_enc); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 186, __pyx_L1_error) if (__pyx_t_6) { /* "aiohttp/_http_parser.pyx":187 * enc = headers.get(CONTENT_ENCODING) * if enc: * enc = enc.lower() # <<<<<<<<<<<<<< * if enc in SUPPORTED: * encoding = enc */ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_enc, __pyx_n_s_lower); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 187, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_5 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_3); if (likely(__pyx_t_5)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); __Pyx_INCREF(__pyx_t_5); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_3, function); } } if (__pyx_t_5) { __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 187, __pyx_L1_error) __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; } else { __pyx_t_1 = __Pyx_PyObject_CallNoArg(__pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 187, __pyx_L1_error) } __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_DECREF_SET(__pyx_v_enc, __pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":188 * if enc: * enc = enc.lower() * if enc in SUPPORTED: # <<<<<<<<<<<<<< * encoding = enc * */ __pyx_t_6 = (__Pyx_PySequence_ContainsTF(__pyx_v_enc, __pyx_v_SUPPORTED, Py_EQ)); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 188, __pyx_L1_error) __pyx_t_2 = (__pyx_t_6 != 0); if (__pyx_t_2) { /* "aiohttp/_http_parser.pyx":189 * enc = enc.lower() * if enc in SUPPORTED: * encoding = enc # <<<<<<<<<<<<<< * * if self._cparser.type == cparser.HTTP_REQUEST: */ __Pyx_INCREF(__pyx_v_enc); __Pyx_DECREF_SET(__pyx_v_encoding, 
__pyx_v_enc); /* "aiohttp/_http_parser.pyx":188 * if enc: * enc = enc.lower() * if enc in SUPPORTED: # <<<<<<<<<<<<<< * encoding = enc * */ } /* "aiohttp/_http_parser.pyx":186 * encoding = None * enc = headers.get(CONTENT_ENCODING) * if enc: # <<<<<<<<<<<<<< * enc = enc.lower() * if enc in SUPPORTED: */ } /* "aiohttp/_http_parser.pyx":191 * encoding = enc * * if self._cparser.type == cparser.HTTP_REQUEST: # <<<<<<<<<<<<<< * msg = RawRequestMessage( * method.decode(ENCODING, ENCODING_ERR), self._path, */ __pyx_t_2 = ((__pyx_v_self->_cparser->type == HTTP_REQUEST) != 0); if (__pyx_t_2) { /* "aiohttp/_http_parser.pyx":192 * * if self._cparser.type == cparser.HTTP_REQUEST: * msg = RawRequestMessage( # <<<<<<<<<<<<<< * method.decode(ENCODING, ENCODING_ERR), self._path, * self.http_version(), headers, raw_headers, */ __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_RawRequestMessage); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 192, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); /* "aiohttp/_http_parser.pyx":193 * if self._cparser.type == cparser.HTTP_REQUEST: * msg = RawRequestMessage( * method.decode(ENCODING, ENCODING_ERR), self._path, # <<<<<<<<<<<<<< * self.http_version(), headers, raw_headers, * should_close, encoding, upgrade, chunked, self._url) */ __pyx_t_4 = __Pyx_PyBytes_FromString(__pyx_v_method); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 193, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_decode); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 193, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __pyx_t_4 = NULL; __pyx_t_8 = 0; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_7); if (likely(__pyx_t_4)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); __Pyx_INCREF(__pyx_t_4); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_7, function); __pyx_t_8 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_7)) { PyObject 
*__pyx_temp[3] = {__pyx_t_4, __pyx_v_ENCODING, __pyx_v_ENCODING_ERR}; __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 193, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_GOTREF(__pyx_t_5); } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) { PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_v_ENCODING, __pyx_v_ENCODING_ERR}; __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 193, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_GOTREF(__pyx_t_5); } else #endif { __pyx_t_9 = PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 193, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_9); if (__pyx_t_4) { __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_t_4); __pyx_t_4 = NULL; } __Pyx_INCREF(__pyx_v_ENCODING); __Pyx_GIVEREF(__pyx_v_ENCODING); PyTuple_SET_ITEM(__pyx_t_9, 0+__pyx_t_8, __pyx_v_ENCODING); __Pyx_INCREF(__pyx_v_ENCODING_ERR); __Pyx_GIVEREF(__pyx_v_ENCODING_ERR); PyTuple_SET_ITEM(__pyx_t_9, 1+__pyx_t_8, __pyx_v_ENCODING_ERR); __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_9, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 193, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; } __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; /* "aiohttp/_http_parser.pyx":194 * msg = RawRequestMessage( * method.decode(ENCODING, ENCODING_ERR), self._path, * self.http_version(), headers, raw_headers, # <<<<<<<<<<<<<< * should_close, encoding, upgrade, chunked, self._url) * else: */ __pyx_t_9 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_http_version); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 194, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_9); __pyx_t_4 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_9))) { __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_9); if (likely(__pyx_t_4)) { PyObject* function = 
PyMethod_GET_FUNCTION(__pyx_t_9); __Pyx_INCREF(__pyx_t_4); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_9, function); } } if (__pyx_t_4) { __pyx_t_7 = __Pyx_PyObject_CallOneArg(__pyx_t_9, __pyx_t_4); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 194, __pyx_L1_error) __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; } else { __pyx_t_7 = __Pyx_PyObject_CallNoArg(__pyx_t_9); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 194, __pyx_L1_error) } __Pyx_GOTREF(__pyx_t_7); __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; /* "aiohttp/_http_parser.pyx":195 * method.decode(ENCODING, ENCODING_ERR), self._path, * self.http_version(), headers, raw_headers, * should_close, encoding, upgrade, chunked, self._url) # <<<<<<<<<<<<<< * else: * msg = RawResponseMessage( */ __pyx_t_9 = __Pyx_PyBool_FromLong(__pyx_v_should_close); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 195, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_9); __pyx_t_4 = NULL; __pyx_t_8 = 0; if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); if (likely(__pyx_t_4)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); __Pyx_INCREF(__pyx_t_4); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_3, function); __pyx_t_8 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_3)) { PyObject *__pyx_temp[11] = {__pyx_t_4, __pyx_t_5, __pyx_v_self->_path, __pyx_t_7, __pyx_v_headers, __pyx_v_raw_headers, __pyx_t_9, __pyx_v_encoding, __pyx_v_upgrade, __pyx_v_chunked, __pyx_v_self->_url}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_8, 10+__pyx_t_8); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 192, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { PyObject *__pyx_temp[11] = {__pyx_t_4, __pyx_t_5, __pyx_v_self->_path, __pyx_t_7, __pyx_v_headers, 
__pyx_v_raw_headers, __pyx_t_9, __pyx_v_encoding, __pyx_v_upgrade, __pyx_v_chunked, __pyx_v_self->_url}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_8, 10+__pyx_t_8); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 192, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; } else #endif { __pyx_t_10 = PyTuple_New(10+__pyx_t_8); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 192, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_10); if (__pyx_t_4) { __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_10, 0, __pyx_t_4); __pyx_t_4 = NULL; } __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_10, 0+__pyx_t_8, __pyx_t_5); __Pyx_INCREF(__pyx_v_self->_path); __Pyx_GIVEREF(__pyx_v_self->_path); PyTuple_SET_ITEM(__pyx_t_10, 1+__pyx_t_8, __pyx_v_self->_path); __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_10, 2+__pyx_t_8, __pyx_t_7); __Pyx_INCREF(__pyx_v_headers); __Pyx_GIVEREF(__pyx_v_headers); PyTuple_SET_ITEM(__pyx_t_10, 3+__pyx_t_8, __pyx_v_headers); __Pyx_INCREF(__pyx_v_raw_headers); __Pyx_GIVEREF(__pyx_v_raw_headers); PyTuple_SET_ITEM(__pyx_t_10, 4+__pyx_t_8, __pyx_v_raw_headers); __Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_10, 5+__pyx_t_8, __pyx_t_9); __Pyx_INCREF(__pyx_v_encoding); __Pyx_GIVEREF(__pyx_v_encoding); PyTuple_SET_ITEM(__pyx_t_10, 6+__pyx_t_8, __pyx_v_encoding); __Pyx_INCREF(__pyx_v_upgrade); __Pyx_GIVEREF(__pyx_v_upgrade); PyTuple_SET_ITEM(__pyx_t_10, 7+__pyx_t_8, __pyx_v_upgrade); __Pyx_INCREF(__pyx_v_chunked); __Pyx_GIVEREF(__pyx_v_chunked); PyTuple_SET_ITEM(__pyx_t_10, 8+__pyx_t_8, __pyx_v_chunked); __Pyx_INCREF(__pyx_v_self->_url); __Pyx_GIVEREF(__pyx_v_self->_url); PyTuple_SET_ITEM(__pyx_t_10, 9+__pyx_t_8, __pyx_v_self->_url); __pyx_t_5 = 0; __pyx_t_7 = 0; __pyx_t_9 = 0; __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_10, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 192, __pyx_L1_error) 
__Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; } __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_v_msg = __pyx_t_1; __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":191 * encoding = enc * * if self._cparser.type == cparser.HTTP_REQUEST: # <<<<<<<<<<<<<< * msg = RawRequestMessage( * method.decode(ENCODING, ENCODING_ERR), self._path, */ goto __pyx_L9; } /* "aiohttp/_http_parser.pyx":197 * should_close, encoding, upgrade, chunked, self._url) * else: * msg = RawResponseMessage( # <<<<<<<<<<<<<< * self.http_version(), self._cparser.status_code, self._reason, * headers, raw_headers, should_close, encoding, */ /*else*/ { __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_RawResponseMessage); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 197, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); /* "aiohttp/_http_parser.pyx":198 * else: * msg = RawResponseMessage( * self.http_version(), self._cparser.status_code, self._reason, # <<<<<<<<<<<<<< * headers, raw_headers, should_close, encoding, * upgrade, chunked) */ __pyx_t_9 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_http_version); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 198, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_9); __pyx_t_7 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_9))) { __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_9); if (likely(__pyx_t_7)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_9); __Pyx_INCREF(__pyx_t_7); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_9, function); } } if (__pyx_t_7) { __pyx_t_10 = __Pyx_PyObject_CallOneArg(__pyx_t_9, __pyx_t_7); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 198, __pyx_L1_error) __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; } else { __pyx_t_10 = __Pyx_PyObject_CallNoArg(__pyx_t_9); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 198, __pyx_L1_error) } __Pyx_GOTREF(__pyx_t_10); __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; __pyx_t_9 = __Pyx_PyInt_From_unsigned_int(__pyx_v_self->_cparser->status_code); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 198, __pyx_L1_error) 
__Pyx_GOTREF(__pyx_t_9); /* "aiohttp/_http_parser.pyx":199 * msg = RawResponseMessage( * self.http_version(), self._cparser.status_code, self._reason, * headers, raw_headers, should_close, encoding, # <<<<<<<<<<<<<< * upgrade, chunked) * */ __pyx_t_7 = __Pyx_PyBool_FromLong(__pyx_v_should_close); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 199, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); /* "aiohttp/_http_parser.pyx":200 * self.http_version(), self._cparser.status_code, self._reason, * headers, raw_headers, should_close, encoding, * upgrade, chunked) # <<<<<<<<<<<<<< * * if (self._cparser.content_length > 0 or chunked or */ __pyx_t_5 = NULL; __pyx_t_8 = 0; if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_3); if (likely(__pyx_t_5)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); __Pyx_INCREF(__pyx_t_5); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_3, function); __pyx_t_8 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_3)) { PyObject *__pyx_temp[10] = {__pyx_t_5, __pyx_t_10, __pyx_t_9, __pyx_v_self->_reason, __pyx_v_headers, __pyx_v_raw_headers, __pyx_t_7, __pyx_v_encoding, __pyx_v_upgrade, __pyx_v_chunked}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_8, 9+__pyx_t_8); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 197, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { PyObject *__pyx_temp[10] = {__pyx_t_5, __pyx_t_10, __pyx_t_9, __pyx_v_self->_reason, __pyx_v_headers, __pyx_v_raw_headers, __pyx_t_7, __pyx_v_encoding, __pyx_v_upgrade, __pyx_v_chunked}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_8, 9+__pyx_t_8); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 197, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; 
__Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; } else #endif { __pyx_t_4 = PyTuple_New(9+__pyx_t_8); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 197, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); if (__pyx_t_5) { __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_5); __pyx_t_5 = NULL; } __Pyx_GIVEREF(__pyx_t_10); PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_8, __pyx_t_10); __Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_8, __pyx_t_9); __Pyx_INCREF(__pyx_v_self->_reason); __Pyx_GIVEREF(__pyx_v_self->_reason); PyTuple_SET_ITEM(__pyx_t_4, 2+__pyx_t_8, __pyx_v_self->_reason); __Pyx_INCREF(__pyx_v_headers); __Pyx_GIVEREF(__pyx_v_headers); PyTuple_SET_ITEM(__pyx_t_4, 3+__pyx_t_8, __pyx_v_headers); __Pyx_INCREF(__pyx_v_raw_headers); __Pyx_GIVEREF(__pyx_v_raw_headers); PyTuple_SET_ITEM(__pyx_t_4, 4+__pyx_t_8, __pyx_v_raw_headers); __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_4, 5+__pyx_t_8, __pyx_t_7); __Pyx_INCREF(__pyx_v_encoding); __Pyx_GIVEREF(__pyx_v_encoding); PyTuple_SET_ITEM(__pyx_t_4, 6+__pyx_t_8, __pyx_v_encoding); __Pyx_INCREF(__pyx_v_upgrade); __Pyx_GIVEREF(__pyx_v_upgrade); PyTuple_SET_ITEM(__pyx_t_4, 7+__pyx_t_8, __pyx_v_upgrade); __Pyx_INCREF(__pyx_v_chunked); __Pyx_GIVEREF(__pyx_v_chunked); PyTuple_SET_ITEM(__pyx_t_4, 8+__pyx_t_8, __pyx_v_chunked); __pyx_t_10 = 0; __pyx_t_9 = 0; __pyx_t_7 = 0; __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_4, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 197, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; } __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_v_msg = __pyx_t_1; __pyx_t_1 = 0; } __pyx_L9:; /* "aiohttp/_http_parser.pyx":202 * upgrade, chunked) * * if (self._cparser.content_length > 0 or chunked or # <<<<<<<<<<<<<< * self._cparser.method == 5): # CONNECT: 5 * payload = StreamReader( */ __pyx_t_6 = ((__pyx_v_self->_cparser->content_length > 0) != 0); if 
(!__pyx_t_6) { } else { __pyx_t_2 = __pyx_t_6; goto __pyx_L11_bool_binop_done; } __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_v_chunked); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(0, 202, __pyx_L1_error) if (!__pyx_t_6) { } else { __pyx_t_2 = __pyx_t_6; goto __pyx_L11_bool_binop_done; } /* "aiohttp/_http_parser.pyx":203 * * if (self._cparser.content_length > 0 or chunked or * self._cparser.method == 5): # CONNECT: 5 # <<<<<<<<<<<<<< * payload = StreamReader( * self._protocol, timer=self._timer, loop=self._loop) */ __pyx_t_6 = ((__pyx_v_self->_cparser->method == 5) != 0); __pyx_t_2 = __pyx_t_6; __pyx_L11_bool_binop_done:; /* "aiohttp/_http_parser.pyx":202 * upgrade, chunked) * * if (self._cparser.content_length > 0 or chunked or # <<<<<<<<<<<<<< * self._cparser.method == 5): # CONNECT: 5 * payload = StreamReader( */ if (__pyx_t_2) { /* "aiohttp/_http_parser.pyx":204 * if (self._cparser.content_length > 0 or chunked or * self._cparser.method == 5): # CONNECT: 5 * payload = StreamReader( # <<<<<<<<<<<<<< * self._protocol, timer=self._timer, loop=self._loop) * else: */ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_StreamReader); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 204, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); /* "aiohttp/_http_parser.pyx":205 * self._cparser.method == 5): # CONNECT: 5 * payload = StreamReader( * self._protocol, timer=self._timer, loop=self._loop) # <<<<<<<<<<<<<< * else: * payload = EMPTY_PAYLOAD */ __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 204, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_INCREF(__pyx_v_self->_protocol); __Pyx_GIVEREF(__pyx_v_self->_protocol); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_self->_protocol); __pyx_t_4 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 205, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); if (PyDict_SetItem(__pyx_t_4, __pyx_n_s_timer, __pyx_v_self->_timer) < 0) __PYX_ERR(0, 205, __pyx_L1_error) if (PyDict_SetItem(__pyx_t_4, __pyx_n_s_loop, __pyx_v_self->_loop) < 0) 
__PYX_ERR(0, 205, __pyx_L1_error) /* "aiohttp/_http_parser.pyx":204 * if (self._cparser.content_length > 0 or chunked or * self._cparser.method == 5): # CONNECT: 5 * payload = StreamReader( # <<<<<<<<<<<<<< * self._protocol, timer=self._timer, loop=self._loop) * else: */ __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, __pyx_t_4); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 204, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __pyx_v_payload = __pyx_t_7; __pyx_t_7 = 0; /* "aiohttp/_http_parser.pyx":202 * upgrade, chunked) * * if (self._cparser.content_length > 0 or chunked or # <<<<<<<<<<<<<< * self._cparser.method == 5): # CONNECT: 5 * payload = StreamReader( */ goto __pyx_L10; } /* "aiohttp/_http_parser.pyx":207 * self._protocol, timer=self._timer, loop=self._loop) * else: * payload = EMPTY_PAYLOAD # <<<<<<<<<<<<<< * * self._payload = payload */ /*else*/ { __pyx_t_7 = __Pyx_GetModuleGlobalName(__pyx_n_s_EMPTY_PAYLOAD); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 207, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __pyx_v_payload = __pyx_t_7; __pyx_t_7 = 0; } __pyx_L10:; /* "aiohttp/_http_parser.pyx":209 * payload = EMPTY_PAYLOAD * * self._payload = payload # <<<<<<<<<<<<<< * if encoding is not None and self._auto_decompress: * self._payload = DeflateBuffer(payload, encoding) */ __Pyx_INCREF(__pyx_v_payload); __Pyx_GIVEREF(__pyx_v_payload); __Pyx_GOTREF(__pyx_v_self->_payload); __Pyx_DECREF(__pyx_v_self->_payload); __pyx_v_self->_payload = __pyx_v_payload; /* "aiohttp/_http_parser.pyx":210 * * self._payload = payload * if encoding is not None and self._auto_decompress: # <<<<<<<<<<<<<< * self._payload = DeflateBuffer(payload, encoding) * */ __pyx_t_6 = (__pyx_v_encoding != Py_None); __pyx_t_11 = (__pyx_t_6 != 0); if (__pyx_t_11) { } else { __pyx_t_2 = __pyx_t_11; goto __pyx_L15_bool_binop_done; } __pyx_t_11 = (__pyx_v_self->_auto_decompress != 0); __pyx_t_2 = 
__pyx_t_11; __pyx_L15_bool_binop_done:; if (__pyx_t_2) { /* "aiohttp/_http_parser.pyx":211 * self._payload = payload * if encoding is not None and self._auto_decompress: * self._payload = DeflateBuffer(payload, encoding) # <<<<<<<<<<<<<< * * if not self._response_with_body: */ __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_DeflateBuffer); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 211, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_3 = NULL; __pyx_t_8 = 0; if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_4); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_4, function); __pyx_t_8 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_4)) { PyObject *__pyx_temp[3] = {__pyx_t_3, __pyx_v_payload, __pyx_v_encoding}; __pyx_t_7 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 211, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_GOTREF(__pyx_t_7); } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { PyObject *__pyx_temp[3] = {__pyx_t_3, __pyx_v_payload, __pyx_v_encoding}; __pyx_t_7 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 211, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_GOTREF(__pyx_t_7); } else #endif { __pyx_t_1 = PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 211, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); if (__pyx_t_3) { __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_3); __pyx_t_3 = NULL; } __Pyx_INCREF(__pyx_v_payload); __Pyx_GIVEREF(__pyx_v_payload); PyTuple_SET_ITEM(__pyx_t_1, 0+__pyx_t_8, __pyx_v_payload); __Pyx_INCREF(__pyx_v_encoding); __Pyx_GIVEREF(__pyx_v_encoding); PyTuple_SET_ITEM(__pyx_t_1, 1+__pyx_t_8, __pyx_v_encoding); __pyx_t_7 = 
__Pyx_PyObject_Call(__pyx_t_4, __pyx_t_1, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 211, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; } __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_GIVEREF(__pyx_t_7); __Pyx_GOTREF(__pyx_v_self->_payload); __Pyx_DECREF(__pyx_v_self->_payload); __pyx_v_self->_payload = __pyx_t_7; __pyx_t_7 = 0; /* "aiohttp/_http_parser.pyx":210 * * self._payload = payload * if encoding is not None and self._auto_decompress: # <<<<<<<<<<<<<< * self._payload = DeflateBuffer(payload, encoding) * */ } /* "aiohttp/_http_parser.pyx":213 * self._payload = DeflateBuffer(payload, encoding) * * if not self._response_with_body: # <<<<<<<<<<<<<< * payload = EMPTY_PAYLOAD * */ __pyx_t_2 = ((!(__pyx_v_self->_response_with_body != 0)) != 0); if (__pyx_t_2) { /* "aiohttp/_http_parser.pyx":214 * * if not self._response_with_body: * payload = EMPTY_PAYLOAD # <<<<<<<<<<<<<< * * self._messages.append((msg, payload)) */ __pyx_t_7 = __Pyx_GetModuleGlobalName(__pyx_n_s_EMPTY_PAYLOAD); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 214, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_DECREF_SET(__pyx_v_payload, __pyx_t_7); __pyx_t_7 = 0; /* "aiohttp/_http_parser.pyx":213 * self._payload = DeflateBuffer(payload, encoding) * * if not self._response_with_body: # <<<<<<<<<<<<<< * payload = EMPTY_PAYLOAD * */ } /* "aiohttp/_http_parser.pyx":216 * payload = EMPTY_PAYLOAD * * self._messages.append((msg, payload)) # <<<<<<<<<<<<<< * * cdef _on_message_complete(self): */ if (unlikely(__pyx_v_self->_messages == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "append"); __PYX_ERR(0, 216, __pyx_L1_error) } __pyx_t_7 = PyTuple_New(2); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 216, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_INCREF(__pyx_v_msg); __Pyx_GIVEREF(__pyx_v_msg); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_v_msg); __Pyx_INCREF(__pyx_v_payload); __Pyx_GIVEREF(__pyx_v_payload); PyTuple_SET_ITEM(__pyx_t_7, 1, 
__pyx_v_payload); __pyx_t_12 = __Pyx_PyList_Append(__pyx_v_self->_messages, __pyx_t_7); if (unlikely(__pyx_t_12 == ((int)-1))) __PYX_ERR(0, 216, __pyx_L1_error) __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; /* "aiohttp/_http_parser.pyx":161 * self._raw_header_value += raw_val * * cdef _on_headers_complete(self, # <<<<<<<<<<<<<< * ENCODING='utf-8', * ENCODING_ERR='surrogateescape', */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_5); __Pyx_XDECREF(__pyx_t_7); __Pyx_XDECREF(__pyx_t_9); __Pyx_XDECREF(__pyx_t_10); __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._on_headers_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XDECREF(__pyx_v_upgrade); __Pyx_XDECREF(__pyx_v_chunked); __Pyx_XDECREF(__pyx_v_raw_headers); __Pyx_XDECREF(__pyx_v_headers); __Pyx_XDECREF(__pyx_v_encoding); __Pyx_XDECREF(__pyx_v_enc); __Pyx_XDECREF(__pyx_v_msg); __Pyx_XDECREF(__pyx_v_payload); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":218 * self._messages.append((msg, payload)) * * cdef _on_message_complete(self): # <<<<<<<<<<<<<< * self._payload.feed_eof() * self._payload = None */ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_message_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; __Pyx_RefNannySetupContext("_on_message_complete", 0); /* "aiohttp/_http_parser.pyx":219 * * cdef _on_message_complete(self): * self._payload.feed_eof() # <<<<<<<<<<<<<< * self._payload = None * */ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_payload, __pyx_n_s_feed_eof); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 219, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; if 
(CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); } } if (__pyx_t_3) { __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 219, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; } else { __pyx_t_1 = __Pyx_PyObject_CallNoArg(__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 219, __pyx_L1_error) } __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":220 * cdef _on_message_complete(self): * self._payload.feed_eof() * self._payload = None # <<<<<<<<<<<<<< * * cdef _on_chunk_header(self): */ __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); __Pyx_GOTREF(__pyx_v_self->_payload); __Pyx_DECREF(__pyx_v_self->_payload); __pyx_v_self->_payload = Py_None; /* "aiohttp/_http_parser.pyx":218 * self._messages.append((msg, payload)) * * cdef _on_message_complete(self): # <<<<<<<<<<<<<< * self._payload.feed_eof() * self._payload = None */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._on_message_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":222 * self._payload = None * * cdef _on_chunk_header(self): # <<<<<<<<<<<<<< * self._payload.begin_http_chunk_receiving() * */ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_chunk_header(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; 
PyObject *__pyx_t_3 = NULL; __Pyx_RefNannySetupContext("_on_chunk_header", 0); /* "aiohttp/_http_parser.pyx":223 * * cdef _on_chunk_header(self): * self._payload.begin_http_chunk_receiving() # <<<<<<<<<<<<<< * * cdef _on_chunk_complete(self): */ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_payload, __pyx_n_s_begin_http_chunk_receiving); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 223, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); } } if (__pyx_t_3) { __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 223, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; } else { __pyx_t_1 = __Pyx_PyObject_CallNoArg(__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 223, __pyx_L1_error) } __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":222 * self._payload = None * * cdef _on_chunk_header(self): # <<<<<<<<<<<<<< * self._payload.begin_http_chunk_receiving() * */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._on_chunk_header", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":225 * self._payload.begin_http_chunk_receiving() * * cdef _on_chunk_complete(self): # <<<<<<<<<<<<<< * self._payload.end_http_chunk_receiving() * */ static PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_chunk_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser 
*__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; __Pyx_RefNannySetupContext("_on_chunk_complete", 0); /* "aiohttp/_http_parser.pyx":226 * * cdef _on_chunk_complete(self): * self._payload.end_http_chunk_receiving() # <<<<<<<<<<<<<< * * cdef object _on_status_complete(self): */ __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_payload, __pyx_n_s_end_http_chunk_receiving); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 226, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); } } if (__pyx_t_3) { __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 226, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; } else { __pyx_t_1 = __Pyx_PyObject_CallNoArg(__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 226, __pyx_L1_error) } __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":225 * self._payload.begin_http_chunk_receiving() * * cdef _on_chunk_complete(self): # <<<<<<<<<<<<<< * self._payload.end_http_chunk_receiving() * */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_AddTraceback("aiohttp._http_parser.HttpParser._on_chunk_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":228 * self._payload.end_http_chunk_receiving() * * cdef object _on_status_complete(self): # <<<<<<<<<<<<<< * pass * */ static 
PyObject *__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_status_complete(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_on_status_complete", 0); /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":233 * ### Public API ### * * def http_version(self): # <<<<<<<<<<<<<< * cdef cparser.http_parser* parser = self._cparser * */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_5http_version(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_5http_version(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("http_version (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_12_http_parser_10HttpParser_4http_version(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_4http_version(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { struct http_parser *__pyx_v_parser; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations struct http_parser *__pyx_t_1; int __pyx_t_2; PyObject *__pyx_t_3 = NULL; PyObject *__pyx_t_4 = NULL; PyObject *__pyx_t_5 = NULL; PyObject *__pyx_t_6 = NULL; PyObject *__pyx_t_7 = NULL; int __pyx_t_8; PyObject *__pyx_t_9 = NULL; __Pyx_RefNannySetupContext("http_version", 0); /* "aiohttp/_http_parser.pyx":234 * * def http_version(self): * cdef cparser.http_parser* parser = self._cparser # <<<<<<<<<<<<<< * * if parser.http_major == 1: */ __pyx_t_1 = __pyx_v_self->_cparser; __pyx_v_parser = __pyx_t_1; /* "aiohttp/_http_parser.pyx":236 * cdef cparser.http_parser* parser = 
self._cparser * * if parser.http_major == 1: # <<<<<<<<<<<<<< * if parser.http_minor == 0: * return HttpVersion10 */ __pyx_t_2 = ((__pyx_v_parser->http_major == 1) != 0); if (__pyx_t_2) { /* "aiohttp/_http_parser.pyx":237 * * if parser.http_major == 1: * if parser.http_minor == 0: # <<<<<<<<<<<<<< * return HttpVersion10 * elif parser.http_minor == 1: */ switch (__pyx_v_parser->http_minor) { case 0: /* "aiohttp/_http_parser.pyx":238 * if parser.http_major == 1: * if parser.http_minor == 0: * return HttpVersion10 # <<<<<<<<<<<<<< * elif parser.http_minor == 1: * return HttpVersion11 */ __Pyx_XDECREF(__pyx_r); __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_HttpVersion10); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 238, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0; /* "aiohttp/_http_parser.pyx":237 * * if parser.http_major == 1: * if parser.http_minor == 0: # <<<<<<<<<<<<<< * return HttpVersion10 * elif parser.http_minor == 1: */ break; /* "aiohttp/_http_parser.pyx":239 * if parser.http_minor == 0: * return HttpVersion10 * elif parser.http_minor == 1: # <<<<<<<<<<<<<< * return HttpVersion11 * */ case 1: /* "aiohttp/_http_parser.pyx":240 * return HttpVersion10 * elif parser.http_minor == 1: * return HttpVersion11 # <<<<<<<<<<<<<< * * return HttpVersion(parser.http_major, parser.http_minor) */ __Pyx_XDECREF(__pyx_r); __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_HttpVersion11); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 240, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0; /* "aiohttp/_http_parser.pyx":239 * if parser.http_minor == 0: * return HttpVersion10 * elif parser.http_minor == 1: # <<<<<<<<<<<<<< * return HttpVersion11 * */ break; default: break; } /* "aiohttp/_http_parser.pyx":236 * cdef cparser.http_parser* parser = self._cparser * * if parser.http_major == 1: # <<<<<<<<<<<<<< * if parser.http_minor == 0: * return HttpVersion10 */ } /* "aiohttp/_http_parser.pyx":242 * return 
HttpVersion11 * * return HttpVersion(parser.http_major, parser.http_minor) # <<<<<<<<<<<<<< * * def feed_eof(self): */ __Pyx_XDECREF(__pyx_r); __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_HttpVersion); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 242, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_5 = __Pyx_PyInt_From_unsigned_short(__pyx_v_parser->http_major); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 242, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __pyx_t_6 = __Pyx_PyInt_From_unsigned_short(__pyx_v_parser->http_minor); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 242, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_7 = NULL; __pyx_t_8 = 0; if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_4); if (likely(__pyx_t_7)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); __Pyx_INCREF(__pyx_t_7); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_4, function); __pyx_t_8 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_4)) { PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_t_5, __pyx_t_6}; __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 242, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_t_5, __pyx_t_6}; __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 242, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; } else #endif { __pyx_t_9 = PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 242, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_9); if (__pyx_t_7) { __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_9, 0, 
__pyx_t_7); __pyx_t_7 = NULL; } __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_9, 0+__pyx_t_8, __pyx_t_5); __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_9, 1+__pyx_t_8, __pyx_t_6); __pyx_t_5 = 0; __pyx_t_6 = 0; __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_9, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 242, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; } __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __pyx_r = __pyx_t_3; __pyx_t_3 = 0; goto __pyx_L0; /* "aiohttp/_http_parser.pyx":233 * ### Public API ### * * def http_version(self): # <<<<<<<<<<<<<< * cdef cparser.http_parser* parser = self._cparser * */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_5); __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_7); __Pyx_XDECREF(__pyx_t_9); __Pyx_AddTraceback("aiohttp._http_parser.HttpParser.http_version", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":244 * return HttpVersion(parser.http_major, parser.http_minor) * * def feed_eof(self): # <<<<<<<<<<<<<< * cdef bytes desc * */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_7feed_eof(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_7feed_eof(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("feed_eof (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_eof(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_6feed_eof(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { PyObject *__pyx_v_desc = 
0; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations int __pyx_t_1; int __pyx_t_2; PyObject *__pyx_t_3 = NULL; PyObject *__pyx_t_4 = NULL; PyObject *__pyx_t_5 = NULL; PyObject *__pyx_t_6 = NULL; PyObject *__pyx_t_7 = NULL; __Pyx_RefNannySetupContext("feed_eof", 0); /* "aiohttp/_http_parser.pyx":247 * cdef bytes desc * * if self._payload is not None: # <<<<<<<<<<<<<< * if self._cparser.flags & cparser.F_CHUNKED: * raise TransferEncodingError( */ __pyx_t_1 = (__pyx_v_self->_payload != Py_None); __pyx_t_2 = (__pyx_t_1 != 0); if (__pyx_t_2) { /* "aiohttp/_http_parser.pyx":248 * * if self._payload is not None: * if self._cparser.flags & cparser.F_CHUNKED: # <<<<<<<<<<<<<< * raise TransferEncodingError( * "Not enough data for satisfy transfer length header.") */ __pyx_t_2 = ((__pyx_v_self->_cparser->flags & F_CHUNKED) != 0); if (__pyx_t_2) { /* "aiohttp/_http_parser.pyx":249 * if self._payload is not None: * if self._cparser.flags & cparser.F_CHUNKED: * raise TransferEncodingError( # <<<<<<<<<<<<<< * "Not enough data for satisfy transfer length header.") * elif self._cparser.flags & cparser.F_CONTENTLENGTH: */ __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_TransferEncodingError); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 249, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 249, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_Raise(__pyx_t_4, 0, 0, 0); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __PYX_ERR(0, 249, __pyx_L1_error) /* "aiohttp/_http_parser.pyx":248 * * if self._payload is not None: * if self._cparser.flags & cparser.F_CHUNKED: # <<<<<<<<<<<<<< * raise TransferEncodingError( * "Not enough data for satisfy transfer length header.") */ } /* "aiohttp/_http_parser.pyx":251 * raise TransferEncodingError( * "Not enough data for satisfy transfer length header.") * elif self._cparser.flags & cparser.F_CONTENTLENGTH: # <<<<<<<<<<<<<< * 
raise ContentLengthError( * "Not enough data for satisfy content length header.") */ __pyx_t_2 = ((__pyx_v_self->_cparser->flags & F_CONTENTLENGTH) != 0); if (__pyx_t_2) { /* "aiohttp/_http_parser.pyx":252 * "Not enough data for satisfy transfer length header.") * elif self._cparser.flags & cparser.F_CONTENTLENGTH: * raise ContentLengthError( # <<<<<<<<<<<<<< * "Not enough data for satisfy content length header.") * elif self._cparser.http_errno != cparser.HPE_OK: */ __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_ContentLengthError); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 252, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_tuple__5, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 252, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(0, 252, __pyx_L1_error) /* "aiohttp/_http_parser.pyx":251 * raise TransferEncodingError( * "Not enough data for satisfy transfer length header.") * elif self._cparser.flags & cparser.F_CONTENTLENGTH: # <<<<<<<<<<<<<< * raise ContentLengthError( * "Not enough data for satisfy content length header.") */ } /* "aiohttp/_http_parser.pyx":254 * raise ContentLengthError( * "Not enough data for satisfy content length header.") * elif self._cparser.http_errno != cparser.HPE_OK: # <<<<<<<<<<<<<< * desc = cparser.http_errno_description( * self._cparser.http_errno) */ __pyx_t_2 = ((__pyx_v_self->_cparser->http_errno != HPE_OK) != 0); if (__pyx_t_2) { /* "aiohttp/_http_parser.pyx":255 * "Not enough data for satisfy content length header.") * elif self._cparser.http_errno != cparser.HPE_OK: * desc = cparser.http_errno_description( # <<<<<<<<<<<<<< * self._cparser.http_errno) * raise PayloadEncodingError(desc.decode('latin-1')) */ __pyx_t_3 = __Pyx_PyBytes_FromString(http_errno_description(((enum http_errno)__pyx_v_self->_cparser->http_errno))); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 255, 
__pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_v_desc = ((PyObject*)__pyx_t_3); __pyx_t_3 = 0; /* "aiohttp/_http_parser.pyx":257 * desc = cparser.http_errno_description( * self._cparser.http_errno) * raise PayloadEncodingError(desc.decode('latin-1')) # <<<<<<<<<<<<<< * else: * self._payload.feed_eof() */ __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_PayloadEncodingError); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 257, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_5 = __Pyx_decode_bytes(__pyx_v_desc, 0, PY_SSIZE_T_MAX, NULL, NULL, PyUnicode_DecodeLatin1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 257, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __pyx_t_6 = NULL; if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_4); if (likely(__pyx_t_6)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); __Pyx_INCREF(__pyx_t_6); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_4, function); } } if (!__pyx_t_6) { __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_5); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 257, __pyx_L1_error) __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_GOTREF(__pyx_t_3); } else { #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_4)) { PyObject *__pyx_temp[2] = {__pyx_t_6, __pyx_t_5}; __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 257, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { PyObject *__pyx_temp[2] = {__pyx_t_6, __pyx_t_5}; __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 257, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; } else #endif { __pyx_t_7 = PyTuple_New(1+1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 257, __pyx_L1_error) 
__Pyx_GOTREF(__pyx_t_7); __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_6); __pyx_t_6 = NULL; __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_7, 0+1, __pyx_t_5); __pyx_t_5 = 0; __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_7, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 257, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; } } __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_Raise(__pyx_t_3, 0, 0, 0); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __PYX_ERR(0, 257, __pyx_L1_error) /* "aiohttp/_http_parser.pyx":254 * raise ContentLengthError( * "Not enough data for satisfy content length header.") * elif self._cparser.http_errno != cparser.HPE_OK: # <<<<<<<<<<<<<< * desc = cparser.http_errno_description( * self._cparser.http_errno) */ } /* "aiohttp/_http_parser.pyx":259 * raise PayloadEncodingError(desc.decode('latin-1')) * else: * self._payload.feed_eof() # <<<<<<<<<<<<<< * elif self._started: * self._on_headers_complete() */ /*else*/ { __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->_payload, __pyx_n_s_feed_eof); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 259, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_7 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_4); if (likely(__pyx_t_7)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); __Pyx_INCREF(__pyx_t_7); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_4, function); } } if (__pyx_t_7) { __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_7); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 259, __pyx_L1_error) __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; } else { __pyx_t_3 = __Pyx_PyObject_CallNoArg(__pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 259, __pyx_L1_error) } __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; } /* "aiohttp/_http_parser.pyx":247 * cdef bytes desc * * if self._payload is not None: # <<<<<<<<<<<<<< * if self._cparser.flags 
& cparser.F_CHUNKED: * raise TransferEncodingError( */ goto __pyx_L3; } /* "aiohttp/_http_parser.pyx":260 * else: * self._payload.feed_eof() * elif self._started: # <<<<<<<<<<<<<< * self._on_headers_complete() * if self._messages: */ __pyx_t_2 = (__pyx_v_self->_started != 0); if (__pyx_t_2) { /* "aiohttp/_http_parser.pyx":261 * self._payload.feed_eof() * elif self._started: * self._on_headers_complete() # <<<<<<<<<<<<<< * if self._messages: * return self._messages[-1][0] */ __pyx_t_3 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self->__pyx_vtab)->_on_headers_complete(__pyx_v_self, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 261, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; /* "aiohttp/_http_parser.pyx":262 * elif self._started: * self._on_headers_complete() * if self._messages: # <<<<<<<<<<<<<< * return self._messages[-1][0] * */ __pyx_t_2 = (__pyx_v_self->_messages != Py_None) && (PyList_GET_SIZE(__pyx_v_self->_messages) != 0); if (__pyx_t_2) { /* "aiohttp/_http_parser.pyx":263 * self._on_headers_complete() * if self._messages: * return self._messages[-1][0] # <<<<<<<<<<<<<< * * def feed_data(self, data): */ __Pyx_XDECREF(__pyx_r); if (unlikely(__pyx_v_self->_messages == Py_None)) { PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); __PYX_ERR(0, 263, __pyx_L1_error) } __pyx_t_3 = __Pyx_GetItemInt_List(__pyx_v_self->_messages, -1L, long, 1, __Pyx_PyInt_From_long, 1, 1, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 263, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = __Pyx_GetItemInt(__pyx_t_3, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 263, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_r = __pyx_t_4; __pyx_t_4 = 0; goto __pyx_L0; /* "aiohttp/_http_parser.pyx":262 * elif self._started: * self._on_headers_complete() * if self._messages: # <<<<<<<<<<<<<< * return self._messages[-1][0] * */ } /* 
"aiohttp/_http_parser.pyx":260 * else: * self._payload.feed_eof() * elif self._started: # <<<<<<<<<<<<<< * self._on_headers_complete() * if self._messages: */ } __pyx_L3:; /* "aiohttp/_http_parser.pyx":244 * return HttpVersion(parser.http_major, parser.http_minor) * * def feed_eof(self): # <<<<<<<<<<<<<< * cdef bytes desc * */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_5); __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_7); __Pyx_AddTraceback("aiohttp._http_parser.HttpParser.feed_eof", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XDECREF(__pyx_v_desc); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":265 * return self._messages[-1][0] * * def feed_data(self, data): # <<<<<<<<<<<<<< * cdef: * size_t data_len */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_9feed_data(PyObject *__pyx_v_self, PyObject *__pyx_v_data); /*proto*/ static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_9feed_data(PyObject *__pyx_v_self, PyObject *__pyx_v_data) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("feed_data (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_12_http_parser_10HttpParser_8feed_data(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self), ((PyObject *)__pyx_v_data)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_8feed_data(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, PyObject *__pyx_v_data) { size_t __pyx_v_data_len; size_t __pyx_v_nb; PyObject *__pyx_v_ex = NULL; PyObject *__pyx_v_messages = NULL; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations int __pyx_t_1; int __pyx_t_2; int __pyx_t_3; PyObject *__pyx_t_4 = NULL; PyObject *__pyx_t_5 = NULL; 
__Pyx_RefNannySetupContext("feed_data", 0); /* "aiohttp/_http_parser.pyx":270 * size_t nb * * PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE) # <<<<<<<<<<<<<< * data_len = self.py_buf.len * */ __pyx_t_1 = PyObject_GetBuffer(__pyx_v_data, (&__pyx_v_self->py_buf), PyBUF_SIMPLE); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 270, __pyx_L1_error) /* "aiohttp/_http_parser.pyx":271 * * PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE) * data_len = self.py_buf.len # <<<<<<<<<<<<<< * * nb = cparser.http_parser_execute( */ __pyx_v_data_len = ((size_t)__pyx_v_self->py_buf.len); /* "aiohttp/_http_parser.pyx":273 * data_len = self.py_buf.len * * nb = cparser.http_parser_execute( # <<<<<<<<<<<<<< * self._cparser, * self._csettings, */ __pyx_v_nb = http_parser_execute(__pyx_v_self->_cparser, __pyx_v_self->_csettings, ((char *)__pyx_v_self->py_buf.buf), __pyx_v_data_len); /* "aiohttp/_http_parser.pyx":279 * data_len) * * PyBuffer_Release(&self.py_buf) # <<<<<<<<<<<<<< * * # i am not sure about cparser.HPE_INVALID_METHOD, */ PyBuffer_Release((&__pyx_v_self->py_buf)); /* "aiohttp/_http_parser.pyx":284 * # seems get err for valid request * # test_client_functional.py::test_post_data_with_bytesio_file * if (self._cparser.http_errno != cparser.HPE_OK and # <<<<<<<<<<<<<< * (self._cparser.http_errno != cparser.HPE_INVALID_METHOD or * self._cparser.method == 0)): */ __pyx_t_3 = ((__pyx_v_self->_cparser->http_errno != HPE_OK) != 0); if (__pyx_t_3) { } else { __pyx_t_2 = __pyx_t_3; goto __pyx_L4_bool_binop_done; } /* "aiohttp/_http_parser.pyx":285 * # test_client_functional.py::test_post_data_with_bytesio_file * if (self._cparser.http_errno != cparser.HPE_OK and * (self._cparser.http_errno != cparser.HPE_INVALID_METHOD or # <<<<<<<<<<<<<< * self._cparser.method == 0)): * if self._payload_error == 0: */ __pyx_t_3 = ((__pyx_v_self->_cparser->http_errno != HPE_INVALID_METHOD) != 0); if (!__pyx_t_3) { } else { __pyx_t_2 = __pyx_t_3; goto __pyx_L4_bool_binop_done; } /* 
"aiohttp/_http_parser.pyx":286 * if (self._cparser.http_errno != cparser.HPE_OK and * (self._cparser.http_errno != cparser.HPE_INVALID_METHOD or * self._cparser.method == 0)): # <<<<<<<<<<<<<< * if self._payload_error == 0: * if self._last_error is not None: */ __pyx_t_3 = ((__pyx_v_self->_cparser->method == 0) != 0); __pyx_t_2 = __pyx_t_3; __pyx_L4_bool_binop_done:; /* "aiohttp/_http_parser.pyx":284 * # seems get err for valid request * # test_client_functional.py::test_post_data_with_bytesio_file * if (self._cparser.http_errno != cparser.HPE_OK and # <<<<<<<<<<<<<< * (self._cparser.http_errno != cparser.HPE_INVALID_METHOD or * self._cparser.method == 0)): */ if (__pyx_t_2) { /* "aiohttp/_http_parser.pyx":287 * (self._cparser.http_errno != cparser.HPE_INVALID_METHOD or * self._cparser.method == 0)): * if self._payload_error == 0: # <<<<<<<<<<<<<< * if self._last_error is not None: * ex = self._last_error */ __pyx_t_2 = ((__pyx_v_self->_payload_error == 0) != 0); if (__pyx_t_2) { /* "aiohttp/_http_parser.pyx":288 * self._cparser.method == 0)): * if self._payload_error == 0: * if self._last_error is not None: # <<<<<<<<<<<<<< * ex = self._last_error * self._last_error = None */ __pyx_t_2 = (__pyx_v_self->_last_error != Py_None); __pyx_t_3 = (__pyx_t_2 != 0); if (__pyx_t_3) { /* "aiohttp/_http_parser.pyx":289 * if self._payload_error == 0: * if self._last_error is not None: * ex = self._last_error # <<<<<<<<<<<<<< * self._last_error = None * else: */ __pyx_t_4 = __pyx_v_self->_last_error; __Pyx_INCREF(__pyx_t_4); __pyx_v_ex = __pyx_t_4; __pyx_t_4 = 0; /* "aiohttp/_http_parser.pyx":290 * if self._last_error is not None: * ex = self._last_error * self._last_error = None # <<<<<<<<<<<<<< * else: * ex = parser_error_from_errno( */ __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); __Pyx_GOTREF(__pyx_v_self->_last_error); __Pyx_DECREF(__pyx_v_self->_last_error); __pyx_v_self->_last_error = Py_None; /* "aiohttp/_http_parser.pyx":288 * self._cparser.method == 0)): * if 
self._payload_error == 0: * if self._last_error is not None: # <<<<<<<<<<<<<< * ex = self._last_error * self._last_error = None */ goto __pyx_L8; } /* "aiohttp/_http_parser.pyx":292 * self._last_error = None * else: * ex = parser_error_from_errno( # <<<<<<<<<<<<<< * self._cparser.http_errno) * self._payload = None */ /*else*/ { /* "aiohttp/_http_parser.pyx":293 * else: * ex = parser_error_from_errno( * self._cparser.http_errno) # <<<<<<<<<<<<<< * self._payload = None * raise ex */ __pyx_t_4 = __pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(((enum http_errno)__pyx_v_self->_cparser->http_errno)); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 292, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_v_ex = __pyx_t_4; __pyx_t_4 = 0; } __pyx_L8:; /* "aiohttp/_http_parser.pyx":294 * ex = parser_error_from_errno( * self._cparser.http_errno) * self._payload = None # <<<<<<<<<<<<<< * raise ex * */ __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); __Pyx_GOTREF(__pyx_v_self->_payload); __Pyx_DECREF(__pyx_v_self->_payload); __pyx_v_self->_payload = Py_None; /* "aiohttp/_http_parser.pyx":295 * self._cparser.http_errno) * self._payload = None * raise ex # <<<<<<<<<<<<<< * * if self._messages: */ __Pyx_Raise(__pyx_v_ex, 0, 0, 0); __PYX_ERR(0, 295, __pyx_L1_error) /* "aiohttp/_http_parser.pyx":287 * (self._cparser.http_errno != cparser.HPE_INVALID_METHOD or * self._cparser.method == 0)): * if self._payload_error == 0: # <<<<<<<<<<<<<< * if self._last_error is not None: * ex = self._last_error */ } /* "aiohttp/_http_parser.pyx":284 * # seems get err for valid request * # test_client_functional.py::test_post_data_with_bytesio_file * if (self._cparser.http_errno != cparser.HPE_OK and # <<<<<<<<<<<<<< * (self._cparser.http_errno != cparser.HPE_INVALID_METHOD or * self._cparser.method == 0)): */ } /* "aiohttp/_http_parser.pyx":297 * raise ex * * if self._messages: # <<<<<<<<<<<<<< * messages = self._messages * self._messages = [] */ __pyx_t_3 = (__pyx_v_self->_messages != Py_None) && 
(PyList_GET_SIZE(__pyx_v_self->_messages) != 0); if (__pyx_t_3) { /* "aiohttp/_http_parser.pyx":298 * * if self._messages: * messages = self._messages # <<<<<<<<<<<<<< * self._messages = [] * else: */ __pyx_t_4 = __pyx_v_self->_messages; __Pyx_INCREF(__pyx_t_4); __pyx_v_messages = __pyx_t_4; __pyx_t_4 = 0; /* "aiohttp/_http_parser.pyx":299 * if self._messages: * messages = self._messages * self._messages = [] # <<<<<<<<<<<<<< * else: * messages = () */ __pyx_t_4 = PyList_New(0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 299, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_GIVEREF(__pyx_t_4); __Pyx_GOTREF(__pyx_v_self->_messages); __Pyx_DECREF(__pyx_v_self->_messages); __pyx_v_self->_messages = ((PyObject*)__pyx_t_4); __pyx_t_4 = 0; /* "aiohttp/_http_parser.pyx":297 * raise ex * * if self._messages: # <<<<<<<<<<<<<< * messages = self._messages * self._messages = [] */ goto __pyx_L9; } /* "aiohttp/_http_parser.pyx":301 * self._messages = [] * else: * messages = () # <<<<<<<<<<<<<< * * if self._upgraded: */ /*else*/ { __Pyx_INCREF(__pyx_empty_tuple); __pyx_v_messages = __pyx_empty_tuple; } __pyx_L9:; /* "aiohttp/_http_parser.pyx":303 * messages = () * * if self._upgraded: # <<<<<<<<<<<<<< * return messages, True, data[nb:] * else: */ __pyx_t_3 = (__pyx_v_self->_upgraded != 0); if (__pyx_t_3) { /* "aiohttp/_http_parser.pyx":304 * * if self._upgraded: * return messages, True, data[nb:] # <<<<<<<<<<<<<< * else: * return messages, False, b'' */ __Pyx_XDECREF(__pyx_r); __pyx_t_4 = __Pyx_PyObject_GetSlice(__pyx_v_data, __pyx_v_nb, 0, NULL, NULL, NULL, 1, 0, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 304, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 304, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_INCREF(__pyx_v_messages); __Pyx_GIVEREF(__pyx_v_messages); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_v_messages); __Pyx_INCREF(Py_True); __Pyx_GIVEREF(Py_True); PyTuple_SET_ITEM(__pyx_t_5, 1, Py_True); 
__Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_t_4); __pyx_t_4 = 0; __pyx_r = __pyx_t_5; __pyx_t_5 = 0; goto __pyx_L0; /* "aiohttp/_http_parser.pyx":303 * messages = () * * if self._upgraded: # <<<<<<<<<<<<<< * return messages, True, data[nb:] * else: */ } /* "aiohttp/_http_parser.pyx":306 * return messages, True, data[nb:] * else: * return messages, False, b'' # <<<<<<<<<<<<<< * * */ /*else*/ { __Pyx_XDECREF(__pyx_r); __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 306, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_INCREF(__pyx_v_messages); __Pyx_GIVEREF(__pyx_v_messages); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_v_messages); __Pyx_INCREF(Py_False); __Pyx_GIVEREF(Py_False); PyTuple_SET_ITEM(__pyx_t_5, 1, Py_False); __Pyx_INCREF(__pyx_kp_b__6); __Pyx_GIVEREF(__pyx_kp_b__6); PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_kp_b__6); __pyx_r = __pyx_t_5; __pyx_t_5 = 0; goto __pyx_L0; } /* "aiohttp/_http_parser.pyx":265 * return self._messages[-1][0] * * def feed_data(self, data): # <<<<<<<<<<<<<< * cdef: * size_t data_len */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_5); __Pyx_AddTraceback("aiohttp._http_parser.HttpParser.feed_data", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XDECREF(__pyx_v_ex); __Pyx_XDECREF(__pyx_v_messages); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "(tree fragment)":1 * def __reduce_cython__(self): # <<<<<<<<<<<<<< * raise TypeError("no default __reduce__ due to non-trivial __cinit__") * def __setstate_cython__(self, __pyx_state): */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_11__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_11__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations 
__Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_12_http_parser_10HttpParser_10__reduce_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_10__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("__reduce_cython__", 0); /* "(tree fragment)":2 * def __reduce_cython__(self): * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< * def __setstate_cython__(self, __pyx_state): * raise TypeError("no default __reduce__ due to non-trivial __cinit__") */ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 2, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_Raise(__pyx_t_1, 0, 0, 0); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __PYX_ERR(1, 2, __pyx_L1_error) /* "(tree fragment)":1 * def __reduce_cython__(self): # <<<<<<<<<<<<<< * raise TypeError("no default __reduce__ due to non-trivial __cinit__") * def __setstate_cython__(self, __pyx_state): */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("aiohttp._http_parser.HttpParser.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "(tree fragment)":3 * def __reduce_cython__(self): * raise TypeError("no default __reduce__ due to non-trivial __cinit__") * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< * raise TypeError("no default __reduce__ due to non-trivial __cinit__") */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_13__setstate_cython__(PyObject *__pyx_v_self, PyObject 
*__pyx_v___pyx_state); /*proto*/ static PyObject *__pyx_pw_7aiohttp_12_http_parser_10HttpParser_13__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_12_http_parser_10HttpParser_12__setstate_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_12_http_parser_10HttpParser_12__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("__setstate_cython__", 0); /* "(tree fragment)":4 * raise TypeError("no default __reduce__ due to non-trivial __cinit__") * def __setstate_cython__(self, __pyx_state): * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< */ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__8, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_Raise(__pyx_t_1, 0, 0, 0); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __PYX_ERR(1, 4, __pyx_L1_error) /* "(tree fragment)":3 * def __reduce_cython__(self): * raise TypeError("no default __reduce__ due to non-trivial __cinit__") * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< * raise TypeError("no default __reduce__ due to non-trivial __cinit__") */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("aiohttp._http_parser.HttpParser.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":311 * cdef class 
HttpRequestParserC(HttpParser): * * def __init__(self, protocol, loop, timer=None, # <<<<<<<<<<<<<< * size_t max_line_size=8190, size_t max_headers=32768, * size_t max_field_size=8190, payload_exception=None, */ /* Python wrapper */ static int __pyx_pw_7aiohttp_12_http_parser_18HttpRequestParserC_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static int __pyx_pw_7aiohttp_12_http_parser_18HttpRequestParserC_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_protocol = 0; PyObject *__pyx_v_loop = 0; PyObject *__pyx_v_timer = 0; size_t __pyx_v_max_line_size; size_t __pyx_v_max_headers; size_t __pyx_v_max_field_size; PyObject *__pyx_v_payload_exception = 0; PyObject *__pyx_v_response_with_body = 0; CYTHON_UNUSED PyObject *__pyx_v_read_until_eof = 0; int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_protocol,&__pyx_n_s_loop,&__pyx_n_s_timer,&__pyx_n_s_max_line_size,&__pyx_n_s_max_headers,&__pyx_n_s_max_field_size,&__pyx_n_s_payload_exception,&__pyx_n_s_response_with_body,&__pyx_n_s_read_until_eof,0}; PyObject* values[9] = {0,0,0,0,0,0,0,0,0}; values[2] = ((PyObject *)Py_None); /* "aiohttp/_http_parser.pyx":313 * def __init__(self, protocol, loop, timer=None, * size_t max_line_size=8190, size_t max_headers=32768, * size_t max_field_size=8190, payload_exception=None, # <<<<<<<<<<<<<< * response_with_body=True, read_until_eof=False): * self._init(cparser.HTTP_REQUEST, protocol, loop, timer, */ values[6] = ((PyObject *)Py_None); /* "aiohttp/_http_parser.pyx":314 * size_t max_line_size=8190, size_t max_headers=32768, * size_t max_field_size=8190, payload_exception=None, * response_with_body=True, read_until_eof=False): # <<<<<<<<<<<<<< * self._init(cparser.HTTP_REQUEST, protocol, loop, timer, * max_line_size, max_headers, max_field_size, */ values[7] = ((PyObject *)Py_True); values[8] = 
((PyObject *)Py_False); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); CYTHON_FALLTHROUGH; case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); CYTHON_FALLTHROUGH; case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); CYTHON_FALLTHROUGH; case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); CYTHON_FALLTHROUGH; case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); CYTHON_FALLTHROUGH; case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); CYTHON_FALLTHROUGH; case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_protocol)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_loop)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("__init__", 0, 2, 9, 1); __PYX_ERR(0, 311, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 2: if (kw_args > 0) { PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_timer); if (value) { values[2] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 3: if (kw_args > 0) { PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_max_line_size); if (value) { values[3] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 4: if (kw_args > 0) { PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_max_headers); if (value) { values[4] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 5: if (kw_args > 0) { PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_max_field_size); if (value) { values[5] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 6: if (kw_args > 0) { PyObject* value = 
PyDict_GetItem(__pyx_kwds, __pyx_n_s_payload_exception); if (value) { values[6] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 7: if (kw_args > 0) { PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_response_with_body); if (value) { values[7] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 8: if (kw_args > 0) { PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_read_until_eof); if (value) { values[8] = value; kw_args--; } } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 311, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); CYTHON_FALLTHROUGH; case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); CYTHON_FALLTHROUGH; case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); CYTHON_FALLTHROUGH; case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); CYTHON_FALLTHROUGH; case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); CYTHON_FALLTHROUGH; case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); CYTHON_FALLTHROUGH; case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); values[0] = PyTuple_GET_ITEM(__pyx_args, 0); break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_protocol = values[0]; __pyx_v_loop = values[1]; __pyx_v_timer = values[2]; if (values[3]) { __pyx_v_max_line_size = __Pyx_PyInt_As_size_t(values[3]); if (unlikely((__pyx_v_max_line_size == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 312, __pyx_L3_error) } else { __pyx_v_max_line_size = ((size_t)0x1FFE); } if (values[4]) { __pyx_v_max_headers = __Pyx_PyInt_As_size_t(values[4]); if (unlikely((__pyx_v_max_headers == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 312, __pyx_L3_error) } else { __pyx_v_max_headers = ((size_t)0x8000); } if (values[5]) { __pyx_v_max_field_size = __Pyx_PyInt_As_size_t(values[5]); if (unlikely((__pyx_v_max_field_size == (size_t)-1) 
&& PyErr_Occurred())) __PYX_ERR(0, 313, __pyx_L3_error) } else { __pyx_v_max_field_size = ((size_t)0x1FFE); } __pyx_v_payload_exception = values[6]; __pyx_v_response_with_body = values[7]; __pyx_v_read_until_eof = values[8]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("__init__", 0, 2, 9, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 311, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("aiohttp._http_parser.HttpRequestParserC.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return -1; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18HttpRequestParserC___init__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParserC *)__pyx_v_self), __pyx_v_protocol, __pyx_v_loop, __pyx_v_timer, __pyx_v_max_line_size, __pyx_v_max_headers, __pyx_v_max_field_size, __pyx_v_payload_exception, __pyx_v_response_with_body, __pyx_v_read_until_eof); /* "aiohttp/_http_parser.pyx":311 * cdef class HttpRequestParserC(HttpParser): * * def __init__(self, protocol, loop, timer=None, # <<<<<<<<<<<<<< * size_t max_line_size=8190, size_t max_headers=32768, * size_t max_field_size=8190, payload_exception=None, */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_pf_7aiohttp_12_http_parser_18HttpRequestParserC___init__(struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParserC *__pyx_v_self, PyObject *__pyx_v_protocol, PyObject *__pyx_v_loop, PyObject *__pyx_v_timer, size_t __pyx_v_max_line_size, size_t __pyx_v_max_headers, size_t __pyx_v_max_field_size, PyObject *__pyx_v_payload_exception, PyObject *__pyx_v_response_with_body, CYTHON_UNUSED PyObject *__pyx_v_read_until_eof) { int __pyx_r; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init __pyx_t_2; __Pyx_RefNannySetupContext("__init__", 0); /* "aiohttp/_http_parser.pyx":315 * size_t max_field_size=8190, 
payload_exception=None, * response_with_body=True, read_until_eof=False): * self._init(cparser.HTTP_REQUEST, protocol, loop, timer, # <<<<<<<<<<<<<< * max_line_size, max_headers, max_field_size, * payload_exception, response_with_body) */ __pyx_t_2.__pyx_n = 6; __pyx_t_2.timer = __pyx_v_timer; __pyx_t_2.max_line_size = __pyx_v_max_line_size; __pyx_t_2.max_headers = __pyx_v_max_headers; __pyx_t_2.max_field_size = __pyx_v_max_field_size; __pyx_t_2.payload_exception = __pyx_v_payload_exception; __pyx_t_2.response_with_body = __pyx_v_response_with_body; __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpRequestParserC *)__pyx_v_self->__pyx_base.__pyx_vtab)->__pyx_base._init(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self), HTTP_REQUEST, __pyx_v_protocol, __pyx_v_loop, &__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 315, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":311 * cdef class HttpRequestParserC(HttpParser): * * def __init__(self, protocol, loop, timer=None, # <<<<<<<<<<<<<< * size_t max_line_size=8190, size_t max_headers=32768, * size_t max_field_size=8190, payload_exception=None, */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("aiohttp._http_parser.HttpRequestParserC.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":319 * payload_exception, response_with_body) * * cdef object _on_status_complete(self): # <<<<<<<<<<<<<< * cdef Py_buffer py_buf * if not self._buf: */ static PyObject *__pyx_f_7aiohttp_12_http_parser_18HttpRequestParserC__on_status_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParserC *__pyx_v_self) { Py_buffer __pyx_v_py_buf; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations int __pyx_t_1; int __pyx_t_2; PyObject *__pyx_t_3 = NULL; PyObject 
*__pyx_t_4 = NULL; PyObject *__pyx_t_5 = NULL; PyObject *__pyx_t_6 = NULL; int __pyx_t_7; PyObject *__pyx_t_8 = NULL; PyObject *__pyx_t_9 = NULL; int __pyx_t_10; char const *__pyx_t_11; PyObject *__pyx_t_12 = NULL; PyObject *__pyx_t_13 = NULL; PyObject *__pyx_t_14 = NULL; PyObject *__pyx_t_15 = NULL; PyObject *__pyx_t_16 = NULL; PyObject *__pyx_t_17 = NULL; __Pyx_RefNannySetupContext("_on_status_complete", 0); /* "aiohttp/_http_parser.pyx":321 * cdef object _on_status_complete(self): * cdef Py_buffer py_buf * if not self._buf: # <<<<<<<<<<<<<< * return * self._path = self._buf.decode('utf-8', 'surrogateescape') */ __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_self->__pyx_base._buf); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 321, __pyx_L1_error) __pyx_t_2 = ((!__pyx_t_1) != 0); if (__pyx_t_2) { /* "aiohttp/_http_parser.pyx":322 * cdef Py_buffer py_buf * if not self._buf: * return # <<<<<<<<<<<<<< * self._path = self._buf.decode('utf-8', 'surrogateescape') * if self._cparser.method == 5: # CONNECT */ __Pyx_XDECREF(__pyx_r); __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; /* "aiohttp/_http_parser.pyx":321 * cdef object _on_status_complete(self): * cdef Py_buffer py_buf * if not self._buf: # <<<<<<<<<<<<<< * return * self._path = self._buf.decode('utf-8', 'surrogateescape') */ } /* "aiohttp/_http_parser.pyx":323 * if not self._buf: * return * self._path = self._buf.decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< * if self._cparser.method == 5: # CONNECT * self._url = URL(self._path) */ if (unlikely(__pyx_v_self->__pyx_base._buf == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "decode"); __PYX_ERR(0, 323, __pyx_L1_error) } __pyx_t_3 = __Pyx_decode_bytearray(__pyx_v_self->__pyx_base._buf, 0, PY_SSIZE_T_MAX, NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 323, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); if 
(!(likely(PyUnicode_CheckExact(__pyx_t_3))||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(0, 323, __pyx_L1_error) __Pyx_GIVEREF(__pyx_t_3); __Pyx_GOTREF(__pyx_v_self->__pyx_base._path); __Pyx_DECREF(__pyx_v_self->__pyx_base._path); __pyx_v_self->__pyx_base._path = ((PyObject*)__pyx_t_3); __pyx_t_3 = 0; /* "aiohttp/_http_parser.pyx":324 * return * self._path = self._buf.decode('utf-8', 'surrogateescape') * if self._cparser.method == 5: # CONNECT # <<<<<<<<<<<<<< * self._url = URL(self._path) * else: */ __pyx_t_2 = ((__pyx_v_self->__pyx_base._cparser->method == 5) != 0); if (__pyx_t_2) { /* "aiohttp/_http_parser.pyx":325 * self._path = self._buf.decode('utf-8', 'surrogateescape') * if self._cparser.method == 5: # CONNECT * self._url = URL(self._path) # <<<<<<<<<<<<<< * else: * PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE) */ __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_URL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 325, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_5 = NULL; if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); if (likely(__pyx_t_5)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); __Pyx_INCREF(__pyx_t_5); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_4, function); } } if (!__pyx_t_5) { __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_v_self->__pyx_base._path); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 325, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); } else { #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_4)) { PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_v_self->__pyx_base._path}; __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 325, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_GOTREF(__pyx_t_3); } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { PyObject *__pyx_temp[2] = {__pyx_t_5, 
__pyx_v_self->__pyx_base._path}; __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 325, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_GOTREF(__pyx_t_3); } else #endif { __pyx_t_6 = PyTuple_New(1+1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 325, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_5); __pyx_t_5 = NULL; __Pyx_INCREF(__pyx_v_self->__pyx_base._path); __Pyx_GIVEREF(__pyx_v_self->__pyx_base._path); PyTuple_SET_ITEM(__pyx_t_6, 0+1, __pyx_v_self->__pyx_base._path); __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_6, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 325, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; } } __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_GIVEREF(__pyx_t_3); __Pyx_GOTREF(__pyx_v_self->__pyx_base._url); __Pyx_DECREF(__pyx_v_self->__pyx_base._url); __pyx_v_self->__pyx_base._url = __pyx_t_3; __pyx_t_3 = 0; /* "aiohttp/_http_parser.pyx":324 * return * self._path = self._buf.decode('utf-8', 'surrogateescape') * if self._cparser.method == 5: # CONNECT # <<<<<<<<<<<<<< * self._url = URL(self._path) * else: */ goto __pyx_L4; } /* "aiohttp/_http_parser.pyx":327 * self._url = URL(self._path) * else: * PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE) # <<<<<<<<<<<<<< * try: * self._url = _parse_url(py_buf.buf, */ /*else*/ { __pyx_t_3 = __pyx_v_self->__pyx_base._buf; __Pyx_INCREF(__pyx_t_3); __pyx_t_7 = PyObject_GetBuffer(__pyx_t_3, (&__pyx_v_py_buf), PyBUF_SIMPLE); if (unlikely(__pyx_t_7 == ((int)-1))) __PYX_ERR(0, 327, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; /* "aiohttp/_http_parser.pyx":328 * else: * PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE) * try: # <<<<<<<<<<<<<< * self._url = _parse_url(py_buf.buf, * py_buf.len) */ /*try:*/ { /* "aiohttp/_http_parser.pyx":329 * PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE) * try: * self._url = 
_parse_url(py_buf.buf, # <<<<<<<<<<<<<< * py_buf.len) * finally: */ __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_parse_url); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 329, __pyx_L6_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_6 = __Pyx_PyBytes_FromString(((char *)__pyx_v_py_buf.buf)); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 329, __pyx_L6_error) __Pyx_GOTREF(__pyx_t_6); /* "aiohttp/_http_parser.pyx":330 * try: * self._url = _parse_url(py_buf.buf, * py_buf.len) # <<<<<<<<<<<<<< * finally: * PyBuffer_Release(&py_buf) */ __pyx_t_5 = PyInt_FromSsize_t(__pyx_v_py_buf.len); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 330, __pyx_L6_error) __Pyx_GOTREF(__pyx_t_5); __pyx_t_8 = NULL; __pyx_t_7 = 0; if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_4); if (likely(__pyx_t_8)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); __Pyx_INCREF(__pyx_t_8); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_4, function); __pyx_t_7 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_4)) { PyObject *__pyx_temp[3] = {__pyx_t_8, __pyx_t_6, __pyx_t_5}; __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_7, 2+__pyx_t_7); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 329, __pyx_L6_error) __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { PyObject *__pyx_temp[3] = {__pyx_t_8, __pyx_t_6, __pyx_t_5}; __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_7, 2+__pyx_t_7); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 329, __pyx_L6_error) __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; } else #endif { __pyx_t_9 = PyTuple_New(2+__pyx_t_7); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 329, __pyx_L6_error) __Pyx_GOTREF(__pyx_t_9); if (__pyx_t_8) { 
__Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_t_8); __pyx_t_8 = NULL; } __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_9, 0+__pyx_t_7, __pyx_t_6); __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_9, 1+__pyx_t_7, __pyx_t_5); __pyx_t_6 = 0; __pyx_t_5 = 0; __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_9, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 329, __pyx_L6_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; } __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; /* "aiohttp/_http_parser.pyx":329 * PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE) * try: * self._url = _parse_url(py_buf.buf, # <<<<<<<<<<<<<< * py_buf.len) * finally: */ __Pyx_GIVEREF(__pyx_t_3); __Pyx_GOTREF(__pyx_v_self->__pyx_base._url); __Pyx_DECREF(__pyx_v_self->__pyx_base._url); __pyx_v_self->__pyx_base._url = __pyx_t_3; __pyx_t_3 = 0; } /* "aiohttp/_http_parser.pyx":332 * py_buf.len) * finally: * PyBuffer_Release(&py_buf) # <<<<<<<<<<<<<< * self._buf.clear() * */ /*finally:*/ { /*normal exit:*/{ PyBuffer_Release((&__pyx_v_py_buf)); goto __pyx_L7; } __pyx_L6_error:; /*exception exit:*/{ __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_15, &__pyx_t_16, &__pyx_t_17); if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_12, &__pyx_t_13, &__pyx_t_14) < 0)) __Pyx_ErrFetch(&__pyx_t_12, &__pyx_t_13, &__pyx_t_14); __Pyx_XGOTREF(__pyx_t_12); __Pyx_XGOTREF(__pyx_t_13); __Pyx_XGOTREF(__pyx_t_14); __Pyx_XGOTREF(__pyx_t_15); __Pyx_XGOTREF(__pyx_t_16); __Pyx_XGOTREF(__pyx_t_17); __pyx_t_7 = __pyx_lineno; __pyx_t_10 = __pyx_clineno; __pyx_t_11 = __pyx_filename; 
{ PyBuffer_Release((&__pyx_v_py_buf)); } if (PY_MAJOR_VERSION >= 3) { __Pyx_XGIVEREF(__pyx_t_15); __Pyx_XGIVEREF(__pyx_t_16); __Pyx_XGIVEREF(__pyx_t_17); __Pyx_ExceptionReset(__pyx_t_15, __pyx_t_16, __pyx_t_17); } __Pyx_XGIVEREF(__pyx_t_12); __Pyx_XGIVEREF(__pyx_t_13); __Pyx_XGIVEREF(__pyx_t_14); __Pyx_ErrRestore(__pyx_t_12, __pyx_t_13, __pyx_t_14); __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; __pyx_lineno = __pyx_t_7; __pyx_clineno = __pyx_t_10; __pyx_filename = __pyx_t_11; goto __pyx_L1_error; } __pyx_L7:; } } __pyx_L4:; /* "aiohttp/_http_parser.pyx":333 * finally: * PyBuffer_Release(&py_buf) * self._buf.clear() # <<<<<<<<<<<<<< * * */ __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->__pyx_base._buf, __pyx_n_s_clear); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 333, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_9 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_4); if (likely(__pyx_t_9)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); __Pyx_INCREF(__pyx_t_9); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_4, function); } } if (__pyx_t_9) { __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_9); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 333, __pyx_L1_error) __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; } else { __pyx_t_3 = __Pyx_PyObject_CallNoArg(__pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 333, __pyx_L1_error) } __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; /* "aiohttp/_http_parser.pyx":319 * payload_exception, response_with_body) * * cdef object _on_status_complete(self): # <<<<<<<<<<<<<< * cdef Py_buffer py_buf * if not self._buf: */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_5); __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_8); 
__Pyx_XDECREF(__pyx_t_9); __Pyx_AddTraceback("aiohttp._http_parser.HttpRequestParserC._on_status_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "(tree fragment)":1 * def __reduce_cython__(self): # <<<<<<<<<<<<<< * raise TypeError("no default __reduce__ due to non-trivial __cinit__") * def __setstate_cython__(self, __pyx_state): */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_12_http_parser_18HttpRequestParserC_3__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ static PyObject *__pyx_pw_7aiohttp_12_http_parser_18HttpRequestParserC_3__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18HttpRequestParserC_2__reduce_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParserC *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_12_http_parser_18HttpRequestParserC_2__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParserC *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("__reduce_cython__", 0); /* "(tree fragment)":2 * def __reduce_cython__(self): * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< * def __setstate_cython__(self, __pyx_state): * raise TypeError("no default __reduce__ due to non-trivial __cinit__") */ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__9, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 2, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_Raise(__pyx_t_1, 0, 0, 0); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __PYX_ERR(1, 2, __pyx_L1_error) /* "(tree fragment)":1 * def 
__reduce_cython__(self): # <<<<<<<<<<<<<< * raise TypeError("no default __reduce__ due to non-trivial __cinit__") * def __setstate_cython__(self, __pyx_state): */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("aiohttp._http_parser.HttpRequestParserC.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "(tree fragment)":3 * def __reduce_cython__(self): * raise TypeError("no default __reduce__ due to non-trivial __cinit__") * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< * raise TypeError("no default __reduce__ due to non-trivial __cinit__") */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_12_http_parser_18HttpRequestParserC_5__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ static PyObject *__pyx_pw_7aiohttp_12_http_parser_18HttpRequestParserC_5__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_12_http_parser_18HttpRequestParserC_4__setstate_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParserC *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_12_http_parser_18HttpRequestParserC_4__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParserC *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("__setstate_cython__", 0); /* "(tree fragment)":4 * raise TypeError("no default __reduce__ due to non-trivial __cinit__") * def __setstate_cython__(self, __pyx_state): * raise TypeError("no default __reduce__ due to non-trivial 
__cinit__") # <<<<<<<<<<<<<< */ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__10, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_Raise(__pyx_t_1, 0, 0, 0); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __PYX_ERR(1, 4, __pyx_L1_error) /* "(tree fragment)":3 * def __reduce_cython__(self): * raise TypeError("no default __reduce__ due to non-trivial __cinit__") * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< * raise TypeError("no default __reduce__ due to non-trivial __cinit__") */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("aiohttp._http_parser.HttpRequestParserC.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":338 * cdef class HttpResponseParserC(HttpParser): * * def __init__(self, protocol, loop, timer=None, # <<<<<<<<<<<<<< * size_t max_line_size=8190, size_t max_headers=32768, * size_t max_field_size=8190, payload_exception=None, */ /* Python wrapper */ static int __pyx_pw_7aiohttp_12_http_parser_19HttpResponseParserC_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static int __pyx_pw_7aiohttp_12_http_parser_19HttpResponseParserC_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_protocol = 0; PyObject *__pyx_v_loop = 0; PyObject *__pyx_v_timer = 0; size_t __pyx_v_max_line_size; size_t __pyx_v_max_headers; size_t __pyx_v_max_field_size; PyObject *__pyx_v_payload_exception = 0; PyObject *__pyx_v_response_with_body = 0; CYTHON_UNUSED PyObject *__pyx_v_read_until_eof = 0; PyObject *__pyx_v_auto_decompress = 0; int __pyx_r; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = 
{&__pyx_n_s_protocol,&__pyx_n_s_loop,&__pyx_n_s_timer,&__pyx_n_s_max_line_size,&__pyx_n_s_max_headers,&__pyx_n_s_max_field_size,&__pyx_n_s_payload_exception,&__pyx_n_s_response_with_body,&__pyx_n_s_read_until_eof,&__pyx_n_s_auto_decompress,0}; PyObject* values[10] = {0,0,0,0,0,0,0,0,0,0}; values[2] = ((PyObject *)Py_None); /* "aiohttp/_http_parser.pyx":340 * def __init__(self, protocol, loop, timer=None, * size_t max_line_size=8190, size_t max_headers=32768, * size_t max_field_size=8190, payload_exception=None, # <<<<<<<<<<<<<< * response_with_body=True, read_until_eof=False, * auto_decompress=True): */ values[6] = ((PyObject *)Py_None); /* "aiohttp/_http_parser.pyx":341 * size_t max_line_size=8190, size_t max_headers=32768, * size_t max_field_size=8190, payload_exception=None, * response_with_body=True, read_until_eof=False, # <<<<<<<<<<<<<< * auto_decompress=True): * self._init(cparser.HTTP_RESPONSE, protocol, loop, timer, */ values[7] = ((PyObject *)Py_True); values[8] = ((PyObject *)Py_False); /* "aiohttp/_http_parser.pyx":342 * size_t max_field_size=8190, payload_exception=None, * response_with_body=True, read_until_eof=False, * auto_decompress=True): # <<<<<<<<<<<<<< * self._init(cparser.HTTP_RESPONSE, protocol, loop, timer, * max_line_size, max_headers, max_field_size, */ values[9] = ((PyObject *)Py_True); if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); CYTHON_FALLTHROUGH; case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); CYTHON_FALLTHROUGH; case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); CYTHON_FALLTHROUGH; case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); CYTHON_FALLTHROUGH; case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); CYTHON_FALLTHROUGH; case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); CYTHON_FALLTHROUGH; case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); CYTHON_FALLTHROUGH; case 3: values[2] = 
PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_protocol)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_loop)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("__init__", 0, 2, 10, 1); __PYX_ERR(0, 338, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 2: if (kw_args > 0) { PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_timer); if (value) { values[2] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 3: if (kw_args > 0) { PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_max_line_size); if (value) { values[3] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 4: if (kw_args > 0) { PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_max_headers); if (value) { values[4] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 5: if (kw_args > 0) { PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_max_field_size); if (value) { values[5] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 6: if (kw_args > 0) { PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_payload_exception); if (value) { values[6] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 7: if (kw_args > 0) { PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_response_with_body); if (value) { values[7] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 8: if (kw_args > 0) { PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_read_until_eof); if (value) { values[8] = value; kw_args--; } } CYTHON_FALLTHROUGH; case 9: if (kw_args > 0) { PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s_auto_decompress); if (value) { values[9] = value; kw_args--; } } } if (unlikely(kw_args 
> 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 338, __pyx_L3_error) } } else { switch (PyTuple_GET_SIZE(__pyx_args)) { case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); CYTHON_FALLTHROUGH; case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); CYTHON_FALLTHROUGH; case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); CYTHON_FALLTHROUGH; case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); CYTHON_FALLTHROUGH; case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); CYTHON_FALLTHROUGH; case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); CYTHON_FALLTHROUGH; case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); CYTHON_FALLTHROUGH; case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); values[0] = PyTuple_GET_ITEM(__pyx_args, 0); break; default: goto __pyx_L5_argtuple_error; } } __pyx_v_protocol = values[0]; __pyx_v_loop = values[1]; __pyx_v_timer = values[2]; if (values[3]) { __pyx_v_max_line_size = __Pyx_PyInt_As_size_t(values[3]); if (unlikely((__pyx_v_max_line_size == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 339, __pyx_L3_error) } else { __pyx_v_max_line_size = ((size_t)0x1FFE); } if (values[4]) { __pyx_v_max_headers = __Pyx_PyInt_As_size_t(values[4]); if (unlikely((__pyx_v_max_headers == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 339, __pyx_L3_error) } else { __pyx_v_max_headers = ((size_t)0x8000); } if (values[5]) { __pyx_v_max_field_size = __Pyx_PyInt_As_size_t(values[5]); if (unlikely((__pyx_v_max_field_size == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 340, __pyx_L3_error) } else { __pyx_v_max_field_size = ((size_t)0x1FFE); } __pyx_v_payload_exception = values[6]; __pyx_v_response_with_body = values[7]; __pyx_v_read_until_eof = values[8]; __pyx_v_auto_decompress = values[9]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("__init__", 0, 2, 10, 
PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 338, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("aiohttp._http_parser.HttpResponseParserC.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return -1; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_7aiohttp_12_http_parser_19HttpResponseParserC___init__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParserC *)__pyx_v_self), __pyx_v_protocol, __pyx_v_loop, __pyx_v_timer, __pyx_v_max_line_size, __pyx_v_max_headers, __pyx_v_max_field_size, __pyx_v_payload_exception, __pyx_v_response_with_body, __pyx_v_read_until_eof, __pyx_v_auto_decompress); /* "aiohttp/_http_parser.pyx":338 * cdef class HttpResponseParserC(HttpParser): * * def __init__(self, protocol, loop, timer=None, # <<<<<<<<<<<<<< * size_t max_line_size=8190, size_t max_headers=32768, * size_t max_field_size=8190, payload_exception=None, */ /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static int __pyx_pf_7aiohttp_12_http_parser_19HttpResponseParserC___init__(struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParserC *__pyx_v_self, PyObject *__pyx_v_protocol, PyObject *__pyx_v_loop, PyObject *__pyx_v_timer, size_t __pyx_v_max_line_size, size_t __pyx_v_max_headers, size_t __pyx_v_max_field_size, PyObject *__pyx_v_payload_exception, PyObject *__pyx_v_response_with_body, CYTHON_UNUSED PyObject *__pyx_v_read_until_eof, PyObject *__pyx_v_auto_decompress) { int __pyx_r; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init __pyx_t_2; __Pyx_RefNannySetupContext("__init__", 0); /* "aiohttp/_http_parser.pyx":343 * response_with_body=True, read_until_eof=False, * auto_decompress=True): * self._init(cparser.HTTP_RESPONSE, protocol, loop, timer, # <<<<<<<<<<<<<< * max_line_size, max_headers, max_field_size, * payload_exception, response_with_body, auto_decompress) */ __pyx_t_2.__pyx_n = 7; __pyx_t_2.timer = __pyx_v_timer; 
__pyx_t_2.max_line_size = __pyx_v_max_line_size; __pyx_t_2.max_headers = __pyx_v_max_headers; __pyx_t_2.max_field_size = __pyx_v_max_field_size; __pyx_t_2.payload_exception = __pyx_v_payload_exception; __pyx_t_2.response_with_body = __pyx_v_response_with_body; __pyx_t_2.auto_decompress = __pyx_v_auto_decompress; __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpResponseParserC *)__pyx_v_self->__pyx_base.__pyx_vtab)->__pyx_base._init(((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_v_self), HTTP_RESPONSE, __pyx_v_protocol, __pyx_v_loop, &__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 343, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":338 * cdef class HttpResponseParserC(HttpParser): * * def __init__(self, protocol, loop, timer=None, # <<<<<<<<<<<<<< * size_t max_line_size=8190, size_t max_headers=32768, * size_t max_field_size=8190, payload_exception=None, */ /* function exit code */ __pyx_r = 0; goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("aiohttp._http_parser.HttpResponseParserC.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":347 * payload_exception, response_with_body, auto_decompress) * * cdef object _on_status_complete(self): # <<<<<<<<<<<<<< * if self._buf: * self._reason = self._buf.decode('utf-8', 'surrogateescape') */ static PyObject *__pyx_f_7aiohttp_12_http_parser_19HttpResponseParserC__on_status_complete(struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParserC *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations int __pyx_t_1; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; PyObject *__pyx_t_4 = NULL; __Pyx_RefNannySetupContext("_on_status_complete", 0); /* "aiohttp/_http_parser.pyx":348 * * cdef object _on_status_complete(self): * if self._buf: # <<<<<<<<<<<<<< * self._reason = 
self._buf.decode('utf-8', 'surrogateescape') * self._buf.clear() */ __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_self->__pyx_base._buf); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 348, __pyx_L1_error) if (__pyx_t_1) { /* "aiohttp/_http_parser.pyx":349 * cdef object _on_status_complete(self): * if self._buf: * self._reason = self._buf.decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< * self._buf.clear() * */ if (unlikely(__pyx_v_self->__pyx_base._buf == Py_None)) { PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "decode"); __PYX_ERR(0, 349, __pyx_L1_error) } __pyx_t_2 = __Pyx_decode_bytearray(__pyx_v_self->__pyx_base._buf, 0, PY_SSIZE_T_MAX, NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 349, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (!(likely(PyUnicode_CheckExact(__pyx_t_2))||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_2)->tp_name), 0))) __PYX_ERR(0, 349, __pyx_L1_error) __Pyx_GIVEREF(__pyx_t_2); __Pyx_GOTREF(__pyx_v_self->__pyx_base._reason); __Pyx_DECREF(__pyx_v_self->__pyx_base._reason); __pyx_v_self->__pyx_base._reason = ((PyObject*)__pyx_t_2); __pyx_t_2 = 0; /* "aiohttp/_http_parser.pyx":350 * if self._buf: * self._reason = self._buf.decode('utf-8', 'surrogateescape') * self._buf.clear() # <<<<<<<<<<<<<< * * */ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_self->__pyx_base._buf, __pyx_n_s_clear); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 350, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); if (likely(__pyx_t_4)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); __Pyx_INCREF(__pyx_t_4); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_3, function); } } if (__pyx_t_4) { __pyx_t_2 = __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_4); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 350, __pyx_L1_error) 
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; } else { __pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_t_3); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 350, __pyx_L1_error) } __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; /* "aiohttp/_http_parser.pyx":348 * * cdef object _on_status_complete(self): * if self._buf: # <<<<<<<<<<<<<< * self._reason = self._buf.decode('utf-8', 'surrogateescape') * self._buf.clear() */ } /* "aiohttp/_http_parser.pyx":347 * payload_exception, response_with_body, auto_decompress) * * cdef object _on_status_complete(self): # <<<<<<<<<<<<<< * if self._buf: * self._reason = self._buf.decode('utf-8', 'surrogateescape') */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_AddTraceback("aiohttp._http_parser.HttpResponseParserC._on_status_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "(tree fragment)":1 * def __reduce_cython__(self): # <<<<<<<<<<<<<< * raise TypeError("no default __reduce__ due to non-trivial __cinit__") * def __setstate_cython__(self, __pyx_state): */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_12_http_parser_19HttpResponseParserC_3__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ static PyObject *__pyx_pw_7aiohttp_12_http_parser_19HttpResponseParserC_3__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_12_http_parser_19HttpResponseParserC_2__reduce_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParserC *)__pyx_v_self)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject 
*__pyx_pf_7aiohttp_12_http_parser_19HttpResponseParserC_2__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParserC *__pyx_v_self) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("__reduce_cython__", 0); /* "(tree fragment)":2 * def __reduce_cython__(self): * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< * def __setstate_cython__(self, __pyx_state): * raise TypeError("no default __reduce__ due to non-trivial __cinit__") */ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__11, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 2, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_Raise(__pyx_t_1, 0, 0, 0); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __PYX_ERR(1, 2, __pyx_L1_error) /* "(tree fragment)":1 * def __reduce_cython__(self): # <<<<<<<<<<<<<< * raise TypeError("no default __reduce__ due to non-trivial __cinit__") * def __setstate_cython__(self, __pyx_state): */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("aiohttp._http_parser.HttpResponseParserC.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "(tree fragment)":3 * def __reduce_cython__(self): * raise TypeError("no default __reduce__ due to non-trivial __cinit__") * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< * raise TypeError("no default __reduce__ due to non-trivial __cinit__") */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_12_http_parser_19HttpResponseParserC_5__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ static PyObject *__pyx_pw_7aiohttp_12_http_parser_19HttpResponseParserC_5__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations 
__Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_12_http_parser_19HttpResponseParserC_4__setstate_cython__(((struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParserC *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_12_http_parser_19HttpResponseParserC_4__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParserC *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; __Pyx_RefNannySetupContext("__setstate_cython__", 0); /* "(tree fragment)":4 * raise TypeError("no default __reduce__ due to non-trivial __cinit__") * def __setstate_cython__(self, __pyx_state): * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< */ __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__12, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_Raise(__pyx_t_1, 0, 0, 0); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __PYX_ERR(1, 4, __pyx_L1_error) /* "(tree fragment)":3 * def __reduce_cython__(self): * raise TypeError("no default __reduce__ due to non-trivial __cinit__") * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< * raise TypeError("no default __reduce__ due to non-trivial __cinit__") */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_AddTraceback("aiohttp._http_parser.HttpResponseParserC.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":353 * * * cdef int cb_on_message_begin(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< * cdef HttpParser pyparser = parser.data * */ static int __pyx_f_7aiohttp_12_http_parser_cb_on_message_begin(struct 
http_parser *__pyx_v_parser) { struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; int __pyx_r; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; __Pyx_RefNannySetupContext("cb_on_message_begin", 0); /* "aiohttp/_http_parser.pyx":354 * * cdef int cb_on_message_begin(cparser.http_parser* parser) except -1: * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< * * pyparser._started = True */ __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); __Pyx_INCREF(__pyx_t_1); __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":356 * cdef HttpParser pyparser = parser.data * * pyparser._started = True # <<<<<<<<<<<<<< * pyparser._headers = [] * pyparser._raw_headers = [] */ __pyx_v_pyparser->_started = 1; /* "aiohttp/_http_parser.pyx":357 * * pyparser._started = True * pyparser._headers = [] # <<<<<<<<<<<<<< * pyparser._raw_headers = [] * pyparser._buf.clear() */ __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 357, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GIVEREF(__pyx_t_1); __Pyx_GOTREF(__pyx_v_pyparser->_headers); __Pyx_DECREF(__pyx_v_pyparser->_headers); __pyx_v_pyparser->_headers = ((PyObject*)__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":358 * pyparser._started = True * pyparser._headers = [] * pyparser._raw_headers = [] # <<<<<<<<<<<<<< * pyparser._buf.clear() * pyparser._path = None */ __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 358, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GIVEREF(__pyx_t_1); __Pyx_GOTREF(__pyx_v_pyparser->_raw_headers); __Pyx_DECREF(__pyx_v_pyparser->_raw_headers); __pyx_v_pyparser->_raw_headers = ((PyObject*)__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":359 * pyparser._headers = [] * pyparser._raw_headers = [] * pyparser._buf.clear() # <<<<<<<<<<<<<< * pyparser._path = None * pyparser._reason = None */ __pyx_t_2 = 
__Pyx_PyObject_GetAttrStr(__pyx_v_pyparser->_buf, __pyx_n_s_clear); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 359, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); } } if (__pyx_t_3) { __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 359, __pyx_L1_error) __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; } else { __pyx_t_1 = __Pyx_PyObject_CallNoArg(__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 359, __pyx_L1_error) } __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":360 * pyparser._raw_headers = [] * pyparser._buf.clear() * pyparser._path = None # <<<<<<<<<<<<<< * pyparser._reason = None * return 0 */ __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); __Pyx_GOTREF(__pyx_v_pyparser->_path); __Pyx_DECREF(__pyx_v_pyparser->_path); __pyx_v_pyparser->_path = ((PyObject*)Py_None); /* "aiohttp/_http_parser.pyx":361 * pyparser._buf.clear() * pyparser._path = None * pyparser._reason = None # <<<<<<<<<<<<<< * return 0 * */ __Pyx_INCREF(Py_None); __Pyx_GIVEREF(Py_None); __Pyx_GOTREF(__pyx_v_pyparser->_reason); __Pyx_DECREF(__pyx_v_pyparser->_reason); __pyx_v_pyparser->_reason = ((PyObject*)Py_None); /* "aiohttp/_http_parser.pyx":362 * pyparser._path = None * pyparser._reason = None * return 0 # <<<<<<<<<<<<<< * * */ __pyx_r = 0; goto __pyx_L0; /* "aiohttp/_http_parser.pyx":353 * * * cdef int cb_on_message_begin(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< * cdef HttpParser pyparser = parser.data * */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); 
__Pyx_AddTraceback("aiohttp._http_parser.cb_on_message_begin", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":365 * * * cdef int cb_on_url(cparser.http_parser* parser, # <<<<<<<<<<<<<< * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data */ static int __pyx_f_7aiohttp_12_http_parser_cb_on_url(struct http_parser *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) { struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; PyObject *__pyx_v_ex = NULL; int __pyx_r; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; PyObject *__pyx_t_4 = NULL; int __pyx_t_5; PyObject *__pyx_t_6 = NULL; PyObject *__pyx_t_7 = NULL; PyObject *__pyx_t_8 = NULL; int __pyx_t_9; PyObject *__pyx_t_10 = NULL; __Pyx_RefNannySetupContext("cb_on_url", 0); /* "aiohttp/_http_parser.pyx":367 * cdef int cb_on_url(cparser.http_parser* parser, * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< * try: * if length > pyparser._max_line_size: */ __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); __Pyx_INCREF(__pyx_t_1); __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":368 * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data * try: # <<<<<<<<<<<<<< * if length > pyparser._max_line_size: * raise LineTooLong( */ { __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); __Pyx_XGOTREF(__pyx_t_2); __Pyx_XGOTREF(__pyx_t_3); __Pyx_XGOTREF(__pyx_t_4); /*try:*/ { /* "aiohttp/_http_parser.pyx":369 * cdef HttpParser pyparser = parser.data * try: * if length > pyparser._max_line_size: # <<<<<<<<<<<<<< * raise LineTooLong( * 'Status line is 
too long', pyparser._max_line_size) */ __pyx_t_5 = ((__pyx_v_length > __pyx_v_pyparser->_max_line_size) != 0); if (__pyx_t_5) { /* "aiohttp/_http_parser.pyx":370 * try: * if length > pyparser._max_line_size: * raise LineTooLong( # <<<<<<<<<<<<<< * 'Status line is too long', pyparser._max_line_size) * pyparser._buf.extend(at[:length]) */ __pyx_t_6 = __Pyx_GetModuleGlobalName(__pyx_n_s_LineTooLong); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 370, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_6); /* "aiohttp/_http_parser.pyx":371 * if length > pyparser._max_line_size: * raise LineTooLong( * 'Status line is too long', pyparser._max_line_size) # <<<<<<<<<<<<<< * pyparser._buf.extend(at[:length]) * except BaseException as ex: */ __pyx_t_7 = __Pyx_PyInt_FromSize_t(__pyx_v_pyparser->_max_line_size); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 371, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_7); __pyx_t_8 = NULL; __pyx_t_9 = 0; if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_6))) { __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_6); if (likely(__pyx_t_8)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); __Pyx_INCREF(__pyx_t_8); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_6, function); __pyx_t_9 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_6)) { PyObject *__pyx_temp[3] = {__pyx_t_8, __pyx_kp_u_Status_line_is_too_long, __pyx_t_7}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_9, 2+__pyx_t_9); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 370, __pyx_L3_error) __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { PyObject *__pyx_temp[3] = {__pyx_t_8, __pyx_kp_u_Status_line_is_too_long, __pyx_t_7}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_9, 2+__pyx_t_9); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 370, __pyx_L3_error) __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; __Pyx_GOTREF(__pyx_t_1); 
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; } else #endif { __pyx_t_10 = PyTuple_New(2+__pyx_t_9); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 370, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_10); if (__pyx_t_8) { __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_10, 0, __pyx_t_8); __pyx_t_8 = NULL; } __Pyx_INCREF(__pyx_kp_u_Status_line_is_too_long); __Pyx_GIVEREF(__pyx_kp_u_Status_line_is_too_long); PyTuple_SET_ITEM(__pyx_t_10, 0+__pyx_t_9, __pyx_kp_u_Status_line_is_too_long); __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_10, 1+__pyx_t_9, __pyx_t_7); __pyx_t_7 = 0; __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_10, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 370, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; } __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_Raise(__pyx_t_1, 0, 0, 0); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __PYX_ERR(0, 370, __pyx_L3_error) /* "aiohttp/_http_parser.pyx":369 * cdef HttpParser pyparser = parser.data * try: * if length > pyparser._max_line_size: # <<<<<<<<<<<<<< * raise LineTooLong( * 'Status line is too long', pyparser._max_line_size) */ } /* "aiohttp/_http_parser.pyx":372 * raise LineTooLong( * 'Status line is too long', pyparser._max_line_size) * pyparser._buf.extend(at[:length]) # <<<<<<<<<<<<<< * except BaseException as ex: * pyparser._last_error = ex */ __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_pyparser->_buf, __pyx_n_s_extend); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 372, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_10 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_at + 0, __pyx_v_length - 0); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 372, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_10); __pyx_t_7 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); if (likely(__pyx_t_7)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); __Pyx_INCREF(__pyx_t_7); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_6, function); } } if (!__pyx_t_7) { 
__pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_t_10); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 372, __pyx_L3_error) __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; __Pyx_GOTREF(__pyx_t_1); } else { #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_6)) { PyObject *__pyx_temp[2] = {__pyx_t_7, __pyx_t_10}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 372, __pyx_L3_error) __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { PyObject *__pyx_temp[2] = {__pyx_t_7, __pyx_t_10}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 372, __pyx_L3_error) __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; } else #endif { __pyx_t_8 = PyTuple_New(1+1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 372, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_8); __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_7); __pyx_t_7 = NULL; __Pyx_GIVEREF(__pyx_t_10); PyTuple_SET_ITEM(__pyx_t_8, 0+1, __pyx_t_10); __pyx_t_10 = 0; __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_8, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 372, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; } } __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":368 * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data * try: # <<<<<<<<<<<<<< * if length > pyparser._max_line_size: * raise LineTooLong( */ } /* "aiohttp/_http_parser.pyx":377 * return -1 * else: * return 0 # <<<<<<<<<<<<<< * * */ /*else:*/ { __pyx_r = 0; goto __pyx_L6_except_return; } __pyx_L3_error:; __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; 
__Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":373 * 'Status line is too long', pyparser._max_line_size) * pyparser._buf.extend(at[:length]) * except BaseException as ex: # <<<<<<<<<<<<<< * pyparser._last_error = ex * return -1 */ __pyx_t_9 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); if (__pyx_t_9) { __Pyx_AddTraceback("aiohttp._http_parser.cb_on_url", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_6, &__pyx_t_8) < 0) __PYX_ERR(0, 373, __pyx_L5_except_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_8); __Pyx_INCREF(__pyx_t_6); __pyx_v_ex = __pyx_t_6; /*try:*/ { /* "aiohttp/_http_parser.pyx":374 * pyparser._buf.extend(at[:length]) * except BaseException as ex: * pyparser._last_error = ex # <<<<<<<<<<<<<< * return -1 * else: */ __Pyx_INCREF(__pyx_v_ex); __Pyx_GIVEREF(__pyx_v_ex); __Pyx_GOTREF(__pyx_v_pyparser->_last_error); __Pyx_DECREF(__pyx_v_pyparser->_last_error); __pyx_v_pyparser->_last_error = __pyx_v_ex; /* "aiohttp/_http_parser.pyx":375 * except BaseException as ex: * pyparser._last_error = ex * return -1 # <<<<<<<<<<<<<< * else: * return 0 */ __pyx_r = -1; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; goto __pyx_L14_return; } /* "aiohttp/_http_parser.pyx":373 * 'Status line is too long', pyparser._max_line_size) * pyparser._buf.extend(at[:length]) * except BaseException as ex: # <<<<<<<<<<<<<< * pyparser._last_error = ex * return -1 */ /*finally:*/ { __pyx_L14_return: { __pyx_t_9 = __pyx_r; __Pyx_DECREF(__pyx_v_ex); __pyx_v_ex = NULL; __pyx_r = __pyx_t_9; goto __pyx_L6_except_return; } } } goto __pyx_L5_except_error; __pyx_L5_except_error:; /* "aiohttp/_http_parser.pyx":368 * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data * try: # <<<<<<<<<<<<<< * if length > pyparser._max_line_size: * raise 
LineTooLong( */ __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_XGIVEREF(__pyx_t_4); __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); goto __pyx_L1_error; __pyx_L6_except_return:; __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_XGIVEREF(__pyx_t_4); __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); goto __pyx_L0; } /* "aiohttp/_http_parser.pyx":365 * * * cdef int cb_on_url(cparser.http_parser* parser, # <<<<<<<<<<<<<< * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_7); __Pyx_XDECREF(__pyx_t_8); __Pyx_XDECREF(__pyx_t_10); __Pyx_AddTraceback("aiohttp._http_parser.cb_on_url", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); __Pyx_XDECREF(__pyx_v_ex); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":380 * * * cdef int cb_on_status(cparser.http_parser* parser, # <<<<<<<<<<<<<< * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data */ static int __pyx_f_7aiohttp_12_http_parser_cb_on_status(struct http_parser *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) { struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; PyObject *__pyx_v_ex = NULL; int __pyx_r; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; PyObject *__pyx_t_4 = NULL; int __pyx_t_5; PyObject *__pyx_t_6 = NULL; PyObject *__pyx_t_7 = NULL; PyObject *__pyx_t_8 = NULL; int __pyx_t_9; PyObject *__pyx_t_10 = NULL; __Pyx_RefNannySetupContext("cb_on_status", 0); /* "aiohttp/_http_parser.pyx":382 * cdef int cb_on_status(cparser.http_parser* parser, * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< * cdef str reason * try: */ __pyx_t_1 = ((PyObject 
*)__pyx_v_parser->data); __Pyx_INCREF(__pyx_t_1); __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":384 * cdef HttpParser pyparser = parser.data * cdef str reason * try: # <<<<<<<<<<<<<< * if length > pyparser._max_line_size: * raise LineTooLong( */ { __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); __Pyx_XGOTREF(__pyx_t_2); __Pyx_XGOTREF(__pyx_t_3); __Pyx_XGOTREF(__pyx_t_4); /*try:*/ { /* "aiohttp/_http_parser.pyx":385 * cdef str reason * try: * if length > pyparser._max_line_size: # <<<<<<<<<<<<<< * raise LineTooLong( * 'Status line is too long', pyparser._max_line_size) */ __pyx_t_5 = ((__pyx_v_length > __pyx_v_pyparser->_max_line_size) != 0); if (__pyx_t_5) { /* "aiohttp/_http_parser.pyx":386 * try: * if length > pyparser._max_line_size: * raise LineTooLong( # <<<<<<<<<<<<<< * 'Status line is too long', pyparser._max_line_size) * pyparser._buf.extend(at[:length]) */ __pyx_t_6 = __Pyx_GetModuleGlobalName(__pyx_n_s_LineTooLong); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 386, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_6); /* "aiohttp/_http_parser.pyx":387 * if length > pyparser._max_line_size: * raise LineTooLong( * 'Status line is too long', pyparser._max_line_size) # <<<<<<<<<<<<<< * pyparser._buf.extend(at[:length]) * except BaseException as ex: */ __pyx_t_7 = __Pyx_PyInt_FromSize_t(__pyx_v_pyparser->_max_line_size); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 387, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_7); __pyx_t_8 = NULL; __pyx_t_9 = 0; if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_6))) { __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_6); if (likely(__pyx_t_8)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); __Pyx_INCREF(__pyx_t_8); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_6, function); __pyx_t_9 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_6)) { PyObject *__pyx_temp[3] = {__pyx_t_8, 
__pyx_kp_u_Status_line_is_too_long, __pyx_t_7}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_9, 2+__pyx_t_9); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 386, __pyx_L3_error) __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { PyObject *__pyx_temp[3] = {__pyx_t_8, __pyx_kp_u_Status_line_is_too_long, __pyx_t_7}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_9, 2+__pyx_t_9); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 386, __pyx_L3_error) __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; } else #endif { __pyx_t_10 = PyTuple_New(2+__pyx_t_9); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 386, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_10); if (__pyx_t_8) { __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_10, 0, __pyx_t_8); __pyx_t_8 = NULL; } __Pyx_INCREF(__pyx_kp_u_Status_line_is_too_long); __Pyx_GIVEREF(__pyx_kp_u_Status_line_is_too_long); PyTuple_SET_ITEM(__pyx_t_10, 0+__pyx_t_9, __pyx_kp_u_Status_line_is_too_long); __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_10, 1+__pyx_t_9, __pyx_t_7); __pyx_t_7 = 0; __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_10, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 386, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; } __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_Raise(__pyx_t_1, 0, 0, 0); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __PYX_ERR(0, 386, __pyx_L3_error) /* "aiohttp/_http_parser.pyx":385 * cdef str reason * try: * if length > pyparser._max_line_size: # <<<<<<<<<<<<<< * raise LineTooLong( * 'Status line is too long', pyparser._max_line_size) */ } /* "aiohttp/_http_parser.pyx":388 * raise LineTooLong( * 'Status line is too long', pyparser._max_line_size) * pyparser._buf.extend(at[:length]) # <<<<<<<<<<<<<< * except BaseException as ex: * pyparser._last_error = 
ex */ __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_pyparser->_buf, __pyx_n_s_extend); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 388, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_10 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_at + 0, __pyx_v_length - 0); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 388, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_10); __pyx_t_7 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); if (likely(__pyx_t_7)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); __Pyx_INCREF(__pyx_t_7); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_6, function); } } if (!__pyx_t_7) { __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_t_10); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 388, __pyx_L3_error) __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; __Pyx_GOTREF(__pyx_t_1); } else { #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_6)) { PyObject *__pyx_temp[2] = {__pyx_t_7, __pyx_t_10}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 388, __pyx_L3_error) __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { PyObject *__pyx_temp[2] = {__pyx_t_7, __pyx_t_10}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 388, __pyx_L3_error) __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; } else #endif { __pyx_t_8 = PyTuple_New(1+1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 388, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_8); __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_7); __pyx_t_7 = NULL; __Pyx_GIVEREF(__pyx_t_10); PyTuple_SET_ITEM(__pyx_t_8, 0+1, __pyx_t_10); __pyx_t_10 = 0; __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_8, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 388, 
__pyx_L3_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; } } __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":384 * cdef HttpParser pyparser = parser.data * cdef str reason * try: # <<<<<<<<<<<<<< * if length > pyparser._max_line_size: * raise LineTooLong( */ } /* "aiohttp/_http_parser.pyx":393 * return -1 * else: * return 0 # <<<<<<<<<<<<<< * * */ /*else:*/ { __pyx_r = 0; goto __pyx_L6_except_return; } __pyx_L3_error:; __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":389 * 'Status line is too long', pyparser._max_line_size) * pyparser._buf.extend(at[:length]) * except BaseException as ex: # <<<<<<<<<<<<<< * pyparser._last_error = ex * return -1 */ __pyx_t_9 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); if (__pyx_t_9) { __Pyx_AddTraceback("aiohttp._http_parser.cb_on_status", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_6, &__pyx_t_8) < 0) __PYX_ERR(0, 389, __pyx_L5_except_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_8); __Pyx_INCREF(__pyx_t_6); __pyx_v_ex = __pyx_t_6; /*try:*/ { /* "aiohttp/_http_parser.pyx":390 * pyparser._buf.extend(at[:length]) * except BaseException as ex: * pyparser._last_error = ex # <<<<<<<<<<<<<< * return -1 * else: */ __Pyx_INCREF(__pyx_v_ex); __Pyx_GIVEREF(__pyx_v_ex); __Pyx_GOTREF(__pyx_v_pyparser->_last_error); __Pyx_DECREF(__pyx_v_pyparser->_last_error); __pyx_v_pyparser->_last_error = __pyx_v_ex; /* "aiohttp/_http_parser.pyx":391 * except BaseException as ex: * pyparser._last_error = ex * return -1 # <<<<<<<<<<<<<< * else: * return 0 */ __pyx_r = -1; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; goto 
__pyx_L14_return; } /* "aiohttp/_http_parser.pyx":389 * 'Status line is too long', pyparser._max_line_size) * pyparser._buf.extend(at[:length]) * except BaseException as ex: # <<<<<<<<<<<<<< * pyparser._last_error = ex * return -1 */ /*finally:*/ { __pyx_L14_return: { __pyx_t_9 = __pyx_r; __Pyx_DECREF(__pyx_v_ex); __pyx_v_ex = NULL; __pyx_r = __pyx_t_9; goto __pyx_L6_except_return; } } } goto __pyx_L5_except_error; __pyx_L5_except_error:; /* "aiohttp/_http_parser.pyx":384 * cdef HttpParser pyparser = parser.data * cdef str reason * try: # <<<<<<<<<<<<<< * if length > pyparser._max_line_size: * raise LineTooLong( */ __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_XGIVEREF(__pyx_t_4); __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); goto __pyx_L1_error; __pyx_L6_except_return:; __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_XGIVEREF(__pyx_t_4); __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); goto __pyx_L0; } /* "aiohttp/_http_parser.pyx":380 * * * cdef int cb_on_status(cparser.http_parser* parser, # <<<<<<<<<<<<<< * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_7); __Pyx_XDECREF(__pyx_t_8); __Pyx_XDECREF(__pyx_t_10); __Pyx_AddTraceback("aiohttp._http_parser.cb_on_status", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); __Pyx_XDECREF(__pyx_v_ex); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":396 * * * cdef int cb_on_header_field(cparser.http_parser* parser, # <<<<<<<<<<<<<< * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data */ static int __pyx_f_7aiohttp_12_http_parser_cb_on_header_field(struct http_parser *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) { struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; 
PyObject *__pyx_v_ex = NULL; int __pyx_r; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; PyObject *__pyx_t_4 = NULL; int __pyx_t_5; PyObject *__pyx_t_6 = NULL; PyObject *__pyx_t_7 = NULL; PyObject *__pyx_t_8 = NULL; int __pyx_t_9; PyObject *__pyx_t_10 = NULL; __Pyx_RefNannySetupContext("cb_on_header_field", 0); /* "aiohttp/_http_parser.pyx":398 * cdef int cb_on_header_field(cparser.http_parser* parser, * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< * try: * pyparser._on_status_complete() */ __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); __Pyx_INCREF(__pyx_t_1); __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":399 * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data * try: # <<<<<<<<<<<<<< * pyparser._on_status_complete() * if length > pyparser._max_field_size: */ { __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); __Pyx_XGOTREF(__pyx_t_2); __Pyx_XGOTREF(__pyx_t_3); __Pyx_XGOTREF(__pyx_t_4); /*try:*/ { /* "aiohttp/_http_parser.pyx":400 * cdef HttpParser pyparser = parser.data * try: * pyparser._on_status_complete() # <<<<<<<<<<<<<< * if length > pyparser._max_field_size: * raise LineTooLong( */ __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_status_complete(__pyx_v_pyparser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 400, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":401 * try: * pyparser._on_status_complete() * if length > pyparser._max_field_size: # <<<<<<<<<<<<<< * raise LineTooLong( * 'Header name is too long', pyparser._max_field_size) */ __pyx_t_5 = ((__pyx_v_length > __pyx_v_pyparser->_max_field_size) != 0); if (__pyx_t_5) { /* 
"aiohttp/_http_parser.pyx":402 * pyparser._on_status_complete() * if length > pyparser._max_field_size: * raise LineTooLong( # <<<<<<<<<<<<<< * 'Header name is too long', pyparser._max_field_size) * pyparser._on_header_field( */ __pyx_t_6 = __Pyx_GetModuleGlobalName(__pyx_n_s_LineTooLong); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 402, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_6); /* "aiohttp/_http_parser.pyx":403 * if length > pyparser._max_field_size: * raise LineTooLong( * 'Header name is too long', pyparser._max_field_size) # <<<<<<<<<<<<<< * pyparser._on_header_field( * at[:length].decode('utf-8', 'surrogateescape'), at[:length]) */ __pyx_t_7 = __Pyx_PyInt_FromSize_t(__pyx_v_pyparser->_max_field_size); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 403, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_7); __pyx_t_8 = NULL; __pyx_t_9 = 0; if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_6))) { __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_6); if (likely(__pyx_t_8)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); __Pyx_INCREF(__pyx_t_8); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_6, function); __pyx_t_9 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_6)) { PyObject *__pyx_temp[3] = {__pyx_t_8, __pyx_kp_u_Header_name_is_too_long, __pyx_t_7}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_9, 2+__pyx_t_9); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 402, __pyx_L3_error) __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { PyObject *__pyx_temp[3] = {__pyx_t_8, __pyx_kp_u_Header_name_is_too_long, __pyx_t_7}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_9, 2+__pyx_t_9); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 402, __pyx_L3_error) __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; } else #endif { __pyx_t_10 = PyTuple_New(2+__pyx_t_9); 
if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 402, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_10); if (__pyx_t_8) { __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_10, 0, __pyx_t_8); __pyx_t_8 = NULL; } __Pyx_INCREF(__pyx_kp_u_Header_name_is_too_long); __Pyx_GIVEREF(__pyx_kp_u_Header_name_is_too_long); PyTuple_SET_ITEM(__pyx_t_10, 0+__pyx_t_9, __pyx_kp_u_Header_name_is_too_long); __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_10, 1+__pyx_t_9, __pyx_t_7); __pyx_t_7 = 0; __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_10, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 402, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; } __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_Raise(__pyx_t_1, 0, 0, 0); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __PYX_ERR(0, 402, __pyx_L3_error) /* "aiohttp/_http_parser.pyx":401 * try: * pyparser._on_status_complete() * if length > pyparser._max_field_size: # <<<<<<<<<<<<<< * raise LineTooLong( * 'Header name is too long', pyparser._max_field_size) */ } /* "aiohttp/_http_parser.pyx":405 * 'Header name is too long', pyparser._max_field_size) * pyparser._on_header_field( * at[:length].decode('utf-8', 'surrogateescape'), at[:length]) # <<<<<<<<<<<<<< * except BaseException as ex: * pyparser._last_error = ex */ __pyx_t_1 = __Pyx_decode_c_string(__pyx_v_at, 0, __pyx_v_length, NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 405, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_1); if (!(likely(PyUnicode_CheckExact(__pyx_t_1))||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(0, 405, __pyx_L3_error) __pyx_t_6 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_at + 0, __pyx_v_length - 0); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 405, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_6); /* "aiohttp/_http_parser.pyx":404 * raise LineTooLong( * 'Header name is too long', pyparser._max_field_size) * pyparser._on_header_field( # 
<<<<<<<<<<<<<< * at[:length].decode('utf-8', 'surrogateescape'), at[:length]) * except BaseException as ex: */ __pyx_t_10 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_header_field(__pyx_v_pyparser, ((PyObject*)__pyx_t_1), ((PyObject*)__pyx_t_6)); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 404, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_10); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; /* "aiohttp/_http_parser.pyx":399 * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data * try: # <<<<<<<<<<<<<< * pyparser._on_status_complete() * if length > pyparser._max_field_size: */ } /* "aiohttp/_http_parser.pyx":410 * return -1 * else: * return 0 # <<<<<<<<<<<<<< * * */ /*else:*/ { __pyx_r = 0; goto __pyx_L6_except_return; } __pyx_L3_error:; __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; /* "aiohttp/_http_parser.pyx":406 * pyparser._on_header_field( * at[:length].decode('utf-8', 'surrogateescape'), at[:length]) * except BaseException as ex: # <<<<<<<<<<<<<< * pyparser._last_error = ex * return -1 */ __pyx_t_9 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); if (__pyx_t_9) { __Pyx_AddTraceback("aiohttp._http_parser.cb_on_header_field", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_10, &__pyx_t_6, &__pyx_t_1) < 0) __PYX_ERR(0, 406, __pyx_L5_except_error) __Pyx_GOTREF(__pyx_t_10); __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_1); __Pyx_INCREF(__pyx_t_6); __pyx_v_ex = __pyx_t_6; /*try:*/ { /* "aiohttp/_http_parser.pyx":407 * at[:length].decode('utf-8', 'surrogateescape'), at[:length]) * except BaseException as ex: * pyparser._last_error = ex # <<<<<<<<<<<<<< * return -1 * else: */ __Pyx_INCREF(__pyx_v_ex); 
__Pyx_GIVEREF(__pyx_v_ex); __Pyx_GOTREF(__pyx_v_pyparser->_last_error); __Pyx_DECREF(__pyx_v_pyparser->_last_error); __pyx_v_pyparser->_last_error = __pyx_v_ex; /* "aiohttp/_http_parser.pyx":408 * except BaseException as ex: * pyparser._last_error = ex * return -1 # <<<<<<<<<<<<<< * else: * return 0 */ __pyx_r = -1; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; goto __pyx_L14_return; } /* "aiohttp/_http_parser.pyx":406 * pyparser._on_header_field( * at[:length].decode('utf-8', 'surrogateescape'), at[:length]) * except BaseException as ex: # <<<<<<<<<<<<<< * pyparser._last_error = ex * return -1 */ /*finally:*/ { __pyx_L14_return: { __pyx_t_9 = __pyx_r; __Pyx_DECREF(__pyx_v_ex); __pyx_v_ex = NULL; __pyx_r = __pyx_t_9; goto __pyx_L6_except_return; } } } goto __pyx_L5_except_error; __pyx_L5_except_error:; /* "aiohttp/_http_parser.pyx":399 * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data * try: # <<<<<<<<<<<<<< * pyparser._on_status_complete() * if length > pyparser._max_field_size: */ __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_XGIVEREF(__pyx_t_4); __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); goto __pyx_L1_error; __pyx_L6_except_return:; __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_XGIVEREF(__pyx_t_4); __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); goto __pyx_L0; } /* "aiohttp/_http_parser.pyx":396 * * * cdef int cb_on_header_field(cparser.http_parser* parser, # <<<<<<<<<<<<<< * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_7); __Pyx_XDECREF(__pyx_t_8); __Pyx_XDECREF(__pyx_t_10); __Pyx_AddTraceback("aiohttp._http_parser.cb_on_header_field", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); 
__Pyx_XDECREF(__pyx_v_ex); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":413 * * * cdef int cb_on_header_value(cparser.http_parser* parser, # <<<<<<<<<<<<<< * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data */ static int __pyx_f_7aiohttp_12_http_parser_cb_on_header_value(struct http_parser *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) { struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; PyObject *__pyx_v_ex = NULL; int __pyx_r; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; PyObject *__pyx_t_4 = NULL; int __pyx_t_5; int __pyx_t_6; Py_ssize_t __pyx_t_7; PyObject *__pyx_t_8 = NULL; PyObject *__pyx_t_9 = NULL; PyObject *__pyx_t_10 = NULL; int __pyx_t_11; PyObject *__pyx_t_12 = NULL; __Pyx_RefNannySetupContext("cb_on_header_value", 0); /* "aiohttp/_http_parser.pyx":415 * cdef int cb_on_header_value(cparser.http_parser* parser, * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< * try: * if pyparser._header_value is not None: */ __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); __Pyx_INCREF(__pyx_t_1); __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":416 * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data * try: # <<<<<<<<<<<<<< * if pyparser._header_value is not None: * if len(pyparser._header_value) + length > pyparser._max_field_size: */ { __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); __Pyx_XGOTREF(__pyx_t_2); __Pyx_XGOTREF(__pyx_t_3); __Pyx_XGOTREF(__pyx_t_4); /*try:*/ { /* "aiohttp/_http_parser.pyx":417 * cdef HttpParser pyparser = parser.data * try: * if pyparser._header_value is not None: # <<<<<<<<<<<<<< * if len(pyparser._header_value) + length > 
pyparser._max_field_size: * raise LineTooLong( */ __pyx_t_5 = (__pyx_v_pyparser->_header_value != ((PyObject*)Py_None)); __pyx_t_6 = (__pyx_t_5 != 0); if (__pyx_t_6) { /* "aiohttp/_http_parser.pyx":418 * try: * if pyparser._header_value is not None: * if len(pyparser._header_value) + length > pyparser._max_field_size: # <<<<<<<<<<<<<< * raise LineTooLong( * 'Header value is too long', pyparser._max_field_size) */ __pyx_t_1 = __pyx_v_pyparser->_header_value; __Pyx_INCREF(__pyx_t_1); if (unlikely(__pyx_t_1 == Py_None)) { PyErr_SetString(PyExc_TypeError, "object of type 'NoneType' has no len()"); __PYX_ERR(0, 418, __pyx_L3_error) } __pyx_t_7 = __Pyx_PyUnicode_GET_LENGTH(__pyx_t_1); if (unlikely(__pyx_t_7 == ((Py_ssize_t)-1))) __PYX_ERR(0, 418, __pyx_L3_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_6 = (((__pyx_t_7 + __pyx_v_length) > __pyx_v_pyparser->_max_field_size) != 0); if (__pyx_t_6) { /* "aiohttp/_http_parser.pyx":419 * if pyparser._header_value is not None: * if len(pyparser._header_value) + length > pyparser._max_field_size: * raise LineTooLong( # <<<<<<<<<<<<<< * 'Header value is too long', pyparser._max_field_size) * elif length > pyparser._max_field_size: */ __pyx_t_8 = __Pyx_GetModuleGlobalName(__pyx_n_s_LineTooLong); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 419, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_8); /* "aiohttp/_http_parser.pyx":420 * if len(pyparser._header_value) + length > pyparser._max_field_size: * raise LineTooLong( * 'Header value is too long', pyparser._max_field_size) # <<<<<<<<<<<<<< * elif length > pyparser._max_field_size: * raise LineTooLong( */ __pyx_t_9 = __Pyx_PyInt_FromSize_t(__pyx_v_pyparser->_max_field_size); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 420, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_9); __pyx_t_10 = NULL; __pyx_t_11 = 0; if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_8))) { __pyx_t_10 = PyMethod_GET_SELF(__pyx_t_8); if (likely(__pyx_t_10)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); 
__Pyx_INCREF(__pyx_t_10); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_8, function); __pyx_t_11 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_8)) { PyObject *__pyx_temp[3] = {__pyx_t_10, __pyx_kp_u_Header_value_is_too_long, __pyx_t_9}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_11, 2+__pyx_t_11); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 419, __pyx_L3_error) __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_8)) { PyObject *__pyx_temp[3] = {__pyx_t_10, __pyx_kp_u_Header_value_is_too_long, __pyx_t_9}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_11, 2+__pyx_t_11); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 419, __pyx_L3_error) __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; } else #endif { __pyx_t_12 = PyTuple_New(2+__pyx_t_11); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 419, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_12); if (__pyx_t_10) { __Pyx_GIVEREF(__pyx_t_10); PyTuple_SET_ITEM(__pyx_t_12, 0, __pyx_t_10); __pyx_t_10 = NULL; } __Pyx_INCREF(__pyx_kp_u_Header_value_is_too_long); __Pyx_GIVEREF(__pyx_kp_u_Header_value_is_too_long); PyTuple_SET_ITEM(__pyx_t_12, 0+__pyx_t_11, __pyx_kp_u_Header_value_is_too_long); __Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_12, 1+__pyx_t_11, __pyx_t_9); __pyx_t_9 = 0; __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_t_12, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 419, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; } __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; __Pyx_Raise(__pyx_t_1, 0, 0, 0); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __PYX_ERR(0, 419, __pyx_L3_error) /* "aiohttp/_http_parser.pyx":418 * try: * if pyparser._header_value is not None: * if len(pyparser._header_value) + length > pyparser._max_field_size: # <<<<<<<<<<<<<< * raise 
LineTooLong( * 'Header value is too long', pyparser._max_field_size) */ } /* "aiohttp/_http_parser.pyx":417 * cdef HttpParser pyparser = parser.data * try: * if pyparser._header_value is not None: # <<<<<<<<<<<<<< * if len(pyparser._header_value) + length > pyparser._max_field_size: * raise LineTooLong( */ goto __pyx_L9; } /* "aiohttp/_http_parser.pyx":421 * raise LineTooLong( * 'Header value is too long', pyparser._max_field_size) * elif length > pyparser._max_field_size: # <<<<<<<<<<<<<< * raise LineTooLong( * 'Header value is too long', pyparser._max_field_size) */ __pyx_t_6 = ((__pyx_v_length > __pyx_v_pyparser->_max_field_size) != 0); if (__pyx_t_6) { /* "aiohttp/_http_parser.pyx":422 * 'Header value is too long', pyparser._max_field_size) * elif length > pyparser._max_field_size: * raise LineTooLong( # <<<<<<<<<<<<<< * 'Header value is too long', pyparser._max_field_size) * pyparser._on_header_value( */ __pyx_t_8 = __Pyx_GetModuleGlobalName(__pyx_n_s_LineTooLong); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 422, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_8); /* "aiohttp/_http_parser.pyx":423 * elif length > pyparser._max_field_size: * raise LineTooLong( * 'Header value is too long', pyparser._max_field_size) # <<<<<<<<<<<<<< * pyparser._on_header_value( * at[:length].decode('utf-8', 'surrogateescape'), at[:length]) */ __pyx_t_12 = __Pyx_PyInt_FromSize_t(__pyx_v_pyparser->_max_field_size); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 423, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_12); __pyx_t_9 = NULL; __pyx_t_11 = 0; if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_8))) { __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_8); if (likely(__pyx_t_9)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); __Pyx_INCREF(__pyx_t_9); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_8, function); __pyx_t_11 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_8)) { PyObject *__pyx_temp[3] = {__pyx_t_9, __pyx_kp_u_Header_value_is_too_long, __pyx_t_12}; __pyx_t_1 = 
__Pyx_PyFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_11, 2+__pyx_t_11); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 422, __pyx_L3_error) __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_8)) { PyObject *__pyx_temp[3] = {__pyx_t_9, __pyx_kp_u_Header_value_is_too_long, __pyx_t_12}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_11, 2+__pyx_t_11); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 422, __pyx_L3_error) __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; } else #endif { __pyx_t_10 = PyTuple_New(2+__pyx_t_11); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 422, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_10); if (__pyx_t_9) { __Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_10, 0, __pyx_t_9); __pyx_t_9 = NULL; } __Pyx_INCREF(__pyx_kp_u_Header_value_is_too_long); __Pyx_GIVEREF(__pyx_kp_u_Header_value_is_too_long); PyTuple_SET_ITEM(__pyx_t_10, 0+__pyx_t_11, __pyx_kp_u_Header_value_is_too_long); __Pyx_GIVEREF(__pyx_t_12); PyTuple_SET_ITEM(__pyx_t_10, 1+__pyx_t_11, __pyx_t_12); __pyx_t_12 = 0; __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_t_10, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 422, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; } __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; __Pyx_Raise(__pyx_t_1, 0, 0, 0); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __PYX_ERR(0, 422, __pyx_L3_error) /* "aiohttp/_http_parser.pyx":421 * raise LineTooLong( * 'Header value is too long', pyparser._max_field_size) * elif length > pyparser._max_field_size: # <<<<<<<<<<<<<< * raise LineTooLong( * 'Header value is too long', pyparser._max_field_size) */ } __pyx_L9:; /* "aiohttp/_http_parser.pyx":425 * 'Header value is too long', pyparser._max_field_size) * pyparser._on_header_value( * at[:length].decode('utf-8', 'surrogateescape'), at[:length]) # 
<<<<<<<<<<<<<< * except BaseException as ex: * pyparser._last_error = ex */ __pyx_t_1 = __Pyx_decode_c_string(__pyx_v_at, 0, __pyx_v_length, NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 425, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_1); if (!(likely(PyUnicode_CheckExact(__pyx_t_1))||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(0, 425, __pyx_L3_error) __pyx_t_8 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_at + 0, __pyx_v_length - 0); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 425, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_8); /* "aiohttp/_http_parser.pyx":424 * raise LineTooLong( * 'Header value is too long', pyparser._max_field_size) * pyparser._on_header_value( # <<<<<<<<<<<<<< * at[:length].decode('utf-8', 'surrogateescape'), at[:length]) * except BaseException as ex: */ __pyx_t_10 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_header_value(__pyx_v_pyparser, ((PyObject*)__pyx_t_1), ((PyObject*)__pyx_t_8)); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 424, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_10); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; /* "aiohttp/_http_parser.pyx":416 * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data * try: # <<<<<<<<<<<<<< * if pyparser._header_value is not None: * if len(pyparser._header_value) + length > pyparser._max_field_size: */ } /* "aiohttp/_http_parser.pyx":430 * return -1 * else: * return 0 # <<<<<<<<<<<<<< * * */ /*else:*/ { __pyx_r = 0; goto __pyx_L6_except_return; } __pyx_L3_error:; __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; /* "aiohttp/_http_parser.pyx":426 * pyparser._on_header_value( * 
at[:length].decode('utf-8', 'surrogateescape'), at[:length]) * except BaseException as ex: # <<<<<<<<<<<<<< * pyparser._last_error = ex * return -1 */ __pyx_t_11 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); if (__pyx_t_11) { __Pyx_AddTraceback("aiohttp._http_parser.cb_on_header_value", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_10, &__pyx_t_8, &__pyx_t_1) < 0) __PYX_ERR(0, 426, __pyx_L5_except_error) __Pyx_GOTREF(__pyx_t_10); __Pyx_GOTREF(__pyx_t_8); __Pyx_GOTREF(__pyx_t_1); __Pyx_INCREF(__pyx_t_8); __pyx_v_ex = __pyx_t_8; /*try:*/ { /* "aiohttp/_http_parser.pyx":427 * at[:length].decode('utf-8', 'surrogateescape'), at[:length]) * except BaseException as ex: * pyparser._last_error = ex # <<<<<<<<<<<<<< * return -1 * else: */ __Pyx_INCREF(__pyx_v_ex); __Pyx_GIVEREF(__pyx_v_ex); __Pyx_GOTREF(__pyx_v_pyparser->_last_error); __Pyx_DECREF(__pyx_v_pyparser->_last_error); __pyx_v_pyparser->_last_error = __pyx_v_ex; /* "aiohttp/_http_parser.pyx":428 * except BaseException as ex: * pyparser._last_error = ex * return -1 # <<<<<<<<<<<<<< * else: * return 0 */ __pyx_r = -1; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; goto __pyx_L15_return; } /* "aiohttp/_http_parser.pyx":426 * pyparser._on_header_value( * at[:length].decode('utf-8', 'surrogateescape'), at[:length]) * except BaseException as ex: # <<<<<<<<<<<<<< * pyparser._last_error = ex * return -1 */ /*finally:*/ { __pyx_L15_return: { __pyx_t_11 = __pyx_r; __Pyx_DECREF(__pyx_v_ex); __pyx_v_ex = NULL; __pyx_r = __pyx_t_11; goto __pyx_L6_except_return; } } } goto __pyx_L5_except_error; __pyx_L5_except_error:; /* "aiohttp/_http_parser.pyx":416 * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data * try: # <<<<<<<<<<<<<< * if pyparser._header_value is not None: * if len(pyparser._header_value) + length > pyparser._max_field_size: */ __Pyx_XGIVEREF(__pyx_t_2); 
__Pyx_XGIVEREF(__pyx_t_3); __Pyx_XGIVEREF(__pyx_t_4); __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); goto __pyx_L1_error; __pyx_L6_except_return:; __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_XGIVEREF(__pyx_t_4); __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); goto __pyx_L0; } /* "aiohttp/_http_parser.pyx":413 * * * cdef int cb_on_header_value(cparser.http_parser* parser, # <<<<<<<<<<<<<< * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_8); __Pyx_XDECREF(__pyx_t_9); __Pyx_XDECREF(__pyx_t_10); __Pyx_XDECREF(__pyx_t_12); __Pyx_AddTraceback("aiohttp._http_parser.cb_on_header_value", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); __Pyx_XDECREF(__pyx_v_ex); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":433 * * * cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< * cdef HttpParser pyparser = parser.data * try: */ static int __pyx_f_7aiohttp_12_http_parser_cb_on_headers_complete(struct http_parser *__pyx_v_parser) { struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; PyObject *__pyx_v_exc = NULL; int __pyx_r; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; PyObject *__pyx_t_4 = NULL; int __pyx_t_5; int __pyx_t_6; int __pyx_t_7; PyObject *__pyx_t_8 = NULL; PyObject *__pyx_t_9 = NULL; __Pyx_RefNannySetupContext("cb_on_headers_complete", 0); /* "aiohttp/_http_parser.pyx":434 * * cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1: * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< * try: * pyparser._on_status_complete() */ __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); __Pyx_INCREF(__pyx_t_1); __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser 
*)__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":435 * cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1: * cdef HttpParser pyparser = parser.data * try: # <<<<<<<<<<<<<< * pyparser._on_status_complete() * pyparser._on_headers_complete() */ { __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); __Pyx_XGOTREF(__pyx_t_2); __Pyx_XGOTREF(__pyx_t_3); __Pyx_XGOTREF(__pyx_t_4); /*try:*/ { /* "aiohttp/_http_parser.pyx":436 * cdef HttpParser pyparser = parser.data * try: * pyparser._on_status_complete() # <<<<<<<<<<<<<< * pyparser._on_headers_complete() * except BaseException as exc: */ __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_status_complete(__pyx_v_pyparser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 436, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":437 * try: * pyparser._on_status_complete() * pyparser._on_headers_complete() # <<<<<<<<<<<<<< * except BaseException as exc: * pyparser._last_error = exc */ __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_headers_complete(__pyx_v_pyparser, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 437, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":435 * cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1: * cdef HttpParser pyparser = parser.data * try: # <<<<<<<<<<<<<< * pyparser._on_status_complete() * pyparser._on_headers_complete() */ } /* "aiohttp/_http_parser.pyx":442 * return -1 * else: * if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT # <<<<<<<<<<<<<< * return 2 * else: */ /*else:*/ { __pyx_t_6 = (__pyx_v_pyparser->_cparser->upgrade != 0); if (!__pyx_t_6) { } else { __pyx_t_5 = __pyx_t_6; goto __pyx_L10_bool_binop_done; } __pyx_t_6 = 
((__pyx_v_pyparser->_cparser->method == 5) != 0); __pyx_t_5 = __pyx_t_6; __pyx_L10_bool_binop_done:; if (__pyx_t_5) { /* "aiohttp/_http_parser.pyx":443 * else: * if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT * return 2 # <<<<<<<<<<<<<< * else: * return 0 */ __pyx_r = 2; goto __pyx_L6_except_return; /* "aiohttp/_http_parser.pyx":442 * return -1 * else: * if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT # <<<<<<<<<<<<<< * return 2 * else: */ } /* "aiohttp/_http_parser.pyx":445 * return 2 * else: * return 0 # <<<<<<<<<<<<<< * * */ /*else*/ { __pyx_r = 0; goto __pyx_L6_except_return; } } __pyx_L3_error:; __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":438 * pyparser._on_status_complete() * pyparser._on_headers_complete() * except BaseException as exc: # <<<<<<<<<<<<<< * pyparser._last_error = exc * return -1 */ __pyx_t_7 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); if (__pyx_t_7) { __Pyx_AddTraceback("aiohttp._http_parser.cb_on_headers_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_8, &__pyx_t_9) < 0) __PYX_ERR(0, 438, __pyx_L5_except_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GOTREF(__pyx_t_8); __Pyx_GOTREF(__pyx_t_9); __Pyx_INCREF(__pyx_t_8); __pyx_v_exc = __pyx_t_8; /*try:*/ { /* "aiohttp/_http_parser.pyx":439 * pyparser._on_headers_complete() * except BaseException as exc: * pyparser._last_error = exc # <<<<<<<<<<<<<< * return -1 * else: */ __Pyx_INCREF(__pyx_v_exc); __Pyx_GIVEREF(__pyx_v_exc); __Pyx_GOTREF(__pyx_v_pyparser->_last_error); __Pyx_DECREF(__pyx_v_pyparser->_last_error); __pyx_v_pyparser->_last_error = __pyx_v_exc; /* "aiohttp/_http_parser.pyx":440 * except BaseException as exc: * pyparser._last_error = exc * return -1 # <<<<<<<<<<<<<< * else: * if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT */ __pyx_r = -1; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; 
__Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; goto __pyx_L16_return; } /* "aiohttp/_http_parser.pyx":438 * pyparser._on_status_complete() * pyparser._on_headers_complete() * except BaseException as exc: # <<<<<<<<<<<<<< * pyparser._last_error = exc * return -1 */ /*finally:*/ { __pyx_L16_return: { __pyx_t_7 = __pyx_r; __Pyx_DECREF(__pyx_v_exc); __pyx_v_exc = NULL; __pyx_r = __pyx_t_7; goto __pyx_L6_except_return; } } } goto __pyx_L5_except_error; __pyx_L5_except_error:; /* "aiohttp/_http_parser.pyx":435 * cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1: * cdef HttpParser pyparser = parser.data * try: # <<<<<<<<<<<<<< * pyparser._on_status_complete() * pyparser._on_headers_complete() */ __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_XGIVEREF(__pyx_t_4); __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); goto __pyx_L1_error; __pyx_L6_except_return:; __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_XGIVEREF(__pyx_t_4); __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); goto __pyx_L0; } /* "aiohttp/_http_parser.pyx":433 * * * cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< * cdef HttpParser pyparser = parser.data * try: */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_8); __Pyx_XDECREF(__pyx_t_9); __Pyx_AddTraceback("aiohttp._http_parser.cb_on_headers_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); __Pyx_XDECREF(__pyx_v_exc); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":448 * * * cdef int cb_on_body(cparser.http_parser* parser, # <<<<<<<<<<<<<< * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data */ static int __pyx_f_7aiohttp_12_http_parser_cb_on_body(struct http_parser *__pyx_v_parser, char const *__pyx_v_at, size_t __pyx_v_length) { struct __pyx_obj_7aiohttp_12_http_parser_HttpParser 
*__pyx_v_pyparser = 0; PyObject *__pyx_v_body = 0; PyObject *__pyx_v_exc = NULL; int __pyx_r; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; PyObject *__pyx_t_4 = NULL; PyObject *__pyx_t_5 = NULL; PyObject *__pyx_t_6 = NULL; PyObject *__pyx_t_7 = NULL; int __pyx_t_8; PyObject *__pyx_t_9 = NULL; int __pyx_t_10; int __pyx_t_11; PyObject *__pyx_t_12 = NULL; PyObject *__pyx_t_13 = NULL; PyObject *__pyx_t_14 = NULL; PyObject *__pyx_t_15 = NULL; PyObject *__pyx_t_16 = NULL; int __pyx_t_17; char const *__pyx_t_18; PyObject *__pyx_t_19 = NULL; PyObject *__pyx_t_20 = NULL; PyObject *__pyx_t_21 = NULL; PyObject *__pyx_t_22 = NULL; PyObject *__pyx_t_23 = NULL; PyObject *__pyx_t_24 = NULL; __Pyx_RefNannySetupContext("cb_on_body", 0); /* "aiohttp/_http_parser.pyx":450 * cdef int cb_on_body(cparser.http_parser* parser, * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< * cdef bytes body = at[:length] * try: */ __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); __Pyx_INCREF(__pyx_t_1); __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":451 * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data * cdef bytes body = at[:length] # <<<<<<<<<<<<<< * try: * pyparser._payload.feed_data(body, length) */ __pyx_t_1 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_at + 0, __pyx_v_length - 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 451, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_v_body = ((PyObject*)__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":452 * cdef HttpParser pyparser = parser.data * cdef bytes body = at[:length] * try: # <<<<<<<<<<<<<< * pyparser._payload.feed_data(body, length) * except BaseException as exc: */ { __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); __Pyx_XGOTREF(__pyx_t_2); 
__Pyx_XGOTREF(__pyx_t_3); __Pyx_XGOTREF(__pyx_t_4); /*try:*/ { /* "aiohttp/_http_parser.pyx":453 * cdef bytes body = at[:length] * try: * pyparser._payload.feed_data(body, length) # <<<<<<<<<<<<<< * except BaseException as exc: * if pyparser._payload_exception is not None: */ __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_pyparser->_payload, __pyx_n_s_feed_data); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 453, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_5); __pyx_t_6 = __Pyx_PyInt_FromSize_t(__pyx_v_length); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 453, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_6); __pyx_t_7 = NULL; __pyx_t_8 = 0; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) { __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_5); if (likely(__pyx_t_7)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); __Pyx_INCREF(__pyx_t_7); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_5, function); __pyx_t_8 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_5)) { PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_v_body, __pyx_t_6}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 453, __pyx_L3_error) __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_5)) { PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_v_body, __pyx_t_6}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 453, __pyx_L3_error) __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; } else #endif { __pyx_t_9 = PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 453, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_9); if (__pyx_t_7) { __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_t_7); __pyx_t_7 = NULL; } __Pyx_INCREF(__pyx_v_body); __Pyx_GIVEREF(__pyx_v_body); 
PyTuple_SET_ITEM(__pyx_t_9, 0+__pyx_t_8, __pyx_v_body); __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_9, 1+__pyx_t_8, __pyx_t_6); __pyx_t_6 = 0; __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_t_9, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 453, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; } __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":452 * cdef HttpParser pyparser = parser.data * cdef bytes body = at[:length] * try: # <<<<<<<<<<<<<< * pyparser._payload.feed_data(body, length) * except BaseException as exc: */ } /* "aiohttp/_http_parser.pyx":462 * return -1 * else: * return 0 # <<<<<<<<<<<<<< * * */ /*else:*/ { __pyx_r = 0; goto __pyx_L6_except_return; } __pyx_L3_error:; __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":454 * try: * pyparser._payload.feed_data(body, length) * except BaseException as exc: # <<<<<<<<<<<<<< * if pyparser._payload_exception is not None: * pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) */ __pyx_t_8 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); if (__pyx_t_8) { __Pyx_AddTraceback("aiohttp._http_parser.cb_on_body", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_5, &__pyx_t_9) < 0) __PYX_ERR(0, 454, __pyx_L5_except_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GOTREF(__pyx_t_5); __Pyx_GOTREF(__pyx_t_9); __Pyx_INCREF(__pyx_t_5); __pyx_v_exc = __pyx_t_5; /*try:*/ { /* "aiohttp/_http_parser.pyx":455 * pyparser._payload.feed_data(body, length) * except BaseException as exc: * if pyparser._payload_exception is not None: # <<<<<<<<<<<<<< * pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) * else: */ __pyx_t_10 = (__pyx_v_pyparser->_payload_exception != Py_None); 
__pyx_t_11 = (__pyx_t_10 != 0); if (__pyx_t_11) { /* "aiohttp/_http_parser.pyx":456 * except BaseException as exc: * if pyparser._payload_exception is not None: * pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) # <<<<<<<<<<<<<< * else: * pyparser._payload.set_exception(exc) */ __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_pyparser->_payload, __pyx_n_s_set_exception); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 456, __pyx_L14_error) __Pyx_GOTREF(__pyx_t_7); __pyx_t_13 = PyTuple_New(1); if (unlikely(!__pyx_t_13)) __PYX_ERR(0, 456, __pyx_L14_error) __Pyx_GOTREF(__pyx_t_13); __Pyx_INCREF(__pyx_v_exc); __Pyx_GIVEREF(__pyx_v_exc); PyTuple_SET_ITEM(__pyx_t_13, 0, __pyx_v_exc); __pyx_t_14 = __Pyx_PyObject_Call(((PyObject *)(&PyUnicode_Type)), __pyx_t_13, NULL); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 456, __pyx_L14_error) __Pyx_GOTREF(__pyx_t_14); __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; __Pyx_INCREF(__pyx_v_pyparser->_payload_exception); __pyx_t_13 = __pyx_v_pyparser->_payload_exception; __pyx_t_15 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_13))) { __pyx_t_15 = PyMethod_GET_SELF(__pyx_t_13); if (likely(__pyx_t_15)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_13); __Pyx_INCREF(__pyx_t_15); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_13, function); } } if (!__pyx_t_15) { __pyx_t_12 = __Pyx_PyObject_CallOneArg(__pyx_t_13, __pyx_t_14); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 456, __pyx_L14_error) __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; __Pyx_GOTREF(__pyx_t_12); } else { #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_13)) { PyObject *__pyx_temp[2] = {__pyx_t_15, __pyx_t_14}; __pyx_t_12 = __Pyx_PyFunction_FastCall(__pyx_t_13, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 456, __pyx_L14_error) __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; __Pyx_GOTREF(__pyx_t_12); __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_13)) { PyObject 
*__pyx_temp[2] = {__pyx_t_15, __pyx_t_14}; __pyx_t_12 = __Pyx_PyCFunction_FastCall(__pyx_t_13, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 456, __pyx_L14_error) __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; __Pyx_GOTREF(__pyx_t_12); __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; } else #endif { __pyx_t_16 = PyTuple_New(1+1); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 456, __pyx_L14_error) __Pyx_GOTREF(__pyx_t_16); __Pyx_GIVEREF(__pyx_t_15); PyTuple_SET_ITEM(__pyx_t_16, 0, __pyx_t_15); __pyx_t_15 = NULL; __Pyx_GIVEREF(__pyx_t_14); PyTuple_SET_ITEM(__pyx_t_16, 0+1, __pyx_t_14); __pyx_t_14 = 0; __pyx_t_12 = __Pyx_PyObject_Call(__pyx_t_13, __pyx_t_16, NULL); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 456, __pyx_L14_error) __Pyx_GOTREF(__pyx_t_12); __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; } } __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; __pyx_t_13 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { __pyx_t_13 = PyMethod_GET_SELF(__pyx_t_7); if (likely(__pyx_t_13)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); __Pyx_INCREF(__pyx_t_13); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_7, function); } } if (!__pyx_t_13) { __pyx_t_6 = __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_t_12); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 456, __pyx_L14_error) __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; __Pyx_GOTREF(__pyx_t_6); } else { #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_7)) { PyObject *__pyx_temp[2] = {__pyx_t_13, __pyx_t_12}; __pyx_t_6 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 456, __pyx_L14_error) __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) { PyObject *__pyx_temp[2] = {__pyx_t_13, __pyx_t_12}; __pyx_t_6 = __Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 456, __pyx_L14_error) 
__Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; } else #endif { __pyx_t_16 = PyTuple_New(1+1); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 456, __pyx_L14_error) __Pyx_GOTREF(__pyx_t_16); __Pyx_GIVEREF(__pyx_t_13); PyTuple_SET_ITEM(__pyx_t_16, 0, __pyx_t_13); __pyx_t_13 = NULL; __Pyx_GIVEREF(__pyx_t_12); PyTuple_SET_ITEM(__pyx_t_16, 0+1, __pyx_t_12); __pyx_t_12 = 0; __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_16, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 456, __pyx_L14_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; } } __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; /* "aiohttp/_http_parser.pyx":455 * pyparser._payload.feed_data(body, length) * except BaseException as exc: * if pyparser._payload_exception is not None: # <<<<<<<<<<<<<< * pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) * else: */ goto __pyx_L16; } /* "aiohttp/_http_parser.pyx":458 * pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) * else: * pyparser._payload.set_exception(exc) # <<<<<<<<<<<<<< * pyparser._payload_error = 1 * return -1 */ /*else*/ { __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_pyparser->_payload, __pyx_n_s_set_exception); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 458, __pyx_L14_error) __Pyx_GOTREF(__pyx_t_7); __pyx_t_16 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { __pyx_t_16 = PyMethod_GET_SELF(__pyx_t_7); if (likely(__pyx_t_16)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); __Pyx_INCREF(__pyx_t_16); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_7, function); } } if (!__pyx_t_16) { __pyx_t_6 = __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_v_exc); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 458, __pyx_L14_error) __Pyx_GOTREF(__pyx_t_6); } else { #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_7)) { PyObject *__pyx_temp[2] = {__pyx_t_16, __pyx_v_exc}; __pyx_t_6 = 
__Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 458, __pyx_L14_error) __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; __Pyx_GOTREF(__pyx_t_6); } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) { PyObject *__pyx_temp[2] = {__pyx_t_16, __pyx_v_exc}; __pyx_t_6 = __Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 458, __pyx_L14_error) __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; __Pyx_GOTREF(__pyx_t_6); } else #endif { __pyx_t_12 = PyTuple_New(1+1); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 458, __pyx_L14_error) __Pyx_GOTREF(__pyx_t_12); __Pyx_GIVEREF(__pyx_t_16); PyTuple_SET_ITEM(__pyx_t_12, 0, __pyx_t_16); __pyx_t_16 = NULL; __Pyx_INCREF(__pyx_v_exc); __Pyx_GIVEREF(__pyx_v_exc); PyTuple_SET_ITEM(__pyx_t_12, 0+1, __pyx_v_exc); __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_12, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 458, __pyx_L14_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; } } __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; } __pyx_L16:; /* "aiohttp/_http_parser.pyx":459 * else: * pyparser._payload.set_exception(exc) * pyparser._payload_error = 1 # <<<<<<<<<<<<<< * return -1 * else: */ __pyx_v_pyparser->_payload_error = 1; /* "aiohttp/_http_parser.pyx":460 * pyparser._payload.set_exception(exc) * pyparser._payload_error = 1 * return -1 # <<<<<<<<<<<<<< * else: * return 0 */ __pyx_r = -1; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; goto __pyx_L13_return; } /* "aiohttp/_http_parser.pyx":454 * try: * pyparser._payload.feed_data(body, length) * except BaseException as exc: # <<<<<<<<<<<<<< * if pyparser._payload_exception is not None: * pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) */ /*finally:*/ { __pyx_L14_error:; /*exception exit:*/{ __Pyx_PyThreadState_declare 
__Pyx_PyThreadState_assign __pyx_t_19 = 0; __pyx_t_20 = 0; __pyx_t_21 = 0; __pyx_t_22 = 0; __pyx_t_23 = 0; __pyx_t_24 = 0; __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_22, &__pyx_t_23, &__pyx_t_24); if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_19, &__pyx_t_20, &__pyx_t_21) < 0)) __Pyx_ErrFetch(&__pyx_t_19, &__pyx_t_20, &__pyx_t_21); __Pyx_XGOTREF(__pyx_t_19); __Pyx_XGOTREF(__pyx_t_20); __Pyx_XGOTREF(__pyx_t_21); __Pyx_XGOTREF(__pyx_t_22); __Pyx_XGOTREF(__pyx_t_23); __Pyx_XGOTREF(__pyx_t_24); __pyx_t_8 = __pyx_lineno; __pyx_t_17 = __pyx_clineno; __pyx_t_18 = __pyx_filename; { __Pyx_DECREF(__pyx_v_exc); __pyx_v_exc = NULL; } if (PY_MAJOR_VERSION >= 3) { __Pyx_XGIVEREF(__pyx_t_22); __Pyx_XGIVEREF(__pyx_t_23); __Pyx_XGIVEREF(__pyx_t_24); __Pyx_ExceptionReset(__pyx_t_22, __pyx_t_23, __pyx_t_24); } __Pyx_XGIVEREF(__pyx_t_19); __Pyx_XGIVEREF(__pyx_t_20); __Pyx_XGIVEREF(__pyx_t_21); __Pyx_ErrRestore(__pyx_t_19, __pyx_t_20, __pyx_t_21); __pyx_t_19 = 0; __pyx_t_20 = 0; __pyx_t_21 = 0; __pyx_t_22 = 0; __pyx_t_23 = 0; __pyx_t_24 = 0; __pyx_lineno = __pyx_t_8; __pyx_clineno = __pyx_t_17; __pyx_filename = __pyx_t_18; goto __pyx_L5_except_error; } __pyx_L13_return: { __pyx_t_17 = __pyx_r; __Pyx_DECREF(__pyx_v_exc); __pyx_v_exc = NULL; __pyx_r = __pyx_t_17; goto __pyx_L6_except_return; } } } goto __pyx_L5_except_error; __pyx_L5_except_error:; /* "aiohttp/_http_parser.pyx":452 * cdef HttpParser pyparser = parser.data * cdef bytes body = at[:length] * try: # <<<<<<<<<<<<<< * pyparser._payload.feed_data(body, length) * except BaseException as exc: */ __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_XGIVEREF(__pyx_t_4); 
__Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); goto __pyx_L1_error; __pyx_L6_except_return:; __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_XGIVEREF(__pyx_t_4); __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); goto __pyx_L0; } /* "aiohttp/_http_parser.pyx":448 * * * cdef int cb_on_body(cparser.http_parser* parser, # <<<<<<<<<<<<<< * const char *at, size_t length) except -1: * cdef HttpParser pyparser = parser.data */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_5); __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_7); __Pyx_XDECREF(__pyx_t_9); __Pyx_XDECREF(__pyx_t_12); __Pyx_XDECREF(__pyx_t_13); __Pyx_XDECREF(__pyx_t_14); __Pyx_XDECREF(__pyx_t_15); __Pyx_XDECREF(__pyx_t_16); __Pyx_AddTraceback("aiohttp._http_parser.cb_on_body", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); __Pyx_XDECREF(__pyx_v_body); __Pyx_XDECREF(__pyx_v_exc); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":465 * * * cdef int cb_on_message_complete(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< * cdef HttpParser pyparser = parser.data * try: */ static int __pyx_f_7aiohttp_12_http_parser_cb_on_message_complete(struct http_parser *__pyx_v_parser) { struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; PyObject *__pyx_v_exc = NULL; int __pyx_r; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; PyObject *__pyx_t_4 = NULL; int __pyx_t_5; PyObject *__pyx_t_6 = NULL; PyObject *__pyx_t_7 = NULL; __Pyx_RefNannySetupContext("cb_on_message_complete", 0); /* "aiohttp/_http_parser.pyx":466 * * cdef int cb_on_message_complete(cparser.http_parser* parser) except -1: * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< * try: * pyparser._started = False */ __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); __Pyx_INCREF(__pyx_t_1); __pyx_v_pyparser = 
((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":467 * cdef int cb_on_message_complete(cparser.http_parser* parser) except -1: * cdef HttpParser pyparser = parser.data * try: # <<<<<<<<<<<<<< * pyparser._started = False * pyparser._on_message_complete() */ { __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); __Pyx_XGOTREF(__pyx_t_2); __Pyx_XGOTREF(__pyx_t_3); __Pyx_XGOTREF(__pyx_t_4); /*try:*/ { /* "aiohttp/_http_parser.pyx":468 * cdef HttpParser pyparser = parser.data * try: * pyparser._started = False # <<<<<<<<<<<<<< * pyparser._on_message_complete() * except BaseException as exc: */ __pyx_v_pyparser->_started = 0; /* "aiohttp/_http_parser.pyx":469 * try: * pyparser._started = False * pyparser._on_message_complete() # <<<<<<<<<<<<<< * except BaseException as exc: * pyparser._last_error = exc */ __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_message_complete(__pyx_v_pyparser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 469, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":467 * cdef int cb_on_message_complete(cparser.http_parser* parser) except -1: * cdef HttpParser pyparser = parser.data * try: # <<<<<<<<<<<<<< * pyparser._started = False * pyparser._on_message_complete() */ } /* "aiohttp/_http_parser.pyx":474 * return -1 * else: * return 0 # <<<<<<<<<<<<<< * * */ /*else:*/ { __pyx_r = 0; goto __pyx_L6_except_return; } __pyx_L3_error:; __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":470 * pyparser._started = False * pyparser._on_message_complete() * except BaseException as exc: # <<<<<<<<<<<<<< * pyparser._last_error = exc * return -1 */ __pyx_t_5 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); if (__pyx_t_5) { __Pyx_AddTraceback("aiohttp._http_parser.cb_on_message_complete", 
__pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(0, 470, __pyx_L5_except_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); __Pyx_INCREF(__pyx_t_6); __pyx_v_exc = __pyx_t_6; /*try:*/ { /* "aiohttp/_http_parser.pyx":471 * pyparser._on_message_complete() * except BaseException as exc: * pyparser._last_error = exc # <<<<<<<<<<<<<< * return -1 * else: */ __Pyx_INCREF(__pyx_v_exc); __Pyx_GIVEREF(__pyx_v_exc); __Pyx_GOTREF(__pyx_v_pyparser->_last_error); __Pyx_DECREF(__pyx_v_pyparser->_last_error); __pyx_v_pyparser->_last_error = __pyx_v_exc; /* "aiohttp/_http_parser.pyx":472 * except BaseException as exc: * pyparser._last_error = exc * return -1 # <<<<<<<<<<<<<< * else: * return 0 */ __pyx_r = -1; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; goto __pyx_L13_return; } /* "aiohttp/_http_parser.pyx":470 * pyparser._started = False * pyparser._on_message_complete() * except BaseException as exc: # <<<<<<<<<<<<<< * pyparser._last_error = exc * return -1 */ /*finally:*/ { __pyx_L13_return: { __pyx_t_5 = __pyx_r; __Pyx_DECREF(__pyx_v_exc); __pyx_v_exc = NULL; __pyx_r = __pyx_t_5; goto __pyx_L6_except_return; } } } goto __pyx_L5_except_error; __pyx_L5_except_error:; /* "aiohttp/_http_parser.pyx":467 * cdef int cb_on_message_complete(cparser.http_parser* parser) except -1: * cdef HttpParser pyparser = parser.data * try: # <<<<<<<<<<<<<< * pyparser._started = False * pyparser._on_message_complete() */ __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_XGIVEREF(__pyx_t_4); __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); goto __pyx_L1_error; __pyx_L6_except_return:; __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_XGIVEREF(__pyx_t_4); __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); goto __pyx_L0; } /* "aiohttp/_http_parser.pyx":465 * * * cdef int 
cb_on_message_complete(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< * cdef HttpParser pyparser = parser.data * try: */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_7); __Pyx_AddTraceback("aiohttp._http_parser.cb_on_message_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); __Pyx_XDECREF(__pyx_v_exc); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":477 * * * cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< * cdef HttpParser pyparser = parser.data * try: */ static int __pyx_f_7aiohttp_12_http_parser_cb_on_chunk_header(struct http_parser *__pyx_v_parser) { struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *__pyx_v_pyparser = 0; PyObject *__pyx_v_exc = NULL; int __pyx_r; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; PyObject *__pyx_t_4 = NULL; int __pyx_t_5; PyObject *__pyx_t_6 = NULL; PyObject *__pyx_t_7 = NULL; __Pyx_RefNannySetupContext("cb_on_chunk_header", 0); /* "aiohttp/_http_parser.pyx":478 * * cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1: * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< * try: * pyparser._on_chunk_header() */ __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); __Pyx_INCREF(__pyx_t_1); __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":479 * cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1: * cdef HttpParser pyparser = parser.data * try: # <<<<<<<<<<<<<< * pyparser._on_chunk_header() * except BaseException as exc: */ { __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); __Pyx_XGOTREF(__pyx_t_2); __Pyx_XGOTREF(__pyx_t_3); __Pyx_XGOTREF(__pyx_t_4); /*try:*/ { /* 
"aiohttp/_http_parser.pyx":480 * cdef HttpParser pyparser = parser.data * try: * pyparser._on_chunk_header() # <<<<<<<<<<<<<< * except BaseException as exc: * pyparser._last_error = exc */ __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_chunk_header(__pyx_v_pyparser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 480, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":479 * cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1: * cdef HttpParser pyparser = parser.data * try: # <<<<<<<<<<<<<< * pyparser._on_chunk_header() * except BaseException as exc: */ } /* "aiohttp/_http_parser.pyx":485 * return -1 * else: * return 0 # <<<<<<<<<<<<<< * * */ /*else:*/ { __pyx_r = 0; goto __pyx_L6_except_return; } __pyx_L3_error:; __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":481 * try: * pyparser._on_chunk_header() * except BaseException as exc: # <<<<<<<<<<<<<< * pyparser._last_error = exc * return -1 */ __pyx_t_5 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); if (__pyx_t_5) { __Pyx_AddTraceback("aiohttp._http_parser.cb_on_chunk_header", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(0, 481, __pyx_L5_except_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); __Pyx_INCREF(__pyx_t_6); __pyx_v_exc = __pyx_t_6; /*try:*/ { /* "aiohttp/_http_parser.pyx":482 * pyparser._on_chunk_header() * except BaseException as exc: * pyparser._last_error = exc # <<<<<<<<<<<<<< * return -1 * else: */ __Pyx_INCREF(__pyx_v_exc); __Pyx_GIVEREF(__pyx_v_exc); __Pyx_GOTREF(__pyx_v_pyparser->_last_error); __Pyx_DECREF(__pyx_v_pyparser->_last_error); __pyx_v_pyparser->_last_error = __pyx_v_exc; /* "aiohttp/_http_parser.pyx":483 * except BaseException as exc: * pyparser._last_error = exc * return -1 # <<<<<<<<<<<<<< * else: * return 0 */ 
__pyx_r = -1; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; goto __pyx_L13_return; } /* "aiohttp/_http_parser.pyx":481 * try: * pyparser._on_chunk_header() * except BaseException as exc: # <<<<<<<<<<<<<< * pyparser._last_error = exc * return -1 */ /*finally:*/ { __pyx_L13_return: { __pyx_t_5 = __pyx_r; __Pyx_DECREF(__pyx_v_exc); __pyx_v_exc = NULL; __pyx_r = __pyx_t_5; goto __pyx_L6_except_return; } } } goto __pyx_L5_except_error; __pyx_L5_except_error:; /* "aiohttp/_http_parser.pyx":479 * cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1: * cdef HttpParser pyparser = parser.data * try: # <<<<<<<<<<<<<< * pyparser._on_chunk_header() * except BaseException as exc: */ __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_XGIVEREF(__pyx_t_4); __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); goto __pyx_L1_error; __pyx_L6_except_return:; __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_XGIVEREF(__pyx_t_4); __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); goto __pyx_L0; } /* "aiohttp/_http_parser.pyx":477 * * * cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< * cdef HttpParser pyparser = parser.data * try: */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_7); __Pyx_AddTraceback("aiohttp._http_parser.cb_on_chunk_header", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); __Pyx_XDECREF(__pyx_v_exc); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":488 * * * cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< * cdef HttpParser pyparser = parser.data * try: */ static int __pyx_f_7aiohttp_12_http_parser_cb_on_chunk_complete(struct http_parser *__pyx_v_parser) { struct __pyx_obj_7aiohttp_12_http_parser_HttpParser 
*__pyx_v_pyparser = 0; PyObject *__pyx_v_exc = NULL; int __pyx_r; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; PyObject *__pyx_t_4 = NULL; int __pyx_t_5; PyObject *__pyx_t_6 = NULL; PyObject *__pyx_t_7 = NULL; __Pyx_RefNannySetupContext("cb_on_chunk_complete", 0); /* "aiohttp/_http_parser.pyx":489 * * cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1: * cdef HttpParser pyparser = parser.data # <<<<<<<<<<<<<< * try: * pyparser._on_chunk_complete() */ __pyx_t_1 = ((PyObject *)__pyx_v_parser->data); __Pyx_INCREF(__pyx_t_1); __pyx_v_pyparser = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":490 * cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1: * cdef HttpParser pyparser = parser.data * try: # <<<<<<<<<<<<<< * pyparser._on_chunk_complete() * except BaseException as exc: */ { __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); __Pyx_XGOTREF(__pyx_t_2); __Pyx_XGOTREF(__pyx_t_3); __Pyx_XGOTREF(__pyx_t_4); /*try:*/ { /* "aiohttp/_http_parser.pyx":491 * cdef HttpParser pyparser = parser.data * try: * pyparser._on_chunk_complete() # <<<<<<<<<<<<<< * except BaseException as exc: * pyparser._last_error = exc */ __pyx_t_1 = ((struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser *)__pyx_v_pyparser->__pyx_vtab)->_on_chunk_complete(__pyx_v_pyparser); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 491, __pyx_L3_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":490 * cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1: * cdef HttpParser pyparser = parser.data * try: # <<<<<<<<<<<<<< * pyparser._on_chunk_complete() * except BaseException as exc: */ } /* "aiohttp/_http_parser.pyx":496 * return -1 * else: * return 0 # <<<<<<<<<<<<<< * * */ /*else:*/ { __pyx_r = 0; goto __pyx_L6_except_return; 
} __pyx_L3_error:; __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":492 * try: * pyparser._on_chunk_complete() * except BaseException as exc: # <<<<<<<<<<<<<< * pyparser._last_error = exc * return -1 */ __pyx_t_5 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_BaseException); if (__pyx_t_5) { __Pyx_AddTraceback("aiohttp._http_parser.cb_on_chunk_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(0, 492, __pyx_L5_except_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_GOTREF(__pyx_t_6); __Pyx_GOTREF(__pyx_t_7); __Pyx_INCREF(__pyx_t_6); __pyx_v_exc = __pyx_t_6; /*try:*/ { /* "aiohttp/_http_parser.pyx":493 * pyparser._on_chunk_complete() * except BaseException as exc: * pyparser._last_error = exc # <<<<<<<<<<<<<< * return -1 * else: */ __Pyx_INCREF(__pyx_v_exc); __Pyx_GIVEREF(__pyx_v_exc); __Pyx_GOTREF(__pyx_v_pyparser->_last_error); __Pyx_DECREF(__pyx_v_pyparser->_last_error); __pyx_v_pyparser->_last_error = __pyx_v_exc; /* "aiohttp/_http_parser.pyx":494 * except BaseException as exc: * pyparser._last_error = exc * return -1 # <<<<<<<<<<<<<< * else: * return 0 */ __pyx_r = -1; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; goto __pyx_L13_return; } /* "aiohttp/_http_parser.pyx":492 * try: * pyparser._on_chunk_complete() * except BaseException as exc: # <<<<<<<<<<<<<< * pyparser._last_error = exc * return -1 */ /*finally:*/ { __pyx_L13_return: { __pyx_t_5 = __pyx_r; __Pyx_DECREF(__pyx_v_exc); __pyx_v_exc = NULL; __pyx_r = __pyx_t_5; goto __pyx_L6_except_return; } } } goto __pyx_L5_except_error; __pyx_L5_except_error:; /* "aiohttp/_http_parser.pyx":490 * cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1: * cdef HttpParser pyparser = parser.data * try: # <<<<<<<<<<<<<< * pyparser._on_chunk_complete() * except BaseException as exc: */ __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); 
__Pyx_XGIVEREF(__pyx_t_4); __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); goto __pyx_L1_error; __pyx_L6_except_return:; __Pyx_XGIVEREF(__pyx_t_2); __Pyx_XGIVEREF(__pyx_t_3); __Pyx_XGIVEREF(__pyx_t_4); __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); goto __pyx_L0; } /* "aiohttp/_http_parser.pyx":488 * * * cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1: # <<<<<<<<<<<<<< * cdef HttpParser pyparser = parser.data * try: */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_7); __Pyx_AddTraceback("aiohttp._http_parser.cb_on_chunk_complete", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = -1; __pyx_L0:; __Pyx_XDECREF((PyObject *)__pyx_v_pyparser); __Pyx_XDECREF(__pyx_v_exc); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":499 * * * cdef parser_error_from_errno(cparser.http_errno errno): # <<<<<<<<<<<<<< * cdef bytes desc = cparser.http_errno_description(errno) * */ static PyObject *__pyx_f_7aiohttp_12_http_parser_parser_error_from_errno(enum http_errno __pyx_v_errno) { PyObject *__pyx_v_desc = 0; PyObject *__pyx_v_cls = NULL; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; PyObject *__pyx_t_4 = NULL; PyObject *__pyx_t_5 = NULL; __Pyx_RefNannySetupContext("parser_error_from_errno", 0); /* "aiohttp/_http_parser.pyx":500 * * cdef parser_error_from_errno(cparser.http_errno errno): * cdef bytes desc = cparser.http_errno_description(errno) # <<<<<<<<<<<<<< * * if errno in (cparser.HPE_CB_message_begin, */ __pyx_t_1 = __Pyx_PyBytes_FromString(http_errno_description(__pyx_v_errno)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 500, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_v_desc = ((PyObject*)__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":502 * cdef bytes desc = cparser.http_errno_description(errno) * * if errno in 
(cparser.HPE_CB_message_begin, # <<<<<<<<<<<<<< * cparser.HPE_CB_url, * cparser.HPE_CB_header_field, */ switch (__pyx_v_errno) { case HPE_CB_message_begin: /* "aiohttp/_http_parser.pyx":503 * * if errno in (cparser.HPE_CB_message_begin, * cparser.HPE_CB_url, # <<<<<<<<<<<<<< * cparser.HPE_CB_header_field, * cparser.HPE_CB_header_value, */ case HPE_CB_url: /* "aiohttp/_http_parser.pyx":504 * if errno in (cparser.HPE_CB_message_begin, * cparser.HPE_CB_url, * cparser.HPE_CB_header_field, # <<<<<<<<<<<<<< * cparser.HPE_CB_header_value, * cparser.HPE_CB_headers_complete, */ case HPE_CB_header_field: /* "aiohttp/_http_parser.pyx":505 * cparser.HPE_CB_url, * cparser.HPE_CB_header_field, * cparser.HPE_CB_header_value, # <<<<<<<<<<<<<< * cparser.HPE_CB_headers_complete, * cparser.HPE_CB_body, */ case HPE_CB_header_value: /* "aiohttp/_http_parser.pyx":506 * cparser.HPE_CB_header_field, * cparser.HPE_CB_header_value, * cparser.HPE_CB_headers_complete, # <<<<<<<<<<<<<< * cparser.HPE_CB_body, * cparser.HPE_CB_message_complete, */ case HPE_CB_headers_complete: /* "aiohttp/_http_parser.pyx":507 * cparser.HPE_CB_header_value, * cparser.HPE_CB_headers_complete, * cparser.HPE_CB_body, # <<<<<<<<<<<<<< * cparser.HPE_CB_message_complete, * cparser.HPE_CB_status, */ case HPE_CB_body: /* "aiohttp/_http_parser.pyx":508 * cparser.HPE_CB_headers_complete, * cparser.HPE_CB_body, * cparser.HPE_CB_message_complete, # <<<<<<<<<<<<<< * cparser.HPE_CB_status, * cparser.HPE_CB_chunk_header, */ case HPE_CB_message_complete: /* "aiohttp/_http_parser.pyx":509 * cparser.HPE_CB_body, * cparser.HPE_CB_message_complete, * cparser.HPE_CB_status, # <<<<<<<<<<<<<< * cparser.HPE_CB_chunk_header, * cparser.HPE_CB_chunk_complete): */ case HPE_CB_status: /* "aiohttp/_http_parser.pyx":510 * cparser.HPE_CB_message_complete, * cparser.HPE_CB_status, * cparser.HPE_CB_chunk_header, # <<<<<<<<<<<<<< * cparser.HPE_CB_chunk_complete): * cls = BadHttpMessage */ case HPE_CB_chunk_header: /* 
"aiohttp/_http_parser.pyx":511 * cparser.HPE_CB_status, * cparser.HPE_CB_chunk_header, * cparser.HPE_CB_chunk_complete): # <<<<<<<<<<<<<< * cls = BadHttpMessage * */ case HPE_CB_chunk_complete: /* "aiohttp/_http_parser.pyx":512 * cparser.HPE_CB_chunk_header, * cparser.HPE_CB_chunk_complete): * cls = BadHttpMessage # <<<<<<<<<<<<<< * * elif errno == cparser.HPE_INVALID_STATUS: */ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_BadHttpMessage); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 512, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_v_cls = __pyx_t_1; __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":502 * cdef bytes desc = cparser.http_errno_description(errno) * * if errno in (cparser.HPE_CB_message_begin, # <<<<<<<<<<<<<< * cparser.HPE_CB_url, * cparser.HPE_CB_header_field, */ break; /* "aiohttp/_http_parser.pyx":514 * cls = BadHttpMessage * * elif errno == cparser.HPE_INVALID_STATUS: # <<<<<<<<<<<<<< * cls = BadStatusLine * */ case HPE_INVALID_STATUS: /* "aiohttp/_http_parser.pyx":515 * * elif errno == cparser.HPE_INVALID_STATUS: * cls = BadStatusLine # <<<<<<<<<<<<<< * * elif errno == cparser.HPE_INVALID_METHOD: */ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_BadStatusLine); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 515, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_v_cls = __pyx_t_1; __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":514 * cls = BadHttpMessage * * elif errno == cparser.HPE_INVALID_STATUS: # <<<<<<<<<<<<<< * cls = BadStatusLine * */ break; /* "aiohttp/_http_parser.pyx":517 * cls = BadStatusLine * * elif errno == cparser.HPE_INVALID_METHOD: # <<<<<<<<<<<<<< * cls = BadStatusLine * */ case HPE_INVALID_METHOD: /* "aiohttp/_http_parser.pyx":518 * * elif errno == cparser.HPE_INVALID_METHOD: * cls = BadStatusLine # <<<<<<<<<<<<<< * * elif errno == cparser.HPE_INVALID_URL: */ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_BadStatusLine); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 518, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_v_cls = __pyx_t_1; __pyx_t_1 = 0; 
/* "aiohttp/_http_parser.pyx":517 * cls = BadStatusLine * * elif errno == cparser.HPE_INVALID_METHOD: # <<<<<<<<<<<<<< * cls = BadStatusLine * */ break; /* "aiohttp/_http_parser.pyx":520 * cls = BadStatusLine * * elif errno == cparser.HPE_INVALID_URL: # <<<<<<<<<<<<<< * cls = InvalidURLError * */ case HPE_INVALID_URL: /* "aiohttp/_http_parser.pyx":521 * * elif errno == cparser.HPE_INVALID_URL: * cls = InvalidURLError # <<<<<<<<<<<<<< * * else: */ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_InvalidURLError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 521, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_v_cls = __pyx_t_1; __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":520 * cls = BadStatusLine * * elif errno == cparser.HPE_INVALID_URL: # <<<<<<<<<<<<<< * cls = InvalidURLError * */ break; default: /* "aiohttp/_http_parser.pyx":524 * * else: * cls = BadHttpMessage # <<<<<<<<<<<<<< * * return cls(desc.decode('latin-1')) */ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_BadHttpMessage); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 524, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_v_cls = __pyx_t_1; __pyx_t_1 = 0; break; } /* "aiohttp/_http_parser.pyx":526 * cls = BadHttpMessage * * return cls(desc.decode('latin-1')) # <<<<<<<<<<<<<< * * */ __Pyx_XDECREF(__pyx_r); __pyx_t_2 = __Pyx_decode_bytes(__pyx_v_desc, 0, PY_SSIZE_T_MAX, NULL, NULL, PyUnicode_DecodeLatin1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 526, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_INCREF(__pyx_v_cls); __pyx_t_3 = __pyx_v_cls; __pyx_t_4 = NULL; if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); if (likely(__pyx_t_4)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); __Pyx_INCREF(__pyx_t_4); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_3, function); } } if (!__pyx_t_4) { __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 526, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; 
__Pyx_GOTREF(__pyx_t_1); } else { #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_3)) { PyObject *__pyx_temp[2] = {__pyx_t_4, __pyx_t_2}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 526, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { PyObject *__pyx_temp[2] = {__pyx_t_4, __pyx_t_2}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 526, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; } else #endif { __pyx_t_5 = PyTuple_New(1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 526, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_4); __pyx_t_4 = NULL; __Pyx_GIVEREF(__pyx_t_2); PyTuple_SET_ITEM(__pyx_t_5, 0+1, __pyx_t_2); __pyx_t_2 = 0; __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 526, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; } } __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0; /* "aiohttp/_http_parser.pyx":499 * * * cdef parser_error_from_errno(cparser.http_errno errno): # <<<<<<<<<<<<<< * cdef bytes desc = cparser.http_errno_description(errno) * */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_5); __Pyx_AddTraceback("aiohttp._http_parser.parser_error_from_errno", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = 0; __pyx_L0:; __Pyx_XDECREF(__pyx_v_desc); __Pyx_XDECREF(__pyx_v_cls); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":529 * * * def 
parse_url(url): # <<<<<<<<<<<<<< * cdef: * Py_buffer py_buf */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_12_http_parser_1parse_url(PyObject *__pyx_self, PyObject *__pyx_v_url); /*proto*/ static PyMethodDef __pyx_mdef_7aiohttp_12_http_parser_1parse_url = {"parse_url", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_1parse_url, METH_O, 0}; static PyObject *__pyx_pw_7aiohttp_12_http_parser_1parse_url(PyObject *__pyx_self, PyObject *__pyx_v_url) { PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("parse_url (wrapper)", 0); __pyx_r = __pyx_pf_7aiohttp_12_http_parser_parse_url(__pyx_self, ((PyObject *)__pyx_v_url)); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_12_http_parser_parse_url(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_url) { Py_buffer __pyx_v_py_buf; char *__pyx_v_buf_data; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations int __pyx_t_1; PyObject *__pyx_t_2 = NULL; PyObject *__pyx_t_3 = NULL; PyObject *__pyx_t_4 = NULL; PyObject *__pyx_t_5 = NULL; PyObject *__pyx_t_6 = NULL; PyObject *__pyx_t_7 = NULL; int __pyx_t_8; char const *__pyx_t_9; PyObject *__pyx_t_10 = NULL; PyObject *__pyx_t_11 = NULL; PyObject *__pyx_t_12 = NULL; PyObject *__pyx_t_13 = NULL; PyObject *__pyx_t_14 = NULL; PyObject *__pyx_t_15 = NULL; __Pyx_RefNannySetupContext("parse_url", 0); /* "aiohttp/_http_parser.pyx":534 * char* buf_data * * PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE) # <<<<<<<<<<<<<< * try: * buf_data = py_buf.buf */ __pyx_t_1 = PyObject_GetBuffer(__pyx_v_url, (&__pyx_v_py_buf), PyBUF_SIMPLE); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(0, 534, __pyx_L1_error) /* "aiohttp/_http_parser.pyx":535 * * PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE) * try: # <<<<<<<<<<<<<< * buf_data = py_buf.buf * return _parse_url(buf_data, py_buf.len) */ /*try:*/ { /* "aiohttp/_http_parser.pyx":536 * PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE) * try: * buf_data = py_buf.buf 
# <<<<<<<<<<<<<< * return _parse_url(buf_data, py_buf.len) * finally: */ __pyx_v_buf_data = ((char *)__pyx_v_py_buf.buf); /* "aiohttp/_http_parser.pyx":537 * try: * buf_data = py_buf.buf * return _parse_url(buf_data, py_buf.len) # <<<<<<<<<<<<<< * finally: * PyBuffer_Release(&py_buf) */ __Pyx_XDECREF(__pyx_r); __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_parse_url); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 537, __pyx_L4_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = __Pyx_PyBytes_FromString(__pyx_v_buf_data); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 537, __pyx_L4_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_5 = PyInt_FromSsize_t(__pyx_v_py_buf.len); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 537, __pyx_L4_error) __Pyx_GOTREF(__pyx_t_5); __pyx_t_6 = NULL; __pyx_t_1 = 0; if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_3); if (likely(__pyx_t_6)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); __Pyx_INCREF(__pyx_t_6); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_3, function); __pyx_t_1 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_3)) { PyObject *__pyx_temp[3] = {__pyx_t_6, __pyx_t_4, __pyx_t_5}; __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_1, 2+__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 537, __pyx_L4_error) __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { PyObject *__pyx_temp[3] = {__pyx_t_6, __pyx_t_4, __pyx_t_5}; __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_1, 2+__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 537, __pyx_L4_error) __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; } else #endif { __pyx_t_7 = PyTuple_New(2+__pyx_t_1); if (unlikely(!__pyx_t_7)) 
__PYX_ERR(0, 537, __pyx_L4_error) __Pyx_GOTREF(__pyx_t_7); if (__pyx_t_6) { __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_6); __pyx_t_6 = NULL; } __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_1, __pyx_t_4); __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_1, __pyx_t_5); __pyx_t_4 = 0; __pyx_t_5 = 0; __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_7, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 537, __pyx_L4_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; } __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_r = __pyx_t_2; __pyx_t_2 = 0; goto __pyx_L3_return; } /* "aiohttp/_http_parser.pyx":539 * return _parse_url(buf_data, py_buf.len) * finally: * PyBuffer_Release(&py_buf) # <<<<<<<<<<<<<< * * */ /*finally:*/ { __pyx_L4_error:; /*exception exit:*/{ __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_13, &__pyx_t_14, &__pyx_t_15); if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_10, &__pyx_t_11, &__pyx_t_12) < 0)) __Pyx_ErrFetch(&__pyx_t_10, &__pyx_t_11, &__pyx_t_12); __Pyx_XGOTREF(__pyx_t_10); __Pyx_XGOTREF(__pyx_t_11); __Pyx_XGOTREF(__pyx_t_12); __Pyx_XGOTREF(__pyx_t_13); __Pyx_XGOTREF(__pyx_t_14); __Pyx_XGOTREF(__pyx_t_15); __pyx_t_1 = __pyx_lineno; __pyx_t_8 = __pyx_clineno; __pyx_t_9 = __pyx_filename; { PyBuffer_Release((&__pyx_v_py_buf)); } if (PY_MAJOR_VERSION >= 3) { __Pyx_XGIVEREF(__pyx_t_13); __Pyx_XGIVEREF(__pyx_t_14); __Pyx_XGIVEREF(__pyx_t_15); __Pyx_ExceptionReset(__pyx_t_13, __pyx_t_14, __pyx_t_15); } __Pyx_XGIVEREF(__pyx_t_10); __Pyx_XGIVEREF(__pyx_t_11); 
__Pyx_XGIVEREF(__pyx_t_12); __Pyx_ErrRestore(__pyx_t_10, __pyx_t_11, __pyx_t_12); __pyx_t_10 = 0; __pyx_t_11 = 0; __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; __pyx_lineno = __pyx_t_1; __pyx_clineno = __pyx_t_8; __pyx_filename = __pyx_t_9; goto __pyx_L1_error; } __pyx_L3_return: { __pyx_t_15 = __pyx_r; __pyx_r = 0; PyBuffer_Release((&__pyx_v_py_buf)); __pyx_r = __pyx_t_15; __pyx_t_15 = 0; goto __pyx_L0; } } /* "aiohttp/_http_parser.pyx":529 * * * def parse_url(url): # <<<<<<<<<<<<<< * cdef: * Py_buffer py_buf */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_5); __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_7); __Pyx_AddTraceback("aiohttp._http_parser.parse_url", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* "aiohttp/_http_parser.pyx":542 * * * def _parse_url(char* buf_data, size_t length): # <<<<<<<<<<<<<< * cdef: * cparser.http_parser_url* parsed */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_12_http_parser_3_parse_url(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static PyMethodDef __pyx_mdef_7aiohttp_12_http_parser_3_parse_url = {"_parse_url", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_3_parse_url, METH_VARARGS|METH_KEYWORDS, 0}; static PyObject *__pyx_pw_7aiohttp_12_http_parser_3_parse_url(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { char *__pyx_v_buf_data; size_t __pyx_v_length; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_parse_url (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_buf_data,&__pyx_n_s_length,0}; PyObject* values[2] = {0,0}; if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); 
CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_buf_data)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_length)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("_parse_url", 1, 2, 2, 1); __PYX_ERR(0, 542, __pyx_L3_error) } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "_parse_url") < 0)) __PYX_ERR(0, 542, __pyx_L3_error) } } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { goto __pyx_L5_argtuple_error; } else { values[0] = PyTuple_GET_ITEM(__pyx_args, 0); values[1] = PyTuple_GET_ITEM(__pyx_args, 1); } __pyx_v_buf_data = __Pyx_PyObject_AsWritableString(values[0]); if (unlikely((!__pyx_v_buf_data) && PyErr_Occurred())) __PYX_ERR(0, 542, __pyx_L3_error) __pyx_v_length = __Pyx_PyInt_As_size_t(values[1]); if (unlikely((__pyx_v_length == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 542, __pyx_L3_error) } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("_parse_url", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 542, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("aiohttp._http_parser._parse_url", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_7aiohttp_12_http_parser_2_parse_url(__pyx_self, __pyx_v_buf_data, __pyx_v_length); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_12_http_parser_2_parse_url(CYTHON_UNUSED PyObject *__pyx_self, char *__pyx_v_buf_data, size_t __pyx_v_length) { struct http_parser_url *__pyx_v_parsed; int __pyx_v_res; PyObject *__pyx_v_schema = 0; 
PyObject *__pyx_v_host = 0; PyObject *__pyx_v_port = 0; PyObject *__pyx_v_path = 0; PyObject *__pyx_v_query = 0; PyObject *__pyx_v_fragment = 0; PyObject *__pyx_v_user = 0; PyObject *__pyx_v_password = 0; PyObject *__pyx_v_userinfo = 0; CYTHON_UNUSED PyObject *__pyx_v_result = 0; int __pyx_v_off; int __pyx_v_ln; CYTHON_UNUSED PyObject *__pyx_v_sep = NULL; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations int __pyx_t_1; uint16_t __pyx_t_2; PyObject *__pyx_t_3 = NULL; PyObject *__pyx_t_4 = NULL; PyObject *__pyx_t_5 = NULL; PyObject *__pyx_t_6 = NULL; PyObject *__pyx_t_7 = NULL; PyObject *(*__pyx_t_8)(PyObject *); PyObject *__pyx_t_9 = NULL; PyObject *__pyx_t_10 = NULL; int __pyx_t_11; int __pyx_t_12; char const *__pyx_t_13; PyObject *__pyx_t_14 = NULL; PyObject *__pyx_t_15 = NULL; PyObject *__pyx_t_16 = NULL; PyObject *__pyx_t_17 = NULL; PyObject *__pyx_t_18 = NULL; PyObject *__pyx_t_19 = NULL; __Pyx_RefNannySetupContext("_parse_url", 0); /* "aiohttp/_http_parser.pyx":546 * cparser.http_parser_url* parsed * int res * str schema = None # <<<<<<<<<<<<<< * str host = None * object port = None */ __Pyx_INCREF(Py_None); __pyx_v_schema = ((PyObject*)Py_None); /* "aiohttp/_http_parser.pyx":547 * int res * str schema = None * str host = None # <<<<<<<<<<<<<< * object port = None * str path = None */ __Pyx_INCREF(Py_None); __pyx_v_host = ((PyObject*)Py_None); /* "aiohttp/_http_parser.pyx":548 * str schema = None * str host = None * object port = None # <<<<<<<<<<<<<< * str path = None * str query = None */ __Pyx_INCREF(Py_None); __pyx_v_port = Py_None; /* "aiohttp/_http_parser.pyx":549 * str host = None * object port = None * str path = None # <<<<<<<<<<<<<< * str query = None * str fragment = None */ __Pyx_INCREF(Py_None); __pyx_v_path = ((PyObject*)Py_None); /* "aiohttp/_http_parser.pyx":550 * object port = None * str path = None * str query = None # <<<<<<<<<<<<<< * str fragment = None * str user = None */ __Pyx_INCREF(Py_None); __pyx_v_query = ((PyObject*)Py_None); /* 
"aiohttp/_http_parser.pyx":551 * str path = None * str query = None * str fragment = None # <<<<<<<<<<<<<< * str user = None * str password = None */ __Pyx_INCREF(Py_None); __pyx_v_fragment = ((PyObject*)Py_None); /* "aiohttp/_http_parser.pyx":552 * str query = None * str fragment = None * str user = None # <<<<<<<<<<<<<< * str password = None * str userinfo = None */ __Pyx_INCREF(Py_None); __pyx_v_user = ((PyObject*)Py_None); /* "aiohttp/_http_parser.pyx":553 * str fragment = None * str user = None * str password = None # <<<<<<<<<<<<<< * str userinfo = None * object result = None */ __Pyx_INCREF(Py_None); __pyx_v_password = ((PyObject*)Py_None); /* "aiohttp/_http_parser.pyx":554 * str user = None * str password = None * str userinfo = None # <<<<<<<<<<<<<< * object result = None * int off */ __Pyx_INCREF(Py_None); __pyx_v_userinfo = ((PyObject*)Py_None); /* "aiohttp/_http_parser.pyx":555 * str password = None * str userinfo = None * object result = None # <<<<<<<<<<<<<< * int off * int ln */ __Pyx_INCREF(Py_None); __pyx_v_result = Py_None; /* "aiohttp/_http_parser.pyx":559 * int ln * * parsed = \ # <<<<<<<<<<<<<< * PyMem_Malloc(sizeof(cparser.http_parser_url)) * if parsed is NULL: */ __pyx_v_parsed = ((struct http_parser_url *)PyMem_Malloc((sizeof(struct http_parser_url)))); /* "aiohttp/_http_parser.pyx":561 * parsed = \ * PyMem_Malloc(sizeof(cparser.http_parser_url)) * if parsed is NULL: # <<<<<<<<<<<<<< * raise MemoryError() * cparser.http_parser_url_init(parsed) */ __pyx_t_1 = ((__pyx_v_parsed == NULL) != 0); if (__pyx_t_1) { /* "aiohttp/_http_parser.pyx":562 * PyMem_Malloc(sizeof(cparser.http_parser_url)) * if parsed is NULL: * raise MemoryError() # <<<<<<<<<<<<<< * cparser.http_parser_url_init(parsed) * try: */ PyErr_NoMemory(); __PYX_ERR(0, 562, __pyx_L1_error) /* "aiohttp/_http_parser.pyx":561 * parsed = \ * PyMem_Malloc(sizeof(cparser.http_parser_url)) * if parsed is NULL: # <<<<<<<<<<<<<< * raise MemoryError() * cparser.http_parser_url_init(parsed) */ } 
/* "aiohttp/_http_parser.pyx":563 * if parsed is NULL: * raise MemoryError() * cparser.http_parser_url_init(parsed) # <<<<<<<<<<<<<< * try: * res = cparser.http_parser_parse_url(buf_data, length, 0, parsed) */ http_parser_url_init(__pyx_v_parsed); /* "aiohttp/_http_parser.pyx":564 * raise MemoryError() * cparser.http_parser_url_init(parsed) * try: # <<<<<<<<<<<<<< * res = cparser.http_parser_parse_url(buf_data, length, 0, parsed) * */ /*try:*/ { /* "aiohttp/_http_parser.pyx":565 * cparser.http_parser_url_init(parsed) * try: * res = cparser.http_parser_parse_url(buf_data, length, 0, parsed) # <<<<<<<<<<<<<< * * if res == 0: */ __pyx_v_res = http_parser_parse_url(__pyx_v_buf_data, __pyx_v_length, 0, __pyx_v_parsed); /* "aiohttp/_http_parser.pyx":567 * res = cparser.http_parser_parse_url(buf_data, length, 0, parsed) * * if res == 0: # <<<<<<<<<<<<<< * if parsed.field_set & (1 << cparser.UF_SCHEMA): * off = parsed.field_data[cparser.UF_SCHEMA].off */ __pyx_t_1 = ((__pyx_v_res == 0) != 0); if (__pyx_t_1) { /* "aiohttp/_http_parser.pyx":568 * * if res == 0: * if parsed.field_set & (1 << cparser.UF_SCHEMA): # <<<<<<<<<<<<<< * off = parsed.field_data[cparser.UF_SCHEMA].off * ln = parsed.field_data[cparser.UF_SCHEMA].len */ __pyx_t_1 = ((__pyx_v_parsed->field_set & (1 << UF_SCHEMA)) != 0); if (__pyx_t_1) { /* "aiohttp/_http_parser.pyx":569 * if res == 0: * if parsed.field_set & (1 << cparser.UF_SCHEMA): * off = parsed.field_data[cparser.UF_SCHEMA].off # <<<<<<<<<<<<<< * ln = parsed.field_data[cparser.UF_SCHEMA].len * schema = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') */ __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_SCHEMA)]).off; __pyx_v_off = __pyx_t_2; /* "aiohttp/_http_parser.pyx":570 * if parsed.field_set & (1 << cparser.UF_SCHEMA): * off = parsed.field_data[cparser.UF_SCHEMA].off * ln = parsed.field_data[cparser.UF_SCHEMA].len # <<<<<<<<<<<<<< * schema = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') * else: */ __pyx_t_2 = 
(__pyx_v_parsed->field_data[((int)UF_SCHEMA)]).len; __pyx_v_ln = __pyx_t_2; /* "aiohttp/_http_parser.pyx":571 * off = parsed.field_data[cparser.UF_SCHEMA].off * ln = parsed.field_data[cparser.UF_SCHEMA].len * schema = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< * else: * schema = '' */ __pyx_t_3 = __Pyx_decode_c_string(__pyx_v_buf_data, __pyx_v_off, (__pyx_v_off + __pyx_v_ln), NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 571, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_3); if (!(likely(PyUnicode_CheckExact(__pyx_t_3))||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(0, 571, __pyx_L5_error) __Pyx_DECREF_SET(__pyx_v_schema, ((PyObject*)__pyx_t_3)); __pyx_t_3 = 0; /* "aiohttp/_http_parser.pyx":568 * * if res == 0: * if parsed.field_set & (1 << cparser.UF_SCHEMA): # <<<<<<<<<<<<<< * off = parsed.field_data[cparser.UF_SCHEMA].off * ln = parsed.field_data[cparser.UF_SCHEMA].len */ goto __pyx_L8; } /* "aiohttp/_http_parser.pyx":573 * schema = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') * else: * schema = '' # <<<<<<<<<<<<<< * * if parsed.field_set & (1 << cparser.UF_HOST): */ /*else*/ { __Pyx_INCREF(__pyx_kp_u__6); __Pyx_DECREF_SET(__pyx_v_schema, __pyx_kp_u__6); } __pyx_L8:; /* "aiohttp/_http_parser.pyx":575 * schema = '' * * if parsed.field_set & (1 << cparser.UF_HOST): # <<<<<<<<<<<<<< * off = parsed.field_data[cparser.UF_HOST].off * ln = parsed.field_data[cparser.UF_HOST].len */ __pyx_t_1 = ((__pyx_v_parsed->field_set & (1 << UF_HOST)) != 0); if (__pyx_t_1) { /* "aiohttp/_http_parser.pyx":576 * * if parsed.field_set & (1 << cparser.UF_HOST): * off = parsed.field_data[cparser.UF_HOST].off # <<<<<<<<<<<<<< * ln = parsed.field_data[cparser.UF_HOST].len * host = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') */ __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_HOST)]).off; __pyx_v_off = __pyx_t_2; /* 
"aiohttp/_http_parser.pyx":577 * if parsed.field_set & (1 << cparser.UF_HOST): * off = parsed.field_data[cparser.UF_HOST].off * ln = parsed.field_data[cparser.UF_HOST].len # <<<<<<<<<<<<<< * host = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') * else: */ __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_HOST)]).len; __pyx_v_ln = __pyx_t_2; /* "aiohttp/_http_parser.pyx":578 * off = parsed.field_data[cparser.UF_HOST].off * ln = parsed.field_data[cparser.UF_HOST].len * host = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< * else: * host = '' */ __pyx_t_3 = __Pyx_decode_c_string(__pyx_v_buf_data, __pyx_v_off, (__pyx_v_off + __pyx_v_ln), NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 578, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_3); if (!(likely(PyUnicode_CheckExact(__pyx_t_3))||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(0, 578, __pyx_L5_error) __Pyx_DECREF_SET(__pyx_v_host, ((PyObject*)__pyx_t_3)); __pyx_t_3 = 0; /* "aiohttp/_http_parser.pyx":575 * schema = '' * * if parsed.field_set & (1 << cparser.UF_HOST): # <<<<<<<<<<<<<< * off = parsed.field_data[cparser.UF_HOST].off * ln = parsed.field_data[cparser.UF_HOST].len */ goto __pyx_L9; } /* "aiohttp/_http_parser.pyx":580 * host = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') * else: * host = '' # <<<<<<<<<<<<<< * * if parsed.field_set & (1 << cparser.UF_PORT): */ /*else*/ { __Pyx_INCREF(__pyx_kp_u__6); __Pyx_DECREF_SET(__pyx_v_host, __pyx_kp_u__6); } __pyx_L9:; /* "aiohttp/_http_parser.pyx":582 * host = '' * * if parsed.field_set & (1 << cparser.UF_PORT): # <<<<<<<<<<<<<< * port = parsed.port * */ __pyx_t_1 = ((__pyx_v_parsed->field_set & (1 << UF_PORT)) != 0); if (__pyx_t_1) { /* "aiohttp/_http_parser.pyx":583 * * if parsed.field_set & (1 << cparser.UF_PORT): * port = parsed.port # <<<<<<<<<<<<<< * * if parsed.field_set & (1 << cparser.UF_PATH): */ 
__pyx_t_3 = __Pyx_PyInt_From_uint16_t(__pyx_v_parsed->port); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 583, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_3); __Pyx_DECREF_SET(__pyx_v_port, __pyx_t_3); __pyx_t_3 = 0; /* "aiohttp/_http_parser.pyx":582 * host = '' * * if parsed.field_set & (1 << cparser.UF_PORT): # <<<<<<<<<<<<<< * port = parsed.port * */ } /* "aiohttp/_http_parser.pyx":585 * port = parsed.port * * if parsed.field_set & (1 << cparser.UF_PATH): # <<<<<<<<<<<<<< * off = parsed.field_data[cparser.UF_PATH].off * ln = parsed.field_data[cparser.UF_PATH].len */ __pyx_t_1 = ((__pyx_v_parsed->field_set & (1 << UF_PATH)) != 0); if (__pyx_t_1) { /* "aiohttp/_http_parser.pyx":586 * * if parsed.field_set & (1 << cparser.UF_PATH): * off = parsed.field_data[cparser.UF_PATH].off # <<<<<<<<<<<<<< * ln = parsed.field_data[cparser.UF_PATH].len * path = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') */ __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_PATH)]).off; __pyx_v_off = __pyx_t_2; /* "aiohttp/_http_parser.pyx":587 * if parsed.field_set & (1 << cparser.UF_PATH): * off = parsed.field_data[cparser.UF_PATH].off * ln = parsed.field_data[cparser.UF_PATH].len # <<<<<<<<<<<<<< * path = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') * else: */ __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_PATH)]).len; __pyx_v_ln = __pyx_t_2; /* "aiohttp/_http_parser.pyx":588 * off = parsed.field_data[cparser.UF_PATH].off * ln = parsed.field_data[cparser.UF_PATH].len * path = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< * else: * path = '' */ __pyx_t_3 = __Pyx_decode_c_string(__pyx_v_buf_data, __pyx_v_off, (__pyx_v_off + __pyx_v_ln), NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 588, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_3); if (!(likely(PyUnicode_CheckExact(__pyx_t_3))||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(0, 588, 
__pyx_L5_error) __Pyx_DECREF_SET(__pyx_v_path, ((PyObject*)__pyx_t_3)); __pyx_t_3 = 0; /* "aiohttp/_http_parser.pyx":585 * port = parsed.port * * if parsed.field_set & (1 << cparser.UF_PATH): # <<<<<<<<<<<<<< * off = parsed.field_data[cparser.UF_PATH].off * ln = parsed.field_data[cparser.UF_PATH].len */ goto __pyx_L11; } /* "aiohttp/_http_parser.pyx":590 * path = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') * else: * path = '' # <<<<<<<<<<<<<< * * if parsed.field_set & (1 << cparser.UF_QUERY): */ /*else*/ { __Pyx_INCREF(__pyx_kp_u__6); __Pyx_DECREF_SET(__pyx_v_path, __pyx_kp_u__6); } __pyx_L11:; /* "aiohttp/_http_parser.pyx":592 * path = '' * * if parsed.field_set & (1 << cparser.UF_QUERY): # <<<<<<<<<<<<<< * off = parsed.field_data[cparser.UF_QUERY].off * ln = parsed.field_data[cparser.UF_QUERY].len */ __pyx_t_1 = ((__pyx_v_parsed->field_set & (1 << UF_QUERY)) != 0); if (__pyx_t_1) { /* "aiohttp/_http_parser.pyx":593 * * if parsed.field_set & (1 << cparser.UF_QUERY): * off = parsed.field_data[cparser.UF_QUERY].off # <<<<<<<<<<<<<< * ln = parsed.field_data[cparser.UF_QUERY].len * query = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') */ __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_QUERY)]).off; __pyx_v_off = __pyx_t_2; /* "aiohttp/_http_parser.pyx":594 * if parsed.field_set & (1 << cparser.UF_QUERY): * off = parsed.field_data[cparser.UF_QUERY].off * ln = parsed.field_data[cparser.UF_QUERY].len # <<<<<<<<<<<<<< * query = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') * else: */ __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_QUERY)]).len; __pyx_v_ln = __pyx_t_2; /* "aiohttp/_http_parser.pyx":595 * off = parsed.field_data[cparser.UF_QUERY].off * ln = parsed.field_data[cparser.UF_QUERY].len * query = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< * else: * query = '' */ __pyx_t_3 = __Pyx_decode_c_string(__pyx_v_buf_data, __pyx_v_off, (__pyx_v_off + __pyx_v_ln), NULL, ((char const *)"surrogateescape"), 
PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 595, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_3); if (!(likely(PyUnicode_CheckExact(__pyx_t_3))||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(0, 595, __pyx_L5_error) __Pyx_DECREF_SET(__pyx_v_query, ((PyObject*)__pyx_t_3)); __pyx_t_3 = 0; /* "aiohttp/_http_parser.pyx":592 * path = '' * * if parsed.field_set & (1 << cparser.UF_QUERY): # <<<<<<<<<<<<<< * off = parsed.field_data[cparser.UF_QUERY].off * ln = parsed.field_data[cparser.UF_QUERY].len */ goto __pyx_L12; } /* "aiohttp/_http_parser.pyx":597 * query = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') * else: * query = '' # <<<<<<<<<<<<<< * * if parsed.field_set & (1 << cparser.UF_FRAGMENT): */ /*else*/ { __Pyx_INCREF(__pyx_kp_u__6); __Pyx_DECREF_SET(__pyx_v_query, __pyx_kp_u__6); } __pyx_L12:; /* "aiohttp/_http_parser.pyx":599 * query = '' * * if parsed.field_set & (1 << cparser.UF_FRAGMENT): # <<<<<<<<<<<<<< * off = parsed.field_data[cparser.UF_FRAGMENT].off * ln = parsed.field_data[cparser.UF_FRAGMENT].len */ __pyx_t_1 = ((__pyx_v_parsed->field_set & (1 << UF_FRAGMENT)) != 0); if (__pyx_t_1) { /* "aiohttp/_http_parser.pyx":600 * * if parsed.field_set & (1 << cparser.UF_FRAGMENT): * off = parsed.field_data[cparser.UF_FRAGMENT].off # <<<<<<<<<<<<<< * ln = parsed.field_data[cparser.UF_FRAGMENT].len * fragment = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') */ __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_FRAGMENT)]).off; __pyx_v_off = __pyx_t_2; /* "aiohttp/_http_parser.pyx":601 * if parsed.field_set & (1 << cparser.UF_FRAGMENT): * off = parsed.field_data[cparser.UF_FRAGMENT].off * ln = parsed.field_data[cparser.UF_FRAGMENT].len # <<<<<<<<<<<<<< * fragment = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') * else: */ __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_FRAGMENT)]).len; __pyx_v_ln = __pyx_t_2; /* "aiohttp/_http_parser.pyx":602 * off = 
parsed.field_data[cparser.UF_FRAGMENT].off * ln = parsed.field_data[cparser.UF_FRAGMENT].len * fragment = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< * else: * fragment = '' */ __pyx_t_3 = __Pyx_decode_c_string(__pyx_v_buf_data, __pyx_v_off, (__pyx_v_off + __pyx_v_ln), NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 602, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_3); if (!(likely(PyUnicode_CheckExact(__pyx_t_3))||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(0, 602, __pyx_L5_error) __Pyx_DECREF_SET(__pyx_v_fragment, ((PyObject*)__pyx_t_3)); __pyx_t_3 = 0; /* "aiohttp/_http_parser.pyx":599 * query = '' * * if parsed.field_set & (1 << cparser.UF_FRAGMENT): # <<<<<<<<<<<<<< * off = parsed.field_data[cparser.UF_FRAGMENT].off * ln = parsed.field_data[cparser.UF_FRAGMENT].len */ goto __pyx_L13; } /* "aiohttp/_http_parser.pyx":604 * fragment = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') * else: * fragment = '' # <<<<<<<<<<<<<< * * if parsed.field_set & (1 << cparser.UF_USERINFO): */ /*else*/ { __Pyx_INCREF(__pyx_kp_u__6); __Pyx_DECREF_SET(__pyx_v_fragment, __pyx_kp_u__6); } __pyx_L13:; /* "aiohttp/_http_parser.pyx":606 * fragment = '' * * if parsed.field_set & (1 << cparser.UF_USERINFO): # <<<<<<<<<<<<<< * off = parsed.field_data[cparser.UF_USERINFO].off * ln = parsed.field_data[cparser.UF_USERINFO].len */ __pyx_t_1 = ((__pyx_v_parsed->field_set & (1 << UF_USERINFO)) != 0); if (__pyx_t_1) { /* "aiohttp/_http_parser.pyx":607 * * if parsed.field_set & (1 << cparser.UF_USERINFO): * off = parsed.field_data[cparser.UF_USERINFO].off # <<<<<<<<<<<<<< * ln = parsed.field_data[cparser.UF_USERINFO].len * userinfo = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') */ __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_USERINFO)]).off; __pyx_v_off = __pyx_t_2; /* "aiohttp/_http_parser.pyx":608 * if parsed.field_set & (1 
<< cparser.UF_USERINFO): * off = parsed.field_data[cparser.UF_USERINFO].off * ln = parsed.field_data[cparser.UF_USERINFO].len # <<<<<<<<<<<<<< * userinfo = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') * */ __pyx_t_2 = (__pyx_v_parsed->field_data[((int)UF_USERINFO)]).len; __pyx_v_ln = __pyx_t_2; /* "aiohttp/_http_parser.pyx":609 * off = parsed.field_data[cparser.UF_USERINFO].off * ln = parsed.field_data[cparser.UF_USERINFO].len * userinfo = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') # <<<<<<<<<<<<<< * * user, sep, password = userinfo.partition(':') */ __pyx_t_3 = __Pyx_decode_c_string(__pyx_v_buf_data, __pyx_v_off, (__pyx_v_off + __pyx_v_ln), NULL, ((char const *)"surrogateescape"), PyUnicode_DecodeUTF8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 609, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_3); if (!(likely(PyUnicode_CheckExact(__pyx_t_3))||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(0, 609, __pyx_L5_error) __Pyx_DECREF_SET(__pyx_v_userinfo, ((PyObject*)__pyx_t_3)); __pyx_t_3 = 0; /* "aiohttp/_http_parser.pyx":611 * userinfo = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') * * user, sep, password = userinfo.partition(':') # <<<<<<<<<<<<<< * * return URL.build(scheme=schema, */ __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_userinfo, __pyx_n_s_partition); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 611, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_tuple__14, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 611, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; if ((likely(PyTuple_CheckExact(__pyx_t_4))) || (PyList_CheckExact(__pyx_t_4))) { PyObject* sequence = __pyx_t_4; #if !CYTHON_COMPILING_IN_PYPY Py_ssize_t size = Py_SIZE(sequence); #else Py_ssize_t size = PySequence_Size(sequence); #endif if (unlikely(size != 3)) { if (size > 3) __Pyx_RaiseTooManyValuesError(3); else if (size >= 0) 
__Pyx_RaiseNeedMoreValuesError(size); __PYX_ERR(0, 611, __pyx_L5_error) } #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS if (likely(PyTuple_CheckExact(sequence))) { __pyx_t_3 = PyTuple_GET_ITEM(sequence, 0); __pyx_t_5 = PyTuple_GET_ITEM(sequence, 1); __pyx_t_6 = PyTuple_GET_ITEM(sequence, 2); } else { __pyx_t_3 = PyList_GET_ITEM(sequence, 0); __pyx_t_5 = PyList_GET_ITEM(sequence, 1); __pyx_t_6 = PyList_GET_ITEM(sequence, 2); } __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(__pyx_t_5); __Pyx_INCREF(__pyx_t_6); #else __pyx_t_3 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 611, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_5 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 611, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_5); __pyx_t_6 = PySequence_ITEM(sequence, 2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 611, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_6); #endif __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; } else { Py_ssize_t index = -1; __pyx_t_7 = PyObject_GetIter(__pyx_t_4); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 611, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __pyx_t_8 = Py_TYPE(__pyx_t_7)->tp_iternext; index = 0; __pyx_t_3 = __pyx_t_8(__pyx_t_7); if (unlikely(!__pyx_t_3)) goto __pyx_L15_unpacking_failed; __Pyx_GOTREF(__pyx_t_3); index = 1; __pyx_t_5 = __pyx_t_8(__pyx_t_7); if (unlikely(!__pyx_t_5)) goto __pyx_L15_unpacking_failed; __Pyx_GOTREF(__pyx_t_5); index = 2; __pyx_t_6 = __pyx_t_8(__pyx_t_7); if (unlikely(!__pyx_t_6)) goto __pyx_L15_unpacking_failed; __Pyx_GOTREF(__pyx_t_6); if (__Pyx_IternextUnpackEndCheck(__pyx_t_8(__pyx_t_7), 3) < 0) __PYX_ERR(0, 611, __pyx_L5_error) __pyx_t_8 = NULL; __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; goto __pyx_L16_unpacking_done; __pyx_L15_unpacking_failed:; __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; __pyx_t_8 = NULL; if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); __PYX_ERR(0, 611, __pyx_L5_error) __pyx_L16_unpacking_done:; } if 
(!(likely(PyUnicode_CheckExact(__pyx_t_3))||((__pyx_t_3) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(0, 611, __pyx_L5_error) if (!(likely(PyUnicode_CheckExact(__pyx_t_6))||((__pyx_t_6) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "unicode", Py_TYPE(__pyx_t_6)->tp_name), 0))) __PYX_ERR(0, 611, __pyx_L5_error) __Pyx_DECREF_SET(__pyx_v_user, ((PyObject*)__pyx_t_3)); __pyx_t_3 = 0; __pyx_v_sep = __pyx_t_5; __pyx_t_5 = 0; __Pyx_DECREF_SET(__pyx_v_password, ((PyObject*)__pyx_t_6)); __pyx_t_6 = 0; /* "aiohttp/_http_parser.pyx":606 * fragment = '' * * if parsed.field_set & (1 << cparser.UF_USERINFO): # <<<<<<<<<<<<<< * off = parsed.field_data[cparser.UF_USERINFO].off * ln = parsed.field_data[cparser.UF_USERINFO].len */ } /* "aiohttp/_http_parser.pyx":613 * user, sep, password = userinfo.partition(':') * * return URL.build(scheme=schema, # <<<<<<<<<<<<<< * user=user, password=password, host=host, port=port, * path=path, query=query, fragment=fragment) */ __Pyx_XDECREF(__pyx_r); __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_URL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 613, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_build); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 613, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __pyx_t_4 = __Pyx_PyDict_NewPresized(8); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 613, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_4); if (PyDict_SetItem(__pyx_t_4, __pyx_n_s_scheme, __pyx_v_schema) < 0) __PYX_ERR(0, 613, __pyx_L5_error) /* "aiohttp/_http_parser.pyx":614 * * return URL.build(scheme=schema, * user=user, password=password, host=host, port=port, # <<<<<<<<<<<<<< * path=path, query=query, fragment=fragment) * else: */ if (PyDict_SetItem(__pyx_t_4, __pyx_n_s_user, __pyx_v_user) < 0) __PYX_ERR(0, 613, __pyx_L5_error) if (PyDict_SetItem(__pyx_t_4, 
__pyx_n_s_password, __pyx_v_password) < 0) __PYX_ERR(0, 613, __pyx_L5_error) if (PyDict_SetItem(__pyx_t_4, __pyx_n_s_host, __pyx_v_host) < 0) __PYX_ERR(0, 613, __pyx_L5_error) if (PyDict_SetItem(__pyx_t_4, __pyx_n_s_port, __pyx_v_port) < 0) __PYX_ERR(0, 613, __pyx_L5_error) /* "aiohttp/_http_parser.pyx":615 * return URL.build(scheme=schema, * user=user, password=password, host=host, port=port, * path=path, query=query, fragment=fragment) # <<<<<<<<<<<<<< * else: * raise InvalidURLError("invalid url {!r}".format(buf_data)) */ if (PyDict_SetItem(__pyx_t_4, __pyx_n_s_path, __pyx_v_path) < 0) __PYX_ERR(0, 613, __pyx_L5_error) if (PyDict_SetItem(__pyx_t_4, __pyx_n_s_query, __pyx_v_query) < 0) __PYX_ERR(0, 613, __pyx_L5_error) if (PyDict_SetItem(__pyx_t_4, __pyx_n_s_fragment, __pyx_v_fragment) < 0) __PYX_ERR(0, 613, __pyx_L5_error) /* "aiohttp/_http_parser.pyx":613 * user, sep, password = userinfo.partition(':') * * return URL.build(scheme=schema, # <<<<<<<<<<<<<< * user=user, password=password, host=host, port=port, * path=path, query=query, fragment=fragment) */ __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_empty_tuple, __pyx_t_4); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 613, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __pyx_r = __pyx_t_5; __pyx_t_5 = 0; goto __pyx_L4_return; /* "aiohttp/_http_parser.pyx":567 * res = cparser.http_parser_parse_url(buf_data, length, 0, parsed) * * if res == 0: # <<<<<<<<<<<<<< * if parsed.field_set & (1 << cparser.UF_SCHEMA): * off = parsed.field_data[cparser.UF_SCHEMA].off */ } /* "aiohttp/_http_parser.pyx":617 * path=path, query=query, fragment=fragment) * else: * raise InvalidURLError("invalid url {!r}".format(buf_data)) # <<<<<<<<<<<<<< * finally: * PyMem_Free(parsed) */ /*else*/ { __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_InvalidURLError); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 617, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_4); __pyx_t_3 = 
__Pyx_PyObject_GetAttrStr(__pyx_kp_u_invalid_url_r, __pyx_n_s_format); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 617, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_7 = __Pyx_PyBytes_FromString(__pyx_v_buf_data); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 617, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_7); __pyx_t_9 = NULL; if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_3); if (likely(__pyx_t_9)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); __Pyx_INCREF(__pyx_t_9); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_3, function); } } if (!__pyx_t_9) { __pyx_t_6 = __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_7); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 617, __pyx_L5_error) __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_GOTREF(__pyx_t_6); } else { #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_3)) { PyObject *__pyx_temp[2] = {__pyx_t_9, __pyx_t_7}; __pyx_t_6 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 617, __pyx_L5_error) __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { PyObject *__pyx_temp[2] = {__pyx_t_9, __pyx_t_7}; __pyx_t_6 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 617, __pyx_L5_error) __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; __Pyx_GOTREF(__pyx_t_6); __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; } else #endif { __pyx_t_10 = PyTuple_New(1+1); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 617, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_10); __Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_10, 0, __pyx_t_9); __pyx_t_9 = NULL; __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_10, 0+1, __pyx_t_7); __pyx_t_7 = 0; __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_10, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 617, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_6); 
__Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; } } __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; __pyx_t_3 = NULL; if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_4); if (likely(__pyx_t_3)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); __Pyx_INCREF(__pyx_t_3); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_4, function); } } if (!__pyx_t_3) { __pyx_t_5 = __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_6); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 617, __pyx_L5_error) __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_GOTREF(__pyx_t_5); } else { #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_4)) { PyObject *__pyx_temp[2] = {__pyx_t_3, __pyx_t_6}; __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 617, __pyx_L5_error) __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_GOTREF(__pyx_t_5); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { PyObject *__pyx_temp[2] = {__pyx_t_3, __pyx_t_6}; __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 617, __pyx_L5_error) __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_GOTREF(__pyx_t_5); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; } else #endif { __pyx_t_10 = PyTuple_New(1+1); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 617, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_10); __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_10, 0, __pyx_t_3); __pyx_t_3 = NULL; __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_10, 0+1, __pyx_t_6); __pyx_t_6 = 0; __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_10, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 617, __pyx_L5_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; } } __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_Raise(__pyx_t_5, 0, 0, 0); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __PYX_ERR(0, 617, __pyx_L5_error) } } /* 
"aiohttp/_http_parser.pyx":619 * raise InvalidURLError("invalid url {!r}".format(buf_data)) * finally: * PyMem_Free(parsed) # <<<<<<<<<<<<<< */ /*finally:*/ { __pyx_L5_error:; /*exception exit:*/{ __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __pyx_t_14 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; __pyx_t_18 = 0; __pyx_t_19 = 0; __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_17, &__pyx_t_18, &__pyx_t_19); if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_14, &__pyx_t_15, &__pyx_t_16) < 0)) __Pyx_ErrFetch(&__pyx_t_14, &__pyx_t_15, &__pyx_t_16); __Pyx_XGOTREF(__pyx_t_14); __Pyx_XGOTREF(__pyx_t_15); __Pyx_XGOTREF(__pyx_t_16); __Pyx_XGOTREF(__pyx_t_17); __Pyx_XGOTREF(__pyx_t_18); __Pyx_XGOTREF(__pyx_t_19); __pyx_t_11 = __pyx_lineno; __pyx_t_12 = __pyx_clineno; __pyx_t_13 = __pyx_filename; { PyMem_Free(__pyx_v_parsed); } if (PY_MAJOR_VERSION >= 3) { __Pyx_XGIVEREF(__pyx_t_17); __Pyx_XGIVEREF(__pyx_t_18); __Pyx_XGIVEREF(__pyx_t_19); __Pyx_ExceptionReset(__pyx_t_17, __pyx_t_18, __pyx_t_19); } __Pyx_XGIVEREF(__pyx_t_14); __Pyx_XGIVEREF(__pyx_t_15); __Pyx_XGIVEREF(__pyx_t_16); __Pyx_ErrRestore(__pyx_t_14, __pyx_t_15, __pyx_t_16); __pyx_t_14 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; __pyx_t_18 = 0; __pyx_t_19 = 0; __pyx_lineno = __pyx_t_11; __pyx_clineno = __pyx_t_12; __pyx_filename = __pyx_t_13; goto __pyx_L1_error; } __pyx_L4_return: { __pyx_t_19 = __pyx_r; __pyx_r = 0; PyMem_Free(__pyx_v_parsed); __pyx_r = __pyx_t_19; __pyx_t_19 = 0; goto __pyx_L0; } } /* "aiohttp/_http_parser.pyx":542 * * * def _parse_url(char* buf_data, size_t length): # <<<<<<<<<<<<<< * cdef: * cparser.http_parser_url* parsed */ /* function exit code */ 
__pyx_L1_error:; __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_5); __Pyx_XDECREF(__pyx_t_6); __Pyx_XDECREF(__pyx_t_7); __Pyx_XDECREF(__pyx_t_9); __Pyx_XDECREF(__pyx_t_10); __Pyx_AddTraceback("aiohttp._http_parser._parse_url", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XDECREF(__pyx_v_schema); __Pyx_XDECREF(__pyx_v_host); __Pyx_XDECREF(__pyx_v_port); __Pyx_XDECREF(__pyx_v_path); __Pyx_XDECREF(__pyx_v_query); __Pyx_XDECREF(__pyx_v_fragment); __Pyx_XDECREF(__pyx_v_user); __Pyx_XDECREF(__pyx_v_password); __Pyx_XDECREF(__pyx_v_userinfo); __Pyx_XDECREF(__pyx_v_result); __Pyx_XDECREF(__pyx_v_sep); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } static struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser __pyx_vtable_7aiohttp_12_http_parser_HttpParser; static PyObject *__pyx_tp_new_7aiohttp_12_http_parser_HttpParser(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *p; PyObject *o; if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { o = (*t->tp_alloc)(t, 0); } else { o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); } if (unlikely(!o)) return 0; p = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)o); p->__pyx_vtab = __pyx_vtabptr_7aiohttp_12_http_parser_HttpParser; p->_header_name = ((PyObject*)Py_None); Py_INCREF(Py_None); p->_header_value = ((PyObject*)Py_None); Py_INCREF(Py_None); p->_raw_header_name = ((PyObject*)Py_None); Py_INCREF(Py_None); p->_raw_header_value = ((PyObject*)Py_None); Py_INCREF(Py_None); p->_protocol = Py_None; Py_INCREF(Py_None); p->_loop = Py_None; Py_INCREF(Py_None); p->_timer = Py_None; Py_INCREF(Py_None); p->_url = Py_None; Py_INCREF(Py_None); p->_buf = ((PyObject*)Py_None); Py_INCREF(Py_None); p->_path = ((PyObject*)Py_None); Py_INCREF(Py_None); p->_reason = ((PyObject*)Py_None); Py_INCREF(Py_None); p->_headers = ((PyObject*)Py_None); 
Py_INCREF(Py_None); p->_raw_headers = ((PyObject*)Py_None); Py_INCREF(Py_None); p->_messages = ((PyObject*)Py_None); Py_INCREF(Py_None); p->_payload = Py_None; Py_INCREF(Py_None); p->_payload_exception = Py_None; Py_INCREF(Py_None); p->_last_error = Py_None; Py_INCREF(Py_None); p->py_buf.obj = NULL; if (unlikely(__pyx_pw_7aiohttp_12_http_parser_10HttpParser_1__cinit__(o, __pyx_empty_tuple, NULL) < 0)) goto bad; return o; bad: Py_DECREF(o); o = 0; return NULL; } static void __pyx_tp_dealloc_7aiohttp_12_http_parser_HttpParser(PyObject *o) { struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *p = (struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)o; #if CYTHON_USE_TP_FINALIZE if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { if (PyObject_CallFinalizerFromDealloc(o)) return; } #endif PyObject_GC_UnTrack(o); { PyObject *etype, *eval, *etb; PyErr_Fetch(&etype, &eval, &etb); ++Py_REFCNT(o); __pyx_pw_7aiohttp_12_http_parser_10HttpParser_3__dealloc__(o); --Py_REFCNT(o); PyErr_Restore(etype, eval, etb); } Py_CLEAR(p->_header_name); Py_CLEAR(p->_header_value); Py_CLEAR(p->_raw_header_name); Py_CLEAR(p->_raw_header_value); Py_CLEAR(p->_protocol); Py_CLEAR(p->_loop); Py_CLEAR(p->_timer); Py_CLEAR(p->_url); Py_CLEAR(p->_buf); Py_CLEAR(p->_path); Py_CLEAR(p->_reason); Py_CLEAR(p->_headers); Py_CLEAR(p->_raw_headers); Py_CLEAR(p->_messages); Py_CLEAR(p->_payload); Py_CLEAR(p->_payload_exception); Py_CLEAR(p->_last_error); (*Py_TYPE(o)->tp_free)(o); } static int __pyx_tp_traverse_7aiohttp_12_http_parser_HttpParser(PyObject *o, visitproc v, void *a) { int e; struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *p = (struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)o; if (p->_protocol) { e = (*v)(p->_protocol, a); if (e) return e; } if (p->_loop) { e = (*v)(p->_loop, a); if (e) return e; } if (p->_timer) { e = (*v)(p->_timer, a); if (e) return e; } if (p->_url) { e = (*v)(p->_url, a); if (e) return e; } 
if (p->_headers) { e = (*v)(p->_headers, a); if (e) return e; } if (p->_raw_headers) { e = (*v)(p->_raw_headers, a); if (e) return e; } if (p->_messages) { e = (*v)(p->_messages, a); if (e) return e; } if (p->_payload) { e = (*v)(p->_payload, a); if (e) return e; } if (p->_payload_exception) { e = (*v)(p->_payload_exception, a); if (e) return e; } if (p->_last_error) { e = (*v)(p->_last_error, a); if (e) return e; } if (p->py_buf.obj) { e = (*v)(p->py_buf.obj, a); if (e) return e; } return 0; } static int __pyx_tp_clear_7aiohttp_12_http_parser_HttpParser(PyObject *o) { PyObject* tmp; struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *p = (struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *)o; tmp = ((PyObject*)p->_protocol); p->_protocol = Py_None; Py_INCREF(Py_None); Py_XDECREF(tmp); tmp = ((PyObject*)p->_loop); p->_loop = Py_None; Py_INCREF(Py_None); Py_XDECREF(tmp); tmp = ((PyObject*)p->_timer); p->_timer = Py_None; Py_INCREF(Py_None); Py_XDECREF(tmp); tmp = ((PyObject*)p->_url); p->_url = Py_None; Py_INCREF(Py_None); Py_XDECREF(tmp); tmp = ((PyObject*)p->_headers); p->_headers = ((PyObject*)Py_None); Py_INCREF(Py_None); Py_XDECREF(tmp); tmp = ((PyObject*)p->_raw_headers); p->_raw_headers = ((PyObject*)Py_None); Py_INCREF(Py_None); Py_XDECREF(tmp); tmp = ((PyObject*)p->_messages); p->_messages = ((PyObject*)Py_None); Py_INCREF(Py_None); Py_XDECREF(tmp); tmp = ((PyObject*)p->_payload); p->_payload = Py_None; Py_INCREF(Py_None); Py_XDECREF(tmp); tmp = ((PyObject*)p->_payload_exception); p->_payload_exception = Py_None; Py_INCREF(Py_None); Py_XDECREF(tmp); tmp = ((PyObject*)p->_last_error); p->_last_error = Py_None; Py_INCREF(Py_None); Py_XDECREF(tmp); Py_CLEAR(p->py_buf.obj); return 0; } static PyMethodDef __pyx_methods_7aiohttp_12_http_parser_HttpParser[] = { {"http_version", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_10HttpParser_5http_version, METH_NOARGS, 0}, {"feed_eof", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_10HttpParser_7feed_eof, METH_NOARGS, 
0}, {"feed_data", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_10HttpParser_9feed_data, METH_O, 0}, {"__reduce_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_10HttpParser_11__reduce_cython__, METH_NOARGS, 0}, {"__setstate_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_10HttpParser_13__setstate_cython__, METH_O, 0}, {0, 0, 0, 0} }; static PyTypeObject __pyx_type_7aiohttp_12_http_parser_HttpParser = { PyVarObject_HEAD_INIT(0, 0) "aiohttp._http_parser.HttpParser", /*tp_name*/ sizeof(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser), /*tp_basicsize*/ 0, /*tp_itemsize*/ __pyx_tp_dealloc_7aiohttp_12_http_parser_HttpParser, /*tp_dealloc*/ 0, /*tp_print*/ 0, /*tp_getattr*/ 0, /*tp_setattr*/ #if PY_MAJOR_VERSION < 3 0, /*tp_compare*/ #endif #if PY_MAJOR_VERSION >= 3 0, /*tp_as_async*/ #endif 0, /*tp_repr*/ 0, /*tp_as_number*/ 0, /*tp_as_sequence*/ 0, /*tp_as_mapping*/ 0, /*tp_hash*/ 0, /*tp_call*/ 0, /*tp_str*/ 0, /*tp_getattro*/ 0, /*tp_setattro*/ 0, /*tp_as_buffer*/ Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ 0, /*tp_doc*/ __pyx_tp_traverse_7aiohttp_12_http_parser_HttpParser, /*tp_traverse*/ __pyx_tp_clear_7aiohttp_12_http_parser_HttpParser, /*tp_clear*/ 0, /*tp_richcompare*/ 0, /*tp_weaklistoffset*/ 0, /*tp_iter*/ 0, /*tp_iternext*/ __pyx_methods_7aiohttp_12_http_parser_HttpParser, /*tp_methods*/ 0, /*tp_members*/ 0, /*tp_getset*/ 0, /*tp_base*/ 0, /*tp_dict*/ 0, /*tp_descr_get*/ 0, /*tp_descr_set*/ 0, /*tp_dictoffset*/ 0, /*tp_init*/ 0, /*tp_alloc*/ __pyx_tp_new_7aiohttp_12_http_parser_HttpParser, /*tp_new*/ 0, /*tp_free*/ 0, /*tp_is_gc*/ 0, /*tp_bases*/ 0, /*tp_mro*/ 0, /*tp_cache*/ 0, /*tp_subclasses*/ 0, /*tp_weaklist*/ 0, /*tp_del*/ 0, /*tp_version_tag*/ #if PY_VERSION_HEX >= 0x030400a1 0, /*tp_finalize*/ #endif }; static struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpRequestParserC 
__pyx_vtable_7aiohttp_12_http_parser_HttpRequestParserC; static PyObject *__pyx_tp_new_7aiohttp_12_http_parser_HttpRequestParserC(PyTypeObject *t, PyObject *a, PyObject *k) { struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParserC *p; PyObject *o = __pyx_tp_new_7aiohttp_12_http_parser_HttpParser(t, a, k); if (unlikely(!o)) return 0; p = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParserC *)o); p->__pyx_base.__pyx_vtab = (struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser*)__pyx_vtabptr_7aiohttp_12_http_parser_HttpRequestParserC; return o; } static PyMethodDef __pyx_methods_7aiohttp_12_http_parser_HttpRequestParserC[] = { {"__reduce_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_18HttpRequestParserC_3__reduce_cython__, METH_NOARGS, 0}, {"__setstate_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_18HttpRequestParserC_5__setstate_cython__, METH_O, 0}, {0, 0, 0, 0} }; static PyTypeObject __pyx_type_7aiohttp_12_http_parser_HttpRequestParserC = { PyVarObject_HEAD_INIT(0, 0) "aiohttp._http_parser.HttpRequestParserC", /*tp_name*/ sizeof(struct __pyx_obj_7aiohttp_12_http_parser_HttpRequestParserC), /*tp_basicsize*/ 0, /*tp_itemsize*/ __pyx_tp_dealloc_7aiohttp_12_http_parser_HttpParser, /*tp_dealloc*/ 0, /*tp_print*/ 0, /*tp_getattr*/ 0, /*tp_setattr*/ #if PY_MAJOR_VERSION < 3 0, /*tp_compare*/ #endif #if PY_MAJOR_VERSION >= 3 0, /*tp_as_async*/ #endif 0, /*tp_repr*/ 0, /*tp_as_number*/ 0, /*tp_as_sequence*/ 0, /*tp_as_mapping*/ 0, /*tp_hash*/ 0, /*tp_call*/ 0, /*tp_str*/ 0, /*tp_getattro*/ 0, /*tp_setattro*/ 0, /*tp_as_buffer*/ Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ 0, /*tp_doc*/ __pyx_tp_traverse_7aiohttp_12_http_parser_HttpParser, /*tp_traverse*/ __pyx_tp_clear_7aiohttp_12_http_parser_HttpParser, /*tp_clear*/ 0, /*tp_richcompare*/ 0, /*tp_weaklistoffset*/ 0, /*tp_iter*/ 0, /*tp_iternext*/ 
__pyx_methods_7aiohttp_12_http_parser_HttpRequestParserC, /*tp_methods*/ 0, /*tp_members*/ 0, /*tp_getset*/ 0, /*tp_base*/ 0, /*tp_dict*/ 0, /*tp_descr_get*/ 0, /*tp_descr_set*/ 0, /*tp_dictoffset*/ __pyx_pw_7aiohttp_12_http_parser_18HttpRequestParserC_1__init__, /*tp_init*/ 0, /*tp_alloc*/ __pyx_tp_new_7aiohttp_12_http_parser_HttpRequestParserC, /*tp_new*/ 0, /*tp_free*/ 0, /*tp_is_gc*/ 0, /*tp_bases*/ 0, /*tp_mro*/ 0, /*tp_cache*/ 0, /*tp_subclasses*/ 0, /*tp_weaklist*/ 0, /*tp_del*/ 0, /*tp_version_tag*/ #if PY_VERSION_HEX >= 0x030400a1 0, /*tp_finalize*/ #endif }; static struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpResponseParserC __pyx_vtable_7aiohttp_12_http_parser_HttpResponseParserC; static PyObject *__pyx_tp_new_7aiohttp_12_http_parser_HttpResponseParserC(PyTypeObject *t, PyObject *a, PyObject *k) { struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParserC *p; PyObject *o = __pyx_tp_new_7aiohttp_12_http_parser_HttpParser(t, a, k); if (unlikely(!o)) return 0; p = ((struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParserC *)o); p->__pyx_base.__pyx_vtab = (struct __pyx_vtabstruct_7aiohttp_12_http_parser_HttpParser*)__pyx_vtabptr_7aiohttp_12_http_parser_HttpResponseParserC; return o; } static PyMethodDef __pyx_methods_7aiohttp_12_http_parser_HttpResponseParserC[] = { {"__reduce_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_19HttpResponseParserC_3__reduce_cython__, METH_NOARGS, 0}, {"__setstate_cython__", (PyCFunction)__pyx_pw_7aiohttp_12_http_parser_19HttpResponseParserC_5__setstate_cython__, METH_O, 0}, {0, 0, 0, 0} }; static PyTypeObject __pyx_type_7aiohttp_12_http_parser_HttpResponseParserC = { PyVarObject_HEAD_INIT(0, 0) "aiohttp._http_parser.HttpResponseParserC", /*tp_name*/ sizeof(struct __pyx_obj_7aiohttp_12_http_parser_HttpResponseParserC), /*tp_basicsize*/ 0, /*tp_itemsize*/ __pyx_tp_dealloc_7aiohttp_12_http_parser_HttpParser, /*tp_dealloc*/ 0, /*tp_print*/ 0, /*tp_getattr*/ 0, /*tp_setattr*/ #if PY_MAJOR_VERSION < 3 0, 
/*tp_compare*/ #endif #if PY_MAJOR_VERSION >= 3 0, /*tp_as_async*/ #endif 0, /*tp_repr*/ 0, /*tp_as_number*/ 0, /*tp_as_sequence*/ 0, /*tp_as_mapping*/ 0, /*tp_hash*/ 0, /*tp_call*/ 0, /*tp_str*/ 0, /*tp_getattro*/ 0, /*tp_setattro*/ 0, /*tp_as_buffer*/ Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ 0, /*tp_doc*/ __pyx_tp_traverse_7aiohttp_12_http_parser_HttpParser, /*tp_traverse*/ __pyx_tp_clear_7aiohttp_12_http_parser_HttpParser, /*tp_clear*/ 0, /*tp_richcompare*/ 0, /*tp_weaklistoffset*/ 0, /*tp_iter*/ 0, /*tp_iternext*/ __pyx_methods_7aiohttp_12_http_parser_HttpResponseParserC, /*tp_methods*/ 0, /*tp_members*/ 0, /*tp_getset*/ 0, /*tp_base*/ 0, /*tp_dict*/ 0, /*tp_descr_get*/ 0, /*tp_descr_set*/ 0, /*tp_dictoffset*/ __pyx_pw_7aiohttp_12_http_parser_19HttpResponseParserC_1__init__, /*tp_init*/ 0, /*tp_alloc*/ __pyx_tp_new_7aiohttp_12_http_parser_HttpResponseParserC, /*tp_new*/ 0, /*tp_free*/ 0, /*tp_is_gc*/ 0, /*tp_bases*/ 0, /*tp_mro*/ 0, /*tp_cache*/ 0, /*tp_subclasses*/ 0, /*tp_weaklist*/ 0, /*tp_del*/ 0, /*tp_version_tag*/ #if PY_VERSION_HEX >= 0x030400a1 0, /*tp_finalize*/ #endif }; static PyMethodDef __pyx_methods[] = { {0, 0, 0, 0} }; #if PY_MAJOR_VERSION >= 3 #if CYTHON_PEP489_MULTI_PHASE_INIT static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ static int __pyx_pymod_exec__http_parser(PyObject* module); /*proto*/ static PyModuleDef_Slot __pyx_moduledef_slots[] = { {Py_mod_create, (void*)__pyx_pymod_create}, {Py_mod_exec, (void*)__pyx_pymod_exec__http_parser}, {0, NULL} }; #endif static struct PyModuleDef __pyx_moduledef = { PyModuleDef_HEAD_INIT, "_http_parser", 0, /* m_doc */ #if CYTHON_PEP489_MULTI_PHASE_INIT 0, /* m_size */ #else -1, /* m_size */ #endif __pyx_methods /* m_methods */, #if CYTHON_PEP489_MULTI_PHASE_INIT __pyx_moduledef_slots, /* m_slots */ #else NULL, /* m_reload */ #endif NULL, /* m_traverse */ NULL, /* 
m_clear */ NULL /* m_free */ }; #endif static __Pyx_StringTabEntry __pyx_string_tab[] = { {&__pyx_n_s_BadHttpMessage, __pyx_k_BadHttpMessage, sizeof(__pyx_k_BadHttpMessage), 0, 0, 1, 1}, {&__pyx_n_s_BadStatusLine, __pyx_k_BadStatusLine, sizeof(__pyx_k_BadStatusLine), 0, 0, 1, 1}, {&__pyx_n_s_BaseException, __pyx_k_BaseException, sizeof(__pyx_k_BaseException), 0, 0, 1, 1}, {&__pyx_n_s_CIMultiDict, __pyx_k_CIMultiDict, sizeof(__pyx_k_CIMultiDict), 0, 0, 1, 1}, {&__pyx_n_s_CONTENT_ENCODING, __pyx_k_CONTENT_ENCODING, sizeof(__pyx_k_CONTENT_ENCODING), 0, 0, 1, 1}, {&__pyx_n_s_ContentLengthError, __pyx_k_ContentLengthError, sizeof(__pyx_k_ContentLengthError), 0, 0, 1, 1}, {&__pyx_n_s_DeflateBuffer, __pyx_k_DeflateBuffer, sizeof(__pyx_k_DeflateBuffer), 0, 0, 1, 1}, {&__pyx_n_s_EMPTY_PAYLOAD, __pyx_k_EMPTY_PAYLOAD, sizeof(__pyx_k_EMPTY_PAYLOAD), 0, 0, 1, 1}, {&__pyx_kp_u_Header_name_is_too_long, __pyx_k_Header_name_is_too_long, sizeof(__pyx_k_Header_name_is_too_long), 0, 1, 0, 0}, {&__pyx_kp_u_Header_value_is_too_long, __pyx_k_Header_value_is_too_long, sizeof(__pyx_k_Header_value_is_too_long), 0, 1, 0, 0}, {&__pyx_n_u_HttpRequestParserC, __pyx_k_HttpRequestParserC, sizeof(__pyx_k_HttpRequestParserC), 0, 1, 0, 1}, {&__pyx_n_u_HttpResponseMessageC, __pyx_k_HttpResponseMessageC, sizeof(__pyx_k_HttpResponseMessageC), 0, 1, 0, 1}, {&__pyx_n_s_HttpVersion, __pyx_k_HttpVersion, sizeof(__pyx_k_HttpVersion), 0, 0, 1, 1}, {&__pyx_n_s_HttpVersion10, __pyx_k_HttpVersion10, sizeof(__pyx_k_HttpVersion10), 0, 0, 1, 1}, {&__pyx_n_s_HttpVersion11, __pyx_k_HttpVersion11, sizeof(__pyx_k_HttpVersion11), 0, 0, 1, 1}, {&__pyx_n_s_InvalidHeader, __pyx_k_InvalidHeader, sizeof(__pyx_k_InvalidHeader), 0, 0, 1, 1}, {&__pyx_n_s_InvalidURLError, __pyx_k_InvalidURLError, sizeof(__pyx_k_InvalidURLError), 0, 0, 1, 1}, {&__pyx_n_s_LineTooLong, __pyx_k_LineTooLong, sizeof(__pyx_k_LineTooLong), 0, 0, 1, 1}, {&__pyx_n_s_MemoryError, __pyx_k_MemoryError, sizeof(__pyx_k_MemoryError), 0, 0, 1, 1}, 
{&__pyx_kp_u_Not_enough_data_for_satisfy_cont, __pyx_k_Not_enough_data_for_satisfy_cont, sizeof(__pyx_k_Not_enough_data_for_satisfy_cont), 0, 1, 0, 0}, {&__pyx_kp_u_Not_enough_data_for_satisfy_tran, __pyx_k_Not_enough_data_for_satisfy_tran, sizeof(__pyx_k_Not_enough_data_for_satisfy_tran), 0, 1, 0, 0}, {&__pyx_n_s_PayloadEncodingError, __pyx_k_PayloadEncodingError, sizeof(__pyx_k_PayloadEncodingError), 0, 0, 1, 1}, {&__pyx_n_s_RawRequestMessage, __pyx_k_RawRequestMessage, sizeof(__pyx_k_RawRequestMessage), 0, 0, 1, 1}, {&__pyx_n_s_RawResponseMessage, __pyx_k_RawResponseMessage, sizeof(__pyx_k_RawResponseMessage), 0, 0, 1, 1}, {&__pyx_n_s_SEC_WEBSOCKET_KEY1, __pyx_k_SEC_WEBSOCKET_KEY1, sizeof(__pyx_k_SEC_WEBSOCKET_KEY1), 0, 0, 1, 1}, {&__pyx_kp_u_Status_line_is_too_long, __pyx_k_Status_line_is_too_long, sizeof(__pyx_k_Status_line_is_too_long), 0, 1, 0, 0}, {&__pyx_n_s_StreamReader, __pyx_k_StreamReader, sizeof(__pyx_k_StreamReader), 0, 0, 1, 1}, {&__pyx_n_s_TransferEncodingError, __pyx_k_TransferEncodingError, sizeof(__pyx_k_TransferEncodingError), 0, 0, 1, 1}, {&__pyx_n_s_TypeError, __pyx_k_TypeError, sizeof(__pyx_k_TypeError), 0, 0, 1, 1}, {&__pyx_n_s_URL, __pyx_k_URL, sizeof(__pyx_k_URL), 0, 0, 1, 1}, {&__pyx_kp_u__13, __pyx_k__13, sizeof(__pyx_k__13), 0, 1, 0, 0}, {&__pyx_kp_b__6, __pyx_k__6, sizeof(__pyx_k__6), 0, 0, 0, 0}, {&__pyx_kp_u__6, __pyx_k__6, sizeof(__pyx_k__6), 0, 1, 0, 0}, {&__pyx_n_s_aiohttp, __pyx_k_aiohttp, sizeof(__pyx_k_aiohttp), 0, 0, 1, 1}, {&__pyx_n_s_aiohttp__http_parser, __pyx_k_aiohttp__http_parser, sizeof(__pyx_k_aiohttp__http_parser), 0, 0, 1, 1}, {&__pyx_kp_s_aiohttp__http_parser_pyx, __pyx_k_aiohttp__http_parser_pyx, sizeof(__pyx_k_aiohttp__http_parser_pyx), 0, 0, 1, 0}, {&__pyx_n_s_all, __pyx_k_all, sizeof(__pyx_k_all), 0, 0, 1, 1}, {&__pyx_n_s_auto_decompress, __pyx_k_auto_decompress, sizeof(__pyx_k_auto_decompress), 0, 0, 1, 1}, {&__pyx_n_s_begin_http_chunk_receiving, __pyx_k_begin_http_chunk_receiving, 
sizeof(__pyx_k_begin_http_chunk_receiving), 0, 0, 1, 1}, {&__pyx_n_u_br, __pyx_k_br, sizeof(__pyx_k_br), 0, 1, 0, 1}, {&__pyx_n_s_buf_data, __pyx_k_buf_data, sizeof(__pyx_k_buf_data), 0, 0, 1, 1}, {&__pyx_n_s_build, __pyx_k_build, sizeof(__pyx_k_build), 0, 0, 1, 1}, {&__pyx_n_s_clear, __pyx_k_clear, sizeof(__pyx_k_clear), 0, 0, 1, 1}, {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, {&__pyx_n_s_decode, __pyx_k_decode, sizeof(__pyx_k_decode), 0, 0, 1, 1}, {&__pyx_n_u_deflate, __pyx_k_deflate, sizeof(__pyx_k_deflate), 0, 1, 0, 1}, {&__pyx_n_s_end_http_chunk_receiving, __pyx_k_end_http_chunk_receiving, sizeof(__pyx_k_end_http_chunk_receiving), 0, 0, 1, 1}, {&__pyx_n_s_extend, __pyx_k_extend, sizeof(__pyx_k_extend), 0, 0, 1, 1}, {&__pyx_n_s_feed_data, __pyx_k_feed_data, sizeof(__pyx_k_feed_data), 0, 0, 1, 1}, {&__pyx_n_s_feed_eof, __pyx_k_feed_eof, sizeof(__pyx_k_feed_eof), 0, 0, 1, 1}, {&__pyx_n_s_format, __pyx_k_format, sizeof(__pyx_k_format), 0, 0, 1, 1}, {&__pyx_n_s_fragment, __pyx_k_fragment, sizeof(__pyx_k_fragment), 0, 0, 1, 1}, {&__pyx_n_s_get, __pyx_k_get, sizeof(__pyx_k_get), 0, 0, 1, 1}, {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, {&__pyx_n_u_gzip, __pyx_k_gzip, sizeof(__pyx_k_gzip), 0, 1, 0, 1}, {&__pyx_n_s_hdrs, __pyx_k_hdrs, sizeof(__pyx_k_hdrs), 0, 0, 1, 1}, {&__pyx_n_s_host, __pyx_k_host, sizeof(__pyx_k_host), 0, 0, 1, 1}, {&__pyx_n_s_http_exceptions, __pyx_k_http_exceptions, sizeof(__pyx_k_http_exceptions), 0, 0, 1, 1}, {&__pyx_n_s_http_parser, __pyx_k_http_parser, sizeof(__pyx_k_http_parser), 0, 0, 1, 1}, {&__pyx_n_s_http_version, __pyx_k_http_version, sizeof(__pyx_k_http_version), 0, 0, 1, 1}, {&__pyx_n_s_http_writer, __pyx_k_http_writer, sizeof(__pyx_k_http_writer), 0, 0, 1, 1}, {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, {&__pyx_kp_u_invalid_url_r, __pyx_k_invalid_url_r, sizeof(__pyx_k_invalid_url_r), 0, 1, 0, 0}, 
{&__pyx_n_s_length, __pyx_k_length, sizeof(__pyx_k_length), 0, 0, 1, 1}, {&__pyx_n_s_ln, __pyx_k_ln, sizeof(__pyx_k_ln), 0, 0, 1, 1}, {&__pyx_n_s_loop, __pyx_k_loop, sizeof(__pyx_k_loop), 0, 0, 1, 1}, {&__pyx_n_s_lower, __pyx_k_lower, sizeof(__pyx_k_lower), 0, 0, 1, 1}, {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, {&__pyx_n_s_max_field_size, __pyx_k_max_field_size, sizeof(__pyx_k_max_field_size), 0, 0, 1, 1}, {&__pyx_n_s_max_headers, __pyx_k_max_headers, sizeof(__pyx_k_max_headers), 0, 0, 1, 1}, {&__pyx_n_s_max_line_size, __pyx_k_max_line_size, sizeof(__pyx_k_max_line_size), 0, 0, 1, 1}, {&__pyx_n_s_multidict, __pyx_k_multidict, sizeof(__pyx_k_multidict), 0, 0, 1, 1}, {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, {&__pyx_kp_s_no_default___reduce___due_to_non, __pyx_k_no_default___reduce___due_to_non, sizeof(__pyx_k_no_default___reduce___due_to_non), 0, 0, 1, 0}, {&__pyx_n_s_off, __pyx_k_off, sizeof(__pyx_k_off), 0, 0, 1, 1}, {&__pyx_n_s_parse_url, __pyx_k_parse_url, sizeof(__pyx_k_parse_url), 0, 0, 1, 1}, {&__pyx_n_s_parse_url_2, __pyx_k_parse_url_2, sizeof(__pyx_k_parse_url_2), 0, 0, 1, 1}, {&__pyx_n_u_parse_url_2, __pyx_k_parse_url_2, sizeof(__pyx_k_parse_url_2), 0, 1, 0, 1}, {&__pyx_n_s_parsed, __pyx_k_parsed, sizeof(__pyx_k_parsed), 0, 0, 1, 1}, {&__pyx_n_s_partition, __pyx_k_partition, sizeof(__pyx_k_partition), 0, 0, 1, 1}, {&__pyx_n_s_password, __pyx_k_password, sizeof(__pyx_k_password), 0, 0, 1, 1}, {&__pyx_n_s_path, __pyx_k_path, sizeof(__pyx_k_path), 0, 0, 1, 1}, {&__pyx_n_s_payload_exception, __pyx_k_payload_exception, sizeof(__pyx_k_payload_exception), 0, 0, 1, 1}, {&__pyx_n_s_port, __pyx_k_port, sizeof(__pyx_k_port), 0, 0, 1, 1}, {&__pyx_n_s_protocol, __pyx_k_protocol, sizeof(__pyx_k_protocol), 0, 0, 1, 1}, {&__pyx_n_s_py_buf, __pyx_k_py_buf, sizeof(__pyx_k_py_buf), 0, 0, 1, 1}, {&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1}, {&__pyx_n_s_query, __pyx_k_query, 
sizeof(__pyx_k_query), 0, 0, 1, 1}, {&__pyx_n_s_read_until_eof, __pyx_k_read_until_eof, sizeof(__pyx_k_read_until_eof), 0, 0, 1, 1}, {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1}, {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1}, {&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1}, {&__pyx_n_s_res, __pyx_k_res, sizeof(__pyx_k_res), 0, 0, 1, 1}, {&__pyx_n_s_response_with_body, __pyx_k_response_with_body, sizeof(__pyx_k_response_with_body), 0, 0, 1, 1}, {&__pyx_n_s_result, __pyx_k_result, sizeof(__pyx_k_result), 0, 0, 1, 1}, {&__pyx_n_s_schema, __pyx_k_schema, sizeof(__pyx_k_schema), 0, 0, 1, 1}, {&__pyx_n_s_scheme, __pyx_k_scheme, sizeof(__pyx_k_scheme), 0, 0, 1, 1}, {&__pyx_n_s_sep, __pyx_k_sep, sizeof(__pyx_k_sep), 0, 0, 1, 1}, {&__pyx_n_s_set_exception, __pyx_k_set_exception, sizeof(__pyx_k_set_exception), 0, 0, 1, 1}, {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1}, {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1}, {&__pyx_n_s_streams, __pyx_k_streams, sizeof(__pyx_k_streams), 0, 0, 1, 1}, {&__pyx_n_u_surrogateescape, __pyx_k_surrogateescape, sizeof(__pyx_k_surrogateescape), 0, 1, 0, 1}, {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, {&__pyx_n_s_timer, __pyx_k_timer, sizeof(__pyx_k_timer), 0, 0, 1, 1}, {&__pyx_n_s_url, __pyx_k_url, sizeof(__pyx_k_url), 0, 0, 1, 1}, {&__pyx_n_s_user, __pyx_k_user, sizeof(__pyx_k_user), 0, 0, 1, 1}, {&__pyx_n_s_userinfo, __pyx_k_userinfo, sizeof(__pyx_k_userinfo), 0, 0, 1, 1}, {&__pyx_kp_u_utf_8, __pyx_k_utf_8, sizeof(__pyx_k_utf_8), 0, 1, 0, 0}, {&__pyx_n_s_yarl, __pyx_k_yarl, sizeof(__pyx_k_yarl), 0, 0, 1, 1}, {0, 0, 0, 0, 0, 0, 0} }; static int __Pyx_InitCachedBuiltins(void) { __pyx_builtin_MemoryError = __Pyx_GetBuiltinName(__pyx_n_s_MemoryError); if (!__pyx_builtin_MemoryError) __PYX_ERR(0, 70, __pyx_L1_error) 
__pyx_builtin_TypeError = __Pyx_GetBuiltinName(__pyx_n_s_TypeError); if (!__pyx_builtin_TypeError) __PYX_ERR(1, 2, __pyx_L1_error) __pyx_builtin_BaseException = __Pyx_GetBuiltinName(__pyx_n_s_BaseException); if (!__pyx_builtin_BaseException) __PYX_ERR(0, 373, __pyx_L1_error) return 0; __pyx_L1_error:; return -1; } static int __Pyx_InitCachedConstants(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); /* "aiohttp/_http_parser.pyx":166 * CONTENT_ENCODING=hdrs.CONTENT_ENCODING, * SEC_WEBSOCKET_KEY1=hdrs.SEC_WEBSOCKET_KEY1, * SUPPORTED=('gzip', 'deflate', 'br')): # <<<<<<<<<<<<<< * self._process_header() * */ __pyx_tuple__3 = PyTuple_Pack(3, __pyx_n_u_gzip, __pyx_n_u_deflate, __pyx_n_u_br); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(0, 166, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__3); __Pyx_GIVEREF(__pyx_tuple__3); /* "aiohttp/_http_parser.pyx":249 * if self._payload is not None: * if self._cparser.flags & cparser.F_CHUNKED: * raise TransferEncodingError( # <<<<<<<<<<<<<< * "Not enough data for satisfy transfer length header.") * elif self._cparser.flags & cparser.F_CONTENTLENGTH: */ __pyx_tuple__4 = PyTuple_Pack(1, __pyx_kp_u_Not_enough_data_for_satisfy_tran); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(0, 249, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__4); __Pyx_GIVEREF(__pyx_tuple__4); /* "aiohttp/_http_parser.pyx":252 * "Not enough data for satisfy transfer length header.") * elif self._cparser.flags & cparser.F_CONTENTLENGTH: * raise ContentLengthError( # <<<<<<<<<<<<<< * "Not enough data for satisfy content length header.") * elif self._cparser.http_errno != cparser.HPE_OK: */ __pyx_tuple__5 = PyTuple_Pack(1, __pyx_kp_u_Not_enough_data_for_satisfy_cont); if (unlikely(!__pyx_tuple__5)) __PYX_ERR(0, 252, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__5); __Pyx_GIVEREF(__pyx_tuple__5); /* "(tree fragment)":2 * def __reduce_cython__(self): * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< * def 
__setstate_cython__(self, __pyx_state): * raise TypeError("no default __reduce__ due to non-trivial __cinit__") */ __pyx_tuple__7 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(1, 2, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__7); __Pyx_GIVEREF(__pyx_tuple__7); /* "(tree fragment)":4 * raise TypeError("no default __reduce__ due to non-trivial __cinit__") * def __setstate_cython__(self, __pyx_state): * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< */ __pyx_tuple__8 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__8)) __PYX_ERR(1, 4, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__8); __Pyx_GIVEREF(__pyx_tuple__8); /* "(tree fragment)":2 * def __reduce_cython__(self): * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< * def __setstate_cython__(self, __pyx_state): * raise TypeError("no default __reduce__ due to non-trivial __cinit__") */ __pyx_tuple__9 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__9)) __PYX_ERR(1, 2, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__9); __Pyx_GIVEREF(__pyx_tuple__9); /* "(tree fragment)":4 * raise TypeError("no default __reduce__ due to non-trivial __cinit__") * def __setstate_cython__(self, __pyx_state): * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< */ __pyx_tuple__10 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__10)) __PYX_ERR(1, 4, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__10); __Pyx_GIVEREF(__pyx_tuple__10); /* "(tree fragment)":2 * def __reduce_cython__(self): * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< * def __setstate_cython__(self, __pyx_state): * raise TypeError("no default __reduce__ due to non-trivial __cinit__") */ __pyx_tuple__11 = PyTuple_Pack(1, 
__pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__11)) __PYX_ERR(1, 2, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__11); __Pyx_GIVEREF(__pyx_tuple__11); /* "(tree fragment)":4 * raise TypeError("no default __reduce__ due to non-trivial __cinit__") * def __setstate_cython__(self, __pyx_state): * raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<< */ __pyx_tuple__12 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__12)) __PYX_ERR(1, 4, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__12); __Pyx_GIVEREF(__pyx_tuple__12); /* "aiohttp/_http_parser.pyx":611 * userinfo = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') * * user, sep, password = userinfo.partition(':') # <<<<<<<<<<<<<< * * return URL.build(scheme=schema, */ __pyx_tuple__14 = PyTuple_Pack(1, __pyx_kp_u__13); if (unlikely(!__pyx_tuple__14)) __PYX_ERR(0, 611, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__14); __Pyx_GIVEREF(__pyx_tuple__14); /* "aiohttp/_http_parser.pyx":25 * * * __all__ = ('HttpRequestParserC', 'HttpResponseMessageC', 'parse_url') # <<<<<<<<<<<<<< * * */ __pyx_tuple__15 = PyTuple_Pack(3, __pyx_n_u_HttpRequestParserC, __pyx_n_u_HttpResponseMessageC, __pyx_n_u_parse_url_2); if (unlikely(!__pyx_tuple__15)) __PYX_ERR(0, 25, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__15); __Pyx_GIVEREF(__pyx_tuple__15); /* "aiohttp/_http_parser.pyx":529 * * * def parse_url(url): # <<<<<<<<<<<<<< * cdef: * Py_buffer py_buf */ __pyx_tuple__16 = PyTuple_Pack(3, __pyx_n_s_url, __pyx_n_s_py_buf, __pyx_n_s_buf_data); if (unlikely(!__pyx_tuple__16)) __PYX_ERR(0, 529, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__16); __Pyx_GIVEREF(__pyx_tuple__16); __pyx_codeobj__17 = (PyObject*)__Pyx_PyCode_New(1, 0, 3, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__16, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_aiohttp__http_parser_pyx, __pyx_n_s_parse_url_2, 529, __pyx_empty_bytes); if 
(unlikely(!__pyx_codeobj__17)) __PYX_ERR(0, 529, __pyx_L1_error) /* "aiohttp/_http_parser.pyx":542 * * * def _parse_url(char* buf_data, size_t length): # <<<<<<<<<<<<<< * cdef: * cparser.http_parser_url* parsed */ __pyx_tuple__18 = PyTuple_Pack(17, __pyx_n_s_buf_data, __pyx_n_s_length, __pyx_n_s_parsed, __pyx_n_s_res, __pyx_n_s_schema, __pyx_n_s_host, __pyx_n_s_port, __pyx_n_s_path, __pyx_n_s_query, __pyx_n_s_fragment, __pyx_n_s_user, __pyx_n_s_password, __pyx_n_s_userinfo, __pyx_n_s_result, __pyx_n_s_off, __pyx_n_s_ln, __pyx_n_s_sep); if (unlikely(!__pyx_tuple__18)) __PYX_ERR(0, 542, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__18); __Pyx_GIVEREF(__pyx_tuple__18); __pyx_codeobj__19 = (PyObject*)__Pyx_PyCode_New(2, 0, 17, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__18, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_aiohttp__http_parser_pyx, __pyx_n_s_parse_url, 542, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__19)) __PYX_ERR(0, 542, __pyx_L1_error) __Pyx_RefNannyFinishContext(); return 0; __pyx_L1_error:; __Pyx_RefNannyFinishContext(); return -1; } static int __Pyx_InitGlobals(void) { if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); return 0; __pyx_L1_error:; return -1; } #if PY_MAJOR_VERSION < 3 PyMODINIT_FUNC init_http_parser(void); /*proto*/ PyMODINIT_FUNC init_http_parser(void) #else PyMODINIT_FUNC PyInit__http_parser(void); /*proto*/ PyMODINIT_FUNC PyInit__http_parser(void) #if CYTHON_PEP489_MULTI_PHASE_INIT { return PyModuleDef_Init(&__pyx_moduledef); } static int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name) { PyObject *value = PyObject_GetAttrString(spec, from_name); int result = 0; if (likely(value)) { result = PyDict_SetItemString(moddict, to_name, value); Py_DECREF(value); } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { PyErr_Clear(); } else { result = -1; } return result; } static PyObject* 
__pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { PyObject *module = NULL, *moddict, *modname; if (__pyx_m) return __Pyx_NewRef(__pyx_m); modname = PyObject_GetAttrString(spec, "name"); if (unlikely(!modname)) goto bad; module = PyModule_NewObject(modname); Py_DECREF(modname); if (unlikely(!module)) goto bad; moddict = PyModule_GetDict(module); if (unlikely(!moddict)) goto bad; if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__") < 0)) goto bad; if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__") < 0)) goto bad; if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__") < 0)) goto bad; if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__") < 0)) goto bad; return module; bad: Py_XDECREF(module); return NULL; } static int __pyx_pymod_exec__http_parser(PyObject *__pyx_pyinit_module) #endif #endif { PyObject *__pyx_t_1 = NULL; PyObject *__pyx_t_2 = NULL; __Pyx_RefNannyDeclarations #if CYTHON_PEP489_MULTI_PHASE_INIT if (__pyx_m && __pyx_m == __pyx_pyinit_module) return 0; #endif #if CYTHON_REFNANNY __Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); if (!__Pyx_RefNanny) { PyErr_Clear(); __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); if (!__Pyx_RefNanny) Py_FatalError("failed to import 'refnanny' module"); } #endif __Pyx_RefNannySetupContext("PyMODINIT_FUNC PyInit__http_parser(void)", 0); if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) #ifdef __Pyx_CyFunction_USED if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef 
__Pyx_FusedFunction_USED if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_Coroutine_USED if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_Generator_USED if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_AsyncGen_USED if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_StopAsyncIteration_USED if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif /*--- Library function declarations ---*/ /*--- Threads initialization code ---*/ #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS #ifdef WITH_THREAD /* Python build with threading support? */ PyEval_InitThreads(); #endif #endif /*--- Module creation code ---*/ #if CYTHON_PEP489_MULTI_PHASE_INIT __pyx_m = __pyx_pyinit_module; Py_INCREF(__pyx_m); #else #if PY_MAJOR_VERSION < 3 __pyx_m = Py_InitModule4("_http_parser", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); #else __pyx_m = PyModule_Create(&__pyx_moduledef); #endif if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) #endif __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) Py_INCREF(__pyx_d); __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) #if CYTHON_COMPILING_IN_PYPY Py_INCREF(__pyx_b); #endif if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); /*--- Initialize various global constants etc. 
---*/ if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif if (__pyx_module_is_main_aiohttp___http_parser) { if (PyObject_SetAttrString(__pyx_m, "__name__", __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) } #if PY_MAJOR_VERSION >= 3 { PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) if (!PyDict_GetItemString(modules, "aiohttp._http_parser")) { if (unlikely(PyDict_SetItemString(modules, "aiohttp._http_parser", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error) } } #endif /*--- Builtin init code ---*/ if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) /*--- Constants init code ---*/ if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) /*--- Global init code ---*/ /*--- Variable export code ---*/ /*--- Function export code ---*/ /*--- Type init code ---*/ __pyx_vtabptr_7aiohttp_12_http_parser_HttpParser = &__pyx_vtable_7aiohttp_12_http_parser_HttpParser; __pyx_vtable_7aiohttp_12_http_parser_HttpParser._init = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *, enum http_parser_type, PyObject *, PyObject *, struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__init *__pyx_optional_args))__pyx_f_7aiohttp_12_http_parser_10HttpParser__init; __pyx_vtable_7aiohttp_12_http_parser_HttpParser._process_header = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *))__pyx_f_7aiohttp_12_http_parser_10HttpParser__process_header; __pyx_vtable_7aiohttp_12_http_parser_HttpParser._on_header_field = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *, PyObject *, PyObject *))__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_field; __pyx_vtable_7aiohttp_12_http_parser_HttpParser._on_header_value = (PyObject *(*)(struct 
__pyx_obj_7aiohttp_12_http_parser_HttpParser *, PyObject *, PyObject *))__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_header_value; __pyx_vtable_7aiohttp_12_http_parser_HttpParser._on_headers_complete = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *, struct __pyx_opt_args_7aiohttp_12_http_parser_10HttpParser__on_headers_complete *__pyx_optional_args))__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_headers_complete; __pyx_vtable_7aiohttp_12_http_parser_HttpParser._on_message_complete = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *))__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_message_complete; __pyx_vtable_7aiohttp_12_http_parser_HttpParser._on_chunk_header = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *))__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_chunk_header; __pyx_vtable_7aiohttp_12_http_parser_HttpParser._on_chunk_complete = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *))__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_chunk_complete; __pyx_vtable_7aiohttp_12_http_parser_HttpParser._on_status_complete = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *))__pyx_f_7aiohttp_12_http_parser_10HttpParser__on_status_complete; if (PyType_Ready(&__pyx_type_7aiohttp_12_http_parser_HttpParser) < 0) __PYX_ERR(0, 29, __pyx_L1_error) __pyx_type_7aiohttp_12_http_parser_HttpParser.tp_print = 0; if (__Pyx_SetVtable(__pyx_type_7aiohttp_12_http_parser_HttpParser.tp_dict, __pyx_vtabptr_7aiohttp_12_http_parser_HttpParser) < 0) __PYX_ERR(0, 29, __pyx_L1_error) if (__Pyx_setup_reduce((PyObject*)&__pyx_type_7aiohttp_12_http_parser_HttpParser) < 0) __PYX_ERR(0, 29, __pyx_L1_error) __pyx_ptype_7aiohttp_12_http_parser_HttpParser = &__pyx_type_7aiohttp_12_http_parser_HttpParser; __pyx_vtabptr_7aiohttp_12_http_parser_HttpRequestParserC = &__pyx_vtable_7aiohttp_12_http_parser_HttpRequestParserC; __pyx_vtable_7aiohttp_12_http_parser_HttpRequestParserC.__pyx_base = 
*__pyx_vtabptr_7aiohttp_12_http_parser_HttpParser; __pyx_vtable_7aiohttp_12_http_parser_HttpRequestParserC.__pyx_base._on_status_complete = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *))__pyx_f_7aiohttp_12_http_parser_18HttpRequestParserC__on_status_complete; __pyx_type_7aiohttp_12_http_parser_HttpRequestParserC.tp_base = __pyx_ptype_7aiohttp_12_http_parser_HttpParser; if (PyType_Ready(&__pyx_type_7aiohttp_12_http_parser_HttpRequestParserC) < 0) __PYX_ERR(0, 309, __pyx_L1_error) __pyx_type_7aiohttp_12_http_parser_HttpRequestParserC.tp_print = 0; if (__Pyx_SetVtable(__pyx_type_7aiohttp_12_http_parser_HttpRequestParserC.tp_dict, __pyx_vtabptr_7aiohttp_12_http_parser_HttpRequestParserC) < 0) __PYX_ERR(0, 309, __pyx_L1_error) if (PyObject_SetAttrString(__pyx_m, "HttpRequestParserC", (PyObject *)&__pyx_type_7aiohttp_12_http_parser_HttpRequestParserC) < 0) __PYX_ERR(0, 309, __pyx_L1_error) if (__Pyx_setup_reduce((PyObject*)&__pyx_type_7aiohttp_12_http_parser_HttpRequestParserC) < 0) __PYX_ERR(0, 309, __pyx_L1_error) __pyx_ptype_7aiohttp_12_http_parser_HttpRequestParserC = &__pyx_type_7aiohttp_12_http_parser_HttpRequestParserC; __pyx_vtabptr_7aiohttp_12_http_parser_HttpResponseParserC = &__pyx_vtable_7aiohttp_12_http_parser_HttpResponseParserC; __pyx_vtable_7aiohttp_12_http_parser_HttpResponseParserC.__pyx_base = *__pyx_vtabptr_7aiohttp_12_http_parser_HttpParser; __pyx_vtable_7aiohttp_12_http_parser_HttpResponseParserC.__pyx_base._on_status_complete = (PyObject *(*)(struct __pyx_obj_7aiohttp_12_http_parser_HttpParser *))__pyx_f_7aiohttp_12_http_parser_19HttpResponseParserC__on_status_complete; __pyx_type_7aiohttp_12_http_parser_HttpResponseParserC.tp_base = __pyx_ptype_7aiohttp_12_http_parser_HttpParser; if (PyType_Ready(&__pyx_type_7aiohttp_12_http_parser_HttpResponseParserC) < 0) __PYX_ERR(0, 336, __pyx_L1_error) __pyx_type_7aiohttp_12_http_parser_HttpResponseParserC.tp_print = 0; if 
(__Pyx_SetVtable(__pyx_type_7aiohttp_12_http_parser_HttpResponseParserC.tp_dict, __pyx_vtabptr_7aiohttp_12_http_parser_HttpResponseParserC) < 0) __PYX_ERR(0, 336, __pyx_L1_error) if (PyObject_SetAttrString(__pyx_m, "HttpResponseParserC", (PyObject *)&__pyx_type_7aiohttp_12_http_parser_HttpResponseParserC) < 0) __PYX_ERR(0, 336, __pyx_L1_error) if (__Pyx_setup_reduce((PyObject*)&__pyx_type_7aiohttp_12_http_parser_HttpResponseParserC) < 0) __PYX_ERR(0, 336, __pyx_L1_error) __pyx_ptype_7aiohttp_12_http_parser_HttpResponseParserC = &__pyx_type_7aiohttp_12_http_parser_HttpResponseParserC; /*--- Type import code ---*/ __pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__Pyx_BUILTIN_MODULE_NAME, "type", #if CYTHON_COMPILING_IN_PYPY sizeof(PyTypeObject), #else sizeof(PyHeapTypeObject), #endif 0); if (unlikely(!__pyx_ptype_7cpython_4type_type)) __PYX_ERR(2, 9, __pyx_L1_error) __pyx_ptype_7cpython_4bool_bool = __Pyx_ImportType(__Pyx_BUILTIN_MODULE_NAME, "bool", sizeof(PyBoolObject), 0); if (unlikely(!__pyx_ptype_7cpython_4bool_bool)) __PYX_ERR(3, 8, __pyx_L1_error) __pyx_ptype_7cpython_7complex_complex = __Pyx_ImportType(__Pyx_BUILTIN_MODULE_NAME, "complex", sizeof(PyComplexObject), 0); if (unlikely(!__pyx_ptype_7cpython_7complex_complex)) __PYX_ERR(4, 15, __pyx_L1_error) /*--- Variable import code ---*/ /*--- Function import code ---*/ /*--- Execution code ---*/ #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif /* "aiohttp/_http_parser.pyx":10 * Py_buffer, PyBytes_AsString * * from multidict import CIMultiDict # <<<<<<<<<<<<<< * from yarl import URL * */ __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 10, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_INCREF(__pyx_n_s_CIMultiDict); __Pyx_GIVEREF(__pyx_n_s_CIMultiDict); PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_CIMultiDict); __pyx_t_2 = __Pyx_Import(__pyx_n_s_multidict, __pyx_t_1, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 10, 
__pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_CIMultiDict); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 10, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); if (PyDict_SetItem(__pyx_d, __pyx_n_s_CIMultiDict, __pyx_t_1) < 0) __PYX_ERR(0, 10, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; /* "aiohttp/_http_parser.pyx":11 * * from multidict import CIMultiDict * from yarl import URL # <<<<<<<<<<<<<< * * from aiohttp import hdrs */ __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 11, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_INCREF(__pyx_n_s_URL); __Pyx_GIVEREF(__pyx_n_s_URL); PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_URL); __pyx_t_1 = __Pyx_Import(__pyx_n_s_yarl, __pyx_t_2, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 11, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_URL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 11, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_URL, __pyx_t_2) < 0) __PYX_ERR(0, 11, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":13 * from yarl import URL * * from aiohttp import hdrs # <<<<<<<<<<<<<< * from .http_exceptions import ( * BadHttpMessage, BadStatusLine, InvalidHeader, LineTooLong, InvalidURLError, */ __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 13, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_INCREF(__pyx_n_s_hdrs); __Pyx_GIVEREF(__pyx_n_s_hdrs); PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_hdrs); __pyx_t_2 = __Pyx_Import(__pyx_n_s_aiohttp, __pyx_t_1, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 13, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 13, __pyx_L1_error) 
__Pyx_GOTREF(__pyx_t_1); if (PyDict_SetItem(__pyx_d, __pyx_n_s_hdrs, __pyx_t_1) < 0) __PYX_ERR(0, 13, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; /* "aiohttp/_http_parser.pyx":15 * from aiohttp import hdrs * from .http_exceptions import ( * BadHttpMessage, BadStatusLine, InvalidHeader, LineTooLong, InvalidURLError, # <<<<<<<<<<<<<< * PayloadEncodingError, ContentLengthError, TransferEncodingError) * from .http_writer import HttpVersion, HttpVersion10, HttpVersion11 */ __pyx_t_2 = PyList_New(8); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 15, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_INCREF(__pyx_n_s_BadHttpMessage); __Pyx_GIVEREF(__pyx_n_s_BadHttpMessage); PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_BadHttpMessage); __Pyx_INCREF(__pyx_n_s_BadStatusLine); __Pyx_GIVEREF(__pyx_n_s_BadStatusLine); PyList_SET_ITEM(__pyx_t_2, 1, __pyx_n_s_BadStatusLine); __Pyx_INCREF(__pyx_n_s_InvalidHeader); __Pyx_GIVEREF(__pyx_n_s_InvalidHeader); PyList_SET_ITEM(__pyx_t_2, 2, __pyx_n_s_InvalidHeader); __Pyx_INCREF(__pyx_n_s_LineTooLong); __Pyx_GIVEREF(__pyx_n_s_LineTooLong); PyList_SET_ITEM(__pyx_t_2, 3, __pyx_n_s_LineTooLong); __Pyx_INCREF(__pyx_n_s_InvalidURLError); __Pyx_GIVEREF(__pyx_n_s_InvalidURLError); PyList_SET_ITEM(__pyx_t_2, 4, __pyx_n_s_InvalidURLError); __Pyx_INCREF(__pyx_n_s_PayloadEncodingError); __Pyx_GIVEREF(__pyx_n_s_PayloadEncodingError); PyList_SET_ITEM(__pyx_t_2, 5, __pyx_n_s_PayloadEncodingError); __Pyx_INCREF(__pyx_n_s_ContentLengthError); __Pyx_GIVEREF(__pyx_n_s_ContentLengthError); PyList_SET_ITEM(__pyx_t_2, 6, __pyx_n_s_ContentLengthError); __Pyx_INCREF(__pyx_n_s_TransferEncodingError); __Pyx_GIVEREF(__pyx_n_s_TransferEncodingError); PyList_SET_ITEM(__pyx_t_2, 7, __pyx_n_s_TransferEncodingError); /* "aiohttp/_http_parser.pyx":14 * * from aiohttp import hdrs * from .http_exceptions import ( # <<<<<<<<<<<<<< * BadHttpMessage, BadStatusLine, InvalidHeader, LineTooLong, InvalidURLError, * PayloadEncodingError, 
ContentLengthError, TransferEncodingError) */ __pyx_t_1 = __Pyx_Import(__pyx_n_s_http_exceptions, __pyx_t_2, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 14, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_BadHttpMessage); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_BadHttpMessage, __pyx_t_2) < 0) __PYX_ERR(0, 15, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_BadStatusLine); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_BadStatusLine, __pyx_t_2) < 0) __PYX_ERR(0, 15, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_InvalidHeader); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_InvalidHeader, __pyx_t_2) < 0) __PYX_ERR(0, 15, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_LineTooLong); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_LineTooLong, __pyx_t_2) < 0) __PYX_ERR(0, 15, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_InvalidURLError); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_InvalidURLError, __pyx_t_2) < 0) __PYX_ERR(0, 15, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_PayloadEncodingError); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_PayloadEncodingError, __pyx_t_2) < 0) __PYX_ERR(0, 16, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 
= 0; __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_ContentLengthError); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_ContentLengthError, __pyx_t_2) < 0) __PYX_ERR(0, 16, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_TransferEncodingError); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_TransferEncodingError, __pyx_t_2) < 0) __PYX_ERR(0, 16, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":17 * BadHttpMessage, BadStatusLine, InvalidHeader, LineTooLong, InvalidURLError, * PayloadEncodingError, ContentLengthError, TransferEncodingError) * from .http_writer import HttpVersion, HttpVersion10, HttpVersion11 # <<<<<<<<<<<<<< * from .http_parser import RawRequestMessage, RawResponseMessage, DeflateBuffer * from .streams import EMPTY_PAYLOAD, StreamReader */ __pyx_t_1 = PyList_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 17, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_INCREF(__pyx_n_s_HttpVersion); __Pyx_GIVEREF(__pyx_n_s_HttpVersion); PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_HttpVersion); __Pyx_INCREF(__pyx_n_s_HttpVersion10); __Pyx_GIVEREF(__pyx_n_s_HttpVersion10); PyList_SET_ITEM(__pyx_t_1, 1, __pyx_n_s_HttpVersion10); __Pyx_INCREF(__pyx_n_s_HttpVersion11); __Pyx_GIVEREF(__pyx_n_s_HttpVersion11); PyList_SET_ITEM(__pyx_t_1, 2, __pyx_n_s_HttpVersion11); __pyx_t_2 = __Pyx_Import(__pyx_n_s_http_writer, __pyx_t_1, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 17, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_HttpVersion); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 17, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); if (PyDict_SetItem(__pyx_d, __pyx_n_s_HttpVersion, __pyx_t_1) < 0) __PYX_ERR(0, 17, __pyx_L1_error) 
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_HttpVersion10); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 17, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); if (PyDict_SetItem(__pyx_d, __pyx_n_s_HttpVersion10, __pyx_t_1) < 0) __PYX_ERR(0, 17, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_HttpVersion11); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 17, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); if (PyDict_SetItem(__pyx_d, __pyx_n_s_HttpVersion11, __pyx_t_1) < 0) __PYX_ERR(0, 17, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; /* "aiohttp/_http_parser.pyx":18 * PayloadEncodingError, ContentLengthError, TransferEncodingError) * from .http_writer import HttpVersion, HttpVersion10, HttpVersion11 * from .http_parser import RawRequestMessage, RawResponseMessage, DeflateBuffer # <<<<<<<<<<<<<< * from .streams import EMPTY_PAYLOAD, StreamReader * */ __pyx_t_2 = PyList_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 18, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_INCREF(__pyx_n_s_RawRequestMessage); __Pyx_GIVEREF(__pyx_n_s_RawRequestMessage); PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_RawRequestMessage); __Pyx_INCREF(__pyx_n_s_RawResponseMessage); __Pyx_GIVEREF(__pyx_n_s_RawResponseMessage); PyList_SET_ITEM(__pyx_t_2, 1, __pyx_n_s_RawResponseMessage); __Pyx_INCREF(__pyx_n_s_DeflateBuffer); __Pyx_GIVEREF(__pyx_n_s_DeflateBuffer); PyList_SET_ITEM(__pyx_t_2, 2, __pyx_n_s_DeflateBuffer); __pyx_t_1 = __Pyx_Import(__pyx_n_s_http_parser, __pyx_t_2, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 18, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_RawRequestMessage); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 18, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_RawRequestMessage, __pyx_t_2) < 0) __PYX_ERR(0, 18, __pyx_L1_error) 
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_RawResponseMessage); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 18, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_RawResponseMessage, __pyx_t_2) < 0) __PYX_ERR(0, 18, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_DeflateBuffer); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 18, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_DeflateBuffer, __pyx_t_2) < 0) __PYX_ERR(0, 18, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":19 * from .http_writer import HttpVersion, HttpVersion10, HttpVersion11 * from .http_parser import RawRequestMessage, RawResponseMessage, DeflateBuffer * from .streams import EMPTY_PAYLOAD, StreamReader # <<<<<<<<<<<<<< * * cimport cython */ __pyx_t_1 = PyList_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 19, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_INCREF(__pyx_n_s_EMPTY_PAYLOAD); __Pyx_GIVEREF(__pyx_n_s_EMPTY_PAYLOAD); PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_EMPTY_PAYLOAD); __Pyx_INCREF(__pyx_n_s_StreamReader); __Pyx_GIVEREF(__pyx_n_s_StreamReader); PyList_SET_ITEM(__pyx_t_1, 1, __pyx_n_s_StreamReader); __pyx_t_2 = __Pyx_Import(__pyx_n_s_streams, __pyx_t_1, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 19, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_EMPTY_PAYLOAD); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 19, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); if (PyDict_SetItem(__pyx_d, __pyx_n_s_EMPTY_PAYLOAD, __pyx_t_1) < 0) __PYX_ERR(0, 19, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_StreamReader); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 19, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); if (PyDict_SetItem(__pyx_d, __pyx_n_s_StreamReader, 
__pyx_t_1) < 0) __PYX_ERR(0, 19, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; /* "aiohttp/_http_parser.pyx":25 * * * __all__ = ('HttpRequestParserC', 'HttpResponseMessageC', 'parse_url') # <<<<<<<<<<<<<< * * */ if (PyDict_SetItem(__pyx_d, __pyx_n_s_all, __pyx_tuple__15) < 0) __PYX_ERR(0, 25, __pyx_L1_error) /* "aiohttp/_http_parser.pyx":164 * ENCODING='utf-8', * ENCODING_ERR='surrogateescape', * CONTENT_ENCODING=hdrs.CONTENT_ENCODING, # <<<<<<<<<<<<<< * SEC_WEBSOCKET_KEY1=hdrs.SEC_WEBSOCKET_KEY1, * SUPPORTED=('gzip', 'deflate', 'br')): */ __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_hdrs); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 164, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_CONTENT_ENCODING); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 164, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_k_ = __pyx_t_1; __Pyx_GIVEREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_http_parser.pyx":165 * ENCODING_ERR='surrogateescape', * CONTENT_ENCODING=hdrs.CONTENT_ENCODING, * SEC_WEBSOCKET_KEY1=hdrs.SEC_WEBSOCKET_KEY1, # <<<<<<<<<<<<<< * SUPPORTED=('gzip', 'deflate', 'br')): * self._process_header() */ __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_hdrs); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 165, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_SEC_WEBSOCKET_KEY1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 165, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_k__2 = __pyx_t_2; __Pyx_GIVEREF(__pyx_t_2); __pyx_t_2 = 0; /* "aiohttp/_http_parser.pyx":529 * * * def parse_url(url): # <<<<<<<<<<<<<< * cdef: * Py_buffer py_buf */ __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_7aiohttp_12_http_parser_1parse_url, NULL, __pyx_n_s_aiohttp__http_parser); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 529, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, 
__pyx_n_s_parse_url_2, __pyx_t_2) < 0) __PYX_ERR(0, 529, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; /* "aiohttp/_http_parser.pyx":542 * * * def _parse_url(char* buf_data, size_t length): # <<<<<<<<<<<<<< * cdef: * cparser.http_parser_url* parsed */ __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_7aiohttp_12_http_parser_3_parse_url, NULL, __pyx_n_s_aiohttp__http_parser); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 542, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_parse_url, __pyx_t_2) < 0) __PYX_ERR(0, 542, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; /* "aiohttp/_http_parser.pyx":1 * #cython: language_level=3 # <<<<<<<<<<<<<< * # * # Based on https://github.com/MagicStack/httptools */ __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_2) < 0) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; /*--- Wrapped vars code ---*/ goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); if (__pyx_m) { if (__pyx_d) { __Pyx_AddTraceback("init aiohttp._http_parser", 0, __pyx_lineno, __pyx_filename); } Py_DECREF(__pyx_m); __pyx_m = 0; } else if (!PyErr_Occurred()) { PyErr_SetString(PyExc_ImportError, "init aiohttp._http_parser"); } __pyx_L0:; __Pyx_RefNannyFinishContext(); #if CYTHON_PEP489_MULTI_PHASE_INIT return (__pyx_m != NULL) ? 
0 : -1; #elif PY_MAJOR_VERSION >= 3 return __pyx_m; #else return; #endif } /* --- Runtime support code --- */ /* Refnanny */ #if CYTHON_REFNANNY static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { PyObject *m = NULL, *p = NULL; void *r = NULL; m = PyImport_ImportModule((char *)modname); if (!m) goto end; p = PyObject_GetAttrString(m, (char *)"RefNannyAPI"); if (!p) goto end; r = PyLong_AsVoidPtr(p); end: Py_XDECREF(p); Py_XDECREF(m); return (__Pyx_RefNannyAPIStruct *)r; } #endif /* GetBuiltinName */ static PyObject *__Pyx_GetBuiltinName(PyObject *name) { PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); if (unlikely(!result)) { PyErr_Format(PyExc_NameError, #if PY_MAJOR_VERSION >= 3 "name '%U' is not defined", name); #else "name '%.200s' is not defined", PyString_AS_STRING(name)); #endif } return result; } /* RaiseArgTupleInvalid */ static void __Pyx_RaiseArgtupleInvalid( const char* func_name, int exact, Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found) { Py_ssize_t num_expected; const char *more_or_less; if (num_found < num_min) { num_expected = num_min; more_or_less = "at least"; } else { num_expected = num_max; more_or_less = "at most"; } if (exact) { more_or_less = "exactly"; } PyErr_Format(PyExc_TypeError, "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", func_name, more_or_less, num_expected, (num_expected == 1) ? 
"" : "s", num_found); } /* KeywordStringCheck */ static int __Pyx_CheckKeywordStrings( PyObject *kwdict, const char* function_name, int kw_allowed) { PyObject* key = 0; Py_ssize_t pos = 0; #if CYTHON_COMPILING_IN_PYPY if (!kw_allowed && PyDict_Next(kwdict, &pos, &key, 0)) goto invalid_keyword; return 1; #else while (PyDict_Next(kwdict, &pos, &key, 0)) { #if PY_MAJOR_VERSION < 3 if (unlikely(!PyString_Check(key))) #endif if (unlikely(!PyUnicode_Check(key))) goto invalid_keyword_type; } if ((!kw_allowed) && unlikely(key)) goto invalid_keyword; return 1; invalid_keyword_type: PyErr_Format(PyExc_TypeError, "%.200s() keywords must be strings", function_name); return 0; #endif invalid_keyword: PyErr_Format(PyExc_TypeError, #if PY_MAJOR_VERSION < 3 "%.200s() got an unexpected keyword argument '%.200s'", function_name, PyString_AsString(key)); #else "%s() got an unexpected keyword argument '%U'", function_name, key); #endif return 0; } /* PyObjectCall */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { PyObject *result; ternaryfunc call = func->ob_type->tp_call; if (unlikely(!call)) return PyObject_Call(func, arg, kw); if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) return NULL; result = (*call)(func, arg, kw); Py_LeaveRecursiveCall(); if (unlikely(!result) && unlikely(!PyErr_Occurred())) { PyErr_SetString( PyExc_SystemError, "NULL result without error in PyObject_Call"); } return result; } #endif /* GetModuleGlobalName */ static CYTHON_INLINE PyObject *__Pyx_GetModuleGlobalName(PyObject *name) { PyObject *result; #if !CYTHON_AVOID_BORROWED_REFS result = PyDict_GetItem(__pyx_d, name); if (likely(result)) { Py_INCREF(result); } else { #else result = PyObject_GetItem(__pyx_d, name); if (!result) { PyErr_Clear(); #endif result = __Pyx_GetBuiltinName(name); } return result; } /* PyCFunctionFastCall */ #if CYTHON_FAST_PYCCALL static CYTHON_INLINE PyObject * 
__Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { PyCFunctionObject *func = (PyCFunctionObject*)func_obj; PyCFunction meth = PyCFunction_GET_FUNCTION(func); PyObject *self = PyCFunction_GET_SELF(func); int flags = PyCFunction_GET_FLAGS(func); assert(PyCFunction_Check(func)); assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS))); assert(nargs >= 0); assert(nargs == 0 || args != NULL); /* _PyCFunction_FastCallDict() must not be called with an exception set, because it may clear it (directly or indirectly) and so the caller loses its exception */ assert(!PyErr_Occurred()); if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { return (*((__Pyx_PyCFunctionFastWithKeywords)meth)) (self, args, nargs, NULL); } else { return (*((__Pyx_PyCFunctionFast)meth)) (self, args, nargs); } } #endif /* PyFunctionFastCall */ #if CYTHON_FAST_PYCALL #include "frameobject.h" static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, PyObject *globals) { PyFrameObject *f; PyThreadState *tstate = __Pyx_PyThreadState_Current; PyObject **fastlocals; Py_ssize_t i; PyObject *result; assert(globals != NULL); /* XXX Perhaps we should create a specialized PyFrame_New() that doesn't take locals, but does take builtins without sanity checking them. 
*/ assert(tstate != NULL); f = PyFrame_New(tstate, co, globals, NULL); if (f == NULL) { return NULL; } fastlocals = f->f_localsplus; for (i = 0; i < na; i++) { Py_INCREF(*args); fastlocals[i] = *args++; } result = PyEval_EvalFrameEx(f,0); ++tstate->recursion_depth; Py_DECREF(f); --tstate->recursion_depth; return result; } #if 1 || PY_VERSION_HEX < 0x030600B1 static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs) { PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); PyObject *globals = PyFunction_GET_GLOBALS(func); PyObject *argdefs = PyFunction_GET_DEFAULTS(func); PyObject *closure; #if PY_MAJOR_VERSION >= 3 PyObject *kwdefs; #endif PyObject *kwtuple, **k; PyObject **d; Py_ssize_t nd; Py_ssize_t nk; PyObject *result; assert(kwargs == NULL || PyDict_Check(kwargs)); nk = kwargs ? PyDict_Size(kwargs) : 0; if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { return NULL; } if ( #if PY_MAJOR_VERSION >= 3 co->co_kwonlyargcount == 0 && #endif likely(kwargs == NULL || nk == 0) && co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { if (argdefs == NULL && co->co_argcount == nargs) { result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); goto done; } else if (nargs == 0 && argdefs != NULL && co->co_argcount == Py_SIZE(argdefs)) { /* function called with no arguments, but all parameters have a default value: use default values as arguments .*/ args = &PyTuple_GET_ITEM(argdefs, 0); result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); goto done; } } if (kwargs != NULL) { Py_ssize_t pos, i; kwtuple = PyTuple_New(2 * nk); if (kwtuple == NULL) { result = NULL; goto done; } k = &PyTuple_GET_ITEM(kwtuple, 0); pos = i = 0; while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { Py_INCREF(k[i]); Py_INCREF(k[i+1]); i += 2; } nk = i / 2; } else { kwtuple = NULL; k = NULL; } closure = PyFunction_GET_CLOSURE(func); #if PY_MAJOR_VERSION >= 3 kwdefs = 
PyFunction_GET_KW_DEFAULTS(func); #endif if (argdefs != NULL) { d = &PyTuple_GET_ITEM(argdefs, 0); nd = Py_SIZE(argdefs); } else { d = NULL; nd = 0; } #if PY_MAJOR_VERSION >= 3 result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, args, nargs, k, (int)nk, d, (int)nd, kwdefs, closure); #else result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, args, nargs, k, (int)nk, d, (int)nd, closure); #endif Py_XDECREF(kwtuple); done: Py_LeaveRecursiveCall(); return result; } #endif #endif /* PyObjectCallMethO */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { PyObject *self, *result; PyCFunction cfunc; cfunc = PyCFunction_GET_FUNCTION(func); self = PyCFunction_GET_SELF(func); if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) return NULL; result = cfunc(self, arg); Py_LeaveRecursiveCall(); if (unlikely(!result) && unlikely(!PyErr_Occurred())) { PyErr_SetString( PyExc_SystemError, "NULL result without error in PyObject_Call"); } return result; } #endif /* PyObjectCallOneArg */ #if CYTHON_COMPILING_IN_CPYTHON static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { PyObject *result; PyObject *args = PyTuple_New(1); if (unlikely(!args)) return NULL; Py_INCREF(arg); PyTuple_SET_ITEM(args, 0, arg); result = __Pyx_PyObject_Call(func, args, NULL); Py_DECREF(args); return result; } static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { #if CYTHON_FAST_PYCALL if (PyFunction_Check(func)) { return __Pyx_PyFunction_FastCall(func, &arg, 1); } #endif if (likely(PyCFunction_Check(func))) { if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { return __Pyx_PyObject_CallMethO(func, arg); #if CYTHON_FAST_PYCCALL } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) { return __Pyx_PyCFunction_FastCall(func, &arg, 1); #endif } } return __Pyx__PyObject_CallOneArg(func, arg); } #else static CYTHON_INLINE PyObject* 
__Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { PyObject *result; PyObject *args = PyTuple_Pack(1, arg); if (unlikely(!args)) return NULL; result = __Pyx_PyObject_Call(func, args, NULL); Py_DECREF(args); return result; } #endif /* PyErrFetchRestore */ #if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { PyObject *tmp_type, *tmp_value, *tmp_tb; tmp_type = tstate->curexc_type; tmp_value = tstate->curexc_value; tmp_tb = tstate->curexc_traceback; tstate->curexc_type = type; tstate->curexc_value = value; tstate->curexc_traceback = tb; Py_XDECREF(tmp_type); Py_XDECREF(tmp_value); Py_XDECREF(tmp_tb); } static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { *type = tstate->curexc_type; *value = tstate->curexc_value; *tb = tstate->curexc_traceback; tstate->curexc_type = 0; tstate->curexc_value = 0; tstate->curexc_traceback = 0; } #endif /* RaiseException */ #if PY_MAJOR_VERSION < 3 static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, CYTHON_UNUSED PyObject *cause) { __Pyx_PyThreadState_declare Py_XINCREF(type); if (!value || value == Py_None) value = NULL; else Py_INCREF(value); if (!tb || tb == Py_None) tb = NULL; else { Py_INCREF(tb); if (!PyTraceBack_Check(tb)) { PyErr_SetString(PyExc_TypeError, "raise: arg 3 must be a traceback or None"); goto raise_error; } } if (PyType_Check(type)) { #if CYTHON_COMPILING_IN_PYPY if (!value) { Py_INCREF(Py_None); value = Py_None; } #endif PyErr_NormalizeException(&type, &value, &tb); } else { if (value) { PyErr_SetString(PyExc_TypeError, "instance exception may not have a separate value"); goto raise_error; } value = type; type = (PyObject*) Py_TYPE(type); Py_INCREF(type); if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { PyErr_SetString(PyExc_TypeError, "raise: exception class must be a subclass of 
BaseException"); goto raise_error; } } __Pyx_PyThreadState_assign __Pyx_ErrRestore(type, value, tb); return; raise_error: Py_XDECREF(value); Py_XDECREF(type); Py_XDECREF(tb); return; } #else static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { PyObject* owned_instance = NULL; if (tb == Py_None) { tb = 0; } else if (tb && !PyTraceBack_Check(tb)) { PyErr_SetString(PyExc_TypeError, "raise: arg 3 must be a traceback or None"); goto bad; } if (value == Py_None) value = 0; if (PyExceptionInstance_Check(type)) { if (value) { PyErr_SetString(PyExc_TypeError, "instance exception may not have a separate value"); goto bad; } value = type; type = (PyObject*) Py_TYPE(value); } else if (PyExceptionClass_Check(type)) { PyObject *instance_class = NULL; if (value && PyExceptionInstance_Check(value)) { instance_class = (PyObject*) Py_TYPE(value); if (instance_class != type) { int is_subclass = PyObject_IsSubclass(instance_class, type); if (!is_subclass) { instance_class = NULL; } else if (unlikely(is_subclass == -1)) { goto bad; } else { type = instance_class; } } } if (!instance_class) { PyObject *args; if (!value) args = PyTuple_New(0); else if (PyTuple_Check(value)) { Py_INCREF(value); args = value; } else args = PyTuple_Pack(1, value); if (!args) goto bad; owned_instance = PyObject_Call(type, args, NULL); Py_DECREF(args); if (!owned_instance) goto bad; value = owned_instance; if (!PyExceptionInstance_Check(value)) { PyErr_Format(PyExc_TypeError, "calling %R should have returned an instance of " "BaseException, not %R", type, Py_TYPE(value)); goto bad; } } } else { PyErr_SetString(PyExc_TypeError, "raise: exception class must be a subclass of BaseException"); goto bad; } if (cause) { PyObject *fixed_cause; if (cause == Py_None) { fixed_cause = NULL; } else if (PyExceptionClass_Check(cause)) { fixed_cause = PyObject_CallObject(cause, NULL); if (fixed_cause == NULL) goto bad; } else if (PyExceptionInstance_Check(cause)) { fixed_cause = cause; 
Py_INCREF(fixed_cause); } else { PyErr_SetString(PyExc_TypeError, "exception causes must derive from " "BaseException"); goto bad; } PyException_SetCause(value, fixed_cause); } PyErr_SetObject(type, value); if (tb) { #if CYTHON_COMPILING_IN_PYPY PyObject *tmp_type, *tmp_value, *tmp_tb; PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); Py_INCREF(tb); PyErr_Restore(tmp_type, tmp_value, tb); Py_XDECREF(tmp_tb); #else PyThreadState *tstate = __Pyx_PyThreadState_Current; PyObject* tmp_tb = tstate->curexc_traceback; if (tb != tmp_tb) { Py_INCREF(tb); tstate->curexc_traceback = tb; Py_XDECREF(tmp_tb); } #endif } bad: Py_XDECREF(owned_instance); return; } #endif /* PyObjectCallNoArg */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { #if CYTHON_FAST_PYCALL if (PyFunction_Check(func)) { return __Pyx_PyFunction_FastCall(func, NULL, 0); } #endif #ifdef __Pyx_CyFunction_USED if (likely(PyCFunction_Check(func) || __Pyx_TypeCheck(func, __pyx_CyFunctionType))) { #else if (likely(PyCFunction_Check(func))) { #endif if (likely(PyCFunction_GET_FLAGS(func) & METH_NOARGS)) { return __Pyx_PyObject_CallMethO(func, NULL); } } return __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL); } #endif /* decode_c_bytes */ static CYTHON_INLINE PyObject* __Pyx_decode_c_bytes( const char* cstring, Py_ssize_t length, Py_ssize_t start, Py_ssize_t stop, const char* encoding, const char* errors, PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) { if (unlikely((start < 0) | (stop < 0))) { if (start < 0) { start += length; if (start < 0) start = 0; } if (stop < 0) stop += length; } if (stop > length) stop = length; length = stop - start; if (unlikely(length <= 0)) return PyUnicode_FromUnicode(NULL, 0); cstring += start; if (decode_func) { return decode_func(cstring, length, errors); } else { return PyUnicode_Decode(cstring, length, encoding, errors); } } /* GetItemInt */ static PyObject *__Pyx_GetItemInt_Generic(PyObject *o, 
PyObject* j) { PyObject *r; if (!j) return NULL; r = PyObject_GetItem(o, j); Py_DECREF(j); return r; } static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, CYTHON_NCP_UNUSED int wraparound, CYTHON_NCP_UNUSED int boundscheck) { #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS Py_ssize_t wrapped_i = i; if (wraparound & unlikely(i < 0)) { wrapped_i += PyList_GET_SIZE(o); } if ((!boundscheck) || likely((0 <= wrapped_i) & (wrapped_i < PyList_GET_SIZE(o)))) { PyObject *r = PyList_GET_ITEM(o, wrapped_i); Py_INCREF(r); return r; } return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); #else return PySequence_GetItem(o, i); #endif } static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, CYTHON_NCP_UNUSED int wraparound, CYTHON_NCP_UNUSED int boundscheck) { #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS Py_ssize_t wrapped_i = i; if (wraparound & unlikely(i < 0)) { wrapped_i += PyTuple_GET_SIZE(o); } if ((!boundscheck) || likely((0 <= wrapped_i) & (wrapped_i < PyTuple_GET_SIZE(o)))) { PyObject *r = PyTuple_GET_ITEM(o, wrapped_i); Py_INCREF(r); return r; } return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); #else return PySequence_GetItem(o, i); #endif } static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, CYTHON_NCP_UNUSED int wraparound, CYTHON_NCP_UNUSED int boundscheck) { #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS if (is_list || PyList_CheckExact(o)) { Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); if ((!boundscheck) || (likely((n >= 0) & (n < PyList_GET_SIZE(o))))) { PyObject *r = PyList_GET_ITEM(o, n); Py_INCREF(r); return r; } } else if (PyTuple_CheckExact(o)) { Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? 
i : i + PyTuple_GET_SIZE(o); if ((!boundscheck) || likely((n >= 0) & (n < PyTuple_GET_SIZE(o)))) { PyObject *r = PyTuple_GET_ITEM(o, n); Py_INCREF(r); return r; } } else { PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence; if (likely(m && m->sq_item)) { if (wraparound && unlikely(i < 0) && likely(m->sq_length)) { Py_ssize_t l = m->sq_length(o); if (likely(l >= 0)) { i += l; } else { if (!PyErr_ExceptionMatches(PyExc_OverflowError)) return NULL; PyErr_Clear(); } } return m->sq_item(o, i); } } #else if (is_list || PySequence_Check(o)) { return PySequence_GetItem(o, i); } #endif return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); } /* SliceObject */ static CYTHON_INLINE PyObject* __Pyx_PyObject_GetSlice(PyObject* obj, Py_ssize_t cstart, Py_ssize_t cstop, PyObject** _py_start, PyObject** _py_stop, PyObject** _py_slice, int has_cstart, int has_cstop, CYTHON_UNUSED int wraparound) { #if CYTHON_USE_TYPE_SLOTS PyMappingMethods* mp; #if PY_MAJOR_VERSION < 3 PySequenceMethods* ms = Py_TYPE(obj)->tp_as_sequence; if (likely(ms && ms->sq_slice)) { if (!has_cstart) { if (_py_start && (*_py_start != Py_None)) { cstart = __Pyx_PyIndex_AsSsize_t(*_py_start); if ((cstart == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; } else cstart = 0; } if (!has_cstop) { if (_py_stop && (*_py_stop != Py_None)) { cstop = __Pyx_PyIndex_AsSsize_t(*_py_stop); if ((cstop == (Py_ssize_t)-1) && PyErr_Occurred()) goto bad; } else cstop = PY_SSIZE_T_MAX; } if (wraparound && unlikely((cstart < 0) | (cstop < 0)) && likely(ms->sq_length)) { Py_ssize_t l = ms->sq_length(obj); if (likely(l >= 0)) { if (cstop < 0) { cstop += l; if (cstop < 0) cstop = 0; } if (cstart < 0) { cstart += l; if (cstart < 0) cstart = 0; } } else { if (!PyErr_ExceptionMatches(PyExc_OverflowError)) goto bad; PyErr_Clear(); } } return ms->sq_slice(obj, cstart, cstop); } #endif mp = Py_TYPE(obj)->tp_as_mapping; if (likely(mp && mp->mp_subscript)) #endif { PyObject* result; PyObject *py_slice, *py_start, *py_stop; if (_py_slice) { 
py_slice = *_py_slice; } else { PyObject* owned_start = NULL; PyObject* owned_stop = NULL; if (_py_start) { py_start = *_py_start; } else { if (has_cstart) { owned_start = py_start = PyInt_FromSsize_t(cstart); if (unlikely(!py_start)) goto bad; } else py_start = Py_None; } if (_py_stop) { py_stop = *_py_stop; } else { if (has_cstop) { owned_stop = py_stop = PyInt_FromSsize_t(cstop); if (unlikely(!py_stop)) { Py_XDECREF(owned_start); goto bad; } } else py_stop = Py_None; } py_slice = PySlice_New(py_start, py_stop, Py_None); Py_XDECREF(owned_start); Py_XDECREF(owned_stop); if (unlikely(!py_slice)) goto bad; } #if CYTHON_USE_TYPE_SLOTS result = mp->mp_subscript(obj, py_slice); #else result = PyObject_GetItem(obj, py_slice); #endif if (!_py_slice) { Py_DECREF(py_slice); } return result; } PyErr_Format(PyExc_TypeError, "'%.200s' object is unsliceable", Py_TYPE(obj)->tp_name); bad: return NULL; } /* RaiseDoubleKeywords */ static void __Pyx_RaiseDoubleKeywordsError( const char* func_name, PyObject* kw_name) { PyErr_Format(PyExc_TypeError, #if PY_MAJOR_VERSION >= 3 "%s() got multiple values for keyword argument '%U'", func_name, kw_name); #else "%s() got multiple values for keyword argument '%s'", func_name, PyString_AsString(kw_name)); #endif } /* ParseKeywords */ static int __Pyx_ParseOptionalKeywords( PyObject *kwds, PyObject **argnames[], PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, const char* function_name) { PyObject *key = 0, *value = 0; Py_ssize_t pos = 0; PyObject*** name; PyObject*** first_kw_arg = argnames + num_pos_args; while (PyDict_Next(kwds, &pos, &key, &value)) { name = first_kw_arg; while (*name && (**name != key)) name++; if (*name) { values[name-argnames] = value; continue; } name = first_kw_arg; #if PY_MAJOR_VERSION < 3 if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) { while (*name) { if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) && _PyString_Eq(**name, key)) { 
values[name-argnames] = value; break; } name++; } if (*name) continue; else { PyObject*** argname = argnames; while (argname != first_kw_arg) { if ((**argname == key) || ( (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) && _PyString_Eq(**argname, key))) { goto arg_passed_twice; } argname++; } } } else #endif if (likely(PyUnicode_Check(key))) { while (*name) { int cmp = (**name == key) ? 0 : #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 : #endif PyUnicode_Compare(**name, key); if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; if (cmp == 0) { values[name-argnames] = value; break; } name++; } if (*name) continue; else { PyObject*** argname = argnames; while (argname != first_kw_arg) { int cmp = (**argname == key) ? 0 : #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 : #endif PyUnicode_Compare(**argname, key); if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; if (cmp == 0) goto arg_passed_twice; argname++; } } } else goto invalid_keyword_type; if (kwds2) { if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; } else { goto invalid_keyword; } } return 0; arg_passed_twice: __Pyx_RaiseDoubleKeywordsError(function_name, key); goto bad; invalid_keyword_type: PyErr_Format(PyExc_TypeError, "%.200s() keywords must be strings", function_name); goto bad; invalid_keyword: PyErr_Format(PyExc_TypeError, #if PY_MAJOR_VERSION < 3 "%.200s() got an unexpected keyword argument '%.200s'", function_name, PyString_AsString(key)); #else "%s() got an unexpected keyword argument '%U'", function_name, key); #endif bad: return -1; } /* GetException */ #if CYTHON_FAST_THREAD_STATE static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { #else static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) { #endif PyObject *local_type, 
*local_value, *local_tb; #if CYTHON_FAST_THREAD_STATE PyObject *tmp_type, *tmp_value, *tmp_tb; local_type = tstate->curexc_type; local_value = tstate->curexc_value; local_tb = tstate->curexc_traceback; tstate->curexc_type = 0; tstate->curexc_value = 0; tstate->curexc_traceback = 0; #else PyErr_Fetch(&local_type, &local_value, &local_tb); #endif PyErr_NormalizeException(&local_type, &local_value, &local_tb); #if CYTHON_FAST_THREAD_STATE if (unlikely(tstate->curexc_type)) #else if (unlikely(PyErr_Occurred())) #endif goto bad; #if PY_MAJOR_VERSION >= 3 if (local_tb) { if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) goto bad; } #endif Py_XINCREF(local_tb); Py_XINCREF(local_type); Py_XINCREF(local_value); *type = local_type; *value = local_value; *tb = local_tb; #if CYTHON_FAST_THREAD_STATE #if PY_VERSION_HEX >= 0x030700A2 tmp_type = tstate->exc_state.exc_type; tmp_value = tstate->exc_state.exc_value; tmp_tb = tstate->exc_state.exc_traceback; tstate->exc_state.exc_type = local_type; tstate->exc_state.exc_value = local_value; tstate->exc_state.exc_traceback = local_tb; #else tmp_type = tstate->exc_type; tmp_value = tstate->exc_value; tmp_tb = tstate->exc_traceback; tstate->exc_type = local_type; tstate->exc_value = local_value; tstate->exc_traceback = local_tb; #endif Py_XDECREF(tmp_type); Py_XDECREF(tmp_value); Py_XDECREF(tmp_tb); #else PyErr_SetExcInfo(local_type, local_value, local_tb); #endif return 0; bad: *type = 0; *value = 0; *tb = 0; Py_XDECREF(local_type); Py_XDECREF(local_value); Py_XDECREF(local_tb); return -1; } /* SwapException */ #if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { PyObject *tmp_type, *tmp_value, *tmp_tb; #if PY_VERSION_HEX >= 0x030700A2 tmp_type = tstate->exc_state.exc_type; tmp_value = tstate->exc_state.exc_value; tmp_tb = tstate->exc_state.exc_traceback; tstate->exc_state.exc_type = *type; tstate->exc_state.exc_value = 
*value; tstate->exc_state.exc_traceback = *tb; #else tmp_type = tstate->exc_type; tmp_value = tstate->exc_value; tmp_tb = tstate->exc_traceback; tstate->exc_type = *type; tstate->exc_value = *value; tstate->exc_traceback = *tb; #endif *type = tmp_type; *value = tmp_value; *tb = tmp_tb; } #else static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb) { PyObject *tmp_type, *tmp_value, *tmp_tb; PyErr_GetExcInfo(&tmp_type, &tmp_value, &tmp_tb); PyErr_SetExcInfo(*type, *value, *tb); *type = tmp_type; *value = tmp_value; *tb = tmp_tb; } #endif /* SaveResetException */ #if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { #if PY_VERSION_HEX >= 0x030700A2 *type = tstate->exc_state.exc_type; *value = tstate->exc_state.exc_value; *tb = tstate->exc_state.exc_traceback; #else *type = tstate->exc_type; *value = tstate->exc_value; *tb = tstate->exc_traceback; #endif Py_XINCREF(*type); Py_XINCREF(*value); Py_XINCREF(*tb); } static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { PyObject *tmp_type, *tmp_value, *tmp_tb; #if PY_VERSION_HEX >= 0x030700A2 tmp_type = tstate->exc_state.exc_type; tmp_value = tstate->exc_state.exc_value; tmp_tb = tstate->exc_state.exc_traceback; tstate->exc_state.exc_type = type; tstate->exc_state.exc_value = value; tstate->exc_state.exc_traceback = tb; #else tmp_type = tstate->exc_type; tmp_value = tstate->exc_value; tmp_tb = tstate->exc_traceback; tstate->exc_type = type; tstate->exc_value = value; tstate->exc_traceback = tb; #endif Py_XDECREF(tmp_type); Py_XDECREF(tmp_value); Py_XDECREF(tmp_tb); } #endif /* PyErrExceptionMatches */ #if CYTHON_FAST_THREAD_STATE static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { Py_ssize_t i, n; n = PyTuple_GET_SIZE(tuple); #if PY_MAJOR_VERSION >= 3 for (i=0; icurexc_type; if (exc_type == 
err) return 1; if (unlikely(!exc_type)) return 0; if (unlikely(PyTuple_Check(err))) return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); return __Pyx_PyErr_GivenExceptionMatches(exc_type, err); } #endif /* decode_c_string */ static CYTHON_INLINE PyObject* __Pyx_decode_c_string( const char* cstring, Py_ssize_t start, Py_ssize_t stop, const char* encoding, const char* errors, PyObject* (*decode_func)(const char *s, Py_ssize_t size, const char *errors)) { Py_ssize_t length; if (unlikely((start < 0) | (stop < 0))) { size_t slen = strlen(cstring); if (unlikely(slen > (size_t) PY_SSIZE_T_MAX)) { PyErr_SetString(PyExc_OverflowError, "c-string too long to convert to Python"); return NULL; } length = (Py_ssize_t) slen; if (start < 0) { start += length; if (start < 0) start = 0; } if (stop < 0) stop += length; } length = stop - start; if (unlikely(length <= 0)) return PyUnicode_FromUnicode(NULL, 0); cstring += start; if (decode_func) { return decode_func(cstring, length, errors); } else { return PyUnicode_Decode(cstring, length, encoding, errors); } } /* RaiseTooManyValuesToUnpack */ static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { PyErr_Format(PyExc_ValueError, "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected); } /* RaiseNeedMoreValuesToUnpack */ static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { PyErr_Format(PyExc_ValueError, "need more than %" CYTHON_FORMAT_SSIZE_T "d value%.1s to unpack", index, (index == 1) ? 
"" : "s"); } /* IterFinish */ static CYTHON_INLINE int __Pyx_IterFinish(void) { #if CYTHON_FAST_THREAD_STATE PyThreadState *tstate = __Pyx_PyThreadState_Current; PyObject* exc_type = tstate->curexc_type; if (unlikely(exc_type)) { if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) { PyObject *exc_value, *exc_tb; exc_value = tstate->curexc_value; exc_tb = tstate->curexc_traceback; tstate->curexc_type = 0; tstate->curexc_value = 0; tstate->curexc_traceback = 0; Py_DECREF(exc_type); Py_XDECREF(exc_value); Py_XDECREF(exc_tb); return 0; } else { return -1; } } return 0; #else if (unlikely(PyErr_Occurred())) { if (likely(PyErr_ExceptionMatches(PyExc_StopIteration))) { PyErr_Clear(); return 0; } else { return -1; } } return 0; #endif } /* UnpackItemEndCheck */ static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) { if (unlikely(retval)) { Py_DECREF(retval); __Pyx_RaiseTooManyValuesError(expected); return -1; } else { return __Pyx_IterFinish(); } return 0; } /* SetVTable */ static int __Pyx_SetVtable(PyObject *dict, void *vtable) { #if PY_VERSION_HEX >= 0x02070000 PyObject *ob = PyCapsule_New(vtable, 0, 0); #else PyObject *ob = PyCObject_FromVoidPtr(vtable, 0); #endif if (!ob) goto bad; if (PyDict_SetItem(dict, __pyx_n_s_pyx_vtable, ob) < 0) goto bad; Py_DECREF(ob); return 0; bad: Py_XDECREF(ob); return -1; } /* SetupReduce */ static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { int ret; PyObject *name_attr; name_attr = __Pyx_PyObject_GetAttrStr(meth, __pyx_n_s_name); if (likely(name_attr)) { ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); } else { ret = -1; } if (unlikely(ret < 0)) { PyErr_Clear(); ret = 0; } Py_XDECREF(name_attr); return ret; } static int __Pyx_setup_reduce(PyObject* type_obj) { int ret = 0; PyObject *object_reduce = NULL; PyObject *object_reduce_ex = NULL; PyObject *reduce = NULL; PyObject *reduce_ex = NULL; PyObject *reduce_cython = NULL; PyObject *setstate = NULL; PyObject 
*setstate_cython = NULL; #if CYTHON_USE_PYTYPE_LOOKUP if (_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto GOOD; #else if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto GOOD; #endif #if CYTHON_USE_PYTYPE_LOOKUP object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto BAD; #else object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto BAD; #endif reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto BAD; if (reduce_ex == object_reduce_ex) { #if CYTHON_USE_PYTYPE_LOOKUP object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto BAD; #else object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto BAD; #endif reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto BAD; if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) { reduce_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_cython); if (unlikely(!reduce_cython)) goto BAD; ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto BAD; ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto BAD; setstate = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate); if (!setstate) PyErr_Clear(); if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) { setstate_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate_cython); if (unlikely(!setstate_cython)) goto BAD; ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto BAD; ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto BAD; } 
PyType_Modified((PyTypeObject*)type_obj); } } goto GOOD; BAD: if (!PyErr_Occurred()) PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name); ret = -1; GOOD: #if !CYTHON_USE_PYTYPE_LOOKUP Py_XDECREF(object_reduce); Py_XDECREF(object_reduce_ex); #endif Py_XDECREF(reduce); Py_XDECREF(reduce_ex); Py_XDECREF(reduce_cython); Py_XDECREF(setstate); Py_XDECREF(setstate_cython); return ret; } /* Import */ static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { PyObject *empty_list = 0; PyObject *module = 0; PyObject *global_dict = 0; PyObject *empty_dict = 0; PyObject *list; #if PY_MAJOR_VERSION < 3 PyObject *py_import; py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); if (!py_import) goto bad; #endif if (from_list) list = from_list; else { empty_list = PyList_New(0); if (!empty_list) goto bad; list = empty_list; } global_dict = PyModule_GetDict(__pyx_m); if (!global_dict) goto bad; empty_dict = PyDict_New(); if (!empty_dict) goto bad; { #if PY_MAJOR_VERSION >= 3 if (level == -1) { if (strchr(__Pyx_MODULE_NAME, '.')) { module = PyImport_ImportModuleLevelObject( name, global_dict, empty_dict, list, 1); if (!module) { if (!PyErr_ExceptionMatches(PyExc_ImportError)) goto bad; PyErr_Clear(); } } level = 0; } #endif if (!module) { #if PY_MAJOR_VERSION < 3 PyObject *py_level = PyInt_FromLong(level); if (!py_level) goto bad; module = PyObject_CallFunctionObjArgs(py_import, name, global_dict, empty_dict, list, py_level, NULL); Py_DECREF(py_level); #else module = PyImport_ImportModuleLevelObject( name, global_dict, empty_dict, list, level); #endif } } bad: #if PY_MAJOR_VERSION < 3 Py_XDECREF(py_import); #endif Py_XDECREF(empty_list); Py_XDECREF(empty_dict); return module; } /* ImportFrom */ static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { 
PyErr_Format(PyExc_ImportError, #if PY_MAJOR_VERSION < 3 "cannot import name %.230s", PyString_AS_STRING(name)); #else "cannot import name %S", name); #endif } return value; } /* CLineInTraceback */ #ifndef CYTHON_CLINE_IN_TRACEBACK static int __Pyx_CLineForTraceback(CYTHON_UNUSED PyThreadState *tstate, int c_line) { PyObject *use_cline; PyObject *ptype, *pvalue, *ptraceback; #if CYTHON_COMPILING_IN_CPYTHON PyObject **cython_runtime_dict; #endif __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); #if CYTHON_COMPILING_IN_CPYTHON cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); if (likely(cython_runtime_dict)) { use_cline = PyDict_GetItem(*cython_runtime_dict, __pyx_n_s_cline_in_traceback); } else #endif { PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); if (use_cline_obj) { use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True; Py_DECREF(use_cline_obj); } else { PyErr_Clear(); use_cline = NULL; } } if (!use_cline) { c_line = 0; PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); } else if (PyObject_Not(use_cline) != 0) { c_line = 0; } __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); return c_line; } #endif /* CodeObjectCache */ static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { int start = 0, mid = 0, end = count - 1; if (end >= 0 && code_line > entries[end].code_line) { return count; } while (start < end) { mid = start + (end - start) / 2; if (code_line < entries[mid].code_line) { end = mid; } else if (code_line > entries[mid].code_line) { start = mid + 1; } else { return mid; } } if (code_line <= entries[mid].code_line) { return mid; } else { return mid + 1; } } static PyCodeObject *__pyx_find_code_object(int code_line) { PyCodeObject* code_object; int pos; if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { return NULL; } pos = 
__pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { return NULL; } code_object = __pyx_code_cache.entries[pos].code_object; Py_INCREF(code_object); return code_object; } static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { int pos, i; __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; if (unlikely(!code_line)) { return; } if (unlikely(!entries)) { entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); if (likely(entries)) { __pyx_code_cache.entries = entries; __pyx_code_cache.max_count = 64; __pyx_code_cache.count = 1; entries[0].code_line = code_line; entries[0].code_object = code_object; Py_INCREF(code_object); } return; } pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { PyCodeObject* tmp = entries[pos].code_object; entries[pos].code_object = code_object; Py_DECREF(tmp); return; } if (__pyx_code_cache.count == __pyx_code_cache.max_count) { int new_max = __pyx_code_cache.max_count + 64; entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( __pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry)); if (unlikely(!entries)) { return; } __pyx_code_cache.entries = entries; __pyx_code_cache.max_count = new_max; } for (i=__pyx_code_cache.count; i>pos; i--) { entries[i] = entries[i-1]; } entries[pos].code_line = code_line; entries[pos].code_object = code_object; __pyx_code_cache.count++; Py_INCREF(code_object); } /* AddTraceback */ #include "compile.h" #include "frameobject.h" #include "traceback.h" static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( const char *funcname, int c_line, int py_line, const char *filename) { PyCodeObject *py_code = 0; PyObject *py_srcfile = 0; 
PyObject *py_funcname = 0; #if PY_MAJOR_VERSION < 3 py_srcfile = PyString_FromString(filename); #else py_srcfile = PyUnicode_FromString(filename); #endif if (!py_srcfile) goto bad; if (c_line) { #if PY_MAJOR_VERSION < 3 py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); #else py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); #endif } else { #if PY_MAJOR_VERSION < 3 py_funcname = PyString_FromString(funcname); #else py_funcname = PyUnicode_FromString(funcname); #endif } if (!py_funcname) goto bad; py_code = __Pyx_PyCode_New( 0, 0, 0, 0, 0, __pyx_empty_bytes, /*PyObject *code,*/ __pyx_empty_tuple, /*PyObject *consts,*/ __pyx_empty_tuple, /*PyObject *names,*/ __pyx_empty_tuple, /*PyObject *varnames,*/ __pyx_empty_tuple, /*PyObject *freevars,*/ __pyx_empty_tuple, /*PyObject *cellvars,*/ py_srcfile, /*PyObject *filename,*/ py_funcname, /*PyObject *name,*/ py_line, __pyx_empty_bytes /*PyObject *lnotab*/ ); Py_DECREF(py_srcfile); Py_DECREF(py_funcname); return py_code; bad: Py_XDECREF(py_srcfile); Py_XDECREF(py_funcname); return NULL; } static void __Pyx_AddTraceback(const char *funcname, int c_line, int py_line, const char *filename) { PyCodeObject *py_code = 0; PyFrameObject *py_frame = 0; PyThreadState *tstate = __Pyx_PyThreadState_Current; if (c_line) { c_line = __Pyx_CLineForTraceback(tstate, c_line); } py_code = __pyx_find_code_object(c_line ? -c_line : py_line); if (!py_code) { py_code = __Pyx_CreateCodeObjectForTraceback( funcname, c_line, py_line, filename); if (!py_code) goto bad; __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); } py_frame = PyFrame_New( tstate, /*PyThreadState *tstate,*/ py_code, /*PyCodeObject *code,*/ __pyx_d, /*PyObject *globals,*/ 0 /*PyObject *locals*/ ); if (!py_frame) goto bad; __Pyx_PyFrame_SetLineNumber(py_frame, py_line); PyTraceBack_Here(py_frame); bad: Py_XDECREF(py_code); Py_XDECREF(py_frame); } /* CIntFromPyVerify */ #define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) #define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) #define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ {\ func_type value = func_value;\ if (sizeof(target_type) < sizeof(func_type)) {\ if (unlikely(value != (func_type) (target_type) value)) {\ func_type zero = 0;\ if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ return (target_type) -1;\ if (is_unsigned && unlikely(value < zero))\ goto raise_neg_overflow;\ else\ goto raise_overflow;\ }\ }\ return (target_type) value;\ } /* CIntToPy */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value) { const int neg_one = (int) -1, const_zero = (int) 0; const int is_unsigned = neg_one > const_zero; if (is_unsigned) { if (sizeof(int) < sizeof(long)) { return PyInt_FromLong((long) value); } else if (sizeof(int) <= sizeof(unsigned long)) { return PyLong_FromUnsignedLong((unsigned long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); #endif } } else { if (sizeof(int) <= sizeof(long)) { return PyInt_FromLong((long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { return PyLong_FromLongLong((PY_LONG_LONG) value); #endif } } { int one = 1; int little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&value; return _PyLong_FromByteArray(bytes, sizeof(int), little, 
!is_unsigned); } } /* CIntToPy */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_int(unsigned int value) { const unsigned int neg_one = (unsigned int) -1, const_zero = (unsigned int) 0; const int is_unsigned = neg_one > const_zero; if (is_unsigned) { if (sizeof(unsigned int) < sizeof(long)) { return PyInt_FromLong((long) value); } else if (sizeof(unsigned int) <= sizeof(unsigned long)) { return PyLong_FromUnsignedLong((unsigned long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(unsigned int) <= sizeof(unsigned PY_LONG_LONG)) { return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); #endif } } else { if (sizeof(unsigned int) <= sizeof(long)) { return PyInt_FromLong((long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(unsigned int) <= sizeof(PY_LONG_LONG)) { return PyLong_FromLongLong((PY_LONG_LONG) value); #endif } } { int one = 1; int little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&value; return _PyLong_FromByteArray(bytes, sizeof(unsigned int), little, !is_unsigned); } } /* CIntToPy */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_unsigned_short(unsigned short value) { const unsigned short neg_one = (unsigned short) -1, const_zero = (unsigned short) 0; const int is_unsigned = neg_one > const_zero; if (is_unsigned) { if (sizeof(unsigned short) < sizeof(long)) { return PyInt_FromLong((long) value); } else if (sizeof(unsigned short) <= sizeof(unsigned long)) { return PyLong_FromUnsignedLong((unsigned long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(unsigned short) <= sizeof(unsigned PY_LONG_LONG)) { return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); #endif } } else { if (sizeof(unsigned short) <= sizeof(long)) { return PyInt_FromLong((long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(unsigned short) <= sizeof(PY_LONG_LONG)) { return PyLong_FromLongLong((PY_LONG_LONG) value); #endif } } { int one = 1; int little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char 
*)&value; return _PyLong_FromByteArray(bytes, sizeof(unsigned short), little, !is_unsigned); } } /* CIntToPy */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { const long neg_one = (long) -1, const_zero = (long) 0; const int is_unsigned = neg_one > const_zero; if (is_unsigned) { if (sizeof(long) < sizeof(long)) { return PyInt_FromLong((long) value); } else if (sizeof(long) <= sizeof(unsigned long)) { return PyLong_FromUnsignedLong((unsigned long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); #endif } } else { if (sizeof(long) <= sizeof(long)) { return PyInt_FromLong((long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { return PyLong_FromLongLong((PY_LONG_LONG) value); #endif } } { int one = 1; int little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&value; return _PyLong_FromByteArray(bytes, sizeof(long), little, !is_unsigned); } } /* CIntToPy */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_uint16_t(uint16_t value) { const uint16_t neg_one = (uint16_t) -1, const_zero = (uint16_t) 0; const int is_unsigned = neg_one > const_zero; if (is_unsigned) { if (sizeof(uint16_t) < sizeof(long)) { return PyInt_FromLong((long) value); } else if (sizeof(uint16_t) <= sizeof(unsigned long)) { return PyLong_FromUnsignedLong((unsigned long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(uint16_t) <= sizeof(unsigned PY_LONG_LONG)) { return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); #endif } } else { if (sizeof(uint16_t) <= sizeof(long)) { return PyInt_FromLong((long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(uint16_t) <= sizeof(PY_LONG_LONG)) { return PyLong_FromLongLong((PY_LONG_LONG) value); #endif } } { int one = 1; int little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&value; return _PyLong_FromByteArray(bytes, sizeof(uint16_t), little, 
!is_unsigned); } } /* CIntFromPy */ static CYTHON_INLINE size_t __Pyx_PyInt_As_size_t(PyObject *x) { const size_t neg_one = (size_t) -1, const_zero = (size_t) 0; const int is_unsigned = neg_one > const_zero; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_Check(x))) { if (sizeof(size_t) < sizeof(long)) { __PYX_VERIFY_RETURN_INT(size_t, long, PyInt_AS_LONG(x)) } else { long val = PyInt_AS_LONG(x); if (is_unsigned && unlikely(val < 0)) { goto raise_neg_overflow; } return (size_t) val; } } else #endif if (likely(PyLong_Check(x))) { if (is_unsigned) { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)x)->ob_digit; switch (Py_SIZE(x)) { case 0: return (size_t) 0; case 1: __PYX_VERIFY_RETURN_INT(size_t, digit, digits[0]) case 2: if (8 * sizeof(size_t) > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(size_t) >= 2 * PyLong_SHIFT) { return (size_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } } break; case 3: if (8 * sizeof(size_t) > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(size_t) >= 3 * PyLong_SHIFT) { return (size_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } } break; case 4: if (8 * sizeof(size_t) > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(size_t) >= 4 * PyLong_SHIFT) { return (size_t) (((((((((size_t)digits[3]) << 
PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } } break; } #endif #if CYTHON_COMPILING_IN_CPYTHON if (unlikely(Py_SIZE(x) < 0)) { goto raise_neg_overflow; } #else { int result = PyObject_RichCompareBool(x, Py_False, Py_LT); if (unlikely(result < 0)) return (size_t) -1; if (unlikely(result == 1)) goto raise_neg_overflow; } #endif if (sizeof(size_t) <= sizeof(unsigned long)) { __PYX_VERIFY_RETURN_INT_EXC(size_t, unsigned long, PyLong_AsUnsignedLong(x)) #ifdef HAVE_LONG_LONG } else if (sizeof(size_t) <= sizeof(unsigned PY_LONG_LONG)) { __PYX_VERIFY_RETURN_INT_EXC(size_t, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) #endif } } else { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)x)->ob_digit; switch (Py_SIZE(x)) { case 0: return (size_t) 0; case -1: __PYX_VERIFY_RETURN_INT(size_t, sdigit, (sdigit) (-(sdigit)digits[0])) case 1: __PYX_VERIFY_RETURN_INT(size_t, digit, +digits[0]) case -2: if (8 * sizeof(size_t) - 1 > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(size_t, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(size_t) - 1 > 2 * PyLong_SHIFT) { return (size_t) (((size_t)-1)*(((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); } } break; case 2: if (8 * sizeof(size_t) > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(size_t) - 1 > 2 * PyLong_SHIFT) { return (size_t) ((((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); } } break; case -3: if (8 * sizeof(size_t) - 1 > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(size_t, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << 
PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(size_t) - 1 > 3 * PyLong_SHIFT) { return (size_t) (((size_t)-1)*(((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); } } break; case 3: if (8 * sizeof(size_t) > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(size_t) - 1 > 3 * PyLong_SHIFT) { return (size_t) ((((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); } } break; case -4: if (8 * sizeof(size_t) - 1 > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(size_t, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(size_t) - 1 > 4 * PyLong_SHIFT) { return (size_t) (((size_t)-1)*(((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); } } break; case 4: if (8 * sizeof(size_t) > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(size_t) - 1 > 4 * PyLong_SHIFT) { return (size_t) ((((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]))); } } break; } #endif if (sizeof(size_t) <= sizeof(long)) { __PYX_VERIFY_RETURN_INT_EXC(size_t, long, PyLong_AsLong(x)) #ifdef HAVE_LONG_LONG } else if (sizeof(size_t) <= 
sizeof(PY_LONG_LONG)) { __PYX_VERIFY_RETURN_INT_EXC(size_t, PY_LONG_LONG, PyLong_AsLongLong(x)) #endif } } { #if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) PyErr_SetString(PyExc_RuntimeError, "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); #else size_t val; PyObject *v = __Pyx_PyNumber_IntOrLong(x); #if PY_MAJOR_VERSION < 3 if (likely(v) && !PyLong_Check(v)) { PyObject *tmp = v; v = PyNumber_Long(tmp); Py_DECREF(tmp); } #endif if (likely(v)) { int one = 1; int is_little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&val; int ret = _PyLong_AsByteArray((PyLongObject *)v, bytes, sizeof(val), is_little, !is_unsigned); Py_DECREF(v); if (likely(!ret)) return val; } #endif return (size_t) -1; } } else { size_t val; PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); if (!tmp) return (size_t) -1; val = __Pyx_PyInt_As_size_t(tmp); Py_DECREF(tmp); return val; } raise_overflow: PyErr_SetString(PyExc_OverflowError, "value too large to convert to size_t"); return (size_t) -1; raise_neg_overflow: PyErr_SetString(PyExc_OverflowError, "can't convert negative value to size_t"); return (size_t) -1; } /* CIntFromPy */ static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { const long neg_one = (long) -1, const_zero = (long) 0; const int is_unsigned = neg_one > const_zero; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_Check(x))) { if (sizeof(long) < sizeof(long)) { __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) } else { long val = PyInt_AS_LONG(x); if (is_unsigned && unlikely(val < 0)) { goto raise_neg_overflow; } return (long) val; } } else #endif if (likely(PyLong_Check(x))) { if (is_unsigned) { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)x)->ob_digit; switch (Py_SIZE(x)) { case 0: return (long) 0; case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) case 2: if (8 * sizeof(long) > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, 
unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); } } break; case 3: if (8 * sizeof(long) > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); } } break; case 4: if (8 * sizeof(long) > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); } } break; } #endif #if CYTHON_COMPILING_IN_CPYTHON if (unlikely(Py_SIZE(x) < 0)) { goto raise_neg_overflow; } #else { int result = PyObject_RichCompareBool(x, Py_False, Py_LT); if (unlikely(result < 0)) return (long) -1; if (unlikely(result == 1)) goto raise_neg_overflow; } #endif if (sizeof(long) <= sizeof(unsigned long)) { __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) #ifdef HAVE_LONG_LONG } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) #endif } } else { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)x)->ob_digit; switch (Py_SIZE(x)) { case 0: return (long) 0; case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) case 1: 
__PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) case -2: if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case 2: if (8 * sizeof(long) > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case -3: if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case 3: if (8 * sizeof(long) > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case -4: if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << 
PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case 4: if (8 * sizeof(long) > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; } #endif if (sizeof(long) <= sizeof(long)) { __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) #ifdef HAVE_LONG_LONG } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) #endif } } { #if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) PyErr_SetString(PyExc_RuntimeError, "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); #else long val; PyObject *v = __Pyx_PyNumber_IntOrLong(x); #if PY_MAJOR_VERSION < 3 if (likely(v) && !PyLong_Check(v)) { PyObject *tmp = v; v = PyNumber_Long(tmp); Py_DECREF(tmp); } #endif if (likely(v)) { int one = 1; int is_little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&val; int ret = _PyLong_AsByteArray((PyLongObject *)v, bytes, sizeof(val), is_little, !is_unsigned); Py_DECREF(v); if (likely(!ret)) return val; } #endif return (long) -1; } } else { long val; PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); if (!tmp) return (long) -1; val = __Pyx_PyInt_As_long(tmp); Py_DECREF(tmp); return val; } raise_overflow: PyErr_SetString(PyExc_OverflowError, "value too large to convert to long"); return (long) -1; 
raise_neg_overflow: PyErr_SetString(PyExc_OverflowError, "can't convert negative value to long"); return (long) -1; } /* CIntFromPy */ static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { const int neg_one = (int) -1, const_zero = (int) 0; const int is_unsigned = neg_one > const_zero; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_Check(x))) { if (sizeof(int) < sizeof(long)) { __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) } else { long val = PyInt_AS_LONG(x); if (is_unsigned && unlikely(val < 0)) { goto raise_neg_overflow; } return (int) val; } } else #endif if (likely(PyLong_Check(x))) { if (is_unsigned) { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)x)->ob_digit; switch (Py_SIZE(x)) { case 0: return (int) 0; case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) case 2: if (8 * sizeof(int) > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); } } break; case 3: if (8 * sizeof(int) > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); } } break; case 4: if (8 * sizeof(int) > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { return (int) 
(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); } } break; } #endif #if CYTHON_COMPILING_IN_CPYTHON if (unlikely(Py_SIZE(x) < 0)) { goto raise_neg_overflow; } #else { int result = PyObject_RichCompareBool(x, Py_False, Py_LT); if (unlikely(result < 0)) return (int) -1; if (unlikely(result == 1)) goto raise_neg_overflow; } #endif if (sizeof(int) <= sizeof(unsigned long)) { __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) #ifdef HAVE_LONG_LONG } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) #endif } } else { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)x)->ob_digit; switch (Py_SIZE(x)) { case 0: return (int) 0; case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) case -2: if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case 2: if (8 * sizeof(int) > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case -3: if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * 
sizeof(int) - 1 > 3 * PyLong_SHIFT) { return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case 3: if (8 * sizeof(int) > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case -4: if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case 4: if (8 * sizeof(int) > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; } #endif if (sizeof(int) <= sizeof(long)) { __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) #ifdef HAVE_LONG_LONG } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) #endif } } { #if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) 
PyErr_SetString(PyExc_RuntimeError, "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); #else int val; PyObject *v = __Pyx_PyNumber_IntOrLong(x); #if PY_MAJOR_VERSION < 3 if (likely(v) && !PyLong_Check(v)) { PyObject *tmp = v; v = PyNumber_Long(tmp); Py_DECREF(tmp); } #endif if (likely(v)) { int one = 1; int is_little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&val; int ret = _PyLong_AsByteArray((PyLongObject *)v, bytes, sizeof(val), is_little, !is_unsigned); Py_DECREF(v); if (likely(!ret)) return val; } #endif return (int) -1; } } else { int val; PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); if (!tmp) return (int) -1; val = __Pyx_PyInt_As_int(tmp); Py_DECREF(tmp); return val; } raise_overflow: PyErr_SetString(PyExc_OverflowError, "value too large to convert to int"); return (int) -1; raise_neg_overflow: PyErr_SetString(PyExc_OverflowError, "can't convert negative value to int"); return (int) -1; } /* FastTypeChecks */ #if CYTHON_COMPILING_IN_CPYTHON static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { while (a) { a = a->tp_base; if (a == b) return 1; } return b == &PyBaseObject_Type; } static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { PyObject *mro; if (a == b) return 1; mro = a->tp_mro; if (likely(mro)) { Py_ssize_t i, n; n = PyTuple_GET_SIZE(mro); for (i = 0; i < n; i++) { if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) return 1; } return 0; } return __Pyx_InBases(a, b); } #if PY_MAJOR_VERSION == 2 static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { PyObject *exception, *value, *tb; int res; __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __Pyx_ErrFetch(&exception, &value, &tb); res = exc_type1 ? 
PyObject_IsSubclass(err, exc_type1) : 0; if (unlikely(res == -1)) { PyErr_WriteUnraisable(err); res = 0; } if (!res) { res = PyObject_IsSubclass(err, exc_type2); if (unlikely(res == -1)) { PyErr_WriteUnraisable(err); res = 0; } } __Pyx_ErrRestore(exception, value, tb); return res; } #else static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; if (!res) { res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); } return res; } #endif static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject* exc_type) { if (likely(err == exc_type)) return 1; if (likely(PyExceptionClass_Check(err))) { return __Pyx_inner_PyErr_GivenExceptionMatches2(err, NULL, exc_type); } return PyErr_GivenExceptionMatches(err, exc_type); } static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *exc_type1, PyObject *exc_type2) { if (likely(err == exc_type1 || err == exc_type2)) return 1; if (likely(PyExceptionClass_Check(err))) { return __Pyx_inner_PyErr_GivenExceptionMatches2(err, exc_type1, exc_type2); } return (PyErr_GivenExceptionMatches(err, exc_type1) || PyErr_GivenExceptionMatches(err, exc_type2)); } #endif /* CheckBinaryVersion */ static int __Pyx_check_binary_version(void) { char ctversion[4], rtversion[4]; PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION); PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion()); if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) { char message[200]; PyOS_snprintf(message, sizeof(message), "compiletime version %s of module '%.100s' " "does not match runtime version %s", ctversion, __Pyx_MODULE_NAME, rtversion); return PyErr_WarnEx(NULL, message, 1); } return 0; } /* ModuleImport */ #ifndef __PYX_HAVE_RT_ImportModule #define __PYX_HAVE_RT_ImportModule static PyObject *__Pyx_ImportModule(const char *name) { 
PyObject *py_name = 0; PyObject *py_module = 0; py_name = __Pyx_PyIdentifier_FromString(name); if (!py_name) goto bad; py_module = PyImport_Import(py_name); Py_DECREF(py_name); return py_module; bad: Py_XDECREF(py_name); return 0; } #endif /* TypeImport */ #ifndef __PYX_HAVE_RT_ImportType #define __PYX_HAVE_RT_ImportType static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, size_t size, int strict) { PyObject *py_module = 0; PyObject *result = 0; PyObject *py_name = 0; char warning[200]; Py_ssize_t basicsize; #ifdef Py_LIMITED_API PyObject *py_basicsize; #endif py_module = __Pyx_ImportModule(module_name); if (!py_module) goto bad; py_name = __Pyx_PyIdentifier_FromString(class_name); if (!py_name) goto bad; result = PyObject_GetAttr(py_module, py_name); Py_DECREF(py_name); py_name = 0; Py_DECREF(py_module); py_module = 0; if (!result) goto bad; if (!PyType_Check(result)) { PyErr_Format(PyExc_TypeError, "%.200s.%.200s is not a type object", module_name, class_name); goto bad; } #ifndef Py_LIMITED_API basicsize = ((PyTypeObject *)result)->tp_basicsize; #else py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); if (!py_basicsize) goto bad; basicsize = PyLong_AsSsize_t(py_basicsize); Py_DECREF(py_basicsize); py_basicsize = 0; if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) goto bad; #endif if (!strict && (size_t)basicsize > size) { PyOS_snprintf(warning, sizeof(warning), "%s.%s size changed, may indicate binary incompatibility. Expected %zd, got %zd", module_name, class_name, basicsize, size); if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; } else if ((size_t)basicsize != size) { PyErr_Format(PyExc_ValueError, "%.200s.%.200s has the wrong size, try recompiling. 
Expected %zd, got %zd", module_name, class_name, basicsize, size); goto bad; } return (PyTypeObject *)result; bad: Py_XDECREF(py_module); Py_XDECREF(result); return NULL; } #endif /* InitStrings */ static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { while (t->p) { #if PY_MAJOR_VERSION < 3 if (t->is_unicode) { *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); } else if (t->intern) { *t->p = PyString_InternFromString(t->s); } else { *t->p = PyString_FromStringAndSize(t->s, t->n - 1); } #else if (t->is_unicode | t->is_str) { if (t->intern) { *t->p = PyUnicode_InternFromString(t->s); } else if (t->encoding) { *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); } else { *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); } } else { *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); } #endif if (!*t->p) return -1; if (PyObject_Hash(*t->p) == -1) PyErr_Clear(); ++t; } return 0; } static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); } static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { Py_ssize_t ignore; return __Pyx_PyObject_AsStringAndSize(o, &ignore); } #if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT #if !CYTHON_PEP393_ENABLED static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { char* defenc_c; PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); if (!defenc) return NULL; defenc_c = PyBytes_AS_STRING(defenc); #if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII { char* end = defenc_c + PyBytes_GET_SIZE(defenc); char* c; for (c = defenc_c; c < end; c++) { if ((unsigned char) (*c) >= 128) { PyUnicode_AsASCIIString(o); return NULL; } } } #endif *length = PyBytes_GET_SIZE(defenc); return defenc_c; } #else static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; #if 
__PYX_DEFAULT_STRING_ENCODING_IS_ASCII if (likely(PyUnicode_IS_ASCII(o))) { *length = PyUnicode_GET_LENGTH(o); return PyUnicode_AsUTF8(o); } else { PyUnicode_AsASCIIString(o); return NULL; } #else return PyUnicode_AsUTF8AndSize(o, length); #endif } #endif #endif static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { #if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT if ( #if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII __Pyx_sys_getdefaultencoding_not_ascii && #endif PyUnicode_Check(o)) { return __Pyx_PyUnicode_AsStringAndSize(o, length); } else #endif #if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) if (PyByteArray_Check(o)) { *length = PyByteArray_GET_SIZE(o); return PyByteArray_AS_STRING(o); } else #endif { char* result; int r = PyBytes_AsStringAndSize(o, &result, length); if (unlikely(r < 0)) { return NULL; } else { return result; } } } static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { int is_true = x == Py_True; if (is_true | (x == Py_False) | (x == Py_None)) return is_true; else return PyObject_IsTrue(x); } static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { #if PY_MAJOR_VERSION >= 3 if (PyLong_Check(result)) { if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, "__int__ returned non-int (type %.200s). 
" "The ability to return an instance of a strict subclass of int " "is deprecated, and may be removed in a future version of Python.", Py_TYPE(result)->tp_name)) { Py_DECREF(result); return NULL; } return result; } #endif PyErr_Format(PyExc_TypeError, "__%.4s__ returned non-%.4s (type %.200s)", type_name, type_name, Py_TYPE(result)->tp_name); Py_DECREF(result); return NULL; } static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { #if CYTHON_USE_TYPE_SLOTS PyNumberMethods *m; #endif const char *name = NULL; PyObject *res = NULL; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_Check(x) || PyLong_Check(x))) #else if (likely(PyLong_Check(x))) #endif return __Pyx_NewRef(x); #if CYTHON_USE_TYPE_SLOTS m = Py_TYPE(x)->tp_as_number; #if PY_MAJOR_VERSION < 3 if (m && m->nb_int) { name = "int"; res = m->nb_int(x); } else if (m && m->nb_long) { name = "long"; res = m->nb_long(x); } #else if (likely(m && m->nb_int)) { name = "int"; res = m->nb_int(x); } #endif #else if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { res = PyNumber_Int(x); } #endif if (likely(res)) { #if PY_MAJOR_VERSION < 3 if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { #else if (unlikely(!PyLong_CheckExact(res))) { #endif return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); } } else if (!PyErr_Occurred()) { PyErr_SetString(PyExc_TypeError, "an integer is required"); } return res; } static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { Py_ssize_t ival; PyObject *x; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_CheckExact(b))) { if (sizeof(Py_ssize_t) >= sizeof(long)) return PyInt_AS_LONG(b); else return PyInt_AsSsize_t(x); } #endif if (likely(PyLong_CheckExact(b))) { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)b)->ob_digit; const Py_ssize_t size = Py_SIZE(b); if (likely(__Pyx_sst_abs(size) <= 1)) { ival = likely(size) ? 
digits[0] : 0; if (size == -1) ival = -ival; return ival; } else { switch (size) { case 2: if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case -2: if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case 3: if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case -3: if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case 4: if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case -4: if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; } } #endif return PyLong_AsSsize_t(b); } x = PyNumber_Index(b); if (!x) return -1; ival = PyInt_AsSsize_t(x); Py_DECREF(x); return ival; } static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { return PyInt_FromSize_t(ival); } #endif /* Py_PYTHON_H */ aiohttp-3.0.1/aiohttp/_http_parser.pyx0000666000000000000000000005114513240304665016232 0ustar 00000000000000#cython: language_level=3 # # Based on https://github.com/MagicStack/httptools # from __future__ import absolute_import, print_function from cpython.mem cimport PyMem_Malloc, PyMem_Free from cpython cimport PyObject_GetBuffer, PyBuffer_Release, PyBUF_SIMPLE, \ Py_buffer, PyBytes_AsString from multidict import CIMultiDict from yarl import URL from aiohttp import hdrs from .http_exceptions import ( 
BadHttpMessage, BadStatusLine, InvalidHeader, LineTooLong, InvalidURLError, PayloadEncodingError, ContentLengthError, TransferEncodingError) from .http_writer import HttpVersion, HttpVersion10, HttpVersion11 from .http_parser import RawRequestMessage, RawResponseMessage, DeflateBuffer from .streams import EMPTY_PAYLOAD, StreamReader cimport cython from . cimport _cparser as cparser __all__ = ('HttpRequestParserC', 'HttpResponseMessageC', 'parse_url') @cython.internal cdef class HttpParser: cdef: cparser.http_parser* _cparser cparser.http_parser_settings* _csettings str _header_name str _header_value bytes _raw_header_name bytes _raw_header_value object _protocol object _loop object _timer size_t _max_line_size size_t _max_field_size size_t _max_headers bint _response_with_body bint _started object _url bytearray _buf str _path str _reason list _headers list _raw_headers bint _upgraded list _messages object _payload bint _payload_error object _payload_exception object _last_error bint _auto_decompress Py_buffer py_buf def __cinit__(self): self._cparser = \ PyMem_Malloc(sizeof(cparser.http_parser)) if self._cparser is NULL: raise MemoryError() self._csettings = \ PyMem_Malloc(sizeof(cparser.http_parser_settings)) if self._csettings is NULL: raise MemoryError() def __dealloc__(self): PyMem_Free(self._cparser) PyMem_Free(self._csettings) cdef _init(self, cparser.http_parser_type mode, object protocol, object loop, object timer=None, size_t max_line_size=8190, size_t max_headers=32768, size_t max_field_size=8190, payload_exception=None, response_with_body=True, auto_decompress=True): cparser.http_parser_init(self._cparser, mode) self._cparser.data = self self._cparser.content_length = 0 cparser.http_parser_settings_init(self._csettings) self._protocol = protocol self._loop = loop self._timer = timer self._buf = bytearray() self._payload = None self._payload_error = 0 self._payload_exception = payload_exception self._messages = [] self._header_name = None 
self._header_value = None self._raw_header_name = None self._raw_header_value = None self._max_line_size = max_line_size self._max_headers = max_headers self._max_field_size = max_field_size self._response_with_body = response_with_body self._upgraded = False self._auto_decompress = auto_decompress self._csettings.on_url = cb_on_url self._csettings.on_status = cb_on_status self._csettings.on_header_field = cb_on_header_field self._csettings.on_header_value = cb_on_header_value self._csettings.on_headers_complete = cb_on_headers_complete self._csettings.on_body = cb_on_body self._csettings.on_message_begin = cb_on_message_begin self._csettings.on_message_complete = cb_on_message_complete self._csettings.on_chunk_header = cb_on_chunk_header self._csettings.on_chunk_complete = cb_on_chunk_complete self._last_error = None cdef _process_header(self): if self._header_name is not None: name = self._header_name value = self._header_value self._header_name = self._header_value = None self._headers.append((name, value)) raw_name = self._raw_header_name raw_value = self._raw_header_value self._raw_header_name = self._raw_header_value = None self._raw_headers.append((raw_name, raw_value)) cdef _on_header_field(self, str field, bytes raw_field): if self._header_value is not None: self._process_header() self._header_value = None if self._header_name is None: self._header_name = field self._raw_header_name = raw_field else: self._header_name += field self._raw_header_name += raw_field cdef _on_header_value(self, str val, bytes raw_val): if self._header_value is None: self._header_value = val self._raw_header_value = raw_val else: self._header_value += val self._raw_header_value += raw_val cdef _on_headers_complete(self, ENCODING='utf-8', ENCODING_ERR='surrogateescape', CONTENT_ENCODING=hdrs.CONTENT_ENCODING, SEC_WEBSOCKET_KEY1=hdrs.SEC_WEBSOCKET_KEY1, SUPPORTED=('gzip', 'deflate', 'br')): self._process_header() method = cparser.http_method_str( self._cparser.method) should_close 
= not bool(cparser.http_should_keep_alive(self._cparser)) upgrade = bool(self._cparser.upgrade) chunked = bool(self._cparser.flags & cparser.F_CHUNKED) raw_headers = tuple(self._raw_headers) headers = CIMultiDict(self._headers) if upgrade or self._cparser.method == 5: # cparser.CONNECT: self._upgraded = True # do not support old websocket spec if SEC_WEBSOCKET_KEY1 in headers: raise InvalidHeader(SEC_WEBSOCKET_KEY1) encoding = None enc = headers.get(CONTENT_ENCODING) if enc: enc = enc.lower() if enc in SUPPORTED: encoding = enc if self._cparser.type == cparser.HTTP_REQUEST: msg = RawRequestMessage( method.decode(ENCODING, ENCODING_ERR), self._path, self.http_version(), headers, raw_headers, should_close, encoding, upgrade, chunked, self._url) else: msg = RawResponseMessage( self.http_version(), self._cparser.status_code, self._reason, headers, raw_headers, should_close, encoding, upgrade, chunked) if (self._cparser.content_length > 0 or chunked or self._cparser.method == 5): # CONNECT: 5 payload = StreamReader( self._protocol, timer=self._timer, loop=self._loop) else: payload = EMPTY_PAYLOAD self._payload = payload if encoding is not None and self._auto_decompress: self._payload = DeflateBuffer(payload, encoding) if not self._response_with_body: payload = EMPTY_PAYLOAD self._messages.append((msg, payload)) cdef _on_message_complete(self): self._payload.feed_eof() self._payload = None cdef _on_chunk_header(self): self._payload.begin_http_chunk_receiving() cdef _on_chunk_complete(self): self._payload.end_http_chunk_receiving() cdef object _on_status_complete(self): pass ### Public API ### def http_version(self): cdef cparser.http_parser* parser = self._cparser if parser.http_major == 1: if parser.http_minor == 0: return HttpVersion10 elif parser.http_minor == 1: return HttpVersion11 return HttpVersion(parser.http_major, parser.http_minor) def feed_eof(self): cdef bytes desc if self._payload is not None: if self._cparser.flags & cparser.F_CHUNKED: raise 
TransferEncodingError( "Not enough data for satisfy transfer length header.") elif self._cparser.flags & cparser.F_CONTENTLENGTH: raise ContentLengthError( "Not enough data for satisfy content length header.") elif self._cparser.http_errno != cparser.HPE_OK: desc = cparser.http_errno_description( self._cparser.http_errno) raise PayloadEncodingError(desc.decode('latin-1')) else: self._payload.feed_eof() elif self._started: self._on_headers_complete() if self._messages: return self._messages[-1][0] def feed_data(self, data): cdef: size_t data_len size_t nb PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE) data_len = self.py_buf.len nb = cparser.http_parser_execute( self._cparser, self._csettings, self.py_buf.buf, data_len) PyBuffer_Release(&self.py_buf) # i am not sure about cparser.HPE_INVALID_METHOD, # seems get err for valid request # test_client_functional.py::test_post_data_with_bytesio_file if (self._cparser.http_errno != cparser.HPE_OK and (self._cparser.http_errno != cparser.HPE_INVALID_METHOD or self._cparser.method == 0)): if self._payload_error == 0: if self._last_error is not None: ex = self._last_error self._last_error = None else: ex = parser_error_from_errno( self._cparser.http_errno) self._payload = None raise ex if self._messages: messages = self._messages self._messages = [] else: messages = () if self._upgraded: return messages, True, data[nb:] else: return messages, False, b'' cdef class HttpRequestParserC(HttpParser): def __init__(self, protocol, loop, timer=None, size_t max_line_size=8190, size_t max_headers=32768, size_t max_field_size=8190, payload_exception=None, response_with_body=True, read_until_eof=False): self._init(cparser.HTTP_REQUEST, protocol, loop, timer, max_line_size, max_headers, max_field_size, payload_exception, response_with_body) cdef object _on_status_complete(self): cdef Py_buffer py_buf if not self._buf: return self._path = self._buf.decode('utf-8', 'surrogateescape') if self._cparser.method == 5: # CONNECT self._url = 
URL(self._path) else: PyObject_GetBuffer(self._buf, &py_buf, PyBUF_SIMPLE) try: self._url = _parse_url(py_buf.buf, py_buf.len) finally: PyBuffer_Release(&py_buf) self._buf.clear() cdef class HttpResponseParserC(HttpParser): def __init__(self, protocol, loop, timer=None, size_t max_line_size=8190, size_t max_headers=32768, size_t max_field_size=8190, payload_exception=None, response_with_body=True, read_until_eof=False, auto_decompress=True): self._init(cparser.HTTP_RESPONSE, protocol, loop, timer, max_line_size, max_headers, max_field_size, payload_exception, response_with_body, auto_decompress) cdef object _on_status_complete(self): if self._buf: self._reason = self._buf.decode('utf-8', 'surrogateescape') self._buf.clear() cdef int cb_on_message_begin(cparser.http_parser* parser) except -1: cdef HttpParser pyparser = parser.data pyparser._started = True pyparser._headers = [] pyparser._raw_headers = [] pyparser._buf.clear() pyparser._path = None pyparser._reason = None return 0 cdef int cb_on_url(cparser.http_parser* parser, const char *at, size_t length) except -1: cdef HttpParser pyparser = parser.data try: if length > pyparser._max_line_size: raise LineTooLong( 'Status line is too long', pyparser._max_line_size) pyparser._buf.extend(at[:length]) except BaseException as ex: pyparser._last_error = ex return -1 else: return 0 cdef int cb_on_status(cparser.http_parser* parser, const char *at, size_t length) except -1: cdef HttpParser pyparser = parser.data cdef str reason try: if length > pyparser._max_line_size: raise LineTooLong( 'Status line is too long', pyparser._max_line_size) pyparser._buf.extend(at[:length]) except BaseException as ex: pyparser._last_error = ex return -1 else: return 0 cdef int cb_on_header_field(cparser.http_parser* parser, const char *at, size_t length) except -1: cdef HttpParser pyparser = parser.data try: pyparser._on_status_complete() if length > pyparser._max_field_size: raise LineTooLong( 'Header name is too long', 
pyparser._max_field_size) pyparser._on_header_field( at[:length].decode('utf-8', 'surrogateescape'), at[:length]) except BaseException as ex: pyparser._last_error = ex return -1 else: return 0 cdef int cb_on_header_value(cparser.http_parser* parser, const char *at, size_t length) except -1: cdef HttpParser pyparser = parser.data try: if pyparser._header_value is not None: if len(pyparser._header_value) + length > pyparser._max_field_size: raise LineTooLong( 'Header value is too long', pyparser._max_field_size) elif length > pyparser._max_field_size: raise LineTooLong( 'Header value is too long', pyparser._max_field_size) pyparser._on_header_value( at[:length].decode('utf-8', 'surrogateescape'), at[:length]) except BaseException as ex: pyparser._last_error = ex return -1 else: return 0 cdef int cb_on_headers_complete(cparser.http_parser* parser) except -1: cdef HttpParser pyparser = parser.data try: pyparser._on_status_complete() pyparser._on_headers_complete() except BaseException as exc: pyparser._last_error = exc return -1 else: if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT return 2 else: return 0 cdef int cb_on_body(cparser.http_parser* parser, const char *at, size_t length) except -1: cdef HttpParser pyparser = parser.data cdef bytes body = at[:length] try: pyparser._payload.feed_data(body, length) except BaseException as exc: if pyparser._payload_exception is not None: pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) else: pyparser._payload.set_exception(exc) pyparser._payload_error = 1 return -1 else: return 0 cdef int cb_on_message_complete(cparser.http_parser* parser) except -1: cdef HttpParser pyparser = parser.data try: pyparser._started = False pyparser._on_message_complete() except BaseException as exc: pyparser._last_error = exc return -1 else: return 0 cdef int cb_on_chunk_header(cparser.http_parser* parser) except -1: cdef HttpParser pyparser = parser.data try: pyparser._on_chunk_header() except 
BaseException as exc: pyparser._last_error = exc return -1 else: return 0 cdef int cb_on_chunk_complete(cparser.http_parser* parser) except -1: cdef HttpParser pyparser = parser.data try: pyparser._on_chunk_complete() except BaseException as exc: pyparser._last_error = exc return -1 else: return 0 cdef parser_error_from_errno(cparser.http_errno errno): cdef bytes desc = cparser.http_errno_description(errno) if errno in (cparser.HPE_CB_message_begin, cparser.HPE_CB_url, cparser.HPE_CB_header_field, cparser.HPE_CB_header_value, cparser.HPE_CB_headers_complete, cparser.HPE_CB_body, cparser.HPE_CB_message_complete, cparser.HPE_CB_status, cparser.HPE_CB_chunk_header, cparser.HPE_CB_chunk_complete): cls = BadHttpMessage elif errno == cparser.HPE_INVALID_STATUS: cls = BadStatusLine elif errno == cparser.HPE_INVALID_METHOD: cls = BadStatusLine elif errno == cparser.HPE_INVALID_URL: cls = InvalidURLError else: cls = BadHttpMessage return cls(desc.decode('latin-1')) def parse_url(url): cdef: Py_buffer py_buf char* buf_data PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE) try: buf_data = py_buf.buf return _parse_url(buf_data, py_buf.len) finally: PyBuffer_Release(&py_buf) def _parse_url(char* buf_data, size_t length): cdef: cparser.http_parser_url* parsed int res str schema = None str host = None object port = None str path = None str query = None str fragment = None str user = None str password = None str userinfo = None object result = None int off int ln parsed = \ PyMem_Malloc(sizeof(cparser.http_parser_url)) if parsed is NULL: raise MemoryError() cparser.http_parser_url_init(parsed) try: res = cparser.http_parser_parse_url(buf_data, length, 0, parsed) if res == 0: if parsed.field_set & (1 << cparser.UF_SCHEMA): off = parsed.field_data[cparser.UF_SCHEMA].off ln = parsed.field_data[cparser.UF_SCHEMA].len schema = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') else: schema = '' if parsed.field_set & (1 << cparser.UF_HOST): off = parsed.field_data[cparser.UF_HOST].off 
ln = parsed.field_data[cparser.UF_HOST].len host = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') else: host = '' if parsed.field_set & (1 << cparser.UF_PORT): port = parsed.port if parsed.field_set & (1 << cparser.UF_PATH): off = parsed.field_data[cparser.UF_PATH].off ln = parsed.field_data[cparser.UF_PATH].len path = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') else: path = '' if parsed.field_set & (1 << cparser.UF_QUERY): off = parsed.field_data[cparser.UF_QUERY].off ln = parsed.field_data[cparser.UF_QUERY].len query = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') else: query = '' if parsed.field_set & (1 << cparser.UF_FRAGMENT): off = parsed.field_data[cparser.UF_FRAGMENT].off ln = parsed.field_data[cparser.UF_FRAGMENT].len fragment = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') else: fragment = '' if parsed.field_set & (1 << cparser.UF_USERINFO): off = parsed.field_data[cparser.UF_USERINFO].off ln = parsed.field_data[cparser.UF_USERINFO].len userinfo = buf_data[off:off+ln].decode('utf-8', 'surrogateescape') user, sep, password = userinfo.partition(':') return URL.build(scheme=schema, user=user, password=password, host=host, port=port, path=path, query=query, fragment=fragment) else: raise InvalidURLError("invalid url {!r}".format(buf_data)) finally: PyMem_Free(parsed) aiohttp-3.0.1/aiohttp/_websocket.c0000666000000000000000000034343613240304736015275 0ustar 00000000000000/* Generated by Cython 0.27.3 */ /* BEGIN: Cython Metadata { "distutils": { "depends": [], "name": "aiohttp._websocket", "sources": [ "aiohttp/_websocket.pyx" ] }, "module_name": "aiohttp._websocket" } END: Cython Metadata */ #define PY_SSIZE_T_CLEAN #include "Python.h" #ifndef Py_PYTHON_H #error Python headers needed to compile C extensions, please install development version of Python. #elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) #error Cython requires Python 2.6+ or Python 3.3+. 
#else #define CYTHON_ABI "0_27_3" #define CYTHON_FUTURE_DIVISION 0 #include #ifndef offsetof #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) #endif #if !defined(WIN32) && !defined(MS_WINDOWS) #ifndef __stdcall #define __stdcall #endif #ifndef __cdecl #define __cdecl #endif #ifndef __fastcall #define __fastcall #endif #endif #ifndef DL_IMPORT #define DL_IMPORT(t) t #endif #ifndef DL_EXPORT #define DL_EXPORT(t) t #endif #define __PYX_COMMA , #ifndef HAVE_LONG_LONG #if PY_VERSION_HEX >= 0x02070000 #define HAVE_LONG_LONG #endif #endif #ifndef PY_LONG_LONG #define PY_LONG_LONG LONG_LONG #endif #ifndef Py_HUGE_VAL #define Py_HUGE_VAL HUGE_VAL #endif #ifdef PYPY_VERSION #define CYTHON_COMPILING_IN_PYPY 1 #define CYTHON_COMPILING_IN_PYSTON 0 #define CYTHON_COMPILING_IN_CPYTHON 0 #undef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 0 #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #if PY_VERSION_HEX < 0x03050000 #undef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 0 #elif !defined(CYTHON_USE_ASYNC_SLOTS) #define CYTHON_USE_ASYNC_SLOTS 1 #endif #undef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 0 #undef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 0 #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #undef CYTHON_USE_PYLONG_INTERNALS #define CYTHON_USE_PYLONG_INTERNALS 0 #undef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 1 #undef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 0 #undef CYTHON_UNPACK_METHODS #define CYTHON_UNPACK_METHODS 0 #undef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 0 #undef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 0 #undef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT 0 #undef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE 0 #elif defined(PYSTON_VERSION) #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_PYSTON 1 #define CYTHON_COMPILING_IN_CPYTHON 0 
#ifndef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 1 #endif #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #undef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 0 #undef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 0 #ifndef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 1 #endif #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #undef CYTHON_USE_PYLONG_INTERNALS #define CYTHON_USE_PYLONG_INTERNALS 0 #ifndef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 0 #endif #ifndef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 1 #endif #ifndef CYTHON_UNPACK_METHODS #define CYTHON_UNPACK_METHODS 1 #endif #undef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 0 #undef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 0 #undef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT 0 #undef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE 0 #else #define CYTHON_COMPILING_IN_PYPY 0 #define CYTHON_COMPILING_IN_PYSTON 0 #define CYTHON_COMPILING_IN_CPYTHON 1 #ifndef CYTHON_USE_TYPE_SLOTS #define CYTHON_USE_TYPE_SLOTS 1 #endif #if PY_VERSION_HEX < 0x02070000 #undef CYTHON_USE_PYTYPE_LOOKUP #define CYTHON_USE_PYTYPE_LOOKUP 0 #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) #define CYTHON_USE_PYTYPE_LOOKUP 1 #endif #if PY_MAJOR_VERSION < 3 #undef CYTHON_USE_ASYNC_SLOTS #define CYTHON_USE_ASYNC_SLOTS 0 #elif !defined(CYTHON_USE_ASYNC_SLOTS) #define CYTHON_USE_ASYNC_SLOTS 1 #endif #if PY_VERSION_HEX < 0x02070000 #undef CYTHON_USE_PYLONG_INTERNALS #define CYTHON_USE_PYLONG_INTERNALS 0 #elif !defined(CYTHON_USE_PYLONG_INTERNALS) #define CYTHON_USE_PYLONG_INTERNALS 1 #endif #ifndef CYTHON_USE_PYLIST_INTERNALS #define CYTHON_USE_PYLIST_INTERNALS 1 #endif #ifndef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 1 #endif #if PY_VERSION_HEX < 0x030300F0 #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #elif 
!defined(CYTHON_USE_UNICODE_WRITER) #define CYTHON_USE_UNICODE_WRITER 1 #endif #ifndef CYTHON_AVOID_BORROWED_REFS #define CYTHON_AVOID_BORROWED_REFS 0 #endif #ifndef CYTHON_ASSUME_SAFE_MACROS #define CYTHON_ASSUME_SAFE_MACROS 1 #endif #ifndef CYTHON_UNPACK_METHODS #define CYTHON_UNPACK_METHODS 1 #endif #ifndef CYTHON_FAST_THREAD_STATE #define CYTHON_FAST_THREAD_STATE 1 #endif #ifndef CYTHON_FAST_PYCALL #define CYTHON_FAST_PYCALL 1 #endif #ifndef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT (0 && PY_VERSION_HEX >= 0x03050000) #endif #ifndef CYTHON_USE_TP_FINALIZE #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) #endif #endif #if !defined(CYTHON_FAST_PYCCALL) #define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) #endif #if CYTHON_USE_PYLONG_INTERNALS #include "longintrepr.h" #undef SHIFT #undef BASE #undef MASK #endif #if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) #define Py_OptimizeFlag 0 #endif #define __PYX_BUILD_PY_SSIZE_T "n" #define CYTHON_FORMAT_SSIZE_T "z" #if PY_MAJOR_VERSION < 3 #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) #define __Pyx_DefaultClassType PyClass_Type #else #define __Pyx_BUILTIN_MODULE_NAME "builtins" #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) #define __Pyx_DefaultClassType PyType_Type #endif #ifndef Py_TPFLAGS_CHECKTYPES #define Py_TPFLAGS_CHECKTYPES 0 #endif #ifndef Py_TPFLAGS_HAVE_INDEX #define Py_TPFLAGS_HAVE_INDEX 0 #endif #ifndef Py_TPFLAGS_HAVE_NEWBUFFER #define Py_TPFLAGS_HAVE_NEWBUFFER 0 #endif #ifndef Py_TPFLAGS_HAVE_FINALIZE #define Py_TPFLAGS_HAVE_FINALIZE 0 #endif #if PY_VERSION_HEX < 0x030700A0 || !defined(METH_FASTCALL) #ifndef METH_FASTCALL 
#define METH_FASTCALL 0x80 #endif typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject **args, Py_ssize_t nargs); typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject **args, Py_ssize_t nargs, PyObject *kwnames); #else #define __Pyx_PyCFunctionFast _PyCFunctionFast #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords #endif #if CYTHON_FAST_PYCCALL #define __Pyx_PyFastCFunction_Check(func)\ ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS))))) #else #define __Pyx_PyFastCFunction_Check(func) 0 #endif #if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 #define __Pyx_PyThreadState_Current PyThreadState_GET() #elif PY_VERSION_HEX >= 0x03060000 #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() #elif PY_VERSION_HEX >= 0x03000000 #define __Pyx_PyThreadState_Current PyThreadState_GET() #else #define __Pyx_PyThreadState_Current _PyThreadState_Current #endif #if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) #define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? 
PyDict_New() : _PyDict_NewPresized(n)) #else #define __Pyx_PyDict_NewPresized(n) PyDict_New() #endif #if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) #else #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) #endif #if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) #define CYTHON_PEP393_ENABLED 1 #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ 0 : _PyUnicode_Ready((PyObject *)(op))) #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) #else #define CYTHON_PEP393_ENABLED 0 #define PyUnicode_1BYTE_KIND 1 #define PyUnicode_2BYTE_KIND 2 #define PyUnicode_4BYTE_KIND 4 #define __Pyx_PyUnicode_READY(op) (0) #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 
65535 : 1114111) #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) #endif #if CYTHON_COMPILING_IN_PYPY #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) #else #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) #endif #if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) #endif #if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) #endif #if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) #endif #if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) #define PyObject_Malloc(s) PyMem_Malloc(s) #define PyObject_Free(p) PyMem_Free(p) #define PyObject_Realloc(p) PyMem_Realloc(p) #endif #if CYTHON_COMPILING_IN_PYSTON #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) #else #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) #endif #define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) #define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? 
PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) #else #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) #endif #if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) #define PyObject_ASCII(o) PyObject_Repr(o) #endif #if PY_MAJOR_VERSION >= 3 #define PyBaseString_Type PyUnicode_Type #define PyStringObject PyUnicodeObject #define PyString_Type PyUnicode_Type #define PyString_Check PyUnicode_Check #define PyString_CheckExact PyUnicode_CheckExact #endif #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) #else #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) #endif #ifndef PySet_CheckExact #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) #endif #define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) #if PY_MAJOR_VERSION >= 3 #define PyIntObject PyLongObject #define PyInt_Type PyLong_Type #define PyInt_Check(op) PyLong_Check(op) #define PyInt_CheckExact(op) PyLong_CheckExact(op) #define PyInt_FromString PyLong_FromString #define PyInt_FromUnicode PyLong_FromUnicode #define PyInt_FromLong PyLong_FromLong #define PyInt_FromSize_t PyLong_FromSize_t #define PyInt_FromSsize_t PyLong_FromSsize_t #define PyInt_AsLong PyLong_AsLong #define PyInt_AS_LONG PyLong_AS_LONG #define PyInt_AsSsize_t PyLong_AsSsize_t #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask #define PyNumber_Int PyNumber_Long #endif #if PY_MAJOR_VERSION >= 3 #define PyBoolObject PyLongObject #endif #if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY #ifndef PyUnicode_InternFromString #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) #endif #endif #if PY_VERSION_HEX < 
0x030200A4 typedef long Py_hash_t; #define __Pyx_PyInt_FromHash_t PyInt_FromLong #define __Pyx_PyInt_AsHash_t PyInt_AsLong #else #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t #endif #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : PyInstanceMethod_New(func)) #else #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) #endif #ifndef __has_attribute #define __has_attribute(x) 0 #endif #ifndef __has_cpp_attribute #define __has_cpp_attribute(x) 0 #endif #if CYTHON_USE_ASYNC_SLOTS #if PY_VERSION_HEX >= 0x030500B1 #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) #else #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) #endif #else #define __Pyx_PyType_AsAsync(obj) NULL #endif #ifndef __Pyx_PyAsyncMethodsStruct typedef struct { unaryfunc am_await; unaryfunc am_aiter; unaryfunc am_anext; } __Pyx_PyAsyncMethodsStruct; #endif #ifndef CYTHON_RESTRICT #if defined(__GNUC__) #define CYTHON_RESTRICT __restrict__ #elif defined(_MSC_VER) && _MSC_VER >= 1400 #define CYTHON_RESTRICT __restrict #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L #define CYTHON_RESTRICT restrict #else #define CYTHON_RESTRICT #endif #endif #ifndef CYTHON_UNUSED # if defined(__GNUC__) # if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) # define CYTHON_UNUSED __attribute__ ((__unused__)) # else # define CYTHON_UNUSED # endif # elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) # define CYTHON_UNUSED __attribute__ ((__unused__)) # else # define CYTHON_UNUSED # endif #endif #ifndef CYTHON_MAYBE_UNUSED_VAR # if defined(__cplusplus) template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } # else # define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) # endif #endif #ifndef CYTHON_NCP_UNUSED # if 
CYTHON_COMPILING_IN_CPYTHON # define CYTHON_NCP_UNUSED # else # define CYTHON_NCP_UNUSED CYTHON_UNUSED # endif #endif #define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) #ifdef _MSC_VER #ifndef _MSC_STDINT_H_ #if _MSC_VER < 1300 typedef unsigned char uint8_t; typedef unsigned int uint32_t; #else typedef unsigned __int8 uint8_t; typedef unsigned __int32 uint32_t; #endif #endif #else #include #endif #ifndef CYTHON_FALLTHROUGH #if defined(__cplusplus) && __cplusplus >= 201103L #if __has_cpp_attribute(fallthrough) #define CYTHON_FALLTHROUGH [[fallthrough]] #elif __has_cpp_attribute(clang::fallthrough) #define CYTHON_FALLTHROUGH [[clang::fallthrough]] #elif __has_cpp_attribute(gnu::fallthrough) #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] #endif #endif #ifndef CYTHON_FALLTHROUGH #if __has_attribute(fallthrough) #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) #else #define CYTHON_FALLTHROUGH #endif #endif #if defined(__clang__ ) && defined(__apple_build_version__) #if __apple_build_version__ < 7000000 #undef CYTHON_FALLTHROUGH #define CYTHON_FALLTHROUGH #endif #endif #endif #ifndef CYTHON_INLINE #if defined(__clang__) #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) #elif defined(__GNUC__) #define CYTHON_INLINE __inline__ #elif defined(_MSC_VER) #define CYTHON_INLINE __inline #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L #define CYTHON_INLINE inline #else #define CYTHON_INLINE #endif #endif #if defined(WIN32) || defined(MS_WINDOWS) #define _USE_MATH_DEFINES #endif #include #ifdef NAN #define __PYX_NAN() ((float) NAN) #else static CYTHON_INLINE float __PYX_NAN() { float value; memset(&value, 0xFF, sizeof(value)); return value; } #endif #if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) #define __Pyx_truncl trunc #else #define __Pyx_truncl truncl #endif #define __PYX_ERR(f_index, lineno, Ln_error) \ { \ __pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto 
Ln_error; \ } #ifndef __PYX_EXTERN_C #ifdef __cplusplus #define __PYX_EXTERN_C extern "C" #else #define __PYX_EXTERN_C extern #endif #endif #define __PYX_HAVE__aiohttp___websocket #define __PYX_HAVE_API__aiohttp___websocket #include #include #include "pythread.h" #include #ifdef _OPENMP #include #endif /* _OPENMP */ #if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) #define CYTHON_WITHOUT_ASSERTIONS #endif typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; #define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 #define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 0 #define __PYX_DEFAULT_STRING_ENCODING "" #define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString #define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize #define __Pyx_uchar_cast(c) ((unsigned char)c) #define __Pyx_long_cast(x) ((long)x) #define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ (sizeof(type) < sizeof(Py_ssize_t)) ||\ (sizeof(type) > sizeof(Py_ssize_t) &&\ likely(v < (type)PY_SSIZE_T_MAX ||\ v == (type)PY_SSIZE_T_MAX) &&\ (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ v == (type)PY_SSIZE_T_MIN))) ||\ (sizeof(type) == sizeof(Py_ssize_t) &&\ (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ v == (type)PY_SSIZE_T_MAX))) ) #if defined (__cplusplus) && __cplusplus >= 201103L #include #define __Pyx_sst_abs(value) std::abs(value) #elif SIZEOF_INT >= SIZEOF_SIZE_T #define __Pyx_sst_abs(value) abs(value) #elif SIZEOF_LONG >= SIZEOF_SIZE_T #define __Pyx_sst_abs(value) labs(value) #elif defined (_MSC_VER) #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L #define __Pyx_sst_abs(value) llabs(value) #elif defined (__GNUC__) #define __Pyx_sst_abs(value) __builtin_llabs(value) #else #define __Pyx_sst_abs(value) ((value<0) ? 
-value : value) #endif static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); #define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) #define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) #define __Pyx_PyBytes_FromString PyBytes_FromString #define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); #if PY_MAJOR_VERSION < 3 #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize #else #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize #endif #define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) #define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) #define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) #define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) #define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) #define __Pyx_PyByteArray_FromCString(s) 
__Pyx_PyByteArray_FromString((const char*)s) #define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) #define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { const Py_UNICODE *u_end = u; while (*u_end++) ; return (size_t)(u_end - u - 1); } #define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) #define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode #define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode #define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) #define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) #define __Pyx_PyBool_FromLong(b) ((b) ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False)) static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); #define __Pyx_PySequence_Tuple(obj)\ (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); #if CYTHON_ASSUME_SAFE_MACROS #define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) #else #define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) #endif #define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) #else #define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) #endif #define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? 
__Pyx_NewRef(x) : PyNumber_Float(x)) #if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII static int __Pyx_sys_getdefaultencoding_not_ascii; static int __Pyx_init_sys_getdefaultencoding_params(void) { PyObject* sys; PyObject* default_encoding = NULL; PyObject* ascii_chars_u = NULL; PyObject* ascii_chars_b = NULL; const char* default_encoding_c; sys = PyImport_ImportModule("sys"); if (!sys) goto bad; default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); Py_DECREF(sys); if (!default_encoding) goto bad; default_encoding_c = PyBytes_AsString(default_encoding); if (!default_encoding_c) goto bad; if (strcmp(default_encoding_c, "ascii") == 0) { __Pyx_sys_getdefaultencoding_not_ascii = 0; } else { char ascii_chars[128]; int c; for (c = 0; c < 128; c++) { ascii_chars[c] = c; } __Pyx_sys_getdefaultencoding_not_ascii = 1; ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); if (!ascii_chars_u) goto bad; ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { PyErr_Format( PyExc_ValueError, "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", default_encoding_c); goto bad; } Py_DECREF(ascii_chars_u); Py_DECREF(ascii_chars_b); } Py_DECREF(default_encoding); return 0; bad: Py_XDECREF(default_encoding); Py_XDECREF(ascii_chars_u); Py_XDECREF(ascii_chars_b); return -1; } #endif #if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 #define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) #else #define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) #if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT static char* __PYX_DEFAULT_STRING_ENCODING; static int __Pyx_init_sys_getdefaultencoding_params(void) { PyObject* sys; 
PyObject* default_encoding = NULL; char* default_encoding_c; sys = PyImport_ImportModule("sys"); if (!sys) goto bad; default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); Py_DECREF(sys); if (!default_encoding) goto bad; default_encoding_c = PyBytes_AsString(default_encoding); if (!default_encoding_c) goto bad; __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c)); if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); Py_DECREF(default_encoding); return 0; bad: Py_XDECREF(default_encoding); return -1; } #endif #endif /* Test for GCC > 2.95 */ #if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) #define likely(x) __builtin_expect(!!(x), 1) #define unlikely(x) __builtin_expect(!!(x), 0) #else /* !__GNUC__ or GCC < 2.95 */ #define likely(x) (x) #define unlikely(x) (x) #endif /* __GNUC__ */ static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } static PyObject *__pyx_m = NULL; static PyObject *__pyx_d; static PyObject *__pyx_b; static PyObject *__pyx_cython_runtime; static PyObject *__pyx_empty_tuple; static PyObject *__pyx_empty_bytes; static PyObject *__pyx_empty_unicode; static int __pyx_lineno; static int __pyx_clineno = 0; static const char * __pyx_cfilenm= __FILE__; static const char *__pyx_filename; static const char *__pyx_f[] = { "aiohttp\\_websocket.pyx", "type.pxd", "bool.pxd", "complex.pxd", }; /*--- Type declarations ---*/ /* --- Runtime support code (head) --- */ /* Refnanny.proto */ #ifndef CYTHON_REFNANNY #define CYTHON_REFNANNY 0 #endif #if CYTHON_REFNANNY typedef struct { void (*INCREF)(void*, PyObject*, int); void (*DECREF)(void*, PyObject*, int); void (*GOTREF)(void*, PyObject*, int); void (*GIVEREF)(void*, PyObject*, int); void* (*SetupContext)(const char*, int, const char*); void (*FinishContext)(void**); } __Pyx_RefNannyAPIStruct; static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = 
NULL; static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; #ifdef WITH_THREAD #define __Pyx_RefNannySetupContext(name, acquire_gil)\ if (acquire_gil) {\ PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ PyGILState_Release(__pyx_gilstate_save);\ } else {\ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ } #else #define __Pyx_RefNannySetupContext(name, acquire_gil)\ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) #endif #define __Pyx_RefNannyFinishContext()\ __Pyx_RefNanny->FinishContext(&__pyx_refnanny) #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) #else #define __Pyx_RefNannyDeclarations #define __Pyx_RefNannySetupContext(name, acquire_gil) #define __Pyx_RefNannyFinishContext() #define __Pyx_INCREF(r) Py_INCREF(r) #define __Pyx_DECREF(r) Py_DECREF(r) #define __Pyx_GOTREF(r) #define __Pyx_GIVEREF(r) #define __Pyx_XINCREF(r) Py_XINCREF(r) #define __Pyx_XDECREF(r) Py_XDECREF(r) #define __Pyx_XGOTREF(r) #define __Pyx_XGIVEREF(r) #endif #define __Pyx_XDECREF_SET(r, v) do {\ PyObject *tmp = (PyObject *) r;\ r = v; __Pyx_XDECREF(tmp);\ } while (0) #define __Pyx_DECREF_SET(r, v) do {\ PyObject *tmp = (PyObject *) r;\ r = v; 
__Pyx_DECREF(tmp);\ } while (0) #define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) #define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) /* PyObjectGetAttrStr.proto */ #if CYTHON_USE_TYPE_SLOTS static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { PyTypeObject* tp = Py_TYPE(obj); if (likely(tp->tp_getattro)) return tp->tp_getattro(obj, attr_name); #if PY_MAJOR_VERSION < 3 if (likely(tp->tp_getattr)) return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); #endif return PyObject_GetAttr(obj, attr_name); } #else #define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) #endif /* GetBuiltinName.proto */ static PyObject *__Pyx_GetBuiltinName(PyObject *name); /* RaiseArgTupleInvalid.proto */ static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); /* RaiseDoubleKeywords.proto */ static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); /* ParseKeywords.proto */ static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ const char* function_name); /* PyObjectCall.proto */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); #else #define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) #endif /* PyThreadStateGet.proto */ #if CYTHON_FAST_THREAD_STATE #define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; #define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; #define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type #else #define __Pyx_PyThreadState_declare #define __Pyx_PyThreadState_assign #define __Pyx_PyErr_Occurred() PyErr_Occurred() #endif /* PyErrFetchRestore.proto */ #if CYTHON_FAST_THREAD_STATE 
#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) #define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) #define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) #define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) #define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); #if CYTHON_COMPILING_IN_CPYTHON #define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) #else #define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) #endif #else #define __Pyx_PyErr_Clear() PyErr_Clear() #define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) #define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) #define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) #define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) #define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) #define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) #define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) #endif /* CLineInTraceback.proto */ #ifdef CYTHON_CLINE_IN_TRACEBACK #define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? 
c_line : 0) #else static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); #endif /* CodeObjectCache.proto */ typedef struct { PyCodeObject* code_object; int code_line; } __Pyx_CodeObjectCacheEntry; struct __Pyx_CodeObjectCache { int count; int max_count; __Pyx_CodeObjectCacheEntry* entries; }; static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); static PyCodeObject *__pyx_find_code_object(int code_line); static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); /* AddTraceback.proto */ static void __Pyx_AddTraceback(const char *funcname, int c_line, int py_line, const char *filename); /* CIntToPy.proto */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); /* CIntFromPy.proto */ static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); /* CIntFromPy.proto */ static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); /* FastTypeChecks.proto */ #if CYTHON_COMPILING_IN_CPYTHON #define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); #else #define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) #define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) #define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2)) #endif /* CheckBinaryVersion.proto */ static int __Pyx_check_binary_version(void); /* PyIdentifierFromString.proto */ #if !defined(__Pyx_PyIdentifier_FromString) #if PY_MAJOR_VERSION < 3 #define __Pyx_PyIdentifier_FromString(s) PyString_FromString(s) #else 
#define __Pyx_PyIdentifier_FromString(s) PyUnicode_FromString(s) #endif #endif /* ModuleImport.proto */ static PyObject *__Pyx_ImportModule(const char *name); /* TypeImport.proto */ static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, size_t size, int strict); /* InitStrings.proto */ static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /* Module declarations from 'cpython.version' */ /* Module declarations from '__builtin__' */ /* Module declarations from 'cpython.type' */ static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0; /* Module declarations from 'libc.string' */ /* Module declarations from 'libc.stdio' */ /* Module declarations from 'cpython.object' */ /* Module declarations from 'cpython.ref' */ /* Module declarations from 'cpython.exc' */ /* Module declarations from 'cpython.module' */ /* Module declarations from 'cpython.mem' */ /* Module declarations from 'cpython.tuple' */ /* Module declarations from 'cpython.list' */ /* Module declarations from 'cpython.sequence' */ /* Module declarations from 'cpython.mapping' */ /* Module declarations from 'cpython.iterator' */ /* Module declarations from 'cpython.number' */ /* Module declarations from 'cpython.int' */ /* Module declarations from '__builtin__' */ /* Module declarations from 'cpython.bool' */ static PyTypeObject *__pyx_ptype_7cpython_4bool_bool = 0; /* Module declarations from 'cpython.long' */ /* Module declarations from 'cpython.float' */ /* Module declarations from '__builtin__' */ /* Module declarations from 'cpython.complex' */ static PyTypeObject *__pyx_ptype_7cpython_7complex_complex = 0; /* Module declarations from 'cpython.string' */ /* Module declarations from 'cpython.unicode' */ /* Module declarations from 'cpython.dict' */ /* Module declarations from 'cpython.instance' */ /* Module declarations from 'cpython.function' */ /* Module declarations from 'cpython.method' */ /* Module declarations from 'cpython.weakref' */ /* Module declarations from 
'cpython.getargs' */ /* Module declarations from 'cpython.pythread' */ /* Module declarations from 'cpython.pystate' */ /* Module declarations from 'cpython.cobject' */ /* Module declarations from 'cpython.oldbuffer' */ /* Module declarations from 'cpython.set' */ /* Module declarations from 'cpython.buffer' */ /* Module declarations from 'cpython.bytes' */ /* Module declarations from 'cpython.pycapsule' */ /* Module declarations from 'cpython' */ /* Module declarations from 'libc.stdint' */ /* Module declarations from 'aiohttp._websocket' */ #define __Pyx_MODULE_NAME "aiohttp._websocket" extern int __pyx_module_is_main_aiohttp___websocket; int __pyx_module_is_main_aiohttp___websocket = 0; /* Implementation of 'aiohttp._websocket' */ static PyObject *__pyx_builtin_range; static const char __pyx_k_i[] = "i"; static const char __pyx_k_data[] = "data"; static const char __pyx_k_main[] = "__main__"; static const char __pyx_k_mask[] = "mask"; static const char __pyx_k_test[] = "__test__"; static const char __pyx_k_range[] = "range"; static const char __pyx_k_in_buf[] = "in_buf"; static const char __pyx_k_data_len[] = "data_len"; static const char __pyx_k_mask_buf[] = "mask_buf"; static const char __pyx_k_uint32_msk[] = "uint32_msk"; static const char __pyx_k_uint64_msk[] = "uint64_msk"; static const char __pyx_k_aiohttp__websocket[] = "aiohttp._websocket"; static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; static const char __pyx_k_websocket_mask_cython[] = "_websocket_mask_cython"; static const char __pyx_k_aiohttp__websocket_pyx[] = "aiohttp\\_websocket.pyx"; static PyObject *__pyx_n_s_aiohttp__websocket; static PyObject *__pyx_kp_s_aiohttp__websocket_pyx; static PyObject *__pyx_n_s_cline_in_traceback; static PyObject *__pyx_n_s_data; static PyObject *__pyx_n_s_data_len; static PyObject *__pyx_n_s_i; static PyObject *__pyx_n_s_in_buf; static PyObject *__pyx_n_s_main; static PyObject *__pyx_n_s_mask; static PyObject *__pyx_n_s_mask_buf; static 
PyObject *__pyx_n_s_range; static PyObject *__pyx_n_s_test; static PyObject *__pyx_n_s_uint32_msk; static PyObject *__pyx_n_s_uint64_msk; static PyObject *__pyx_n_s_websocket_mask_cython; static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_mask, PyObject *__pyx_v_data); /* proto */ static PyObject *__pyx_tuple_; static PyObject *__pyx_codeobj__2; /* "aiohttp/_websocket.pyx":9 * from libc.stdint cimport uint32_t, uint64_t, uintmax_t * * def _websocket_mask_cython(object mask, object data): # <<<<<<<<<<<<<< * """Note, this function mutates it's `data` argument * """ */ /* Python wrapper */ static PyObject *__pyx_pw_7aiohttp_10_websocket_1_websocket_mask_cython(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static char __pyx_doc_7aiohttp_10_websocket__websocket_mask_cython[] = "Note, this function mutates it's `data` argument\n "; static PyMethodDef __pyx_mdef_7aiohttp_10_websocket_1_websocket_mask_cython = {"_websocket_mask_cython", (PyCFunction)__pyx_pw_7aiohttp_10_websocket_1_websocket_mask_cython, METH_VARARGS|METH_KEYWORDS, __pyx_doc_7aiohttp_10_websocket__websocket_mask_cython}; static PyObject *__pyx_pw_7aiohttp_10_websocket_1_websocket_mask_cython(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_mask = 0; PyObject *__pyx_v_data = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("_websocket_mask_cython (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_mask,&__pyx_n_s_data,0}; PyObject* values[2] = {0,0}; if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch 
(pos_args) { case 0: if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_mask)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_data)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("_websocket_mask_cython", 1, 2, 2, 1); __PYX_ERR(0, 9, __pyx_L3_error) } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "_websocket_mask_cython") < 0)) __PYX_ERR(0, 9, __pyx_L3_error) } } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { goto __pyx_L5_argtuple_error; } else { values[0] = PyTuple_GET_ITEM(__pyx_args, 0); values[1] = PyTuple_GET_ITEM(__pyx_args, 1); } __pyx_v_mask = values[0]; __pyx_v_data = values[1]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("_websocket_mask_cython", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 9, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("aiohttp._websocket._websocket_mask_cython", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(__pyx_self, __pyx_v_mask, __pyx_v_data); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_7aiohttp_10_websocket__websocket_mask_cython(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_mask, PyObject *__pyx_v_data) { Py_ssize_t __pyx_v_data_len; Py_ssize_t __pyx_v_i; unsigned char *__pyx_v_in_buf; unsigned char const *__pyx_v_mask_buf; uint32_t __pyx_v_uint32_msk; uint64_t __pyx_v_uint64_msk; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations Py_ssize_t __pyx_t_1; int __pyx_t_2; int __pyx_t_3; PyObject *__pyx_t_4 = NULL; PyObject *__pyx_t_5 = NULL; char *__pyx_t_6; uint64_t *__pyx_t_7; long __pyx_t_8; uint32_t *__pyx_t_9; Py_ssize_t __pyx_t_10; Py_ssize_t __pyx_t_11; 
__Pyx_RefNannySetupContext("_websocket_mask_cython", 0); __Pyx_INCREF(__pyx_v_mask); __Pyx_INCREF(__pyx_v_data); /* "aiohttp/_websocket.pyx":20 * uint64_t uint64_msk * * assert len(mask) == 4 # <<<<<<<<<<<<<< * * if not isinstance(mask, bytes): */ #ifndef CYTHON_WITHOUT_ASSERTIONS if (unlikely(!Py_OptimizeFlag)) { __pyx_t_1 = PyObject_Length(__pyx_v_mask); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 20, __pyx_L1_error) if (unlikely(!((__pyx_t_1 == 4) != 0))) { PyErr_SetNone(PyExc_AssertionError); __PYX_ERR(0, 20, __pyx_L1_error) } } #endif /* "aiohttp/_websocket.pyx":22 * assert len(mask) == 4 * * if not isinstance(mask, bytes): # <<<<<<<<<<<<<< * mask = bytes(mask) * */ __pyx_t_2 = PyBytes_Check(__pyx_v_mask); __pyx_t_3 = ((!(__pyx_t_2 != 0)) != 0); if (__pyx_t_3) { /* "aiohttp/_websocket.pyx":23 * * if not isinstance(mask, bytes): * mask = bytes(mask) # <<<<<<<<<<<<<< * * if isinstance(data, bytearray): */ __pyx_t_4 = PyTuple_New(1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 23, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_INCREF(__pyx_v_mask); __Pyx_GIVEREF(__pyx_v_mask); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_mask); __pyx_t_5 = __Pyx_PyObject_Call(((PyObject *)(&PyBytes_Type)), __pyx_t_4, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 23, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_DECREF_SET(__pyx_v_mask, __pyx_t_5); __pyx_t_5 = 0; /* "aiohttp/_websocket.pyx":22 * assert len(mask) == 4 * * if not isinstance(mask, bytes): # <<<<<<<<<<<<<< * mask = bytes(mask) * */ } /* "aiohttp/_websocket.pyx":25 * mask = bytes(mask) * * if isinstance(data, bytearray): # <<<<<<<<<<<<<< * data = data * else: */ __pyx_t_3 = PyByteArray_Check(__pyx_v_data); __pyx_t_2 = (__pyx_t_3 != 0); if (__pyx_t_2) { /* "aiohttp/_websocket.pyx":26 * * if isinstance(data, bytearray): * data = data # <<<<<<<<<<<<<< * else: * data = bytearray(data) */ __pyx_t_5 = __pyx_v_data; __Pyx_INCREF(__pyx_t_5); __Pyx_DECREF_SET(__pyx_v_data, __pyx_t_5); 
__pyx_t_5 = 0; /* "aiohttp/_websocket.pyx":25 * mask = bytes(mask) * * if isinstance(data, bytearray): # <<<<<<<<<<<<<< * data = data * else: */ goto __pyx_L4; } /* "aiohttp/_websocket.pyx":28 * data = data * else: * data = bytearray(data) # <<<<<<<<<<<<<< * * data_len = len(data) */ /*else*/ { __pyx_t_5 = PyTuple_New(1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 28, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_5); __Pyx_INCREF(__pyx_v_data); __Pyx_GIVEREF(__pyx_v_data); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_v_data); __pyx_t_4 = __Pyx_PyObject_Call(((PyObject *)(&PyByteArray_Type)), __pyx_t_5, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 28, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_4); __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; __Pyx_DECREF_SET(__pyx_v_data, __pyx_t_4); __pyx_t_4 = 0; } __pyx_L4:; /* "aiohttp/_websocket.pyx":30 * data = bytearray(data) * * data_len = len(data) # <<<<<<<<<<<<<< * in_buf = PyByteArray_AsString(data) * mask_buf = PyBytes_AsString(mask) */ __pyx_t_1 = PyObject_Length(__pyx_v_data); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 30, __pyx_L1_error) __pyx_v_data_len = __pyx_t_1; /* "aiohttp/_websocket.pyx":31 * * data_len = len(data) * in_buf = PyByteArray_AsString(data) # <<<<<<<<<<<<<< * mask_buf = PyBytes_AsString(mask) * uint32_msk = (mask_buf)[0] */ if (!(likely(PyByteArray_CheckExact(__pyx_v_data))||((__pyx_v_data) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "bytearray", Py_TYPE(__pyx_v_data)->tp_name), 0))) __PYX_ERR(0, 31, __pyx_L1_error) __pyx_t_6 = PyByteArray_AsString(((PyObject*)__pyx_v_data)); if (unlikely(__pyx_t_6 == ((char *)NULL))) __PYX_ERR(0, 31, __pyx_L1_error) __pyx_v_in_buf = ((unsigned char *)__pyx_t_6); /* "aiohttp/_websocket.pyx":32 * data_len = len(data) * in_buf = PyByteArray_AsString(data) * mask_buf = PyBytes_AsString(mask) # <<<<<<<<<<<<<< * uint32_msk = (mask_buf)[0] * */ __pyx_t_6 = PyBytes_AsString(__pyx_v_mask); if (unlikely(__pyx_t_6 == ((char *)NULL))) __PYX_ERR(0, 32, 
__pyx_L1_error) __pyx_v_mask_buf = ((unsigned char const *)__pyx_t_6); /* "aiohttp/_websocket.pyx":33 * in_buf = PyByteArray_AsString(data) * mask_buf = PyBytes_AsString(mask) * uint32_msk = (mask_buf)[0] # <<<<<<<<<<<<<< * * # TODO: align in_data ptr to achieve even faster speeds */ __pyx_v_uint32_msk = (((uint32_t *)__pyx_v_mask_buf)[0]); /* "aiohttp/_websocket.pyx":38 * # does it need in python ?! malloc() always aligns to sizeof(long) bytes * * if sizeof(size_t) >= 8: # <<<<<<<<<<<<<< * uint64_msk = uint32_msk * uint64_msk = (uint64_msk << 32) | uint32_msk */ __pyx_t_2 = (((sizeof(size_t)) >= 8) != 0); if (__pyx_t_2) { /* "aiohttp/_websocket.pyx":39 * * if sizeof(size_t) >= 8: * uint64_msk = uint32_msk # <<<<<<<<<<<<<< * uint64_msk = (uint64_msk << 32) | uint32_msk * */ __pyx_v_uint64_msk = __pyx_v_uint32_msk; /* "aiohttp/_websocket.pyx":40 * if sizeof(size_t) >= 8: * uint64_msk = uint32_msk * uint64_msk = (uint64_msk << 32) | uint32_msk # <<<<<<<<<<<<<< * * while data_len >= 8: */ __pyx_v_uint64_msk = ((__pyx_v_uint64_msk << 32) | __pyx_v_uint32_msk); /* "aiohttp/_websocket.pyx":42 * uint64_msk = (uint64_msk << 32) | uint32_msk * * while data_len >= 8: # <<<<<<<<<<<<<< * (in_buf)[0] ^= uint64_msk * in_buf += 8 */ while (1) { __pyx_t_2 = ((__pyx_v_data_len >= 8) != 0); if (!__pyx_t_2) break; /* "aiohttp/_websocket.pyx":43 * * while data_len >= 8: * (in_buf)[0] ^= uint64_msk # <<<<<<<<<<<<<< * in_buf += 8 * data_len -= 8 */ __pyx_t_7 = ((uint64_t *)__pyx_v_in_buf); __pyx_t_8 = 0; (__pyx_t_7[__pyx_t_8]) = ((__pyx_t_7[__pyx_t_8]) ^ __pyx_v_uint64_msk); /* "aiohttp/_websocket.pyx":44 * while data_len >= 8: * (in_buf)[0] ^= uint64_msk * in_buf += 8 # <<<<<<<<<<<<<< * data_len -= 8 * */ __pyx_v_in_buf = (__pyx_v_in_buf + 8); /* "aiohttp/_websocket.pyx":45 * (in_buf)[0] ^= uint64_msk * in_buf += 8 * data_len -= 8 # <<<<<<<<<<<<<< * * */ __pyx_v_data_len = (__pyx_v_data_len - 8); } /* "aiohttp/_websocket.pyx":38 * # does it need in python ?! 
malloc() always aligns to sizeof(long) bytes * * if sizeof(size_t) >= 8: # <<<<<<<<<<<<<< * uint64_msk = uint32_msk * uint64_msk = (uint64_msk << 32) | uint32_msk */ } /* "aiohttp/_websocket.pyx":48 * * * while data_len >= 4: # <<<<<<<<<<<<<< * (in_buf)[0] ^= uint32_msk * in_buf += 4 */ while (1) { __pyx_t_2 = ((__pyx_v_data_len >= 4) != 0); if (!__pyx_t_2) break; /* "aiohttp/_websocket.pyx":49 * * while data_len >= 4: * (in_buf)[0] ^= uint32_msk # <<<<<<<<<<<<<< * in_buf += 4 * data_len -= 4 */ __pyx_t_9 = ((uint32_t *)__pyx_v_in_buf); __pyx_t_8 = 0; (__pyx_t_9[__pyx_t_8]) = ((__pyx_t_9[__pyx_t_8]) ^ __pyx_v_uint32_msk); /* "aiohttp/_websocket.pyx":50 * while data_len >= 4: * (in_buf)[0] ^= uint32_msk * in_buf += 4 # <<<<<<<<<<<<<< * data_len -= 4 * */ __pyx_v_in_buf = (__pyx_v_in_buf + 4); /* "aiohttp/_websocket.pyx":51 * (in_buf)[0] ^= uint32_msk * in_buf += 4 * data_len -= 4 # <<<<<<<<<<<<<< * * for i in range(0, data_len): */ __pyx_v_data_len = (__pyx_v_data_len - 4); } /* "aiohttp/_websocket.pyx":53 * data_len -= 4 * * for i in range(0, data_len): # <<<<<<<<<<<<<< * in_buf[i] ^= mask_buf[i] */ __pyx_t_1 = __pyx_v_data_len; for (__pyx_t_10 = 0; __pyx_t_10 < __pyx_t_1; __pyx_t_10+=1) { __pyx_v_i = __pyx_t_10; /* "aiohttp/_websocket.pyx":54 * * for i in range(0, data_len): * in_buf[i] ^= mask_buf[i] # <<<<<<<<<<<<<< */ __pyx_t_11 = __pyx_v_i; (__pyx_v_in_buf[__pyx_t_11]) = ((__pyx_v_in_buf[__pyx_t_11]) ^ (__pyx_v_mask_buf[__pyx_v_i])); } /* "aiohttp/_websocket.pyx":9 * from libc.stdint cimport uint32_t, uint64_t, uintmax_t * * def _websocket_mask_cython(object mask, object data): # <<<<<<<<<<<<<< * """Note, this function mutates it's `data` argument * """ */ /* function exit code */ __pyx_r = Py_None; __Pyx_INCREF(Py_None); goto __pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_5); __Pyx_AddTraceback("aiohttp._websocket._websocket_mask_cython", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; 
__Pyx_XDECREF(__pyx_v_mask); __Pyx_XDECREF(__pyx_v_data); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyMethodDef __pyx_methods[] = { {0, 0, 0, 0} }; #if PY_MAJOR_VERSION >= 3 #if CYTHON_PEP489_MULTI_PHASE_INIT static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ static int __pyx_pymod_exec__websocket(PyObject* module); /*proto*/ static PyModuleDef_Slot __pyx_moduledef_slots[] = { {Py_mod_create, (void*)__pyx_pymod_create}, {Py_mod_exec, (void*)__pyx_pymod_exec__websocket}, {0, NULL} }; #endif static struct PyModuleDef __pyx_moduledef = { PyModuleDef_HEAD_INIT, "_websocket", 0, /* m_doc */ #if CYTHON_PEP489_MULTI_PHASE_INIT 0, /* m_size */ #else -1, /* m_size */ #endif __pyx_methods /* m_methods */, #if CYTHON_PEP489_MULTI_PHASE_INIT __pyx_moduledef_slots, /* m_slots */ #else NULL, /* m_reload */ #endif NULL, /* m_traverse */ NULL, /* m_clear */ NULL /* m_free */ }; #endif static __Pyx_StringTabEntry __pyx_string_tab[] = { {&__pyx_n_s_aiohttp__websocket, __pyx_k_aiohttp__websocket, sizeof(__pyx_k_aiohttp__websocket), 0, 0, 1, 1}, {&__pyx_kp_s_aiohttp__websocket_pyx, __pyx_k_aiohttp__websocket_pyx, sizeof(__pyx_k_aiohttp__websocket_pyx), 0, 0, 1, 0}, {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, {&__pyx_n_s_data, __pyx_k_data, sizeof(__pyx_k_data), 0, 0, 1, 1}, {&__pyx_n_s_data_len, __pyx_k_data_len, sizeof(__pyx_k_data_len), 0, 0, 1, 1}, {&__pyx_n_s_i, __pyx_k_i, sizeof(__pyx_k_i), 0, 0, 1, 1}, {&__pyx_n_s_in_buf, __pyx_k_in_buf, sizeof(__pyx_k_in_buf), 0, 0, 1, 1}, {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, {&__pyx_n_s_mask, __pyx_k_mask, sizeof(__pyx_k_mask), 0, 0, 1, 1}, {&__pyx_n_s_mask_buf, __pyx_k_mask_buf, sizeof(__pyx_k_mask_buf), 0, 0, 1, 1}, {&__pyx_n_s_range, __pyx_k_range, sizeof(__pyx_k_range), 0, 0, 1, 1}, {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, {&__pyx_n_s_uint32_msk, 
__pyx_k_uint32_msk, sizeof(__pyx_k_uint32_msk), 0, 0, 1, 1}, {&__pyx_n_s_uint64_msk, __pyx_k_uint64_msk, sizeof(__pyx_k_uint64_msk), 0, 0, 1, 1}, {&__pyx_n_s_websocket_mask_cython, __pyx_k_websocket_mask_cython, sizeof(__pyx_k_websocket_mask_cython), 0, 0, 1, 1}, {0, 0, 0, 0, 0, 0, 0} }; static int __Pyx_InitCachedBuiltins(void) { __pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 53, __pyx_L1_error) return 0; __pyx_L1_error:; return -1; } static int __Pyx_InitCachedConstants(void) { __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); /* "aiohttp/_websocket.pyx":9 * from libc.stdint cimport uint32_t, uint64_t, uintmax_t * * def _websocket_mask_cython(object mask, object data): # <<<<<<<<<<<<<< * """Note, this function mutates it's `data` argument * """ */ __pyx_tuple_ = PyTuple_Pack(8, __pyx_n_s_mask, __pyx_n_s_data, __pyx_n_s_data_len, __pyx_n_s_i, __pyx_n_s_in_buf, __pyx_n_s_mask_buf, __pyx_n_s_uint32_msk, __pyx_n_s_uint64_msk); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 9, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple_); __Pyx_GIVEREF(__pyx_tuple_); __pyx_codeobj__2 = (PyObject*)__Pyx_PyCode_New(2, 0, 8, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple_, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_aiohttp__websocket_pyx, __pyx_n_s_websocket_mask_cython, 9, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__2)) __PYX_ERR(0, 9, __pyx_L1_error) __Pyx_RefNannyFinishContext(); return 0; __pyx_L1_error:; __Pyx_RefNannyFinishContext(); return -1; } static int __Pyx_InitGlobals(void) { if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); return 0; __pyx_L1_error:; return -1; } #if PY_MAJOR_VERSION < 3 PyMODINIT_FUNC init_websocket(void); /*proto*/ PyMODINIT_FUNC init_websocket(void) #else PyMODINIT_FUNC PyInit__websocket(void); /*proto*/ PyMODINIT_FUNC PyInit__websocket(void) #if CYTHON_PEP489_MULTI_PHASE_INIT { 
return PyModuleDef_Init(&__pyx_moduledef); } static int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name) { PyObject *value = PyObject_GetAttrString(spec, from_name); int result = 0; if (likely(value)) { result = PyDict_SetItemString(moddict, to_name, value); Py_DECREF(value); } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { PyErr_Clear(); } else { result = -1; } return result; } static PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { PyObject *module = NULL, *moddict, *modname; if (__pyx_m) return __Pyx_NewRef(__pyx_m); modname = PyObject_GetAttrString(spec, "name"); if (unlikely(!modname)) goto bad; module = PyModule_NewObject(modname); Py_DECREF(modname); if (unlikely(!module)) goto bad; moddict = PyModule_GetDict(module); if (unlikely(!moddict)) goto bad; if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__") < 0)) goto bad; if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__") < 0)) goto bad; if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__") < 0)) goto bad; if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__") < 0)) goto bad; return module; bad: Py_XDECREF(module); return NULL; } static int __pyx_pymod_exec__websocket(PyObject *__pyx_pyinit_module) #endif #endif { PyObject *__pyx_t_1 = NULL; __Pyx_RefNannyDeclarations #if CYTHON_PEP489_MULTI_PHASE_INIT if (__pyx_m && __pyx_m == __pyx_pyinit_module) return 0; #endif #if CYTHON_REFNANNY __Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); if (!__Pyx_RefNanny) { PyErr_Clear(); __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); if (!__Pyx_RefNanny) Py_FatalError("failed to import 'refnanny' module"); } #endif __Pyx_RefNannySetupContext("PyMODINIT_FUNC PyInit__websocket(void)", 0); if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_empty_tuple = PyTuple_New(0); if 
(unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) #ifdef __Pyx_CyFunction_USED if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_FusedFunction_USED if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_Coroutine_USED if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_Generator_USED if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_AsyncGen_USED if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif #ifdef __Pyx_StopAsyncIteration_USED if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif /*--- Library function declarations ---*/ /*--- Threads initialization code ---*/ #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS #ifdef WITH_THREAD /* Python build with threading support? 
*/ PyEval_InitThreads(); #endif #endif /*--- Module creation code ---*/ #if CYTHON_PEP489_MULTI_PHASE_INIT __pyx_m = __pyx_pyinit_module; Py_INCREF(__pyx_m); #else #if PY_MAJOR_VERSION < 3 __pyx_m = Py_InitModule4("_websocket", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); #else __pyx_m = PyModule_Create(&__pyx_moduledef); #endif if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) #endif __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) Py_INCREF(__pyx_d); __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) #if CYTHON_COMPILING_IN_PYPY Py_INCREF(__pyx_b); #endif if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); /*--- Initialize various global constants etc. ---*/ if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif if (__pyx_module_is_main_aiohttp___websocket) { if (PyObject_SetAttrString(__pyx_m, "__name__", __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) } #if PY_MAJOR_VERSION >= 3 { PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) if (!PyDict_GetItemString(modules, "aiohttp._websocket")) { if (unlikely(PyDict_SetItemString(modules, "aiohttp._websocket", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error) } } #endif /*--- Builtin init code ---*/ if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) /*--- Constants init code ---*/ if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) /*--- Global init code ---*/ /*--- Variable export code ---*/ /*--- Function export 
code ---*/ /*--- Type init code ---*/ /*--- Type import code ---*/ __pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__Pyx_BUILTIN_MODULE_NAME, "type", #if CYTHON_COMPILING_IN_PYPY sizeof(PyTypeObject), #else sizeof(PyHeapTypeObject), #endif 0); if (unlikely(!__pyx_ptype_7cpython_4type_type)) __PYX_ERR(1, 9, __pyx_L1_error) __pyx_ptype_7cpython_4bool_bool = __Pyx_ImportType(__Pyx_BUILTIN_MODULE_NAME, "bool", sizeof(PyBoolObject), 0); if (unlikely(!__pyx_ptype_7cpython_4bool_bool)) __PYX_ERR(2, 8, __pyx_L1_error) __pyx_ptype_7cpython_7complex_complex = __Pyx_ImportType(__Pyx_BUILTIN_MODULE_NAME, "complex", sizeof(PyComplexObject), 0); if (unlikely(!__pyx_ptype_7cpython_7complex_complex)) __PYX_ERR(3, 15, __pyx_L1_error) /*--- Variable import code ---*/ /*--- Function import code ---*/ /*--- Execution code ---*/ #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) #endif /* "aiohttp/_websocket.pyx":9 * from libc.stdint cimport uint32_t, uint64_t, uintmax_t * * def _websocket_mask_cython(object mask, object data): # <<<<<<<<<<<<<< * """Note, this function mutates it's `data` argument * """ */ __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_7aiohttp_10_websocket_1_websocket_mask_cython, NULL, __pyx_n_s_aiohttp__websocket); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 9, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); if (PyDict_SetItem(__pyx_d, __pyx_n_s_websocket_mask_cython, __pyx_t_1) < 0) __PYX_ERR(0, 9, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /* "aiohttp/_websocket.pyx":1 * from cpython cimport PyBytes_AsString # <<<<<<<<<<<<<< * * #from cpython cimport PyByteArray_AsString # cython still not exports that */ __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; /*--- Wrapped vars code ---*/ goto 
__pyx_L0; __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); if (__pyx_m) { if (__pyx_d) { __Pyx_AddTraceback("init aiohttp._websocket", 0, __pyx_lineno, __pyx_filename); } Py_DECREF(__pyx_m); __pyx_m = 0; } else if (!PyErr_Occurred()) { PyErr_SetString(PyExc_ImportError, "init aiohttp._websocket"); } __pyx_L0:; __Pyx_RefNannyFinishContext(); #if CYTHON_PEP489_MULTI_PHASE_INIT return (__pyx_m != NULL) ? 0 : -1; #elif PY_MAJOR_VERSION >= 3 return __pyx_m; #else return; #endif } /* --- Runtime support code --- */ /* Refnanny */ #if CYTHON_REFNANNY static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { PyObject *m = NULL, *p = NULL; void *r = NULL; m = PyImport_ImportModule((char *)modname); if (!m) goto end; p = PyObject_GetAttrString(m, (char *)"RefNannyAPI"); if (!p) goto end; r = PyLong_AsVoidPtr(p); end: Py_XDECREF(p); Py_XDECREF(m); return (__Pyx_RefNannyAPIStruct *)r; } #endif /* GetBuiltinName */ static PyObject *__Pyx_GetBuiltinName(PyObject *name) { PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); if (unlikely(!result)) { PyErr_Format(PyExc_NameError, #if PY_MAJOR_VERSION >= 3 "name '%U' is not defined", name); #else "name '%.200s' is not defined", PyString_AS_STRING(name)); #endif } return result; } /* RaiseArgTupleInvalid */ static void __Pyx_RaiseArgtupleInvalid( const char* func_name, int exact, Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found) { Py_ssize_t num_expected; const char *more_or_less; if (num_found < num_min) { num_expected = num_min; more_or_less = "at least"; } else { num_expected = num_max; more_or_less = "at most"; } if (exact) { more_or_less = "exactly"; } PyErr_Format(PyExc_TypeError, "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", func_name, more_or_less, num_expected, (num_expected == 1) ? 
"" : "s", num_found); } /* RaiseDoubleKeywords */ static void __Pyx_RaiseDoubleKeywordsError( const char* func_name, PyObject* kw_name) { PyErr_Format(PyExc_TypeError, #if PY_MAJOR_VERSION >= 3 "%s() got multiple values for keyword argument '%U'", func_name, kw_name); #else "%s() got multiple values for keyword argument '%s'", func_name, PyString_AsString(kw_name)); #endif } /* ParseKeywords */ static int __Pyx_ParseOptionalKeywords( PyObject *kwds, PyObject **argnames[], PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, const char* function_name) { PyObject *key = 0, *value = 0; Py_ssize_t pos = 0; PyObject*** name; PyObject*** first_kw_arg = argnames + num_pos_args; while (PyDict_Next(kwds, &pos, &key, &value)) { name = first_kw_arg; while (*name && (**name != key)) name++; if (*name) { values[name-argnames] = value; continue; } name = first_kw_arg; #if PY_MAJOR_VERSION < 3 if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) { while (*name) { if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) && _PyString_Eq(**name, key)) { values[name-argnames] = value; break; } name++; } if (*name) continue; else { PyObject*** argname = argnames; while (argname != first_kw_arg) { if ((**argname == key) || ( (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) && _PyString_Eq(**argname, key))) { goto arg_passed_twice; } argname++; } } } else #endif if (likely(PyUnicode_Check(key))) { while (*name) { int cmp = (**name == key) ? 0 : #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 : #endif PyUnicode_Compare(**name, key); if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; if (cmp == 0) { values[name-argnames] = value; break; } name++; } if (*name) continue; else { PyObject*** argname = argnames; while (argname != first_kw_arg) { int cmp = (**argname == key) ? 
0 : #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 : #endif PyUnicode_Compare(**argname, key); if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; if (cmp == 0) goto arg_passed_twice; argname++; } } } else goto invalid_keyword_type; if (kwds2) { if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; } else { goto invalid_keyword; } } return 0; arg_passed_twice: __Pyx_RaiseDoubleKeywordsError(function_name, key); goto bad; invalid_keyword_type: PyErr_Format(PyExc_TypeError, "%.200s() keywords must be strings", function_name); goto bad; invalid_keyword: PyErr_Format(PyExc_TypeError, #if PY_MAJOR_VERSION < 3 "%.200s() got an unexpected keyword argument '%.200s'", function_name, PyString_AsString(key)); #else "%s() got an unexpected keyword argument '%U'", function_name, key); #endif bad: return -1; } /* PyObjectCall */ #if CYTHON_COMPILING_IN_CPYTHON static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { PyObject *result; ternaryfunc call = func->ob_type->tp_call; if (unlikely(!call)) return PyObject_Call(func, arg, kw); if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) return NULL; result = (*call)(func, arg, kw); Py_LeaveRecursiveCall(); if (unlikely(!result) && unlikely(!PyErr_Occurred())) { PyErr_SetString( PyExc_SystemError, "NULL result without error in PyObject_Call"); } return result; } #endif /* PyErrFetchRestore */ #if CYTHON_FAST_THREAD_STATE static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { PyObject *tmp_type, *tmp_value, *tmp_tb; tmp_type = tstate->curexc_type; tmp_value = tstate->curexc_value; tmp_tb = tstate->curexc_traceback; tstate->curexc_type = type; tstate->curexc_value = value; tstate->curexc_traceback = tb; Py_XDECREF(tmp_type); Py_XDECREF(tmp_value); Py_XDECREF(tmp_tb); } static CYTHON_INLINE void 
__Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { *type = tstate->curexc_type; *value = tstate->curexc_value; *tb = tstate->curexc_traceback; tstate->curexc_type = 0; tstate->curexc_value = 0; tstate->curexc_traceback = 0; } #endif /* CLineInTraceback */ #ifndef CYTHON_CLINE_IN_TRACEBACK static int __Pyx_CLineForTraceback(CYTHON_UNUSED PyThreadState *tstate, int c_line) { PyObject *use_cline; PyObject *ptype, *pvalue, *ptraceback; #if CYTHON_COMPILING_IN_CPYTHON PyObject **cython_runtime_dict; #endif __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); #if CYTHON_COMPILING_IN_CPYTHON cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); if (likely(cython_runtime_dict)) { use_cline = PyDict_GetItem(*cython_runtime_dict, __pyx_n_s_cline_in_traceback); } else #endif { PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); if (use_cline_obj) { use_cline = PyObject_Not(use_cline_obj) ? 
Py_False : Py_True; Py_DECREF(use_cline_obj); } else { PyErr_Clear(); use_cline = NULL; } } if (!use_cline) { c_line = 0; PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); } else if (PyObject_Not(use_cline) != 0) { c_line = 0; } __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); return c_line; } #endif /* CodeObjectCache */ static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { int start = 0, mid = 0, end = count - 1; if (end >= 0 && code_line > entries[end].code_line) { return count; } while (start < end) { mid = start + (end - start) / 2; if (code_line < entries[mid].code_line) { end = mid; } else if (code_line > entries[mid].code_line) { start = mid + 1; } else { return mid; } } if (code_line <= entries[mid].code_line) { return mid; } else { return mid + 1; } } static PyCodeObject *__pyx_find_code_object(int code_line) { PyCodeObject* code_object; int pos; if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { return NULL; } pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { return NULL; } code_object = __pyx_code_cache.entries[pos].code_object; Py_INCREF(code_object); return code_object; } static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { int pos, i; __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; if (unlikely(!code_line)) { return; } if (unlikely(!entries)) { entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); if (likely(entries)) { __pyx_code_cache.entries = entries; __pyx_code_cache.max_count = 64; __pyx_code_cache.count = 1; entries[0].code_line = code_line; entries[0].code_object = code_object; Py_INCREF(code_object); } return; } pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); 
if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { PyCodeObject* tmp = entries[pos].code_object; entries[pos].code_object = code_object; Py_DECREF(tmp); return; } if (__pyx_code_cache.count == __pyx_code_cache.max_count) { int new_max = __pyx_code_cache.max_count + 64; entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( __pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry)); if (unlikely(!entries)) { return; } __pyx_code_cache.entries = entries; __pyx_code_cache.max_count = new_max; } for (i=__pyx_code_cache.count; i>pos; i--) { entries[i] = entries[i-1]; } entries[pos].code_line = code_line; entries[pos].code_object = code_object; __pyx_code_cache.count++; Py_INCREF(code_object); } /* AddTraceback */ #include "compile.h" #include "frameobject.h" #include "traceback.h" static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( const char *funcname, int c_line, int py_line, const char *filename) { PyCodeObject *py_code = 0; PyObject *py_srcfile = 0; PyObject *py_funcname = 0; #if PY_MAJOR_VERSION < 3 py_srcfile = PyString_FromString(filename); #else py_srcfile = PyUnicode_FromString(filename); #endif if (!py_srcfile) goto bad; if (c_line) { #if PY_MAJOR_VERSION < 3 py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); #else py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); #endif } else { #if PY_MAJOR_VERSION < 3 py_funcname = PyString_FromString(funcname); #else py_funcname = PyUnicode_FromString(funcname); #endif } if (!py_funcname) goto bad; py_code = __Pyx_PyCode_New( 0, 0, 0, 0, 0, __pyx_empty_bytes, /*PyObject *code,*/ __pyx_empty_tuple, /*PyObject *consts,*/ __pyx_empty_tuple, /*PyObject *names,*/ __pyx_empty_tuple, /*PyObject *varnames,*/ __pyx_empty_tuple, /*PyObject *freevars,*/ __pyx_empty_tuple, /*PyObject *cellvars,*/ py_srcfile, /*PyObject *filename,*/ py_funcname, /*PyObject *name,*/ py_line, __pyx_empty_bytes 
/*PyObject *lnotab*/ ); Py_DECREF(py_srcfile); Py_DECREF(py_funcname); return py_code; bad: Py_XDECREF(py_srcfile); Py_XDECREF(py_funcname); return NULL; } static void __Pyx_AddTraceback(const char *funcname, int c_line, int py_line, const char *filename) { PyCodeObject *py_code = 0; PyFrameObject *py_frame = 0; PyThreadState *tstate = __Pyx_PyThreadState_Current; if (c_line) { c_line = __Pyx_CLineForTraceback(tstate, c_line); } py_code = __pyx_find_code_object(c_line ? -c_line : py_line); if (!py_code) { py_code = __Pyx_CreateCodeObjectForTraceback( funcname, c_line, py_line, filename); if (!py_code) goto bad; __pyx_insert_code_object(c_line ? -c_line : py_line, py_code); } py_frame = PyFrame_New( tstate, /*PyThreadState *tstate,*/ py_code, /*PyCodeObject *code,*/ __pyx_d, /*PyObject *globals,*/ 0 /*PyObject *locals*/ ); if (!py_frame) goto bad; __Pyx_PyFrame_SetLineNumber(py_frame, py_line); PyTraceBack_Here(py_frame); bad: Py_XDECREF(py_code); Py_XDECREF(py_frame); } /* CIntToPy */ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { const long neg_one = (long) -1, const_zero = (long) 0; const int is_unsigned = neg_one > const_zero; if (is_unsigned) { if (sizeof(long) < sizeof(long)) { return PyInt_FromLong((long) value); } else if (sizeof(long) <= sizeof(unsigned long)) { return PyLong_FromUnsignedLong((unsigned long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); #endif } } else { if (sizeof(long) <= sizeof(long)) { return PyInt_FromLong((long) value); #ifdef HAVE_LONG_LONG } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { return PyLong_FromLongLong((PY_LONG_LONG) value); #endif } } { int one = 1; int little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&value; return _PyLong_FromByteArray(bytes, sizeof(long), little, !is_unsigned); } } /* CIntFromPyVerify */ #define __PYX_VERIFY_RETURN_INT(target_type, 
func_type, func_value)\ __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) #define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) #define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ {\ func_type value = func_value;\ if (sizeof(target_type) < sizeof(func_type)) {\ if (unlikely(value != (func_type) (target_type) value)) {\ func_type zero = 0;\ if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ return (target_type) -1;\ if (is_unsigned && unlikely(value < zero))\ goto raise_neg_overflow;\ else\ goto raise_overflow;\ }\ }\ return (target_type) value;\ } /* CIntFromPy */ static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { const long neg_one = (long) -1, const_zero = (long) 0; const int is_unsigned = neg_one > const_zero; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_Check(x))) { if (sizeof(long) < sizeof(long)) { __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) } else { long val = PyInt_AS_LONG(x); if (is_unsigned && unlikely(val < 0)) { goto raise_neg_overflow; } return (long) val; } } else #endif if (likely(PyLong_Check(x))) { if (is_unsigned) { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)x)->ob_digit; switch (Py_SIZE(x)) { case 0: return (long) 0; case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) case 2: if (8 * sizeof(long) > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); } } break; case 3: if (8 * sizeof(long) > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | 
(unsigned long)digits[0]))) } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); } } break; case 4: if (8 * sizeof(long) > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); } } break; } #endif #if CYTHON_COMPILING_IN_CPYTHON if (unlikely(Py_SIZE(x) < 0)) { goto raise_neg_overflow; } #else { int result = PyObject_RichCompareBool(x, Py_False, Py_LT); if (unlikely(result < 0)) return (long) -1; if (unlikely(result == 1)) goto raise_neg_overflow; } #endif if (sizeof(long) <= sizeof(unsigned long)) { __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) #ifdef HAVE_LONG_LONG } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) #endif } } else { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)x)->ob_digit; switch (Py_SIZE(x)) { case 0: return (long) 0; case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) case -2: if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case 2: if (8 * sizeof(long) > 1 * 
PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case -3: if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case 3: if (8 * sizeof(long) > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case -4: if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; case 4: if (8 * sizeof(long) > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | 
(unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); } } break; } #endif if (sizeof(long) <= sizeof(long)) { __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) #ifdef HAVE_LONG_LONG } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) #endif } } { #if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) PyErr_SetString(PyExc_RuntimeError, "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); #else long val; PyObject *v = __Pyx_PyNumber_IntOrLong(x); #if PY_MAJOR_VERSION < 3 if (likely(v) && !PyLong_Check(v)) { PyObject *tmp = v; v = PyNumber_Long(tmp); Py_DECREF(tmp); } #endif if (likely(v)) { int one = 1; int is_little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&val; int ret = _PyLong_AsByteArray((PyLongObject *)v, bytes, sizeof(val), is_little, !is_unsigned); Py_DECREF(v); if (likely(!ret)) return val; } #endif return (long) -1; } } else { long val; PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); if (!tmp) return (long) -1; val = __Pyx_PyInt_As_long(tmp); Py_DECREF(tmp); return val; } raise_overflow: PyErr_SetString(PyExc_OverflowError, "value too large to convert to long"); return (long) -1; raise_neg_overflow: PyErr_SetString(PyExc_OverflowError, "can't convert negative value to long"); return (long) -1; } /* CIntFromPy */ static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { const int neg_one = (int) -1, const_zero = (int) 0; const int is_unsigned = neg_one > const_zero; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_Check(x))) { if (sizeof(int) < sizeof(long)) { __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) } else { long val = PyInt_AS_LONG(x); if (is_unsigned 
&& unlikely(val < 0)) { goto raise_neg_overflow; } return (int) val; } } else #endif if (likely(PyLong_Check(x))) { if (is_unsigned) { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)x)->ob_digit; switch (Py_SIZE(x)) { case 0: return (int) 0; case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) case 2: if (8 * sizeof(int) > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); } } break; case 3: if (8 * sizeof(int) > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); } } break; case 4: if (8 * sizeof(int) > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); } } break; } #endif #if CYTHON_COMPILING_IN_CPYTHON if (unlikely(Py_SIZE(x) < 0)) { goto raise_neg_overflow; } #else { int result = PyObject_RichCompareBool(x, Py_False, Py_LT); if (unlikely(result < 0)) return (int) -1; if (unlikely(result == 1)) goto raise_neg_overflow; } #endif if (sizeof(int) <= sizeof(unsigned long)) { __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, 
PyLong_AsUnsignedLong(x)) #ifdef HAVE_LONG_LONG } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) #endif } } else { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)x)->ob_digit; switch (Py_SIZE(x)) { case 0: return (int) 0; case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) case -2: if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case 2: if (8 * sizeof(int) > 1 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case -3: if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case 3: if (8 * sizeof(int) > 2 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { return (int) 
((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case -4: if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; case 4: if (8 * sizeof(int) > 3 * PyLong_SHIFT) { if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); } } break; } #endif if (sizeof(int) <= sizeof(long)) { __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) #ifdef HAVE_LONG_LONG } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) #endif } } { #if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) PyErr_SetString(PyExc_RuntimeError, "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); #else int val; PyObject *v = __Pyx_PyNumber_IntOrLong(x); #if PY_MAJOR_VERSION < 3 if (likely(v) && !PyLong_Check(v)) { PyObject *tmp = v; v = PyNumber_Long(tmp); Py_DECREF(tmp); } #endif if (likely(v)) { int one = 1; int is_little = (int)*(unsigned char *)&one; unsigned char *bytes = (unsigned char *)&val; int ret = _PyLong_AsByteArray((PyLongObject *)v, bytes, sizeof(val), is_little, 
!is_unsigned); Py_DECREF(v); if (likely(!ret)) return val; } #endif return (int) -1; } } else { int val; PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); if (!tmp) return (int) -1; val = __Pyx_PyInt_As_int(tmp); Py_DECREF(tmp); return val; } raise_overflow: PyErr_SetString(PyExc_OverflowError, "value too large to convert to int"); return (int) -1; raise_neg_overflow: PyErr_SetString(PyExc_OverflowError, "can't convert negative value to int"); return (int) -1; } /* FastTypeChecks */ #if CYTHON_COMPILING_IN_CPYTHON static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { while (a) { a = a->tp_base; if (a == b) return 1; } return b == &PyBaseObject_Type; } static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { PyObject *mro; if (a == b) return 1; mro = a->tp_mro; if (likely(mro)) { Py_ssize_t i, n; n = PyTuple_GET_SIZE(mro); for (i = 0; i < n; i++) { if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) return 1; } return 0; } return __Pyx_InBases(a, b); } #if PY_MAJOR_VERSION == 2 static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { PyObject *exception, *value, *tb; int res; __Pyx_PyThreadState_declare __Pyx_PyThreadState_assign __Pyx_ErrFetch(&exception, &value, &tb); res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0; if (unlikely(res == -1)) { PyErr_WriteUnraisable(err); res = 0; } if (!res) { res = PyObject_IsSubclass(err, exc_type2); if (unlikely(res == -1)) { PyErr_WriteUnraisable(err); res = 0; } } __Pyx_ErrRestore(exception, value, tb); return res; } #else static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { int res = exc_type1 ? 
__Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; if (!res) { res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); } return res; } #endif static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject* exc_type) { if (likely(err == exc_type)) return 1; if (likely(PyExceptionClass_Check(err))) { return __Pyx_inner_PyErr_GivenExceptionMatches2(err, NULL, exc_type); } return PyErr_GivenExceptionMatches(err, exc_type); } static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *exc_type1, PyObject *exc_type2) { if (likely(err == exc_type1 || err == exc_type2)) return 1; if (likely(PyExceptionClass_Check(err))) { return __Pyx_inner_PyErr_GivenExceptionMatches2(err, exc_type1, exc_type2); } return (PyErr_GivenExceptionMatches(err, exc_type1) || PyErr_GivenExceptionMatches(err, exc_type2)); } #endif /* CheckBinaryVersion */ static int __Pyx_check_binary_version(void) { char ctversion[4], rtversion[4]; PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION); PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion()); if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) { char message[200]; PyOS_snprintf(message, sizeof(message), "compiletime version %s of module '%.100s' " "does not match runtime version %s", ctversion, __Pyx_MODULE_NAME, rtversion); return PyErr_WarnEx(NULL, message, 1); } return 0; } /* ModuleImport */ #ifndef __PYX_HAVE_RT_ImportModule #define __PYX_HAVE_RT_ImportModule static PyObject *__Pyx_ImportModule(const char *name) { PyObject *py_name = 0; PyObject *py_module = 0; py_name = __Pyx_PyIdentifier_FromString(name); if (!py_name) goto bad; py_module = PyImport_Import(py_name); Py_DECREF(py_name); return py_module; bad: Py_XDECREF(py_name); return 0; } #endif /* TypeImport */ #ifndef __PYX_HAVE_RT_ImportType #define __PYX_HAVE_RT_ImportType static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, size_t size, int strict) { 
PyObject *py_module = 0; PyObject *result = 0; PyObject *py_name = 0; char warning[200]; Py_ssize_t basicsize; #ifdef Py_LIMITED_API PyObject *py_basicsize; #endif py_module = __Pyx_ImportModule(module_name); if (!py_module) goto bad; py_name = __Pyx_PyIdentifier_FromString(class_name); if (!py_name) goto bad; result = PyObject_GetAttr(py_module, py_name); Py_DECREF(py_name); py_name = 0; Py_DECREF(py_module); py_module = 0; if (!result) goto bad; if (!PyType_Check(result)) { PyErr_Format(PyExc_TypeError, "%.200s.%.200s is not a type object", module_name, class_name); goto bad; } #ifndef Py_LIMITED_API basicsize = ((PyTypeObject *)result)->tp_basicsize; #else py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); if (!py_basicsize) goto bad; basicsize = PyLong_AsSsize_t(py_basicsize); Py_DECREF(py_basicsize); py_basicsize = 0; if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) goto bad; #endif if (!strict && (size_t)basicsize > size) { PyOS_snprintf(warning, sizeof(warning), "%s.%s size changed, may indicate binary incompatibility. Expected %zd, got %zd", module_name, class_name, basicsize, size); if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; } else if ((size_t)basicsize != size) { PyErr_Format(PyExc_ValueError, "%.200s.%.200s has the wrong size, try recompiling. 
Expected %zd, got %zd", module_name, class_name, basicsize, size); goto bad; } return (PyTypeObject *)result; bad: Py_XDECREF(py_module); Py_XDECREF(result); return NULL; } #endif /* InitStrings */ static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { while (t->p) { #if PY_MAJOR_VERSION < 3 if (t->is_unicode) { *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); } else if (t->intern) { *t->p = PyString_InternFromString(t->s); } else { *t->p = PyString_FromStringAndSize(t->s, t->n - 1); } #else if (t->is_unicode | t->is_str) { if (t->intern) { *t->p = PyUnicode_InternFromString(t->s); } else if (t->encoding) { *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); } else { *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); } } else { *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); } #endif if (!*t->p) return -1; if (PyObject_Hash(*t->p) == -1) PyErr_Clear(); ++t; } return 0; } static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); } static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { Py_ssize_t ignore; return __Pyx_PyObject_AsStringAndSize(o, &ignore); } #if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT #if !CYTHON_PEP393_ENABLED static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { char* defenc_c; PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); if (!defenc) return NULL; defenc_c = PyBytes_AS_STRING(defenc); #if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII { char* end = defenc_c + PyBytes_GET_SIZE(defenc); char* c; for (c = defenc_c; c < end; c++) { if ((unsigned char) (*c) >= 128) { PyUnicode_AsASCIIString(o); return NULL; } } } #endif *length = PyBytes_GET_SIZE(defenc); return defenc_c; } #else static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; #if 
__PYX_DEFAULT_STRING_ENCODING_IS_ASCII if (likely(PyUnicode_IS_ASCII(o))) { *length = PyUnicode_GET_LENGTH(o); return PyUnicode_AsUTF8(o); } else { PyUnicode_AsASCIIString(o); return NULL; } #else return PyUnicode_AsUTF8AndSize(o, length); #endif } #endif #endif static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { #if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT if ( #if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII __Pyx_sys_getdefaultencoding_not_ascii && #endif PyUnicode_Check(o)) { return __Pyx_PyUnicode_AsStringAndSize(o, length); } else #endif #if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) if (PyByteArray_Check(o)) { *length = PyByteArray_GET_SIZE(o); return PyByteArray_AS_STRING(o); } else #endif { char* result; int r = PyBytes_AsStringAndSize(o, &result, length); if (unlikely(r < 0)) { return NULL; } else { return result; } } } static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { int is_true = x == Py_True; if (is_true | (x == Py_False) | (x == Py_None)) return is_true; else return PyObject_IsTrue(x); } static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { #if PY_MAJOR_VERSION >= 3 if (PyLong_Check(result)) { if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, "__int__ returned non-int (type %.200s). 
" "The ability to return an instance of a strict subclass of int " "is deprecated, and may be removed in a future version of Python.", Py_TYPE(result)->tp_name)) { Py_DECREF(result); return NULL; } return result; } #endif PyErr_Format(PyExc_TypeError, "__%.4s__ returned non-%.4s (type %.200s)", type_name, type_name, Py_TYPE(result)->tp_name); Py_DECREF(result); return NULL; } static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { #if CYTHON_USE_TYPE_SLOTS PyNumberMethods *m; #endif const char *name = NULL; PyObject *res = NULL; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_Check(x) || PyLong_Check(x))) #else if (likely(PyLong_Check(x))) #endif return __Pyx_NewRef(x); #if CYTHON_USE_TYPE_SLOTS m = Py_TYPE(x)->tp_as_number; #if PY_MAJOR_VERSION < 3 if (m && m->nb_int) { name = "int"; res = m->nb_int(x); } else if (m && m->nb_long) { name = "long"; res = m->nb_long(x); } #else if (likely(m && m->nb_int)) { name = "int"; res = m->nb_int(x); } #endif #else if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { res = PyNumber_Int(x); } #endif if (likely(res)) { #if PY_MAJOR_VERSION < 3 if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { #else if (unlikely(!PyLong_CheckExact(res))) { #endif return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); } } else if (!PyErr_Occurred()) { PyErr_SetString(PyExc_TypeError, "an integer is required"); } return res; } static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { Py_ssize_t ival; PyObject *x; #if PY_MAJOR_VERSION < 3 if (likely(PyInt_CheckExact(b))) { if (sizeof(Py_ssize_t) >= sizeof(long)) return PyInt_AS_LONG(b); else return PyInt_AsSsize_t(x); } #endif if (likely(PyLong_CheckExact(b))) { #if CYTHON_USE_PYLONG_INTERNALS const digit* digits = ((PyLongObject*)b)->ob_digit; const Py_ssize_t size = Py_SIZE(b); if (likely(__Pyx_sst_abs(size) <= 1)) { ival = likely(size) ? 
digits[0] : 0; if (size == -1) ival = -ival; return ival; } else { switch (size) { case 2: if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case -2: if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case 3: if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case -3: if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case 4: if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; case -4: if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); } break; } } #endif return PyLong_AsSsize_t(b); } x = PyNumber_Index(b); if (!x) return -1; ival = PyInt_AsSsize_t(x); Py_DECREF(x); return ival; } static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { return PyInt_FromSize_t(ival); } #endif /* Py_PYTHON_H */ aiohttp-3.0.1/aiohttp/_websocket.pyx0000666000000000000000000000303013240304665015653 0ustar 00000000000000from cpython cimport PyBytes_AsString #from cpython cimport PyByteArray_AsString # cython still not exports that cdef extern from "Python.h": char* PyByteArray_AsString(bytearray ba) except NULL from libc.stdint cimport uint32_t, uint64_t, uintmax_t def _websocket_mask_cython(object mask, object data): """Note, this function mutates it's `data` argument """ cdef: Py_ssize_t data_len, i # bit 
operations on signed integers are implementation-specific unsigned char * in_buf const unsigned char * mask_buf uint32_t uint32_msk uint64_t uint64_msk assert len(mask) == 4 if not isinstance(mask, bytes): mask = bytes(mask) if isinstance(data, bytearray): data = data else: data = bytearray(data) data_len = len(data) in_buf = PyByteArray_AsString(data) mask_buf = PyBytes_AsString(mask) uint32_msk = (mask_buf)[0] # TODO: align in_data ptr to achieve even faster speeds # does it need in python ?! malloc() always aligns to sizeof(long) bytes if sizeof(size_t) >= 8: uint64_msk = uint32_msk uint64_msk = (uint64_msk << 32) | uint32_msk while data_len >= 8: (in_buf)[0] ^= uint64_msk in_buf += 8 data_len -= 8 while data_len >= 4: (in_buf)[0] ^= uint32_msk in_buf += 4 data_len -= 4 for i in range(0, data_len): in_buf[i] ^= mask_buf[i] aiohttp-3.0.1/aiohttp/__init__.py0000666000000000000000000000254713240304665015111 0ustar 00000000000000__version__ = '3.0.1' # This relies on each of the submodules having an __all__ variable. from . 
import hdrs # noqa from .client import * # noqa from .cookiejar import * # noqa from .formdata import * # noqa from .helpers import * # noqa from .http import (HttpVersion, HttpVersion10, HttpVersion11, # noqa WSMsgType, WSCloseCode, WSMessage, WebSocketError) # noqa from .multipart import * # noqa from .payload import * # noqa from .payload_streamer import * # noqa from .resolver import * # noqa from .signals import * # noqa from .streams import * # noqa from .tracing import * # noqa try: from .worker import GunicornWebWorker, GunicornUVLoopWebWorker # noqa workers = ('GunicornWebWorker', 'GunicornUVLoopWebWorker') except ImportError: # pragma: no cover workers = () __all__ = (client.__all__ + # noqa cookiejar.__all__ + # noqa formdata.__all__ + # noqa helpers.__all__ + # noqa multipart.__all__ + # noqa payload.__all__ + # noqa payload_streamer.__all__ + # noqa streams.__all__ + # noqa signals.__all__ + # noqa tracing.__all__ + # noqa ('hdrs', 'HttpVersion', 'HttpVersion10', 'HttpVersion11', 'WSMsgType', 'WSCloseCode', 'WebSocketError', 'WSMessage', ) + workers) aiohttp-3.0.1/aiohttp.egg-info/0000777000000000000000000000000013240305035014452 5ustar 00000000000000aiohttp-3.0.1/aiohttp.egg-info/dependency_links.txt0000666000000000000000000000000113240305035020520 0ustar 00000000000000 aiohttp-3.0.1/aiohttp.egg-info/PKG-INFO0000666000000000000000000003472513240305035015562 0ustar 00000000000000Metadata-Version: 1.2 Name: aiohttp Version: 3.0.1 Summary: Async http client/server framework (asyncio) Home-page: https://github.com/aio-libs/aiohttp/ Author: Nikolay Kim , Andrew Svetlov Author-email: aio-libs@googlegroups.com License: Apache 2 Description-Content-Type: UNKNOWN Description: ================================== Async http client/server framework ================================== .. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/_static/aiohttp-icon-128x128.png :height: 64px :width: 64px :alt: aiohttp logo .. 
image:: https://travis-ci.org/aio-libs/aiohttp.svg?branch=master :target: https://travis-ci.org/aio-libs/aiohttp :align: right :alt: Travis status for master branch .. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg :target: https://codecov.io/gh/aio-libs/aiohttp :alt: codecov.io status for master branch .. image:: https://badge.fury.io/py/aiohttp.svg :target: https://badge.fury.io/py/aiohttp :alt: Latest PyPI package version .. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest :target: http://docs.aiohttp.org/ :alt: Latest Read The Docs .. image:: https://badges.gitter.im/Join%20Chat.svg :target: https://gitter.im/aio-libs/Lobby :alt: Chat on Gitter Key Features ============ - Supports both client and server side of HTTP protocol. - Supports both client and server Web-Sockets out-of-the-box. - Web-server has middlewares and pluggable routing. Getting started =============== Client ------ To retrieve something from the web: .. code-block:: python import aiohttp import asyncio import async_timeout async def fetch(session, url): async with async_timeout.timeout(10): async with session.get(url) as response: return await response.text() async def main(): async with aiohttp.ClientSession() as session: html = await fetch(session, 'http://python.org') print(html) if __name__ == '__main__': loop = asyncio.get_event_loop() loop.run_until_complete(main()) Server ------ This is simple usage example: .. 
code-block:: python from aiohttp import web async def handle(request): name = request.match_info.get('name', "Anonymous") text = "Hello, " + name return web.Response(text=text) async def wshandler(request): ws = web.WebSocketResponse() await ws.prepare(request) async for msg in ws: if msg.type == web.MsgType.text: await ws.send_str("Hello, {}".format(msg.data)) elif msg.type == web.MsgType.binary: await ws.send_bytes(msg.data) elif msg.type == web.MsgType.close: break return ws app = web.Application() app.router.add_get('/echo', wshandler) app.router.add_get('/', handle) app.router.add_get('/{name}', handle) web.run_app(app) Documentation ============= https://aiohttp.readthedocs.io/ External links ============== * `Third party libraries `_ * `Built with aiohttp `_ * `Powered by aiohttp `_ Feel free to make a Pull Request for adding your link to these pages! Communication channels ====================== *aio-libs* google group: https://groups.google.com/forum/#!forum/aio-libs Feel free to post your questions and ideas here. *gitter chat* https://gitter.im/aio-libs/Lobby We support `Stack Overflow `_. Please add *aiohttp* tag to your question there. Requirements ============ - Python >= 3.5.3 - async-timeout_ - chardet_ - multidict_ - yarl_ Optionally you may install the cChardet_ and aiodns_ libraries (highly recommended for sake of speed). .. _chardet: https://pypi.python.org/pypi/chardet .. _aiodns: https://pypi.python.org/pypi/aiodns .. _multidict: https://pypi.python.org/pypi/multidict .. _yarl: https://pypi.python.org/pypi/yarl .. _async-timeout: https://pypi.python.org/pypi/async_timeout .. _cChardet: https://pypi.python.org/pypi/cchardet License ======= ``aiohttp`` is offered under the Apache 2 license. Keepsafe ======== The aiohttp community would like to thank Keepsafe (https://www.getkeepsafe.com) for it's support in the early days of the project. 
Source code =========== The latest developer version is available in a github repository: https://github.com/aio-libs/aiohttp Benchmarks ========== If you are interested in by efficiency, AsyncIO community maintains a list of benchmarks on the official wiki: https://github.com/python/asyncio/wiki/Benchmarks ========= Changelog ========= .. You should *NOT* be adding new change log entries to this file, this file is managed by towncrier. You *may* edit previous change logs to fix problems like typo corrections or such. To add a new change log entry, please see https://pip.pypa.io/en/latest/development/#adding-a-news-entry we named the news folder "changes". WARNING: Don't drop the next directive! .. towncrier release notes start 3.0.0 (2018-02-12) ================== Features -------- - Speed up the `PayloadWriter.write` method for large request bodies. (#2126) - StreamResponse and Response are now MutableMappings. (#2246) - ClientSession publishes a set of signals to track the HTTP request execution. (#2313) - Content-Disposition fast access in ClientResponse (#2455) - Added support to Flask-style decorators with class-based Views. (#2472) - Signal handlers (registered callbacks) should be coroutines. (#2480) - Support ``async with test_client.ws_connect(...)`` (#2525) - Introduce *site* and *application runner* as underlying API for `web.run_app` implementation. (#2530) - Only quote multipart boundary when necessary and sanitize input (#2544) - Make the `aiohttp.ClientResponse.get_encoding` method public with the processing of invalid charset while detecting content encoding. (#2549) - Add optional configurable per message compression for `ClientWebSocketResponse` and `WebSocketResponse`. (#2551) - Add hysteresis to `StreamReader` to prevent flipping between paused and resumed states too often. 
(#2555) - Support `.netrc` by `trust_env` (#2581) - Avoid to create a new resource when adding a route with the same name and path of the last added resource (#2586) - `MultipartWriter.boundary` is `str` now. (#2589) - Allow a custom port to be used by `TestServer` (and associated pytest fixtures) (#2613) - Add param access_log_class to web.run_app function (#2615) - Add ``ssl`` parameter to client API (#2626) - Fixes performance issue introduced by #2577. When there are no middlewares installed by the user, no additional and useless code is executed. (#2629) - Rename PayloadWriter to StreamWriter (#2654) - New options *reuse_port*, *reuse_address* are added to `run_app` and `TCPSite`. (#2679) - Use custom classes to pass client signals parameters (#2686) - Use ``attrs`` library for data classes, replace `namedtuple`. (#2690) - Pytest fixtures renaming, add ``aiohttp_`` prefix (#2578) - Add ``aiohttp-`` prefix for ``pytest-aiohttp`` command line parameters (#2578) Bugfixes -------- - Correctly process upgrade request from server to HTTP2. ``aiohttp`` does not support HTTP2 yet, the protocol is not upgraded but response is handled correctly. (#2277) - Fix ClientConnectorSSLError and ClientProxyConnectionError for proxy connector (#2408) - Fix connector convert OSError to ClientConnectorError (#2423) - Fix connection attempts for multiple dns hosts (#2424) - Fix writing to closed transport by raising `asyncio.CancelledError` (#2499) - Fix warning in `ClientSession.__del__` by stopping to try to close it. (#2523) - Fixed race-condition for iterating addresses from the DNSCache. 
(#2620) - Fix default value of `access_log_format` argument in `web.run_app` (#2649) - Freeze sub-application on adding to parent app (#2656) - Do percent encoding for `.url_for()` parameters (#2668) - Correctly process request start time and multiple request/response headers in access log extra (#2641) Improved Documentation ---------------------- - Improve tutorial docs, using `literalinclude` to link to the actual files. (#2396) - Small improvement docs: better example for file uploads. (#2401) - Rename `from_env` to `trust_env` in client reference. (#2451) - Fixed mistype in `Proxy Support` section where `trust_env` parameter was used in `session.get("http://python.org", trust_env=True)` method instead of aiohttp.ClientSession constructor as follows: `aiohttp.ClientSession(trust_env=True)`. (#2688) - Fix issue with unittest example not compiling in testing docs. (#2717) Deprecations and Removals ------------------------- - Simplify HTTP pipelining implementation (#2109) - Drop `StreamReaderPayload` and `DataQueuePayload`. (#2257) - Drop `md5` and `sha1` finger-prints (#2267) - Drop WSMessage.tp (#2321) - Drop Python 3.4 and Python 3.5.0, 3.5.1, 3.5.2. Minimal supported Python versions are 3.5.3 and 3.6.0. `yield from` is gone, use `async/await` syntax. (#2343) - Drop `aiohttp.Timeout` and use `async_timeout.timeout` instead. (#2348) - Drop `resolve` param from TCPConnector. (#2377) - Add DeprecationWarning for returning HTTPException (#2415) - `send_str()`, `send_bytes()`, `send_json()`, `ping()` and `pong()` are genuine async functions now. (#2475) - Drop undocumented `app.on_pre_signal` and `app.on_post_signal`. Signal handlers should be coroutines, support for regular functions is dropped. (#2480) - `StreamResponse.drain()` is not a part of public API anymore, just use `await StreamResponse.write()`. `StreamResponse.write` is converted to async function. (#2483) - Drop deprecated `slow_request_timeout` param and `**kwargs`` from `RequestHandler`. 
(#2500) - Drop deprecated `resource.url()`. (#2501) - Remove `%u` and `%l` format specifiers from access log format. (#2506) - Drop deprecated `request.GET` property. (#2547) - Simplify stream classes: drop `ChunksQueue` and `FlowControlChunksQueue`, merge `FlowControlStreamReader` functionality into `StreamReader`, drop `FlowControlStreamReader` name. (#2555) - Do not create a new resource on `router.add_get(..., allow_head=True)` (#2585) - Drop access to TCP tuning options from PayloadWriter and Response classes (#2604) - Drop deprecated `encoding` parameter from client API (#2606) - Deprecate ``verify_ssl``, ``ssl_context`` and ``fingerprint`` parameters in client API (#2626) - Get rid of the legacy class StreamWriter. (#2651) - Forbid non-strings in `resource.url_for()` parameters. (#2668) - Deprecate inheritance from ``ClientSession`` and ``web.Application`` and custom user attributes for ``ClientSession``, ``web.Request`` and ``web.Application`` (#2691) - Drop `resp = await aiohttp.request(...)` syntax for sake of `async with aiohttp.request(...) as resp:`. (#2540) - Forbid synchronous context managers for `ClientSession` and test server/client. 
(#2362) Misc ---- - #2552 Platform: UNKNOWN Classifier: License :: OSI Approved :: Apache Software License Classifier: Intended Audience :: Developers Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Development Status :: 5 - Production/Stable Classifier: Operating System :: POSIX Classifier: Operating System :: MacOS :: MacOS X Classifier: Operating System :: Microsoft :: Windows Classifier: Topic :: Internet :: WWW/HTTP Classifier: Framework :: AsyncIO Requires-Python: >=3.5.3 aiohttp-3.0.1/aiohttp.egg-info/requires.txt0000666000000000000000000000015113240305035017047 0ustar 00000000000000attrs>=17.4.0 chardet<4.0,>=2.0 multidict<5.0,>=4.0 async_timeout<3.0,>=1.2 yarl<2.0,>=1.0 idna-ssl>=1.0 aiohttp-3.0.1/aiohttp.egg-info/SOURCES.txt0000666000000000000000000001520013240305035016334 0ustar 00000000000000.appveyor.yml .gitignore .pyup.yml .readthedocs.yml .travis.yml CHANGES.rst CONTRIBUTING.rst CONTRIBUTORS.txt HISTORY.rst LICENSE.txt MANIFEST.in Makefile README.rst codecov.yml pyproject.toml pytest.ini setup.cfg setup.py tox.ini .github/ISSUE_TEMPLATE.md .github/PULL_REQUEST_TEMPLATE.md CHANGES/.gitignore aiohttp/__init__.py aiohttp/_cparser.pxd aiohttp/_frozenlist.c aiohttp/_frozenlist.pyx aiohttp/_http_parser.c aiohttp/_http_parser.pyx aiohttp/_websocket.c aiohttp/_websocket.pyx aiohttp/abc.py aiohttp/client.py aiohttp/client_exceptions.py aiohttp/client_proto.py aiohttp/client_reqrep.py aiohttp/client_ws.py aiohttp/connector.py aiohttp/cookiejar.py aiohttp/formdata.py aiohttp/frozenlist.py aiohttp/hdrs.py aiohttp/helpers.py aiohttp/http.py aiohttp/http_exceptions.py aiohttp/http_parser.py aiohttp/http_websocket.py aiohttp/http_writer.py aiohttp/locks.py aiohttp/log.py aiohttp/multipart.py aiohttp/payload.py aiohttp/payload_streamer.py aiohttp/pytest_plugin.py aiohttp/resolver.py aiohttp/signals.py 
aiohttp/streams.py aiohttp/tcp_helpers.py aiohttp/test_utils.py aiohttp/tracing.py aiohttp/web.py aiohttp/web_app.py aiohttp/web_exceptions.py aiohttp/web_fileresponse.py aiohttp/web_middlewares.py aiohttp/web_protocol.py aiohttp/web_request.py aiohttp/web_response.py aiohttp/web_runner.py aiohttp/web_server.py aiohttp/web_urldispatcher.py aiohttp/web_ws.py aiohttp/worker.py aiohttp.egg-info/PKG-INFO aiohttp.egg-info/SOURCES.txt aiohttp.egg-info/dependency_links.txt aiohttp.egg-info/requires.txt aiohttp.egg-info/top_level.txt demos/README.rst demos/chat/setup.py demos/chat/aiohttpdemo_chat/__init__.py demos/chat/aiohttpdemo_chat/main.py demos/chat/aiohttpdemo_chat/views.py demos/chat/aiohttpdemo_chat/templates/index.html demos/polls/Makefile demos/polls/README.rst demos/polls/requirements.txt demos/polls/setup.py demos/polls/tox.ini demos/polls/aiohttpdemo_polls/__init__.py demos/polls/aiohttpdemo_polls/__main__.py demos/polls/aiohttpdemo_polls/db.py demos/polls/aiohttpdemo_polls/main.py demos/polls/aiohttpdemo_polls/middlewares.py demos/polls/aiohttpdemo_polls/routes.py demos/polls/aiohttpdemo_polls/utils.py demos/polls/aiohttpdemo_polls/views.py demos/polls/aiohttpdemo_polls/static/style.css demos/polls/aiohttpdemo_polls/templates/404.html demos/polls/aiohttpdemo_polls/templates/500.html demos/polls/aiohttpdemo_polls/templates/base.html demos/polls/aiohttpdemo_polls/templates/detail.html demos/polls/aiohttpdemo_polls/templates/index.html demos/polls/aiohttpdemo_polls/templates/results.html demos/polls/config/polls.yaml demos/polls/images/example.png demos/polls/sql/create_tables.sql demos/polls/sql/install.sh demos/polls/sql/sample_data.sql demos/polls/tests/conftest.py demos/polls/tests/test_integration.py docs/Makefile docs/abc.rst docs/aiohttp-icon.svg docs/aiohttp-plain.svg docs/built_with.rst docs/changes.rst docs/client.rst docs/client_advanced.rst docs/client_quickstart.rst docs/client_reference.rst docs/conf.py docs/contributing.rst docs/deployment.rst 
docs/essays.rst docs/external.rst docs/faq.rst docs/favicon.ico docs/glossary.rst docs/index.rst docs/logging.rst docs/make.bat docs/migration_to_2xx.rst docs/misc.rst docs/multipart.rst docs/multipart_reference.rst docs/new_router.rst docs/old-logo.svg docs/powered_by.rst docs/signals.rst docs/spelling_wordlist.txt docs/streams.rst docs/testing.rst docs/third_party.rst docs/tracing_reference.rst docs/tutorial.rst docs/utilities.rst docs/web.rst docs/web_advanced.rst docs/web_lowlevel.rst docs/web_quickstart.rst docs/web_reference.rst docs/websocket_utilities.rst docs/whats_new_1_1.rst docs/whats_new_3_0.rst docs/_static/aiohttp-icon-128x128.png examples/background_tasks.py examples/basic_srv.py examples/cli_app.py examples/client_auth.py examples/client_json.py examples/client_ws.py examples/curl.py examples/fake_server.py examples/lowlevel_srv.py examples/server.crt examples/server.csr examples/server.key examples/static_files.py examples/web_classview1.py examples/web_cookies.py examples/web_rewrite_headers_middleware.py examples/web_srv.py examples/web_srv_route_deco.py examples/web_srv_route_table.py examples/web_ws.py examples/websocket.html examples/legacy/crawl.py examples/legacy/srv.py examples/legacy/tcp_protocol_parser.py requirements/ci-wheel.txt requirements/ci.txt requirements/dev.txt requirements/doc-spelling.txt requirements/doc.txt requirements/wheel.txt tests/aiohttp.jpg tests/aiohttp.png tests/conftest.py tests/data.unknown_mime_type tests/hello.txt.gz tests/sample.crt tests/sample.crt.der tests/sample.key tests/test_classbasedview.py tests/test_client_connection.py tests/test_client_exceptions.py tests/test_client_fingerprint.py tests/test_client_functional.py tests/test_client_proto.py tests/test_client_request.py tests/test_client_response.py tests/test_client_session.py tests/test_client_ws.py tests/test_client_ws_functional.py tests/test_connector.py tests/test_cookiejar.py tests/test_flowcontrol_streams.py tests/test_formdata.py 
tests/test_frozenlist.py tests/test_helpers.py tests/test_http_exceptions.py tests/test_http_parser.py tests/test_http_writer.py tests/test_locks.py tests/test_loop.py tests/test_multipart.py tests/test_payload.py tests/test_proxy.py tests/test_proxy_functional.py tests/test_pytest_plugin.py tests/test_resolver.py tests/test_route_def.py tests/test_run_app.py tests/test_signals.py tests/test_streams.py tests/test_tcp_helpers.py tests/test_test_utils.py tests/test_tracing.py tests/test_urldispatch.py tests/test_web_app.py tests/test_web_cli.py tests/test_web_exceptions.py tests/test_web_functional.py tests/test_web_middleware.py tests/test_web_protocol.py tests/test_web_request.py tests/test_web_request_handler.py tests/test_web_response.py tests/test_web_runner.py tests/test_web_sendfile.py tests/test_web_sendfile_functional.py tests/test_web_server.py tests/test_web_urldispatcher.py tests/test_web_websocket.py tests/test_web_websocket_functional.py tests/test_websocket_handshake.py tests/test_websocket_parser.py tests/test_websocket_writer.py tests/test_worker.py tests/autobahn/client.py tests/autobahn/fuzzingclient.json tests/autobahn/fuzzingserver.json tests/autobahn/server.py tools/build-wheels.sh tools/build.cmd tools/check_changes.py tools/drop_merged_branches.sh tools/run_docker.sh vendor/http-parser/http_parser.c vendor/http-parser/.gitignore vendor/http-parser/.mailmap vendor/http-parser/.travis.yml vendor/http-parser/AUTHORS vendor/http-parser/LICENSE-MIT vendor/http-parser/Makefile vendor/http-parser/README.md vendor/http-parser/bench.c vendor/http-parser/http_parser.c vendor/http-parser/http_parser.gyp vendor/http-parser/http_parser.h vendor/http-parser/test.c vendor/http-parser/contrib/parsertrace.c vendor/http-parser/contrib/url_parser.caiohttp-3.0.1/aiohttp.egg-info/top_level.txt0000666000000000000000000000001013240305035017173 0ustar 00000000000000aiohttp aiohttp-3.0.1/CHANGES/0000777000000000000000000000000013240305035012360 5ustar 
00000000000000aiohttp-3.0.1/CHANGES/.gitignore0000666000000000000000000000001413240304665014353 0ustar 00000000000000!.gitignore aiohttp-3.0.1/CHANGES.rst0000666000000000000000000001442513240304665013130 0ustar 00000000000000========= Changelog ========= .. You should *NOT* be adding new change log entries to this file, this file is managed by towncrier. You *may* edit previous change logs to fix problems like typo corrections or such. To add a new change log entry, please see https://pip.pypa.io/en/latest/development/#adding-a-news-entry we named the news folder "changes". WARNING: Don't drop the next directive! .. towncrier release notes start 3.0.0 (2018-02-12) ================== Features -------- - Speed up the `PayloadWriter.write` method for large request bodies. (#2126) - StreamResponse and Response are now MutableMappings. (#2246) - ClientSession publishes a set of signals to track the HTTP request execution. (#2313) - Content-Disposition fast access in ClientResponse (#2455) - Added support to Flask-style decorators with class-based Views. (#2472) - Signal handlers (registered callbacks) should be coroutines. (#2480) - Support ``async with test_client.ws_connect(...)`` (#2525) - Introduce *site* and *application runner* as underlying API for `web.run_app` implementation. (#2530) - Only quote multipart boundary when necessary and sanitize input (#2544) - Make the `aiohttp.ClientResponse.get_encoding` method public with the processing of invalid charset while detecting content encoding. (#2549) - Add optional configurable per message compression for `ClientWebSocketResponse` and `WebSocketResponse`. (#2551) - Add hysteresis to `StreamReader` to prevent flipping between paused and resumed states too often. (#2555) - Support `.netrc` by `trust_env` (#2581) - Avoid to create a new resource when adding a route with the same name and path of the last added resource (#2586) - `MultipartWriter.boundary` is `str` now. 
(#2589) - Allow a custom port to be used by `TestServer` (and associated pytest fixtures) (#2613) - Add param access_log_class to web.run_app function (#2615) - Add ``ssl`` parameter to client API (#2626) - Fixes performance issue introduced by #2577. When there are no middlewares installed by the user, no additional and useless code is executed. (#2629) - Rename PayloadWriter to StreamWriter (#2654) - New options *reuse_port*, *reuse_address* are added to `run_app` and `TCPSite`. (#2679) - Use custom classes to pass client signals parameters (#2686) - Use ``attrs`` library for data classes, replace `namedtuple`. (#2690) - Pytest fixtures renaming, add ``aiohttp_`` prefix (#2578) - Add ``aiohttp-`` prefix for ``pytest-aiohttp`` command line parameters (#2578) Bugfixes -------- - Correctly process upgrade request from server to HTTP2. ``aiohttp`` does not support HTTP2 yet, the protocol is not upgraded but response is handled correctly. (#2277) - Fix ClientConnectorSSLError and ClientProxyConnectionError for proxy connector (#2408) - Fix connector convert OSError to ClientConnectorError (#2423) - Fix connection attempts for multiple dns hosts (#2424) - Fix writing to closed transport by raising `asyncio.CancelledError` (#2499) - Fix warning in `ClientSession.__del__` by stopping to try to close it. (#2523) - Fixed race-condition for iterating addresses from the DNSCache. (#2620) - Fix default value of `access_log_format` argument in `web.run_app` (#2649) - Freeze sub-application on adding to parent app (#2656) - Do percent encoding for `.url_for()` parameters (#2668) - Correctly process request start time and multiple request/response headers in access log extra (#2641) Improved Documentation ---------------------- - Improve tutorial docs, using `literalinclude` to link to the actual files. (#2396) - Small improvement docs: better example for file uploads. (#2401) - Rename `from_env` to `trust_env` in client reference. 
(#2451) - Fixed mistype in `Proxy Support` section where `trust_env` parameter was used in `session.get("http://python.org", trust_env=True)` method instead of aiohttp.ClientSession constructor as follows: `aiohttp.ClientSession(trust_env=True)`. (#2688) - Fix issue with unittest example not compiling in testing docs. (#2717) Deprecations and Removals ------------------------- - Simplify HTTP pipelining implementation (#2109) - Drop `StreamReaderPayload` and `DataQueuePayload`. (#2257) - Drop `md5` and `sha1` finger-prints (#2267) - Drop WSMessage.tp (#2321) - Drop Python 3.4 and Python 3.5.0, 3.5.1, 3.5.2. Minimal supported Python versions are 3.5.3 and 3.6.0. `yield from` is gone, use `async/await` syntax. (#2343) - Drop `aiohttp.Timeout` and use `async_timeout.timeout` instead. (#2348) - Drop `resolve` param from TCPConnector. (#2377) - Add DeprecationWarning for returning HTTPException (#2415) - `send_str()`, `send_bytes()`, `send_json()`, `ping()` and `pong()` are genuine async functions now. (#2475) - Drop undocumented `app.on_pre_signal` and `app.on_post_signal`. Signal handlers should be coroutines, support for regular functions is dropped. (#2480) - `StreamResponse.drain()` is not a part of public API anymore, just use `await StreamResponse.write()`. `StreamResponse.write` is converted to async function. (#2483) - Drop deprecated `slow_request_timeout` param and `**kwargs`` from `RequestHandler`. (#2500) - Drop deprecated `resource.url()`. (#2501) - Remove `%u` and `%l` format specifiers from access log format. (#2506) - Drop deprecated `request.GET` property. (#2547) - Simplify stream classes: drop `ChunksQueue` and `FlowControlChunksQueue`, merge `FlowControlStreamReader` functionality into `StreamReader`, drop `FlowControlStreamReader` name. 
(#2555) - Do not create a new resource on `router.add_get(..., allow_head=True)` (#2585) - Drop access to TCP tuning options from PayloadWriter and Response classes (#2604) - Drop deprecated `encoding` parameter from client API (#2606) - Deprecate ``verify_ssl``, ``ssl_context`` and ``fingerprint`` parameters in client API (#2626) - Get rid of the legacy class StreamWriter. (#2651) - Forbid non-strings in `resource.url_for()` parameters. (#2668) - Deprecate inheritance from ``ClientSession`` and ``web.Application`` and custom user attributes for ``ClientSession``, ``web.Request`` and ``web.Application`` (#2691) - Drop `resp = await aiohttp.request(...)` syntax for sake of `async with aiohttp.request(...) as resp:`. (#2540) - Forbid synchronous context managers for `ClientSession` and test server/client. (#2362) Misc ---- - #2552 aiohttp-3.0.1/codecov.yml0000666000000000000000000000007013240304665013462 0ustar 00000000000000coverage: range: "95..100" status: project: no aiohttp-3.0.1/CONTRIBUTING.rst0000666000000000000000000001456513240304665013774 0ustar 00000000000000Contributing ============ Instructions for contributors ----------------------------- In order to make a clone of the GitHub_ repo: open the link and press the "Fork" button on the upper-right menu of the web page. I hope everybody knows how to work with git and github nowadays :) Workflow is pretty straightforward: 1. Clone the GitHub_ repo 2. Make a change 3. Make sure all tests passed 4. Add a file into ``CHANGES`` folder (`Changelog update`_). 5. Commit changes to own aiohttp clone 6. Make pull request from github page for your clone against master branch Preconditions for running aiohttp test suite -------------------------------------------- We expect you to use a python virtual environment to run our tests. There are several ways to make a virtual environment. If you like to use *virtualenv* please run: .. code-block:: shell $ cd aiohttp $ virtualenv --python=`which python3` venv $ . 
venv/bin/activate For standard python *venv*: .. code-block:: shell $ cd aiohttp $ python3 -m venv venv $ . venv/bin/activate For *virtualenvwrapper*: .. code-block:: shell $ cd aiohttp $ mkvirtualenv --python=`which python3` aiohttp There are other tools like *pyvenv* but you know the rule of thumb now: create a python3 virtual environment and activate it. After that please install libraries required for development: .. code-block:: shell $ pip install -r requirements/dev.txt .. note:: If you plan to use ``pdb`` or ``ipdb`` within the test suite, execute: .. code-block:: shell $ py.test tests -s command to run the tests with disabled output capturing. Congratulations, you are ready to run the test suite! Run aiohttp test suite ---------------------- After all the preconditions are met you can run tests typing the next command: .. code-block:: shell $ make test The command at first will run the *flake8* tool (sorry, we don't accept pull requests with pep8 or pyflakes errors). On *flake8* success the tests will be run. Please take a look on the produced output. Any extra texts (print statements and so on) should be removed. Tests coverage -------------- We are trying hard to have good test coverage; please don't make it worse. Use: .. code-block:: shell $ make cov to run test suite and collect coverage information. Once the command has finished check your coverage at the file that appears in the last line of the output: ``open file:///.../aiohttp/htmlcov/index.html`` Please go to the link and make sure that your code change is covered. The project uses *codecov.io* for storing coverage results. Visit https://codecov.io/gh/aio-libs/aiohttp for looking on coverage of master branch, history, pull requests etc. The browser extension https://docs.codecov.io/docs/browser-extension is highly recommended for analyzing the coverage just in *Files Changed* tab on *GitHub Pull Request* review page. Documentation ------------- We encourage documentation improvements. 
Please before making a Pull Request about documentation changes run: .. code-block:: shell $ make doc Once it finishes it will output the index html page ``open file:///.../aiohttp/docs/_build/html/index.html``. Go to the link and make sure your doc changes looks good. Spell checking -------------- We use ``pyenchant`` and ``sphinxcontrib-spelling`` for running spell checker for documentation: .. code-block:: shell $ make doc-spelling Unfortunately there are problems with running spell checker on MacOS X. To run spell checker on Linux box you should install it first: .. code-block:: shell $ sudo apt-get install enchant $ pip install sphinxcontrib-spelling Changelog update ---------------- The ``CHANGES.rst`` file is managed using `towncrier `_ tool and all non trivial changes must be accompanied by a news entry. To add an entry to the news file, first you need to have created an issue describing the change you want to make. A Pull Request itself *may* function as such, but it is preferred to have a dedicated issue (for example, in case the PR ends up rejected due to code quality reasons). Once you have an issue or pull request, you take the number and you create a file inside of the ``CHANGES/`` directory named after that issue number with an extension of ``.removal``, ``.feature``, ``.bugfix``, or ``.doc``. Thus if your issue or PR number is ``1234`` and this change is fixing a bug, then you would create a file ``CHANGES/1234.bugfix``. PRs can span multiple categories by creating multiple files (for instance, if you added a feature and deprecated/removed the old feature at the same time, you would create ``CHANGES/NNNN.feature`` and ``CHANGES/NNNN.removal``). Likewise if a PR touches multiple issues/PRs you may create a file for each of them with the exact same contents and *Towncrier* will deduplicate them. The contents of this file are *reStructuredText* formatted text that will be used as the content of the news file entry. 
You do not need to reference the issue or PR numbers here as *towncrier* will automatically add a reference to all of the affected issues when rendering the news file. The End ------- After finishing all steps make a GitHub_ Pull Request, thanks. How to become an aiohttp committer ---------------------------------- Contribute! The easiest way is providing Pull Requests for issues in our bug tracker. But if you have a great idea for the library improvement -- please make an issue and Pull Request. The rules for committers are simple: 1. No wild commits! Everything should go through PRs. 2. Take a part in reviews. It's very important part of maintainer's activity. 3. Pickup issues created by others, especially if they are simple. 4. Keep test suite comprehensive. In practice it means leveling up coverage. 97% is not bad but we wish to have 100% someday. Well, 99% is good target too. 5. Don't hesitate to improve our docs. Documentation is very important thing, it's the key for project success. The documentation should not only cover our public API but help newbies to start using the project and shed a light on non-obvious gotchas. After positive answer aiohttp committer creates an issue on github with the proposal for nomination. If the proposal will collect only positive votes and no strong objection -- you'll be a new member in our team. .. _GitHub: https://github.com/aio-libs/aiohttp .. _ipdb: https://pypi.python.org/pypi/ipdb aiohttp-3.0.1/CONTRIBUTORS.txt0000666000000000000000000000624713240304665014027 0ustar 00000000000000Contributors ------------ A. 
Jesse Jiryu Davis Adam Mills Adrián Chaves Alec Hanefeld Alejandro Gómez Aleksandr Danshyn Aleksey Kutepov Alex Hayes Alex Key Alex Khomchenko Alex Kuzmenko Alex Lisovoy Alexander Bayandin Alexander Karpinsky Alexander Koshevoy Alexander Malev Alexander Mohr Alexander Shorin Alexander Travov Alexandru Mihai Alexey Firsov Alexey Popravka Alexey Stepanov Amin Etesamian Amy Boyle Andrei Ursulenko Andrej Antonov Andrew Leech Andrew Lytvyn Andrew Svetlov Andrii Soldatenko Antoine Pietri Anton Kasyanov Anton Zhdan-Pushkin Arthur Darcet Ben Bader Benedikt Reinartz Boris Feld Boyi Chen Brett Cannon Brian C. Lane Brian Muller Carl George Cecile Tonglet Chien-Wei Huang Chih-Yuan Chen Chris AtLee Chris Laws Chris Moore Christopher Schmitt Claudiu Popa Damien Nadé Dan Xu Daniel García Daniel Nelson Danny Song David Michael Brown Denilson Amorim Denis Matiychuk Dima Veselov Dimitar Dimitrov Dmitry Doroshev Dmitry Shamov Dmitry Trofimov Dmytro Kuznetsov Dustin J. Mitchell Eduard Iskandarov Eli Ribble Elizabeth Leddy Enrique Saez Eric Sheng Erich Healy Eugene Chernyshov Eugene Naydenov Evert Lammerts FichteFoll Frederik Gladhorn Frederik Peter Aalund Gabriel Tremblay Gennady Andreyev Georges Dubus Greg Holt Gregory Haynes Günther Jena Hu Bo Hugo Herter Hynek Schlawack Igor Alexandrov Igor Davydenko Igor Pavlov Ingmar Steen Jacob Champion Jaesung Lee Jake Davis Jakub Wilk Jashandeep Sohi Jeongkyu Shin Jeroen van der Heijden Jesus Cea Jinkyu Yi Joel Watts Jon Nabozny Joongi Kim Josep Cugat Julia Tsemusheva Julien Duponchelle Jungkook Park Junjie Tao Justas Trimailovas Justin Turner Arthur Kay Zheng Kimmo Parviainen-Jalanko Kirill Klenov Kirill Malovitsa Kyrylo Perevozchikov Lars P. 
Søndergaard Loïc Lajeanne Louis-Philippe Huberdeau Lu Gong Lubomir Gelo Ludovic Gasc Lukasz Marcin Dobrzanski Makc Belousow Manuel Miranda Marat Sharafutdinov Marco Paolini Mariano Anaya Martin Melka Martin Richard Mathias Fröjdman Matthieu Hauglustaine Matthieu Rigal Michael Ihnatenko Mikhail Kashkin Mikhail Lukyanchenko Misha Behersky Morgan Delahaye-Prat Moss Collum Mun Gwan-gyeong Nicolas Braem Nikolay Kim Nikolay Novik Olaf Conradi Pahaz Blinov Panagiotis Kolokotronis Pankaj Pandey Pau Freixes Paul Colomiets Paulus Schoutsen Pavel Kamaev Pawel Miech Philipp A. Pieter van Beek Rafael Viotti Raúl Cumplido Required Field Robert Lu Roman Podoliaka Samuel Colvin Sean Hunt Sebastien Geffroy Sebastian Hanula Sebastian Hüther SeongSoo Cho Sergey Ninua Sergey Skripnick Serhii Kostel Simon Kennedy Sin-Woo Bang Stanislas Plum Stanislav Prokop Stephen Granade Steven Seguin Sunghyun Hwang Sviatoslav Bulbakha Sviatoslav Sydorenko Taha Jahangir Taras Voinarovskyi Terence Honles Thanos Lefteris Thijs Vermeir Thomas Grainger Tolga Tezel Vaibhav Sagar Vamsi Krishna Avula Vasiliy Faronov Vasyl Baran Victor Kovtun Vikas Kawadia Vitalik Verhovodov Vitaly Haritonsky Vitaly Magerya Vladimir Kozlovski Vladimir Rutsky Vladimir Shulyak Vladimir Zakharov Vladyslav Bondar W. 
Trevor King Will McGugan Willem de Groot Wilson Ong Wei Lin Weiwei Wang Yannick Koechlin Yannick Péroux Yegor Roganov Young-Ho Cha Yuriy Shatrov Yury Selivanov Yusuke Tsutsumi Марк Коренберг Семён МарьÑÑин aiohttp-3.0.1/demos/0000777000000000000000000000000013240305035012417 5ustar 00000000000000aiohttp-3.0.1/demos/chat/0000777000000000000000000000000013240305035013336 5ustar 00000000000000aiohttp-3.0.1/demos/chat/aiohttpdemo_chat/0000777000000000000000000000000013240305035016652 5ustar 00000000000000aiohttp-3.0.1/demos/chat/aiohttpdemo_chat/main.py0000666000000000000000000000136013240304665020160 0ustar 00000000000000import asyncio import logging import jinja2 import aiohttp_jinja2 from aiohttp import web from aiohttpdemo_chat.views import setup as setup_routes async def init(loop): app = web.Application(loop=loop) app['sockets'] = {} app.on_shutdown.append(shutdown) aiohttp_jinja2.setup( app, loader=jinja2.PackageLoader('aiohttpdemo_chat', 'templates')) setup_routes(app) return app async def shutdown(app): for ws in app['sockets'].values(): await ws.close() app['sockets'].clear() def main(): # init logging logging.basicConfig(level=logging.DEBUG) loop = asyncio.get_event_loop() app = loop.run_until_complete(init(loop)) web.run_app(app) if __name__ == '__main__': main() aiohttp-3.0.1/demos/chat/aiohttpdemo_chat/templates/0000777000000000000000000000000013240305035020650 5ustar 00000000000000aiohttp-3.0.1/demos/chat/aiohttpdemo_chat/templates/index.html0000666000000000000000000000617713240304665022670 0ustar 00000000000000

    Chat!

     | Status: UNKNOWN disconnected
    aiohttp-3.0.1/demos/chat/aiohttpdemo_chat/views.py0000666000000000000000000000301613240304665020371 0ustar 00000000000000import json import logging import random import string import aiohttp_jinja2 from aiohttp import web log = logging.getLogger(__name__) async def index(request): resp = web.WebSocketResponse() ok, protocol = resp.can_start(request) if not ok: return aiohttp_jinja2.render_template('index.html', request, {}) await resp.prepare(request) name = (random.choice(string.ascii_uppercase) + ''.join(random.sample(string.ascii_lowercase*10, 10))) log.info('%s joined.', name) await resp.send_str(json.dumps({'action': 'connect', 'name': name})) for ws in request.app['sockets'].values(): await ws.send_str(json.dumps({'action': 'join', 'name': name})) request.app['sockets'][name] = resp while True: msg = await resp.receive() if msg.type == web.MsgType.text: for ws in request.app['sockets'].values(): if ws is not resp: await ws.send_str(json.dumps({'action': 'sent', 'name': name, 'text': msg.data})) else: break del request.app['sockets'][name] log.info('%s disconnected.', name) for ws in request.app['sockets'].values(): await ws.send_str(json.dumps({'action': 'disconnect', 'name': name})) return resp def setup(app): app.router.add_get('/', index) aiohttp-3.0.1/demos/chat/aiohttpdemo_chat/__init__.py0000666000000000000000000000002613240304665020771 0ustar 00000000000000__version__ = '0.0.1' aiohttp-3.0.1/demos/chat/setup.py0000666000000000000000000000156613240304665015070 0ustar 00000000000000import os import re from setuptools import find_packages, setup def read_version(): regexp = re.compile(r"^__version__\W*=\W*'([\d.abrc]+)'") init_py = os.path.join(os.path.dirname(__file__), 'aiohttpdemo_chat', '__init__.py') with open(init_py) as f: for line in f: match = regexp.match(line) if match is not None: return match.group(1) else: msg = 'Cannot find version in aiohttpdemo_chat/__init__.py' raise RuntimeError(msg) install_requires = ['aiohttp', 'aiohttp_jinja2'] 
setup(name='aiohttpdemo_chat', version=read_version(), description='Chat example from aiohttp', platforms=['POSIX'], packages=find_packages(), include_package_data=True, install_requires=install_requires, zip_safe=False) aiohttp-3.0.1/demos/polls/0000777000000000000000000000000013240305035013550 5ustar 00000000000000aiohttp-3.0.1/demos/polls/aiohttpdemo_polls/0000777000000000000000000000000013240305035017276 5ustar 00000000000000aiohttp-3.0.1/demos/polls/aiohttpdemo_polls/db.py0000666000000000000000000000465013240304665020252 0ustar 00000000000000import aiopg.sa import sqlalchemy as sa __all__ = ['question', 'choice'] meta = sa.MetaData() question = sa.Table( 'question', meta, sa.Column('id', sa.Integer, nullable=False), sa.Column('question_text', sa.String(200), nullable=False), sa.Column('pub_date', sa.Date, nullable=False), # Indexes # sa.PrimaryKeyConstraint('id', name='question_id_pkey')) choice = sa.Table( 'choice', meta, sa.Column('id', sa.Integer, nullable=False), sa.Column('question_id', sa.Integer, nullable=False), sa.Column('choice_text', sa.String(200), nullable=False), sa.Column('votes', sa.Integer, server_default="0", nullable=False), # Indexes # sa.PrimaryKeyConstraint('id', name='choice_id_pkey'), sa.ForeignKeyConstraint(['question_id'], [question.c.id], name='choice_question_id_fkey', ondelete='CASCADE'), ) class RecordNotFound(Exception): """Requested record in database was not found""" async def init_pg(app): conf = app['config']['postgres'] engine = await aiopg.sa.create_engine( database=conf['database'], user=conf['user'], password=conf['password'], host=conf['host'], port=conf['port'], minsize=conf['minsize'], maxsize=conf['maxsize'], loop=app.loop) app['db'] = engine async def close_pg(app): app['db'].close() await app['db'].wait_closed() async def get_question(conn, question_id): result = await conn.execute( question.select() .where(question.c.id == question_id)) question_record = await result.first() if not question_record: msg = "Question 
with id: {} does not exists" raise RecordNotFound(msg.format(question_id)) result = await conn.execute( choice.select() .where(choice.c.question_id == question_id) .order_by(choice.c.id)) choice_recoreds = await result.fetchall() return question_record, choice_recoreds async def vote(conn, question_id, choice_id): result = await conn.execute( choice.update() .returning(*choice.c) .where(choice.c.question_id == question_id) .where(choice.c.id == choice_id) .values(votes=choice.c.votes+1)) record = await result.fetchone() if not record: msg = "Question with id: {} or choice id: {} does not exists" raise RecordNotFound(msg.format(question_id, choice_id)) aiohttp-3.0.1/demos/polls/aiohttpdemo_polls/main.py0000666000000000000000000000301713240304665020605 0ustar 00000000000000import argparse import asyncio import logging import sys import jinja2 import aiohttp_jinja2 from aiohttp import web from aiohttpdemo_polls.db import close_pg, init_pg from aiohttpdemo_polls.middlewares import setup_middlewares from aiohttpdemo_polls.routes import setup_routes from aiohttpdemo_polls.utils import TRAFARET from trafaret_config import commandline def init(loop, argv): ap = argparse.ArgumentParser() commandline.standard_argparse_options(ap, default_config='./config/polls.yaml') # # define your command-line arguments here # options = ap.parse_args(argv) config = commandline.config_from_options(options, TRAFARET) # setup application and extensions app = web.Application(loop=loop) # load config from yaml file in current dir app['config'] = config # setup Jinja2 template renderer aiohttp_jinja2.setup( app, loader=jinja2.PackageLoader('aiohttpdemo_polls', 'templates')) # create connection to the database app.on_startup.append(init_pg) # shutdown db connection on exit app.on_cleanup.append(close_pg) # setup views and routes setup_routes(app) setup_middlewares(app) return app def main(argv): # init logging logging.basicConfig(level=logging.DEBUG) loop = asyncio.get_event_loop() app = 
init(loop, argv) web.run_app(app, host=app['config']['host'], port=app['config']['port']) if __name__ == '__main__': main(sys.argv[1:]) aiohttp-3.0.1/demos/polls/aiohttpdemo_polls/middlewares.py0000666000000000000000000000253213240304665022162 0ustar 00000000000000import aiohttp_jinja2 from aiohttp import web async def handle_404(request, response): response = aiohttp_jinja2.render_template('404.html', request, {}) return response async def handle_500(request, response): response = aiohttp_jinja2.render_template('500.html', request, {}) return response def error_pages(overrides): async def middleware(app, handler): async def middleware_handler(request): try: response = await handler(request) override = overrides.get(response.status) if override is None: return response else: return await override(request, response) except web.HTTPException as ex: override = overrides.get(ex.status) if override is None: raise else: return await override(request, ex) return middleware_handler return middleware def setup_middlewares(app): error_middleware = error_pages({404: handle_404, 500: handle_500}) app.middlewares.append(error_middleware) aiohttp-3.0.1/demos/polls/aiohttpdemo_polls/routes.py0000666000000000000000000000113313240304665021177 0ustar 00000000000000import pathlib from .views import index, poll, results, vote PROJECT_ROOT = pathlib.Path(__file__).parent def setup_routes(app): app.router.add_get('/', index) app.router.add_get('/poll/{question_id}', poll, name='poll') app.router.add_get('/poll/{question_id}/results', results, name='results') app.router.add_post('/poll/{question_id}/vote', vote, name='vote') setup_static_routes(app) def setup_static_routes(app): app.router.add_static('/static/', path=PROJECT_ROOT / 'static', name='static') aiohttp-3.0.1/demos/polls/aiohttpdemo_polls/static/0000777000000000000000000000000013240305035020565 5ustar 00000000000000aiohttp-3.0.1/demos/polls/aiohttpdemo_polls/static/style.css0000666000000000000000000000016013240304665022444 
0ustar 00000000000000li a { color: green; } body { background: white url("images/background.gif") no-repeat right bottom; } aiohttp-3.0.1/demos/polls/aiohttpdemo_polls/templates/0000777000000000000000000000000013240305035021274 5ustar 00000000000000aiohttp-3.0.1/demos/polls/aiohttpdemo_polls/templates/404.html0000666000000000000000000000007613240304665022504 0ustar 00000000000000{% extends "base.html" %} {% set title = "Page Not Found" %} aiohttp-3.0.1/demos/polls/aiohttpdemo_polls/templates/500.html0000666000000000000000000000010513240304665022472 0ustar 00000000000000{% extends "base.html" %} {% set title = "Internal Server Error" %} aiohttp-3.0.1/demos/polls/aiohttpdemo_polls/templates/base.html0000666000000000000000000000053213240304665023104 0ustar 00000000000000 {% block head %} {{title}} {% endblock %}

    {{title}}

    {% block content %} {% endblock %}
    aiohttp-3.0.1/demos/polls/aiohttpdemo_polls/templates/detail.html0000666000000000000000000000100113240304665023424 0ustar 00000000000000{% extends "base.html" %} {% set title = question.question_text %} {% block content %} {% if error_message %}

    {{ error_message }}

    {% endif %}
    {% for choice in choices %}
    {% endfor %}
    {% endblock %} aiohttp-3.0.1/demos/polls/aiohttpdemo_polls/templates/index.html0000666000000000000000000000052013240304665023276 0ustar 00000000000000{% extends "base.html" %} {% set title = "Main" %} {% block content %} {% if questions %} {% else %}

    No polls are available.

    {% endif %} {% endblock %} aiohttp-3.0.1/demos/polls/aiohttpdemo_polls/templates/results.html0000666000000000000000000000046513240304665023700 0ustar 00000000000000{% extends "base.html" %} {% set title = question.question_text %} {% block content %}
      {% for choice in choices %}
    • {{ choice.choice_text }} -- {{ choice.votes }} vote{{ choice.votes }}
    • {% endfor %}
    Vote again? {% endblock %} aiohttp-3.0.1/demos/polls/aiohttpdemo_polls/utils.py0000666000000000000000000000057413240304665021026 0ustar 00000000000000import trafaret as T TRAFARET = T.Dict({ T.Key('postgres'): T.Dict({ 'database': T.String(), 'user': T.String(), 'password': T.String(), 'host': T.String(), 'port': T.Int(), 'minsize': T.Int(), 'maxsize': T.Int(), }), T.Key('host'): T.IP, T.Key('port'): T.Int(), }) aiohttp-3.0.1/demos/polls/aiohttpdemo_polls/views.py0000666000000000000000000000402013240304665021011 0ustar 00000000000000import aiohttp_jinja2 from aiohttp import web from . import db @aiohttp_jinja2.template('index.html') async def index(request): async with request.app['db'].acquire() as conn: cursor = await conn.execute(db.question.select()) records = await cursor.fetchall() questions = [dict(q) for q in records] return {'questions': questions} @aiohttp_jinja2.template('detail.html') async def poll(request): async with request.app['db'].acquire() as conn: question_id = request.match_info['question_id'] try: question, choices = await db.get_question(conn, question_id) except db.RecordNotFound as e: raise web.HTTPNotFound(text=str(e)) return { 'question': question, 'choices': choices } @aiohttp_jinja2.template('results.html') async def results(request): async with request.app['db'].acquire() as conn: question_id = request.match_info['question_id'] try: question, choices = await db.get_question(conn, question_id) except db.RecordNotFound as e: raise web.HTTPNotFound(text=str(e)) return { 'question': question, 'choices': choices } async def vote(request): async with request.app['db'].acquire() as conn: question_id = int(request.match_info['question_id']) data = await request.post() try: choice_id = int(data['choice']) except (KeyError, TypeError, ValueError) as e: raise web.HTTPBadRequest( text='You have not specified choice value') from e try: await db.vote(conn, question_id, choice_id) except db.RecordNotFound as e: raise 
web.HTTPNotFound(text=str(e)) router = request.app.router url = router['results'].url(parts={'question_id': question_id}) return web.HTTPFound(location=url) aiohttp-3.0.1/demos/polls/aiohttpdemo_polls/__init__.py0000666000000000000000000000002613240304665021415 0ustar 00000000000000__version__ = '0.0.1' aiohttp-3.0.1/demos/polls/aiohttpdemo_polls/__main__.py0000666000000000000000000000011113240304665021371 0ustar 00000000000000import sys from aiohttpdemo_polls.main import main main(sys.argv[1:]) aiohttp-3.0.1/demos/polls/config/0000777000000000000000000000000013240305035015015 5ustar 00000000000000aiohttp-3.0.1/demos/polls/config/polls.yaml0000666000000000000000000000026313240304665017043 0ustar 00000000000000postgres: database: aiohttpdemo_polls user: aiohttpdemo_user password: aiohttpdemo_user host: localhost port: 5432 minsize: 1 maxsize: 5 host: 127.0.0.1 port: 8080 aiohttp-3.0.1/demos/polls/images/0000777000000000000000000000000013240305035015015 5ustar 00000000000000aiohttp-3.0.1/demos/polls/images/example.png0000666000000000000000000017655013240304665017204 0ustar 00000000000000‰PNG  IHDR€–èª28 iCCPICC ProfileH‰•WXSÉž[R -)¡7Az•Þ;ÒÁFH„C ¨ˆ YTp-¨ˆ ¨è ˆm-€,*bW{],¨(ëbÁ†Ê›$€®ûÊ÷Î÷Í?gÎ9óŸsgnf·e ™¨Yü\a¤¿3>!‘IúPP Pb±sž!ÊXÿwyw âþй8Ö?Çÿ«(r¸9lˆ“99ì,ˆ€«³Â\]P¯7'W Æo!VB‚Ébœ*Åbœ,Å–›èHoˆ} SY,a*râøÌÁÃZ¢ QFŒç(¶f %¾Ð ãåFâdáìÈÑøh?3,d4Îò4nà®ææøFÙ¤ðü!†+ =œŸ'剞ÎãņA,qWNFTð¨ïýü4ï°1¡(RÌYâ·)B¿H© ¦š•3–fÁfIæR…Ø#7-:@ê‹ÅssâCÆ8p¸>¾R‡Ëå†ÁÕå9ê[,ÈŒµÇª¹™þ‘Ò:crò¢Æ|{rá“Ö{˜Î ŠòÇÞ r#¢¥Üp„oà˜@[2˜ Ò¯s iþ’Žø‚TÀ棚18É>£@>ø".È÷ó’ŒrAÔ×JŸæ E2š'ñÈO ÎÂÕq7ÜOجqGÜiÌ)?6+Ñ—èC úMÆy°!ëLØ„€÷otÁ°çÂìÄ\øc9|‹GxBè&<$\#ônXðXeÔj¯Pøs&½0šßhvÉ0fÿ˜ nYÛá^¸+ä¹ã \˜ã¶0OÜæfµß3sûVËç³þ>ŸQ½œ©œÝ(‹äñ7ã=nõcïïjÄ}ð–Ørìv;‰]ÀZ±&ÀÄN`ÍXvLŒÇWÂcÉJ›-RÂ-ÆáÙX6Xö[~þÇì¬QBÉû¹Ü¹¹â á=[0OÈKMËezÂ/2—Èg[LbZ[ZÙ þ¾K?o’ï6¸øM—Ý€S T¦~Ó±ô8úú»o:½×p{­àX[$Ì“êpñƒÿ9äáÎPZ@ܬ=pÀp ÀLXõ4YÏ` (¥` Ø*ÁV°Ô½à h­à$8 ..p Ük£¼ƒàF„„Ð:¢†h#ˆb8"nˆ/‚D" H’ŠðR€,EJ‘2¤ÙŽÔ#¿"G‘“Ȥ¹…<@ú‘×È'C©¨2ª‰¢“QGÔ F£Ñh*šæ£Eè*´­A÷ èIôz íE_ CÀd1¦ƒ™cŽ˜7Ž%b)˜[ˆ•`åX ¶kïú Ö‹ `q"NÇ™¸9\Ÿx 
ÎÆ³ñ…øJ¼¯ÃñÓøü>ˆ%Ð3‚3!OH%Ì!Ê »GgàÞé#¼#‰ ¢ÑîÍb:q>q%q q?±ØM|D"‘Hj$3’+)œÄ"å’ŠI›H{H'H=¤>Ò²,Y›lMö#'’ùäBr9y7ù8¹‡ü”<,£ c ã,.Ñ™'³Zf§L‹Ìe™>™aŠ"ňâJ‰¦¤S–P*(û(g(w)odeeued§ÊòdËVÈ=/û@ö#U‰jJõ¦N§Š¨«¨µÔ6ê-êfHó %Òri«hõ´S´û´rt9 ¹@9ŽÜ"¹*¹F¹¹—ò2òòžò3åóåËåÉ_–PQ0TðV`),T¨R8ªpCaH‘®h¥®˜¥¸Rq·âÅgJ$%C%_%ŽR‘Ò¥SJè]îMgÓ—ÒwÒÏÐû”‰ÊFÊÊéÊ¥Ê{•;•U”TlUbUæªT©Sée` CF #“±šqqñi‚æÏ Ü +&ì›Ð3á½êDUU®j‰ê~ÕkªŸÔ˜j¾jjkÕšÔî©ãê¦êSÕç¨W«ŸQ˜¨<Ñe"{bÉăok ¦‘ó5vhth ijiúk 47ižÒÐbhyh¥k­×:®Õ¯M×vÓæi¯×>¡ýœ©Âôdf2+˜§™ƒ:::"í::úFº1º…ºûuïéQôõRôÖëµë êkë‡êè7èß61p4H3ØhpÎཡ‘aœá2Ã&ÃgFªFFùF FwiÆîÆÙÆ5ÆWMˆ&Ž&&[LºLQS;Ó4Ó*ÓËf¨™½Ïl‹Y÷$Â$§IüI5“n˜SÍ=ÍóÌÌX0,B, -š,^NÖŸœ8yíäs“¿ZÚYfZî´¼c¥ddUhÕbõÚÚÔšm]e}Õ†fãg³È¦Ùæ•­™-×¶Úö¦Ý.Ôn™]»Ý{{¡ý>û~}‡$‡Í7•#W:žw"8y9-rjuúèlïœë|Ðù/s— —Ý.ϦMáNÙ9å‘«®+Ëu»k¯Ó-Ém›[¯»Ž;˽Æý¡‡žÇc—ÇSOÏtÏ=ž/½,½„^G¼Þ{;{/ðnóÁ|ü}J|:}•|c|+}ïûéú¥ú5ø úÛùÏ÷o ¬ ¸¨È¬ rZt:˜\ü0Ä4DÒІ…® ½fÆk ááëÂïEEdGü6•85bjÕÔ'‘V‘‘ç¢èQ³¢vG½‹öŠ^}'Æ8FÓ+;=¶>ö}œO\Y\oüäøñ—Ôx ͉¤ÄØÄ]‰CÓ|§m˜Ö7Ýnzñôë3ŒfÌqa¦úÌÌ™ÇfÉÏbÍ:”DHŠKÚô™Ϊa %&oNd{³7²_p<8ë9ý\Wn÷iŠkJYʳT×Ôu©ýiîiåiW¦U^«òªÚ¿YcóŠÍï·p¶ôT{TïÛª¹µtë§m¼m7·ûoo¬1¬)ßAÜ‘·ãÉÎØç~qü¥~—ú®Ò]_jùµ½u‘u§ëêëwkì^Ý€6ˆú÷LßÓµ×goó>ó}Û÷3ö—DžÿšôëõƒÁÛ9ÚwØàðæ#ô#%Hã¼ÆÁ¦´¦Þæ„æî£AGÛ[\ZŽüfñ[m«NkÕ1•c«SŽ9‘b¨MÐ6p2õä£öYíwNÅŸºzzêéÎ3ÁgΟõ;{êœç¹ç]Ï·^p¾pô¢ãŦKö—;ì:Žün÷û‘NûÎÆË—›»œºZº§tïqï9yÅçÊÙ«W/] »Ö}=æúÍÓoôÞäÜ|v+óÖ«Ûy·‡ï,¾K¸[rOá^ù}û5˜ü±¿×¾÷ØŸ£ÞyÄ~ôâqÎãÏ}EOhOÊŸj?­fý¬µß¯¿ëù´ç}//†ŠÿTüsóKã—‡ÿòø«c0~°ï•ðÕÈë•oÔÞÔ¾µ}Û>1tÿ]Ö»á÷%Ô>Ô}tüxîSܧ§Ãs>“>W|1ùÒò5øëÝ‘¬‘KÈ’0ØÐ”^×@K€gx£ÈIï_A¤wF ÿ Kïh± Ö€˜Å„À3J5lSa/>~G{ÔÆf¼JNе4ÞbFFÞh@jà‹pddxËÈÈ—ì-Ú²¥÷>±á›…uõ½,?È¿ÆÂmðdáiû pHYs%%IR$ðiTXtXML:com.adobe.xmp 640 406 Ba iDOTË(ËË®p–ÕÞ@IDATxì½  ]ó•ø¿Î½çæ-ò‘¢E+¥†é¿–Pt4¦CúHTQm•¡­ŸaÊ*ŠzMI-¥jEIP¦¢-SÑ¢‘D‚DäqϹ÷ÿù¬}î½çÞÜܼ褜}“{öÙûûXßµ÷=ûsÖú®õ-ýáhÆÖÐÀZh ½½=ÚÚÚbéÒ¥ñÌ3ÏÄÓO?Ûn»mŒ3&-^í¥J4GeJQŠ*ïËAhj/E[{5Êí¼oâ8eÚÛÛ"šKìWó|{[S475Eµ6š)Wå|©(×ÔDk¥*mÒZk3å‚VÛ¢¹­9¨lO”õx5*ô´]âìy(ÓTŽfŽ7!;û¶×^µ^ÒP†²HI=Þ—Ú9GŸåjD¹ÊôFûœ¦ÍjöO§´i_íÈ«^Ž2åei«©Bá–ˆÖÖ¦@ k纩Ð%ÕJQ¡MÊq²Dm6ÈxÚi_ÔG™ö+¼/µÓF“2Ð ðŽrþfkvLt5`p<ñÄâOz*¶·yl¼ÉÆÑ¿ß h*3FÊVo m¶R»œ× v›cæª9¸R…cŒ¡TnÍ:mè§e7q-,ÕÔ„|¥VTÀ1ô^bÌme„­TòZSð/îú„Þÿc<ùäS±ùæãb̘±1 ?]µµ Ø¿ë_áZ"1׫éßiþ poq´sÞ¨ëßÈ 
xüˆ'ÿôtl¾åf±é&c£\îÏ}îšZü‹müý÷ò÷?h`Küá‰'â©§ž‰-·ðïuTô+,þý\\ÿÿÿ­ëÿ¿õ÷ß`ÿxâêïO±Ùæ›Ç&c6š}šøé˜Ÿ±ü1¼e>ÿW÷ú—èS³±­®: ïµ×^‹E‹ÅK/½ ,zšc³ÍÆÅàcÙòåKÂC˜) YBû‚…¤"^µû^æì€ ù© +}ü­rŽRÀZ;m j¶"žµQ¯8´z© @ò¨PX-`)QˆºBŠ=–ºm8…-P-¡J†šòdQ>k¤\ÂmS”&OÑü.ñ0-¤àQæC­Ö‡ ŠMºÕ>pèh—± ¿Œ2lÞ²5š® mò†¦€-Jx^5Ù˜%“ú†±*9oÑ& KÞ…\S_ ‚>”L0dLÈÜ ŒÍ™='fÏ™Íñ¦6bxŒ9<@žBèÌØv»ç>²–8^BöŽ÷UÛ£s!SÑJÈÞ.Is<Á¾  sÍJ\“&ÇbÍ¿àëßà›5 ½Íž¥Fb£á#cĨá@{€ë¤&Ö÷ëï—¬ü²Å-Ó V½ï¹G¼tùå‚×7âú÷ë×?ž›õ\Ì™3'ÇÇð£bi¿ÜäýÅ=Òøûïþ÷߯ÿÀ˜óÜLî»9ù79‚¿×‘#6Šƒø{åÇk¹&ÿÿ[×ÿë￉çÄ>ëæÌží)÷ÜÈü?pP Æ?Y[ÞRŸÿ«sýK—^z©ŸØ­¡†hh ¡†hhà-¢ÒâW_ÕKÀ·1­Z`g-ZGü¦˜ÇµJ°“)Ë©f,U¾UꊲÿòWQ¿°,øm¿XÑ%Ò~`ì§¡€SºÖtKؾ® ¾æpcÔµFí;ªß²}+¶uX+²õ¬¢ÕB!l‡%hwŸjØFxÇyÏø”¥JÅ´ttÖ±"5µ^t”/š,Ú®—]k²Òí©ŸÂŠbíb€ÅXmK«•Ö£bl…Ìö¥È/æÊïébܵr)e¡Ô¤è‰sT)éÈbœWožã}ÊÀÛ­‡ýŠÏ+âbAòW¶¯ê+]‡¹ã(¬àÖ16¯“Zã=²–°(XCO­¬ç-‘ÂâSôÓ¸Taã>jÜGÜŸ-Ï#?ƒŸG~þúIž¶W>YóÔû¤ñ\k<×x ×3†·ÆÌG¥çç½qKôÁ5Ðm +Ê›´yZr¾ˆs`šq 7íÀaeuÙ9—ª˜‘•D‘m9ÿ)˜+Uªhæf¢þªŠ ç¼&J·`®Õ}ÕÆ"&×@7üAÐÍÓs§€ ço%Óx0ëâFcÎâñÊLׯÂN ÷_µ…ùLºÛ¤>ÕÚº¥˜'Ö®Û,ÿìì P´sc*€W™¹Nrj1w 4Lh¢6çÐÓåÃA^‹9XÔɺ´í¼±ûG#Í­LbîòTq;z1u·(O›“½üT/üÓµ©<8ÎrŸÐÚ™£¥©ßyi”¦žÍp¦›Í£êÇ!4á–ʘœV¸ät‡ sŒ»„˱ÙÛ€=¥¯£WææyýœKæ³ö×h¢r(ª A/γŸã²Õs¹Å!0¯­=Zi+¿1ÐVã>jÜGùùÕ¸ŸG~`4>øôÃÔñÆs­ñ\ãÙ 7ÈZë •žá…ö2“¶½AKLÒÕÒ¤I*[Àó³¼uµš9±½¹˜heÀ !o4 8V¶œHo1¯’(U€!Š6q¬041çÉÿ& !‰?¦Ä5ÌO±ÎUzŠIôb`+r5Wi!c_lçFX…M îñoIêͯzP®d@MÈYuÞŽ°”¶3?¡¥¹Ó6ä ëhnœKæXœë&9|!Í5`‚.ERÂa‚)g›{øDÔFÀÒRÖ&3^Á¹  jPi«ŠŒ8Œ hF¯žkŽ=¦NUkfàÊ6y9‘Ú¨S-E«ŒŒSÏÔ9ÿNU'YÐOñ¼.‚3:t²¨sº˜­€g™>ÛCÈS¥0ךtÓîúà½L³?M5y÷Qã>jÜGÏ£ÆçQã¹Öx®ýeðQiÞÜÁ-GÈñè×î—@Ð 8?!‘Jübç¸SØÛùvãA+õª¶›V-Êȯի)exhC£”‡¥{í2Ò”U ›¹ét-WYàF‹`Y›ùƒÖYš.f!•þÊ* ˜r_«›ö9çE6a‰Ãä3K5 V{柎ոœ`lFÁ×9y©VÞ4¥Uù‘ÌÁLNÛ›èYŒ[ÕÐeÖ£){åÐì26A¶ fž;} ‡éf§h¦@‡Ì2xNY´ æQ À¤û^«i}7s¸ð¡šsl¸ÄÕm‹¿44î£Æ}ÄýиŸGÏ#îÆs­ñ\[¿ùˆ ç1~ ay¿&(¤ —'»nXmv†óçx¼“4Öñ°ö(ÁC_ Š€ È*Á‚½´ hÍX¨ÄÃfá†:íÀˆð¥•©]³¢à¦Õ‰vš¥>¶t ,º†Óú'|É:€Â1ºL«T;säÌA× ÔiáJ¥/…nN`´ÞzN«¢‘[(Õ½¬ÑÑ ŽP× eÀõì¸kãÑÒF¼WNêéN¥X3Pè\H>4òiQsœJªM3M(’<*¢5m[Î÷ªu±kmA¶´&¦Þqµ ä~ƒÔ‡nŠñRË¡8]J0§8îfŽ)›ý;Òy}Hƒè@ÏÄÇ¢ÕS³`εd¼êT¤Œ×A=â†çz4Ñ¿FM鹄kß}ÝÍ4GÛÔà"Ú®ó…|ÛYßï#®6Iiƒ1h©ßú%¡™kéüÔŒxglyÕ8©µØæõ£¼½¼~Þ3ùEDz9Õ6?4ZÜGÞWöSè(£Æ¹^÷UÞôí½ãƒ6¿(ÑIZ`k÷‘W±ó>¢Åi“Í/BÜGT²c^ 
rš,:ï#¾€x¿kݵÑbLÈVo×}D£Ù?Mú‡nÞ;ô_ÍûH´G6GÂñœ’á=åó‹“rШ_/¼8Ê«zdÝ¥å?…æÑ#À~ÞG9äóK ívÜGÙŽB*6rùÅ%?+üBÅx<áõò󈯟yÍ:>²8:PSÛ™r¯èÉ/föA+¼¢l>;Š.®QÝ瑺Ôzï–ŸG´“ò#k­šjkÜGŸGhƒÞ«ûEpÿú\kÜGüqSøw×øJsŸŸK@ŠŒüñæg&‹ùA‹­«U@àƒÏÏz|Þ™P1Ó§P¡ÍçùÀÍ ‚¼áýÀ¶êûñk`A•ãf\¿ 6üu40ùàužmåç´sp…Ҭϗ²§#–ó~l·ñеK­e‚+œSWr.a²…ëÑþ}d€ˆ7ž•!PYù^+€M‹ÀŒ¨­—ù€ð!!H]´ ¯Éz‹ Z ö> Óöq/ðĤcO«"rg‡œ#’¶)(l‡vÕu}hwÌ¯ìˆ j¯²²×ñ'ØÑ†VQç*6³"ƒtuëJŒ5SÒÚYÀbÎôAƼ¾LÀl9ÞW„[ôÅSÖଥSpCÒÚµX 6Aþ&®K;Ö_D§cöiŒÎù%)¥þC‚ ר\_|*•Y©äÉøÓÓOÅ /¼ÄTÆÁuÆŠ9ª^Cß« ÆJoɼžêÌ¿7¬ºÈ{Õ{SàZä— NyÏs.¯ ;ž÷O^ p–ñÇm¹È½d“ Š7ÞSžAï¯ì*ÛË£4î­K]!Íûîê¢ m dÞÙ¶g¸Ï2ÝÍ W£cEÎ!…m*áP6´HXôO†ýâŽW·Î~ü,(Úôž÷Ë#…ü¼wòpÞ¹ÄÜêÖ°QT”a,yßsFK´Ðéß¡òZÌ^ÕbFãó*LúÞ†Ô“­ä´ŽåÕTš•Šú©DÚKZ8ë;$ŽY†Ï•Ô-e`”íÖþ†í;¯Hê%›LYó˜ºTÇh»ì7î#/­zoÜGÞû¨ñy”Ÿñùñ°þ~•^˜?—OO>ÎL%â‡%Ê<ÖóaPÀ Æ\>ä¶6> ZÍg ði-d|ççQĺ ©á¾`I™ o´Lä1,À…Q°Zë|P”u[rœÂ”)ú¦&û@ õ `೚òüÂ*Y"ÄH”ñ<”‡¶>B¢A #ËÉA~>›g0 fÎ÷<àÛ1©ezeõ!E?>·&d>,2àÁ¾=/ˆÑ§HÜŠ º™}µúÐ"©¥5$ƒ-(?"Ÿ6Êà(㊠e`³ùË´[á©ØL k ÑYF òáâøè3SµøÞ‡Ž'Ë:NúˆUª«LØ…9FÛì­huá<ú±MDâRk±dŒ¥@¨xUAÒÓÒçÜÏ´A # G>Úm1ï ®OÀp„ÆSžÜµõõð>rnjëòeqÿÝÖJñ¶-·Œ#FD¿~ýЯÚll 44ÐÐ@C ¼õ4Pš7g_‚y¸CMZÊéX°„h ô¹ž°Vœ/–„øìIÝ€–©/èµMH—šßÛýöžé_è'Yˇ/& -ŠæÌM„†ÊP‰ƒ6Ý3ÐàAKz!›) %8¶,B¢ÖåÑé™KWÑ€s÷4X!©¬Bwœç#I(Ò2¡ÛYWWº¼8)9—Í‘`Ø¢Žó8¬¤…´v"d~ÓÕ VX•Ò Áªšóç€Bôgp….ß„6€Êt3m€PV0Ë@`TžÖN\°Ê©Ûؼ‹­ÂZÊ,äe \÷Z9a›÷Z…Z°¬ÕeôÝq-µºÆ«pW¸áŒíÐ=®­&ºè´l–‘C÷|P¬R0ÒÊëOY¼>¹$-£sáÆçZÒ§ ìÖÑ÷útéÖ¾çþûcCÖ(Ýöío&ü¥Ü)uãWC 44ÐÐ@Co= ”æÎÆŒ{2çûnZyŠü|ZûtÉðè$´áœX•4YÒH…  ÁZíø¯åŠsQ˜ã/&†^„ŒLP Ý”y0gÊ €('¸mL†J.Ä:UÆ]ì"óv¦¡KòK·2œ3W@‘¦PNÛ^›®\àJ—o:Ÿ‘‰Ö9£{W‰ø¬T¤+÷º6º§´ ¤sMV‰NÈ¡b9ñ(‘ ŠU¬Í9‡€a‚(peÔ± ÕJ=ç%YOkb‡ÅM8ÒP ã@g¸Qí²9ïÐèæ¸u¾¡–:ƒ9J쫤õÏÆa-PqÓ–ðœ+_&ØæœQº™Ë‚ÎAùãv_¨U¶6,~ÍXhuIæ\Mê¶r ŠDÛXG¹vêÙŒ‘ú”U°„YÅ ¼ª2â[ðÓ=¼¾ÝGïܹ3ã™§fÆøw¿›upGÄàÁƒÑVckh ¡†hhà­­,€¸€5{ vÒ uÝy‹óè¾2ÊÁ'¾xÂÃ^pž?˜¿œ“–Ö4)%Á`d×ùmÌ'kbÅŒ¶# J¢`WD‘b´>€ eLˆL¨£˜®ÕÌMH™vÈ©Ýù@SÂð—à4*[æõ£]á„ÖB aËçÁ&®‰¹‚9¯6Ö´"‚y «’™óíhÃ)o‚˜-¥¡+A³¹ÊœKd®>- f–f2]·y÷Ô£'ñ®*<ò¡/Û¦?ïétÓžÚôpêy+ºi‘] ntAV‘[Y\ݤ¤j)LàZ£l›×Ýáœ×—Šh*¡Nk¤LZ oE¶WÊåP;å8¢0^»XúPßy“$ˆŠ¥ž£¾×MÁÖ³û¨™Äæ<ô@Œ=:¶Øj«Ø`ƒ Uckh ¡†hhà-¯ÒÜysYaŒG9ѽ9Q—§îÜ6Ò4×|¶ã‹ÌŒ$LËK00Š®>çåµV @B 
H'`¼J—aFb=¢X§KVV“Ì©§uÉÂZ«œ>XbN””©G¼:-6D…Ì–8a¾XŽòeäƒ,j¬ås¢7²èêsU‹œ`Ž¿Ú´5E¢àJYà§c‘nVp5¡(ç-Ú§°ªÝ,ôåêXÛÒÉéÂTQ®Y¿¯†E ªÀÆ5xª€$]°<›VFä3zWŒ”¯2H„ê9? zMD¯v¡‹8É ¹m£™¶…T¤æ$¿©':ûLã*2;o²Xî}Ê xí$î£0íµf\X3½¶ð$m¥e™)“<µ ïd}[Pwiý$m à¡{`{=»ÊÜwӦ߻íºkŒ³ úFY­¡†hh ¡†¢4oö<â?´àhË~4ôÅ’Qy–mèvÔ4—Q®B”!Œh?£EY(âñ\ÒVÚ “\û–ú¥Ú¼<­h™€š²ZÈ„¾"úºgéC0A²hðjs¬9­lì„Âp€)Ž:w`*3UÆ©Ó;sú©tƒÒª—< YJÒ-m`G¶«Õ”òZ&¼"z“1:&~«›=AÍrL¬ °ªÀÚ/sMŒ˜¶)S»dNB¨Éп<Ê2/Ÿn 4FjÀOÙ8-‰kÝãZ$Pò¶ˆB¥#Ú1b¹‚_›qëVN9mS鯂^‚¢G¯!:À…x¯|Q-q×·ûh °?}úÝñþ¿ù@Œ5J 44ÐÐ@C  ÒóóçI…JP«ARá†å´kZJaXhn7y3sü27šýÇ+Gø¡' Ú$=5¾&`SH´ÑÔ•]sê2-–IŒ‚£×'9 hÔú¶7mÀšðíy¥Qö Õ§Ñ×Dy¯,ÀnÙÕ;¨[R #› i&(;`Ë;Ÿ0GÇÅw €úʪcd. iÙå½ÚÈH½,ƒ,™sq=¸ZšÄÝ÷NÝ?¸{Fþ*ackh ¡†hh ¡žõóæÍÉü§™FE(È€øEZž„ Nð 7zTèÊ ‡$0Ë¡N™ðHä¡n+uuCj™“9l]P’9taÔá\:0H)7 ƒUQRšÓâ¨%0óÌ)G–·ºÂ›‘¹P‹I˜™f bhš´E‚­hÕ³¼òi뢜sû˜˜9ïŒÜMÏsž-¬ŸñŠAüp^@CrJð›ú @•v;­qUÀʈf<å PHƫǃ¾iõk±´væ! ˜4ľ:×¥Ë>Öº\B3>…Ly«üôami0˜c FKŽKÞÊ*<ªY¸zè„¶e:PƒÞi«¥ˆäµ=õºÉ"Àúàš‰.Î9Ò¿“)¹^m@¤–FÓÃxã–ºØ (IC$íkÍ\î£ær¿¸gú´Øc„rù 44ÐÐ@C(=ÿüó¦Û°ŠazÒ…X£E—)ëèú€š´\IšÖœÇ·f‰H² sŠ.C&%HQÞ9i—ɣµì¹œœÖª$óüQ´¢EK`0ÖMNŒ†M×3ò@FÙê´,@ áÄ>9›©S±J« WÍeÌèhÉeì8çœÁÌ‰ÇøÒ‚Å\8ÁVøBÚ$:Ww(r*‹²W°ºõš)Vx¯ÅϾ©»:f X±ŸŒ  Å´ VPËå:º¶‡Nré8-l”Њ'LêJöÇ9yBWaac,ªùT¿$%=ëÚ•FÛ+żCõÝf$v6ÏX8g`ŽãΕhDU¼^êŠ×)vtn)më†/À„†Rs $fE4@{ÈŠ ®‚Ò–¦FúGWmYßî£rý݃ x=wáǃlünh ¡†hh ¡,€sžçiÏC‹‘ Thš[HV ,°Æk‰<|œJ˜ÉU&À…6ÓÅàÊ” æxøƒ*rSºF›p‘êZvtof@Dó¼ÇÂ…VK•KlyØTí-ŠAH‘ËHn®¼‘ph3bŠîXÀã¢LC9ŽyH`äÅ9‚àûZªSË '<ÚŽ«‡T„@ȧ}ÇebæbÞo´¤Q»XÆJq³dc¯øxÔRšãh/U£;Ê .‡,1¹LZ>­–èS˜´-ÎleGWz•Ј ¶«º„»=å (S)/ "¬V ô/ôbÝcð)ýÊd…g0¦šî{µ#fFI`4â¦L \ŠÞ˜Çè5q (·™:^×<] ©ÎϤ°r¤·1m"eZo †Yß~äœvoLØ£€\¦ÆÖÐ@C 44ÐÐ@§J³IÓ‘†Nˆ²_l²É&k\¯Q¡¡†hh ¡?‡™­w“çu5…©™§9¦´™*Ät*Mäëˆ&åù*y$jâÛèŠëïµ×¾ÔÝêÞGTMO7?ƒJD¿æ´tkéWŽ»¦O½öØ+† –ÇÖä×âÅ‹ã†nŒO~òkR-®»îºxõÕWãþáégÖHs 44ÐÐ@C. 
”æÒ$íù0¥W#V äȾy²k]ÒA˜hàÃ\ëwÙ$òK†ÒB¥ÑÊ  $࡞sÝx°—µ²“®7«%¬s×5ß™?ÿùÏÓ½û¿ù›˜4yr Ô¿€ F„¡É´p .Fš¶§ ËdeJÈõi… ¤”ðZŒ')ž‘¾pÙ¯f:F•m L³}#“Õ!ÁâDêÁÑ{Ô½Kg dÏNðÒ‚VB M²[æçÓE½¤¦sIhC#=öÇxáÅ)Û›“˜øm[¿ƒr¹ÆIÂcSûÀU²§iIúÌ%䄬T¸×yT¸–<\ÞºzuS‚:ÖBÎ*eª¹M;C# \(y…AŽaÙM´Ã³¯!ûtNYÀj!nB/¹2eŒÚ~ü‰'â·¿y žzæ™x‘q¹¼\¿~-1š•7¶}ÇÛã½»ý5Ë[³ÞG*3!ÅÑ«þýêøÔ§?Egè‹s«{µ»$÷fZp¡Fs9&™£øŠðÌ8û· ŒéÓöÚc­ð•W^‰›nº9>õ©OòB}¯zS&ðàƒ‰«®úvW­²F‰†hh ¡ÿ ä@iEt(8‡½ÐÀC4$!@j„»´ñ”ƹX@ÏZ=™Z¥œ‡—¯,´ŽMy¨‹õ¨ —¡nT£r/¼àâxàb$ë³ê¶\ðÒKñW»¼'>wÂçR ­Xóp?Z‘´&&B‰Ç5“)#¡kÔ"±\‘V8 ÉyU•­Sº…Mò¬®-çð! -o®fáqCt…Ú‡®Ý„.À§½Õy…öDP%"Ñ?õÙ/–¾ó0rèAé„,Î ê«…r3f< _}%†m8,ç;¾òòÂ:tpl¿ý©wG¥àHˆåT9x0Äã°9_“ b)u,Õøš5(2~á\hæ‚ÐgáMµŽq^Ý›úÆévƒ_¸î6Û ©ê†oc¥”fÜê^/OS¢Ç›iK¸L[ÔŸÿü¶¸åÇ·Äœ9ÏÇÀ£ÿþÑÒÒ/½ŠÛtykk,]º4–/_ïz׎qè?ï¿£W‹k­ÜyYâÚk¾ßÃÂvûm·å™ À¹UßGZa]ϰ.TB²0߯*(]){À~ýcêÔéñ¡½ÖÎX€­šÉWcki)w Å¸Jkih ¡†ø³k 4{öl0ÈÊy]@à@$dð8ÎÿÁŠ#Zþ̘ɡ,#Mú7]XЄµ6ip€ÄÒeؤ§4lë’xà×ÆØ±cbà  :K—. RÒÄå—_L€~Xúô–&†yº<É€@h)C5á ,â|™‘´m€3Êk!Óý Ð*³ þpx%.˜öD<ÆÚ"\±Ìpý!³;ÈáPü¯eO·¥T fµ%ìÑìa-§¨ð%|.ãR<ðÛc“Gá"&™4M,_ÞóçÏ¡ ŠíÞ±=r.S]܉‘Ž]yŽ;;-ô›p 3£VQjWJR ÓÇËæKŒÏÝÏ™f•5órÍ„Mã‘›¸†¹ }Œ^q)_«×íÑÆ7ο ^ÖGŒÀÍk¥Õ«\Dåøµ–9N|ù•—ãå—^ŽÝvÛ5>÷¹ÏÅ ÁÐn;ðwmüì¶ŸEY`ºözäJ¤]å}®šÂm–nå„Iäç^õfIxÌs !N0½wú´˜ð¡ņn˜úX“_à~pS| e+P»:›÷na<¸³øUW]õ†Y—.z)–x©Ëcø:ûü‹Þ©,—-É! 
Ü`x àã¥c{SŽ·cp×·ŽªËbáâe9Þþƒ‡F[_ç:Ê4^x4Pš3gÌ‚uÇÀͺ3õ¥ºìé‚+ð%^=¯—- Arž˜Ä&\>ÜÐ%¬Eeæãéf4PB Û·¿ýíøÅ}¿ˆÍ6Ý, @ñl€k‹¹àù\€å¦½RÞà iMB,çÙiybŒØÕâ–óù’äºwÎ0Ó^ÆM­¼|rõö“|x5!²*ím- vië:»Ø†+8ç5BAº+3?¡VQÆ&$µ—8æ3 ]¶-Šu¦šià*=Dr<-^œ{ø‘Gb£‘›¤J)EÍJ,[V¹ ž  m¶Û:õ“¼Œ…´DÍ™F‡㬣 6ÝÞìzAŒbv¾f»–VÆn”rFã&$Ò·² ³¹ ;¯¥Mï¹,Ÿú¥í\}jQ,¬†Å«+zwêû¶ŸßS®º:Aj£6ŠAƒå9åH··—Ñk¥Â}EëUpçÒÍy~N´ ·;ó̸kêÔ¸ «ß¸qãbáÂ…qùeW ãQ§v%÷‘œ£Þ¼vÍ|épU-€Âw1ëFõ¼#g tú½wÇ^öZk¼ñÆĤIŸØWu €ÿx"v\}õ‹âŠ=‡ÆÑÓìab<¸äÇñž7.z袺Ká ˜xùÃñãÉ;:@¶7çx‹±­Ýï…3gÄý|™~ü©¹±Ü&øì¾ñ–ñ®÷üUì²í?5Vk[¶à©øÅôûâÑŽv¨5dôÛã}»ï;ŽºZmXh}“§ºp~<ûâ«|AcÇ &­Ûâ7ƉMOYFïý…8ýcÛvÊÕ×¹ÎBµ5ß²sbö+K³ö€‘ccÌÐõE#=GÖxÿçÐ@º€M,LØÐøiqâG¸²töšÊDÛ˜–>#6×}õaí„{mú<-E™/s‚… É~•p‡]÷•¹©ð7pÀÂÇo!dé’¥1fÌØøì±ÇÊœ¸aëbªòA¯¥Íˆ×*€ª»3]Z~r•Îiå[Š #‚ ª(Ö‘+]†´e„±îOSß9×b\£è g„¹Š‰°¡»‘úF¨æ²rÃlE£…{4/·°Ö1~ÀC‡:‚ç¸KλëWŽ™O=ËëàƒÓg@ŠÄe­¯Å¼ç_ˆáXÔÞñ¶m€Má¯7¤ƒnhGyÓ2Ëí °+kõ¢‘ØDÕ®…¬ Xü¤´\j5÷Ÿ)e(H»Zò¼¦¼bñÍîVŠö鑱¢[êV˜SXfY]ãå–ÿ8®ÅJ·éر1œ` ­vÞ#n¬}½YÇúñ ÊëKÓ-[¶,°6§[ØÞÆŽ“D]zÙ¥Ü'‚&Nà•ÜG곌¡_u‚nŒfnEZ°›xElîÚA^‡þ-ýãîéScϽ˜W‡Üçæ¸ üø 宾úê×Ù¸(¾sÀÐ8껚¿\øãøë Vèö/îÀ¢G®ˆ¡ãN¹'~óÁøññï©á/¼K-2QŸ=cƒzÓfm„«ý²xfÜxù¥1ý‰—W^eÀqÔI“b·1}=ä«1ãöoÇE·üv¥íŒÞõøÒ'vÁ+-Á‰õMžš¬\÷ù¸ô>g@sÞ±cŸƒèk€¯ï¹Åÿ(N<ÿŽltôÇÇéoßÙA_ç: ÕvÖd|ßuuœó¯;›ðþcâ‚Ã;¾\unì¼…4PzîùY9Ý,ŸôÀAsZÅ„'öåM*>p¥v€I(Ñ"X,_fà ‹¸ƒµÏðÒ2ôá1x+@ì\÷ÝkãŽÿüÏ»éØ”>—,[sç½£7›où6Æ2vÝÞ&yvž[Î'¤-¼¾ …÷P;¯EÚmˆŠ…ÓÀƒ8 ~‘sÝ`¯ zj]ÙÂ’?Dç˜f‘úB%mrN±ó2;î¸t÷êöÍ(júX²äµØ|ó-°f} «à¦yŸÕM5dqæ%:úxðÁ¸å'·$ô ÆrËKCŸ¼ò/ž{nV\rÙ·ÔÁ#….êï#ŭ˚èšzÕœš0Zâ~Qns2(®'î)ÛB€ÊÝS§ÅÞ{¯=~ÿû7ÄÑGOÎû´@ß¿½—/¿üò¾ 1&#‹;æM®¢p§ø¼ààu^щà×¹ñô¸tz´í4qRüã„£ðæ-‹§þûgqÑ”;¢óq?nb\xê>+¸?gÞuqœuó£÷×€möŽã?ý‘ØÊ†–Í»oú÷¸á¾™]çw=*.øÄnï;vÖ7y:䊅Äé'_jjÖ®³×#kW_×׹α¹³ã[0ã®øúE7GϯVo Zxko¸€ŸÃø%ˆà^Þ¤‘LÓ‘s¿„pH €‡ÇгÁ¦©K9g޲>¬çÄýäÇ?ŒÞü#>ÆÆC€ƒfÜ„?'Í}Ìgbô¨Q4 \aÅdÜÒ…E~¿|²Ãn<Ü e~?© XiÒ%Ë®ifÒ²&!O±n0²qRËP&C¶¾rSOKRÕa¡k"lkš®Ðb94:rlÂä|¿BÔÕšÆÿ¦V€9Õ_™eㄯ\„æ°ÏQ‹ƒv>Ýr¬`O?ýFÁ~Ño ƒî (k‹×øÖý¼ù1fÓMbó±ãÒÊÙ ´š¤!)+3¦2Çè¾iyõ¸ÀÞä¾°¨Ž‘C#ªÞm_(’rš_1!‹Zz…\ÝÀ‚µy i Ç;«í}÷ü"Æm6¨¤N ;í¼süñC胈4\¬Œl´•a1ö޽’ïüo|ÃÞó¾ÊSùŽæÇ,ð²K/CTÿºûÈÜ®©l`Šz¨:'4Ÿ¡ís9‹2^R.C~9¡ºh‹~ÀØ´éÓâo?üáØ`ƒ5·µhüÞ÷¾ŸùÌÑÐR{•/sæÌYiñ–[~ÿôOGä—¡•\­ ü‹ÀÊ#qDËøN‰%?>Çäšn‹ãG§ŸwÔøo§CNÉ»[¡‘Åÿã­µãÛÄ.übl[ï ®>ß8öÜx¢VbÀÆÿ=n¯ 
qƾÝÑY*Ž8ã¼xß(>{:·õMžNÁà£ãäK§ç÷sN¾ãúbÿÃcþ:¸€W5¾9w_gÜðÛ.…Ä6±÷ýâŽéô7°N5oÑ]¢€@­hQ±„™,¤æÄÃo*Ôèöóa\æ¯ñ¥”OZá2X¤Úuc¥J‹Šä-ýô§·Æ7ÞXƒ¿!@m×`“ê}¸OžüÜ‹çÃ\hX K¸ôaÞL›Zm7צ!C8Ë€ ³•à Îy|Ìdã¼î_Ü›–KÌ!ÉR‹•~Nvsþ4’®k,vÎ+4BVÞÈÀ )­€XåÌLÿ¢…)ñ9…áYóSn ûeúÐUî’ÜÓ‡8ñ£ü0î¹ïÞè° ¦Å“ãTÉ_³t_vÂÊØ~k÷‘帤²(òq?!¿nûÉ/ ¨ØsFûæE¤Á²i`¼&Þ>ü8¸_L›:5>¼÷>k €×_ÿ½8†/(K˜£úzl™ûz)À{Øa‡®U`JwV—ƓݿzøqÂ(úgÕ!›Ä;Þ½sì¸ÅðîMöú®£þ³ÔZè#6ß:Þ½ã»c‹Q«@˜Ê¢xòá߯o{2^-¢²î;·Û!¶Û;”¯îÀ"èEÙ~ÝÙ~¿!#bÜÛÞ»¾gëÕ€«Žq­^æ?ùP<ø›ÇâùX¿²É¸x×øwÅvc{ètéüxìñ…¯Ý¿÷¨x8u|BÜ3ãØÅÖÛn·ÅjÈKÅeÇ7N8¿nbóÚvîu^[=˜fþÛéÝæ¿-øï«ãËS:æƒí§^r\Œëøøêv,ŽŸžubÜ:³88zoæ«}lû®ë›<]’E—e7ø9¸ÁkügàÄŸæbã³wìæãb0ã®.œÿÏ£1wI9¶.‰%GÄ;wÞ%Æ íU)u½¸»,fÎø]üþ‰çâ5ž 5Qó·oÛŽé:_\Ùø:„›ñ£Ó÷⛂ÖÝ“NøXlüìOãØs‹/ ìÐÔ[÷µ4ëùÙÀƒ……Ǧñ„ KMNœãÁ*äå„~­b<¸sU ×íº(Wø(– ÔÄ"¬U?¿óçñÝk¯Ëå°†’‚#݇‚X>øå¯jFWbZ—$;!…þµ¶iñìš2¹ 3§—~벬ãÁtŸ#›ðnîH…t½iu楀Ý’+£þi6/à6p©©ý[bêÓâûöÀ.,Ò’ Å:mæMüsàüGnŽ/Œ?¨°:õ"ù„¦Ä”¯[pKô¶½ôØOâøí'öYÿ:ê]¡~%ùÉùñ¹‰'Ç´ÞæØ„®/8ê¾­NŠ»üû¸q—}ãÊîÕ‹wΈ‡oùjìØ;cƺè¥2ûWqÚ1ï³s2ߊ?üœ¸ñÒ“b»ZßõQÍ+–öÈx¦vð,ž3#xtv´Œx[ì²óV=¬r âºS¾÷¥OompYÜ~Ö qËÌBšqû§î¿Uï¢q´ÞÒ±Cœ,nÕÉEë›<ÃXùü¸7žåÄÀˆ½¿pFlûÈwãÒN+gGýâu§‰ÇǧöÙžO“Þ·ß—ŸsÔT¹B¡a;ìdzŒéÑÀºàÊÇ×!DÇ8w=ä ñ‰Ý·ÍÃ}õÛQ¯ñúÖÑ@éùY³1°ðtÕâ€瑉ˆ×êåÉÊ+iT²åBàé[¸9ÙÃ.ãëj׿µÈ]wM‹«§LIøÛ\qZÔ|xû+Ꮍ… Å‚ âe&-›0X‹ÏÚlƒ(ÙûC{Ç?ýó‘X€pEþ­ cMº-«\§WòƒìDLèQ=0Ñ´0å2u5ø‘„'8-O>Ü€Ý×O>þDÌ!¸¡M)[GŒ®æÂ5ì+}B*Z +%]ª›ì=â8hp Ùp)`p‹«s%`#=¦¯×¦˜ûÒ¼Øzó­bò%©­<Î5Ì1@>2— E®@Ö4ʺ9 Ä6A`+ï-£FÔ¾Á%‚3Ç5¯áZv5–É¡©ÛŒu§Êë¥_O=õTŒ$:Y ~mñk±Û_ï?ú÷ XàO×tΤyçNEn»ZD¯ÿîwãÖŸÝãø†m¾À„bŸÆÔ‹R)Ñsàe—^Î;ÆÅ5ó>re¯‹–÷G¿Rœ~øÎ+”™‰‹õ¬n.ÖŠÔì']89¶ªsÁ÷b}ëì¡ñu”Y0ãþxfÈ»cçq«O×sG½Æë[G¥çf’Ÿ.F“ñj13Ê7²÷¡+Ð8·LsK‘š¦<ñ!^ÖêÄŽX•sÄHyòË~_üÍ„¿aÃ6¤OløRD>ô åšly)nÆÞRˆ¬®ú…FáÑeÈ>úÑýã>~#€øÉ…Rù!ƒ ¹Í!ÂBaiÚÌÔ¢éæMK¢© F·2½ ©_ž~úrÎŽáX4ˬ2!{ eiÕÞ´DÚ¥,[ÖDNeÂlu ¸p\=¸Ü^SÉp>sJ;Ê dB8Y‡þ‘2bþ /ÄÛßþŽØhøÈtq›€%aØÒ-ï–&ÔËà­£´k_bŸ.^gZ4d¸Î¨(!3-”^?ú.–Ðp_¸vóW¾òU î_Á©=“?½mË­àS®»@JŸ|5yeŒLÌûèšk®‰Ûo¿÷ñè"èY‹M8P6ÚШ¥ó²K.Éý„jAØ•MŠj΄ 
é§€U]Ú&çÒF®zÂ¥å*Ò2º@mÂ\ް¶Ýu÷±ïßî·Öx-Vìcý,kû..ä^ÇßC°„^rÉ·âˆ#cpöíQÚtß.iÇOŠ[¦œv-KÄý?øzìuô…]ç¿)–\{`—rþíñî÷­¹+-vxÜôËÓãoß½iTÍŠ_ýè’Ø·®þø3î‰ß}õE{@Òž@Ò´ZëO™gNþHl;zƒh]47îû¶•V@IDATþY±ïçºàpÊŒ…qd‡¹Œ:k€NˆÛÎ;!Þϸ¹~~ùéqÐi×ÕzŽ8å¶YqÖ>c;ßÇ:饷ŸºKì{váÄñ'Ä]7~)>¸ÝØ(ãê~ì¿~ÿòÁÃ:áð„[žŽ >º÷ó¢˜=) ÿ+öÅššµÇŸßùùÉ&+ÄÆ¸ý„ªÏ§ÐÃHrv]Šu·ÈEì´ß¤8èÃãcDþ`ª‹ãñ_ý¿¸ôÚé¹Ã!§Åq»wâŸGepµÃÉ]ph+l¸™wxkáMŒýN:/öߪ *Ö7y¿^¦žóÿzÀa;í“>¶{ŒÅå»ðÙßÇ WO‰GÓÊZ(cãωƒ·¯sçêŸÔ;·ÛÄ!“¿zÛˆ(/[¿»ó†˜rG1×.ËŒ;@œ®9–}A^_ç:úëk|ez{]¶{«×8öæÔsg‘6ððÙLº’ €WÄ¢ŠKÓµfÛ2 Ç|pk)Ë-ðøP;šú ZMñ¥“O¤J®aZ$$Ú¢Zn>×k»];ë:X B–é<^ìd]NøÎÍ2>ð.z%õžÁ7!"EYeC· KŸu¬ÙëÒpiUb z»Çgê-e ÌÙcBŸµb•mk1s_næþŒåÁß>Ã&põõ ûT `sε³¼0™NZÈ)$YK7¦ÀfúS„8ƒ´ é¡-Rê q;ÖÄج_e¯°|Üòe^ì´KÎ÷s®Ÿ–±"6ãSÆT×…¶§:I bTmXÈ2%ŽÖ7Æ‘EsìŒ%Ù‚WÓÀeþÇÏ{Bl¼Ñ(¬Ÿ>–ÚYµd~œuÖYÑËe»®_do“œËÏ0çÒx[‹9Ó]®Ñ Ç3Ÿ¡p®»ºã˜°K¥%¯-Éû¨}9ï4×dæ¾+3ƒhÊÈÙÎü>ëåu7P…º ç¼·o¯§©k2¯Õ\¦Î¤Óûì·öø]aŽc.äë €_|I¬Íê$¨ n[ùÀ©gî{6­(;᜘5õ¤¨Ã <>ÿWWÄÆï=º³½›žnk¾à{©ÿÁŽú½Xø¬ôÒ¯.Šï-6GLŠ­WÄvÜ.•g~Þrb¼8>bäž7Å­Ôe­·‡®8"v9úº|×=×ß ‘´óˆ¤íéB®—}ü)·Åƒgí“pegë¦ô}ySì qÏ‚©ñÓýêurø”‡ãÚ#w¬˜—¥ ìé= ¸:ÿî8ö´:ëŒÞ$ÁûoÛù¾cgæÝ×Åù7Ü× yÇ{¾Ž{ÿñ¹Ãß×=‡óöÎbáL Þ;Î;ýcÝÏ÷l„9nw_ü/qãÅ<Ø`ˆòë›<3ndþ[FJwŸÿçÐzà6û_Üû£^?:ëËÑi(ìIýßWŸS~]#Äaï3Î>|…ûpÁ#?Š/_zGg»žvIìU³´öb}ëh¬¯ñu”éíuuÚî­^ãØ›S €™S+‹kã:_M ŸáJBŠW¡ 9G Z‘I·e³_Ê5‘ÿ̧ýg?KDïèM2”35­ùè„>ç¬ÚǺnC‡nßüæEqä‘ÿü†`g¾¹ñçļߴ‚u¢cõ«ñ'`-»kY=¨¬¢~G;«z­âh Ñ•K—2=·çó\»L<-«­ÎÒw×ö¾ôÝÊ,mk0®^õÂ_ÆO>ßë<ç“ΘŸ8hB¼}ËMcøªVö¨ï¿\•.Îøiœ|QÉÙQvÀèq±Ý˜‘ñê‹OÇ3ë|—ØáÀ“⸽¶ê(ª´3?^¬.ÖÍ›ë €ë›<±Šùqõ¸‚{¼KKìÕï:]ßàYàLËö¶P—ŠfÀGÄyǽÏþ>çâ­ÒV1>ÅZٶʶWV±qüM©ÒÌgȵ+ `}hJx jÑ’²´ÄhÖIK ÐŒ´…ZŸtu–Ék'<ñä“€|'Wöp½VÛƒŠÝbó@ñ¦k¯ã\o¯]¥ê÷Ä+磹Öìž{í{OØ“##Å6—_S‚ @ÝaÍΩm‘e-_@ãàh•nD­¡iªò•cZ0w^,Xø"óɆSÀ“[õ#ðµc‘„¬&PæŠÔMòªå%æµn¥µÑ(oÂg7¥ÐæV|L®ì<ÉÍH =p`?Þ;³†d|ŠT¨¢õÐ “tÓ}ޝ…}7A*—ˆÓ"é`©›úS@X÷}U‹_*Ëq’ÌMôô¿þŸÿC:Sûx ÀÙ³âä“OŽ-6Ý<-‘Ž#*3ÑYúÖ©oÅu¼´Ï¦©YA•‘ãmÒ —s0O£2ú£¬_T‡s] &¿ŸÞ\ãÁýÆT,€ûýݾ™ÌÚÚk²iœÂ½| Ÿ{]ð ¿Gu䀋ùK©U uâÙ-È¢›˜·'s§y°f-+×-Å«ªß­±ú7•xòÞÄ%Wþ ¦^wKÝ\Âú2ÅþÚ k¯lå“•€ÖºêE+b}}¸C_=†2ž„̇ð‘øè‡' ¦—ðã•ÈÕ£™¾ßÖÛ%ǽŸäÍÛê²òUω;®»"nùmmÒÅöfÛÇ:æ°ÕÌj¹€»Ïì€ë›<Œu!ëìž\[g·çü?UV€{á¼øØ¶]ºó|ýöÈu$Ý®­”Ò1în9Gï‡ }ÿ^­´ÙVÄS°"&’ëZ¤/ëëœm®j|ÙïJ~­ªí•Tk~“j€ gylc!ÚuÝñÎ5t 
݈>HÓL&hÔ1anÑÂdª–twòö!¬¥ì7?7Ýøƒè?€å¾4Yõ²-o]΃Ý~|Ä\¤‹8a³8TÔâ \EÙ®2ž@bÝ]vabö¾A ¬ ÊžJ8àÔÕ- ieŽc „A Væ86+䨛}pXëgYK®ÎœÛ賡tÞœã•Å/b…kTÞK«5óàɃZ×0¹Ñ:º(‹XH_WZäÔÕ‚ILà¬ÌÎË Û\^Ŭ2v̦ÀP%EŠQ¶ZÂÛVç4oÊ«A1Óø@„˜3ý¼vÍèC:4ñ³cµ¬ék2]NŠh{Ì”…8k–to7en¾Y@ß@—…bsY· »Oˆ&~4-ml¦ÿÁÙ„µÔ‹´²ûIÃa#(å 8iåZ¯ì>¢iK!§VJêä—¥ÓZêoûòVñº±tﵑ¥…d×nvÎç –Ü»kÚÔøÈ~Yk¼úê)ñùÏŸ¯¼²×}ÛpáqÁÆ'>qÔ€±–î.Åܾ gÜS¿ºçÊïÅZuõ{Ρ[yCõg^ŠïMÞ#»²p-ןémÿÏ€uãZ½t ]eþCqþWNŒ“¯œÖÛpòØÄsn‹NÚ§+¨Æ£¯>uû7âÜ[ž(ú½G|íôƒ{w_ò­òþ+NŒk[Ë_¹Ã!qÉq»û—Ø#dI¢Oíž$ºh½î÷B’OŸ\K>=ŒdÐÿÖ™ z}“G¡W5?®; ®n°ÝvŸÂõ}n×÷âDX_TÌíÆê(g÷²:Jg#+™oÙˆõuÎvW5¾Î¾{ÙYUÛ½Tizk D.:§bºFÙI`àr~ŸV@ÜùãÄ~°ºÚ2E`NüÇŠ+ŽG0ÿ™¸/’>ðPÜü£& 5uX)!ï@bÄñ¤IGÇÛ¶Þ Ža$œà¤,â® Õf„„oÜ ùt³êÌ€ Ëó°_ŽKi®%h€ó¤'aN»Ô#péZǵuÙ_§æ+݈ZǺ£¡°Ð(ï3¡"P®,éÚ61vF#@–€B-~ rK:+ÝËåâЫÐõꫯÅ33gÆ"UMs’¤¾ÖÇ¥Þ2Üè£6#†p ÅT üÓeݪ|º@ úàœ®R—ÉØ«²·u:t}OLu) „êUÍ¥ó8¤»9åF†*QÈå~ÊRŠ{§ÛÉág 7­¬¯²ªÇ9çœý(ã­¢S]+W3GÛÛ}ÔŒ¾Vv™äº·û¨ýè.·]‘ÚôDд›¿ 2YÆÉ@¼4^ÏæJc*lV H=pЀ˜~ÇÔØ÷ïö[k¼êª«ã _ø²Ü]KYYâs'[l¾ ÇxªÛ›¥}­Q€2R¯A‚_™‚×eXÔˆ ¬J¡5ΤĂjZÐTS¬@ ´®Õh彩oÊÀ†¿¨p.Ù‡º&$v$€’,—y (¨4ëâød½8úô `BÅ`’Ñ«Ôñ°#ѨVAŽÚψáÔU™Õ>ÆŸžx2úãJ6dz¥<2ZµÊ‘$z̈bèÈá|Ø€‚Ƭ9[2Oãù|#þû¿Ö—“Ûm×]ãÄ“¿”÷QF«/ÆBÅXj÷Q¦Ž¡ƒœ†@Ë­Ì`eʪêdþF‚í™ãŒÂ²\³Á†Ä]ÓÛïïÖ¿ýí«â‹_üIË_1bX|ãçǧ>õÉ7ÌH–8µeË(°.⌻fÅW÷ì™}kkŸ«ñÊáSfĵGnÇñÙq&ùþN«´N¸é‰¸àÀ­­Ðc«/GPÆB‚26¨OMsx<ÜzmìXa·ºõéXzZÊV/àZàºêÅ\3žöë_´ Ú,¶ÛºíºÆÆz¿°ÞoÂ7ij’¦¦ÃuÜÍØó\W }îÕƒÛ6OŠ/î³ÕJË/øÍuñå+ï+Î÷°†-{ ËÔ¹ÅW&Ò¬<4Ùµô%‡tÏ)¸¾ÉSŸG±·ù*£™DÙÝÛC“õ–ët•WçÄÇžÖ‘fû¨3â»õLFdc݃HêW\é Äú<ÇüÆû˜ßØc+¼í«í 7¼é5PâOð ]Ö‚"ä p‹`£åKÜI—€eÂa%RцK® ŒhpŽÏdæ_ñT¦­:Ö,wM»+þóŽ;;-Zí€@¾zêWbÓÍ7KJ9d1\œ¹Ä˜Ö9ÝÌ‚ lº$èhzhÓ-¸Å 8ç÷¥õ"ÎYDÎ’›Ž±¢‹0Y@ÉÔ)´YØ5)V' Iµï@:cT€UÇÚ»‚G®¬|>Øè gc™Ó…êRj®ªæȂeÊÍñW¿=ö{ô0(FŒ$a¨ÀJ/öïÿe¯.Š ‡ˆ‘o­ËŠeÑ”‘AôŒUO·o%4!—NçÒ¥œtS"Z;Ó¨®¶)„ JŒÛhÙ6röå83õÊRdR”PE;€`•`¡Û<~/¿òRœ{Î×ÜÒ‚‹^^zù¥\½ãC{(9ô0–[«™†´Øõ¸æ¼8?¾InFWû;vlôg¾æR vÎìÙW\Mÿ-± —}_÷Q² ÕX˜˜zÇ£5Wpú¤tÆãÖĽç}'TÆ8 ðþÿ.ƒ’²ÀüÒøïÿþí8ñÄ/’lü¥5¨¹ò¢#±êžG:OúSoÒýcß›ÛveM qÓŒÆÛÕË¢¸ýÜ#bß“;  ;P=yó䨿 ŽúãcÊ/ïŒ#ÿºûCî!VûØ¥sµSXuã,VÝXW04Ž®5{ÆmOÇW÷Ù¢›BžüÉ™¬PrZç±?®£^*¸Æ[pÛ„¸kÞÔØ³»Jbþ½ÅƬåF¬OAc¥EXf‡vXf'’GðÇ+ä$áRÜ~ñEqË£p GœôÅxßsÛLæžÕ1pÛïø¯ÆþÛè8ÝùZÿ›8ç´+‹HUŽ؉ÔÉEjQ¨;œDlÇœóÅÀÃÙµ:7žyFd:½<º 
ôÅnó×7yVg~\wd`£÷ˆÓN98êÔŒ›÷þ8û¬kkɲñ&³ÈÙu«t›ûÃâÀ“N‰½¶ªWÞâ¸ÿº¯Çµµ:é¶‚J_ Ö×¹Õ_×\q¯¯¶W,Ý8òf×8“¼ÎZ¸x ê"JL©bþµ|¨rHS˜$89G-‘ —O®QëÃÖ³”g<[á,°èÈÍùrýʹ2Ä´éÓX${ÍÑPm«qs 0¿üå/³öíXÄh&·Ɇ[3M€0-WòŠò4#£sû2¡3}!2rè–J€˜sšÕ@ç,æ1u¦u‹)§5¤?š¸RyiŽ×–¿3~?#ú1þ‘#Y2Ì'ÔwÛ«KŨ 7"Ïàˆ¨âöMë–I9G0]´c0¸E8 u@{mö´°r.SøÐ¦ç‡ÇÆVæf&sÕÂê¯úÌ-ˆÊÌÝó:Ýÿ‹_ÄÏnýY ÉAè½»BɬY³²Ý}÷Ý7v{ïn1v“ͺh‰×^m?.çž_Ü¿à¿K°åšdÍcåX.Ã*xö¿…pöˆ£xD*FÂPzÞGiUF_~ÅóÚqñ§Î½ó:qÕ(TaÀY–kîÐwÞ}gLüÈGׯ¼òßãK_:\@‹ë¾ü¿þõórì72tT‹Ï·lÖ‰|øSâ°]7!­Ðóñ³ Šú†Î€å{XË<<ñ”Ëãè}wˆ˜ûhüÇ¥Gw«_o%üÕ¹{Æ{OžÖÑR~ÆåqЮ[05äÕ¸çò3YC·fZ¬•øsàºéevœZÚ´Ó²1!ΘrBðÞw’ksvüòÇWÆQu«¬:°Ì–º,³h4¾yýÇc£~›Ä¾î™®âú´êYa%9ËŒÞi箱 ßÅ*èø÷ß7O¯[…@9ŠÀÝFå×(«äÖm.[{òñxÏØ!QyõÉøïÞ3kó=Ý«¥k½’§Î%ÝÃâ™Ã«ýZó8cŸø¡Ød Stf>·Ü÷D]•a1ékgÇÎõœ]}*.>öܨ×ò¸]÷‹=¶cŦÖãÛo'^îj¢> Gë¯óhV{9½nµ—•Ÿ[½ñuõºâÞÊÛ^±lãÈ›_¤yŽœ@ÆÓ3™HòAë$}-gäb°¶?º*µÊèäÄLD9†‡x0Ñý&i‚ÑMk„­«S‚ÍZâÿÝòÿâþûï[aB~+sîæ‘be8Ö‘;ýÌMÁP$ÈhdäÐ1 AÀ(ô wtƒ,ŒX†R˜/%ÓÕpL×myê)êz8hÃr)`™ž¥‚Œì‡€•ÎV¬„Ê ,Ñ$ºV®ëÍ C–3Ý©º% iÖê…L® ÂÀqe:n£t)Ç9º.ËñÐóÍ1‚ ðýp+knX±¾¶,†n8ˆ57#àcÍ!¿pKûæñÓŠ›¡[>à‚c­ƹêíëô͹‚)}iSÌþ‘Ut<š+]gW£Ÿ2•t3.ªp™Ò† žÕOV"bš}e¿á†â‘ÿy$á]™µ.%HE4:X·®®Þ~Ìë3Ç›–¾ X1e£6JpìG’põ'<.%Ü;ß1ï#×^ûÈ/Îtb-†Z‡±o¦Å—7\#®5j5͹ËXKû³îîÔ;ïd™ÀµÀ+®¸2N:éKñ ¯n´Ñˆ8÷ܯÇäÉ“^ `—µmbü’´(ÝéoäBÍ¿7ŽØøƒq­­×ˆUË¿ôPL±KtØWÖÄ„Sn‰[ÏBÇ=Ÿ:¾|vœêx=á›çÄSŸ;9]¥k€}Ž·£ñzK[Ïyx–Y½,zìæø›ík2ýõözø”Xpí‘]óÿje¹â€ßaí¬7k75¯]ýÚÓÃÞϼ±Ã»ÏQ«Î¹?¾rF-µHg+ßÙcÒqðÎ=L•µâ½åðë­¥È%8y¯­z;ë<«1ÿÏÔàèm¶‰¹OÔÃÞŠCÜïø¯õje¿‰³¾Üee]±fqdôûˆSkº¶úë¼Ú¸šãëêe޾ú]±tãÈ›]DÏ”ˆ'ø“d¶qÉC5ÝŽ€©vÅ=¬H­|ôœh0A>t]I#SðN@¢¼ósî ÑÄÛ:-ý[â?þã‚C~¹Âœ,B«Ò…¸?SÎU% 4ѽ)Tå^ ˆ:Ó¬ÐG¦Aþbu €@h«$s.|à²uB`Z C­D@‘a`Î+ó‹1mTœ[çø¬´Î‰¹ M/e…&†ÈÈD¸E?oÍ‚Ç`‰1£“æ»'~NˆËïº.&ï¹0&—¶O¸ìi)«Ï×7éúqÅ¡ÎKt«_‰c%㵩m&³äZJ|øõ±ðÚC»æáyÞmôR™ÿH\uÁ¹½Œv'SN:)ßgG>¹zۖƯ¾wQœrØÉ1­ótû·ëÕ_9#ŠøŒa1ñ §Ä>ÛÖ»k•ÏŒ»þãqs7KUgƒ¹3z‡½ã°Ã&ƶ.ô±UÏ·¿uiÔ¥ ì*úÀcŽŽ½¶­7uîÜ[äYùÊ[€&wÞ}éÔ8›ÄÚu»Ö€q»Æ'>}Xì8ªÝ:G\ì,›·_?%nùõÌ'x;l›8ðÐCc¯ëÖa®•ªÏ%Øs.çÊέîøV¤ëȲÇ'œkÇ2ƒ§ö²Ì`WéÆÞ›]¥gžz ´(aÙª|ºLsFŸ.ž¥ùʃZ«žV/°ZÇ*ø3 àá4褅Æ2ž8([¶Ž¢ÛŽ•FBä?üa<øàƒÝ,ʼyóãìsÏŽþ¬à 5Ñ5xË@UW+¡ðð‚)Ì`‡&öï' ¦û= WœÃ\¸<­€‘À8QŽK6pPN),2[e~¡©b¬ ;$ӯ͂œV'«B…sÖL–lä¬E÷…8jtb* ¬`šK(æ 
ò6CÉ3~ÿû6|k&Ó-:sò5àÏ`±ãÆÆr,jn®:"ä8Ï0£wŸVNÁ¦¬‹Ö¾¼^W&…Æ$(¬»|ŸéS¼^){QA®E>A×6i'Óâp}„WŽéúnCö6 Ñ8ÎŒôvÈyøgÌαë÷îé÷ÄÿyG‚ŸA=«³Ùž€8‚hâüÇCcÓ±cXãXýðº¦÷QZ(¹fÍŒÕùŠÞx¨ŒëÂu÷^So^3ÜïC˜ky×ÔiD¯=^~ù$À>‰€¥×GI*àáèɯƒpu®@­ÌÒE1‹m”FëÂ…ƒFÄØQõæÂU´UY/QÉÂ×2bÐÐÑ1jx§Íoå•éwö‚E1¿Çt»ñccƒÞÉhåm¼‘gÖE/èdþK âµ× A¥CG “ÕÕi%–.Z‚.[ø2<€/Mõƒ$ T®fÔŸ5Èë÷²O®Ð9³Ÿçç/Š2V÷%/¡k‚È6Ý|ó1xU•»··`ÎS¤¦šF ¥ó_âu«xûV¬Þ½XßïþåYÝùqõØ•¼…¯Äk|VqY£L¾ÎQCûH±ÓS Œ{Áâרȳ‹`¹èOýkP¿g{½¼_ÝñõRµq¨¡^5Pzöé§q0W}Z„ø­%Lœæ¦ûѹgÅÚ­öõP„r€Çè‹¿¹³ž%¯-&7àDŸ¯á 5Haìæ›ð‘ ¦2u,ÏàQ¼§¬ö¿T¿Œ‚ÖÊF¼P]ÀŸ+z´a’Ìyty€=D2Vw­ãµ¾Kãa:,ræ!¯séZÐs6=^¬\Ây`7çr\6.­´ýËýâå…‹â6æþÏÿüOs¥F¼†é&W^ OW0£'‚x`|ðƒŒ¿ùà¹ýrQŒkmï£Ìkȵq*BZ` ½ùÅ{ÆjÔþƒ:ð£km¼ì²Ëã_þådð¯ì:o.¯÷ÿï9ñ™ÏýçÀu–¼Ñ@C볘ב¬zX׊½IÜ;öVr}:¶úã[Ÿ¤nȲ~k ôô³Dc5qÊ–'Á/ï#`)B2ÄÎC€°ˆªÂOúƒ¾'JX­ØäZ€ ¦€`—I‹Œ©JÌØŒˆc-ýÆwþ<~ý«_“Èyy¼ëïŠ~쀬'¼´ft(5¿~)m‚LºuurZ XW ÖG­mB‚ÒMŠÐ  mIEr°ܤÕS€åGë‘+ QÅJHÀ„%‘q‚]éš­æxK­‹#ûiuR[ô#•:ÆÖ°"ýÅrSÿ˜=kN,Z¾$­ˆC‡ QÌ‘s¾Ed-wâ.uq].ìL¿Ü‚sŒ ™$L "N*/ ¬,pަU2ëÊÖk ‹nä̧¨¬œNW0€äõ!&DîÏ2^s­…B®A)‚qíHÜ0}:6úcÍçVT1)´ní'<æÎËRi ©ÏXYÙd8É«7»i¼cûícüøw¢W Dݲhµ„óýu¸tÉ{•ÚеæÜ2òåm~åÿŸ½3’¥*Û|Vw_/—åõ Ã'ÐPÁ…PCpô3Td܆ác$ —p7ÂýC p @$77D!pEQT¼îÈ]XD÷ÛÝ5ÏïÍ›ýegçrªººoQõ$ô­¬Ì³>çTž_¾g ðÁÊ6+YT%oƒÆî1õC4¡U ¢î@¬Rts21BP¢i":Q¬ï@¶€ K_Â’(3RŒ©“•ŠîÛÙ{Èò¤û ³ìòÚY ט ¡Oe)ºz3º@gñ‡ Fßé²J âr8Œ‰.0(V6Ì^òGg­ð'€tÑ%ŒÅq†´„3º‘?Ö­ë)ÿ±<Œ‚bye%ò¨« Ws¥uŸýycœ^LH!*)&w@a€9éUÞéWôa‰#͸ÁBÉ$E"ù˨T–Ú>–RÝ£ R—k­tÆÒ2Ȟܭ²G*l…¡º@º”ÕÐ?¾ ÉQ¬‰¨ôÑ•ÉLëmƒÇÝãVöÜK Aý²ìð#†³*«>¬€°VÀ L¤¸Ucüiø±VX~€YÐd= .ÌbåS£OW.àîLtWbâ¿ Aa ¹ ¸›Tèj@bL>P¿'@] ÁU‚¼´T`t‡bpý€©ç%äbgÜ>{úF×©ÂÆH÷䢺êzê2fœÓI%”ƽ±UØœÒÌ.‘^Q“U‚(]ÅãíDgŒ?¤Ë”mÓ°ˆÅLf,cJ'Ýžt;]±‹°h Ћíç@%4T>b¯¾ɱ<Œ¬˜€acq“¹UZ£ËZšìŒ—Ba–$T^ÀDœ—ÝÄÑA-«›ÀKqJ¦°þ¡ ýáXps'|Â{4xbI|©LéB²#¹Þß±¯ÌˆŠò#ÎøéSçl+· ¤f,÷5îsÜêÑ¿±àeêþ[Uš>¬€°VÀ ,)ÐÛzÓ6‘I0(#jþëXb„ž`þjû `žô/„#À …Õ ŒŠ=fe¥c\ [­Å¬\ÜÈ Å–iÂ…#ø ¸`<à!(aŒ,KÂŒè&¥ÛÐpÄy9É |‰+ÒË¿X%ň8åAôÊõ  S›Â'Žœ tk§>€+Í%UZ…Œ±¤ˆÜ éŽnpùõ` -ºtc)@€¿°ºá†ô •"\…#¸ ¸4†Qð§¡Š22¢°ÇAÎÂÖ:—Ž’+ò³ÑIYÞ ÷t,ψ¶3øŽÝB˜àô‘~&®°üMî$pºAéb,e^6²ú ¶Ù°Æ"T^.‚KÒIº¥O¾µ"7¤;vêPØùØB&ç(H@Yÿ²ôãÑKA(Lɪ4K=Úk¯=³¯õ²ìÿñ5ãüJ +`¬€°VzÛedM?ÖËc\–¾‡˜@#pDÍ}À ˾Ìo€À˜Œ †_ÿc§‹õÿ &8`cFp,Ó"HÈæéîŒ0 ºqå u4N®d¸ÂBˆE2–62 »€T¥I]Í3ê2ň¥Nˆ%€T ݨ 
H¾œ‹îc1cV/,—°R,£$”ÿX O5GB”B Arò¨I#iÆ„˜²À%]Ó=9‘µPŽ•T|sŽõ3Ÿ0¢L!Ztõ ¶ÐCéïaaÓu¼¡!³kÅoÕ|²~!“._ Òýh”ÎÇ6 P±¨TËYô/}ò«±1ÉóV]]Ãj*êSÐT¥ø£µ5'˜¼‚–±d ™’3`v^åO¾ñFUˆq”úŽãq«G{ý·=³Ë¾qyvè3ž–ýÛ^5Ûi)ç>¬€°VÀ L£²n—q +È€1vX}[jò¹½Àô³nç2°PÝ r·°Cpʼn8@w‚‘B%üP@ ‘€%Œpò\4=ÆÕå ¢nT–Ñ Yœf5–oQ Bg3ëÓ6°dÑUÚc à²¯ˆ4æ7h0"Î)>‚r€°€A©4áò³ è‚ä”cÖÔ”Q\ vå_Žwèúœ€KŒ,ÿ‚TÅ;Ř;…'Ù"þØ‚ P#fÌ{:.u¬t€kP¤ÎViÓ“U/â÷ÀLEÍĺ¥±xÆâËŠ”­ø˜õÛ—‰¥f°Àíy±‹À1$ -€Câg±ný«?ÆFJ#¹‹I4tÙ*„`ÇPDÀ§°IZ^ê„§3Õ ,žtgc9dɺÐY£ÊA9è…O >e1^õhÍþñu?ÌþǾÈöÛo?¥W öa¬€°VÀ d±`±s"%–qéi,ݧsÿ¼»Q´#˜‰ñtì"Án‚‹°âaqŠÛr+“a¾N:UÅ-@ @‡ëÐûúÆø5ùE„5kFÀµC,ÑkIȺ¿Y¦eY¦hÊ"³S£¯ÿ ˆ „ Š‰êR¥Jà‡îR2 %,c“’£ ‹îlº° †ÍKš0¦ 0ã/f@£‰ÂÑÝ|&o$Œ•þŽüç¼h…«ébef5Ùd<^LªÁ\ür ¸s@œ×&á,*ꆩMz*IÁcò£°°–r!@‹²RúcÖµ€8h&}ò$Ï J »npu™ ÂxF "0Uº(fü(\&rä:È’ŠS(ùŒg DJ,¡a!V¡©ÈãUöÐBã¿¿óÎlûMÛ²ÿ_ÿîu÷¨v>¬€°VÀ HÞ–›¶hr¯,>lî+`m˜Y "*¦b–,3]éd YrŽEJþ<ÖÐclj¢ë7܉MØ-Œ_òßÇŒÔLŠr^×°^…ºGru‚ÊÍŒ¬}1®@1f¨YE7ªHGV,ŸXENiŠG·±bPÄ‚Eª¸Cà rËÞÆÄ&ªT,9èå–6E¨€Çx¾XZìvXõ•q…Œd9—EYûœWGÉËÅmdK˵ð˜˜eKWº>ô©‘â=fÜÌ’¥._òk ¸ÈŒ| âäFÈl‡<èè6¦[–.]јüœ ¦®I+4ðÂ#YÄ®d‘®^åUZE×±ò £oP¾éš&]$æ$5I!t`º'âcaoâ¸YfÜêå»û›²ë®ûž¶…Û+{ø£•í¾û*òãà X+`¬Àô*ÐÛ²E³€Õ~c•êç-¼š{A‚Ηº-ØÔfæslˆ{Ù× «ž,@[‚‡€€Cçô‚˜ÍèÆà̲c–"–QÄ4ÆâŤ¨8dá ‘gîë/_0Z`P¦îHÁN@ÎN‹É ÛË¥<螬Y¢˜Ø™CÄD †íϘä3eEs\Ï·FÓ=º^yOtq+^&°.¶˜XB’"? [PG~è6† Y^…]7" (D Ž˜¼AÜ@nŸ¡äHDnÐ ™™Üƒ`±J?⌠:‰1|òƒe€Ô¥CéЀHux“ 2 |JÏ¥k^écmA!ªîiü£4ê)ÏXK¸…½@LI“Y°òe¤ô̪¼X™ÌS>º¿ˆu‘ÿ/%¢±«G7ÜCÝ÷³Ù7oVB³ÿùÀý²ÿ¾÷ÞÙn›6)¨àà X+`¬Àô)ÐÛrã2Þå3`Å+Ñʼni®/‰n`È1v¢€cÂÀòŒ úº8Ò” Žþ˜ÃfE‹ ³ù±´,K¬büš|E8‚>ÆóØ.bIž#\ËÄ`]œäFÀº6«´†ÛèÞ ~‚bŸ¢#ÀJðç¢7àÆT{ê¦e/_x §1!ƒdóŸÜÌ+¡LBÿ&t®{Š3,_ò$“w]çŒôbA•>¤‘ky÷®â×=¬‚€!“VzÒ (¦›=ÔL¨8%­®Ëî%=<(鳑œ‹tíÊ Îè¢î€IÆU’~RÆ31$ºŠU”€:‹:c]ÄÙ×lŽ˜Ø‚[ÅMØ”ËÅÈA”‰èO骾²Í]Àdt;˽@(f¯ãq­Gs›6d»oÜ-»ý¶Û²›nº%ûÓ_ÿ”-ü‹×„<¿dužº%M¨© ä\†¸\¤ (À¯ÏÐrŠzÄ ¿}G㸦Oþ“´´©G¬ßЯPØ»ÿtíKEy'^ èTYT.ùµ(E•é,]ôTWœ).ÊŠ2ÇÒN="d^°s?¯Gò@û‰:­D-Õ#~[á´(LÕ# ”ñ»a'H…K­Xä…ˆ¼è/ ø£Êƺ‘;­í¸‰Aáƒðˆ›ëÈÇ\(|tâ7”!Ë1^˜ëzy™å½ò£|0iŠÉIè‘¿¤*RÅÍ‚ì±íc\WtÄ8éȃ4×=¥*ÊÂõHÚIo×#éàzÄ“@?´ÐoÒÏ£±|õ¶Ü°M›XèÁÈoW0°@¤,Gì¡g®X ’Žz`c9£ã‰’ ¤<>)îÍ€'öðÅ„¤†!&<(ÚÆúÍ)úŒIÀ&áÑ¸à†š¢xˆ›øà¢xžË=QÏ艬vP]ÀúGã 6@i¬H›þ¢! 
VLAžÎòÆLyèi¿»€ù™ Òð°û  §Ç~dŽpvöSeMSx4<ÀVä[ kûÍ+q3X ¤‘hŽ-ïÈ-†ÜjtÓÒ1±†;4²±P´[̶üH¬4û4&„7#€|Iñ+:ŠÍvaüJŒv]zë<FßåPQôB7¬Ç´]Í’ÖÆÏ Cq£´~˜a Tþó®uŠâšU£†EVI‰rˆ™ÎŠ3ÖaTXrx¦@cœdÈ•k<®õˆºµi·ZFù@W•$zaBà·³Rö:"”ÚE™!<§|W=PÙ3‡bÎá[Õ‘ò²v¸Õ=•¢(ê¦Îžt‘¨¨ ðfÀ`ü.t[n¢&*pE¡‡¹_¾C,yø|WÄr¢³€ <<\Ê'_Tÿ”?ÂȯD!“aý ›œÊ-ãbÃõ>Od¤‡9t[]žÄB~«˜Ç”ÅàKݶ¸¤¨é¶&©8Pˆ‘fI#åiHw¸,~ŠJÐ¥kò¨œÆbÌ ¬§@±¥íÀ J9ì¼/Æ#jy+]ÌHޡͬ îŽX˜€&ÑDbÍ?]Ç9 'c¡Œ}l`ÂZzÜVÀ1Ë•îl ÊH'Ý 3‚Ï@N\¢6º¥a.Æøð @'"ÆÎ(ó ²œÅì_]î1(QqÎ tOEnb² ]Ð:W(ð›®XÅÏD ƒI,X–dŠcB‡è5·PÊI¬¯ˆ_!Æ7y¨ñ§J‹üóE÷I>ÿÄx&¬y³Üçܪ²‘Xcç€M G}ç¬Q³wæì‰Œå“îîå¢Ò=#0ÆÊ¨¢“|ÒLäÊ*®Aƒ9`ëñê–áX²*k’0º¡Žë‘ë‘~·ú­úyäç‘Û5=Ý®‰Æ‹z[¶m?ˆ.‚†TH:˜ÀB¿XåfiaÒÐu5÷jáu ÉÒ³(0‰eP=ç …t̓O™í xhð 0®p&ÀHÀ 1tXÔHÆÛE³)‹]©A˜×L銛øØV-(° ‹e0;2s÷×׌f€Ç"ËrÉ8ÁEY¬Ø9ë%ãÚ#HZbŒŸàG¾v¤°íd9”{ö°‘À<â‡,xc8ã¼€”ýyEFò°åº± vXÛ°l Ùiƒ.kÖ”cåhV¾ QùÁz6'wdšmø;Iw:¦?f6Æ8?Ñ5VMîÖÜ‹é-lvWž×˜Mfø¢âL›Šƒ™ÄóúÑÑYÌw2›èj—–Q²öF¡*”å¢ú|ûlÇGHº;ˆDYjòˆt‰‰'š¥ ˆ³žâàš¡œ]\\ô;ÐÂö~ùyävÍíšÚÉhÇÇ”dÜ"ãÝŠ€‘d”¿¹2ÑKT1&vwÌĘ¥›‘î>­m×ß ¨$Ťq ]³Xµ˜¤˜©ûpa‹Yýa<œUcÁ,D8(âDеK*Ð6+ŠÙ©@Êî)A‚:|Y^E<¢‡ŒÒ£“Yíó Ð-œOÄ¥îJ¬†Š8 ¨•7±òðdtoAF!a¨—p 0ÅDÒßÁ5¥‡[Z1T@kÉéÚ`-€W@h*]Xéº $Àt²vÞ,šëÈäí»"pÎ){Œ•dl$æA,‘*™< kß¼4 ‹¥œÌbµ“KRÍú†pÞœ z‡tŸSfÀÙ°.ª¼˜­ÌìR ”‰1;”¯ ò“pDדÒJ:Wçy%,}ÕZ€ÂI¸üTCË…(~âzäzäzÄoÃÏ#?Ô(èqàvÍíÚ8óQïú_i'úÕKE@U€0P`ÕTÅ>°Œ›S3¥PÁ%cÍ8£ñ ÌØm‚®d#$(რHFY[nZL. 
äH8&ètN0È)ˆªt/ï –•«q{%9“[âv°Z)>yfáó?K[äë «dábÏ`Ö\ðÍ|9 9eÀ&î#_ä]–<Kábåc,ap§à\P~°|’¿|í?¬¸°8JŠ–*B…ƒ#@‹ôŤÀ’ø”NAb®¸Èe@˜$¨¯ny&æY±ŽôLZ5Sw†=÷tc‘ñ†Š[W“ Ù%œ8éʃ­Î<ÊâîKf.÷µ°7\= 0*(–¦á@´ Ž1âWYª€ä[5‹eP´Î¥%õ&I×#JÃõÈõÈÏ#?x%v»ÆÑíÚ¸òQoë¯nrì´è©¬¡Y͘yŠUoVðÇx6ý«Ê¬uŽú†eHÅ•LÔúFOËÀÊ8òÍX³9O,5Ö7…Gxi)ÖºL@•@V1NŽîO,‰À\¹”Z¬p13VÄ ôqƒI,,HËg_ð“[Þv&Ý£‚lbÂ/E‡åO€$3ñŠBåÐ#^ƒ B写ä @$·,ËÕú4ŧðb@¥@…HòKw­`M~c6­´eöqŒ‹”;e@:(Í@3]ÆrH§yŒ¿TYôµØrÌV¼1óXRßÚ_E¡ê>ᡇÊ0Ö5Ô9‹MDz1Òz†Ù+ “´-RÆ@2e"€c›?¶„‹4èœñ€ŠEÏ/¹Qzc¦¯¾‡E‘îô%hTz£©!®G®G®G~ùyävÍíÚÝ‚Ž9æ͇°VÀ X+`¬À”( aja³›’ì:›VÀ X+`¬€°@×+`¬€°VÀ L™À)+pg× X+`¬€°@×+`¬€°VÀ L™À)+pg× X+`¬€°@×+`¬€°VÀ L™À)+pg× X+`¬€°@×+`¬€°VÀ L™À)+pg× X+`¬€°@×+`¬€°VÀ L™À)+pg× X+`¬€°@×+`¬€°VÀ L™À)+pg× X+`¬€°@×+`¬€°VÀ L™À)+pg× X+`¬€°@×+`¬€°VÀ L™À)+pg× X+`¬€°@×+`¬€°VÀ L™À)+pg× X+`¬€°@×+`¬€°VÀ L™À)+pg× X+`¬€°@×+`¬€°VÀ L™À)+pg× X+`¬€°@×+`¬€°VÀ L™À)+pg× X+`¬€°@×+`¬€°VÀ L™À)+pg× X+`¬€°@×+`¬€°VÀ L™À)+pg× X+`¬€°@×»­ÿûß³óÏ??»æšk"ûï¿ö’—¼$Ûk¯½î¶yrÂ'[?ÿùÏÙÍ7ßœmß¾=»õÖ[³üãÙÆ³^¯—ÝûÞ÷Îîw¿ûeûì³O¶÷Þ{O¶Î°»\Uàüü|öÆ7¾1ûë_ÿšÍÌ̬*C·ß~{vÚi§eûî»ïR84îÇ|vÿûßéÚjNfgg#ŽÝvÛmY0<ˆßð†7d Iù>>øÁf|¾æ5¯ÉîyÏ{. 
¯éË]wÝ•yä‘Ùá‡ÞäÄ× ¼žøÄ'fßûÞ÷–¹~à˜ýô§?Í6mڴ캿X]¥ÏȯýëÙÉ'Ÿœ]qÅIÉà%æ-oyKö¾0 0É“Y+`P`ÕøÑèo£8¾õ­oe|ðRPÿøÇê³ta•'³öþE]”=ûÙÏ®½ç‹V`=ضm[ö¬g=+ûùÏ>t´¼¿öµ¯Íæææ†Ã­€°+èàøáØ?õÔSû |¨¿'?ùÉý³Î:«õÕW÷ÿð‡?,K‘ºIúßüæ7ûgžyf_ 9Tøx`ÿ¤“Nê_zé¥ýë®»®ÿ¯ýkY|Y\\ìoÞ¼¹ñÅ÷ßúÖ·¶Æó²—½¬Ùe—E8²D…¿ãŽ;®ÕOY›|à+â÷…Á¸à‚ õ`˜][5Pà»ßýîŠ:ú˜Ç<¦¯—Üþm·ÝÖß²eKŸzœò\{Üã׿ãŽ;Ö •Ò XiU eÆ5YÃV<ôÊðS=¤±ÒN8á„ÂûÛßp7h>·nÝÚ¿Ï}î³,.Yýúßÿþ÷ƒª{àWóËw`?Aµ£>º¯n÷FM›n|ö³Ÿ]V6e­o“j¾¾^ Èò·¢~>÷¹ÏíïØ±cE4Œ¤¯®ÞîËušs@ñoûÛ ÿ¾`¬€F‘ àí¶úàjûŽõp<ì°Ã’ãÀR7ìñº×½nY<Ÿþô§[ƒ"mGqÄ2?uyŸv@ÁÊ[X=ÔUÛשV]ën¶à /uaûšX¼ÐЫQýíÓKÒtüéOJzyæ…Ö‡°V` ŒyÃ}üã¿âáW}ß5ˆà|\{íµÉáŸx≇_x(`êÛwÙO‘Çêç4 ü¢½h©üÀa,€m]ÀW]uUQ„þ´ë®ÃHª¿y¾ó\l³à}ìc«õW ëg?ûÙºçÉZ+0y Œ‘èœsÎIzñ`{Ï{Þ3°ªŒác,MõÁX÷‡n]·KW¤@IÙÒø¾÷½¯ËKÜ7¶Ëô©O}jY¹ €T¿,œrÙóRÑžjßµé ´=¸ÇKPÝÁ°“r=n:?öØcë¼ûš°V` Öoºé¦¤¸aMK°$ÅÁ¸=¬ƒŒg,?€ùË_&Ñöð/›V à~ðƒeš¢Ç°¨µÔV„ExŒÛ¬N$J*8;²#P åå´ÉBÍXÀ”1ÔzЃú¸õa¬€Xk€ƒŽÓK…«rFSß–-%SöštþÅ/~q 0xà¦Z €õòÔÕI5«À¦Æ’®z-¶[Ÿ_µk¬€–“ꄸ¦^j¯¿º?¿ä¬q!:x+0% ¬ ¢]Û ýêCm‹K­Wû€¬†»A&°åYyƒ¤Ï¸ò—C£ØÔe?¬°ZFE™ÞÊTûŠ\l{¥·ë®ÀÙgŸÝøÂCÿÄ'>Q›&Ævý^ðo¬•Ï­€P5@Ò¡íºZ„ePûЇ>4PÒ/¼ðÂä°‰‡EXSº«]‹©³‹Äs%€¿”.zÊe˜e`ˆEû«®¨ž\ÔDî*Ú^|OÜ´LÓ¸Öòs’ssÝU%ëx­Àd)°ê½€õ@j<î¼óÎìÑ~t¦ nŠzóÒSö»Td‡~xö¥/}©ðÞù©·æL‹Ng÷½ï}[Ýj‹¦enÔý;Ð^¿¯ýë3Ack²f¯~õ«—¹ùç?ÿ™i–löï|'»þúë3AQÜÀ{#tÐAÙ¦M›–ùö‹ºÃ³Ÿþô§Ù/~ñ‹ì׿þuö—¿ü%›8Ùãxï½÷Îò‡d{ØÃ²}öÙ'ëõzE¥IÙ3žñŒŒ}»Ab&˜_áŒ8ISÛqà 7dûí·ß’Ò®­þ–][ºÙqÂ^­W^yeöãÿ8#ýÔ—™™™È?: G¹î°4{:sýyÏ{^GèƒÝÖ‹J²‡&¨CèOý%?¸Õ¶ˆÙžð„êtWb4™vÈÉ´N¦íÍ¢í¹çžßqî¿ÿþ¡gWX“rŸ2dó·¼å-±O:uóä“OŽ}Í7nÜX›Mʉßy×¾ÁõŽ|q”‡‚ñT?G9ä u¥ÕìnÔ*¾oZ+0U ¬i0J^qÅI ÖÔn`áâAüœç<'iÜ îe©i¨ê¸ÅA»Éï Èd ¢ÈKÊçÓŸþôþ]wÝETI€yÖYg5Æqä‘Gö_ð‚$MØaÒÍ-·ÜÒoêÄœ®¼¦À]feý1Êš-ãšÒA¸ìUÜt¿¸¾š1ŒuBª_?ùyÛÛÞÖ™Þ"ÝÅgùe¤.=M×.¿üòƸøM¾ò•¯¬½O¹ÊÊÝlÐüù(×Y¶kã.ܶ}–ÃXÛÜrï”SNiÌË 7Ž;î¸Îød½M^’jÐøíÞ XéR`ÍP݋ɡ”‡[5<ö®î.ÑôÐæáÎì䦣:sYݤMN¯§ ƒÀ±”5¥³ëzÛ,Âr€¿ºqxèðÍo~sEC‚–e ªK~Û¬Ã6àÕ¸Ê q9OÕsu¥/éˆö€PÊÑÔØzè¡ýíÛ·/Áx-u 7ZN [ò°Ê“üãÿÑG½”¯ª6åï¼Ô0¶µ¼lQù~Êù5×\“œjôzêÂÅ ‡õ­8°¤‘¾:·dÝAþÏ;ï¼Z?uáPOëüã²à~ÿûß÷±ø×ù©»Æ>½Ôkž/Ůү›lTÄÛ4ž¯#õ³ú\«K#×Y‘ 5n»³V`:XsD֦ƶî!§1D­%Qø_tÑnÛ¶-ùAO÷]Ó¡1…Káa7¹mºž€t‡Ñ€Ôå?õZ 
œ6éÞÖàå-ðêÒÓê¿ûÝïúX`´®ó_¾†•àüá¯ø+ƒD“Ö\×»ÞµGª5«É*u•ü×\¯³°Ëq—óVÖ¬|޵íMozÓ’å{©çäØé:€¿W½êUqÑý\=¨Mu½mÈÇ™gžÙO9_]ež²càÊrEmGu˜Æ¨A¬ü\+ç¯|ÞÍméö=+`¬@“뀘Ÿô0ça×6¦†èˆ#ŽX «è2æz´Ò&ëa”—­iZ­¿IÈâz iá‹3W±üæ7¿ésÌ1Kù+»«ž7å£HcÛª~ŠïŸüä' gµŸu[¶~‹Ï6.MÙ­…1lh?ìQ¶Î´m~ÛV]ßþö· gŸ§Ÿ~ú2]×»º2ëÆZ2N”­ð¶k‚¯¢‹Ï”ŠòЋÂ_ñÙÖýÞ4®¯í%‹<¤,óÔeõmÐ"í|v-CUîe¸Gy¤,ÿrõÕW2J‡e¬€è¯ VÇj•¼Õó6ëÒ¶’¥+Z¹+2uçĺ½b« EJƒXW@Æ Uá'fY/¬ÉZEºšÝBï6 FJyU'Ôi‘2µÅDòD¹¶Y”Šô5Yÿ¨O){F3 §¼VÛjÓ_¤«î3E?ÒMþ±bñû¨×^{í2`-Ê¿úy饗V½.ûÞ£ÅËØ2O;¿´ý6Ûüi¦kgÚ)÷¶aÕ»j¾‹ïmë„V“ •åѦ/ˆ]ãnG™‡e¬Àô(°.ˆœmÖƒâ!\|6uŸqÆK cÍÊ3b«ã÷аê>¿ño¬(á‹/¾x)ì6ËÄ • ©Øf…Û¼yóRZêÒϵ&-’SWc³ªðYøMÙÎ à(fKþªŸ)³–UMßÛÛ”Yâ„Qž­¹–éOѯ¨ 7ß|3I[qPÆ)ãyi:R ªÍzªeMës[=*¿ðÕÕáâÚg>󙦤Çõ”Ù¼mÄÊåÐõâÕšš›m/[¤»é7Z”/Y+`R`ݰ­K²xŸùÈGVd¢ú ¬.…Àƒ2uV>ª ܰݿ„™€mVNÂ(78…&ÕÏ.ìJ ¯Ö#ºGUjÜ|O±˜¥äc-jEÆt¡Í2šjEÄòZXIÊø¹º´t]KѲè²ÞuÕÂhK§õ¿èV¶ÆWóÕe=l꺧¦Àk×ï)åÙÓö{*/uÓ´‹G5Ï©ßyEÃòÏ1­3™„ÝY+`†R`ݰmìUùáÇyݽlualPÝì»”Ô„ü”»GÕýK ¤4¶]ݧ] &yhk°HG—°ËJ>ºvÝH˜õÀ.]Ð6e°}1&l-ÓŸ¢é­›|A(Ž”<·`€ˆ¯ú×U—°×W,Âi‹» å϶! M“¡Êþ9gÌdõ(¿x’Ï¶îæªß®ïÕ-^ª˜ïà X+° ¬’™”y<ˆ«Ö%”eë^“…ކ&eà8q°´GqŒªû—ðRÀ© R–„èjt»í®I$)ù˜T¤~h·…~ÓP„¢ÞPNmã0 wÃ~¦`W9°J¾šþš ¬lílòÛµ5Y¢êÂhë~Mù-æ‰'žX+sêdÂ8þøãW„Q~ñä7Á³hTVÓ²]å8ªxްVÖ»º’ÊCm£´TB,˜\¾×Ö0·uñ•Ã(fòUß›àr)1')àÔ€] &ùè@Æñ±ÛC9Ïåó¶1[d1%] V ÀtiÑ!÷À·«Ëy”5©;çţͺ4pð¢iî*‡¶qEž›°j/Ü—?¸¶ƒ±ºmëKm‹›w½Ì–¦0RÆÿy©û[þ4uU·å½í^¹kºm f[¾g¬€Vu@‚”mªx —»?úÑ. 
ÀÐ6æJû‘.¹-ìuŸ4@RµvöoQå£.^®uAÏ(ô°¨.À,iÁòˆ7hË’¬‰Wøiúì€éÒ¢)Ã^O&PÍ3ëízögWRô#]å°LMÉŒÞêË:a«êYþÞõ2“úâX<Èó¦HOùå²ülàyQ7줫 ÛîŸþùKºt•a[8¾g¬€Fu@˜ÒÅøè®ŽìðžOÅÞٶå1Fu€AE',§®¼&pfVÆ‘{ì±K R¡QÛgW£•ë €ƒÔº¼‚m]Ö|µç)ú‘Æ®rX –‡GÔé1ªkmyÀJ_^ÿ³)Îê°†òXÚ¿øÅ}vûhò[\//SÖ­ÉBºš2.€YÔ‡°V`=Xwd¹ŠâaÛõÉøÜMÒ¶dDY´”.#âfgÆzéXm÷/iW,ëSœÓµ™:@¾Ð¨üÙÖhG À¬7’®Q@ 3ÕG9ŒtUý(®r(@£\vÕó&ÀIñ[ k˜ï]y¸ä’K–~§MáWAêï|ç’¬ƒÛJëˆ6…A—?–òõ—Ño¼±Z<«þ^hÛe]uDÀ X+P£Àº`êÛ<hÞúË3±P¥å7ÿ¦}ÝõÕvÿ’¶q@öLÅ K—VŒ¬îvQçŽk]v Àì ¤œR·kÊ;×_ûÚ×Þ-& Ñ–—Õ cJÙC™2ô‹*ÀÕv¤Nð*Æ—ÝS×Yò¨Ú›P§Gñ’Y®»m“TÚÒÜuïÜsÏß q¶ì Ç÷­€°Ã(°îH"˜]~H§ZÊÌÅr¸œ¢û—¼+2žé¤“Nª…>òO#ÄŒGã¶Aû…vwg$Ÿ¬éVäeØÏº™£„=Š£ !méë*‡µ@,XM QB‡"Œ”½‘‹…šËòe?üR^± kØI‘¾A?SxÇwôy)óa¬€Xov –W·5lå{åI!)"•'Ž”Ãi:/7)á7¹7d¢ËûÞ÷¾VÐabÙ“:N® Ç Ù‚ ëL[Þ™¥Y=¦ É;݃)ûÎ6éHáZŒLžqÀAŸÕ:—ò;ì°Öú\-£ê¢ë)ÝÀå0N8á„”¤Ù°Vàn§À.@Td9 «á Ç c GÕýKúÆS¸:k²ÐwÚ°È7ãÆµ tŸ—w–)Â[íç8`jFeAïÒlÐ ¬€°“¨À.@ƽ4MD(¾Åg±hó :Öp”×8`yc¡aõ³Xf§NÓI@@É mƒî›U ЯêV÷}­fq¦Â×ZZS^$ÐdÐ!uõ.åZyrG]Y”¯ñ|©+ç”n`Âyæ3Ÿ¹f“|¨c¬YúÕ¯~µÿµ¯}­ýõ×§dßn¬€°#S`— 9ìÊì¦ó®½N›ÔHk8Ê~ÇÏ9çœN]iÜšÖœd<òÈ#C›SN9¥©Ú,]g!m–"jª—åë]¶è'〩u-F5Œ¢K¢Ô¥‹š^ìR»×ª[û+_ùJmzÜã/']ù÷}+`¬À(Ø¥XÞg³Ü˜–Ïéžv¿UºËËÈ”Ã-ÎGÙýKìj$ÏåðE>«ŸXlÆ«c´FQ©ÛÂ(ʇNRë/mÛê¡íZLL­Óh€¶ƒ?øÁÂ6ˆ¿ îòäjø]ÝÀkeÕ튗tÛX--·V`-Ø¥˜²Ñ{±®×°™¿âŠ+jß¶ j² _EøuŸ]3_S¬.M ³~‹Œuq×ÚÒ€…d½–ÀÉoõ >÷¹ÏÅÚmÕ{«ù^”O[x]ømÖ䦲¨ gkã€]¿¡¢Nñ9_wÝuK¿M¶,L=R†8t­ÝבÃ;iËCjy®Wwz[Z}Ï XÉW`— ò¶uçа®v‹¤®1C£ìþ%?`”Åêy|F  Öâ¾|¤ø%=Œb ¸º#u?å®nÏ”2¦ŒÊG9)]µe¿]çåòI]X¼³i‡™¦±f…¿a?Sô£,»Ê!e˜ºÙàEºËåQ­ËÕï¬Ã—²gîå—_¾ÃX½ºÆ÷V÷.òR|vu;줿î3eÃBÏQ?—êÒãkVÀ L·»±ô½êç+^ñŠ‘,¯Ñ™4Ü)Õ U¤ Õüß»ðöÛoï\¾…°êÆNµâ¿¼ïi‘ÇAÖÅëj¤RæË_þr}”wylhWKOªå3Hø^xam]] kÙIÕ¯ Ï8ãŒÚtõ‘Ï£Ž:ªUÁ¦¼—Ã(ÎÛÆ²ñ{«¦§®¶&F7™ÌÃï·ˆ³ü™ú»nêŽ]Ͱ“¶tw G)ç¡«LÛâñ=+`¬@Š»Û¬ ×\sMJ:Ý4Aæ‰'žØéw©ãﺺ§VÓðwY6Ê ç,}Ë-·ô!  
z¿í{1Hþ _øBÿ¶Ûn[!Uj>ˆƒñt˕ׇlZ¦fED\¨ –Ô[o½5)„º‰A»’“"Ú騼v[9tAl5Ïuau½”¤t»VÃ}ó›ßÜç7L=Àª|ê©§®¨_¤ßÍ0G]y†ÔaMÝÀ«vÒ”—”r(446©èëVÀ ŒJ]€d¤®k-õ->Eˆ&Èìj8SÂ.»ijPЇzñÙµn\êZgoûÛËÑ/ÒÐiöóÀŒF½nÜÒ ÖÈjü€KŒú¨Ó†nèb¼¦øš`v­¶‚K™ÀThö¡}¨)Ù1œ k á4 )( (7YÝŠ´ ò9ÈDœr:Šó&@Þºuká¤õ³îeiÃNš"ÄÈÞÊ>¬€°k©ÀX`85mN?¬ÕF ˜ìçyÁ¬°n´5†4¸W^yå².èßþö·ý³Î:k p°˜Ñ\žÑÊ mqwݻꪫúMÝcM~IG9 E9ÕÁ}Såë]c¸Šðý¬À"^–ˆÁò Cq° ]Ô…›ò']…£B@x”gÙZ޳éœñee`&ÝäåC©M{]8€" \·v^¡]¯€[ÿA®FYç"üA>±J2æ°o—u½~µž¯…Õ¹ˆ3uñ{ tÐ…ï‹8üi¬€HU`,p(?ȱþÜxã©yHrWµàŒªû·^Ë R×y1ÉkÙj¬+Õîò³Ï>{YÃØ•îzè¡KëUõjóÿú׿¾þ(ùrÙ¶…SÜ@×êhÀ"~>!ô(_+Ÿ3ÎíÎ;ïy2S–ð)§£|8;’ ²u9 ÎÛ–"ÃX'«ãøªa´}Ç;ÞѸÑ ‚V_¼}q¨þ~ײîñœ{Ò“žÔX§ ÍÖª zPmíÞ XÉV`,‰ËÀ2è[|JUÇ0ªû7u§„âá^ý,ºïšº©«î›¾×åçꫯNšLì°eVu,V“嫜fcvÌòŲRöWwþœç<'Æ#v…·šûE7°ÍØGº O?ýô$Š4cmkšA½š´á·i\[wÛgyhA×,Ù¶pRh7ˆ¥’:P7qi5š•—çƪ_îÆÿ iÐÛºä“[ý‡Ý[+`RèáHÁ.?¶mÛ–ééŒdOyÊSFž¦óÎ;/{þóŸñ¨!Ê6mÚ4ò8Æ-@-öœÉª‘]zé¥y$dŸì‘|döèG?:{ìcךÒ}óÍ7g‚óL0˜=êQÊ4C9Ûÿý³§>õ©ÙA4†„EZ4)'Óò>å{ì‘=âÈžö´§e~ðƒ›’1²ë‚·Œ?m¿•ÉŠ·,\ê v³È´>]¶yóælï½÷Î4¹%#ë˜|ðÁ¡›,ÔËüMû Ì4{<ûÎw¾“iÀ¨O…np@Ôô»×½îµ&RQžÇsL¦¥l²—¿üåÇ!kiö¾phÿƒFÈ#—ß#Ï£7FC§¿øÅëò4½vo¬Àd*06ˆ¼ÀŠ,uñP\+¹e¹Éæææ²ÙÙÙµŠÂáZ+`¬€°V`¬+k¥œ8+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ LˆÀ )Hgà X+`¬€°© S•²;+`¬€°VÀ Lˆÿ_ÿÿ’eœÄ@IDATí ¬EöÆÏcGE@Va0(.3‚ 2&Äâ8ã  j" aÜPÅQAqwFGApQpcqTpaq‘M…Qÿ(ö¿¾š9mÝ~Ý}·¾÷½{ßWÉ¥—ª:UõëÖþÞ©­Â3AH€H€H€H€j Àó¬ÙP   °(ù" @ #@XÃ8›K$@$@$@€|H€H€H€H † ¬aœÍ%    @¾$@$@$@$PÃPÖ°Îæ’  
ß’$ðã?ÊçŸ.ݺu+Éú³Ò$@$@$P•Š.ÿïÿþO6oÞ,[·n•;vÈwß}'ø˜#4lØPš6m*-Z´Ö­[K»ví¤~ýú‰ñùòË/åŠ+®öíÛKEE…o·mÛ¶2fÌ©U«–/xòꫯÊÃ?,x`0J`÷Ê+¯”C9¤Ro$G`çβråJY°`ÜtÓMrÜqÇÉâÅ‹¥víÚÉBK$@$@$PMBä}úé§²aÃlÒ¤‰4jÔÈŠ¾zõêYÔ{÷îµbð?ÿù|ÿý÷VüuîÜYºtébÓåû<`ó7Þéӧ˼yóRÌÝwß}rÎ9ç¤Üs/6nÜ(¯¿þº\}õÕ²~ýz?ªC‡rë­·ÊÿøG9à€üû}dÕªU2räH¹÷Þ{éªD©p7V¬X!={ö´ 2DæÏŸOX8Ü´L$@$P¦ .×®]+«W¯t³¶iÓÆzõЭ ˆ®;ˆ?WBîÛ·Ï @x ñûúë¯eË–-rÄGH÷îÝyðD|ðÁ)¶Ð½ûÎ;ïÈA”r?xñÀȨQ£äå—_–£¹F›ÿò—¿ÈK/½Déu8 †§$@$@$‚ @ˆ?xl0P¿U«V¶·Aƒ‚._ˆ?ŒÃsÇâ¡ÞðÎáˆ.a|ðÑ}¼mÛ6Y·n}ôщˆ@tG‡ÙëÚµ«¼÷Þ{±]ÎsæÌ‘aÆY±¨Þ¨,˜§M Œ.åý÷ߟ®- À^’ @ &¿øâ yë­·¬— :0Þâݾðø…_°îBèÆGã1qãïŽ9昴^º ½àu”Dº .¸@î¸ãŽÈ:ª|ûí·¥W¯^AÓy_ßpà rùå— Ç¸UFIX™ ï @¶ "á±Ãd ÌèíÔ©“/þÔë—M%Õ¨"pÓ¦Mvæpÿþýc½téÊP8kÖ,;¶öÜ7)$O&f;k;Àb8*Üÿýþdtqb²J>³\1޲Y³f¾˜E—úž={ìuÔ¤Ì̆ðÞo¿ýòbÕÆ°û˜ óÍ7ߨ(ü‘€áøƒ!‚Ðåƒahf“ã   F˜$Ö¬Yã½øâ‹žÇæ±á1‘w°[° Û(#ŸðÉ'Ÿx‰7mÚ4kæ®»î²×¸§?3)$´ˆÇܦ1ÀÐxÜ4BÅ»çž{|[jG3~Ð3Ýã)y·Ó»êª«BÓ#™mì™ePRòD]o©gÆ'z§œrŠgÆ5úÜÌÄ—ûƒ ò¾úê+ßÌSO=å™q‘)i̬gÏLÞñÓ(7·=ʉÂâQw#*}î‰éÚ÷ÆŸR¦Ú¾ýöÛ=#èÜäžùãÂ;òÈ#múáÇ{à†÷!Xo3™Ç3Ëó¤äå À À3•hÀûùçŸ÷>üðCÏxuZAˆ@Ø„m”š.“£ /#FŒH"Ofs%sé òvØaÖ„„"„«æSc–£ñmCØ@lA´i<Ê¿æšk¼k¯½Ö»îºë¼Ý»wûéÃN Ô Š4?Žf¡¹î=÷ÜŒyôL×z¥|nš;ï¼3¥8Ì /¼Ð/Gj"ÄϘ1ÃG;ÂÄë»ï¾ë§1Wbu9ï¼óüû't’}jÛ€°f=@?­[gœ÷ë×/%¯Úà‘H€H€j:Ä Y/Ï{á…<Óí™—@‹z0}°2PV®!(aÂE½K*& ~øá‡”bTÈ…y!bÔñâzÏ`ÄŒ‹L,f™Ûfv²¯®”1(Ë,X홫}ÚxÿÌܶ>¨7D™Æá¡øÌ3ÏØ¶Â{9qâD?í 2xå•Wüø D!ÔT‡ @ˆx-Ñ¢E)­2]¹~Ò,]ºÔw æ¿þúë=Ó}lÓ˜E¢Sò†=#ßOH€H€H †H\«c–R±ÝŽfü[âXa]Á(eå l¹LÆØ±c=·-qÐíf5³‰C«w÷Ýwû"ÂËõ2j½P¶˜“ÕÌžöÅìÌž=»R=Ü: ‹ív<¢'Ÿ|²­'„­ÌJ¿ QPÅtP‚å€l~Í`@<¼wÊ?NBð<©š7¬íÁô¼&  šF q¸xñbâ^$ˆˆ¤lÂ6Ê@Y¹ZaâÅL`ñ„ tQjˆ€f泟oðàÁ•¼šÿÛo¿õ½„°?yòdJCè O?Qš·;âÝæÁ`æöëê¶ÍMgfCûi‚ž4å‡ú‡1t=uAøæ›oZ»¿fV·[¤qo¶zóÎ8ã ëñÕ×nTÛÌÄ¿ÞauS[<’ @M%¸|î¹çìx7x¡ %acêPV®AL”@0³q}¡"ðµ×^³ÅE @ÔGÓš%\bÅ›+®\OŸÖ vr€¨¤ÚŠ/åå–Å@m .I @µU7­cØÑ€Qùõù ÞQm ³Í{$@$@$PS$.Ÿ|òI®àØ·$Â6Ê@Y¹@QâuôèѾ ƒ˜€àÀ$Ì– E=ö˜Ÿãßâ¸"Å+¨õ‚ýr€Q¼¸gJG‡q$@$@$ÄàÓO?m—üˆ?™U-:lcY”•kP¡%ab£wïÞ¾¨ƒ(ÃÌSä €®¨s½zaut»`]O–Ö«\ ËÔmw£°{€aTxH€H€²#¸\¸p¡‘Yè.`ÌúDY¹Zq¶1#B‚,ø v‹ºÀtÞ;-6Ýñ‚îýt6âÚž®›Õ-'ŠÚ@ƒmM—?J¨aL£N.ÁÀÏ>û,®•â¢ìº ]!Õ67=ÏI€H€H ¦H\bñdÌjÅr-…Û(#j¡æL¢ ˜L‚NZH'!H5 D£.MV-_ .–íÞ/6‰$N¨¹Â‹_Ç,̓Iú.ÅÙU;€J‚G  '¸üàƒìBÍøPç2ƒ5¼š¿Ý…MØÆ:r(+× 
B+¸_”½°µõ‚^1ˆR€8b¬`TpEŠ»Ð²Û5œn"I”mÜW‘ÕͪíG=£D°Ú@š`[Ýü—^zi¥ªÀµ Œ»L l¿ÿþû•òãDêஇHŠŠ7I€H€H +‰ @,6ŒuÛà¹)Ä8@Ø„m”²r *`Ü%XÒÙºä’KR^P!¿›kÙ…1€°ñ¿]G ÐÜv`l£.$\€Y½„éê û:%xË-·„šÌT†µÓ]up·‚sE. º‚—/_žR,<}ÅWXÞîR1Ù ÀL~Já¼   2'¸„çgÉ’%vV¬×—¤¶`{¼¢ ”•k˜9s¦Y™n)‡òÜŠÃ`p ×»§u]µj•/$ƒGwЀ8š;w®Íöè£ZZ&<ƒÞ7W`jV®\é×]ÍÚŪñ8º0¸¨µ hÔ?_l÷†½…§L™âÛÖxìµìnç eMƒ=’§Nj·½Ó{A>îŠAa©uwÇbf#ð5?$@$@$Pî€ÍŠ+¬§b$L\d 6` Þ?Øv½B™Ú‚ b(¸ÆßÈ‘#mw2vI0®Â%L"?º¦UÀàˆº#@HiþI“&Ub¡+2];º¡5ñÚˆmòÜ|Ø—×õDb)›SO=ÕOƒú`k7 °¶©'¶ ]‡ç1tèP߆[Îzè!oüøñ)ñ°§‹RCè÷-ÚòÁ³¿ùæ›SlÞvÛm¾MÔÿã?ö»žamÃûÂ@$@$@$ð‚@xé°ç-Dºò >òÈ °›°2² A¯UPp„yëÂÊ€ @^w‹²`:ˆ¥qãÆ¥ˆ-]žq‹Xë ƒÝIÒ·{TËrk×®µåº÷Üsír 77 Ĥ²`]1‰1ŒXFãü°ƒ­Ú\û8‡à ï gÔ̓ô¨Ëüùó+ÙÒt¤ $@$@$@ÿ%PƒùH&Œ§LÌD éÚµ«4oÞ\êÖ­+µk×–ŠŠŠ¬ÊBõL·§(»víãáÓE*棟•¤‘&íÛ·—ýöÛ/Ö48áå·¡U«Vrøá‡[qM·´˜îN›¤E‹R§N¸äU·}ûvÛ><§víÚùuÁó³OÏtKýúõÅx­lß“te0žH€H€Hà7€(ÂÌŠ•uëÖI=¬€ˆ¬U«Öo5ˆ93ž?+, &LתtëÖM:vì“‹Q$@$@$@$@q *Q0D é¶•N:IëÖ­­T¨^=ª3Gð™1k²iÓ&1]”qO“q$@$@$@$‚ @ÔÝ pð¢ëÝ‚ðâñç @€ðú!/<€UÝí›O&!   jO (0¦ÍL³LŠ|Íš5“ÆKƒ ìØ/Mc&wˆ™kÇ¿A ¶mÛÖŽ Ãø0    ü MjU1™cÇŽÖ»g–3{ÓŠCÄCä5jÔHš4ib½„˜ü€É# $@$@$@$@É(ºL®ê´D$@$@$@$  À\¨1 ”0 À~x¬: äB€0jÌC$@$@$@%L€°„«N$@$@$@¹ Ì…ó @  ,á‡Çª“ @.(s¡Æ<$@$@$@$PÂ(Køá±ê$@$@$@$  À\¨…äÙ·oŸ|öÙgÒ¹sç’ß½{/¯_¿¾,Úò¨x‹H€H€j<¢ @ÝxëÖ­vK¸ï¾ûN~üñGû 6l(M›6l׺uëÄ÷Æ>Ã×\sMÞu?~¼tèÐA>øàY¸p¡Ü~ûí¶›6m²ÛØå]H‘ ìÚµKÖ®]+ ,»ï¾»¤ÛRdt,ŽH€H€JŽ@Ñ DÞ§Ÿ~*6l°{ÿb¿_ìû ÑW¯^= nïÞ½Vx``쌽áQëÒ¥‹M—/Ý~øAz÷î-~øa¾¦ä™gž‘'žxBxàßÖhETóæÍý{¥pÏ%»¡TÛâ¶ç$@$@$@áŠ"?ÿüsùè£ä矖–-[ZYݺu¥N:RQQ!µjÕ²µûõ×_Åó‡ð0`€,Y²¤äÛR9³n$@$@$PÕ *!þV¬X!ݺu“V­ZYO[ƒ l—/ļø¹Þ6ü Ñ% ¯ºái[·n}ôÑ9‹@Ôâïå—_–ºÅÚsw¸ˆsèž>òÈ#墋.*;ˆv—‹˜E[H€H€H€ L~ñÅòÖ[oÉÁl»Q1Þâݾðø…_°zðÂ+á…‰#˜zÌ1ÇÈAÌ’öãQŸo¾ùÆ Ò`†L òœyæ™rÔQGÉÅ_lM”“h*§¶Ÿ/¯I€H€H€þK  »7ÞxÃÎèíÔ©“ìñ§^¿là«7PE fÙb.ƪaI¶]É芠™ @LPˆmܸ±->J4íܹÓNfAww»víBË Ö]Þ;vì°¬ –1#:߀çq˜`¹ÿþûKûöí#MFµ%2#H€H€H€J€‰‡5kÖx/¾ø¢gÆîy{öìñŒ/ï2`¶`¶QFÒáñÇ÷Ì´?Óì—Q\pÍc–…ñŒ§Òûä“O¼SN9Å·›°gDq¤=ä»ñÆSò Ÿ™µì½ù曑ùâ"ŒÕ{衇*Ù4]ض>o¿ýv¥ìÚ–`û'L˜àñè™.xûC<ê‡6oÞ¼ÙÖv5GÄ«g³_Ž{iïkº`9~Bž Œ¼B‰ãÁòžþyÏ,µb?üIˆ?­ lALÀ6Ê@YI†| ÄÌý÷ß_IpAáeº¡+U¢ö°Ã³in¹åÏÌ>ön¸á†;fÖq¥|énÜvÛmÖDñšÚäü±e¨Ï /¼PÉD”DB³ä_§‰'zf D+Êg¼–ÞðáÃýxØ7KåøñHƒ`<¹Þ{ï½g(x ­ $@$@$@Å%¸Ü¸q£I 4 M؆xAYI†|  
=3“Ø3këÙªAp©¸C|PÈÁóÏâ-Z”Ò(µ‰c˜Ç.%ƒsï%)D–YÂÆ‰ñ¬ « E @³tŽo/J´™‰:6ÖÙL¶I)W/à!EšI“&é-I€H€H€ŠH qøî»ïzï¼óŽõü˜™¼‰76ÑŒ2PV’! xýõ×{èzuÃsÏ=ç ¹±cǦě]7lÜäÉ“Ý,öv†êç…‡-S¦fÝE? @ $.Ÿ|òIïÛo¿ÍJ0dÛFˆ”²’ Å€ny¯½öš¨a¶¾ó¢~hs6A½w*ÆôøðÃû^È =WjzO³ŒL0iäµ+xaã 5\}õÕvœ Y‹QoñH$@$@$Pd‰ À§Ÿ~Ú ™lÀXˆÙ­—žCtª7΋ï1ñ#8^Ñw·ÖÏÝÊ`ȵÿ‚txM$@$@UC ÅšyâÁ|ìÅLD3Aš7o.uëÖ•ÚµkKEEEVe¡z¦ëWŒ”]»v‰YTXÌLV1‚"+;¥ØÌn³mœO§Ô©SGZµj%mڴɹêFàY;°k<‘R¯^=19iß¾½½Ÿ³á,2â™uïÞ]Œ7UŒ—5‹œLJ$@$@$P(€¨°ñɺuë¤GVBÔ@ÖªU+£öï‘2fÖ¯tëÖM:vì˜Q~&ªzffµœzê©b–ì³øtÕWˆ5   ‚ @ð…4ݶҩS'iݺµ€*Õ¨GuFâ¨âÞ?3¦N6mÚ$f,Å_ ½´f-BùÃþ mÛ¶3¡¥h^ÇBĪ’ @•(¸D«Ð  ºnÑ% / ~®Tñ¯òÂYŽÝ¾UòÔ P¨Ÿ)“&M3¡FN<ñDéß¿¿L™2EÌXC1eÄŒe,@©4I$@$@$ ¢@T ãÐ̺}b–±‚¯Y³fÒ¸qciРԯ_ßÖiÌäÙ³g|ûí·˜ b½GíÚµóÓäÒHæ)<íê –d–½‘E‹ÑûÃk  ¨BE€ÚFLæÀ„x÷ÐEˆI ~‚5’&MšX/a‹-ìäÍËcõ%&Í,d1»†HçΫoÅY3  ¨Š.k ãÑdtÓ›mÞäæ›o¶Ýö˜øqã7Ú®û€$  "@XB‹U%   $P&A‘6H€H€H€H „P–ÐÃbUI€H€H€H €IP¤    (!€%ô°XU   H‚`iƒH€H€H€Jˆ` =,V•H€H€H€’ @˜EÚ    "@XB+ת~ñÅvÛ=ìÁ\ ‹J¯_¿Þî R·nݬ«üá‡ʾ}ûäðÃ÷÷™ÎÚ3dEïØöíÛ¥{÷îÒ°aìò21 @ñ ]êžÀ[·nµ[Â}÷Ýwòã?Ú–ãÃÑ´iSÁp­[·–Bí q饗ڽ†£>Vø vÚi2xðàâ?•²]ºtIÔ«øÖ[o >\cÇŽ­„½wïÞrÉ%—Èïÿ{9äC*ÅW÷aûò¾ýöÛÒ«W¯jYõÏ>ûÌ>c·rx „Ùx-=Ï“³Ï>[f̘aMAŒ,^¼Xj×®íšæyÂfÍš%§Ÿ~ºµŠç¶fÍiÕªU¥Р@’Š"?ÿüsùè£ä矖–-[Z¯ºöêÔ©c»èjÕªeÛô믿Z¯ÒÂ+„n%¤=ôÐC¥cÇŽI¶ÝÚZ½zµy䑾ݓN:Ižzê)[7ÿf žüôÓOrÑEÉ]wÝek_ âݲe‹ôïßßvÿæ"aí3fŒ5yå•WÊ5×\£æy,ü1Õ§Ok½k×®òÁ”ü?BE³$@$Pm\¢["«mÛ¶Ò¦MëÕƒg¢žˆ?WB¢ ÝÅø}ýõ×qpÄGØ1FIÒƒPÂÇkÕªUÖì½÷Þ+çž{n’EäeëÕW_µ]¸¹te>ùä“ò§?ýÉ–_ ÂÀ€¶½¹ @4öwÞ¼G={ö¤÷/¯·/óÌw¹mÛ6Û¼ÿþûgž‘)I€H€ª„@A Äߊ+¤[·n¶Kݽ 4°]¾•é£?ˆ@t C ¡û—uëÖÙ š'‚pÚ´i2zôè¤ÌçeÂ÷¨£Ž’‰'æT§9sæÈ°aÃlJA¢¢ãÆ“©S§J>0/èÌL$@$@5€@Á &Q kèàƒ¶:0ÞâݾðøAüň@xqà‚Hø@LÁìÐcŽ9F:è ¸ìÇUWèzÃr¥€¿LH$@$@5Š@A „…¤<€ð £¾˜I0¬“P'0Á»…á íÛ·Ê–rïúæÍ›ý嘽®öRæpnàO8þ`B];tè‘¥|ê•O¹A¦ÁÊbL/ž½¾;¨§¾`ž»`¹¼& 0+ñ`fz/¾ø¢gº0½={öxFÐä]lÀlÂ6ÊH"˜g&x‹ý±•bö‚ .ðãÆ&oçÎ~#ÔRâ‘æ¾ûîóãõÄx0½Ë.»¬RZ3æ-Ŧ ž™…\)Öoøðáž7j6öhfûvÀË|ؽ &ø÷ÔæôéÓ#í,_¾Ü;ì°Ã*åAÞÉ“'ÇÖÅtÛ{ãÇÍ{ûí·{f|g¥r•w3êmÆ–yf¹ûC<ê`QÊóøê«¯¼|Ð3žgû\ƒïžT6¾_¿~ÖÞ)#J¼àsìÛ·¯÷å—_VªŸÞ0cT=ÓU]©m¨ã 7Üà™qˆ6?lÄÙQ{îïÊ”)S*ÙF{Ñn3¡ÊMžržO½r-ïì³Ï>ë 2Ä2uÿû@åvìØá™‰UÞ™gžiÛdz<ãÝ÷nºé¦”6‚ݲeËRÚà   €ç"Ñ€úóÏ?ï™AáVp?Àù[1°2ÂD¶öÓ @ØÃÇ\ÅRP˜ Nf“X‰2„l@ª€3kÞÙ{° a‚`ºÎ½Ë/¿Ü -sàÀV\{íµž™¤b?ž6qš\xçwF 9”ó 
/T²†ºÖuÅGÁx&ýû°Þ}÷]? Ú 1h±Ü|ó;ý`›ÐfÔÇ-çf¢Œñà‚÷Ê>”‹ç‹4x Ø5˜®Z_¼!¼áùzâ‰'2ö‚»¢{îܹjÚÁ@ëfÖÞK‰ÃE>õÊ¥\ˆZüq¤ï°2qÅî?üà™É`•„¢¾‹`!÷HÛõGE¥ó @΀ðüࣀ®5WPä\Ã@FØ„m”²ò ™@”%L´|Wl ÙÃ~Üð‘ÃÑ ø¢ :(„âì¹ùãÎ]ð&ˆxÑnPÈ¡;P»~ÃDoW,O4^º`@<¼fZn°ÝavU´¡Žð¶Å3IÈ·€ÈçŠWÔ!¬Ûí&zFñ¾(— ‡ö]O¤YN·² ú® n·ÜrKJ^ˆ\å|Çò­W®å¢‚ú£naï Ò¸ö!¸ƒ4˜%‹ü¶]uÕUÈÂ@$@$P@‰ ÀÅ‹{fÇÏL®ð» “¬?>à°2PV¾¡˜H±àÇïÆoô \ÜàŠ·àÇÞMwîÚó"ºm~¸á ;ùä“íGB'è¹T¡†6½ÿþû~5ÔÛ O]°MšÂÝ«gœq†õæê}Õ.®Añ¦öU˜D ¥¸DkP4¡,Øzå•W´ˆ”cœ½”„1él¸mj̃7Ķv}* #®ˆÒ8xðr?š_m+'$Ó\•‰pËpë®¶ã¸a|Xiý\ï'òC¸©P‹ ZNº#<¨W_}µ- eÂcªåm'Y¯lÊE2yâ˜ÂF&Ï éH€H€’!¸|úé§íƒB @ èGYù÷ã…k! êxÝu×ùpýãqèzs>ÝiÒ…t6ܶG À—_~Ùþøã”ä™´-×zåSnÜ{¤ H÷.fÒ6µÅ# @þ€˜$€uàðQˆ›äkÕa¶QÊÊ%¸Ý­îÇ+N`¸È0Ï–»n_°;S„‰(x1]O’+FÜe3‚Þ·LÛœî£ë¶=(¯¿þz_à`áÞ`PA¥cÖTÇÈ©ˆ>ØÀZ„Q^)„[o½Õ¯ ìFÙDÚL„DTÝ‘!7¤Áì`0ìgŒûÃn"f+9DåÜ5ÃÆPbæ»r·¹Ô+ßrãÞ#…‘Ži&ÏMmñH$@$?Ä VñÇÚnðÞb lÂ6ÊÈvÇäÃ2,øˆÞu×]–žûñÂý°eAÐ /½ôR y×.l›ÎàD·x0¸ëƹÐ9Œ²s é>ºnÛ]Aæîšôä!Ÿ»–ŸËÌ ðdnÆX¾+®¸Â>‡àR1£G¶÷Ýú@ôëΰ‰8wf—+$–CAZ÷YºÌÕN:nÚ5Ž]Y’ .;p N|Áˆh?~a[æZ¯|Ëz\6é˜fòÜ\{<' È@â"¢ûŸb½¾$½€°›°2ÂU÷C‡(Æláã¥^¸¸åF\AQϺ(5¿~œ1{ïªÖŸŠN·Ž“Èz¼ï~!àñ„=LÁn™„»ï¾Û A¯$ò»mG›0ŽMCГ‡ü¨¸»{'£îðBª0›ñNë]'ô9„­'q«{Ò"+^Õ†ë} rs™á¹`Û¾{î¹ÇnÑO/ºö!Ô°kGp½Gµu„àÓºãˆ÷ûæ‚ûwÜ‘‡ñÌЂ‡zÈ–“O½ò)mqËÆssßcm«»_ð$¤qÿÛŒònª-I€H€ò'¸D•àÕ ž1|ÔÝ.¾\« °›°ôebyÜ,D >ðz/̤v]œ¦×#>ÆØ›X¯õ¨]§*qÿ¶Ûn³³Y!æÐ…­‚gêÔ©Z”=ºÝrjG|`—.À¶N&@>ÌÕ«WûÙà…Çص=Ñìܾ+W®L‰sÓ]xá…¼‚î=œë‡‚ÀÝf.˜ד&MJy/ ì±mš›Ý™QÚeÿÈ#¤ÄƒÅb³8X" <Âî^ð…™â¨Ò·zƒXu…fpÛ20t·tÅŽ[רóéÓ§§´ÓV$â¼ãC‡Mi£Ú…ØÄÌh³ÞÃ×Xn'ŸzåS.ØAÈ»uÂî*î@Áw±oß¾ö8Å€¡îû‚¶baj  Â(ˆÄÇž . 
| óÈ °›°2² °ãî«­AƒY/K:{Áðe—]f?¾:> !IÔUº€19ž -S*b4­{Ä M‡#êéŠ7­{®»g¸yõ" žÔ ×Rãݱg ,H)uÕÅ··mÛ–bÞÍ ·6Ô®a#8{Ûõvj:÷;A/•®céÞsÏQ&¼Rnׯsx Ð¶à}½V1Î4z?“£2sŸQÔ9þ0qíbÉõ΂³ÆÁC¸Áx5äS¯\ÊuǽjÜ#¶kŒ{ÿõ¯Ùe…Ü<î98ß)m+$@$@ù¨@vó?Ýăù«^L·¥˜nUiÞ¼¹Ô­[Wj×®-Y•…Ꙁ(f½˜™‘bÄ‹˜zVvÜÄfÀ¾/š½…ºÑãFÇž/’ mMš4‘ÆÛsãüôÚÞüß?¨?ÚŽ`<#bºŽí5lµoßþ©Âh3ì‚[>m·žþ.ÊFêÔ©#-Z´HÉ€v™¹¥Q£FÒ Aƒ”8½@³v Ô¯__ÐÞvíÚeý¨­êr4ÂEFŒ!f€¡q&ÆgŸxá]5ÝÓb„®¯¥/³˜±YUßVkïXÆ Sòâ2ÌH³fÍRî'Q¯\ÊM©/H€H€J‚@Á Zo¼6²nÝ:éÑ£‡BµjÕÊ>r!ÆÃ!ݺu“Ž;f”Ÿ‰H if,žŒ?^ŒWRÌ$“´æÍ‚Îö3fLÚ´ù$¨®õʧMÌK$@$P8€¨6D é¶•N:IëÖ­­T¨Þ@=ª3G€ð°˜5ò¬·…â¯p/-ÇxõÕWeÀ€bƨYïv:Ï1ÞcˆÄã?^ )«k½âi2–H€H * \¢qè2…€ƒݘèv…?ˆ?Wªøƒ„×yá„€¬Š.Ъ|8,»z˜5k–œ~úé¶Rf,¥œsÎ9‘D7í”)SÄLˆ³n¤ýã%2qžÕµ^y6‹ÙI€H€ H (õ7»wˆ™¥iÇÞAðaüÆËaìƇi3¹ÃŽ‘ÃØ2ˆÁ¶mÛÚqcšÆ&ä?$PÌLpéÙ³§_²™Ø#çž{®âдiS;\cKÍN rÓM7Ùt'اO?O!Nªk½ ÑVÚ$ H†@Ñ VäwìØa½{ð’˜eG¬8DXâ³ykú,^ÌzlÿF˜.ºöaÕø@ºtéb—ˆ)~é,‘H€H€H &HD®[·NŽ<òÈšÌ1붯ZµJºuëF˜59f   È—@"píÚµrÔQGå[—•ÿý÷ß—îÝ»SÖ¨§ÎÆ’ @õ PåÝÇýúõ“ƒ>X^|ñE»Ž`šmÛ¶Iß¾}cèàAâʥ(KõɱÞ$@$@$Púª\b6ìñÇ/o¿ý¶¼üòË2pàÀHªsæÌ‘aÆÉàÁƒåÉ'Ÿ´Û¬E&N1nÜ8™:uª@ˆUE÷5`šÄh   ‚¨rˆ–ýóŸÿ”óÏ?_Î=÷\¹çž{B·IÃ6j§œrŠ~sçΕ?ÿùÏyAQáÙ«W¯¼lå’™0jÌC$@$@$j!uI”Á¢Ôµk×¶ë6jÔ¨RÜ€˜;v¬Üu×]²téR9öØcCÓáf¦6# DDPF€ám   Â0b(¯`ºp½÷Þ{//f1ioèСži­÷ÔSO…ÚzðÁmü\o¼‚ž6 ìà7hÐ ÏLPII?a„”4šö´ÓNóŒØKI ›¯ÇñãÇ{fßã”t¹\€Ø1 @± À–WHB¢óçÏ·bkøðá•„„™™(bã_{íµ”úB<Ž=Újf<Ÿ1§‚ ÇeË–ùy&Nœ˜‡xÇ‘#GúåÂæ…^觃MˆIµi&¬x;wîômærB˜ 5æ!  H‚@µ€Tbø™­åRÚöÉ'ŸXñáõÃ?¤Ä=÷Üs¾0{æ™gü8xéÎ;ï<סCÏìUìÇáDE£˜r .´ùP—%K–øñ_|ñ…1!!™O Ì‡ó’ äC Ú@4Ý»W³gÏNiÓ?þñ{òäÉ)÷Í6t¾gð¡‡J‰Ã…Y.ÆëÝ»·ÍìZÖ²‚6 `ó,X° ’M³l ••ÇÜ ŒÃ(   ‚¨“@Œè³2°& »Ì öÆ=ÌÖÅ‘Ã;L“ –éÙ³§lß¾]¢&†a(gŸ}¶L›6MŒ×ÏÏ5 XmoŸ,_¾\ŒÐƒHöóaë»N8AoDœ]¿ÐÌâ„“@²€Å¤$@$@$@‰¨Vâ«OŸ>²aû>_çÎeõêÕv¾ãŽ;N-Z$uêÔñ`aèÃ?Ü^G @3yDÌø>¹êª«äïÿ»Ÿ7JªÍo¾ùÆOu’nqT>ܧŒ£Ã8   B¨V ½ýöÛåoû›Üwß}rÎ9çÈ 7Ü —_~¹Ì˜1Cþú׿¦°P±†›…€ŽíÚµ³KÁ¤l.à ÄÚ…]ºt FetM˜&&"  (j'±¯ð¡‡*ðø™Iöøõ×_ÛîÖ¶mÛ¦ ÐîÚBtÃæúõë%j-Á”ŠäpA˜4f!  
H†@¾# Ë{@·Xò“0ößïꫯ¶.–†AMkHxÓ§OwÍØóüÑŸµ‹5ýÜ “@0à ®Íàd¤Ã1˜8‚ÙÁÆ èfÍ꜓@²ÂÅÄ$@$@$@ ¨V³€µ]3gδ¿^xA£*3Y&¸| DÖýƒm39ÄŠ:×°kÓŒ;ô£ vÍþÁ6_ЦŸ(à À A1 @âª]°e‚._íîÅ,\ÌþmÒ¤ ¢*CDÌîrÓM7Ù8ã9”C9DV¬Xá§}ýõ×¥ÿþþ5N° ܘ1cü{ï‡q‡µjÕ²³~/¾øb¹õÖ[m¼YûÏÎ>~ôÑGýôfAjÛ=íßÈò„]ÀYcr   ää+)“îF}ÜÝ=2]pݵ†JÊïÔSOõ°n_XÀzS¦LñÓ2Äß D뀵ƒ6Ï:ë,ÏL8 3™Õ=z³ÂÅÄ$@$@$@ ¨–À\å­¿'{öìñ³7mÚÔ?:A¬5Ø AƒÐ$jó—_~±i’šB`(nÞ$  (¼ ÄÓÇl—C©_¿~ª\úE` ã™”®]»JݺuK¿Al @IÈ[šîZ»\ ÆÞµnݺ¤_U•ݺu«ìÞ½Ûî"‚1‡ $@$@$@$PLy @TvóæÍòý÷ßK÷îÝ‹Y÷’- kbR ™f    bHDBümÚ´Iš7on÷Î-v#J©¼/¿üRvíÚ%:uŠœÙ\Jía]I€H€H€J@"%¶lÙ"Øš­U«VïÄŸ2Â27µk׎HÉÛ$@$@$@$P8‰@TÏìº!Û†-Ô0ä€-ZDή-\“ª—elW·cÇÙ¹s§Ý?¸eË–v¬dÆ «WEY   C 1ˆÉ ;ß}÷íâÄ$\Ã;h–­©1@݆VTTX/–˜Á$t‘ci\sò‡KŠç$@$@$@Å$˜D¥!÷îÝkXêç¸W“ „^½zõ¬WGü(þŠùг,   D ‡×?WüÕ4ïŽ|ïÇ1ú†ðH$@$@$P• "«²A,›H€H€H€H ž`<Æ’ @Ù ,»GÊ‘ @< Àx>Œ%   ²#@Xv” "   x€ñ|K$@$@$@eG€°ì)D$@$@$@ñ(ãù0–H€H€H€ÊŽ`Ù=R6ˆH€H€H€â PÆóa, ” À²{¤l Ä ŒçÃX   (;€e÷HÙ    ˆ'@χ±$@$@$@$Pv(ËA$@$@$@$O€0žcI€H€H€H ìP–Ý#eƒH€H€H€H ž`<Æ’ @Ù ,»GÊ‘ @< Àx>Œ%   ²#@Xv” "   x€ñ|K$@$@$@eG€°ì)D$@$@$@ñ(ãù0–H€H€H€ÊŽ`Ù=R6ˆH€H€H€â PÆóa, ” À²{¤l Ä ŒçÃX   (;€e÷HÙ    ˆ'@χ±$@$@$@$Pv(ËA$@$@$@$O€0žcI€H€H€H ìP–Ý#eƒH€H€H€H ž`<Æ’ @Ù ,»GÊ‘ @< ÀxS}Ê IDAT>Œ%   ²#@Xv” "   x€ñ|K$@$@$@eG€°ì)D$@$@$@ñ(ãù0–H€H€H€ÊŽ`Ù=R6ˆH€H€H€â PÆóa, ” À²{¤l Ä ŒçÃX   (;€e÷HÙ    ˆ'@χ±$@$@$@$Pv(ËA$@$@$@$O€0žcI€H€H€H ìP–Ý#eƒH€H€H€H ž`<Æ’ @Ùø|T/G°ªªIEND®B`‚aiohttp-3.0.1/demos/polls/Makefile0000666000000000000000000000106013240304665015215 0ustar 00000000000000# Some simple testing tasks (sorry, UNIX only). FLAGS= flake: pyflakes aiohttpdemo_polls pep8 aiohttpdemo_polls setup.py test: pytest tests clean: rm -rf `find . -name __pycache__` rm -f `find . -type f -name '*.py[co]' ` rm -f `find . -type f -name '*~' ` rm -f `find . -type f -name '.*~' ` rm -f `find . -type f -name '@*' ` rm -f `find . -type f -name '#*#' ` rm -f `find . -type f -name '*.orig' ` rm -f `find . 
-type f -name '*.rej' ` rm -f .coverage rm -rf coverage rm -rf build rm -rf htmlcov rm -rf dist .PHONY: flake clean test aiohttp-3.0.1/demos/polls/README.rst0000666000000000000000000000147313240304665015254 0ustar 00000000000000Polls (demo for aiohttp) ======================== Example of polls project using aiohttp_, aiopg_ and aiohttp_jinja2_, similar to django one. Installation ============ Install the app:: $ cd demos/polls $ pip install -e . Create database for your project:: bash sql/install.sh Run application:: $ python -m aiohttpdemo_polls Open browser:: http://localhost:8080/ .. image:: https://raw.githubusercontent.com/andriisoldatenko/aiohttp_polls/master/images/example.png :align: center Run integration tests:: pip install tox tox Requirements ============ * aiohttp_ * aiopg_ * aiohttp_jinja2_ .. _Python: https://www.python.org .. _aiohttp: https://github.com/aio-libs/aiohttp .. _aiopg: https://github.com/aio-libs/aiopg .. _aiohttp_jinja2: https://github.com/aio-libs/aiohttp_jinja2 aiohttp-3.0.1/demos/polls/requirements.txt0000666000000000000000000000010413240304665017037 0ustar 00000000000000-e . 
docker-py==1.10.6 pytest-aiohttp==0.3.0 trafaret_config==1.0.1 aiohttp-3.0.1/demos/polls/setup.py0000666000000000000000000000203113240304665015266 0ustar 00000000000000import os import re from setuptools import find_packages, setup def read_version(): regexp = re.compile(r"^__version__\W*=\W*'([\d.abrc]+)'") init_py = os.path.join(os.path.dirname(__file__), 'aiohttpdemo_polls', '__init__.py') with open(init_py) as f: for line in f: match = regexp.match(line) if match is not None: return match.group(1) else: msg = 'Cannot find version in aiohttpdemo_polls/__init__.py' raise RuntimeError(msg) install_requires = ['aiohttp', 'aiopg[sa]', 'aiohttp-jinja2', 'trafaret-config'] setup(name='aiohttpdemo-polls', version=read_version(), description='Polls project example from aiohttp', platforms=['POSIX'], packages=find_packages(), package_data={ '': ['templates/*.html', 'static/*.*'] }, include_package_data=True, install_requires=install_requires, zip_safe=False) aiohttp-3.0.1/demos/polls/sql/0000777000000000000000000000000013240305035014347 5ustar 00000000000000aiohttp-3.0.1/demos/polls/sql/create_tables.sql0000666000000000000000000000137513240304665017703 0ustar 00000000000000SET ROLE 'aiohttpdemo_user'; BEGIN; -- -- Create model Choice -- CREATE TABLE "choice" ("id" serial NOT NULL PRIMARY KEY, "choice_text" varchar(200) NOT NULL, "votes" integer NOT NULL); -- -- Create model Question -- CREATE TABLE "question" ("id" serial NOT NULL PRIMARY KEY, "question_text" varchar(200) NOT NULL, "pub_date" timestamp with time zone NOT NULL); -- -- Add field question to choice -- ALTER TABLE "choice" ADD COLUMN "question_id" integer NOT NULL; ALTER TABLE "choice" ALTER COLUMN "question_id" DROP DEFAULT; CREATE INDEX "choice_7aa0f6ee" ON "choice" ("question_id"); ALTER TABLE "choice" ADD CONSTRAINT "choice_question_id_c5b4b260_fk_question_id" FOREIGN KEY ("question_id") REFERENCES "question" ("id") DEFERRABLE INITIALLY DEFERRED; COMMIT; 
aiohttp-3.0.1/demos/polls/sql/install.sh0000666000000000000000000000115413240304665016362 0ustar 00000000000000# determine os unameOut="$(uname -s)" case "${unameOut}" in Darwin*) pg_cmd="psql -U postgres";; *) pg_cmd="sudo -u postgres psql" esac ${pg_cmd} -c "DROP DATABASE IF EXISTS aiohttpdemo_polls" ${pg_cmd} -c "DROP ROLE IF EXISTS aiohttpdemo_user" ${pg_cmd} -c "CREATE USER aiohttpdemo_user WITH PASSWORD 'aiohttpdemo_user';" ${pg_cmd} -c "CREATE DATABASE aiohttpdemo_polls ENCODING 'UTF8';" ${pg_cmd} -c "GRANT ALL PRIVILEGES ON DATABASE aiohttpdemo_polls TO aiohttpdemo_user;" cat sql/create_tables.sql | ${pg_cmd} -d aiohttpdemo_polls -a cat sql/sample_data.sql | ${pg_cmd} -d aiohttpdemo_polls -a aiohttp-3.0.1/demos/polls/sql/sample_data.sql0000666000000000000000000000125313240304665017353 0ustar 00000000000000SET ROLE 'aiohttpdemo_user'; INSERT INTO question (id, question_text, pub_date) VALUES (1, 'What''s new?', '2015-12-15 17:17:49.629+02'); -- -- Name: question_id_seq; Type: SEQUENCE SET; Schema: public; Owner: polls -- SELECT pg_catalog.setval('question_id_seq', 1, true); INSERT INTO choice (id, choice_text, votes, question_id) VALUES (1, 'Not much', 0, 1); INSERT INTO choice (id, choice_text, votes, question_id) VALUES (2, 'The sky', 0, 1); INSERT INTO choice (id, choice_text, votes, question_id) VALUES (3, 'Just hacking again', 0, 1); -- -- Name: choice_id_seq; Type: SEQUENCE SET; Schema: public; Owner: polls -- SELECT pg_catalog.setval('choice_id_seq', 3, true); aiohttp-3.0.1/demos/polls/tests/0000777000000000000000000000000013240305035014712 5ustar 00000000000000aiohttp-3.0.1/demos/polls/tests/conftest.py0000666000000000000000000000107213240304665017121 0ustar 00000000000000import pathlib import subprocess import pytest from aiohttpdemo_polls.main import init BASE_DIR = pathlib.Path(__file__).parent.parent @pytest.fixture def config_path(): path = BASE_DIR / 'config' / 'polls.yaml' return path.as_posix() @pytest.fixture def cli(loop, test_client, 
config_path): app = init(loop, ['-c', config_path]) return loop.run_until_complete(test_client(app)) @pytest.fixture def app_db(): subprocess.call( [(BASE_DIR / 'sql' / 'install.sh').as_posix()], shell=True, cwd=BASE_DIR.as_posix() ) aiohttp-3.0.1/demos/polls/tests/test_integration.py0000666000000000000000000000066713240304665020667 0ustar 00000000000000""" Integration tests. They need a running database. Beware, they destroy your db using sudo. """ async def test_index(cli, app_db): response = await cli.get('/poll/1') assert response.status == 200 assert 'What\'s new?' in await response.text() async def test_results(cli, app_db): response = await cli.get('/poll/1/results') assert response.status == 200 assert 'Just hacking again' in await response.text() aiohttp-3.0.1/demos/polls/tox.ini0000666000000000000000000000015513240304665015074 0ustar 00000000000000[tox] envlist = py35 [testenv] deps = pytest pytest-aiohttp usedevelop = True commands=py.test tests -s aiohttp-3.0.1/demos/README.rst0000666000000000000000000000003413240304665014113 0ustar 00000000000000aiohttp demos ============= aiohttp-3.0.1/docs/0000777000000000000000000000000013240305035012240 5ustar 00000000000000aiohttp-3.0.1/docs/abc.rst0000666000000000000000000001172413240304665013534 0ustar 00000000000000.. _aiohttp-abc: Abstract Base Classes ===================== .. module:: aiohttp .. currentmodule:: aiohttp Abstract routing ---------------- aiohttp has abstract classes for managing web interfaces. The most part of :mod:`aiohttp.web` is not intended to be inherited but few of them are. aiohttp.web is built on top of few concepts: *application*, *router*, *request* and *response*. *router* is a *pluggable* part: a library user may build a *router* from scratch, all other parts should work with new router seamlessly. :class:`AbstractRouter` has the only mandatory method: :meth:`AbstractRouter.resolve` coroutine. It must return an :class:`AbstractMatchInfo` instance. 
If the requested URL handler is found :meth:`AbstractMatchInfo.handler` is a :term:`web-handler` for requested URL and :attr:`AbstractMatchInfo.http_exception` is ``None``. Otherwise :attr:`AbstractMatchInfo.http_exception` is an instance of :exc:`~aiohttp.web.HTTPException` like *404: NotFound* or *405: Method Not Allowed*. :meth:`AbstractMatchInfo.handler` raises :attr:`~AbstractMatchInfo.http_exception` on call. .. class:: aiohttp.abc.AbstractRouter Abstract router, :class:`aiohttp.web.Application` accepts it as *router* parameter and returns as :attr:`aiohttp.web.Application.router`. .. coroutinemethod:: resolve(request) Performs URL resolving. It's an abstract method, should be overridden in *router* implementation. :param request: :class:`aiohttp.web.Request` instance for resolving, the request has :attr:`aiohttp.web.Request.match_info` equals to ``None`` at resolving stage. :return: :class:`AbstractMatchInfo` instance. .. class:: aiohttp.abc.AbstractMatchInfo Abstract *match info*, returned by :meth:`AbstractRouter.resolve` call. .. attribute:: http_exception :exc:`aiohttp.web.HTTPException` if no match was found, ``None`` otherwise. .. coroutinemethod:: handler(request) Abstract method performing :term:`web-handler` processing. :param request: :class:`aiohttp.web.Request` instance for resolving, the request has :attr:`aiohttp.web.Request.match_info` equals to ``None`` at resolving stage. :return: :class:`aiohttp.web.StreamResponse` or descendants. :raise: :class:`aiohttp.web.HTTPException` on error .. coroutinemethod:: expect_handler(request) Abstract method for handling *100-continue* processing. Abstract Class Based Views -------------------------- For *class based view* support aiohttp has abstract :class:`AbstractView` class which is *awaitable* (may be uses like ``await Cls()`` or ``yield from Cls()`` and has a *request* as an attribute. .. class:: AbstractView An abstract class, base for all *class based views* implementations. 
Methods ``__iter__`` and ``__await__`` should be overridden. .. attribute:: request :class:`aiohttp.web.Request` instance for performing the request. Abstract Cookie Jar ------------------- .. class:: aiohttp.abc.AbstractCookieJar The cookie jar instance is available as :attr:`ClientSession.cookie_jar`. The jar contains :class:`~http.cookies.Morsel` items for storing internal cookie data. API provides a count of saved cookies:: len(session.cookie_jar) These cookies may be iterated over:: for cookie in session.cookie_jar: print(cookie.key) print(cookie["domain"]) An abstract class for cookie storage. Implements :class:`collections.abc.Iterable` and :class:`collections.abc.Sized`. .. method:: update_cookies(cookies, response_url=None) Update cookies returned by server in ``Set-Cookie`` header. :param cookies: a :class:`collections.abc.Mapping` (e.g. :class:`dict`, :class:`~http.cookies.SimpleCookie`) or *iterable* of *pairs* with cookies returned by server's response. :param str response_url: URL of response, ``None`` for *shared cookies*. Regular cookies are coupled with server's URL and are sent only to this server, shared ones are sent in every client request. .. method:: filter_cookies(request_url) Return jar's cookies acceptable for URL and available in ``Cookie`` header for sending client requests for given URL. :param str response_url: request's URL for which cookies are asked. :return: :class:`http.cookies.SimpleCookie` with filtered cookies for given URL. Abstract Abstract Access Logger ------------------------------- .. class:: aiohttp.abc.AbstractAccessLogger An abstract class, base for all :class:`RequestHandler` ``access_logger`` implementations Method ``log`` should be overridden. .. method:: log(request, response, time) :param request: :class:`aiohttp.web.Request` object. :param response: :class:`aiohttp.web.Response` object. :param float time: Time taken to serve the request. 
aiohttp-3.0.1/docs/aiohttp-icon.svg0000666000000000000000000000774513240304665015404 0ustar 00000000000000 image/svg+xml aiohttp-3.0.1/docs/aiohttp-plain.svg0000666000000000000000000000774413240304665015556 0ustar 00000000000000 image/svg+xml aiohttp-3.0.1/docs/built_with.rst0000666000000000000000000000135613240304665015161 0ustar 00000000000000.. _aiohttp-built-with: Built with aiohttp ================== aiohttp is used to build useful libraries built on top of it, and there's a page dedicated to list them: :ref:`aiohttp-3rd-party`. There are also projects that leverage the power of aiohttp to provide end-user tools, like command lines or software with full user interfaces. This page aims to list those projects. If you are using aiohttp in your software and if it's playing a central role, you can add it here in this list. You can also add a **Built with aiohttp** link somewhere in your project, pointing to ``_. * `Molotov `_ Load testing tool. * `Arsenic `_ Async WebDriver. aiohttp-3.0.1/docs/changes.rst0000666000000000000000000000011713240304665014411 0ustar 00000000000000.. _aiohttp_changes: .. include:: ../CHANGES.rst .. include:: ../HISTORY.rst aiohttp-3.0.1/docs/client.rst0000666000000000000000000000044513240304665014263 0ustar 00000000000000.. _aiohttp-client: Client ====== .. module:: aiohttp The page contains all information about aiohttp Client API: .. toctree:: :name: client Quickstart Advanced Usage Reference Tracing Reference aiohttp-3.0.1/docs/client_advanced.rst0000666000000000000000000004454013240304665016114 0ustar 00000000000000.. _aiohttp-client-advanced: Advanced Client Usage ===================== .. currentmodule:: aiohttp .. _aiohttp-client-session: Client Session -------------- :class:`ClientSession` is the heart and the main entry point for all client API operations. Create the session first, use the instance for performing HTTP requests and initiating WebSocket connections. 
The session contains a cookie storage and connection pool, thus cookies and connections are shared between HTTP requests sent by the same session. Custom Request Headers ---------------------- If you need to add HTTP headers to a request, pass them in a :class:`dict` to the *headers* parameter. For example, if you want to specify the content-type directly:: url = 'http://example.com/image' payload = b'GIF89a\x01\x00\x01\x00\x00\xff\x00,\x00\x00' b'\x00\x00\x01\x00\x01\x00\x00\x02\x00;' headers = {'content-type': 'image/gif'} await session.post(url, data=payload, headers=headers) You also can set default headers for all session requests:: headers={"Authorization": "Basic bG9naW46cGFzcw=="} async with aiohttp.ClientSession(headers=headers) as session: async with session.get("http://httpbin.org/headers") as r: json_body = await r.json() assert json_body['headers']['Authorization'] == \ 'Basic bG9naW46cGFzcw==' Typical use case is sending JSON body. You can specify content type directly as shown above, but it is more convenient to use special keyword ``json``:: await session.post(url, json={'example': 'text'}) The same for *text/plain*:: await session.post(url, text='Привет, Мир!') Custom Cookies -------------- To send your own cookies to the server, you can use the *cookies* parameter of :class:`ClientSession` constructor:: url = 'http://httpbin.org/cookies' cookies = {'cookies_are': 'working'} async with ClientSession(cookies=cookies) as session: async with session.get(url) as resp: assert await resp.json() == { "cookies": {"cookies_are": "working"}} .. note:: ``httpbin.org/cookies`` endpoint returns request cookies in JSON-encoded body. To access session cookies see :attr:`ClientSession.cookie_jar`. 
:class:`~aiohttp.ClientSession` may be used for sharing cookies between multiple requests:: async with aiohttp.ClientSession() as session: await session.get( 'http://httpbin.org/cookies/set?my_cookie=my_value') filtered = session.cookie_jar.filter_cookies('http://httpbin.org') assert filtered['my_cookie'].value == 'my_value' async with session.get('http://httpbin.org/cookies') as r: json_body = await r.json() assert json_body['cookies']['my_cookie'] == 'my_value' Response Headers and Cookies ---------------------------- We can view the server's response :attr:`ClientResponse.headers` using a :class:`~multidict.CIMultiDictProxy`:: >>> resp.headers {'ACCESS-CONTROL-ALLOW-ORIGIN': '*', 'CONTENT-TYPE': 'application/json', 'DATE': 'Tue, 15 Jul 2014 16:49:51 GMT', 'SERVER': 'gunicorn/18.0', 'CONTENT-LENGTH': '331', 'CONNECTION': 'keep-alive'} The dictionary is special, though: it's made just for HTTP headers. According to `RFC 7230 `_, HTTP Header names are case-insensitive. It also supports multiple values for the same key as HTTP protocol does. So, we can access the headers using any capitalization we want:: >>> resp.headers['Content-Type'] 'application/json' >>> resp.headers.get('content-type') 'application/json' All headers are converted from binary data using UTF-8 with ``surrogateescape`` option. That works fine on most cases but sometimes unconverted data is needed if a server uses nonstandard encoding. While these headers are malformed from :rfc:`7230` perspective they may be retrieved by using :attr:`ClientResponse.raw_headers` property:: >>> resp.raw_headers ((b'SERVER', b'nginx'), (b'DATE', b'Sat, 09 Jan 2016 20:28:40 GMT'), (b'CONTENT-TYPE', b'text/html; charset=utf-8'), (b'CONTENT-LENGTH', b'12150'), (b'CONNECTION', b'keep-alive')) If a response contains some *HTTP Cookies*, you can quickly access them:: url = 'http://example.com/some/cookie/setting/url' async with session.get(url) as resp: print(resp.cookies['example_cookie_name']) .. 
note:: Response cookies contain only values, that were in ``Set-Cookie`` headers of the **last** request in redirection chain. To gather cookies between all redirection requests please use :ref:`aiohttp.ClientSession ` object. Redirection History ------------------- If a request was redirected, it is possible to view previous responses using the :attr:`~ClientResponse.history` attribute:: >>> resp = await session.get('http://example.com/some/redirect/') >>> resp >>> resp.history (,) If no redirects occurred or ``allow_redirects`` is set to ``False``, history will be an empty sequence. Cookie Jar ---------- .. _aiohttp-client-cookie-safety: Cookie Safety ^^^^^^^^^^^^^ By default :class:`~aiohttp.ClientSession` uses strict version of :class:`aiohttp.CookieJar`. :rfc:`2109` explicitly forbids cookie accepting from URLs with IP address instead of DNS name (e.g. `http://127.0.0.1:80/cookie`). It's good but sometimes for testing we need to enable support for such cookies. It should be done by passing `unsafe=True` to :class:`aiohttp.CookieJar` constructor:: jar = aiohttp.CookieJar(unsafe=True) session = aiohttp.ClientSession(cookie_jar=jar) .. _aiohttp-client-dummy-cookie-jar: Dummy Cookie Jar ^^^^^^^^^^^^^^^^ Sometimes cookie processing is not desirable. For this purpose it's possible to pass :class:`aiohttp.DummyCookieJar` instance into client session:: jar = aiohttp.DummyCookieJar() session = aiohttp.ClientSession(cookie_jar=jar) Uploading pre-compressed data ----------------------------- To upload data that is already compressed before passing it to aiohttp, call the request function with the used compression algorithm name (usually ``deflate`` or ``gzip``) as the value of the ``Content-Encoding`` header:: async def my_coroutine(session, headers, my_data): data = zlib.compress(my_data) headers = {'Content-Encoding': 'deflate'} async with session.post('http://httpbin.org/post', data=data, headers=headers) pass .. 
_aiohttp-client-tracing: Client Tracing -------------- The execution flow of a specific request can be followed attaching listeners coroutines to the signals provided by the :class:`TraceConfig` instance, this instance will be used as a parameter for the :class:`ClientSession` constructor having as a result a client that triggers the different signals supported by the :class:`TraceConfig`. By default any instance of :class:`ClientSession` class comes with the signals ability disabled. The following snippet shows how the start and the end signals of a request flow can be followed:: async def on_request_start( session, trace_config_ctx, params): print("Starting request") async def on_request_end(session, trace_config_ctx, params): print("Ending request") trace_config = aiohttp.TraceConfig() trace_config.on_request_start.append(on_request_start) trace_config.on_request_end.append(on_request_end) async with aiohttp.ClientSession(trace_configs=[trace_config]) as client: client.get('http://example.com/some/redirect/') The ``trace_configs`` is a list that can contain instances of :class:`TraceConfig` class that allow run the signals handlers coming from different :class:`TraceConfig` instances. The following example shows how two different :class:`TraceConfig` that have a different nature are installed to perform their job in each signal handle:: from mylib.traceconfig import AuditRequest from mylib.traceconfig import XRay async with aiohttp.ClientSession(trace_configs=[AuditRequest(), XRay()]) as client: client.get('http://example.com/some/redirect/') All signals take as a parameters first, the :class:`ClientSession` instance used by the specific request related to that signals and second, a :class:`SimpleNamespace` instance called ``trace_config_ctx``. 
The ``trace_config_ctx`` object can be used to share the state through to the different signals that belong to the same request and to the same :class:`TraceConfig` class, perhaps:: async def on_request_start( session, trace_config_ctx, params): trace_config_ctx.start = session.loop.time() async def on_request_end(session, trace_config_ctx, params): elapsed = session.loop.time() - trace_config_ctx.start print("Request took {}".format(elapsed)) The ``trace_config_ctx`` param is by default a :class:`SimpleNampespace` that is initialized at the beginning of the request flow. However, the factory used to create this object can be overwritten using the ``trace_config_ctx_factory`` constructor param of the :class:`TraceConfig` class. The ``trace_request_ctx`` param can given at the beginning of the request execution, accepted by all of the HTTP verbs, and will be passed as a keyword argument for the ``trace_config_ctx_factory`` factory. This param is useful to pass data that is only available at request time, perhaps:: async def on_request_start( session, trace_config_ctx, params): print(trace_config_ctx.trace_request_ctx) session.get('http://example.com/some/redirect/', trace_request_ctx={'foo': 'bar'}) .. seealso:: :ref:`aiohttp-client-tracing-reference` section for more information about the different signals supported. Connectors ---------- To tweak or change *transport* layer of requests you can pass a custom *connector* to :class:`~aiohttp.ClientSession` and family. For example:: conn = aiohttp.TCPConnector() session = aiohttp.ClientSession(connector=conn) .. note:: By default *session* object takes the ownership of the connector, among other things closing the connections once the *session* is closed. If you are keen on share the same *connector* through different *session* instances you must give the *connector_owner* parameter as **False** for each *session* instance. .. 
seealso:: :ref:`aiohttp-client-reference-connectors` section for more information about different connector types and configuration options. Limiting connection pool size ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ To limit amount of simultaneously opened connections you can pass *limit* parameter to *connector*:: conn = aiohttp.TCPConnector(limit=30) The example limits total amount of parallel connections to `30`. The default is `100`. If you explicitly want not to have limits, pass `0`. For example:: conn = aiohttp.TCPConnector(limit=0) To limit amount of simultaneously opened connection to the same endpoint (``(host, port, is_ssl)`` triple) you can pass *limit_per_host* parameter to *connector*:: conn = aiohttp.TCPConnector(limit_per_host=30) The example limits amount of parallel connections to the same endpoint to `30`. The default is `0` (no limit on per host basis). Tuning the DNS cache ^^^^^^^^^^^^^^^^^^^^ By default :class:`~aiohttp.TCPConnector` comes with the DNS cache table enabled, and resolutions will be cached by default for `10` seconds.
This behavior can be changed either by changing the TTL for a resolution, as can be seen in the following example:: conn = aiohttp.TCPConnector(ttl_dns_cache=300) or disabling the use of the DNS cache table, meaning that all requests will end up making a DNS resolution, as the following example shows:: conn = aiohttp.TCPConnector(use_dns_cache=False) Resolving using custom nameservers ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ In order to specify the nameservers to use when resolving the hostnames, :term:`aiodns` is required:: from aiohttp.resolver import AsyncResolver resolver = AsyncResolver(nameservers=["8.8.8.8", "8.8.4.4"]) conn = aiohttp.TCPConnector(resolver=resolver) Unix domain sockets ^^^^^^^^^^^^^^^^^^^ If your HTTP server uses UNIX domain sockets you can use :class:`~aiohttp.UnixConnector`:: conn = aiohttp.UnixConnector(path='/path/to/socket') session = aiohttp.ClientSession(connector=conn) SSL control for TCP sockets --------------------------- By default *aiohttp* uses strict checks for HTTPS protocol.
Certification checks can be relaxed by setting *ssl* to ``False``:: r = await session.get('https://example.com', ssl=False) If you need to setup custom ssl parameters (use own certification files for example) you can create a :class:`ssl.SSLContext` instance and pass it into the proper :class:`ClientSession` method:: sslcontext = ssl.create_default_context( cafile='/path/to/ca-bundle.crt') r = await session.get('https://example.com', ssl=sslcontext) If you need to verify *self-signed* certificates, you can do the same thing as the previous example, but add another call to :meth:`ssl.SSLContext.load_cert_chain` with the key pair:: sslcontext = ssl.create_default_context( cafile='/path/to/ca-bundle.crt') sslcontext.load_cert_chain('/path/to/client/public/device.pem', '/path/to/client/private/device.key') r = await session.get('https://example.com', ssl=sslcontext) There are explicit errors when ssl verification fails :class:`aiohttp.ClientConnectorSSLError`:: try: await session.get('https://expired.badssl.com/') except aiohttp.ClientConnectorSSLError as e: assert isinstance(e, ssl.SSLError) :class:`aiohttp.ClientConnectorCertificateError`:: try: await session.get('https://wrong.host.badssl.com/') except aiohttp.ClientConnectorCertificateError as e: assert isinstance(e, ssl.CertificateError) If you need to skip both ssl related errors :class:`aiohttp.ClientSSLError`:: try: await session.get('https://expired.badssl.com/') except aiohttp.ClientSSLError as e: assert isinstance(e, ssl.SSLError) try: await session.get('https://wrong.host.badssl.com/') except aiohttp.ClientSSLError as e: assert isinstance(e, ssl.CertificateError) You may also verify certificates via *SHA256* fingerprint:: # Attempt to connect to https://www.python.org # with a pin to a bogus certificate: bad_fingerprint = b'0'*64 exc = None try: r = await session.get('https://www.python.org', ssl=aiohttp.Fingerprint(bad_fingerprint)) except aiohttp.ServerFingerprintMismatch as e: exc = e assert exc is not None
assert exc.expected == bad_fingerprint # www.python.org cert's actual fingerprint assert exc.got == b'...' Note that this is the fingerprint of the DER-encoded certificate. If you have the certificate in PEM format, you can convert it to DER with e.g:: openssl x509 -in crt.pem -inform PEM -outform DER > crt.der .. note:: Tip: to convert from a hexadecimal digest to a binary byte-string, you can use :func:`binascii.unhexlify`. *ssl* parameter could be passed to :class:`TCPConnector` as default, the value from :meth:`ClientSession.get` and others override default. Proxy support ------------- aiohttp supports HTTP/HTTPS proxies. You have to use *proxy* parameter:: async with aiohttp.ClientSession() as session: async with session.get("http://python.org", proxy="http://some.proxy.com") as resp: print(resp.status) It also supports proxy authorization:: async with aiohttp.ClientSession() as session: proxy_auth = aiohttp.BasicAuth('user', 'pass') async with session.get("http://python.org", proxy="http://some.proxy.com", proxy_auth=proxy_auth) as resp: print(resp.status) Authentication credentials can be passed in proxy URL:: session.get("http://python.org", proxy="http://user:pass@some.proxy.com") Contrary to the ``requests`` library, it won't read environment variables by default. But you can do so by passing ``trust_env=True`` into :class:`aiohttp.ClientSession` constructor for extracting proxy configuration from *HTTP_PROXY* or *HTTPS_PROXY* *environment variables* (both are case insensitive):: async with aiohttp.ClientSession(trust_env=True) as session: async with session.get("http://python.org") as resp: print(resp.status) Proxy credentials are given from ``~/.netrc`` file if present (see :class:`aiohttp.ClientSession` for more details). 
Graceful Shutdown ----------------- When :class:`ClientSession` closes at the end of an ``async with`` block (or through a direct :meth:`ClientSession.close()` call), the underlying connection remains open due to asyncio internal details. In practice, the underlying connection will close after a short while. However, if the event loop is stopped before the underlying connection is closed, an ``ResourceWarning: unclosed transport`` warning is emitted (when warnings are enabled). To avoid this situation, a small delay must be added before closing the event loop to allow any open underlying connections to close. For a :class:`ClientSession` without SSL, a simple zero-sleep (``await asyncio.sleep(0)``) will suffice:: async def read_website(): async with aiohttp.ClientSession() as session: async with session.get('http://example.org/') as response: await response.read() loop = asyncio.get_event_loop() loop.run_until_complete(read_website()) # Zero-sleep to allow underlying connections to close loop.run_until_complete(asyncio.sleep(0)) loop.close() For a :class:`ClientSession` with SSL, the application must wait a short duration before closing:: ... # Wait 250 ms for the underlying SSL connections to close loop.run_until_complete(asyncio.sleep(0.250)) loop.close() Note that the appropriate amount of time to wait will vary from application to application. All if this will eventually become obsolete when the asyncio internals are changed so that aiohttp itself can wait on the underlying connection to close. Please follow issue `#1925 `_ for the progress on this. aiohttp-3.0.1/docs/client_quickstart.rst0000666000000000000000000003155513240304665016543 0ustar 00000000000000.. _aiohttp-client-quickstart: Client Quickstart ================= .. currentmodule:: aiohttp Eager to get started? This page gives a good introduction in how to get started with aiohttp client API. 
First, make sure that aiohttp is :ref:`installed ` and *up-to-date* Let's get started with some simple examples. Make a Request -------------- Begin by importing the aiohttp module:: import aiohttp Now, let's try to get a web-page. For example let's get GitHub's public time-line:: async with aiohttp.ClientSession() as session: async with session.get('https://api.github.com/events') as resp: print(resp.status) print(await resp.text()) Now, we have a :class:`ClientSession` called ``session`` and a :class:`ClientResponse` object called ``resp``. We can get all the information we need from the response. The mandatory parameter of :meth:`ClientSession.get` coroutine is an HTTP url. In order to make an HTTP POST request use :meth:`ClientSession.post` coroutine:: session.post('http://httpbin.org/post', data=b'data') Other HTTP methods are available as well:: session.put('http://httpbin.org/put', data=b'data') session.delete('http://httpbin.org/delete') session.head('http://httpbin.org/get') session.options('http://httpbin.org/get') session.patch('http://httpbin.org/patch', data=b'data') .. note:: Don't create a session per request. Most likely you need a session per application which performs all requests altogether. A session contains a connection pool inside. Connection reusage and keep-alives (both are on by default) may speed up total performance. Passing Parameters In URLs -------------------------- You often want to send some sort of data in the URL's query string. If you were constructing the URL by hand, this data would be given as key/value pairs in the URL after a question mark, e.g. ``httpbin.org/get?key=val``. Requests allows you to provide these arguments as a :class:`dict`, using the ``params`` keyword argument. 
As an example, if you wanted to pass ``key1=value1`` and ``key2=value2`` to ``httpbin.org/get``, you would use the following code:: params = {'key1': 'value1', 'key2': 'value2'} async with session.get('http://httpbin.org/get', params=params) as resp: assert str(resp.url) == 'http://httpbin.org/get?key2=value2&key1=value1' You can see that the URL has been correctly encoded by printing the URL. For sending data with multiple values for the same key :class:`MultiDict` may be used as well. It is also possible to pass a list of 2 item tuples as parameters, in that case you can specify multiple values for each key:: params = [('key', 'value1'), ('key', 'value2')] async with session.get('http://httpbin.org/get', params=params) as r: assert str(r.url) == 'http://httpbin.org/get?key=value2&key=value1' You can also pass :class:`str` content as param, but beware -- content is not encoded by library. Note that ``+`` is not encoded:: async with session.get('http://httpbin.org/get', params='key=value+1') as r: assert str(r.url) == 'http://httpbin.org/get?key=value+1' .. note:: *aiohttp* internally performs URL canonization before sending request. Canonization encodes *host* part by :term:`IDNA` codec and applies :term:`requoting` to *path* and *query* parts. For example ``URL('http://example.com/путь%30?a=%31')`` is converted to ``URL('http://example.com/%D0%BF%D1%83%D1%82%D1%8C/0?a=1')``. Sometimes canonization is not desirable if server accepts exact representation and does not requote URL itself. To disable canonization use ``encoded=True`` parameter for URL construction:: await session.get(URL('http://example.com/%30', encoded=True)) .. warning:: Passing *params* overrides ``encoded=True``, never use both options. Response Content and Status Code -------------------------------- We can read the content of the server's response and it's status code. 
Consider the GitHub time-line again:: async with session.get('https://api.github.com/events') as resp: print(resp.status) print(await resp.text()) prints out something like:: 200 '[{"created_at":"2015-06-12T14:06:22Z","public":true,"actor":{... ``aiohttp`` automatically decodes the content from the server. You can specify custom encoding for the :meth:`~ClientResponse.text` method:: await resp.text(encoding='windows-1251') Binary Response Content ----------------------- You can also access the response body as bytes, for non-text requests:: print(await resp.read()) :: b'[{"created_at":"2015-06-12T14:06:22Z","public":true,"actor":{... The ``gzip`` and ``deflate`` transfer-encodings are automatically decoded for you. You can enable ``brotli`` transfer-encodings support, just install `brotlipy `_. JSON Request ------------ Any of session's request methods like :func:`request`, :meth:`ClientSession.get`, :meth:`ClientSession.post` etc. accept `json` parameter:: async with aiohttp.ClientSession() as session: async with session.post(url, json={'test': 'object'}) By default session uses python's standard :mod:`json` module for serialization. But it is possible to use different ``serializer``. :class:`ClientSession` accepts ``json_serialize`` parameter:: import ujson async with aiohttp.ClientSession(json_serialize=ujson.dumps) as session: async with session.post(url, json={'test': 'object'}) .. note:: ``ujson`` library is faster than standard :mod:`json` but slightly incompatible. JSON Response Content --------------------- There's also a built-in JSON decoder, in case you're dealing with JSON data:: async with session.get('https://api.github.com/events') as resp: print(await resp.json()) In case that JSON decoding fails, :meth:`~ClientResponse.json` will raise an exception. It is possible to specify custom encoding and decoder functions for the :meth:`~ClientResponse.json` call. .. note:: The methods above read the whole response body into memory.
If you are planning on reading lots of data, consider using the streaming response method documented below. Streaming Response Content -------------------------- While methods :meth:`~ClientResponse.read`, :meth:`~ClientResponse.json` and :meth:`~ClientResponse.text` are very convenient you should use them carefully. All these methods load the whole response in memory. For example if you want to download several gigabyte sized files, these methods will load all the data in memory. Instead you can use the :attr:`~ClientResponse.content` attribute. It is an instance of the :class:`aiohttp.StreamReader` class. The ``gzip`` and ``deflate`` transfer-encodings are automatically decoded for you:: async with session.get('https://api.github.com/events') as resp: await resp.content.read(10) In general, however, you should use a pattern like this to save what is being streamed to a file:: with open(filename, 'wb') as fd: while True: chunk = await resp.content.read(chunk_size) if not chunk: break fd.write(chunk) It is not possible to use :meth:`~ClientResponse.read`, :meth:`~ClientResponse.json` and :meth:`~ClientResponse.text` after explicit reading from :attr:`~ClientResponse.content`. More complicated POST requests ------------------------------ Typically, you want to send some form-encoded data -- much like an HTML form. To do this, simply pass a dictionary to the *data* argument. Your dictionary of data will automatically be form-encoded when the request is made:: payload = {'key1': 'value1', 'key2': 'value2'} async with session.post('http://httpbin.org/post', data=payload) as resp: print(await resp.text()) :: { ... "form": { "key2": "value2", "key1": "value1" }, ... } If you want to send data that is not form-encoded you can do it by passing a :class:`bytes` instead of a :class:`dict`. This data will be posted directly and content-type set to 'application/octet-stream' by default:: async with session.post(url, data=b'\x00Binary-data\x00') as resp: ... 
If you want to send JSON data:: async with session.post(url, json={'example': 'test'}) as resp: ... To send text with appropriate content-type just use ``text`` attribute :: async with session.post(url, text='ТеÑÑ‚') as resp: ... POST a Multipart-Encoded File ----------------------------- To upload Multipart-encoded files:: url = 'http://httpbin.org/post' files = {'file': open('report.xls', 'rb')} await session.post(url, data=files) You can set the ``filename`` and ``content_type`` explicitly:: url = 'http://httpbin.org/post' data = FormData() data.add_field('file', open('report.xls', 'rb'), filename='report.xls', content_type='application/vnd.ms-excel') await session.post(url, data=data) If you pass a file object as data parameter, aiohttp will stream it to the server automatically. Check :class:`~aiohttp.streams.StreamReader` for supported format information. .. seealso:: :ref:`aiohttp-multipart` Streaming uploads ----------------- :mod:`aiohttp` supports multiple types of streaming uploads, which allows you to send large files without reading them into memory. As a simple case, simply provide a file-like object for your body:: with open('massive-body', 'rb') as f: await session.post('http://httpbin.org/post', data=f) Or you can use :class:`aiohttp.streamer` decorator:: @aiohttp.streamer def file_sender(writer, file_name=None): with open(file_name, 'rb') as f: chunk = f.read(2**16) while chunk: yield from writer.write(chunk) chunk = f.read(2**16) # Then you can use file_sender as a data provider: async with session.post('http://httpbin.org/post', data=file_sender(file_name='huge_file')) as resp: print(await resp.text()) Also it is possible to use a :class:`~aiohttp.streams.StreamReader` object. 
Let's say we want to upload a file from another request and calculate the file SHA256 hash:: async def feed_stream(resp, stream): h = hashlib.sha256() while True: chunk = await resp.content.readany() if not chunk: break h.update(chunk) stream.feed_data(chunk) return h.hexdigest() resp = session.get('http://httpbin.org/post') stream = StreamReader() loop.create_task(session.post('http://httpbin.org/post', data=stream)) file_hash = await feed_stream(resp, stream) Because the response content attribute is a :class:`~aiohttp.streams.StreamReader`, you can chain get and post requests together:: r = await session.get('http://python.org') await session.post('http://httpbin.org/post', data=r.content) .. _aiohttp-client-websockets: WebSockets ---------- :mod:`aiohttp` works with client websockets out-of-the-box. You have to use the :meth:`aiohttp.ClientSession.ws_connect` coroutine for client websocket connection. It accepts a *url* as a first parameter and returns :class:`ClientWebSocketResponse`, with that object you can communicate with websocket server using response's methods:: session = aiohttp.ClientSession() async with session.ws_connect('http://example.org/websocket') as ws: async for msg in ws: if msg.type == aiohttp.WSMsgType.TEXT: if msg.data == 'close cmd': await ws.close() break else: await ws.send_str(msg.data + '/answer') elif msg.type == aiohttp.WSMsgType.CLOSED: break elif msg.type == aiohttp.WSMsgType.ERROR: break You **must** use only one websocket task for both reading (e.g. ``await ws.receive()`` or ``async for msg in ws:``) and writing but may have multiple writer tasks which can only send data asynchronously (by ``await ws.send_str('data')`` for example). Timeouts -------- By default all IO operations have 5min timeout. The timeout may be overridden by passing ``timeout`` parameter into :meth:`ClientSession.get` and family:: async with session.get('https://github.com', timeout=60) as r: ... ``None`` or ``0`` disables timeout check.
The example wraps a client call in :func:`async_timeout.timeout` context manager, adding timeout for both connecting and response body reading procedures:: import async_timeout with async_timeout.timeout(0.001): async with session.get('https://github.com') as r: await r.text() .. note:: Timeout is cumulative time, it includes all operations like sending request, redirects, response parsing, consuming response, etc. aiohttp-3.0.1/docs/client_reference.rst0000666000000000000000000015771313240304665016314 0ustar 00000000000000.. _aiohttp-client-reference: Client Reference ================ .. module:: aiohttp .. currentmodule:: aiohttp Client Session -------------- Client session is the recommended interface for making HTTP requests. Session encapsulates a *connection pool* (*connector* instance) and supports keepalives by default. Unless you are connecting to a large, unknown number of different servers over the lifetime of your application, it is suggested you use a single session for the lifetime of your application to benefit from connection pooling. Usage example:: import aiohttp import asyncio async def fetch(client): async with client.get('http://python.org') as resp: assert resp.status == 200 return await resp.text() async def main(): async with aiohttp.ClientSession() as client: html = await fetch(client) print(html) loop = asyncio.get_event_loop() loop.run_until_complete(main()) The client session supports the context manager protocol for self closing. .. class:: ClientSession(*, connector=None, loop=None, cookies=None, \ headers=None, skip_auto_headers=None, \ auth=None, json_serialize=json.dumps, \ version=aiohttp.HttpVersion11, \ cookie_jar=None, read_timeout=None, \ conn_timeout=None, \ raise_for_status=False, \ connector_owner=True, \ auto_decompress=True, proxies=None) The class for creating client sessions and making requests. :param aiohttp.connector.BaseConnector connector: BaseConnector sub-class instance to support connection pooling. 
:param loop: :ref:`event loop` used for processing HTTP requests. If *loop* is ``None`` the constructor borrows it from *connector* if specified. :func:`asyncio.get_event_loop` is used for getting default event loop otherwise. .. deprecated:: 2.0 :param dict cookies: Cookies to send with the request (optional) :param headers: HTTP Headers to send with every request (optional). May be either *iterable of key-value pairs* or :class:`~collections.abc.Mapping` (e.g. :class:`dict`, :class:`~multidict.CIMultiDict`). :param skip_auto_headers: set of headers for which autogeneration should be skipped. *aiohttp* autogenerates headers like ``User-Agent`` or ``Content-Type`` if these headers are not explicitly passed. Using ``skip_auto_headers`` parameter allows to skip that generation. Note that ``Content-Length`` autogeneration can't be skipped. Iterable of :class:`str` or :class:`~aiohttp.istr` (optional) :param aiohttp.BasicAuth auth: an object that represents HTTP Basic Authorization (optional) :param version: supported HTTP version, ``HTTP 1.1`` by default. :param cookie_jar: Cookie Jar, :class:`AbstractCookieJar` instance. By default every session instance has own private cookie jar for automatic cookies processing but user may redefine this behavior by providing own jar implementation. One example is not processing cookies at all when working in proxy mode. If no cookie processing is needed, a :class:`aiohttp.DummyCookieJar` instance can be provided. :param callable json_serialize: Json *serializer* callable. By default :func:`json.dumps` function. :param bool raise_for_status: Automatically call :meth:`ClientResponse.raise_for_status()` for each response, ``False`` by default. .. versionadded:: 2.0 :param float read_timeout: Request operations timeout. ``read_timeout`` is cumulative for all request operations (request, redirects, responses, data consuming). By default, the read timeout is 5*60 seconds. Use ``None`` or ``0`` to disable timeout checks. 
:param float conn_timeout: timeout for connection establishing (optional). Values ``0`` or ``None`` mean no timeout. :param bool connector_owner: Close connector instance on session closing. Setting the parameter to ``False`` allows to share connection pool between sessions without sharing session state: cookies etc. .. versionadded:: 2.1 :param bool auto_decompress: Automatically decompress response body .. versionadded:: 2.3 :param bool trust_env: Get proxies information from *HTTP_PROXY* / *HTTPS_PROXY* environment variables if the parameter is ``True`` (``False`` by default). Get proxy credentials from ``~/.netrc`` file if present. .. seealso:: ``.netrc`` documentation: https://www.gnu.org/software/inetutils/manual/html_node/The-_002enetrc-file.html .. versionadded:: 2.3 .. versionchanged:: 3.0 Added support for ``~/.netrc`` file. .. attribute:: closed ``True`` if the session has been closed, ``False`` otherwise. A read-only property. .. attribute:: connector :class:`aiohttp.connector.BaseConnector` derived instance used for the session. A read-only property. .. attribute:: cookie_jar The session cookies, :class:`~aiohttp.AbstractCookieJar` instance. Gives access to cookie jar's content and modifiers. A read-only property. .. attribute:: requote_redirect_url aiohttp re quote's redirect urls by default, but some servers require exact url from location header. To disable *re-quote* system set :attr:`requote_redirect_url` attribute to ``False``. .. versionadded:: 2.1 .. note:: This parameter affects all subsequent requests. .. attribute:: loop A loop instance used for session creation. A read-only property. .. 
comethod:: request(method, url, *, params=None, data=None, json=None,\ headers=None, skip_auto_headers=None, \ auth=None, allow_redirects=True,\ max_redirects=10,\ compress=None, chunked=None, expect100=False,\ read_until_eof=True, proxy=None, proxy_auth=None,\ timeout=5*60, ssl=None, \ verify_ssl=None, fingerprint=None, \ ssl_context=None, proxy_headers=None) :async-with: :coroutine: Performs an asynchronous HTTP request. Returns a response object. :param str method: HTTP method :param url: Request URL, :class:`str` or :class:`~yarl.URL`. :param params: Mapping, iterable of tuple of *key*/*value* pairs or string to be sent as parameters in the query string of the new request. Ignored for subsequent redirected requests (optional) Allowed values are: - :class:`collections.abc.Mapping` e.g. :class:`dict`, :class:`aiohttp.MultiDict` or :class:`aiohttp.MultiDictProxy` - :class:`collections.abc.Iterable` e.g. :class:`tuple` or :class:`list` - :class:`str` with preferably url-encoded content (**Warning:** content will not be encoded by *aiohttp*) :param data: Dictionary, bytes, or file-like object to send in the body of the request (optional) :param json: Any json compatible python object (optional). *json* and *data* parameters could not be used at the same time. :param dict headers: HTTP Headers to send with the request (optional) :param skip_auto_headers: set of headers for which autogeneration should be skipped. *aiohttp* autogenerates headers like ``User-Agent`` or ``Content-Type`` if these headers are not explicitly passed. Using ``skip_auto_headers`` parameter allows to skip that generation. Iterable of :class:`str` or :class:`~aiohttp.istr` (optional) :param aiohttp.BasicAuth auth: an object that represents HTTP Basic Authorization (optional) :param bool allow_redirects: If set to ``False``, do not follow redirects. ``True`` by default (optional). :param bool compress: Set to ``True`` if request has to be compressed with deflate encoding. 
If `compress` can not be combined with a *Content-Encoding* and *Content-Length* headers. ``None`` by default (optional). :param int chunked: Enable chunked transfer encoding. It is up to the developer to decide how to chunk data streams. If chunking is enabled, aiohttp encodes the provided chunks in the "Transfer-encoding: chunked" format. If *chunked* is set, then the *Transfer-encoding* and *content-length* headers are disallowed. ``None`` by default (optional). :param bool expect100: Expect 100-continue response from server. ``False`` by default (optional). :param bool read_until_eof: Read response until EOF if response does not have Content-Length header. ``True`` by default (optional). :param proxy: Proxy URL, :class:`str` or :class:`~yarl.URL` (optional) :param aiohttp.BasicAuth proxy_auth: an object that represents proxy HTTP Basic Authorization (optional) :param int timeout: override the session's timeout (``read_timeout``) for IO operations. :param ssl: SSL validation mode. ``None`` for default SSL check (:func:`ssl.create_default_context` is used), ``False`` for skip SSL certificate validation, :class:`aiohttp.Fingerprint` for fingerprint validation, :class:`ssl.SSLContext` for custom SSL certificate validation. Supersedes *verify_ssl*, *ssl_context* and *fingerprint* parameters. .. versionadded:: 3.0 :param bool verify_ssl: Perform SSL certificate validation for *HTTPS* requests (enabled by default). May be disabled to skip validation for sites with invalid certificates. .. versionadded:: 2.3 .. deprecated:: 3.0 Use ``ssl=False`` :param bytes fingerprint: Pass the SHA256 digest of the expected certificate in DER format to verify that the certificate the server presents matches. Useful for `certificate pinning `_. Warning: use of MD5 or SHA1 digests is insecure and removed. .. versionadded:: 2.3 .. deprecated:: 3.0 Use ``ssl=aiohttp.Fingerprint(digest)`` :param ssl.SSLContext ssl_context: ssl context used for processing *HTTPS* requests (optional). 
*ssl_context* may be used for configuring certification authority channel, supported SSL options etc. .. versionadded:: 2.3 .. deprecated:: 3.0 Use ``ssl=ssl_context`` :param abc.Mapping proxy_headers: HTTP headers to send to the proxy if the parameter proxy has been provided. .. versionadded:: 2.3 :param trace_request_ctx: Object used to give as a kw param for each new :class:`TraceConfig` object instantiated, used to give information to the tracers that is only available at request time. .. versionadded:: 3.0 :return ClientResponse: a :class:`client response ` object. .. comethod:: get(url, *, allow_redirects=True, **kwargs) :async-with: :coroutine: Perform a ``GET`` request. In order to modify inner :meth:`request` parameters, provide `kwargs`. :param url: Request URL, :class:`str` or :class:`~yarl.URL` :param bool allow_redirects: If set to ``False``, do not follow redirects. ``True`` by default (optional). :return ClientResponse: a :class:`client response ` object. .. comethod:: post(url, *, data=None, **kwargs) :async-with: :coroutine: Perform a ``POST`` request. In order to modify inner :meth:`request` parameters, provide `kwargs`. :param url: Request URL, :class:`str` or :class:`~yarl.URL` :param data: Dictionary, bytes, or file-like object to send in the body of the request (optional) :return ClientResponse: a :class:`client response ` object. .. comethod:: put(url, *, data=None, **kwargs) :async-with: :coroutine: Perform a ``PUT`` request. In order to modify inner :meth:`request` parameters, provide `kwargs`. :param url: Request URL, :class:`str` or :class:`~yarl.URL` :param data: Dictionary, bytes, or file-like object to send in the body of the request (optional) :return ClientResponse: a :class:`client response ` object. .. comethod:: delete(url, **kwargs) :async-with: :coroutine: Perform a ``DELETE`` request. In order to modify inner :meth:`request` parameters, provide `kwargs`. 
:param url: Request URL, :class:`str` or :class:`~yarl.URL` :return ClientResponse: a :class:`client response ` object. .. comethod:: head(url, *, allow_redirects=False, **kwargs) :async-with: :coroutine: Perform a ``HEAD`` request. In order to modify inner :meth:`request` parameters, provide `kwargs`. :param url: Request URL, :class:`str` or :class:`~yarl.URL` :param bool allow_redirects: If set to ``False``, do not follow redirects. ``False`` by default (optional). :return ClientResponse: a :class:`client response ` object. .. comethod:: options(url, *, allow_redirects=True, **kwargs) :async-with: :coroutine: Perform an ``OPTIONS`` request. In order to modify inner :meth:`request` parameters, provide `kwargs`. :param url: Request URL, :class:`str` or :class:`~yarl.URL` :param bool allow_redirects: If set to ``False``, do not follow redirects. ``True`` by default (optional). :return ClientResponse: a :class:`client response ` object. .. comethod:: patch(url, *, data=None, **kwargs) :async-with: :coroutine: Perform a ``PATCH`` request. In order to modify inner :meth:`request` parameters, provide `kwargs`. :param url: Request URL, :class:`str` or :class:`~yarl.URL` :param data: Dictionary, bytes, or file-like object to send in the body of the request (optional) :return ClientResponse: a :class:`client response ` object. .. comethod:: ws_connect(url, *, protocols=(), timeout=10.0,\ receive_timeout=None,\ auth=None,\ autoclose=True,\ autoping=True,\ heartbeat=None,\ origin=None, \ proxy=None, proxy_auth=None, ssl=None, \ verify_ssl=None, fingerprint=None, \ ssl_context=None, proxy_headers=None, \ compress=0) :async-with: :coroutine: Create a websocket connection. Returns a :class:`ClientWebSocketResponse` object. :param url: Websocket server url, :class:`str` or :class:`~yarl.URL` :param tuple protocols: Websocket protocols :param float timeout: Timeout for websocket to close. 
``10`` seconds by default :param float receive_timeout: Timeout for websocket to receive complete message. ``None`` (unlimited) seconds by default :param aiohttp.BasicAuth auth: an object that represents HTTP Basic Authorization (optional) :param bool autoclose: Automatically close websocket connection on close message from server. If *autoclose* is False them close procedure has to be handled manually :param bool autoping: automatically send *pong* on *ping* message from server :param float heartbeat: Send *ping* message every *heartbeat* seconds and wait *pong* response, if *pong* response is not received then close connection. The timer is reset on any data reception. :param str origin: Origin header to send to server :param str proxy: Proxy URL, :class:`str` or :class:`~yarl.URL` (optional) :param aiohttp.BasicAuth proxy_auth: an object that represents proxy HTTP Basic Authorization (optional) :param ssl: SSL validation mode. ``None`` for default SSL check (:func:`ssl.create_default_context` is used), ``False`` for skip SSL certificate validation, :class:`aiohttp.Fingerprint` for fingerprint validation, :class:`ssl.SSLContext` for custom SSL certificate validation. Supersedes *verify_ssl*, *ssl_context* and *fingerprint* parameters. .. versionadded:: 3.0 :param bool verify_ssl: Perform SSL certificate validation for *HTTPS* requests (enabled by default). May be disabled to skip validation for sites with invalid certificates. .. versionadded:: 2.3 .. deprecated:: 3.0 Use ``ssl=False`` :param bytes fingerprint: Pass the SHA256 digest of the expected certificate in DER format to verify that the certificate the server presents matches. Useful for `certificate pinning `_. Note: use of MD5 or SHA1 digests is insecure and deprecated. .. versionadded:: 2.3 .. deprecated:: 3.0 Use ``ssl=aiohttp.Fingerprint(digest)`` :param ssl.SSLContext ssl_context: ssl context used for processing *HTTPS* requests (optional). 
*ssl_context* may be used for configuring certification authority channel, supported SSL options etc. .. versionadded:: 2.3 .. deprecated:: 3.0 Use ``ssl=ssl_context`` :param dict proxy_headers: HTTP headers to send to the proxy if the parameter proxy has been provided. .. versionadded:: 2.3
:param dict headers: HTTP Headers to send with the request (optional) :param dict cookies: Cookies to send with the request (optional) :param aiohttp.BasicAuth auth: an object that represents HTTP Basic Authorization (optional) :param bool allow_redirects: If set to ``False``, do not follow redirects. ``True`` by default (optional). :param aiohttp.protocol.HttpVersion version: Request HTTP version (optional) :param bool compress: Set to ``True`` if request has to be compressed with deflate encoding. ``False`` instructs aiohttp to not compress data. ``None`` by default (optional). :param int chunked: Enables chunked transfer encoding. ``None`` by default (optional). :param bool expect100: Expect 100-continue response from server. ``False`` by default (optional). :param aiohttp.connector.BaseConnector connector: BaseConnector sub-class instance to support connection pooling. :param bool read_until_eof: Read response until EOF if response does not have Content-Length header. ``True`` by default (optional). :param loop: :ref:`event loop` used for processing HTTP requests. If param is ``None``, :func:`asyncio.get_event_loop` is used for getting default event loop. .. deprecated:: 2.0 :return ClientResponse: a :class:`client response ` object. Usage:: import aiohttp async def fetch(): async with aiohttp.request('GET', 'http://python.org/') as resp: assert resp.status == 200 print(await resp.text()) .. _aiohttp-client-reference-connectors: Connectors ---------- Connectors are transports for aiohttp client API. There are standard connectors: 1. :class:`TCPConnector` for regular *TCP sockets* (both *HTTP* and *HTTPS* schemes supported). 2. :class:`UnixConnector` for connecting via UNIX socket (it's used mostly for testing purposes). All connector classes should be derived from :class:`BaseConnector`. By default all *connectors* support *keep-alive connections* (behavior is controlled by *force_close* constructor's parameter). BaseConnector ^^^^^^^^^^^^^ .. 
class:: BaseConnector(*, keepalive_timeout=15, \ force_close=False, limit=100, limit_per_host=0, \ enable_cleanup_closed=False, loop=None) Base class for all connectors. :param float keepalive_timeout: timeout for connection reusing after releasing (optional). Values ``0``. For disabling *keep-alive* feature use ``force_close=True`` flag. :param int limit: total number simultaneous connections. If *limit* is ``None`` the connector has no limit (default: 100). :param int limit_per_host: limit simultaneous connections to the same endpoint. Endpoints are the same if they are have equal ``(host, port, is_ssl)`` triple. If *limit* is ``0`` the connector has no limit (default: 0). :param bool force_close: close underlying sockets after connection releasing (optional). :param bool enable_cleanup_closed: some SSL servers do not properly complete SSL shutdown process, in that case asyncio leaks ssl connections. If this parameter is set to True, aiohttp additionally aborts underlining transport after 2 seconds. It is off by default. :param loop: :ref:`event loop` used for handling connections. If param is ``None``, :func:`asyncio.get_event_loop` is used for getting default event loop. .. deprecated:: 2.0 .. attribute:: closed Read-only property, ``True`` if connector is closed. .. attribute:: force_close Read-only property, ``True`` if connector should ultimately close connections on releasing. .. attribute:: limit The total number for simultaneous connections. If limit is 0 the connector has no limit. The default limit size is 100. .. attribute:: limit_per_host The limit for simultaneous connections to the same endpoint. Endpoints are the same if they are have equal ``(host, port, is_ssl)`` triple. If *limit_per_host* is ``None`` the connector has no limit per host. Read-only property. .. method:: close() Close all opened connections. .. versionadded:: 2.0 .. comethod:: connect(request) Get a free connection from pool or create new one if connection is absent in the pool. 
The call may be paused if :attr:`limit` is exhausted until used connections returns to pool. :param aiohttp.ClientRequest request: request object which is connection initiator. :return: :class:`Connection` object. .. comethod:: _create_connection(req) Abstract method for actual connection establishing, should be overridden in subclasses. TCPConnector ^^^^^^^^^^^^ .. class:: TCPConnector(*, ssl=None, verify_ssl=True, fingerprint=None, \ use_dns_cache=True, ttl_dns_cache=10, \ family=0, ssl_context=None, local_addr=None, \ resolver=None, keepalive_timeout=sentinel, \ force_close=False, limit=100, limit_per_host=0, \ enable_cleanup_closed=False, loop=None) Connector for working with *HTTP* and *HTTPS* via *TCP* sockets. The most common transport. When you don't know what connector type to use, use a :class:`TCPConnector` instance. :class:`TCPConnector` inherits from :class:`BaseConnector`. Constructor accepts all parameters suitable for :class:`BaseConnector` plus several TCP-specific ones: :param ssl: SSL validation mode. ``None`` for default SSL check (:func:`ssl.create_default_context` is used), ``False`` for skip SSL certificate validation, :class:`aiohttp.Fingerprint` for fingerprint validation, :class:`ssl.SSLContext` for custom SSL certificate validation. Supersedes *verify_ssl*, *ssl_context* and *fingerprint* parameters. .. versionadded:: 3.0 :param bool verify_ssl: perform SSL certificate validation for *HTTPS* requests (enabled by default). May be disabled to skip validation for sites with invalid certificates. .. deprecated:: 2.3 Pass *verify_ssl* to ``ClientSession.get()`` etc. :param bytes fingerprint: pass the SHA256 digest of the expected certificate in DER format to verify that the certificate the server presents matches. Useful for `certificate pinning `_. Note: use of MD5 or SHA1 digests is insecure and deprecated. .. deprecated:: 2.3 Pass *verify_ssl* to ``ClientSession.get()`` etc. 
:param bool use_dns_cache: use internal cache for DNS lookups, ``True`` by default. Enabling an option *may* speedup connection establishing a bit but may introduce some *side effects* also. :param int ttl_dns_cache: expire after some seconds the DNS entries, ``None`` means cached forever. By default 10 seconds. By default DNS entries are cached forever, in some environments the IP addresses related to a specific HOST can change after a specific time. Use this option to keep the DNS cache updated refreshing each entry after N seconds. .. versionadded:: 2.0.8 :param int limit: total number simultaneous connections. If *limit* is ``None`` the connector has no limit (default: 100). :param int limit_per_host: limit simultaneous connections to the same endpoint. Endpoints are the same if they are have equal ``(host, port, is_ssl)`` triple. If *limit* is ``0`` the connector has no limit (default: 0). :param aiohttp.abc.AbstractResolver resolver: custom resolver instance to use. ``aiohttp.DefaultResolver`` by default (asynchronous if ``aiodns>=1.1`` is installed). Custom resolvers allow to resolve hostnames differently than the way the host is configured. The resolver is ``aiohttp.ThreadedResolver`` by default, asynchronous version is pretty robust but might fail in very rare cases. :param int family: TCP socket family, both IPv4 and IPv6 by default. For *IPv4* only use :const:`socket.AF_INET`, for *IPv6* only -- :const:`socket.AF_INET6`. *family* is ``0`` by default, that means both IPv4 and IPv6 are accepted. To specify only concrete version please pass :const:`socket.AF_INET` or :const:`socket.AF_INET6` explicitly. :param ssl.SSLContext ssl_context: SSL context used for processing *HTTPS* requests (optional). *ssl_context* may be used for configuring certification authority channel, supported SSL options etc. :param tuple local_addr: tuple of ``(local_host, local_port)`` used to bind socket locally if specified. 
:param bool force_close: close underlying sockets after connection releasing (optional). :param tuple enable_cleanup_closed: Some ssl servers do not properly complete SSL shutdown process, in that case asyncio leaks SSL connections. If this parameter is set to True, aiohttp additionally aborts underlining transport after 2 seconds. It is off by default. .. attribute:: family *TCP* socket family e.g. :const:`socket.AF_INET` or :const:`socket.AF_INET6` Read-only property. .. attribute:: dns_cache Use quick lookup in internal *DNS* cache for host names if ``True``. Read-only :class:`bool` property. .. attribute:: cached_hosts The cache of resolved hosts if :attr:`dns_cache` is enabled. Read-only :class:`types.MappingProxyType` property. .. method:: clear_dns_cache(self, host=None, port=None) Clear internal *DNS* cache. Remove specific entry if both *host* and *port* are specified, clear all cache otherwise. UnixConnector ^^^^^^^^^^^^^ .. class:: UnixConnector(path, *, conn_timeout=None, \ keepalive_timeout=30, limit=100, \ force_close=False, loop=None) Unix socket connector. Use :class:`UnixConnector` for sending *HTTP/HTTPS* requests through *UNIX Sockets* as underlying transport. UNIX sockets are handy for writing tests and making very fast connections between processes on the same host. :class:`UnixConnector` is inherited from :class:`BaseConnector`. Usage:: conn = UnixConnector(path='/path/to/socket') session = ClientSession(connector=conn) async with session.get('http://python.org') as resp: ... Constructor accepts all parameters suitable for :class:`BaseConnector` plus UNIX-specific one: :param str path: Unix socket path .. attribute:: path Path to *UNIX socket*, read-only :class:`str` property. Connection ^^^^^^^^^^ .. class:: Connection Encapsulates single connection in connector object. End user should never create :class:`Connection` instances manually but get it by :meth:`BaseConnector.connect` coroutine. .. 
attribute:: closed :class:`bool` read-only property, ``True`` if connection was closed, released or detached. .. attribute:: loop Event loop used for connection .. attribute:: transport Connection transport .. method:: close() Close connection with forcibly closing underlying socket. .. method:: release() Release connection back to connector. Underlying socket is not closed, the connection may be reused later if timeout (30 seconds by default) for connection was not expired. .. method:: detach() Detach underlying socket from connection. Underlying socket is not closed, next :meth:`close` or :meth:`release` calls don't return socket to free pool. Response object --------------- .. class:: ClientResponse Client response returned be :meth:`ClientSession.request` and family. User never creates the instance of ClientResponse class but gets it from API calls. :class:`ClientResponse` supports async context manager protocol, e.g.:: resp = await client_session.get(url) async with resp: assert resp.status == 200 After exiting from ``async with`` block response object will be *released* (see :meth:`release` coroutine). .. attribute:: version Response's version, :class:`HttpVersion` instance. .. attribute:: status HTTP status code of response (:class:`int`), e.g. ``200``. .. attribute:: reason HTTP status reason of response (:class:`str`), e.g. ``"OK"``. .. attribute:: method Request's method (:class:`str`). .. attribute:: url URL of request (:class:`~yarl.URL`). .. attribute:: connection :class:`Connection` used for handling response. .. attribute:: content Payload stream, which contains response's BODY (:class:`StreamReader`). It supports various reading methods depending on the expected format. When chunked transfer encoding is used by the server, allows retrieving the actual http chunks. 
Reading from the stream may raise :exc:`aiohttp.ClientPayloadError` if the response object is closed before response receives all data or in case if any transfer encoding related errors like misformed chunked encoding of broken compression data. .. attribute:: cookies HTTP cookies of response (*Set-Cookie* HTTP header, :class:`~http.cookies.SimpleCookie`). .. attribute:: headers A case-insensitive multidict proxy with HTTP headers of response, :class:`~multidict.CIMultiDictProxy`. .. attribute:: raw_headers Unmodified HTTP headers of response as unconverted bytes, a sequence of ``(key, value)`` pairs. .. attribute:: content_type Read-only property with *content* part of *Content-Type* header. .. note:: Returns value is ``'application/octet-stream'`` if no Content-Type header present in HTTP headers according to :rfc:`2616`. To make sure Content-Type header is not present in the server reply, use :attr:`headers` or :attr:`raw_headers`, e.g. ``'CONTENT-TYPE' not in resp.headers``. .. attribute:: charset Read-only property that specifies the *encoding* for the request's BODY. The value is parsed from the *Content-Type* HTTP header. Returns :class:`str` like ``'utf-8'`` or ``None`` if no *Content-Type* header present in HTTP headers or it has no charset information. .. attribute:: content_disposition Read-only property that specified the *Content-Disposition* HTTP header. Instance of :class:`ContentDisposition` or ``None`` if no *Content-Disposition* header present in HTTP headers. .. attribute:: history A :class:`~collections.abc.Sequence` of :class:`ClientResponse` objects of preceding requests (earliest request first) if there were redirects, an empty sequence otherwise. .. method:: close() Close response and underlying connection. For :term:`keep-alive` support see :meth:`release`. .. comethod:: read() Read the whole response's body as :class:`bytes`. Close underlying connection if data reading gets an error, release connection otherwise. 
Raise an :exc:`aiohttp.ClientResponseError` if the data can't be read. :return bytes: read *BODY*. .. seealso:: :meth:`close`, :meth:`release`. .. comethod:: release() It is not required to call `release` on the response object. When the client fully receives the payload, the underlying connection automatically returns back to pool. If the payload is not fully read, the connection is closed .. method:: raise_for_status() Raise an :exc:`aiohttp.ClientResponseError` if the response status is 400 or higher. Do nothing for success responses (less than 400). .. comethod:: text(encoding=None) Read response's body and return decoded :class:`str` using specified *encoding* parameter. If *encoding* is ``None`` content encoding is autocalculated using ``Content-Type`` HTTP header and *chardet* tool if the header is not provided by server. :term:`cchardet` is used with fallback to :term:`chardet` if *cchardet* is not available. Close underlying connection if data reading gets an error, release connection otherwise. :param str encoding: text encoding used for *BODY* decoding, or ``None`` for encoding autodetection (default). :return str: decoded *BODY* .. note:: If response has no ``charset`` info in ``Content-Type`` HTTP header :term:`cchardet` / :term:`chardet` is used for content encoding autodetection. It may hurt performance. If page encoding is known passing explicit *encoding* parameter might help:: await resp.text('ISO-8859-1') .. comethod:: json(*, encoding=None, loads=json.loads, \ content_type='application/json') Read response's body as *JSON*, return :class:`dict` using specified *encoding* and *loader*. If data is not still available a ``read`` call will be done, If *encoding* is ``None`` content encoding is autocalculated using :term:`cchardet` or :term:`chardet` as fallback if *cchardet* is not available. if response's `content-type` does not match `content_type` parameter :exc:`aiohttp.ContentTypeError` get raised. 
To disable content type check pass ``None`` value. :param str encoding: text encoding used for *BODY* decoding, or ``None`` for encoding autodetection (default). :param callable loads: :func:`callable` used for loading *JSON* data, :func:`json.loads` by default. :param str content_type: specify response's content-type, if content type does not match raise :exc:`aiohttp.ClientResponseError`. To disable `content-type` check, pass ``None`` as value. (default: `application/json`). :return: *BODY* as *JSON* data parsed by *loads* parameter or ``None`` if *BODY* is empty or contains white-spaces only. .. attribute:: request_info A namedtuple with request URL and headers from :class:`ClientRequest` object, :class:`aiohttp.RequestInfo` instance. .. method:: get_encoding() Automatically detect content encoding using ``charset`` info in ``Content-Type`` HTTP header. If this info is not exists or there are no appropriate codecs for encoding then :term:`cchardet` / :term:`chardet` is used. .. versionadded:: 3.0 ClientWebSocketResponse ----------------------- To connect to a websocket server :func:`aiohttp.ws_connect` or :meth:`aiohttp.ClientSession.ws_connect` coroutines should be used, do not create an instance of class :class:`ClientWebSocketResponse` manually. .. class:: ClientWebSocketResponse() Class for handling client-side websockets. .. attribute:: closed Read-only property, ``True`` if :meth:`close` has been called or :const:`~aiohttp.WSMsgType.CLOSE` message has been received from peer. .. attribute:: protocol Websocket *subprotocol* chosen after :meth:`start` call. May be ``None`` if server and client protocols are not overlapping. .. method:: get_extra_info(name, default=None) Reads extra info from connection's transport .. method:: exception() Returns exception if any occurs or returns None. .. comethod:: ping(message=b'') Send :const:`~aiohttp.WSMsgType.PING` to peer. 
:param message: optional payload of *ping* message, :class:`str` (converted to *UTF-8* encoded bytes) or :class:`bytes`. .. versionchanged:: 3.0 The method is converted into :term:`coroutine` .. comethod:: pong(message=b'') Send :const:`~aiohttp.WSMsgType.PONG` to peer. :param message: optional payload of *pong* message, :class:`str` (converted to *UTF-8* encoded bytes) or :class:`bytes`. .. versionchanged:: 3.0 The method is converted into :term:`coroutine` .. comethod:: send_str(data, compress=None) Send *data* to peer as :const:`~aiohttp.WSMsgType.TEXT` message. :param str data: data to send. :param int compress: sets specific level of compression for single message, ``None`` for not overriding per-socket setting. :raise TypeError: if data is not :class:`str` .. versionchanged:: 3.0 The method is converted into :term:`coroutine`, *compress* parameter added. .. comethod:: send_bytes(data, compress=None) Send *data* to peer as :const:`~aiohttp.WSMsgType.BINARY` message. :param data: data to send. :param int compress: sets specific level of compression for single message, ``None`` for not overriding per-socket setting. :raise TypeError: if data is not :class:`bytes`, :class:`bytearray` or :class:`memoryview`. .. versionchanged:: 3.0 The method is converted into :term:`coroutine`, *compress* parameter added. .. comethod:: send_json(data, compress=None, *, dumps=json.dumps) Send *data* to peer as JSON string. :param data: data to send. :param int compress: sets specific level of compression for single message, ``None`` for not overriding per-socket setting. :param callable dumps: any :term:`callable` that accepts an object and returns a JSON string (:func:`json.dumps` by default). :raise RuntimeError: if connection is not started or closing :raise ValueError: if data is not serializable object :raise TypeError: if value returned by ``dumps(data)`` is not :class:`str` .. versionchanged:: 3.0 The method is converted into :term:`coroutine`, *compress* parameter added. 
.. comethod:: close(*, code=1000, message=b'') A :ref:`coroutine` that initiates closing handshake by sending :const:`~aiohttp.WSMsgType.CLOSE` message. It waits for close response from server. To add a timeout to `close()` call just wrap the call with `asyncio.wait()` or `asyncio.wait_for()`. :param int code: closing code :param message: optional payload of *pong* message, :class:`str` (converted to *UTF-8* encoded bytes) or :class:`bytes`. .. comethod:: receive() A :ref:`coroutine` that waits upcoming *data* message from peer and returns it. The coroutine implicitly handles :const:`~aiohttp.WSMsgType.PING`, :const:`~aiohttp.WSMsgType.PONG` and :const:`~aiohttp.WSMsgType.CLOSE` without returning the message. It process *ping-pong game* and performs *closing handshake* internally. :return: :class:`~aiohttp.WSMessage` .. coroutinemethod:: receive_str() A :ref:`coroutine` that calls :meth:`receive` but also asserts the message type is :const:`~aiohttp.WSMsgType.TEXT`. :return str: peer's message content. :raise TypeError: if message is :const:`~aiohttp.WSMsgType.BINARY`. .. coroutinemethod:: receive_bytes() A :ref:`coroutine` that calls :meth:`receive` but also asserts the message type is :const:`~aiohttp.WSMsgType.BINARY`. :return bytes: peer's message content. :raise TypeError: if message is :const:`~aiohttp.WSMsgType.TEXT`. .. coroutinemethod:: receive_json(*, loads=json.loads) A :ref:`coroutine` that calls :meth:`receive_str` and loads the JSON string to a Python dict. :param callable loads: any :term:`callable` that accepts :class:`str` and returns :class:`dict` with parsed JSON (:func:`json.loads` by default). :return dict: loaded JSON content :raise TypeError: if message is :const:`~aiohttp.WSMsgType.BINARY`. :raise ValueError: if message is not valid JSON. Utilities --------- RequestInfo ^^^^^^^^^^^ .. class:: RequestInfo() A namedtuple with request URL and headers from :class:`ClientRequest` object, available as :attr:`ClientResponse.request_info` attribute. 
.. attribute:: url Requested *url*, :class:`yarl.URL` instance. .. attribute:: method Request HTTP method like ``'GET'`` or ``'POST'``, :class:`str`. .. attribute:: headers HTTP headers for request, :class:`multidict.CIMultiDict` instance. BasicAuth ^^^^^^^^^ .. class:: BasicAuth(login, password='', encoding='latin1') HTTP basic authentication helper. :param str login: login :param str password: password :param str encoding: encoding (``'latin1'`` by default) Should be used for specifying authorization data in client API, e.g. *auth* parameter for :meth:`ClientSession.request`. .. classmethod:: decode(auth_header, encoding='latin1') Decode HTTP basic authentication credentials. :param str auth_header: The ``Authorization`` header to decode. :param str encoding: (optional) encoding ('latin1' by default) :return: decoded authentication data, :class:`BasicAuth`. .. classmethod:: from_url(url) Constructed credentials info from url's *user* and *password* parts. :return: credentials data, :class:`BasicAuth` or ``None`` is credentials are not provided. .. versionadded:: 2.3 .. method:: encode() Encode credentials into string suitable for ``Authorization`` header etc. :return: encoded authentication data, :class:`str`. CookieJar ^^^^^^^^^ .. class:: CookieJar(*, unsafe=False, loop=None) The cookie jar instance is available as :attr:`ClientSession.cookie_jar`. The jar contains :class:`~http.cookies.Morsel` items for storing internal cookie data. API provides a count of saved cookies:: len(session.cookie_jar) These cookies may be iterated over:: for cookie in session.cookie_jar: print(cookie.key) print(cookie["domain"]) The class implements :class:`collections.abc.Iterable`, :class:`collections.abc.Sized` and :class:`aiohttp.AbstractCookieJar` interfaces. Implements cookie storage adhering to RFC 6265. :param bool unsafe: (optional) Whether to accept cookies from IPs. :param bool loop: an :ref:`event loop` instance. See :class:`aiohttp.abc.AbstractCookieJar` .. 
deprecated:: 2.0 .. method:: update_cookies(cookies, response_url=None) Update cookies returned by server in ``Set-Cookie`` header. :param cookies: a :class:`collections.abc.Mapping` (e.g. :class:`dict`, :class:`~http.cookies.SimpleCookie`) or *iterable* of *pairs* with cookies returned by server's response. :param str response_url: URL of response, ``None`` for *shared cookies*. Regular cookies are coupled with server's URL and are sent only to this server, shared ones are sent in every client request. .. method:: filter_cookies(request_url) Return jar's cookies acceptable for URL and available in ``Cookie`` header for sending client requests for given URL. :param str response_url: request's URL for which cookies are asked. :return: :class:`http.cookies.SimpleCookie` with filtered cookies for given URL. .. method:: save(file_path) Write a pickled representation of cookies into the file at provided path. :param file_path: Path to file where cookies will be serialized, :class:`str` or :class:`pathlib.Path` instance. .. method:: load(file_path) Load a pickled representation of cookies from the file at provided path. :param file_path: Path to file from where cookies will be imported, :class:`str` or :class:`pathlib.Path` instance. .. class:: DummyCookieJar(*, loop=None) Dummy cookie jar which does not store cookies but ignores them. Could be useful e.g. for web crawlers to iterate over Internet without blowing up with saved cookies information. To install dummy cookie jar pass it into session instance:: jar = aiohttp.DummyCookieJar() session = aiohttp.ClientSession(cookie_jar=DummyCookieJar()) .. class:: Fingerprint(digest) Fingerprint helper for checking SSL certificates by *SHA256* digest. :param bytes digest: *SHA256* digest for certificate in DER-encoded binary form (see :meth:`ssl.SSLSocket.getpeercert`). 
To check fingerprint pass the object into :meth:`ClientSession.get` call, e.g.:: import hashlib with open(path_to_cert, 'rb') as f: digest = hashlib.sha256(f.read()).digest() await session.get(url, ssl=aiohttp.Fingerprint(digest)) .. versionadded:: 3.0 Client exceptions ----------------- Exception hierarchy has been significantly modified in version 2.0. aiohttp defines only exceptions that covers connection handling and server response misbehaviors. For developer specific mistakes, aiohttp uses python standard exceptions like :exc:`ValueError` or :exc:`TypeError`. Reading a response content may raise a :exc:`ClientPayloadError` exception. This exception indicates errors specific to the payload encoding. Such as invalid compressed data, malformed chunked-encoded chunks or not enough data that satisfy the content-length header. All exceptions are available as members of *aiohttp* module. .. exception:: ClientError Base class for all client specific exceptions. Derived from :exc:`Exception` .. class:: ClientPayloadError This exception can only be raised while reading the response payload if one of these errors occurs: 1. invalid compression 2. malformed chunked encoding 3. not enough data that satisfy ``Content-Length`` HTTP header. Derived from :exc:`ClientError` .. exception:: InvalidURL URL used for fetching is malformed, e.g. it does not contain host part. Derived from :exc:`ClientError` and :exc:`ValueError` .. attribute:: url Invalid URL, :class:`yarl.URL` instance. .. class:: ContentDisposition Represent Content-Disposition header .. attribute:: value A :class:`str` instance. Value of Content-Disposition header itself, e.g. ``attachment``. .. attribute:: filename A :class:`str` instance. Content filename extracted from parameters. May be ``None``. .. attribute:: parameters Read-only mapping contains all parameters. Response errors ^^^^^^^^^^^^^^^ .. exception:: ClientResponseError These exceptions could happen after we get response from server. 
Derived from :exc:`ClientError` .. attribute:: request_info Instance of :class:`RequestInfo` object, contains information about request. .. attribute:: code HTTP status code of response (:class:`int`), e.g. ``200``. .. attribute:: message Message of response (:class:`str`), e.g. ``"OK"``. .. attribute:: headers Headers in response, a list of pairs. .. attribute:: history History from failed response, if available, else empty tuple. A :class:`tuple` of :class:`ClientResponse` objects used for handle redirection responses. .. class:: WSServerHandshakeError Web socket server response error. Derived from :exc:`ClientResponseError` .. class:: WSServerHandshakeError Web socket server response error. Derived from :exc:`ClientResponseError` .. class:: ContentTypeError Invalid content type. Derived from :exc:`ClientResponseError` .. versionadded:: 2.3 Connection errors ^^^^^^^^^^^^^^^^^ .. class:: ClientConnectionError These exceptions related to low-level connection problems. Derived from :exc:`ClientError` .. class:: ClientOSError Subset of connection errors that are initiated by an :exc:`OSError` exception. Derived from :exc:`ClientConnectionError` and :exc:`OSError` .. class:: ClientConnectorError Connector related exceptions. Derived from :exc:`ClientOSError` .. class:: ClientProxyConnectionError Derived from :exc:`ClientConnectonError` .. class:: ServerConnectionError Derived from :exc:`ClientConnectonError` .. class:: ClientSSLError Derived from :exc:`ClientConnectonError` .. class:: ClientConnectorSSLError Response ssl error. Derived from :exc:`ClientSSLError` and :exc:`ssl.SSLError` .. class:: ClientConnectorCertificateError Response certificate error. Derived from :exc:`ClientSSLError` and :exc:`ssl.CertificateError` .. class:: ServerDisconnectedError Server disconnected. Derived from :exc:`ServerDisconnectonError` .. attribute:: message Partially parsed HTTP message (optional). .. class:: ServerTimeoutError Server operation timeout: read timeout, etc. 
Derived from :exc:`ServerConnectonError` and :exc:`asyncio.TimeoutError` .. class:: ServerFingerprintMismatch Server fingerprint mismatch. Derived from :exc:`ServerConnectonError` Hierarchy of exceptions ^^^^^^^^^^^^^^^^^^^^^^^ * :exc:`ClientError` * :exc:`ClientResponseError` * :exc:`ContentTypeError` * :exc:`WSServerHandshakeError` * :exc:`ClientHttpProxyError` * :exc:`ClientConnectionError` * :exc:`ClientOSError` * :exc:`ClientConnectorError` * :exc:`ClientSSLError` * :exc:`ClientConnectorCertificateError` * :exc:`ClientConnectorSSLError` * :exc:`ClientProxyConnectionError` * :exc:`ServerConnectionError` * :exc:`ServerDisconnectedError` * :exc:`ServerTimeoutError` * :exc:`ServerFingerprintMismatch` * :exc:`ClientPayloadError` * :exc:`InvalidURL` aiohttp-3.0.1/docs/conf.py0000666000000000000000000002572213240304665013557 0ustar 00000000000000#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # aiohttp documentation build configuration file, created by # sphinx-quickstart on Wed Mar 5 12:35:35 2014. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import codecs import os import re import sys _docs_path = os.path.dirname(__file__) _version_path = os.path.abspath(os.path.join(_docs_path, '..', 'aiohttp', '__init__.py')) with codecs.open(_version_path, 'r', 'latin1') as fp: try: _version_info = re.search(r"^__version__ = '" r"(?P\d+)" r"\.(?P\d+)" r"\.(?P\d+)" r"(?P.*)?'$", fp.read(), re.M).groupdict() except IndexError: raise RuntimeError('Unable to determine version.') # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
sys.path.insert(0, os.path.abspath('..')) sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinx.ext.intersphinx', 'sphinxcontrib.asyncio', ] try: import sphinxcontrib.spelling # noqa extensions.append('sphinxcontrib.spelling') except ImportError: pass intersphinx_mapping = { 'python': ('http://docs.python.org/3', None), 'multidict': ('https://multidict.readthedocs.io/en/stable/', None), 'yarl': ('https://yarl.readthedocs.io/en/stable/', None), 'aiohttpjinja2': ('https://aiohttp-jinja2.readthedocs.io/en/stable/', None), 'aiohttpremotes': ('https://aiohttp-remotes.readthedocs.io/en/stable/', None), 'aiohttpsession': ('https://aiohttp-session.readthedocs.io/en/stable/', None)} # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = 'aiohttp' copyright = '2013-2018, Aiohttp contributors' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '{major}.{minor}'.format(**_version_info) # The full version, including alpha/beta/rc tags. release = '{major}.{minor}.{patch}-{tag}'.format(**_version_info) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
# language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all # documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. # pygments_style = 'sphinx' # The default language to highlight source code in. highlight_language = 'python3' # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. # keep_warnings = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'aiohttp_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. 
html_theme_options = { 'logo': 'aiohttp-icon-128x128.png', 'description': 'Async HTTP client/server for asyncio and Python', 'canonical_url': 'http://docs.aiohttp.org/en/stable/', 'github_user': 'aio-libs', 'github_repo': 'aiohttp', 'github_button': True, 'github_type': 'star', 'github_banner': True, 'badges': [{'image': 'https://secure.travis-ci.org/aio-libs/aiohttp.svg?branch=master', 'target': 'https://travis-ci.org/aio-libs/aiohttp', 'height': '20', 'alt': 'Travis CI status'}, {'image': 'https://codecov.io/github/aio-libs/aiohttp/coverage.svg?branch=master', 'target': 'https://codecov.io/github/aio-libs/aiohttp', 'height': '20', 'alt': 'Code coverage status'}, {'image': 'https://badge.fury.io/py/aiohttp.svg', 'target': 'https://badge.fury.io/py/aiohttp', 'height': '20', 'alt': 'Latest PyPI package version'}, {'image': 'https://badges.gitter.im/Join%20Chat.svg', 'target': 'https://gitter.im/aio-libs/Lobby', 'height': '20', 'alt': 'Chat on Gitter'}], } # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [alabaster.get_path()] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = 'aiohttp-icon.svg' # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. html_favicon = 'favicon.ico' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". 
html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. # html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. html_sidebars = { '**': [ 'about.html', 'navigation.html', 'searchbox.html', ] } # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'aiohttpdoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). 
# 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # 'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ('index', 'aiohttp.tex', 'aiohttp Documentation', 'aiohttp contributors', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'aiohttp', 'aiohttp Documentation', ['aiohttp'], 1) ] # If true, show URL addresses after external links. # man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'aiohttp', 'aiohttp Documentation', 'Aiohttp contributors', 'aiohttp', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. # texinfo_appendices = [] # If false, no module index is generated. # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. # texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. 
# texinfo_no_detailmenu = False aiohttp-3.0.1/docs/contributing.rst0000666000000000000000000000007413240304665015512 0ustar 00000000000000.. _aiohttp-contributing: .. include:: ../CONTRIBUTING.rst aiohttp-3.0.1/docs/deployment.rst0000666000000000000000000002071713240304665015171 0ustar 00000000000000.. _aiohttp-deployment: ================= Server Deployment ================= There are several options for aiohttp server deployment: * Standalone server * Running a pool of backend servers behind of :term:`nginx`, HAProxy or other *reverse proxy server* * Using :term:`gunicorn` behind of *reverse proxy* Every method has own benefits and disadvantages. .. _aiohttp-deployment-standalone: Standalone ========== Just call :func:`aiohttp.web.run_app` function passing :class:`aiohttp.web.Application` instance. The method is very simple and could be the best solution in some trivial cases. But it does not utilize all CPU cores. For running multiple aiohttp server instances use *reverse proxies*. .. _aiohttp-deployment-nginx-supervisord: Nginx+supervisord ================= Running aiohttp servers behind :term:`nginx` makes several advantages. At first, nginx is the perfect frontend server. It may prevent many attacks based on malformed http protocol etc. Second, running several aiohttp instances behind nginx allows to utilize all CPU cores. Third, nginx serves static files much faster than built-in aiohttp static file support. But this way requires more complex configuration. Nginx configuration -------------------- Here is short extraction about writing Nginx configuration file. It does not cover all available Nginx options. For full reference read `Nginx tutorial `_ and `official Nginx documentation `_. First configure HTTP server itself: .. 
code-block:: nginx http { server { listen 80; client_max_body_size 4G; server_name example.com; location / { proxy_set_header Host $http_host; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_redirect off; proxy_buffering off; proxy_pass http://aiohttp; } location /static { # path for static files root /path/to/app/static; } } } This config listens on port ``80`` for server named ``example.com`` and redirects everything to ``aiohttp`` backend group. Also it serves static files from ``/path/to/app/static`` path as ``example.com/static``. Next we need to configure *aiohttp upstream group*: .. code-block:: nginx http { upstream aiohttp { # fail_timeout=0 means we always retry an upstream even if it failed # to return a good HTTP response # Unix domain servers server unix:/tmp/example_1.sock fail_timeout=0; server unix:/tmp/example_2.sock fail_timeout=0; server unix:/tmp/example_3.sock fail_timeout=0; server unix:/tmp/example_4.sock fail_timeout=0; # Unix domain sockets are used in this example due to their high performance, # but TCP/IP sockets could be used instead: # server 127.0.0.1:8081 fail_timeout=0; # server 127.0.0.1:8082 fail_timeout=0; # server 127.0.0.1:8083 fail_timeout=0; # server 127.0.0.1:8084 fail_timeout=0; } } All HTTP requests for ``http://example.com`` except ones for ``http://example.com/static`` will be redirected to ``example1.sock``, ``example2.sock``, ``example3.sock`` or ``example4.sock`` backend servers. By default, Nginx uses round-robin algorithm for backend selection. .. note:: Nginx is not the only existing *reverse proxy server* but the most popular one. Alternatives like HAProxy may be used as well. Supervisord ----------- After configuring Nginx we need to start our aiohttp backends. Better to use some tool for starting them automatically after system reboot or backend crash. There are very many ways to do it: Supervisord, Upstart, Systemd, Gaffer, Circus, Runit etc. Here we'll use `Supervisord `_ for example: .. 
code-block:: cfg [program:aiohttp] numprocs = 4 numprocs_start = 1 process_name = example_%(process_num)s ; Unix socket paths are specified by command line. command=/path/to/aiohttp_example.py --path=/tmp/example_%(process_num)s.sock ; We can just as easily pass TCP port numbers: ; command=/path/to/aiohttp_example.py --port=808%(process_num)s user=nobody autostart=true autorestart=true aiohttp server -------------- The last step is preparing aiohttp server for working with supervisord. Assuming we have properly configured :class:`aiohttp.web.Application` and port is specified by command line, the task is trivial: .. code-block:: python3 # aiohttp_example.py import argparse from aiohttp import web parser = argparse.ArgumentParser(description="aiohttp server example") parser.add_argument('--path') parser.add_argument('--port') if __name__ == '__main__': app = web.Application() # configure app args = parser.parse_args() web.run_app(app, path=args.path, port=args.port) For real use cases we perhaps need to configure other things like logging etc., but it's out of scope of the topic. .. _aiohttp-deployment-gunicorn: Nginx+Gunicorn ============== aiohttp can be deployed using `Gunicorn `_, which is based on a pre-fork worker model. Gunicorn launches your app as worker processes for handling incoming requests. In opposite to deployment with :ref:`bare Nginx ` the solution does not need to manually run several aiohttp processes and use tool like supervisord for monitoring it. But nothing is for free: running aiohttp application under gunicorn is slightly slower. Prepare environment ------------------- You firstly need to setup your deployment environment. This example is based on `Ubuntu `_ 16.04. 
Create a directory for your application:: >> mkdir myapp >> cd myapp Create Python virtual environment:: >> python3 -m venv venv >> source venv/bin/activate Now that the virtual environment is ready, we'll proceed to install aiohttp and gunicorn:: >> pip install gunicorn >> pip install aiohttp Application ----------- Lets write a simple application, which we will save to file. We'll name this file *my_app_module.py*:: from aiohttp import web def index(request): return web.Response(text="Welcome home!") my_web_app = web.Application() my_web_app.router.add_get('/', index) Start Gunicorn -------------- When `Running Gunicorn `_, you provide the name of the module, i.e. *my_app_module*, and the name of the app, i.e. *my_web_app*, along with other `Gunicorn Settings `_ provided as command line flags or in your config file. In this case, we will use: * the *'--bind'* flag to set the server's socket address; * the *'--worker-class'* flag to tell Gunicorn that we want to use a custom worker subclass instead of one of the Gunicorn default worker types; * you may also want to use the *'--workers'* flag to tell Gunicorn how many worker processes to use for handling requests. (See the documentation for recommendations on `How Many Workers? `_) The custom worker subclass is defined in ``aiohttp.GunicornWebWorker``:: >> gunicorn my_app_module:my_web_app --bind localhost:8080 --worker-class aiohttp.GunicornWebWorker [2017-03-11 18:27:21 +0000] [1249] [INFO] Starting gunicorn 19.7.1 [2017-03-11 18:27:21 +0000] [1249] [INFO] Listening at: http://127.0.0.1:8080 (1249) [2017-03-11 18:27:21 +0000] [1249] [INFO] Using worker: aiohttp.worker.GunicornWebWorker [2015-03-11 18:27:21 +0000] [1253] [INFO] Booting worker with pid: 1253 Gunicorn is now running and ready to serve requests to your app's worker processes. .. note:: If you want to use an alternative asyncio event loop `uvloop `_, you can use the ``aiohttp.GunicornUVLoopWebWorker`` worker class. 
More information ---------------- The Gunicorn documentation recommends deploying Gunicorn behind an Nginx proxy server. See the `official documentation `_ for more information about suggested nginx configuration. Logging configuration --------------------- ``aiohttp`` and ``gunicorn`` use different format for specifying access log. By default aiohttp uses own defaults:: '%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"' For more information please read :ref:`Format Specification for Access Log `. aiohttp-3.0.1/docs/essays.rst0000666000000000000000000000014213240304665014306 0ustar 00000000000000Essays ====== .. toctree:: new_router whats_new_1_1 migration_to_2xx whats_new_3_0 aiohttp-3.0.1/docs/external.rst0000666000000000000000000000060613240304665014626 0ustar 00000000000000Who use aiohttp? ================ The list of *aiohttp* users: both libraries, big projects and web sites. Please don't hesitate to add your awesome project to the list by making a Pull Request on GitHub_. If you like the project -- please go to GitHub_ and press *Star* button! .. toctree:: third_party built_with powered_by .. _GitHub: https://github.com/aio-libs/aiohttp aiohttp-3.0.1/docs/faq.rst0000666000000000000000000003033713240304665013557 0ustar 00000000000000FAQ === .. contents:: :local: Are there any plans for @app.route decorator like in Flask? ----------------------------------------------------------- We have it already (*aiohttp>=2.3* required): :ref:`aiohttp-web-alternative-routes-definition`. The difference is: ``@app.route`` should have an ``app`` in module global namespace, which makes *circular import hell* easy. *aiohttp* provides a :class:`~aiohttp.web.RouteTableDef` decoupled from an application instance:: routes = web.RouteTableDef() @routes.get('/get') async def handle_get(request): ... @routes.post('/post') async def handle_post(request): ... app.router.add_routes(routes) Has aiohttp the Flask Blueprint or Django App concept? 
------------------------------------------------------ If you're planing to write big applications, maybe you must consider use nested applications. They acts as a Flask Blueprint or like the Django application concept. Using nested application you can add sub-applications to the main application. see: :ref:`aiohttp-web-nested-applications`. How to create route that catches urls with given prefix? --------------------------------------------------------- Try something like:: app.router.add_route('*', '/path/to/{tail:.+}', sink_handler) Where first argument, star, means catch any possible method (*GET, POST, OPTIONS*, etc), second matching ``url`` with desired prefix, third -- handler. Where to put my database connection so handlers can access it? -------------------------------------------------------------- :class:`aiohttp.web.Application` object supports :class:`dict` interface, and right place to store your database connections or any other resource you want to share between handlers. Take a look on following example:: async def go(request): db = request.app['db'] cursor = await db.cursor() await cursor.execute('SELECT 42') # ... return web.Response(status=200, text='ok') async def init_app(loop): app = Application(loop=loop) db = await create_connection(user='user', password='123') app['db'] = db app.router.add_get('/', go) return app Why the minimal supported version is Python 3.4.2 -------------------------------------------------- As of aiohttp **v0.18.0** we dropped support for Python 3.3 up to 3.4.1. The main reason for that is the :meth:`object.__del__` method, which is fully working since Python 3.4.1 and we need it for proper resource closing. The last Python 3.3, 3.4.0 compatible version of aiohttp is **v0.17.4**. 
This should not be an issue for most aiohttp users (for example `Ubuntu` 14.04.3 LTS provides python upgraded to 3.4.3), however libraries depending on aiohttp should consider this and either freeze aiohttp version or drop Python 3.3 support as well. As of aiohttp **v1.0.0** we dropped support for Python 3.4.1 up to 3.4.2+ also. The reason is: `loop.is_closed` appears in 3.4.2+ Again, it should be not an issue at 2016 Summer because all major distributions are switched to Python 3.5 now. How a middleware may store a data for using by web-handler later? ----------------------------------------------------------------- :class:`aiohttp.web.Request` supports :class:`dict` interface as well as :class:`aiohttp.web.Application`. Just put data inside *request*:: async def handler(request): request['unique_key'] = data See https://github.com/aio-libs/aiohttp_session code for inspiration, ``aiohttp_session.get_session(request)`` method uses ``SESSION_KEY`` for saving request specific session info. .. _aiohttp_faq_parallel_event_sources: How to receive an incoming events from different sources in parallel? --------------------------------------------------------------------- For example we have two event sources: 1. WebSocket for event from end user 2. Redis PubSub from receiving events from other parts of app for sending them to user via websocket. The most native way to perform it is creation of separate task for pubsub handling. Parallel :meth:`aiohttp.web.WebSocketResponse.receive` calls are forbidden, only the single task should perform websocket reading. But other tasks may use the same websocket object for sending data to peer:: async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) task = request.app.loop.create_task( read_subscription(ws, request.app['redis'])) try: async for msg in ws: # handle incoming messages # use ws.send_str() to send data back ... 
finally: task.cancel() async def read_subscription(ws, redis): channel, = await redis.subscribe('channel:1') try: async for msg in channel.iter(): answer = process message(msg) ws.send_str(answer) finally: await redis.unsubscribe('channel:1') .. _aiohttp_faq_terminating_websockets: How to programmatically close websocket server-side? ---------------------------------------------------- For example we have an application with two endpoints: 1. ``/echo`` a websocket echo server that authenticates the user somehow 2. ``/logout_user`` that when invoked needs to close all open websockets for that user. One simple solution is keeping a shared registry of websocket responses for a user in the :class:`aiohttp.web.Application` instance and call :meth:`aiohttp.web.WebSocketResponse.close` on all of them in ``/logout_user`` handler:: async def echo_handler(request): ws = web.WebSocketResponse() user_id = authenticate_user(request) await ws.prepare(request) request.app['websockets'][user_id].add(ws) try: async for msg in ws: ws.send_str(msg.data) finally: request.app['websockets'][user_id].remove(ws) return ws async def logout_handler(request): user_id = authenticate_user(request) ws_closers = [ws.close() for ws in request.app['websockets'][user_id] if not ws.closed] # Watch out, this will keep us from returing the response until all are closed ws_closers and await asyncio.gather(*ws_closers) return web.Response(text='OK') def main(): loop = asyncio.get_event_loop() app = web.Application(loop=loop) app.router.add_route('GET', '/echo', echo_handler) app.router.add_route('POST', '/logout', logout_handler) app['websockets'] = defaultdict(set) web.run_app(app, host='localhost', port=8080) How to make request from a specific IP address? 
----------------------------------------------- If your system has several IP interfaces you may choose one which will be used used to bind socket locally:: conn = aiohttp.TCPConnector(local_addr=('127.0.0.1', 0), loop=loop) async with aiohttp.ClientSession(connector=conn) as session: ... .. seealso:: :class:`aiohttp.TCPConnector` and ``local_addr`` parameter. .. _aiohttp_faq_tests_and_implicit_loop: How to use aiohttp test features with code which works with implicit loop? -------------------------------------------------------------------------- Passing explicit loop everywhere is the recommended way. But sometimes, in case you have many nested non well-written services, this is impossible. There is a technique based on monkey-patching your low level service that depends on aioes, to inject the loop at that level. This way, you just need your ``AioESService`` with the loop in its signature. An example would be the following:: import pytest from unittest.mock import patch, MagicMock from main import AioESService, create_app class TestAcceptance: async def test_get(self, test_client, loop): with patch("main.AioESService", MagicMock( side_effect=lambda *args, **kwargs: AioESService(*args, **kwargs, loop=loop))): client = await test_client(create_app) resp = await client.get("/") assert resp.status == 200 Note how we are patching the ``AioESService`` with and instance of itself but adding the explicit loop as an extra (you need to load the loop fixture in your test signature). 
The final code to test all this (you will need a local instance of elasticsearch running):: import asyncio from aioes import Elasticsearch from aiohttp import web class AioESService: def __init__(self, loop=None): self.es = Elasticsearch(["127.0.0.1:9200"], loop=loop) async def get_info(self): return await self.es.info() class MyService: def __init__(self): self.aioes_service = AioESService() async def get_es_info(self): return await self.aioes_service.get_info() async def hello_aioes(request): my_service = MyService() cluster_info = await my_service.get_es_info() return web.Response(text="{}".format(cluster_info)) def create_app(loop=None): app = web.Application(loop=loop) app.router.add_route('GET', '/', hello_aioes) return app if __name__ == "__main__": web.run_app(create_app()) And the full tests file:: from unittest.mock import patch, MagicMock from main import AioESService, create_app class TestAioESService: async def test_get_info(self, loop): cluster_info = await AioESService("random_arg", loop=loop).get_info() assert isinstance(cluster_info, dict) class TestAcceptance: async def test_get(self, test_client, loop): with patch("main.AioESService", MagicMock( side_effect=lambda *args, **kwargs: AioESService(*args, **kwargs, loop=loop))): client = await test_client(create_app) resp = await client.get("/") assert resp.status == 200 Note how we are using the ``side_effect`` feature for injecting the loop to the ``AioESService.__init__`` call. The use of ``**args, **kwargs`` is mandatory in order to propagate the arguments being used by the caller. API stability and deprecation policy ------------------------------------ aiohttp tries to not break existing users code. Obsolete attributes and methods are marked as *deprecated* in documentation and raises :class:`DeprecationWarning` on usage. Deprecation period is usually a year and half. After the period is passed out deprecated code is be removed. 
Unfortunately we should break own rules if new functionality or bug fixing forces us to do it (for example proper cookies support on client side forced us to break backward compatibility twice). All *backward incompatible* changes are explicitly marked in :ref:`CHANGES ` chapter. How to enable gzip compression globally for the whole application? ------------------------------------------------------------------ It's impossible. Choosing what to compress and where don't apply such time consuming operation is very tricky matter. If you need global compression -- write own custom middleware. Or enable compression in NGINX (you are deploying aiohttp behind reverse proxy, is not it). How to manage ClientSession inside web server? ---------------------------------------------- :class:`aiohttp.ClientSession` should be created once for the lifetime of the server in order to benefit from connection pooling. Session saves cookies internally. If you don't need cookies processing use :class:`aiohttp.DummyCookieJar`. If you need separate cookies for different http calls but process them in logical chains use single :class:`aiohttp.TCPConnector` with separate client session and ``own_connector=False``. How to access db connection stored in app from subapplication? -------------------------------------------------------------- Restricting access from subapplication to main (or outer) app is the deliberate choice. Subapplication is an isolated unit by design. 
If you need to share database object please do it explicitly:: subapp['db'] = mainapp['db'] mainapp.add_subapp('/prefix', subapp) aiohttp-3.0.1/docs/favicon.ico0000666000000000000000000001027613240304665014377 0ustar 00000000000000  ¨( @ ––´[,´[,´[,(´[,l´[,¯´[,Ü´[,ó´[,û´[,û´[,ó´[,Ü´[,¯´[,l´[,(´[,´[,´[,´[,´[,5´[,˜´[,Ý´[,à´[,¹´[,´[,m´[,^´[,^´[,m´[,´[,¹´[,à´[,Þ´[,˜´[,5´[,´[,´Z+´[,´[,´[,ˆ´[,æ´[,Ê´[,m´[,'´[,¸\,³[,²[-¾Z(´[,´[,'´[,m´[,Ê´[,æ´[,ˆ´[,´[,³[-´[,´[,´[,3´[,´[,ï´[,x´[,´\,´Z,³\,µZ,´[,´[,m´[,Þ´[,Á´[,3´[,´[,´[,´[,´[,?´[,Ù´[,¼´[,r´[,#´[,´[,´[,´[,E´[,Ú´[,Ø´[,?´[,´[,³[,´[,´[,4´[,Ø´[,¯´[,)´[,Ž´[,R´[,´[,´[,´[,´[,C´[,«´[,X´[,­´[,Ø´[,3´[,´Z-´[,´[,´[,´[,¾´[,´[,4´[,ò´[,¥´[,´[,´[,´[,´[,…´[,ø´[,H´[,´[,¾´[,Á´[,´[,´[,´[,´[,‰´[,Ý´[,2´[,´[, ´[,n´[,^´[,´[-³[,´[,´[,S´[,]´[, ´[,´[,2´[,Þ´[,ˆ´[,´[,´[,´[,6´[,ä´[,n´[,´[,´[,´[,´[,U´[,´[,´[,´[,*´[,Š´[,2´[,´Z,´[,´[,´[,´[,Y´[,´[,´[,´[,´[,p´[,å´[,6´[,´[,´[,˜´[,È´[,´[,´[,´[,´[,U´[,´[,´[,´[,´[,´[,Š´[,ý´[,›´[,G´[,.´[,´[,ƒ´[,Ì´[,|´[,´[, ´[,6´[,I´[,d´[,Û´[,–´[,´[,)´[,Ü´[,n´[,³[,´Z,´[,´[,M´[,a´[,´[,´[,,´[,U´[,R´[,4´[,M´[,!´[,#´[,>´[,~´[,÷´[,ÿ´[,ã´[,f´[,K´[,8´[,"´[, ´[,o´[,Ü´[,(´[,l´[,ß´[,'´[,³[,´[,´[,´[,º´[,û´[,¿´[,d´[,G´[,´[,´[,·[/´[,´[,0´[,å´[,ÿ´[,À´[,´[,´Z,´[,´[,(´[,ß´[,l´[,¯´[,Á´[,!´[,&´[,4´[,A´[,J´[,r´[,ò´[,ÿ´[,î´[,8´[,´[,´[,´[,´[,€´[,q´[,*´[,´[,´[,´[,´[,¹´[,¯´[,Û´[,²´[,N´[,E´[,8´[,+´[,´[,´[,´[,ç´[,¸´[,´[,´[,³[-´[,O´[,*´[,¸Z*´[,´[,´[,Ž´[,Ü´[,ó´[,m´[,³[+³Z,´[,´[, ´[,!´[,G´[,>´[,´[,´[,´[,%´[,S´[,´[,´[,´[,n´[,ò´[,û´[,^´[,´[,´[,´[,X´[,"´[,´[,´[,´[,Z´[,´[,´[,´[,^´[,û´[,û´[,^´[,´[,´[,´[,\´[, ´[,´\,´[,´[,7´[,E´[,³[,´[,´[,^´[,û´[,ó´[,m´[,ÊR9´\,´[,´[,4´[,K´Z,²\+»Y/´[,´[,\´[,´[,´[,´[,n´[,ó´[,Ü´[,Ž´[,´[,´[,´[,´[,O´[,l´[,–´[,š´[,†´[,:´[,³[,´Z-´[,´[,´[,Ü´[,¯´[,º´[,´[,´[,´Z-´[,w´[,ú´[,ÿ´[,ÿ´[,ü´[,Š´[,´[,´[,´[,´[,¹´[,¯´[,m´[,à´[,(´[,´[,´[,%´[,ß´[,ÿ´[,ÿ´[,ÿ´[,ÿ´[,î´[,9´[,´[,´[,(´[,ß´[,l´[,)´[,Ý´[,o´[,´[,´[,´[,D´[,÷´[,ÿ´[,ÿ´[,ÿ´[,ÿ´[,þ´[,_´[,´[,´[,´[,n´[,Ü´[,(´[,´[,˜´[,É´[,´[,´[,´[,2´[,ì´[,ÿ´[,ÿ´[,ÿ´[,ÿ´[,ú´[,}´[,$´[, 
½R´[,´[,´[,´[,È´[,—³[,´[,´[,6´[,ä´[,n´[-´[,´[,´[, ´[,®´[,ÿ´[,ÿ´[,ÿ´[,ÿ´[,µ´[,0´[,H´[,Z´[,G´[,R´[,†´[,´[,´[,´[,n´[,ä´[,6´[,´[,³[,´[,‰´[,Ý´[,2´[,´[,´[,´\,´[,7´[,^´[,‰´[,д[,Ô´[,–´[,"´[,®Z+´[, ´[,&´[,¡´[,û´[,y´[,´[,5´[,Ý´[,ˆ´[-´[,´[,´[,´[,´[,¾´[,´[,´Z-´[,´[-´[,:´[,S´[,´[,´[,´[,´[,´[,³[,´[,´[,´[,M´[,.´[,c´[,Ö´[,À´[,´[,´[-´[,´[,4´[,Ø´[,°´[,´[,´[,´Z-´[,;´[,R´[,´[,³Z-´[,´[,´[,¯´[,Ø´[,4´[,´[,´[,´[,´[,?´[,Ù´[,¿´[,1´[+´[,;´[,P´[,´[,´[,¸T#´[,1´[,¿´[,Ù´[,?´[,´[,µ[,³[,´[,4´[,ô[,Þ´[,‹´[,]´[,´[,²\,µZ,´[,´[,n´[,à´[,ô[,4µ[,³[,´Z,´[,´[,´[,‰´[,ê´[,Ì´[,m´[,'´[,´Y*´[-²\-ÅU%´[,´[,'´[,n´[,Ê´[,æ´[,‰´[,´[,³\-´[,´[,´[,5´[,˜´[,Ý´[,à´[,¹´[,´[,m´[,^´[,^´[,m´[,´[,º´[,á´[,Þ´[,˜´[,5´[,´[,´[,´[,´[,(´[,l´[,¯´[,Ü´[,ó´[,û´[,û´[,ó´[,Ý´[,°´[,m´[,(´[,´[,ÿ€ÿþüà?ø?üðÿàþÀ?þ„þ!Ž>8q8ðxðøãü?ãü?ÇÇü?ãÏü?ðü?ðüðøðøðøððð1‡àÃÀ>á‡ÿ‡ðþøüüà?þÿ€ÿaiohttp-3.0.1/docs/glossary.rst0000666000000000000000000000572613240304665014657 0ustar 00000000000000.. _aiohttp-glossary: ========== Glossary ========== .. if you add new entries, keep the alphabetical sorting! .. glossary:: :sorted: aiodns DNS resolver for asyncio. https://pypi.python.org/pypi/aiodns asyncio The library for writing single-threaded concurrent code using coroutines, multiplexing I/O access over sockets and other resources, running network clients and servers, and other related primitives. Reference implementation of :pep:`3156` https://pypi.python.org/pypi/asyncio/ callable Any object that can be called. Use :func:`callable` to check that. chardet The Universal Character Encoding Detector https://pypi.python.org/pypi/chardet/ cchardet cChardet is high speed universal character encoding detector - binding to charsetdetect. https://pypi.python.org/pypi/cchardet/ gunicorn Gunicorn 'Green Unicorn' is a Python WSGI HTTP Server for UNIX. 
http://gunicorn.org/ IDNA An Internationalized Domain Name in Applications (IDNA) is an industry standard for encoding Internet Domain Names that contain in whole or in part, in a language-specific script or alphabet, such as Arabic, Chinese, Cyrillic, Tamil, Hebrew or the Latin alphabet-based characters with diacritics or ligatures, such as French. These writing systems are encoded by computers in multi-byte Unicode. Internationalized domain names are stored in the Domain Name System as ASCII strings using Punycode transcription. keep-alive A technique for communicating between HTTP client and server when connection is not closed after sending response but kept open for sending next request through the same socket. It makes communication faster by getting rid of connection establishment for every request. nginx Nginx [engine x] is an HTTP and reverse proxy server, a mail proxy server, and a generic TCP/UDP proxy server. https://nginx.org/en/ percent-encoding A mechanism for encoding information in a Uniform Resource Locator (URL) if URL parts don't fit in safe characters space. requoting Applying :term:`percent-encoding` to non-safe symbols and decode percent encoded safe symbols back. resource A concept reflects the HTTP **path**, every resource corresponds to *URI*. May have a unique name. Contains :term:`route`\'s for different HTTP methods. route A part of :term:`resource`, resource's *path* coupled with HTTP method. web-handler An endpoint that returns HTTP response. websocket A protocol providing full-duplex communication channels over a single TCP connection. The WebSocket protocol was standardized by the IETF as :rfc:`6455` yarl A library for operating with URL objects. https://pypi.python.org/pypi/yarl aiohttp-3.0.1/docs/index.rst0000666000000000000000000001113013240304665014105 0ustar 00000000000000.. aiohttp documentation master file, created by sphinx-quickstart on Wed Mar 5 12:35:35 2014. 
You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. ================== Welcome to AIOHTTP ================== Asynchronous HTTP Client/Server for :term:`asyncio` and Python. Current version is |release|. .. _GitHub: https://github.com/aio-libs/aiohttp Key Features ============ - Supports both :ref:`aiohttp-client` and :ref:`HTTP Server `. - Supports both :ref:`Server WebSockets ` and :ref:`Client WebSockets ` out-of-the-box. - Web-server has :ref:`aiohttp-web-middlewares`, :ref:`aiohttp-web-signals` and pluggable routing. .. _aiohttp-installation: Library Installation ==================== .. code-block:: bash $ pip install aiohttp You may want to install *optional* :term:`cchardet` library as faster replacement for :term:`chardet`: .. code-block:: bash $ pip install cchardet For speeding up DNS resolving by client API you may install :term:`aiodns` as well. This option is highly recommended: .. code-block:: bash $ pip install aiodns Getting Started =============== Client example:: import aiohttp import asyncio import async_timeout async def fetch(session, url): async with async_timeout.timeout(10): async with session.get(url) as response: return await response.text() async def main(): async with aiohttp.ClientSession() as session: html = await fetch(session, 'http://python.org') print(html) loop = asyncio.get_event_loop() loop.run_until_complete(main()) Server example:: from aiohttp import web async def handle(request): name = request.match_info.get('name', "Anonymous") text = "Hello, " + name return web.Response(text=text) app = web.Application() app.router.add_get('/', handle) app.router.add_get('/{name}', handle) web.run_app(app) For more information please visit :ref:`aiohttp-client` and :ref:`aiohttp-web` pages. What's new in aiohttp 3? ======================== Go to :ref:`aiohttp_whats_new_3_0` page for aiohttp 3.0 major release changes. 
Tutorial ======== :ref:`Polls tutorial ` Source code =========== The project is hosted on GitHub_ Please feel free to file an issue on the `bug tracker `_ if you have found a bug or have some suggestion in order to improve the library. The library uses `Travis `_ for Continuous Integration. Dependencies ============ - Python 3.5.3+ - *async_timeout* - *attrs* - *chardet* - *multidict* - *yarl* - *Optional* :term:`cchardet` as faster replacement for :term:`chardet`. Install it explicitly via: .. code-block:: bash $ pip install cchardet - *Optional* :term:`aiodns` for fast DNS resolving. The library is highly recommended. .. code-block:: bash $ pip install aiodns Communication channels ====================== *aio-libs* google group: https://groups.google.com/forum/#!forum/aio-libs Feel free to post your questions and ideas here. *gitter chat* https://gitter.im/aio-libs/Lobby We support `Stack Overflow `_. Please add *aiohttp* tag to your question there. Contributing ============ Please read the :ref:`instructions for contributors` before making a Pull Request. Authors and License =================== The ``aiohttp`` package is written mostly by Nikolay Kim and Andrew Svetlov. It's *Apache 2* licensed and freely available. Feel free to improve this package and send a pull request to GitHub_. .. _aiohttp-backward-compatibility-policy: Policy for Backward Incompatible Changes ======================================== *aiohttp* keeps backward compatibility. After deprecating some *Public API* (method, class, function argument, etc.) the library guaranties the usage of *deprecated API* is still allowed at least for a year and half after publishing new release with deprecation. All deprecations are reflected in documentation and raises :exc:`DeprecationWarning`. Sometimes we are forced to break the own rule for sake of very strong reason. 
Most likely the reason is a critical bug which cannot be solved without major API change, but we are working hard for keeping these changes as rare as possible. Table Of Contents ================= .. toctree:: :name: mastertoc client web utilities faq misc external contributing aiohttp-3.0.1/docs/logging.rst0000666000000000000000000001120013240304665014422 0ustar 00000000000000.. _aiohttp-logging: Logging ======= .. currentmodule:: aiohttp *aiohttp* uses standard :mod:`logging` for tracking the library activity. We have the following loggers enumerated by names: - ``'aiohttp.access'`` - ``'aiohttp.client'`` - ``'aiohttp.internal'`` - ``'aiohttp.server'`` - ``'aiohttp.web'`` - ``'aiohttp.websocket'`` You may subscribe to these loggers for getting logging messages. The page does not provide instructions for logging subscribing while the most friendly method is :func:`logging.config.dictConfig` for configuring whole loggers in your application. Access logs ----------- Access log by default is switched on and uses ``'aiohttp.access'`` logger name. The log may be controlled by :meth:`aiohttp.web.Application.make_handler` call. Pass *access_log* parameter with value of :class:`logging.Logger` instance to override default logger. .. note:: Use ``web.run_app(app, access_log=None)`` for disabling access logs. Other parameter called *access_log_format* may be used for specifying log format (see below). .. 
_aiohttp-logging-access-log-format-spec: Format specification ^^^^^^^^^^^^^^^^^^^^ The library provides custom micro-language to specifying info about request and response: +--------------+---------------------------------------------------------+ | Option | Meaning | +==============+=========================================================+ | ``%%`` | The percent sign | +--------------+---------------------------------------------------------+ | ``%a`` | Remote IP-address | | | (IP-address of proxy if using reverse proxy) | +--------------+---------------------------------------------------------+ | ``%t`` | Time when the request was started to process | +--------------+---------------------------------------------------------+ | ``%P`` | The process ID of the child that serviced the request | +--------------+---------------------------------------------------------+ | ``%r`` | First line of request | +--------------+---------------------------------------------------------+ | ``%s`` | Response status code | +--------------+---------------------------------------------------------+ | ``%b`` | Size of response in bytes, excluding HTTP headers | +--------------+---------------------------------------------------------+ | ``%T`` | The time taken to serve the request, in seconds | +--------------+---------------------------------------------------------+ | ``%Tf`` | The time taken to serve the request, in seconds | | | with fraction in %.06f format | +--------------+---------------------------------------------------------+ | ``%D`` | The time taken to serve the request, in microseconds | +--------------+---------------------------------------------------------+ | ``%{FOO}i`` | ``request.headers['FOO']`` | +--------------+---------------------------------------------------------+ | ``%{FOO}o`` | ``response.headers['FOO']`` | +--------------+---------------------------------------------------------+ Default access log format is:: '%a %t "%r" %s %b "%{Referer}i" 
"%{User-Agent}i"' .. versionadded:: 2.3.0 *access_log_class* introduced. Example of drop-in replacement for :class:`aiohttp.helpers.AccessLogger`:: from aiohttp.abc import AbstractAccessLogger class AccessLogger(AbstractAccessLogger): def log(self, request, response, time): self.logger.info(f'{request.remote} ' f'"{request.method} {request.path} ' f'done in {time}s: {response.status}') .. note:: When `Gunicorn `_ is used for :ref:`deployment ` its default access log format will be automatically replaced with the default aiohttp's access log format. If Gunicorn's option access_logformat_ is specified explicitly it should use aiohttp's format specification. Error logs ---------- *aiohttp.web* uses logger named ``'aiohttp.server'`` to store errors given on web requests handling. The log is enabled by default. To use different logger name please specify *logger* parameter (:class:`logging.Logger` instance) on performing :meth:`aiohttp.web.Application.make_handler` call. .. _access_logformat: http://docs.gunicorn.org/en/stable/settings.html#access-log-format aiohttp-3.0.1/docs/make.bat0000777000000000000000000001505713240304665013670 0ustar 00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . set I18NSPHINXOPTS=%SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. 
devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. changes to make an overview over all changed/added/deprecated items echo. xml to make Docutils-native XML files echo. pseudoxml to make pseudoxml-XML files for display purposes echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) %SPHINXBUILD% 2> nul if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. 
echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\aiohttp.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\aiohttp.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdf" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdfja" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf-ja cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. 
goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/locale. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) if "%1" == "xml" ( %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml if errorlevel 1 exit /b 1 echo. echo.Build finished. The XML files are in %BUILDDIR%/xml. goto end ) if "%1" == "pseudoxml" ( %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml if errorlevel 1 exit /b 1 echo. echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. goto end ) :end aiohttp-3.0.1/docs/Makefile0000666000000000000000000001533313240304665013715 0ustar 00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. 
Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. 
The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/aiohttp.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/aiohttp.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/aiohttp" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/aiohttp" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." 
$(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. 
The pseudo-XML files are in $(BUILDDIR)/pseudoxml." spelling: $(SPHINXBUILD) -b spelling $(ALLSPHINXOPTS) $(BUILDDIR)/spelling @echo @echo "Build finished." aiohttp-3.0.1/docs/migration_to_2xx.rst0000666000000000000000000001601013240304665016274 0ustar 00000000000000.. _aiohttp-migration: Migration to 2.x ================ Client ------ chunking ^^^^^^^^ aiohttp does not support custom chunking sizes. It is up to the developer to decide how to chunk data streams. If chunking is enabled, aiohttp encodes the provided chunks in the "Transfer-encoding: chunked" format. aiohttp does not enable chunked encoding automatically even if a *transfer-encoding* header is supplied: *chunked* has to be set explicitly. If *chunked* is set, then the *Transfer-encoding* and *content-length* headers are disallowed. compression ^^^^^^^^^^^ Compression has to be enabled explicitly with the *compress* parameter. If compression is enabled, adding a *content-encoding* header is not allowed. Compression also enables the *chunked* transfer-encoding. Compression can not be combined with a *Content-Length* header. Client Connector ^^^^^^^^^^^^^^^^ 1. By default a connector object manages a total number of concurrent connections. This limit was a per host rule in version 1.x. In 2.x, the `limit` parameter defines how many concurrent connection connector can open and a new `limit_per_host` parameter defines the limit per host. By default there is no per-host limit. 2. BaseConnector.close is now a normal function as opposed to coroutine in version 1.x 3. BaseConnector.conn_timeout was moved to ClientSession ClientResponse.release ^^^^^^^^^^^^^^^^^^^^^^ Internal implementation was significantly redesigned. It is not required to call `release` on the response object. When the client fully receives the payload, the underlying connection automatically returns back to pool. 
If the payload is not fully read, the connection is closed Client exceptions ^^^^^^^^^^^^^^^^^ Exception hierarchy has been significantly modified. aiohttp now defines only exceptions that covers connection handling and server response misbehaviors. For developer specific mistakes, aiohttp uses python standard exceptions like ValueError or TypeError. Reading a response content may raise a ClientPayloadError exception. This exception indicates errors specific to the payload encoding. Such as invalid compressed data, malformed chunked-encoded chunks or not enough data that satisfy the content-length header. All exceptions are moved from `aiohttp.errors` module to top level `aiohttp` module. New hierarchy of exceptions: * `ClientError` - Base class for all client specific exceptions - `ClientResponseError` - exceptions that could happen after we get response from server * `WSServerHandshakeError` - web socket server response error - `ClientHttpProxyError` - proxy response - `ClientConnectionError` - exceptions related to low-level connection problems * `ClientOSError` - subset of connection errors that are initiated by an OSError exception - `ClientConnectorError` - connector related exceptions * `ClientProxyConnectionError` - proxy connection initialization error - `ServerConnectionError` - server connection related errors * `ServerDisconnectedError` - server disconnected * `ServerTimeoutError` - server operation timeout, (read timeout, etc) * `ServerFingerprintMismatch` - server fingerprint mismatch - `ClientPayloadError` - This exception can only be raised while reading the response payload if one of these errors occurs: invalid compression, malformed chunked encoding or not enough data that satisfy content-length header. Client payload (form-data) ^^^^^^^^^^^^^^^^^^^^^^^^^^ To unify form-data/payload handling a new `Payload` system was introduced. It handles customized handling of existing types and provide implementation for user-defined types. 1. 
FormData.__call__ does not take an encoding arg anymore and its return value changes from an iterator or bytes to a Payload instance. aiohttp provides payload adapters for some standard types like `str`, `byte`, `io.IOBase`, `StreamReader` or `DataQueue`. 2. a generator is not supported as data provider anymore, `streamer` can be used instead. For example, to upload data from file:: @aiohttp.streamer def file_sender(writer, file_name=None): with open(file_name, 'rb') as f: chunk = f.read(2**16) while chunk: yield from writer.write(chunk) chunk = f.read(2**16) # Then you can use `file_sender` like this: async with session.post('http://httpbin.org/post', data=file_sender(file_name='huge_file')) as resp: print(await resp.text()) Various ^^^^^^^ 1. the `encoding` parameter is deprecated in `ClientSession.request()`. Payload encoding is controlled at the payload level. It is possible to specify an encoding for each payload instance. 2. the `version` parameter is removed in `ClientSession.request()` client version can be specified in the `ClientSession` constructor. 3. `aiohttp.MsgType` dropped, use `aiohttp.WSMsgType` instead. 4. `ClientResponse.url` is an instance of `yarl.URL` class (`url_obj` is deprecated) 5. `ClientResponse.raise_for_status()` raises :exc:`aiohttp.ClientResponseError` exception 6. `ClientResponse.json()` is strict about response's content type. if content type does not match, it raises :exc:`aiohttp.ClientResponseError` exception. To disable content type check you can pass ``None`` as `content_type` parameter. Server ------ ServerHttpProtocol and low-level details ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Internal implementation was significantly redesigned to provide better performance and support HTTP pipelining. ServerHttpProtocol is dropped, implementation is merged with RequestHandler a lot of low-level api's are dropped. Application ^^^^^^^^^^^ 1. Constructor parameter `loop` is deprecated. 
Loop is get configured by application runner, `run_app` function for any of gunicorn workers. 2. `Application.router.add_subapp` is dropped, use `Application.add_subapp` instead 3. `Application.finished` is dropped, use `Application.cleanup` instead WebRequest and WebResponse ^^^^^^^^^^^^^^^^^^^^^^^^^^ 1. the `GET` and `POST` attributes no longer exist. Use the `query` attribute instead of `GET` 2. Custom chunking size is not support `WebResponse.chunked` - developer is responsible for actual chunking. 3. Payloads are supported as body. So it is possible to use client response's content object as body parameter for `WebResponse` 4. `FileSender` api is dropped, it is replaced with more general `FileResponse` class:: async def handle(request): return web.FileResponse('path-to-file.txt') 5. `WebSocketResponse.protocol` is renamed to `WebSocketResponse.ws_protocol`. `WebSocketResponse.protocol` is instance of `RequestHandler` class. RequestPayloadError ^^^^^^^^^^^^^^^^^^^ Reading request's payload may raise a `RequestPayloadError` exception. The behavior is similar to `ClientPayloadError`. WSGI ^^^^ *WSGI* support has been dropped, as well as gunicorn wsgi support. We still provide default and uvloop gunicorn workers for `web.Application` aiohttp-3.0.1/docs/misc.rst0000666000000000000000000000041713240304665013737 0ustar 00000000000000.. _aiohttp-misc: Miscellaneous ============= Helpful pages. .. toctree:: :name: misc essays glossary .. toctree:: :hidden: changes * :ref:`aiohttp_changes` Indices and tables ------------------ * :ref:`genindex` * :ref:`modindex` * :ref:`search` aiohttp-3.0.1/docs/multipart.rst0000666000000000000000000002535013240304665015030 0ustar 00000000000000.. module:: aiohttp .. _aiohttp-multipart: Working with Multipart ====================== ``aiohttp`` supports a full featured multipart reader and writer. 
Both are designed with streaming processing in mind to avoid unwanted footprint which may be significant if you're dealing with large payloads, but this also means that most I/O operations are only possible to be executed a single time. Reading Multipart Responses --------------------------- Assume you made a request, as usual, and want to process the response multipart data:: async with aiohttp.request(...) as resp: pass First, you need to wrap the response with a :meth:`MultipartReader.from_response`. This needs to keep the implementation of :class:`MultipartReader` separated from the response and the connection routines which makes it more portable:: reader = aiohttp.MultipartReader.from_response(resp) Let's assume with this response you'd received some JSON document and multiple files for it, but you don't need all of them, just a specific one. So first you need to enter into a loop where the multipart body will be processed:: metadata = None filedata = None while True: part = await reader.next() The returned type depends on what the next part is: if it's a simple body part then you'll get a :class:`BodyPartReader` instance here, otherwise, it will be another :class:`MultipartReader` instance for the nested multipart. Remember, that multipart format is recursive and supports multiple levels of nested body parts. When there are no more parts left to fetch, ``None`` value will be returned - that's the signal to break the loop:: if part is None: break Both :class:`BodyPartReader` and :class:`MultipartReader` provide access to body part headers: this allows you to filter parts by their attributes:: if part.headers[aiohttp.hdrs.CONTENT_TYPE] == 'application/json': metadata = await part.json() continue Neither :class:`BodyPartReader` nor :class:`MultipartReader` instances read the whole body part data without being explicitly asked to. 
:class:`BodyPartReader` provides a set of helper methods to fetch popular content types in a friendly way: - :meth:`BodyPartReader.text` for plain text data; - :meth:`BodyPartReader.json` for JSON; - :meth:`BodyPartReader.form` for `application/www-urlform-encode` Each of these methods automatically recognizes if content is compressed by using `gzip` and `deflate` encoding (while it respects `identity` one), or if transfer encoding is base64 or `quoted-printable` - in each case the result will get automatically decoded. But in case you need to access raw binary data as it is, there are :meth:`BodyPartReader.read` and :meth:`BodyPartReader.read_chunk` coroutine methods as well to read raw binary data as it is all-in-single-shot or by chunks respectively. When you have to deal with multipart files, the :attr:`BodyPartReader.filename` property comes to help. It's a very smart helper which handles the `Content-Disposition` header correctly and extracts the right filename attribute from it:: if part.filename != 'secret.txt': continue If the current body part does not match your expectations and you want to skip it - just continue the loop to start the next iteration of it. Here is where magic happens. Before fetching the next body part ``await reader.next()`` it ensures that the previous one was read completely. If it was not, all its content is sent to the void in order to fetch the next part. So you don't have to care about cleanup routines while you're within a loop. Once you'd found a part for the file you'd searched for, just read it. Let's handle it as it is without applying any decoding magic:: filedata = await part.read(decode=False) Later you may decide to decode the data. 
It's still simple and possible to do:: filedata = part.decode(filedata) Once you are done with multipart processing, just break a loop:: break Sending Multipart Requests -------------------------- :class:`MultipartWriter` provides an interface to build multipart payload from the Python data and serialize it into chunked binary stream. Since multipart format is recursive and supports deeply nesting, you can use ``with`` statement to design your multipart data closer to how it will be:: with aiohttp.MultipartWriter('mixed') as mpwriter: ... with aiohttp.MultipartWriter('related') as subwriter: ... mpwriter.append(subwriter) with aiohttp.MultipartWriter('related') as subwriter: ... with aiohttp.MultipartWriter('related') as subsubwriter: ... subwriter.append(subsubwriter) mpwriter.append(subwriter) with aiohttp.MultipartWriter('related') as subwriter: ... mpwriter.append(subwriter) The :meth:`MultipartWriter.append` is used to join new body parts into a single stream. It accepts various inputs and determines what default headers should be used for. 
For text data default `Content-Type` is :mimetype:`text/plain; charset=utf-8`:: mpwriter.append('hello') For binary data :mimetype:`application/octet-stream` is used:: mpwriter.append(b'aiohttp') You can always override these default by passing your own headers with the second argument:: mpwriter.append(io.BytesIO(b'GIF89a...'), {'CONTENT-TYPE': 'image/gif'}) For file objects `Content-Type` will be determined by using Python's mod:`mimetypes` module and additionally `Content-Disposition` header will include the file's basename:: part = root.append(open(__file__, 'rb')) If you want to send a file with a different name, just handle the :class:`BodyPartWriter` instance which :meth:`MultipartWriter.append` will always return and set `Content-Disposition` explicitly by using the :meth:`BodyPartWriter.set_content_disposition` helper:: part.set_content_disposition('attachment', filename='secret.txt') Additionally, you may want to set other headers here:: part.headers[aiohttp.hdrs.CONTENT_ID] = 'X-12345' If you'd set `Content-Encoding`, it will be automatically applied to the data on serialization (see below):: part.headers[aiohttp.hdrs.CONTENT_ENCODING] = 'gzip' There are also :meth:`MultipartWriter.append_json` and :meth:`MultipartWriter.append_form` helpers which are useful to work with JSON and form urlencoded data, so you don't have to encode it every time manually:: mpwriter.append_json({'test': 'passed'}) mpwriter.append_form([('key', 'value')]) When it's done, to make a request just pass a root :class:`MultipartWriter` instance as :meth:`aiohttp.ClientSession.request` ``data`` argument:: await session.post('http://example.com', data=mpwriter) Behind the scenes :meth:`MultipartWriter.serialize` will yield chunks of every part and if body part has `Content-Encoding` or `Content-Transfer-Encoding` they will be applied on streaming content. 
Please note, that on :meth:`MultipartWriter.serialize` all the file objects will be read until the end and there is no way to repeat a request without rewinding their pointers to the start. Hacking Multipart ----------------- The Internet is full of terror and sometimes you may find a server which implements multipart support in strange ways when an obvious solution does not work. For instance, if the server uses :class:`cgi.FieldStorage` then you have to ensure that no body part contains a `Content-Length` header:: for part in mpwriter: part.headers.pop(aiohttp.hdrs.CONTENT_LENGTH, None) On the other hand, some servers may require you to specify `Content-Length` for the whole multipart request. `aiohttp` does not do that since it sends multipart using chunked transfer encoding by default. To overcome this issue, you have to serialize a :class:`MultipartWriter` on your own in order to calculate its size:: body = b''.join(mpwriter.serialize()) await aiohttp.post('http://example.com', data=body, headers=mpwriter.headers) Sometimes the server response may not be well formed: it may or may not contain nested parts. For instance, we request a resource which returns JSON documents with the files attached to it. If the document has any attachments, they are returned as a nested multipart. If it does not, it responds as plain body parts: .. code-block:: none CONTENT-TYPE: multipart/mixed; boundary=--: --: CONTENT-TYPE: application/json {"_id": "foo"} --: CONTENT-TYPE: multipart/related; boundary=----: ----: CONTENT-TYPE: application/json {"_id": "bar"} ----: CONTENT-TYPE: text/plain CONTENT-DISPOSITION: attachment; filename=bar.txt bar! bar! bar! ----:-- --: CONTENT-TYPE: application/json {"_id": "boo"} --: CONTENT-TYPE: multipart/related; boundary=----: ----: CONTENT-TYPE: application/json {"_id": "baz"} ----: CONTENT-TYPE: text/plain CONTENT-DISPOSITION: attachment; filename=baz.txt baz! baz! baz! 
----:-- --:-- Reading such kind of data in a single stream is possible, but is not clean at all:: result = [] while True: part = await reader.next() if part is None: break if isinstance(part, aiohttp.MultipartReader): # Fetching files while True: filepart = await part.next() if filepart is None: break result[-1].append((await filepart.read())) else: # Fetching document result.append([(await part.json())]) Let's hack a reader in the way to return pairs of document and reader of the related files on each iteration:: class PairsMultipartReader(aiohttp.MultipartReader): # keep reference on the original reader multipart_reader_cls = aiohttp.MultipartReader async def next(self): """Emits a tuple of document object (:class:`dict`) and multipart reader of the followed attachments (if any). :rtype: tuple """ reader = await super().next() if self._at_eof: return None, None if isinstance(reader, self.multipart_reader_cls): part = await reader.next() doc = await part.json() else: doc = await reader.json() return doc, reader And this gives us a cleaner solution:: reader = PairsMultipartReader.from_response(resp) result = [] while True: doc, files_reader = await reader.next() if doc is None: break files = [] while True: filepart = await files_reader.next() if filepart is None: break files.append((await filepart.read())) result.append((doc, files)) .. seealso:: :ref:`aiohttp-multipart-reference` aiohttp-3.0.1/docs/multipart_reference.rst0000666000000000000000000001102113240304665017034 0ustar 00000000000000.. module:: aiohttp .. _aiohttp-multipart-reference: Multipart reference =================== .. class:: MultipartResponseWrapper(resp, stream) Wrapper around the :class:`MultipartBodyReader` to take care of the underlying connection and close it when needed. .. method:: at_eof() Returns ``True`` when all response data had been read. :rtype: bool .. comethod:: next() Emits next multipart reader object. .. 
comethod:: release() Releases the connection gracefully, reading all the content to the void. .. class:: BodyPartReader(boundary, headers, content) Multipart reader for single body part. .. comethod:: read(*, decode=False) Reads body part data. :param bool decode: Decodes data following the encoding method from ``Content-Encoding`` header. If it is missing, the data remains untouched :rtype: bytearray .. comethod:: read_chunk(size=chunk_size) Reads body part content chunk of the specified size. :param int size: chunk size :rtype: bytearray .. comethod:: readline() Reads body part data line by line. :rtype: bytearray .. comethod:: release() Like :meth:`read`, but reads all the data to the void. :rtype: None .. comethod:: text(*, encoding=None) Like :meth:`read`, but assumes that body part contains text data. :param str encoding: Custom text encoding. Overrides specified in charset param of ``Content-Type`` header :rtype: str .. comethod:: json(*, encoding=None) Like :meth:`read`, but assumes that body part contains JSON data. :param str encoding: Custom JSON encoding. Overrides specified in charset param of ``Content-Type`` header .. comethod:: form(*, encoding=None) Like :meth:`read`, but assumes that body part contains form urlencoded data. :param str encoding: Custom form encoding. Overrides specified in charset param of ``Content-Type`` header .. method:: at_eof() Returns ``True`` if the boundary was reached or ``False`` otherwise. :rtype: bool .. method:: decode(data) Decodes data according to the specified ``Content-Encoding`` or ``Content-Transfer-Encoding`` headers value. Supports ``gzip``, ``deflate`` and ``identity`` encodings for ``Content-Encoding`` header. Supports ``base64``, ``quoted-printable``, ``binary`` encodings for ``Content-Transfer-Encoding`` header. :param bytearray data: Data to decode. :raises: :exc:`RuntimeError` - if encoding is unknown. :rtype: bytes .. 
method:: get_charset(default=None) Returns charset parameter from ``Content-Type`` header or default. .. attribute:: name A field *name* specified in ``Content-Disposition`` header or ``None`` if missed or header is malformed. Readonly :class:`str` property. .. attribute:: name A field *filename* specified in ``Content-Disposition`` header or ``None`` if missed or header is malformed. Readonly :class:`str` property. .. class:: MultipartReader(headers, content) Multipart body reader. .. classmethod:: from_response(cls, response) Constructs reader instance from HTTP response. :param response: :class:`~aiohttp.client.ClientResponse` instance .. method:: at_eof() Returns ``True`` if the final boundary was reached or ``False`` otherwise. :rtype: bool .. comethod:: next() Emits the next multipart body part. .. comethod:: release() Reads all the body parts to the void till the final boundary. .. comethod:: fetch_next_part() Returns the next body part reader. .. class:: MultipartWriter(subtype='mixed', boundary=None) Multipart body writer. ``boundary`` may be an ASCII-only string. .. attribute:: boundary The string (:class:`str`) representation of the boundary. .. versionchanged:: 3.0 Property type was changed from :class:`bytes` to :class:`str`. .. method:: append(obj, headers=None) Append an object to writer. .. method:: append_payload(payload) Adds a new body part to multipart writer. .. method:: append_json(obj, headers=None) Helper to append JSON part. .. method:: append_form(obj, headers=None) Helper to append form urlencoded part. .. attribute:: size Size of the payload. .. comethod:: write(writer) Write body. aiohttp-3.0.1/docs/new_router.rst0000666000000000000000000000545513240304665015204 0ustar 00000000000000.. _aiohttp-router-refactoring-021: Router refactoring in 0.21 ========================== Rationale --------- First generation (v1) of router has mapped ``(method, path)`` pair to :term:`web-handler`. Mapping is named **route**. 
Routes used to have unique names if any. The main mistake with the design is coupling the **route** to ``(method, path)`` pair while really URL construction operates with **resources** (**location** is a synonym). HTTP method is not part of URI but applied on sending HTTP request only. Having different **route names** for the same path is confusing. Moreover **named routes** constructed for the same path should have unique non overlapping names which is cumbersome is certain situations. From other side sometimes it's desirable to bind several HTTP methods to the same web handler. For *v1* router it can be solved by passing '*' as HTTP method. Class based views require '*' method also usually. Implementation -------------- The change introduces **resource** as first class citizen:: resource = router.add_resource('/path/{to}', name='name') *Resource* has a **path** (dynamic or constant) and optional **name**. The name is **unique** in router context. *Resource* has **routes**. *Route* corresponds to *HTTP method* and :term:`web-handler` for the method:: route = resource.add_route('GET', handler) User still may use wildcard for accepting all HTTP methods (maybe we will add something like ``resource.add_wildcard(handler)`` later). Since **names** belongs to **resources** now ``app.router['name']`` returns a **resource** instance instead of :class:`aiohttp.web.Route`. **resource** has ``.url()`` method, so ``app.router['name'].url(parts={'a': 'b'}, query={'arg': 'param'})`` still works as usual. The change allows to rewrite static file handling and implement nested applications as well. Decoupling of *HTTP location* and *HTTP method* makes life easier. Backward compatibility ---------------------- The refactoring is 99% compatible with previous implementation. 99% means all example and the most of current code works without modifications but we have subtle API backward incompatibles. 
``app.router['name']`` returns a :class:`aiohttp.web.BaseResource` instance instead of :class:`aiohttp.web.Route` but resource has the same ``resource.url(...)`` most useful method, so end user should feel no difference. ``route.match(...)`` is **not** supported anymore, use :meth:`aiohttp.web.AbstractResource.resolve` instead. ``app.router.add_route(method, path, handler, name='name')`` now is just shortcut for:: resource = app.router.add_resource(path, name=name) route = resource.add_route(method, handler) return route ``app.router.register_route(...)`` is still supported, it creates :class:`aiohttp.web.ResourceAdapter` for every call (but it's deprecated now). aiohttp-3.0.1/docs/old-logo.svg0000666000000000000000000014174613240304665014522 0ustar 00000000000000 aiohttp-icon Created with Sketch. aiohttp-3.0.1/docs/powered_by.rst0000666000000000000000000000153013240304665015140 0ustar 00000000000000.. _aiohttp-powered-by: Powered by aiohttp ================== Web sites powered by aiohttp. Feel free to fork documentation on github, add a link to your site and make a Pull Request! * `Farmer Business Network `_ * `Home Assistant `_ * `KeepSafe `_ * `Skyscanner Hotels `_ * `Ocean S.A. `_ * `GNS3 `_ * `TutorCruncher socket `_ * `Morpheus messaging microservice `_ * `Eyepea - Custom telephony solutions `_ * `ALLOcloud - Telephony in the cloud `_ * `helpmanual - comprehensive help and man page database `_ aiohttp-3.0.1/docs/signals.rst0000666000000000000000000000220513240304665014441 0ustar 00000000000000Signals ======= .. currentmodule:: aiohttp Signal is a list of registered asynchronous callbacks. The signal's life-cycle has two stages: after creation it's content could be filled by using standard list operations: ``sig.append()`` etc. After ``sig.freeze()`` call the signal is *frozen*: adding, removing and dropping callbacks are forbidden. The only available operation is calling previously registered callbacks by ``await sig.send(data)``. 
For concrete usage examples see :ref:`signals in aiohttp.web ` chapter. .. versionchanged:: 3.0 ``sig.send()`` call is forbidden for non-frozen signal. Support for regular (non-async) callbacks is dropped. All callbacks should be async functions. .. class:: Signal The signal, implements :class:`collections.abc.MutableSequence` interface. .. comethod:: send(*args, **kwargs) Call all registered callbacks one by one starting from the begin of list. .. attribute:: frozen ``True`` if :meth:`freeze` was called, read-only property. .. method:: freeze() Freeze the list. After the call any content modification is forbidden. aiohttp-3.0.1/docs/spelling_wordlist.txt0000666000000000000000000000470313240304665016561 0ustar 00000000000000abc aiodns aioes aiohttp aiohttpdemo aiohttp’s aiopg alives api api’s app app’s arg Arsenic async asyncio auth autocalculated autodetection autogenerates autogeneration awaitable backend backends Backport BaseEventLoop basename BasicAuth BodyPartReader botocore Bugfixes builtin BytesIO cchardet cChardet Changelog charset charsetdetect chunked chunking CIMultiDict ClientSession cls cmd codec Codings committer committers config Config configs conjunction contextmanager CookieJar coroutine Coroutine coroutines cpu css ctor Ctrl Cython cythonized de deduplicate # de-facto: deprecations DER Dev dict Dict Discord django Django dns DNSResolver docstring Dup elasticsearch encodings env environ eof epoll Facebook facto fallback filename finalizers frontend getall gethostbyname github google gunicorn Gunicorn gunicorn’s gzipped hackish highlevel hostnames HTTPException HttpProcessingError httpretty https impl incapsulates Indices infos inline intaking io ip IP ipdb IPv ish iterable iterables javascript Jinja json keepalive keepalived keepalives keepaliving kwarg latin linux localhost Locator login lookup lookups lossless Mako manylinux metadata microservice middleware middlewares miltidict misbehaviors misformed Mongo msg MsgType multi multidict 
multidicts multidict’s Multidicts multipart Multipart Nagle Nagle’s namedtuple nameservers namespace nginx Nginx Nikolay noop nowait OAuth optimizations os outcoming Overridable Paolini param params pathlib peername ping pipelining pluggable plugin poller pong Postgres pre programmatically proxied PRs pubsub Punycode py pyenv pyflakes pytest Pytest Quickstart quote’s readonly readpayload rebase redirections Redis refactor Refactor refactored refactoring regex regexps regexs reloader renderer renderers repo repr repr’s RequestContextManager request’s Request’s requote requoting resolvers reusage Runit sa Satisfiable schemas sendfile serializable shourtcuts skipuntil Skyscanner SocketSocketTransport ssl SSLContext startup subapplication subclasses submodules subpackage subprotocol subprotocols subtype supervisord Supervisord Svetlov symlink symlinks syscall syscalls Systemd tarball TCP teardown Teardown TestClient Testsuite Tf timestamps toolbar toplevel tp tuples UI un unawaited unicode unittest Unittest unix unsets upstr url urldispatcher urlencoded urls url’s utf utils uvloop vcvarsall waituntil webapp websocket websockets websocket’s Websockets wildcard Workflow ws wsgi WSMessage WSMsgType wss www aiohttp-3.0.1/docs/streams.rst0000666000000000000000000001120613240304665014460 0ustar 00000000000000.. _aiohttp-streams: Streaming API ============= .. module:: aiohttp .. currentmodule:: aiohttp ``aiohttp`` uses streams for retrieving *BODIES*: :attr:`aiohttp.web.Request.content` and :attr:`aiohttp.ClientResponse.content` are properties with stream API. .. class:: StreamReader The reader from incoming stream. User should never instantiate streams manually but use existing :attr:`aiohttp.web.Request.content` and :attr:`aiohttp.ClientResponse.content` properties for accessing raw BODY data. Reading Methods --------------- .. comethod:: StreamReader.read(n=-1) Read up to *n* bytes. If *n* is not provided, or set to ``-1``, read until EOF and return all read bytes. 
If the EOF was received and the internal buffer is empty, return an empty bytes object. :param int n: how many bytes to read, ``-1`` for the whole stream. :return bytes: the given data .. comethod:: StreamReader.readany() Read next data portion for the stream. Returns immediately if internal buffer has a data. :return bytes: the given data .. comethod:: StreamReader.readexactly(n) Read exactly *n* bytes. Raise an :exc:`asyncio.IncompleteReadError` if the end of the stream is reached before *n* can be read, the :attr:`asyncio.IncompleteReadError.partial` attribute of the exception contains the partial read bytes. :param int n: how many bytes to read. :return bytes: the given data .. comethod:: StreamReader.readline() Read one line, where “line†is a sequence of bytes ending with ``\n``. If EOF is received, and ``\n`` was not found, the method will return the partial read bytes. If the EOF was received and the internal buffer is empty, return an empty bytes object. :return bytes: the given line .. comethod:: StreamReader.readchunk() Read a chunk of data as it was received by the server. Returns a tuple of (data, end_of_HTTP_chunk). When chunked transfer encoding is used, end_of_HTTP_chunk is a :class:`bool` indicating if the end of the data corresponds to the end of a HTTP chunk, otherwise it is always ``False``. :return tuple[bytes, bool]: a chunk of data and a :class:`bool` that is ``True`` when the end of the returned chunk corresponds to the end of a HTTP chunk. Asynchronous Iteration Support ------------------------------ Stream reader supports asynchronous iteration over BODY. By default it iterates over lines:: async for line in response.content: print(line) Also there are methods for iterating over data chunks with maximum size limit and over any available data. .. comethod:: StreamReader.iter_chunked(n) :async-for: Iterates over data chunks with maximum size limit:: async for data in response.content.iter_chunked(1024): print(data) .. 
comethod:: StreamReader.iter_any() :async-for: Iterates over data chunks in order of intaking them into the stream:: async for data in response.content.iter_any(): print(data) .. comethod:: StreamReader.iter_chunks() :async-for: Iterates over data chunks as received from the server:: async for data, _ in response.content.iter_chunks(): print(data) If chunked transfer encoding is used, the original http chunks formatting can be retrieved by reading the second element of returned tuples:: buffer = b"" async for data, end_of_http_chunk in response.content.iter_chunks(): buffer += data if end_of_http_chunk: print(buffer) buffer = b"" Helpers ------- .. method:: StreamReader.exception() Get the exception occurred on data reading. .. method:: is_eof() Return ``True`` if EOF was reached. Internal buffer may be not empty at the moment. .. seealso:: :meth:`StreamReader.at_eof()` .. method:: StreamReader.at_eof() Return ``True`` if the buffer is empty and EOF was reached. .. method:: StreamReader.read_nowait(n=None) Returns data from internal buffer if any, empty bytes object otherwise. Raises :exc:`RuntimeError` if other coroutine is waiting for stream. :param int n: how many bytes to read, ``-1`` for the whole internal buffer. :return bytes: the given data .. method:: StreamReader.unread_data(data) Rollback reading some data from stream, inserting it to buffer head. :param bytes data: data to push back into the stream. .. warning:: The method does not wake up waiters. E.g. :meth:`~StreamReader.read()` will not be resumed. .. comethod:: wait_eof() Wait for EOF. The given data may be accessible by upcoming read calls. aiohttp-3.0.1/docs/testing.rst0000666000000000000000000006175213240304665014472 0ustar 00000000000000.. _aiohttp-testing: Testing ======= .. 
currentmodule:: aiohttp.test_utils Testing aiohttp web servers --------------------------- aiohttp provides a plugin for *pytest* making writing web server tests extremely easy, it also provides :ref:`test framework agnostic utilities ` for testing with other frameworks such as :ref:`unittest `. Before starting to write your tests, you may also be interested in reading :ref:`how to write testable services` that interact with the loop. For using the pytest plugin please install the pytest-aiohttp_ library: .. code-block:: shell $ pip install pytest-aiohttp If you don't want to install *pytest-aiohttp* for some reason you may insert ``pytest_plugins = 'aiohttp.pytest_plugin'`` line into ``conftest.py`` instead for the same functionality. Provisional Status ~~~~~~~~~~~~~~~~~~ The module is **provisional**. *aiohttp* has a year and a half period for removing deprecated API (:ref:`aiohttp-backward-compatibility-policy`). But for :mod:`aiohttp.test_utils` the deprecation period could be reduced. Moreover we may break *backward compatibility* without a *deprecation period* for some very strong reason. The Test Client and Servers ~~~~~~~~~~~~~~~~~~~~~~~~~~~ *aiohttp* test utils provide a scaffolding for testing aiohttp-based web servers. They consist of two parts: running a test server and making HTTP requests to this server. :class:`~aiohttp.test_utils.TestServer` runs an :class:`aiohttp.web.Application` based server, :class:`~aiohttp.test_utils.RawTestServer` starts an :class:`aiohttp.web.WebServer` low level server. For performing HTTP requests to these servers you have to create a test client: a :class:`~aiohttp.test_utils.TestClient` instance. The client encapsulates :class:`aiohttp.ClientSession` by providing proxy methods to the client for common operations such as *ws_connect*, *get*, *post*, etc. Pytest ~~~~~~ The :data:`aiohttp_client` fixture available from the pytest-aiohttp_ plugin allows you to create a client to make requests to test your app. 
A simple would be:: from aiohttp import web async def hello(request): return web.Response(text='Hello, world') async def test_hello(aiohttp_client, loop): app = web.Application() app.router.add_get('/', hello) client = await aiohttp_client(app) resp = await client.get('/') assert resp.status == 200 text = await resp.text() assert 'Hello, world' in text It also provides access to the app instance allowing tests to check the state of the app. Tests can be made even more succinct with a fixture to create an app test client:: import pytest from aiohttp import web async def previous(request): if request.method == 'POST': request.app['value'] = (await request.post())['value'] return web.Response(body=b'thanks for the data') return web.Response( body='value: {}'.format(request.app['value']).encode('utf-8')) @pytest.fixture def cli(loop, aiohttp_client): app = web.Application() app.router.add_get('/', previous) app.router.add_post('/', previous) return loop.run_until_complete(aiohttp_client(app)) async def test_set_value(cli): resp = await cli.post('/', data={'value': 'foo'}) assert resp.status == 200 assert await resp.text() == 'thanks for the data' assert cli.server.app['value'] == 'foo' async def test_get_value(cli): cli.server.app['value'] = 'bar' resp = await cli.get('/') assert resp.status == 200 assert await resp.text() == 'value: bar' Pytest tooling has the following fixtures: .. data:: aiohttp_server(app, *, port=None, **kwargs) A fixture factory that creates :class:`~aiohttp.test_utils.TestServer`:: async def test_f(aiohttp_server): app = web.Application() # fill route table server = await aiohttp_server(app) The server will be destroyed on exit from test function. *app* is the :class:`aiohttp.web.Application` used to start server. *port* optional, port the server is run at, if not provided a random unused port is used. .. versionadded:: 3.0 *kwargs* are parameters passed to :meth:`aiohttp.web.Application.make_handler` .. 
versionchanged:: 3.0 The fixture was renamed from ``test_server`` to ``aiohttp_server``. .. data:: aiohttp_client(app, server_kwargs=None, **kwargs) aiohttp_client(server, **kwargs) aiohttp_client(raw_server, **kwargs) A fixture factory that creates :class:`~aiohttp.test_utils.TestClient` for access to tested server:: async def test_f(aiohttp_client): app = web.Application() # fill route table client = await aiohttp_client(app) resp = await client.get('/') *client* and responses are cleaned up after test function finishing. The fixture accepts :class:`aiohttp.web.Application`, :class:`aiohttp.test_utils.TestServer` or :class:`aiohttp.test_utils.RawTestServer` instance. *server_kwargs* are parameters passed to the test server if an app is passed, else ignored. *kwargs* are parameters passed to :class:`aiohttp.test_utils.TestClient` constructor. .. versionchanged:: 3.0 The fixture was renamed from ``test_client`` to ``aiohttp_client``. .. data:: aiohttp_raw_server(handler, *, port=None, **kwargs) A fixture factory that creates :class:`~aiohttp.test_utils.RawTestServer` instance from given web handler.:: async def test_f(aiohttp_raw_server, aiohttp_client): async def handler(request): return web.Response(text="OK") raw_server = await aiohttp_raw_server(handler) client = await aiohttp_client(raw_server) resp = await client.get('/') *handler* should be a coroutine which accepts a request and returns response, e.g. *port* optional, port the server is run at, if not provided a random unused port is used. .. versionadded:: 3.0 .. data:: aiohttp_unused_port() Function to return an unused port number for IPv4 TCP protocol:: async def test_f(aiohttp_client, aiohttp_unused_port): port = aiohttp_unused_port() app = web.Application() # fill route table client = await aiohttp_client(app, server_kwargs={'port': port}) ... .. versionchanged:: 3.0 The fixture was renamed from ``unused_port`` to ``aiohttp_unused_port``. .. _aiohttp-testing-unittest-example: .. 
_aiohttp-testing-unittest-style: Unittest ~~~~~~~~ To test applications with the standard library's unittest or unittest-based functionality, the AioHTTPTestCase is provided:: from aiohttp.test_utils import AioHTTPTestCase, unittest_run_loop from aiohttp import web class MyAppTestCase(AioHTTPTestCase): async def get_application(self): """ Override the get_app method to return your application. """ async def hello(request): return web.Response(text='Hello, world') app = web.Application() app.router.add_get('/', hello) return app # the unittest_run_loop decorator can be used in tandem with # the AioHTTPTestCase to simplify running # tests that are asynchronous @unittest_run_loop async def test_example(self): request = await self.client.request("GET", "/") assert request.status == 200 text = await request.text() assert "Hello, world" in text # a vanilla example def test_example_vanilla(self): async def test_get_route(): url = "/" resp = await self.client.request("GET", url) assert resp.status == 200 text = await resp.text() assert "Hello, world" in text self.loop.run_until_complete(test_get_route()) .. class:: AioHTTPTestCase A base class to allow for unittest web applications using aiohttp. Derived from :class:`unittest.TestCase` Provides the following: .. attribute:: client an aiohttp test client, :class:`TestClient` instance. .. attribute:: server an aiohttp test server, :class:`TestServer` instance. .. versionadded:: 2.3 .. attribute:: loop The event loop in which the application and server are running. .. attribute:: app The application returned by :meth:`get_app` (:class:`aiohttp.web.Application` instance). .. comethod:: get_client() This async method can be overridden to return the :class:`TestClient` object used in the test. :return: :class:`TestClient` instance. .. versionadded:: 2.3 .. comethod:: get_server() This async method can be overridden to return the :class:`TestServer` object used in the test. :return: :class:`TestServer` instance. .. 
versionadded:: 2.3 .. comethod:: get_application() This async method should be overridden to return the :class:`aiohttp.web.Application` object to test. :return: :class:`aiohttp.web.Application` instance. .. comethod:: setUpAsync() This async method does nothing by default and can be overridden to execute asynchronous code during the ``setUp`` stage of the ``TestCase``. .. versionadded:: 2.3 .. comethod:: tearDownAsync() This async method does nothing by default and can be overridden to execute asynchronous code during the ``tearDown`` stage of the ``TestCase``. .. versionadded:: 2.3 .. method:: setUp() Standard test initialization method. .. method:: tearDown() Standard test finalization method. .. note:: The ``TestClient``'s methods are asynchronous: you have to execute functions on the test client using asynchronous methods. A basic test class wraps every test method by :func:`unittest_run_loop` decorator:: class TestA(AioHTTPTestCase): @unittest_run_loop async def test_f(self): resp = await self.client.get('/') .. decorator:: unittest_run_loop: A decorator dedicated to use with asynchronous methods of an :class:`AioHTTPTestCase`. Handles executing an asynchronous function, using the :attr:`AioHTTPTestCase.loop` of the :class:`AioHTTPTestCase`. Faking request object --------------------- aiohttp provides a test utility for creating fake :class:`aiohttp.web.Request` objects: :func:`aiohttp.test_utils.make_mocked_request`, it could be useful in case of simple unit tests, like handler tests, or to simulate error conditions that are hard to reproduce on a real server:: from aiohttp import web from aiohttp.test_utils import make_mocked_request def handler(request): assert request.headers.get('token') == 'x' return web.Response(body=b'data') def test_handler(): req = make_mocked_request('GET', '/', headers={'token': 'x'}) resp = handler(req) assert resp.body == b'data' .. 
warning:: We don't recommend applying :func:`~aiohttp.test_utils.make_mocked_request` everywhere for testing web-handler's business object -- please use test client and real networking via 'localhost' as shown in examples before. :func:`~aiohttp.test_utils.make_mocked_request` exists only for testing complex cases (e.g. emulating network errors) which are extremely hard or even impossible to test in a conventional way. .. function:: make_mocked_request(method, path, headers=None, *, \ version=HttpVersion(1, 1), \ closing=False, \ app=None, \ match_info=sentinel, \ reader=sentinel, \ writer=sentinel, \ transport=sentinel, \ payload=sentinel, \ sslcontext=None, \ loop=...) Creates a mocked web.Request for testing purposes. Useful in unit tests, when spinning up a full web server is overkill or specific conditions and errors are hard to trigger. :param method: str, that represents HTTP method, like: GET, POST. :type method: str :param path: str, The URL including *PATH INFO* without the host or scheme :type path: str :param headers: mapping containing the headers. Can be anything accepted by the multidict.CIMultiDict constructor. :type headers: dict, multidict.CIMultiDict, list of pairs :param match_info: mapping containing the info to match with url parameters. :type match_info: dict :param version: namedtuple with encoded HTTP version :type version: aiohttp.protocol.HttpVersion :param closing: flag indicates that connection should be closed after response. 
:type closing: bool :param app: the aiohttp.web application attached for fake request :type app: aiohttp.web.Application :param writer: object for managing outgoing data :type writer: aiohttp.streams.StreamWriter :param transport: asyncio transport instance :type transport: asyncio.transports.Transport :param payload: raw payload reader object :type payload: aiohttp.streams.FlowControlStreamReader :param sslcontext: ssl.SSLContext object, for HTTPS connection :type sslcontext: ssl.SSLContext :param loop: An event loop instance, mocked loop by default. :type loop: :class:`asyncio.AbstractEventLoop` :return: :class:`aiohttp.web.Request` object. .. versionadded:: 2.3 *match_info* parameter. .. _aiohttp-testing-writing-testable-services: .. _aiohttp-testing-framework-agnostic-utilities: Framework Agnostic Utilities ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ High level test creation:: from aiohttp.test_utils import TestClient, loop_context from aiohttp import request # loop_context is provided as a utility. You can use any # asyncio.BaseEventLoop class in its place. 
with loop_context() as loop: app = _create_example_app() with TestClient(app, loop=loop) as client: async def test_get_route(): nonlocal client resp = await client.get("/") assert resp.status == 200 text = await resp.text() assert "Hello, world" in text loop.run_until_complete(test_get_route()) If it's preferred to handle the creation / teardown on a more granular basis, the TestClient object can be used directly:: from aiohttp.test_utils import TestClient with loop_context() as loop: app = _create_example_app() client = TestClient(app, loop=loop) loop.run_until_complete(client.start_server()) root = "http://127.0.0.1:{}".format(port) async def test_get_route(): resp = await client.get("/") assert resp.status == 200 text = await resp.text() assert "Hello, world" in text loop.run_until_complete(test_get_route()) loop.run_until_complete(client.close()) A full list of the utilities provided can be found at the :data:`api reference ` Writing testable services ------------------------- Some libraries like motor, aioes and others depend on the asyncio loop for executing the code. When running your normal program, these libraries pick the main event loop by doing ``asyncio.get_event_loop``. The problem during testing is that there is no main loop assigned because an independent loop for each test is created without assigning it as the main one. This raises a problem when those libraries try to find it. Luckily, the ones that are well written, allow passing the loop explicitly. Let's have a look at the aioes client signature:: def __init__(self, endpoints, *, loop=None, **kwargs) As you can see, there is an optional ``loop`` kwarg. Of course, we are not going to test directly the aioes client but our service that depends on it will. 
So, if we want our ``AioESService`` to be easily testable, we should define it as follows:: import asyncio from aioes import Elasticsearch class AioESService: def __init__(self, loop=None): self.es = Elasticsearch(["127.0.0.1:9200"], loop=loop) async def get_info(self): cluster_info = await self.es.info() print(cluster_info) if __name__ == "__main__": client = AioESService() loop = asyncio.get_event_loop() loop.run_until_complete(client.get_info()) Note that it is accepting an optional ``loop`` kwarg. For the normal flow of execution it won't affect because we can still call the service without passing the loop explicitly having a main loop available. The problem comes when you try to do a test like:: import pytest from main import AioESService class TestAioESService: async def test_get_info(self): cluster_info = await AioESService().get_info() assert isinstance(cluster_info, dict) If you try to run the test, it will fail with a similar error:: ... RuntimeError: There is no current event loop in thread 'MainThread'. If you check the stack trace, you will see aioes is complaining that there is no current event loop in the main thread. Pass explicit loop to solve it. If you rely on code which works with *implicit* loops only you may try to use hackish approach from :ref:`FAQ `. Testing API Reference --------------------- Test server ~~~~~~~~~~~ Runs given :class:`aiohttp.web.Application` instance on random TCP port. After creation the server is not started yet, use :meth:`~aiohttp.test_utils.TestServer.start_server` for actual server starting and :meth:`~aiohttp.test_utils.TestServer.close` for stopping/cleanup. Test server usually works in conjunction with :class:`aiohttp.test_utils.TestClient` which provides handy client methods for accessing to the server. .. class:: BaseTestServer(*, scheme='http', host='127.0.0.1', port=None) Base class for test servers. :param str scheme: HTTP scheme, non-protected ``"http"`` by default. 
:param str host: a host for TCP socket, IPv4 *local host* (``'127.0.0.1'``) by default. :param int port: optional port for TCP socket, if not provided a random unused port is used. .. versionadded:: 3.0 .. attribute:: scheme A *scheme* for tested application, ``'http'`` for non-protected run and ``'https'`` for TLS encrypted server. .. attribute:: host *host* used to start a test server. .. attribute:: port *port* used to start the test server. .. attribute:: handler :class:`aiohttp.web.WebServer` used for HTTP requests serving. .. attribute:: server :class:`asyncio.AbstractServer` used for managing accepted connections. .. comethod:: start_server(loop=None, **kwargs) :param loop: the event_loop to use :type loop: asyncio.AbstractEventLoop Start a test server. .. comethod:: close() Stop and finish executed test server. .. method:: make_url(path) Return an *absolute* :class:`~yarl.URL` for given *path*. .. class:: RawTestServer(handler, *, scheme="http", host='127.0.0.1') Low-level test server (derived from :class:`BaseTestServer`). :param handler: a coroutine for handling web requests. The handler should accept :class:`aiohttp.web.BaseRequest` and return a response instance, e.g. :class:`~aiohttp.web.StreamResponse` or :class:`~aiohttp.web.Response`. The handler could raise :class:`~aiohttp.web.HTTPException` as a signal for non-200 HTTP response. :param str scheme: HTTP scheme, non-protected ``"http"`` by default. :param str host: a host for TCP socket, IPv4 *local host* (``'127.0.0.1'``) by default. :param int port: optional port for TCP socket, if not provided a random unused port is used. .. versionadded:: 3.0 .. class:: TestServer(app, *, scheme="http", host='127.0.0.1') Test server (derived from :class:`BaseTestServer`) for starting :class:`~aiohttp.web.Application`. :param app: :class:`aiohttp.web.Application` instance to run. :param str scheme: HTTP scheme, non-protected ``"http"`` by default. 
:param str host: a host for TCP socket, IPv4 *local host* (``'127.0.0.1'``) by default. :param int port: optional port for TCP socket, if not provided a random unused port is used. .. versionadded:: 3.0 .. attribute:: app :class:`aiohttp.web.Application` instance to run. Test Client ~~~~~~~~~~~ .. class:: TestClient(app_or_server, *, loop=None, \ scheme='http', host='127.0.0.1', \ cookie_jar=None, **kwargs) A test client used for making calls to tested server. :param app_or_server: :class:`BaseTestServer` instance for making client requests to it. If the parameter is :class:`aiohttp.web.Application` the tool creates :class:`TestServer` implicitly for serving the application. :param cookie_jar: an optional :class:`aiohttp.CookieJar` instance, may be useful with ``CookieJar(unsafe=True)`` option. :param str scheme: HTTP scheme, non-protected ``"http"`` by default. :param asyncio.AbstractEventLoop loop: the event_loop to use :param str host: a host for TCP socket, IPv4 *local host* (``'127.0.0.1'``) by default. .. attribute:: scheme A *scheme* for tested application, ``'http'`` for non-protected run and ``'https'`` for TLS encrypted server. .. attribute:: host *host* used to start a test server. .. attribute:: port *port* used to start the server .. attribute:: server :class:`BaseTestServer` test server instance used in conjunction with client. .. attribute:: session An internal :class:`aiohttp.ClientSession`. Unlike the methods on the :class:`TestClient`, client session requests do not automatically include the host in the url queried, and will require an absolute path to the resource. .. comethod:: start_server(**kwargs) Start a test server. .. comethod:: close() Stop and finish executed test server. .. method:: make_url(path) Return an *absolute* :class:`~yarl.URL` for given *path*. .. comethod:: request(method, path, *args, **kwargs) Routes a request to tested http server. 
The interface is identical to :meth:`aiohttp.ClientSession.request`, except the loop kwarg is overridden by the instance used by the test server. .. comethod:: get(path, *args, **kwargs) Perform an HTTP GET request. .. comethod:: post(path, *args, **kwargs) Perform an HTTP POST request. .. comethod:: options(path, *args, **kwargs) Perform an HTTP OPTIONS request. .. comethod:: head(path, *args, **kwargs) Perform an HTTP HEAD request. .. comethod:: put(path, *args, **kwargs) Perform an HTTP PUT request. .. comethod:: patch(path, *args, **kwargs) Perform an HTTP PATCH request. .. comethod:: delete(path, *args, **kwargs) Perform an HTTP DELETE request. .. comethod:: ws_connect(path, *args, **kwargs) Initiate websocket connection. The API corresponds to :meth:`aiohttp.ClientSession.ws_connect`. Utilities ~~~~~~~~~ .. function:: make_mocked_coro(return_value) Creates a coroutine mock. Behaves like a coroutine which returns *return_value*. But it is also a mock object, you might test it as usual :class:`~unittest.mock.Mock`:: mocked = make_mocked_coro(1) assert 1 == await mocked(1, 2) mocked.assert_called_with(1, 2) :param return_value: A value that the mock object will return when called. :returns: A mock object that behaves as a coroutine which returns *return_value* when called. .. function:: unused_port() Return an unused port number for IPv4 TCP protocol. :return int: ephemeral port number which could be reused by test server. .. function:: loop_context(loop_factory=asyncio.new_event_loop) A contextmanager that creates an event_loop, for test purposes. Handles the creation and cleanup of a test loop. .. function:: setup_test_loop(loop_factory=asyncio.new_event_loop) Create and return an :class:`asyncio.AbstractEventLoop` instance. The caller should also call teardown_test_loop, once they are done with the loop. .. function:: teardown_test_loop(loop) Teardown and cleanup an event_loop created by setup_test_loop. :param loop: the loop to teardown :type loop: asyncio.AbstractEventLoop .. 
_pytest: http://pytest.org/latest/ .. _pytest-aiohttp: https://pypi.python.org/pypi/pytest-aiohttp aiohttp-3.0.1/docs/third_party.rst0000666000000000000000000001554113240304665015341 0ustar 00000000000000.. _aiohttp-3rd-party: Third-Party libraries ===================== aiohttp is not the library for making HTTP requests and creating WEB server only. It is the grand basement for libraries built *on top* of aiohttp. This page is a list of these tools. Please feel free to add your open sourced library if it's not enlisted yet by making Pull Request to https://github.com/aio-libs/aiohttp/ * Why do you might want to include your awesome library into the list? * Just because the list increases your library visibility. People will have an easy way to find it. Officially supported -------------------- This list contains libraries which are supported by *aio-libs* team and located on https://github.com/aio-libs aiohttp extensions ^^^^^^^^^^^^^^^^^^ - `aiohttp-session `_ provides sessions for :mod:`aiohttp.web`. - `aiohttp-debugtoolbar `_ is a library for *debug toolbar* support for :mod:`aiohttp.web`. - `aiohttp-security `_ auth and permissions for :mod:`aiohttp.web`. - `aiohttp-devtools `_ provides development tools for :mod:`aiohttp.web` applications. - `aiohttp-cors `_ CORS support for aiohttp. - `aiohttp-sse `_ Server-sent events support for aiohttp. - `pytest-aiohttp `_ pytest plugin for aiohttp support. - `aiohttp-mako `_ Mako template renderer for aiohttp.web. - `aiohttp-jinja2 `_ Jinja2 template renderer for aiohttp.web. Database drivers ^^^^^^^^^^^^^^^^ - `aiopg `_ PostgreSQL async driver. - `aiomysql `_ MySql async driver. - `aioredis `_ Redis async driver. Other tools ^^^^^^^^^^^ - `aiodocker `_ Python Docker API client based on asyncio and aiohttp. - `aiobotocore `_ asyncio support for botocore library using aiohttp. 
Approved third-party libraries ------------------------------ The libraries are not part of ``aio-libs`` but they are proven to be very well written and highly recommended for usage. - `uvloop `_ Ultra fast implementation of asyncio event loop on top of ``libuv``. We are highly recommending to use it instead of standard ``asyncio``. Database drivers ^^^^^^^^^^^^^^^^ - `asyncpg `_ Another PostgreSQL async driver. It's much faster than ``aiopg`` but it is not drop-in replacement -- the API is different. Anyway please take a look on it -- the driver is really incredible fast. Others ------ The list of libraries which are exists but not enlisted in former categories. They may be perfect or not -- we don't know. Please add your library reference here first and after some time period ask to raise the status. - `aiohttp-cache `_ A cache system for aiohttp server. - `aiocache `_ Caching for asyncio with multiple backends (framework agnostic) - `gain `_ Web crawling framework based on asyncio for everyone. - `aiohttp-swagger `_ Swagger API Documentation builder for aiohttp server. - `aiohttp-swaggerify `_ Library to automatically generate swagger2.0 definition for aiohttp endpoints. - `aiohttp-validate `_ Simple library that helps you validate your API endpoints requests/responses with json schema. - `raven-aiohttp `_ An aiohttp transport for raven-python (Sentry client). - `webargs `_ A friendly library for parsing HTTP request arguments, with built-in support for popular web frameworks, including Flask, Django, Bottle, Tornado, Pyramid, webapp2, Falcon, and aiohttp. - `aioauth-client `_ OAuth client for aiohttp. - `aiohttpretty `_ A simple asyncio compatible httpretty mock using aiohttp. - `aioresponses `_ a helper for mock/fake web requests in python aiohttp package. - `aiohttp-transmute `_ A transmute implementation for aiohttp. - `aiohttp_apiset `_ Package to build routes using swagger specification. 
- `aiohttp-login `_ Registration and authorization (including social) for aiohttp applications. - `aiohttp_utils `_ Handy utilities for building aiohttp.web applications. - `aiohttpproxy `_ Simple aiohttp HTTP proxy. - `aiohttp_traversal `_ Traversal based router for aiohttp.web. - `aiohttp_autoreload `_ Makes aiohttp server auto-reload on source code change. - `gidgethub `_ An async GitHub API library for Python. - `aiohttp_jrpc `_ aiohttp JSON-RPC service. - `fbemissary `_ A bot framework for the Facebook Messenger platform, built on asyncio and aiohttp. - `aioslacker `_ slacker wrapper for asyncio. - `aioreloader `_ Port of tornado reloader to asyncio. - `aiohttp_babel `_ Babel localization support for aiohttp. - `python-mocket `_ a socket mock framework - for all kinds of socket animals, web-clients included. - `aioraft `_ asyncio RAFT algorithm based on aiohttp. - `home-assistant `_ Open-source home automation platform running on Python 3. - `discord.py `_ Discord client library. - `aiohttp-graphql `_ GraphQL and GraphIQL interface for aiohttp. - `aiohttp-sentry `_ An aiohttp middleware for reporting errors to Sentry. Python 3.5+ is required. - `async-v20 `_ Asynchronous FOREX client for OANDA's v20 API. Python 3.6+ aiohttp-3.0.1/docs/tracing_reference.rst0000666000000000000000000001774613240304665016466 0ustar 00000000000000.. _aiohttp-client-tracing-reference: Tracing Reference ================= .. currentmodule:: aiohttp .. versionadded:: 3.0 A reference for client tracing API. .. seealso:: :ref:`aiohttp-client-tracing` for tracing usage instructions. TraceConfig ----------- .. class:: TraceConfig(trace_config_ctx_factory=SimpleNamespace) Trace config is the configuration object used to trace requests launched by a :class:`ClientSession` object using different events related to different parts of the request flow. :param trace_config_ctx_factory: factory used to create trace contexts, default class used :class:`types.SimpleNamespace` .. 
method:: trace_config_ctx(trace_request_ctx=None) :param trace_request_ctx: Will be used to pass as a kw for the ``trace_config_ctx_factory``. Build a new trace context from the config. Every signal handler should have the following signature:: async def on_signal(session, context, params): ... where ``session`` is :class:`ClientSession` instance, ``context`` is an object returned by :meth:`trace_config_ctx` call and ``params`` is a data class with signal parameters. The type of ``params`` depends on subscribed signal and described below. .. attribute:: on_request_start Property that gives access to the signals that will be executed when a request starts. ``params`` is :class:`aiohttp.TraceRequestStartParams` instance. .. attribute:: on_request_redirect Property that gives access to the signals that will be executed when a redirect happens during a request flow. ``params`` is :class:`aiohttp.TraceRequestRedirectParams` instance. .. attribute:: on_request_end Property that gives access to the signals that will be executed when a request ends. ``params`` is :class:`aiohttp.TraceRequestEndParams` instance. .. attribute:: on_request_exception Property that gives access to the signals that will be executed when a request finishes with an exception. ``params`` is :class:`aiohttp.TraceRequestExceptionParams` instance. .. attribute:: on_connection_queued_start Property that gives access to the signals that will be executed when a request has been queued waiting for an available connection. ``params`` is :class:`aiohttp.TraceConnectionQueuedStartParams` instance. .. attribute:: on_connection_queued_end Property that gives access to the signals that will be executed when a request that was queued already has an available connection. ``params`` is :class:`aiohttp.TraceConnectionQueuedEndParams` instance. .. attribute:: on_connection_create_start Property that gives access to the signals that will be executed when a request creates a new connection. 
``params`` is :class:`aiohttp.TraceConnectionCreateStartParams` instance. .. attribute:: on_connection_create_end Property that gives access to the signals that will be executed when a request that created a new connection finishes its creation. ``params`` is :class:`aiohttp.TraceConnectionCreateEndParams` instance. .. attribute:: on_connection_reuseconn Property that gives access to the signals that will be executed when a request reuses a connection. ``params`` is :class:`aiohttp.TraceConnectionReuseconnParams` instance. .. attribute:: on_dns_resolvehost_start Property that gives access to the signals that will be executed when a request starts to resolve the domain related with the request. ``params`` is :class:`aiohttp.TraceDnsResolveHostStartParams` instance. .. attribute:: on_dns_resolvehost_end Property that gives access to the signals that will be executed when a request finishes to resolve the domain related with the request. ``params`` is :class:`aiohttp.TraceDnsResolveHostEndParams` instance. .. attribute:: on_dns_cache_hit Property that gives access to the signals that will be executed when a request was able to use a cached DNS resolution for the domain related with the request. ``params`` is :class:`aiohttp.TraceDnsCacheHitParams` instance. .. attribute:: on_dns_cache_miss Property that gives access to the signals that will be executed when a request was not able to use a cached DNS resolution for the domain related with the request. ``params`` is :class:`aiohttp.TraceDnsCacheMissParams` instance. TraceRequestStartParams ----------------------- .. class:: TraceRequestStartParams See :attr:`TraceConfig.on_request_start` for details. .. attribute:: method Method that will be used to make the request. .. attribute:: url URL that will be used for the request. .. attribute:: headers Headers that will be used for the request, can be mutated. TraceRequestEndParams --------------------- .. 
class:: TraceRequestEndParams See :attr:`TraceConfig.on_request_end` for details. .. attribute:: method Method used to make the request. .. attribute:: url URL used for the request. .. attribute:: headers Headers used for the request. .. attribute:: response Response :class:`ClientResponse`. TraceRequestExceptionParams --------------------------- .. class:: TraceRequestExceptionParams See :attr:`TraceConfig.on_request_exception` for details. .. attribute:: method Method used to make the request. .. attribute:: url URL used for the request. .. attribute:: headers Headers used for the request. .. attribute:: exception Exception raised during the request. TraceRequestRedirectParams -------------------------- .. class:: TraceRequestRedirectParams See :attr:`TraceConfig.on_request_redirect` for details. .. attribute:: method Method used to get this redirect request. .. attribute:: url URL used for this redirect request. .. attribute:: headers Headers used for this redirect. .. attribute:: response Response :class:`ClientResponse` got from the redirect. TraceConnectionQueuedStartParams -------------------------------- .. class:: TraceConnectionQueuedStartParams See :attr:`TraceConfig.on_connection_queued_start` for details. There are no attributes right now. TraceConnectionQueuedEndParams ------------------------------ .. class:: TraceConnectionQueuedEndParams See :attr:`TraceConfig.on_connection_queued_end` for details. There are no attributes right now. TraceConnectionCreateStartParams -------------------------------- .. class:: TraceConnectionCreateStartParams See :attr:`TraceConfig.on_connection_create_start` for details. There are no attributes right now. TraceConnectionCreateEndParams ------------------------------ .. class:: TraceConnectionCreateEndParams See :attr:`TraceConfig.on_connection_create_end` for details. There are no attributes right now. TraceConnectionReuseconnParams ------------------------------ .. 
class:: TraceConnectionReuseconnParams See :attr:`TraceConfig.on_connection_reuseconn` for details. There are no attributes right now. TraceDnsResolveHostStartParams ------------------------------ .. class:: TraceDnsResolveHostStartParams See :attr:`TraceConfig.on_dns_resolvehost_start` for details. .. attribute:: Host Host that will be resolved. TraceDnsResolveHostEndParams ---------------------------- .. class:: TraceDnsResolveHostEndParams See :attr:`TraceConfig.on_dns_resolvehost_end` for details. .. attribute:: Host Host that has been resolved. TraceDnsCacheHitParams ---------------------- .. class:: TraceDnsCacheHitParams See :attr:`TraceConfig.on_dns_cache_hit` for details. .. attribute:: Host Host found in the cache. TraceDnsCacheMissParams ----------------------- .. class:: TraceDnsCacheMissParams See :attr:`TraceConfig.on_dns_cache_miss` for details. .. attribute:: Host Host that was not found in the cache. aiohttp-3.0.1/docs/tutorial.rst0000666000000000000000000002504713240304665014655 0ustar 00000000000000.. _aiohttp-tutorial: Server Tutorial =============== Are you going to learn *aiohttp* but don't know where to start? We have an example for you. The polls application is a great example for getting started with aiohttp. If you want the full source code in advance or for comparison, check out the `demo source`_. .. _demo source: https://github.com/aio-libs/aiohttp/tree/master/demos/polls/ .. _aiohttp-tutorial-setup: Setup your environment ---------------------- First of all check your Python version: .. code-block:: shell $ python -V Python 3.5.0 The tutorial requires Python 3.5.0 or newer. We’ll assume that you have already installed the *aiohttp* library. You can check aiohttp is installed and which version by running the following command: .. code-block:: shell $ python3 -c 'import aiohttp; print(aiohttp.__version__)' 2.0.5 Project structure looks very similar to other python based web projects: .. code-block:: none . 
├── README.rst └── polls ├── Makefile ├── README.rst ├── aiohttpdemo_polls │ ├── __init__.py │ ├── __main__.py │ ├── db.py │ ├── main.py │ ├── routes.py │ ├── templates │ ├── utils.py │ └── views.py ├── config │ └── polls.yaml ├── images │ └── example.png ├── setup.py ├── sql │ ├── create_tables.sql │ ├── install.sh │ └── sample_data.sql └── static └── style.css .. _aiohttp-tutorial-introduction: Getting started with aiohttp first app -------------------------------------- This tutorial based on Django polls tutorial. Application ----------- All aiohttp server is built around :class:`aiohttp.web.Application` instance. It is used for registering *startup*/*cleanup* signals, connecting routes etc. The following code creates an application:: from aiohttp import web app = web.Application() web.run_app(app, host='127.0.0.1', port=8080) Save it under ``aiohttpdemo_polls/main.py`` and start the server: .. code-block:: shell $ python3 main.py You'll see the following output on the command line: .. code-block:: shell ======== Running on http://127.0.0.1:8080 ======== (Press CTRL+C to quit) Open ``http://127.0.0.1:8080`` in browser or do .. code-block:: shell $ curl -X GET localhost:8080 Alas, for now both return only ``404: Not Found``. To show something more meaningful let's create a route and a view. .. _aiohttp-tutorial-views: Views ----- Let's start from first views. Create the file ``aiohttpdemo_polls/views.py`` with the following:: from aiohttp import web async def index(request): return web.Response(text='Hello Aiohttp!') This is the simplest view possible in Aiohttp. Now we should create a route for this ``index`` view. Put this into ``aiohttpdemo_polls/routes.py`` (it is a good practice to separate views, routes, models etc. 
You'll have more of each, and it is nice to have them in different places):: from views import index def setup_routes(app): app.router.add_get('/', index) Also, we should call ``setup_routes`` function somewhere, and the best place is in the ``main.py`` :: from aiohttp import web from routes import setup_routes app = web.Application() setup_routes(app) web.run_app(app, host='127.0.0.1', port=8080) Start server again. Now if we open browser we can see: .. code-block:: shell $ curl -X GET localhost:8080 Hello Aiohttp! Success! For now your working directory should look like this: .. code-block:: none . ├── .. └── polls ├── aiohttpdemo_polls │ ├── main.py │ ├── routes.py │ └── views.py .. _aiohttp-tutorial-config: Configuration files ------------------- aiohttp is configuration agnostic. It means the library does not require any configuration approach and does not have builtin support for any config schema. But please take into account these facts: 1. 99% of servers have configuration files. 2. Every product (except Python-based solutions like Django and Flask) does not store config files as part as source code. For example Nginx has own configuration files stored by default under ``/etc/nginx`` folder. Mongo pushes config as ``/etc/mongodb.conf``. 3. Config files validation is good idea, strong checks may prevent silly errors during product deployment. Thus we **suggest** to use the following approach: 1. Pushing configs as ``yaml`` files (``json`` or ``ini`` is also good but ``yaml`` is the best). 2. Loading ``yaml`` config from a list of predefined locations, e.g. ``./config/app_cfg.yaml``, ``/etc/app_cfg.yaml``. 3. Keeping ability to override config file by command line parameter, e.g. ``./run_app --config=/opt/config/app_cfg.yaml``. 4. Applying strict validation checks to loaded dict. `trafaret `_, `colander `_ or `JSON schema `_ are good candidates for such job. 
Load config and push into application:: # load config from yaml file in current dir conf = load_config(str(pathlib.Path('.') / 'config' / 'polls.yaml')) app['config'] = conf .. _aiohttp-tutorial-database: Database -------- Setup ^^^^^ In this tutorial we will use the latest PostgreSQL database. You can install PostgreSQL using this instruction http://www.postgresql.org/download/ Database schema ^^^^^^^^^^^^^^^ We use SQLAlchemy to describe database schemas. For this tutorial we can use two simple models ``question`` and ``choice``:: import sqlalchemy as sa meta = sa.MetaData() question = sa.Table( 'question', meta, sa.Column('id', sa.Integer, nullable=False), sa.Column('question_text', sa.String(200), nullable=False), sa.Column('pub_date', sa.Date, nullable=False), # Indexes # sa.PrimaryKeyConstraint('id', name='question_id_pkey')) choice = sa.Table( 'choice', meta, sa.Column('id', sa.Integer, nullable=False), sa.Column('question_id', sa.Integer, nullable=False), sa.Column('choice_text', sa.String(200), nullable=False), sa.Column('votes', sa.Integer, server_default="0", nullable=False), # Indexes # sa.PrimaryKeyConstraint('id', name='choice_id_pkey'), sa.ForeignKeyConstraint(['question_id'], [question.c.id], name='choice_question_id_fkey', ondelete='CASCADE'), ) You can find below description of tables in database: First table is question: +---------------+ | question | +===============+ | id | +---------------+ | question_text | +---------------+ | pub_date | +---------------+ and second table is choice table: +---------------+ | choice | +===============+ | id | +---------------+ | choice_text | +---------------+ | votes | +---------------+ | question_id | +---------------+ Creating connection engine ^^^^^^^^^^^^^^^^^^^^^^^^^^ For making DB queries we need an engine instance. Assuming ``conf`` is a :class:`dict` with configuration info Postgres connection could be done by the following coroutine: .. 
literalinclude:: ../demos/polls/aiohttpdemo_polls/db.py :pyobject: init_pg The best place for connecting to DB is :attr:`~aiohttp.web.Application.on_startup` signal:: app.on_startup.append(init_pg) Graceful shutdown ^^^^^^^^^^^^^^^^^ It is good practice to close all resources on program exit. Let's close the DB connection in the :attr:`~aiohttp.web.Application.on_cleanup` signal:: app.on_cleanup.append(close_pg) .. literalinclude:: ../demos/polls/aiohttpdemo_polls/db.py :pyobject: close_pg .. _aiohttp-tutorial-templates: Templates --------- Let's add more useful views: .. literalinclude:: ../demos/polls/aiohttpdemo_polls/views.py :pyobject: poll Templates are a very convenient way of writing web pages. We return a dict with page content, the ``aiohttp_jinja2.template`` decorator processes it with the jinja2 template renderer. For setting up the template engine we need to install the ``aiohttp_jinja2`` library first: .. code-block:: shell $ pip install aiohttp_jinja2 After installing we need to set up the library:: import aiohttp_jinja2 import jinja2 aiohttp_jinja2.setup( app, loader=jinja2.PackageLoader('aiohttpdemo_polls', 'templates')) In the tutorial we push template files under the ``polls/aiohttpdemo_polls/templates`` folder. .. _aiohttp-tutorial-static: Static files ------------ Any web site has static files: images, JavaScript sources, CSS files etc. The best way to handle static files in production is setting up a reverse proxy like NGINX or using CDN services. But for development handling static files by the aiohttp server itself is very convenient. Fortunately it can be done easily by a single call: .. literalinclude:: ../demos/polls/aiohttpdemo_polls/routes.py :pyobject: setup_static_routes where ``project_root`` is the path to the root folder. .. _aiohttp-tutorial-middlewares: Middlewares ----------- Middlewares are stacked around every web-handler. They are called *before* the handler for pre-processing the request and *after* getting the response back for post-processing the given response.
Here we'll add a simple middleware for displaying pretty looking pages for *404 Not Found* and *500 Internal Error*. Middlewares could be registered in ``app`` by adding new middleware to ``app.middlewares`` list: .. literalinclude:: ../demos/polls/aiohttpdemo_polls/middlewares.py :pyobject: setup_middlewares Middleware itself is a factory which accepts *application* and *next handler* (the following middleware or *web-handler* in case of the latest middleware in the list). The factory returns *middleware handler* which has the same signature as regular *web-handler* -- it accepts *request* and returns *response*. Middleware for processing HTTP exceptions: .. literalinclude:: ../demos/polls/aiohttpdemo_polls/middlewares.py :pyobject: error_pages Registered overrides are trivial Jinja2 template renderers: .. literalinclude:: ../demos/polls/aiohttpdemo_polls/middlewares.py :pyobject: handle_404 .. literalinclude:: ../demos/polls/aiohttpdemo_polls/middlewares.py :pyobject: handle_500 .. seealso:: :ref:`aiohttp-web-middlewares` aiohttp-3.0.1/docs/utilities.rst0000666000000000000000000000037113240304665015016 0ustar 00000000000000.. _aiohttp-utilities: Utilities ========= Miscellaneous API Shared between Client And Server. .. currentmodule:: aiohttp .. toctree:: :name: utilities abc multipart multipart_reference streams signals websocket_utilities aiohttp-3.0.1/docs/web.rst0000666000000000000000000000055413240304665013563 0ustar 00000000000000.. _aiohttp-web: Server ====== .. module:: aiohttp.web The page contains all information about aiohttp Server API: .. toctree:: :name: server Tutorial Quickstart Advanced Usage Low Level Reference Logging Testing Deployment aiohttp-3.0.1/docs/websocket_utilities.rst0000666000000000000000000001050013240304665017057 0ustar 00000000000000WebSocket utilities =================== .. currentmodule:: aiohttp .. class:: WSCloseCode An :class:`~enum.IntEnum` for keeping close message code. .. 
attribute:: OK A normal closure, meaning that the purpose for which the connection was established has been fulfilled. .. attribute:: GOING_AWAY An endpoint is "going away", such as a server going down or a browser having navigated away from a page. .. attribute:: PROTOCOL_ERROR An endpoint is terminating the connection due to a protocol error. .. attribute:: UNSUPPORTED_DATA An endpoint is terminating the connection because it has received a type of data it cannot accept (e.g., an endpoint that understands only text data MAY send this if it receives a binary message). .. attribute:: INVALID_TEXT An endpoint is terminating the connection because it has received data within a message that was not consistent with the type of the message (e.g., non-UTF-8 :rfc:`3629` data within a text message). .. attribute:: POLICY_VIOLATION An endpoint is terminating the connection because it has received a message that violates its policy. This is a generic status code that can be returned when there is no other more suitable status code (e.g., :attr:`~WSCloseCode.UNSUPPORTED_DATA` or :attr:`~WSCloseCode.MESSAGE_TOO_BIG`) or if there is a need to hide specific details about the policy. .. attribute:: MESSAGE_TOO_BIG An endpoint is terminating the connection because it has received a message that is too big for it to process. .. attribute:: MANDATORY_EXTENSION An endpoint (client) is terminating the connection because it has expected the server to negotiate one or more extensions, but the server did not return them in the response message of the WebSocket handshake. The list of extensions that are needed should appear in the /reason/ part of the Close frame. Note that this status code is not used by the server, because it can fail the WebSocket handshake instead. .. attribute:: INTERNAL_ERROR A server is terminating the connection because it encountered an unexpected condition that prevented it from fulfilling the request. .. attribute:: SERVICE_RESTART The service is restarted.
A client may reconnect, and if it chooses to do so, should reconnect using a randomized delay of 5-30s. .. attribute:: TRY_AGAIN_LATER The service is experiencing overload. A client should only connect to a different IP (when there are multiple for the target) or reconnect to the same IP upon user action. .. class:: WSMsgType An :class:`~enum.IntEnum` for describing :class:`WSMessage` type. .. attribute:: CONTINUATION A mark for a continuation frame, user will never get the message with this type. .. attribute:: TEXT Text message, the value has :class:`str` type. .. attribute:: BINARY Binary message, the value has :class:`bytes` type. .. attribute:: PING Ping frame (sent by client peer). .. attribute:: PONG Pong frame, answer to ping. Sent by server peer. .. attribute:: CLOSE Close frame. .. attribute:: CLOSED Actually not a frame but a flag indicating that websocket was closed. .. attribute:: ERROR Actually not a frame but a flag indicating that websocket received an error. .. class:: WSMessage Websocket message, returned by ``.receive()`` calls. .. attribute:: type Message type, :class:`WSMsgType` instance. .. attribute:: data Message payload. 1. :class:`str` for :attr:`WSMsgType.TEXT` messages. 2. :class:`bytes` for :attr:`WSMsgType.BINARY` messages. 3. :class:`WSCloseCode` for :attr:`WSMsgType.CLOSE` messages. 4. :class:`bytes` for :attr:`WSMsgType.PING` messages. 5. :class:`bytes` for :attr:`WSMsgType.PONG` messages. .. attribute:: extra Additional info, :class:`str`. Makes sense only for :attr:`WSMsgType.CLOSE` messages, contains optional message description. .. method:: json(*, loads=json.loads) Returns parsed JSON data. :param loads: optional JSON decoder function. aiohttp-3.0.1/docs/web_advanced.rst0000666000000000000000000007041113240304665015407 0ustar 00000000000000.. _aiohttp-web-advanced: Web Server Advanced =================== .. currentmodule:: aiohttp.web Web Handler Cancellation ------------------------ ..
warning:: :term:`web-handler` execution could be canceled on every ``await`` if client drops connection without reading entire response's BODY. The behavior is very different from classic WSGI frameworks like Flask and Django. Sometimes it is a desirable behavior: on processing a ``GET`` request the code might fetch data from database or other web resource, the fetching is potentially slow. Canceling this fetch is very good: the peer dropped connection already, there is no reason to waste time and resources (memory etc) by getting data from DB without any chance to send it back to peer. But sometimes the cancellation is bad: on a ``POST`` request it is very often necessary to save data to DB regardless of peer closing. Cancellation prevention could be implemented in several ways: * Applying :func:`asyncio.shield` to coroutine that saves data into DB. * Spawning a new task for DB saving * Using aiojobs_ or other third party library. :func:`asyncio.shield` works pretty well. The only disadvantage is you need to split the web handler into exactly two async functions: one for the handler itself and the other for protected code. For example the following snippet is not safe:: async def handler(request): await asyncio.shield(write_to_redis(request)) await asyncio.shield(write_to_postgres(request)) return web.Response('OK') Cancellation might occur just after saving data in REDIS, ``write_to_postgres`` will not be called. Spawning a new task is much worse: there is no place to ``await`` spawned tasks:: async def handler(request): request.loop.create_task(write_to_redis(request)) return web.Response('OK') In this case errors from ``write_to_redis`` are not awaited, it leads to many asyncio log messages *Future exception was never retrieved* and *Task was destroyed but it is pending!*. Moreover on :ref:`aiohttp-web-graceful-shutdown` phase *aiohttp* doesn't wait for these tasks, you have a great chance to lose very important data.
On other hand aiojobs_ provides an API for spawning new jobs and awaiting their results etc. It stores all scheduled activity in internal data structures and could terminate them gracefully:: from aiojobs.aiohttp import setup, spawn async def coro(timeout): await asyncio.sleep(timeout) # do something in background async def handler(request): await spawn(request, coro()) return web.Response() app = web.Application() setup(app) app.router.add_get('/', handler) All not finished jobs will be terminated on :attr:`aiohttp.web.Application.on_cleanup` signal. To prevent cancellation of the whole :term:`web-handler` use ``@atomic`` decorator:: from aiojobs.aiohttp import atomic @atomic async def handler(request): await write_to_db() return web.Response() app = web.Application() setup(app) app.router.add_post('/', handler) It prevents all ``handler`` async function from cancellation, ``write_to_db`` will be never interrupted. .. _aiojobs: http://aiojobs.readthedocs.io/en/latest/ Custom Routing Criteria ----------------------- Sometimes you need to register :ref:`handlers ` on more complex criteria than simply a *HTTP method* and *path* pair. Although :class:`UrlDispatcher` does not support any extra criteria, routing based on custom conditions can be accomplished by implementing a second layer of routing in your application. 
The following example shows custom routing based on the *HTTP Accept* header:: class AcceptChooser: def __init__(self): self._accepts = {} async def do_route(self, request): for accept in request.headers.getall('ACCEPT', []): acceptor = self._accepts.get(accept) if acceptor is not None: return (await acceptor(request)) raise HTTPNotAcceptable() def reg_acceptor(self, accept, handler): self._accepts[accept] = handler async def handle_json(request): # do json handling async def handle_xml(request): # do xml handling chooser = AcceptChooser() app.router.add_get('/', chooser.do_route) chooser.reg_acceptor('application/json', handle_json) chooser.reg_acceptor('application/xml', handle_xml) .. _aiohttp-web-static-file-handling: Static file handling -------------------- The best way to handle static files (images, JavaScripts, CSS files etc.) is using `Reverse Proxy`_ like `nginx`_ or `CDN`_ services. .. _Reverse Proxy: https://en.wikipedia.org/wiki/Reverse_proxy .. _nginx: https://nginx.org/ .. _CDN: https://en.wikipedia.org/wiki/Content_delivery_network But for development it's very convenient to handle static files by aiohttp server itself. To do it just register a new static route by :meth:`UrlDispatcher.add_static` call:: app.router.add_static('/prefix', path_to_static_folder) When a directory is accessed within a static route then the server responses to client with ``HTTP/403 Forbidden`` by default. Displaying folder index instead could be enabled with ``show_index`` parameter set to ``True``:: app.router.add_static('/prefix', path_to_static_folder, show_index=True) When a symlink from the static directory is accessed, the server responses to client with ``HTTP/404 Not Found`` by default. 
To allow the server to follow symlinks, parameter ``follow_symlinks`` should be set to ``True``:: app.router.add_static('/prefix', path_to_static_folder, follow_symlinks=True) When you want to enable cache busting, parameter ``append_version`` can be set to ``True`` Cache busting is the process of appending some form of file version hash to the filename of resources like JavaScript and CSS files. The performance advantage of doing this is that we can tell the browser to cache these files indefinitely without worrying about the client not getting the latest version when the file changes:: app.router.add_static('/prefix', path_to_static_folder, append_version=True) Template Rendering ------------------ :mod:`aiohttp.web` does not support template rendering out-of-the-box. However, there is a third-party library, :mod:`aiohttp_jinja2`, which is supported by the *aiohttp* authors. Using it is rather simple. First, setup a *jinja2 environment* with a call to :func:`aiohttp_jinja2.setup`:: app = web.Application() aiohttp_jinja2.setup(app, loader=jinja2.FileSystemLoader('/path/to/templates/folder')) After that you may use the template engine in your :ref:`handlers `. The most convenient way is to simply wrap your handlers with the :func:`aiohttp_jinja2.template` decorator:: @aiohttp_jinja2.template('tmpl.jinja2') def handler(request): return {'name': 'Andrew', 'surname': 'Svetlov'} If you prefer the `Mako`_ template engine, please take a look at the `aiohttp_mako`_ library. .. _Mako: http://www.makotemplates.org/ .. _aiohttp_mako: https://github.com/aio-libs/aiohttp_mako .. _aiohttp-web-websocket-read-same-task: Reading from the same task in WebSockets ---------------------------------------- Reading from the *WebSocket* (``await ws.receive()``) **must only** be done inside the request handler *task*; however, writing (``ws.send_str(...)``) to the *WebSocket*, closing (``await ws.close()``) and canceling the handler task may be delegated to other tasks. 
See also :ref:`FAQ section `. :mod:`aiohttp.web` creates an implicit :class:`asyncio.Task` for handling every incoming request. .. note:: While :mod:`aiohttp.web` itself only supports *WebSockets* without downgrading to *LONG-POLLING*, etc., our team supports SockJS_, an aiohttp-based library for implementing SockJS-compatible server code. .. _SockJS: https://github.com/aio-libs/sockjs .. warning:: Parallel reads from websocket are forbidden, there is no possibility to call :meth:`aiohttp.web.WebSocketResponse.receive` from two tasks. See :ref:`FAQ section ` for instructions how to solve the problem. .. _aiohttp-web-data-sharing: Data Sharing aka No Singletons Please ------------------------------------- :mod:`aiohttp.web` discourages the use of *global variables*, aka *singletons*. Every variable should have its own context that is *not global*. So, :class:`aiohttp.web.Application` and :class:`aiohttp.web.Request` support a :class:`collections.abc.MutableMapping` interface (i.e. they are dict-like objects), allowing them to be used as data stores. For storing *global-like* variables, feel free to save them in an :class:`~.Application` instance:: app['my_private_key'] = data and get it back in the :term:`web-handler`:: async def handler(request): data = request.app['my_private_key'] Variables that are only needed for the lifetime of a :class:`~.Request`, can be stored in a :class:`~.Request`:: async def handler(request): request['my_private_key'] = "data" ... This is mostly useful for :ref:`aiohttp-web-middlewares` and :ref:`aiohttp-web-signals` handlers to store data for further processing by the next handlers in the chain. :class:`aiohttp.web.StreamResponse` and :class:`aiohttp.web.Response` objects also support :class:`collections.abc.MutableMapping` interface. 
This is useful when you want to share data with signals and middlewares once all the work in the handler is done:: async def handler(request): [ do all the work ] response['my_metric'] = 123 return response To avoid clashing with other *aiohttp* users and third-party libraries, please choose a unique key name for storing data. If your code is published on PyPI, then the project name is most likely unique and safe to use as the key. Otherwise, something based on your company name/url would be satisfactory (i.e. ``org.company.app``). .. _aiohttp-web-middlewares: Middlewares ----------- :mod:`aiohttp.web` provides a powerful mechanism for customizing :ref:`request handlers` via *middlewares*. A *middleware* is a coroutine that can modify either the request or response. For example, here's a simple *middleware* which appends ``' wink'`` to the response:: from aiohttp.web import middleware @middleware async def middleware(request, handler): resp = await handler(request) resp.text = resp.text + ' wink' return resp (Note: this example won't work with streamed responses or websockets) Every *middleware* should accept two parameters, a :class:`request ` instance and a *handler*, and return the response. When creating an :class:`Application`, these *middlewares* are passed to the keyword-only ``middlewares`` parameter:: app = web.Application(middlewares=[middleware_1, middleware_2]) Internally, a single :ref:`request handler ` is constructed by applying the middleware chain to the original handler in reverse order, and is called by the :class:`RequestHandler` as a regular *handler*. Since *middlewares* are themselves coroutines, they may perform extra ``await`` calls when creating a new handler, e.g. call database etc. *Middlewares* usually call the handler, but they may choose to ignore it, e.g. displaying *403 Forbidden page* or raising :exc:`HTTPForbidden` exception if the user does not have permissions to access the underlying resource. 
They may also render errors raised by the handler, perform some pre- or post-processing like handling *CORS* and so on. The following code demonstrates middlewares execution order:: from aiohttp import web def test(request): print('Handler function called') return web.Response(text="Hello") @web.middleware async def middleware1(request, handler): print('Middleware 1 called') response = await handler(request) print('Middleware 1 finished') return response @web.middleware async def middleware2(request, handler): print('Middleware 2 called') response = await handler(request) print('Middleware 2 finished') return response app = web.Application(middlewares=[middleware1, middleware2]) app.router.add_get('/', test) web.run_app(app) Produced output:: Middleware 1 called Middleware 2 called Handler function called Middleware 2 finished Middleware 1 finished Example ^^^^^^^ A common use of middlewares is to implement custom error pages. The following example will render 404 errors using a JSON response, as might be appropriate a JSON REST service:: from aiohttp import web @web.middleware async def error_middleware(request, handler): try: response = await handler(request) if response.status != 404: return response message = response.message except web.HTTPException as ex: if ex.status != 404: raise message = ex.reason return web.json_response({'error': message}) app = web.Application(middlewares=[error_middleware]) Old Style Middleware ^^^^^^^^^^^^^^^^^^^^ .. deprecated:: 2.3 Prior to *v2.3* middleware required an outer *middleware factory* which returned the middleware coroutine. Since *v2.3* this is not required; instead the ``@middleware`` decorator should be used. Old style middleware (with an outer factory and no ``@middleware`` decorator) is still supported. Furthermore, old and new style middleware can be mixed. A *middleware factory* is simply a coroutine that implements the logic of a *middleware*. 
For example, here's a trivial *middleware factory*:: async def middleware_factory(app, handler): async def middleware_handler(request): resp = await handler(request) resp.text = resp.text + ' wink' return resp return middleware_handler A *middleware factory* should accept two parameters, an :class:`app ` instance and a *handler*, and return a new handler. .. note:: Both the outer *middleware_factory* coroutine and the inner *middleware_handler* coroutine are called for every request handled. *Middleware factories* should return a new handler that has the same signature as a :ref:`request handler `. That is, it should accept a single :class:`Request` instance and return a :class:`Response`, or raise an exception. .. _aiohttp-web-signals: Signals ------- Although :ref:`middlewares ` can customize :ref:`request handlers` before or after a :class:`Response` has been prepared, they can't customize a :class:`Response` **while** it's being prepared. For this :mod:`aiohttp.web` provides *signals*. For example, a middleware can only change HTTP headers for *unprepared* responses (see :meth:`~aiohttp.web.StreamResponse.prepare`), but sometimes we need a hook for changing HTTP headers for streamed responses and WebSockets. This can be accomplished by subscribing to the :attr:`~aiohttp.web.Application.on_response_prepare` signal:: async def on_prepare(request, response): response.headers['My-Header'] = 'value' app.on_response_prepare.append(on_prepare) Additionally, the :attr:`~aiohttp.web.Application.on_startup` and :attr:`~aiohttp.web.Application.on_cleanup` signals can be subscribed to for application component setup and tear down accordingly. 
The following example will properly initialize and dispose an aiopg connection engine:: from aiopg.sa import create_engine async def create_aiopg(app): app['pg_engine'] = await create_engine( user='postgre', database='postgre', host='localhost', port=5432, password='' ) async def dispose_aiopg(app): app['pg_engine'].close() await app['pg_engine'].wait_closed() app.on_startup.append(create_aiopg) app.on_cleanup.append(dispose_aiopg) Signal handlers should not return a value but may modify incoming mutable parameters. Signal handlers will be run sequentially, in order they were added. If handler is asynchronous, it will be awaited before calling next one. .. warning:: Signals API has provisional status, meaning it may be changed in future releases. Signal subscription and sending will most likely be the same, but signal object creation is subject to change. As long as you are not creating new signals, but simply reusing existing ones, you will not be affected. .. _aiohttp-web-nested-applications: Nested applications ------------------- Sub applications are designed for solving the problem of the big monolithic code base. Let's assume we have a project with own business logic and tools like administration panel and debug toolbar. Administration panel is a separate application by its own nature but all toolbar URLs are served by prefix like ``/admin``. Thus we'll create a totally separate application named ``admin`` and connect it to main app with prefix by :meth:`~aiohttp.web.Application.add_subapp`:: admin = web.Application() # setup admin routes, signals and middlewares app.add_subapp('/admin/', admin) Middlewares and signals from ``app`` and ``admin`` are chained. It means that if URL is ``'/admin/something'`` middlewares from ``app`` are applied first and ``admin.middlewares`` are the next in the call chain. 
The same is going for :attr:`~aiohttp.web.Application.on_response_prepare` signal -- the signal is delivered to both top level ``app`` and ``admin`` if processing URL is routed to ``admin`` sub-application. Common signals like :attr:`~aiohttp.web.Application.on_startup`, :attr:`~aiohttp.web.Application.on_shutdown` and :attr:`~aiohttp.web.Application.on_cleanup` are delivered to all registered sub-applications. The passed parameter is sub-application instance, not top-level application. Third level sub-applications can be nested into second level ones -- there are no limitation for nesting level. Url reversing for sub-applications should generate urls with proper prefix. But for getting URL sub-application's router should be used:: admin = web.Application() admin.router.add_get('/resource', handler, name='name') app.add_subapp('/admin/', admin) url = admin.router['name'].url_for() The generated ``url`` from example will have a value ``URL('/admin/resource')``. If main application should do URL reversing for sub-application it could use the following explicit technique:: admin = web.Application() admin.router.add_get('/resource', handler, name='name') app.add_subapp('/admin/', admin) app['admin'] = admin async def handler(request): # main application's handler admin = request.app['admin'] url = admin.router['name'].url_for() .. _aiohttp-web-expect-header: *Expect* Header --------------- :mod:`aiohttp.web` supports *Expect* header. By default it sends ``HTTP/1.1 100 Continue`` line to client, or raises :exc:`HTTPExpectationFailed` if header value is not equal to "100-continue". It is possible to specify custom *Expect* header handler on per route basis. This handler gets called if *Expect* header exist in request after receiving all headers and before processing application's :ref:`aiohttp-web-middlewares` and route handler. Handler can return *None*, in that case the request processing continues as usual. 
If handler returns an instance of class :class:`StreamResponse`, *request handler* uses it as response. Also handler can raise a subclass of :exc:`HTTPException`. In this case all further processing will not happen and client will receive appropriate http response. .. note:: A server that does not understand or is unable to comply with any of the expectation values in the Expect field of a request MUST respond with appropriate error status. The server MUST respond with a 417 (Expectation Failed) status if any of the expectations cannot be met or, if there are other problems with the request, some other 4xx status. http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.20 If all checks pass, the custom handler *must* write a *HTTP/1.1 100 Continue* status code before returning. The following example shows how to set up a custom handler for the *Expect* header:: async def check_auth(request): if request.version != aiohttp.HttpVersion11: return expect = request.headers.get('EXPECT') if expect != '100-continue': raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect) if request.headers.get('AUTHORIZATION') is None: raise HTTPForbidden() request.transport.write(b"HTTP/1.1 100 Continue\r\n\r\n") async def hello(request): return web.Response(body=b"Hello, world") app = web.Application() app.router.add_get('/', hello, expect_handler=check_auth) .. _aiohttp-web-custom-resource: Custom resource implementation ------------------------------ To register custom resource use :meth:`UrlDispatcher.register_resource`. Resource instance must implement `AbstractResource` interface. .. _aiohttp-web-app-runners: Application runners ------------------- :func:`run_app` provides a simple *blocking* API for running an :class:`Application`. For starting the application *asynchronously* or serving on multiple HOST/PORT :class:`AppRunner` exists.
The simple startup code for serving HTTP site on ``'localhost'``, port ``8080`` looks like:: runner = web.AppRunner(app) await runner.setup() site = web.TCPSite(runner, 'localhost', 8080) await site.start() To stop serving call :meth:`AppRunner.cleanup`:: await runner.cleanup() .. versionadded:: 3.0 .. _aiohttp-web-graceful-shutdown: Graceful shutdown ------------------ Stopping *aiohttp web server* by just closing all connections is not always satisfactory. The problem is: if application supports :term:`websocket`\s or *data streaming* it most likely has open connections at server shutdown time. The *library* has no knowledge how to close them gracefully but developer can help by registering :attr:`Application.on_shutdown` signal handler and call the signal on *web server* closing. Developer should keep a list of opened connections (:class:`Application` is a good candidate). The following :term:`websocket` snippet shows an example for websocket handler:: app = web.Application() app['websockets'] = [] async def websocket_handler(request): ws = web.WebSocketResponse() await ws.prepare(request) request.app['websockets'].append(ws) try: async for msg in ws: ... finally: request.app['websockets'].remove(ws) return ws Signal handler may look like:: async def on_shutdown(app): for ws in app['websockets']: await ws.close(code=WSCloseCode.GOING_AWAY, message='Server shutdown') app.on_shutdown.append(on_shutdown) Both :func:`run_app` and :meth:`AppRunner.cleanup` call shutdown signal handlers. .. _aiohttp-web-background-tasks: Background tasks ----------------- Sometimes there's a need to perform some asynchronous operations just after application start-up. Even more, in some sophisticated systems there could be a need to run some background tasks in the event loop along with the application's request handler. Such as listening to message queue or other network message/event sources (e.g. ZeroMQ, Redis Pub/Sub, AMQP, etc.) 
to react to received messages within the application. For example the background task could listen to ZeroMQ on :data:`zmq.SUB` socket, process and forward retrieved messages to clients connected via WebSocket that are stored somewhere in the application (e.g. in the :obj:`application['websockets']` list). To run such short and long running background tasks aiohttp provides an ability to register :attr:`Application.on_startup` signal handler(s) that will run along with the application's request handler. For example there's a need to run one quick task and two long running tasks that will live till the application is alive. The appropriate background tasks could be registered as an :attr:`Application.on_startup` signal handlers as shown in the example below:: async def listen_to_redis(app): try: sub = await aioredis.create_redis(('localhost', 6379), loop=app.loop) ch, *_ = await sub.subscribe('news') async for msg in ch.iter(encoding='utf-8'): # Forward message to all connected websockets: for ws in app['websockets']: ws.send_str('{}: {}'.format(ch.name, msg)) except asyncio.CancelledError: pass finally: await sub.unsubscribe(ch.name) await sub.quit() async def start_background_tasks(app): app['redis_listener'] = app.loop.create_task(listen_to_redis(app)) async def cleanup_background_tasks(app): app['redis_listener'].cancel() await app['redis_listener'] app = web.Application() app.on_startup.append(start_background_tasks) app.on_cleanup.append(cleanup_background_tasks) web.run_app(app) The task :func:`listen_to_redis` will run forever. To shut it down correctly :attr:`Application.on_cleanup` signal handler may be used to send a cancellation to it. Handling error pages -------------------- Pages like *404 Not Found* and *500 Internal Error* could be handled by custom middleware, see :ref:`aiohttp-tutorial-middlewares` for details. .. 
_aiohttp-web-forwarded-support: Deploying behind a Proxy ------------------------ As discussed in :ref:`aiohttp-deployment` the preferable way is deploying *aiohttp* web server behind a *Reverse Proxy Server* like :term:`nginx` for production usage. In this way properties like :attr:`~BaseRequest.scheme` :attr:`~BaseRequest.host` and :attr:`~BaseRequest.remote` are incorrect. Real values should be given from proxy server, usually either ``Forwarded`` or old-fashion ``X-Forwarded-For``, ``X-Forwarded-Host``, ``X-Forwarded-Proto`` HTTP headers are used. *aiohttp* does not take *forwarded* headers into account by default because it produces *security issue*: HTTP client might add these headers too, pushing non-trusted data values. That's why *aiohttp server* should setup *forwarded* headers in custom middleware in tight conjunction with *reverse proxy configuration*. For changing :attr:`~BaseRequest.scheme` :attr:`~BaseRequest.host` and :attr:`~BaseRequest.remote` the middleware might use :meth:`~BaseRequest.clone`. .. seealso:: https://github.com/aio-libs/aiohttp-remotes provides secure helpers for modifying *scheme*, *host* and *remote* attributes according to ``Forwarded`` and ``X-Forwarded-*`` HTTP headers. Swagger support --------------- `aiohttp-swagger `_ is a library that allow to add Swagger documentation and embed the Swagger-UI into your :mod:`aiohttp.web` project. CORS support ------------ :mod:`aiohttp.web` itself does not support `Cross-Origin Resource Sharing `_, but there is an aiohttp plugin for it: `aiohttp_cors `_. Debug Toolbar ------------- `aiohttp-debugtoolbar`_ is a very useful library that provides a debugging toolbar while you're developing an :mod:`aiohttp.web` application. Install it via ``pip``: .. 
code-block:: shell $ pip install aiohttp_debugtoolbar After that attach the :mod:`aiohttp_debugtoolbar` middleware to your :class:`aiohttp.web.Application` and call :func:`aiohttp_debugtoolbar.setup`:: import aiohttp_debugtoolbar from aiohttp_debugtoolbar import toolbar_middleware_factory app = web.Application(middlewares=[toolbar_middleware_factory]) aiohttp_debugtoolbar.setup(app) The toolbar is ready to use. Enjoy!!! .. _aiohttp-debugtoolbar: https://github.com/aio-libs/aiohttp_debugtoolbar Dev Tools --------- `aiohttp-devtools`_ provides a couple of tools to simplify development of :mod:`aiohttp.web` applications. Install via ``pip``: .. code-block:: shell $ pip install aiohttp-devtools * ``runserver`` provides a development server with auto-reload, live-reload, static file serving and aiohttp_debugtoolbar_ integration. * ``start`` is a `cookiecutter command which does the donkey work of creating new :mod:`aiohttp.web` Applications. Documentation and a complete tutorial of creating and running an app locally are available at `aiohttp-devtools`_. .. _aiohttp-devtools: https://github.com/aio-libs/aiohttp-devtools aiohttp-3.0.1/docs/web_lowlevel.rst0000666000000000000000000000503413240304665015472 0ustar 00000000000000.. _aiohttp-web-lowlevel: Low Level Server ================ .. currentmodule:: aiohttp.web This topic describes :mod:`aiohttp.web` based *low level* API. Abstract -------- Sometimes user don't need high-level concepts introduced in :ref:`aiohttp-web`: applications, routers, middlewares and signals. All what is needed is supporting asynchronous callable which accepts a request and returns a response object. This is done by introducing :class:`aiohttp.web.Server` class which serves a *protocol factory* role for :meth:`asyncio.AbstractEventLoop.create_server` and bridges data stream to *web handler* and sends result back. Low level *web handler* should accept the single :class:`BaseRequest` parameter and performs one of the following actions: 1. 
Return a :class:`Response` with the whole HTTP body stored in memory. 2. Create a :class:`StreamResponse`, send headers by :meth:`StreamResponse.prepare` call, send data chunks by :meth:`StreamResponse.write` and return finished response. 3. Raise :class:`HTTPException` derived exception (see :ref:`aiohttp-web-exceptions` section). All other exceptions not derived from :class:`HTTPException` lead to *500 Internal Server Error* response. 4. Initiate and process Web-Socket connection by :class:`WebSocketResponse` using (see :ref:`aiohttp-web-websockets`). Run a Basic Low-Level Server ---------------------------- The following code demonstrates a very trivial usage example:: import asyncio from aiohttp import web async def handler(request): return web.Response(text="OK") async def main(loop): server = web.Server(handler) await loop.create_server(server, "127.0.0.1", 8080) print("======= Serving on http://127.0.0.1:8080/ ======") # pause here for very long time by serving HTTP requests and # waiting for keyboard interruption await asyncio.sleep(100*3600) loop = asyncio.get_event_loop() try: loop.run_until_complete(main(loop)) except KeyboardInterrupt: pass loop.close() In the snippet we have ``handler`` which returns a regular :class:`Response` with ``"OK"`` in BODY. This *handler* is processed by ``server`` (:class:`Server` which acts as *protocol factory*). Network communication is created by ``loop.create_server`` call to serve ``http://127.0.0.1:8080/``. The handler should process every request: ``GET``, ``POST``, Web-Socket for every *path*. The example is very basic: it always returns a ``200 OK`` response, real life code should be much more complex. aiohttp-3.0.1/docs/web_quickstart.rst0000666000000000000000000005276713240304665016045 0ustar 00000000000000.. _aiohttp-web-quickstart: Web Server Quickstart ===================== ..
currentmodule:: aiohttp.web Run a Simple Web Server ----------------------- In order to implement a web server, first create a :ref:`request handler `. A request handler is a :ref:`coroutine ` or regular function that accepts a :class:`Request` instance as its only parameter and returns a :class:`Response` instance:: from aiohttp import web async def hello(request): return web.Response(text="Hello, world") Next, create an :class:`Application` instance and register the request handler with the application's :class:`router ` on a particular *HTTP method* and *path*:: app = web.Application() app.router.add_get('/', hello) After that, run the application by :func:`run_app` call:: web.run_app(app) That's it. Now, head over to ``http://localhost:8080/`` to see the results. .. seealso:: :ref:`aiohttp-web-graceful-shutdown` section explains what :func:`run_app` does and how to implement complex server initialization/finalization from scratch. :ref:`aiohttp-web-app-runners` for more handling more complex cases like *asynchronous* web application serving and multiple hosts support. .. _aiohttp-web-cli: Command Line Interface (CLI) ---------------------------- :mod:`aiohttp.web` implements a basic CLI for quickly serving an :class:`Application` in *development* over TCP/IP: .. code-block:: shell $ python -m aiohttp.web -H localhost -P 8080 package.module:init_func ``package.module:init_func`` should be an importable :term:`callable` that accepts a list of any non-parsed command-line arguments and returns an :class:`Application` instance after setting it up:: def init_func(argv): app = web.Application() app.router.add_get("/", index_handler) return app .. _aiohttp-web-handler: Handler ------- A request handler can be any :term:`callable` that accepts a :class:`Request` instance as its only argument and returns a :class:`StreamResponse` derived (e.g. 
:class:`Response`) instance:: def handler(request): return web.Response() A handler **may** also be a :ref:`coroutine`, in which case :mod:`aiohttp.web` will ``await`` the handler:: async def handler(request): return web.Response() Handlers are set up to handle requests by registering them with the :attr:`Application.router` on a particular route (*HTTP method* and *path* pair) using methods like :class:`UrlDispatcher.add_get` and :class:`UrlDispatcher.add_post`:: app.router.add_get('/', handler) app.router.add_post('/post', post_handler) app.router.add_put('/put', put_handler) :meth:`~UrlDispatcher.add_route` also supports the wildcard *HTTP method*, allowing a handler to serve incoming requests on a *path* having **any** *HTTP method*:: app.router.add_route('*', '/path', all_handler) The *HTTP method* can be queried later in the request handler using the :attr:`Request.method` property. By default endpoints added with :meth:`~UrlDispatcher.add_get` will accept ``HEAD`` requests and return the same response headers as they would for a ``GET`` request. You can also deny ``HEAD`` requests on a route:: app.router.add_get('/', handler, allow_head=False) Here ``handler`` won't be called and the server will respond with ``405``. .. note:: This is a change as of **aiohttp v2.0** to act in accordance with `RFC 7231 `_. Previous version always returned ``405`` for ``HEAD`` requests to routes added with :meth:`~UrlDispatcher.add_get`. If you have handlers which perform lots of processing to write the response body you may wish to improve performance by skipping that processing in the case of ``HEAD`` requests while still taking care to respond with the same headers as with ``GET`` requests. .. _aiohttp-web-resource-and-route: Resources and Routes -------------------- Internally *router* is a list of *resources*. Resource is an entry in *route table* which corresponds to requested URL. Resource in turn has at least one *route*. 
Route corresponds to handling *HTTP method* by calling *web handler*. :meth:`UrlDispatcher.add_get` / :meth:`UrlDispatcher.add_post` and family are plain shortcuts for :meth:`UrlDispatcher.add_route`. :meth:`UrlDispatcher.add_route` in turn is just a shortcut for pair of :meth:`UrlDispatcher.add_resource` and :meth:`Resource.add_route`:: resource = app.router.add_resource(path, name=name) route = resource.add_route(method, handler) return route .. seealso:: :ref:`aiohttp-router-refactoring-021` for more details .. _aiohttp-web-variable-handler: Variable Resources ^^^^^^^^^^^^^^^^^^ Resource may have *variable path* also. For instance, a resource with the path ``'/a/{name}/c'`` would match all incoming requests with paths such as ``'/a/b/c'``, ``'/a/1/c'``, and ``'/a/etc/c'``. A variable *part* is specified in the form ``{identifier}``, where the ``identifier`` can be used later in a :ref:`request handler ` to access the matched value for that *part*. This is done by looking up the ``identifier`` in the :attr:`Request.match_info` mapping:: async def variable_handler(request): return web.Response( text="Hello, {}".format(request.match_info['name'])) resource = app.router.add_resource('/{name}') resource.add_route('GET', variable_handler) By default, each *part* matches the regular expression ``[^{}/]+``. You can also specify a custom regex in the form ``{identifier:regex}``:: resource = app.router.add_resource(r'/{name:\d+}') .. note:: Regex should match against *percent encoded* URL (``request.raw_path``). E.g. *space character* is encoded as ``%20``. According to `RFC 3986 `_ allowed in path symbols are:: allowed = unreserved / pct-encoded / sub-delims / ":" / "@" / "/" pct-encoded = "%" HEXDIG HEXDIG unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" sub-delims = "!" / "$" / "&" / "'" / "(" / ")" / "*" / "+" / "," / ";" / "=" .. 
_aiohttp-web-named-routes: Reverse URL Constructing using Named Resources ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Routes can also be given a *name*:: resource = app.router.add_resource('/root', name='root') Which can then be used to access and build a *URL* for that resource later (e.g. in a :ref:`request handler `):: >>> request.app.router['root'].url_for().with_query({"a": "b", "c": "d"}) URL('/root?a=b&c=d') A more interesting example is building *URLs* for :ref:`variable resources `:: app.router.add_resource(r'/{user}/info', name='user-info') In this case you can also pass in the *parts* of the route:: >>> request.app.router['user-info'].url_for(user='john_doe')\ ... .with_query("a=b") '/john_doe/info?a=b' Organizing Handlers in Classes ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ As discussed above, :ref:`handlers ` can be first-class functions or coroutines:: async def hello(request): return web.Response(text="Hello, world") app.router.add_get('/', hello) But sometimes it's convenient to group logically similar handlers into a Python *class*. Since :mod:`aiohttp.web` does not dictate any implementation details, application developers can organize handlers in classes if they so wish:: class Handler: def __init__(self): pass def handle_intro(self, request): return web.Response(text="Hello, world") async def handle_greeting(self, request): name = request.match_info.get('name', "Anonymous") txt = "Hello, {}".format(name) return web.Response(text=txt) handler = Handler() app.router.add_get('/intro', handler.handle_intro) app.router.add_get('/greet/{name}', handler.handle_greeting) .. _aiohttp-web-class-based-views: Class Based Views ^^^^^^^^^^^^^^^^^ :mod:`aiohttp.web` has support for django-style class based views. 
You can derive from :class:`View` and define methods for handling http requests:: class MyView(web.View): async def get(self): return await get_resp(self.request) async def post(self): return await post_resp(self.request) Handlers should be coroutines accepting self only and returning response object as regular :term:`web-handler`. Request object can be retrieved by :attr:`View.request` property. After implementing the view (``MyView`` from example above) should be registered in application's router:: app.router.add_view('/path/to', MyView) Example will process GET and POST requests for */path/to* but raise *405 Method not allowed* exception for unimplemented HTTP methods. Resource Views ^^^^^^^^^^^^^^ *All* registered resources in a router can be viewed using the :meth:`UrlDispatcher.resources` method:: for resource in app.router.resources(): print(resource) Similarly, a *subset* of the resources that were registered with a *name* can be viewed using the :meth:`UrlDispatcher.named_resources` method:: for name, resource in app.router.named_resources().items(): print(name, resource) .. _aiohttp-web-alternative-routes-definition: Alternative ways for registering routes --------------------------------------- Code examples shown above use *imperative* style for adding new routes: they call ``app.router.add_get(...)`` etc. There are two alternatives: route tables and route decorators. Route tables look like Django way:: async def handle_get(request): ... async def handle_post(request): ... app.router.add_routes([web.get('/get', handle_get), web.post('/post', handle_post), The snippet calls :meth:`~aiohttp.web.UrlDispather.add_routes` to register a list of *route definitions* (:class:`aiohttp.web.RouteDef` instances) created by :func:`aiohttp.web.get` or :func:`aiohttp.web.post` functions. .. seealso:: :ref:`aiohttp-web-route-def` reference. 
Route decorators are closer to Flask approach:: routes = web.RouteTableDef() @routes.get('/get') async def handle_get(request): ... @routes.post('/post') async def handle_post(request): ... app.router.add_routes(routes) It is also possible to use decorators with class-based views:: routes = web.RouteTableDef() @routes.view("/view") class MyView(web.View): async def get(self): ... async def post(self): ... app.router.add_routes(routes) The example creates a :class:`aiohttp.web.RouteTableDef` container first. The container is a list-like object with additional decorators :meth:`aiohttp.web.RouteTableDef.get`, :meth:`aiohttp.web.RouteTableDef.post` etc. for registering new routes. After filling the container :meth:`~aiohttp.web.UrlDispather.add_routes` is used for adding registered *route definitions* into application's router. .. seealso:: :ref:`aiohttp-web-route-table-def` reference. All tree ways (imperative calls, route tables and decorators) are equivalent, you could use what do you prefer or even mix them on your own. .. versionadded:: 2.3 JSON Response ------------- It is a common case to return JSON data in response, :mod:`aiohttp.web` provides a shortcut for returning JSON -- :func:`aiohttp.web.json_response`:: def handler(request): data = {'some': 'data'} return web.json_response(data) The shortcut method returns :class:`aiohttp.web.Response` instance so you can for example set cookies before returning it from handler. User Sessions ------------- Often you need a container for storing user data across requests. The concept is usually called a *session*. 
:mod:`aiohttp.web` has no built-in concept of a *session*, however, there is a third-party library, :mod:`aiohttp_session`, that adds *session* support:: import asyncio import time import base64 from cryptography import fernet from aiohttp import web from aiohttp_session import setup, get_session, session_middleware from aiohttp_session.cookie_storage import EncryptedCookieStorage async def handler(request): session = await get_session(request) last_visit = session['last_visit'] if 'last_visit' in session else None text = 'Last visited: {}'.format(last_visit) return web.Response(text=text) def make_app(): app = web.Application() # secret_key must be 32 url-safe base64-encoded bytes fernet_key = fernet.Fernet.generate_key() secret_key = base64.urlsafe_b64decode(fernet_key) setup(app, EncryptedCookieStorage(secret_key)) app.router.add_route('GET', '/', handler) return app web.run_app(make_app()) .. _aiohttp-web-forms: HTTP Forms ---------- HTTP Forms are supported out of the box. If form's method is ``"GET"`` (``
    ``) use :attr:`Request.query` for getting form data. To access form data with ``"POST"`` method use :meth:`Request.post` or :meth:`Request.multipart`. :meth:`Request.post` accepts both ``'application/x-www-form-urlencoded'`` and ``'multipart/form-data'`` form's data encoding (e.g. ````). It stores files data in temporary directory. If `client_max_size` is specified `post` raises `ValueError` exception. For efficiency use :meth:`Request.multipart`, It is especially effective for uploading large files (:ref:`aiohttp-web-file-upload`). Values submitted by the following form: .. code-block:: html
    could be accessed as:: async def do_login(request): data = await request.post() login = data['login'] password = data['password'] .. _aiohttp-web-file-upload: File Uploads ------------ :mod:`aiohttp.web` has built-in support for handling files uploaded from the browser. First, make sure that the HTML ``
    `` element has its *enctype* attribute set to ``enctype="multipart/form-data"``. As an example, here is a form that accepts an MP3 file: .. code-block:: html
    Then, in the :ref:`request handler ` you can access the file input field as a :class:`FileField` instance. :class:`FileField` is simply a container for the file as well as some of its metadata:: async def store_mp3_handler(request): # WARNING: don't do that if you plan to receive large files! data = await request.post() mp3 = data['mp3'] # .filename contains the name of the file in string format. filename = mp3.filename # .file contains the actual file data that needs to be stored somewhere. mp3_file = data['mp3'].file content = mp3_file.read() return web.Response(body=content, headers=MultiDict( {'CONTENT-DISPOSITION': mp3_file})) You might have noticed a big warning in the example above. The general issue is that :meth:`Request.post` reads the whole payload in memory, resulting in possible :abbr:`OOM (Out Of Memory)` errors. To avoid this, for multipart uploads, you should use :meth:`Request.multipart` which returns a :ref:`multipart reader `:: async def store_mp3_handler(request): reader = await request.multipart() # /!\ Don't forget to validate your inputs /!\ # reader.next() will `yield` the fields of your form field = await reader.next() assert field.name == 'name' name = await field.read(decode=True) field = await reader.next() assert field.name == 'mp3' filename = field.filename # You cannot rely on Content-Length if transfer is chunked. size = 0 with open(os.path.join('/spool/yarrr-media/mp3/', filename), 'wb') as f: while True: chunk = await field.read_chunk() # 8192 bytes by default. if not chunk: break size += len(chunk) f.write(chunk) return web.Response(text='{} sized of {} successfully stored' ''.format(filename, size)) .. _aiohttp-web-websockets: WebSockets ---------- :mod:`aiohttp.web` supports *WebSockets* out-of-the-box. 
To setup a *WebSocket*, create a :class:`WebSocketResponse` in a :ref:`request handler ` and then use it to communicate with the peer:: async def websocket_handler(request): ws = web.WebSocketResponse() await ws.prepare(request) async for msg in ws: if msg.type == aiohttp.WSMsgType.TEXT: if msg.data == 'close': await ws.close() else: await ws.send_str(msg.data + '/answer') elif msg.type == aiohttp.WSMsgType.ERROR: print('ws connection closed with exception %s' % ws.exception()) print('websocket connection closed') return ws The handler should be registered as HTTP GET processor:: app.router.add_get('/ws', websocket_handler) .. _aiohttp-web-exceptions: Exceptions ---------- :mod:`aiohttp.web` defines a set of exceptions for every *HTTP status code*. Each exception is a subclass of :class:`~HTTPException` and relates to a single HTTP status code. The exceptions are also a subclass of :class:`Response`, allowing you to either ``raise`` or ``return`` them in a :ref:`request handler ` for the same effect. .. warning:: Returning :class:`~HTTPException` or its subclasses is deprecated and will be removed in subsequent aiohttp versions. The following snippets are the same:: async def handler(request): return aiohttp.web.HTTPFound('/redirect') and:: async def handler(request): raise aiohttp.web.HTTPFound('/redirect') Each exception class has a status code according to :rfc:`2068`: codes with 100-300 are not really errors; 400s are client errors, and 500s are server errors. 
HTTP Exception hierarchy chart:: Exception HTTPException HTTPSuccessful * 200 - HTTPOk * 201 - HTTPCreated * 202 - HTTPAccepted * 203 - HTTPNonAuthoritativeInformation * 204 - HTTPNoContent * 205 - HTTPResetContent * 206 - HTTPPartialContent HTTPRedirection * 300 - HTTPMultipleChoices * 301 - HTTPMovedPermanently * 302 - HTTPFound * 303 - HTTPSeeOther * 304 - HTTPNotModified * 305 - HTTPUseProxy * 307 - HTTPTemporaryRedirect * 308 - HTTPPermanentRedirect HTTPError HTTPClientError * 400 - HTTPBadRequest * 401 - HTTPUnauthorized * 402 - HTTPPaymentRequired * 403 - HTTPForbidden * 404 - HTTPNotFound * 405 - HTTPMethodNotAllowed * 406 - HTTPNotAcceptable * 407 - HTTPProxyAuthenticationRequired * 408 - HTTPRequestTimeout * 409 - HTTPConflict * 410 - HTTPGone * 411 - HTTPLengthRequired * 412 - HTTPPreconditionFailed * 413 - HTTPRequestEntityTooLarge * 414 - HTTPRequestURITooLong * 415 - HTTPUnsupportedMediaType * 416 - HTTPRequestRangeNotSatisfiable * 417 - HTTPExpectationFailed * 421 - HTTPMisdirectedRequest * 422 - HTTPUnprocessableEntity * 424 - HTTPFailedDependency * 426 - HTTPUpgradeRequired * 428 - HTTPPreconditionRequired * 429 - HTTPTooManyRequests * 431 - HTTPRequestHeaderFieldsTooLarge * 451 - HTTPUnavailableForLegalReasons HTTPServerError * 500 - HTTPInternalServerError * 501 - HTTPNotImplemented * 502 - HTTPBadGateway * 503 - HTTPServiceUnavailable * 504 - HTTPGatewayTimeout * 505 - HTTPVersionNotSupported * 506 - HTTPVariantAlsoNegotiates * 507 - HTTPInsufficientStorage * 510 - HTTPNotExtended * 511 - HTTPNetworkAuthenticationRequired All HTTP exceptions have the same constructor signature:: HTTPNotFound(*, headers=None, reason=None, body=None, text=None, content_type=None) If not directly specified, *headers* will be added to the *default response headers*. 
Classes :class:`HTTPMultipleChoices`, :class:`HTTPMovedPermanently`, :class:`HTTPFound`, :class:`HTTPSeeOther`, :class:`HTTPUseProxy`, :class:`HTTPTemporaryRedirect` have the following constructor signature:: HTTPFound(location, *, headers=None, reason=None, body=None, text=None, content_type=None) where *location* is value for *Location HTTP header*. :class:`HTTPMethodNotAllowed` is constructed by providing the incoming unsupported method and list of allowed methods:: HTTPMethodNotAllowed(method, allowed_methods, *, headers=None, reason=None, body=None, text=None, content_type=None) aiohttp-3.0.1/docs/web_reference.rst0000666000000000000000000023701113240304665015601 0ustar 00000000000000.. _aiohttp-web-reference: Server Reference ================ .. module:: aiohttp.web .. currentmodule:: aiohttp.web .. _aiohttp-web-request: Request and Base Request ------------------------ The Request object contains all the information about an incoming HTTP request. :class:`BaseRequest` is used for :ref:`Low-Level Servers` (which have no applications, routers, signals and middlewares). :class:`Request` has an :attr:`Request.app` and :attr:`Request.match_info` attributes. A :class:`BaseRequest` / :class:`Request` are :obj:`dict` like objects, allowing them to be used for :ref:`sharing data` among :ref:`aiohttp-web-middlewares` and :ref:`aiohttp-web-signals` handlers. .. class:: BaseRequest .. attribute:: version *HTTP version* of request, Read-only property. Returns :class:`aiohttp.protocol.HttpVersion` instance. .. attribute:: method *HTTP method*, read-only property. The value is upper-cased :class:`str` like ``"GET"``, ``"POST"``, ``"PUT"`` etc. .. attribute:: url A :class:`~yarl.URL` instance with absolute URL to resource (*scheme*, *host* and *port* are included). .. note:: In case of malformed request (e.g. without ``"HOST"`` HTTP header) the absolute url may be unavailable. .. 
attribute:: rel_url A :class:`~yarl.URL` instance with relative URL to resource (contains *path*, *query* and *fragment* parts only, *scheme*, *host* and *port* are excluded). The property is equal to ``.url.relative()`` but is always present. .. seealso:: A note from :attr:`url`. .. attribute:: scheme A string representing the scheme of the request. The scheme is ``'https'`` if transport for request handling is *SSL*, ``'http'`` otherwise. The value could be overridden by :meth:`~BaseRequest.clone`. ``'http'`` otherwise. Read-only :class:`str` property. .. versionchanged:: 2.3 *Forwarded* and *X-Forwarded-Proto* are not used anymore. Call ``.clone(scheme=new_scheme)`` for setting up the value explicitly. .. seealso:: :ref:`aiohttp-web-forwarded-support` .. attribute:: secure Shorthand for ``request.url.scheme == 'https'`` Read-only :class:`bool` property. .. seealso:: :attr:`scheme` .. attribute:: forwarded A tuple containing all parsed Forwarded header(s). Makes an effort to parse Forwarded headers as specified by :rfc:`7239`: - It adds one (immutable) dictionary per Forwarded ``field-value``, i.e. per proxy. The element corresponds to the data in the Forwarded ``field-value`` added by the first proxy encountered by the client. Each subsequent item corresponds to those added by later proxies. - It checks that every value has valid syntax in general as specified in :rfc:`7239#section-4`: either a ``token`` or a ``quoted-string``. - It un-escapes ``quoted-pairs``. - It does NOT validate 'by' and 'for' contents as specified in :rfc:`7239#section-6`. - It does NOT validate ``host`` contents (Host ABNF). - It does NOT validate ``proto`` contents for valid URI scheme names. Returns a tuple containing one or more ``MappingProxy`` objects .. seealso:: :attr:`scheme` .. seealso:: :attr:`host` .. attribute:: host Host name of the request, resolved in this order: - Overridden value by :meth:`~BaseRequest.clone` call. 
- *Host* HTTP header - :func:`socket.gtfqdn` Read-only :class:`str` property. .. versionchanged:: 2.3 *Forwarded* and *X-Forwarded-Host* are not used anymore. Call ``.clone(host=new_host)`` for setting up the value explicitly. .. seealso:: :ref:`aiohttp-web-forwarded-support` .. attribute:: remote Originating IP address of a client initiated HTTP request. The IP is resolved through the following headers, in this order: - Overridden value by :meth:`~BaseRequest.clone` call. - Peer name of opened socket. Read-only :class:`str` property. Call ``.clone(remote=new_remote)`` for setting up the value explicitly. .. versionadded:: 2.3 .. seealso:: :ref:`aiohttp-web-forwarded-support` .. attribute:: path_qs The URL including PATH_INFO and the query string. e.g., ``/app/blog?id=10`` Read-only :class:`str` property. .. attribute:: path The URL including *PATH INFO* without the host or scheme. e.g., ``/app/blog``. The path is URL-unquoted. For raw path info see :attr:`raw_path`. Read-only :class:`str` property. .. attribute:: raw_path The URL including raw *PATH INFO* without the host or scheme. Warning, the path may be quoted and may contains non valid URL characters, e.g. ``/my%2Fpath%7Cwith%21some%25strange%24characters``. For unquoted version please take a look on :attr:`path`. Read-only :class:`str` property. .. attribute:: query A multidict with all the variables in the query string. Read-only :class:`~multidict.MultiDictProxy` lazy property. .. attribute:: query_string The query string in the URL, e.g., ``id=10`` Read-only :class:`str` property. .. attribute:: headers A case-insensitive multidict proxy with all headers. Read-only :class:`~multidict.CIMultiDictProxy` property. .. attribute:: raw_headers HTTP headers of response as unconverted bytes, a sequence of ``(key, value)`` pairs. .. attribute:: keep_alive ``True`` if keep-alive connection enabled by HTTP client and protocol version supports it, otherwise ``False``. Read-only :class:`bool` property. .. 
attribute:: transport An :ref:`transport` used to process request, Read-only property. The property can be used, for example, for getting IP address of client's peer:: peername = request.transport.get_extra_info('peername') if peername is not None: host, port = peername .. attribute:: loop An event loop instance used by HTTP request handling. Read-only :class:`asyncio.AbstractEventLoop` property. .. versionadded:: 2.3 .. attribute:: cookies A multidict of all request's cookies. Read-only :class:`~multidict.MultiDictProxy` lazy property. .. attribute:: content A :class:`~aiohttp.StreamReader` instance, input stream for reading request's *BODY*. Read-only property. .. attribute:: body_exists Return ``True`` if request has *HTTP BODY*, ``False`` otherwise. Read-only :class:`bool` property. .. versionadded:: 2.3 .. attribute:: can_read_body Return ``True`` if request's *HTTP BODY* can be read, ``False`` otherwise. Read-only :class:`bool` property. .. versionadded:: 2.3 .. attribute:: has_body Return ``True`` if request's *HTTP BODY* can be read, ``False`` otherwise. Read-only :class:`bool` property. .. deprecated:: 2.3 Use :meth:`can_read_body` instead. .. attribute:: content_type Read-only property with *content* part of *Content-Type* header. Returns :class:`str` like ``'text/html'`` .. note:: Returns value is ``'application/octet-stream'`` if no Content-Type header present in HTTP headers according to :rfc:`2616` .. attribute:: charset Read-only property that specifies the *encoding* for the request's BODY. The value is parsed from the *Content-Type* HTTP header. Returns :class:`str` like ``'utf-8'`` or ``None`` if *Content-Type* has no charset information. .. attribute:: content_length Read-only property that returns length of the request's BODY. The value is parsed from the *Content-Length* HTTP header. Returns :class:`int` or ``None`` if *Content-Length* is absent. .. attribute:: http_range Read-only property that returns information about *Range* HTTP header. 
Returns a :class:`slice` where ``.start`` is *left inclusive bound*, ``.stop`` is *right exclusive bound* and ``.step`` is ``1``. The property might be used in two manners: 1. Attribute-access style (example assumes that both left and right borders are set, the real logic for case of open bounds is more complex):: rng = request.http_range with open(filename, 'rb') as f: f.seek(rng.start) return f.read(rng.stop-rng.start) 2. Slice-style:: return buffer[request.http_range] .. attribute:: if_modified_since Read-only property that returns the date specified in the *If-Modified-Since* header. Returns :class:`datetime.datetime` or ``None`` if *If-Modified-Since* header is absent or is not a valid HTTP date. .. method:: clone(*, method=..., rel_url=..., headers=...) Clone itself with replacement some attributes. Creates and returns a new instance of Request object. If no parameters are given, an exact copy is returned. If a parameter is not passed, it will reuse the one from the current request object. :param str method: http method :param rel_url: url to use, :class:`str` or :class:`~yarl.URL` :param headers: :class:`~multidict.CIMultiDict` or compatible headers container. :return: a cloned :class:`Request` instance. .. comethod:: read() Read request body, returns :class:`bytes` object with body content. .. note:: The method **does** store read data internally, subsequent :meth:`~Request.read` call will return the same value. .. comethod:: text() Read request body, decode it using :attr:`charset` encoding or ``UTF-8`` if no encoding was specified in *MIME-type*. Returns :class:`str` with body content. .. note:: The method **does** store read data internally, subsequent :meth:`~Request.text` call will return the same value. .. comethod:: json(*, loads=json.loads) Read request body decoded as *json*. 
The method is just a boilerplate :ref:`coroutine ` implemented as:: async def json(self, *, loads=json.loads): body = await self.text() return loads(body) :param callable loads: any :term:`callable` that accepts :class:`str` and returns :class:`dict` with parsed JSON (:func:`json.loads` by default). .. note:: The method **does** store read data internally, subsequent :meth:`~Request.json` call will return the same value. .. comethod:: multipart(*, reader=aiohttp.multipart.MultipartReader) Returns :class:`aiohttp.multipart.MultipartReader` which processes incoming *multipart* request. The method is just a boilerplate :ref:`coroutine ` implemented as:: async def multipart(self, *, reader=aiohttp.multipart.MultipartReader): return reader(self.headers, self._payload) This method is a coroutine for consistency with the other reader methods. .. warning:: The method **does not** store read data internally. That means once you exhaust the multipart reader, you cannot get the request payload one more time. .. seealso:: :ref:`aiohttp-multipart` .. comethod:: post() A :ref:`coroutine ` that reads POST parameters from request body. Returns :class:`~multidict.MultiDictProxy` instance filled with parsed data. If :attr:`method` is not *POST*, *PUT*, *PATCH*, *TRACE* or *DELETE* or :attr:`content_type` is not empty or *application/x-www-form-urlencoded* or *multipart/form-data* returns empty multidict. .. note:: The method **does** store read data internally, subsequent :meth:`~Request.post` call will return the same value. .. comethod:: release() Release request. Eat unread part of HTTP BODY if present. .. note:: User code may never call :meth:`~Request.release`, all required work will be processed by :mod:`aiohttp.web` internal machinery. .. class:: Request A request used for receiving request's information by *web handler*. Every :ref:`handler` accepts a request instance as the first positional parameter. 
The class is derived from :class:`BaseRequest`, shares all parent's attributes and methods but has a couple of additional properties: .. attribute:: match_info Read-only property with :class:`~aiohttp.abc.AbstractMatchInfo` instance for result of route resolving. .. note:: Exact type of property depends on used router. If ``app.router`` is :class:`UrlDispatcher` the property contains :class:`UrlMappingMatchInfo` instance. .. attribute:: app An :class:`Application` instance used to call :ref:`request handler `, Read-only property. .. note:: You should never create the :class:`Request` instance manually -- :mod:`aiohttp.web` does it for you. But :meth:`~BaseRequest.clone` may be used for cloning *modified* request copy with changed *path*, *method* etc. .. _aiohttp-web-response: Response classes ---------------- For now, :mod:`aiohttp.web` has three classes for the *HTTP response*: :class:`StreamResponse`, :class:`Response` and :class:`FileResponse`. Usually you need to use the second one. :class:`StreamResponse` is intended for streaming data, while :class:`Response` contains *HTTP BODY* as an attribute and sends own content as single piece with the correct *Content-Length HTTP header*. For sake of design decisions :class:`Response` is derived from :class:`StreamResponse` parent class. The response supports *keep-alive* handling out-of-the-box if *request* supports it. You can disable *keep-alive* by :meth:`~StreamResponse.force_close` though. The common case for sending an answer from :ref:`web-handler` is returning a :class:`Response` instance:: def handler(request): return Response(text="All right!") Response classes are :obj:`dict` like objects, allowing them to be used for :ref:`sharing data` among :ref:`aiohttp-web-middlewares` and :ref:`aiohttp-web-signals` handlers:: resp['key'] = value .. versionadded:: 3.0 Dict-like interface support. StreamResponse ^^^^^^^^^^^^^^ .. 
class:: StreamResponse(*, status=200, reason=None) The base class for the *HTTP response* handling. Contains methods for setting *HTTP response headers*, *cookies*, *response status code*, writing *HTTP response BODY* and so on. The most important thing you should know about *response* --- it is *Finite State Machine*. That means you can do any manipulations with *headers*, *cookies* and *status code* only before :meth:`prepare` coroutine is called. Once you call :meth:`prepare` any change of the *HTTP header* part will raise :exc:`RuntimeError` exception. Any :meth:`write` call after :meth:`write_eof` is also forbidden. :param int status: HTTP status code, ``200`` by default. :param str reason: HTTP reason. If param is ``None`` reason will be calculated basing on *status* parameter. Otherwise pass :class:`str` with arbitrary *status* explanation.. .. attribute:: prepared Read-only :class:`bool` property, ``True`` if :meth:`prepare` has been called, ``False`` otherwise. .. attribute:: task A task that serves HTTP request handling. May be useful for graceful shutdown of long-running requests (streaming, long polling or web-socket). .. attribute:: status Read-only property for *HTTP response status code*, :class:`int`. ``200`` (OK) by default. .. attribute:: reason Read-only property for *HTTP response reason*, :class:`str`. .. method:: set_status(status, reason=None) Set :attr:`status` and :attr:`reason`. *reason* value is auto calculated if not specified (``None``). .. attribute:: keep_alive Read-only property, copy of :attr:`Request.keep_alive` by default. Can be switched to ``False`` by :meth:`force_close` call. .. method:: force_close Disable :attr:`keep_alive` for connection. There are no ways to enable it back. .. attribute:: compression Read-only :class:`bool` property, ``True`` if compression is enabled. ``False`` by default. .. seealso:: :meth:`enable_compression` .. method:: enable_compression(force=None) Enable compression. 
When *force* is unset compression encoding is selected based on the request's *Accept-Encoding* header. *Accept-Encoding* is not checked if *force* is set to a :class:`ContentCoding`. .. seealso:: :attr:`compression` .. attribute:: chunked Read-only property, indicates if chunked encoding is on. Can be enabled by :meth:`enable_chunked_encoding` call. .. seealso:: :attr:`enable_chunked_encoding` .. method:: enable_chunked_encoding Enables :attr:`chunked` encoding for response. There is no way to disable it afterwards. With enabled :attr:`chunked` encoding each :meth:`write` operation is encoded in a separate chunk. .. warning:: chunked encoding can be enabled for ``HTTP/1.1`` only. Setting up both :attr:`content_length` and chunked encoding is mutually exclusive. .. seealso:: :attr:`chunked` .. attribute:: headers :class:`~multidict.CIMultiDict` instance for *outgoing* *HTTP headers*. .. attribute:: cookies An instance of :class:`http.cookies.SimpleCookie` for *outgoing* cookies. .. warning:: Direct setting up *Set-Cookie* header may be overwritten by explicit calls to cookie manipulation. We encourage using :attr:`cookies` and :meth:`set_cookie`, :meth:`del_cookie` for cookie manipulations. .. method:: set_cookie(name, value, *, path='/', expires=None, \ domain=None, max_age=None, \ secure=None, httponly=None, version=None) Convenient way for setting :attr:`cookies`, allows to specify some additional properties like *max_age* in a single call. :param str name: cookie name :param str value: cookie value (will be converted to :class:`str` if value has another type). :param expires: expiration date (optional) :param str domain: cookie domain (optional) :param int max_age: defines the lifetime of the cookie, in seconds. The delta-seconds value is a decimal non- negative integer. After delta-seconds seconds elapse, the client should discard the cookie. A value of zero means the cookie should be discarded immediately. 
(optional) :param str path: specifies the subset of URLs to which this cookie applies. (optional, ``'/'`` by default) :param bool secure: attribute (with no value) directs the user agent to use only (unspecified) secure means to contact the origin server whenever it sends back this cookie. The user agent (possibly under the user's control) may determine what level of security it considers appropriate for "secure" cookies. The *secure* should be considered security advice from the server to the user agent, indicating that it is in the session's interest to protect the cookie contents. (optional) :param bool httponly: ``True`` if the cookie HTTP only (optional) :param int version: a decimal integer, identifies to which version of the state management specification the cookie conforms. (Optional, *version=1* by default) .. warning:: In HTTP version 1.1, ``expires`` was deprecated and replaced with the easier-to-use ``max-age``, but Internet Explorer (IE6, IE7, and IE8) **does not** support ``max-age``. .. method:: del_cookie(name, *, path='/', domain=None) Deletes cookie. :param str name: cookie name :param str domain: optional cookie domain :param str path: optional cookie path, ``'/'`` by default .. attribute:: content_length *Content-Length* for outgoing response. .. attribute:: content_type *Content* part of *Content-Type* for outgoing response. .. attribute:: charset *Charset* aka *encoding* part of *Content-Type* for outgoing response. The value converted to lower-case on attribute assigning. .. attribute:: last_modified *Last-Modified* header for outgoing response. This property accepts raw :class:`str` values, :class:`datetime.datetime` objects, Unix timestamps specified as an :class:`int` or a :class:`float` object, and the value ``None`` to unset the header. .. comethod:: prepare(request) :param aiohttp.web.Request request: HTTP request object, that the response answers. Send *HTTP header*. You should not change any header data after calling this method. 
The coroutine calls :attr:`~aiohttp.web.Application.on_response_prepare` signal handlers. .. comethod:: write(data) Send byte-ish data as the part of *response BODY*:: await resp.write(data) :meth:`prepare` must be invoked before the call. Raises :exc:`TypeError` if data is not :class:`bytes`, :class:`bytearray` or :class:`memoryview` instance. Raises :exc:`RuntimeError` if :meth:`prepare` has not been called. Raises :exc:`RuntimeError` if :meth:`write_eof` has been called. .. comethod:: write_eof() A :ref:`coroutine` *may* be called as a mark of the *HTTP response* processing finish. *Internal machinery* will call this method at the end of the request processing if needed. After :meth:`write_eof` call any manipulations with the *response* object are forbidden. Response ^^^^^^^^ .. class:: Response(*, status=200, headers=None, content_type=None, \ charset=None, body=None, text=None) The most usable response class, inherited from :class:`StreamResponse`. Accepts *body* argument for setting the *HTTP response BODY*. The actual :attr:`body` sending happens in overridden :meth:`~StreamResponse.write_eof`. :param bytes body: response's BODY :param int status: HTTP status code, 200 OK by default. :param collections.abc.Mapping headers: HTTP headers that should be added to response's ones. :param str text: response's BODY :param str content_type: response's content type. ``'text/plain'`` if *text* is passed also, ``'application/octet-stream'`` otherwise. :param str charset: response's charset. ``'utf-8'`` if *text* is passed also, ``None`` otherwise. .. attribute:: body Read-write attribute for storing response's content aka BODY, :class:`bytes`. Setting :attr:`body` also recalculates :attr:`~StreamResponse.content_length` value. Resetting :attr:`body` (assigning ``None``) sets :attr:`~StreamResponse.content_length` to ``None`` too, dropping *Content-Length* HTTP header. .. 
attribute:: text Read-write attribute for storing response's content, represented as string, :class:`str`. Setting :attr:`text` also recalculates :attr:`~StreamResponse.content_length` value and :attr:`~StreamResponse.body` value. Resetting :attr:`text` (assigning ``None``) sets :attr:`~StreamResponse.content_length` to ``None`` too, dropping *Content-Length* HTTP header. WebSocketResponse ^^^^^^^^^^^^^^^^^ .. class:: WebSocketResponse(*, timeout=10.0, receive_timeout=None, \ autoclose=True, autoping=True, heartbeat=None, \ protocols=(), compress=True) Class for handling server-side websockets, inherited from :class:`StreamResponse`. After starting (by :meth:`prepare` call) the response you cannot use :meth:`~StreamResponse.write` method but should communicate with websocket client by :meth:`send_str`, :meth:`receive` and others. To enable back-pressure from slow websocket clients treat methods :meth:`ping()`, :meth:`pong()`, :meth:`send_str()`, :meth:`send_bytes()`, :meth:`send_json()` as coroutines. By default write buffer size is set to 64k. :param bool autoping: Automatically send :const:`~aiohttp.WSMsgType.PONG` on :const:`~aiohttp.WSMsgType.PING` message from client, and handle :const:`~aiohttp.WSMsgType.PONG` responses from client. Note that server does not send :const:`~aiohttp.WSMsgType.PING` requests, you need to do this explicitly using :meth:`ping` method. :param float heartbeat: Send `ping` message every `heartbeat` seconds and wait `pong` response, close connection if `pong` response is not received. The timer is reset on any data reception. :param float receive_timeout: Timeout value for `receive` operations. Default value is None (no timeout for receive operation) :param bool compress: Enable per-message deflate extension support. False for disabled, default value is True. The class supports ``async for`` statement for iterating over incoming messages:: ws = web.WebSocketResponse() await ws.prepare(request) async for msg in ws: print(msg.data) .. 
comethod:: prepare(request) Starts websocket. After the call you can use websocket methods. :param aiohttp.web.Request request: HTTP request object, that the response answers. :raises HTTPException: if websocket handshake has failed. .. method:: can_prepare(request) Performs checks for *request* data to figure out if websocket can be started on the request. If :meth:`can_prepare` call is success then :meth:`prepare` will success too. :param aiohttp.web.Request request: HTTP request object, that the response answers. :return: :class:`WebSocketReady` instance. :attr:`WebSocketReady.ok` is ``True`` on success, :attr:`WebSocketReady.protocol` is websocket subprotocol which is passed by client and accepted by server (one of *protocols* sequence from :class:`WebSocketResponse` ctor). :attr:`WebSocketReady.protocol` may be ``None`` if client and server subprotocols are not overlapping. .. note:: The method never raises exception. .. attribute:: closed Read-only property, ``True`` if connection has been closed or in process of closing. :const:`~aiohttp.WSMsgType.CLOSE` message has been received from peer. .. attribute:: close_code Read-only property, close code from peer. It is set to ``None`` on opened connection. .. attribute:: protocol Websocket *subprotocol* chosen after :meth:`start` call. May be ``None`` if server and client protocols are not overlapping. .. method:: exception() Returns last occurred exception or None. .. comethod:: ping(message=b'') Send :const:`~aiohttp.WSMsgType.PING` to peer. :param message: optional payload of *ping* message, :class:`str` (converted to *UTF-8* encoded bytes) or :class:`bytes`. :raise RuntimeError: if connections is not started or closing. .. versionchanged:: 3.0 The method is converted into :term:`coroutine` .. comethod:: pong(message=b'') Send *unsolicited* :const:`~aiohttp.WSMsgType.PONG` to peer. :param message: optional payload of *pong* message, :class:`str` (converted to *UTF-8* encoded bytes) or :class:`bytes`. 
:raise RuntimeError: if connection is not started or closing. .. versionchanged:: 3.0 The method is converted into :term:`coroutine` .. comethod:: send_str(data, compress=None) Send *data* to peer as :const:`~aiohttp.WSMsgType.TEXT` message. :param str data: data to send. :param int compress: sets specific level of compression for single message, ``None`` for not overriding per-socket setting. :raise RuntimeError: if connection is not started or closing :raise TypeError: if data is not :class:`str` .. versionchanged:: 3.0 The method is converted into :term:`coroutine`, *compress* parameter added. .. comethod:: send_bytes(data, compress=None) Send *data* to peer as :const:`~aiohttp.WSMsgType.BINARY` message. :param data: data to send. :param int compress: sets specific level of compression for single message, ``None`` for not overriding per-socket setting. :raise RuntimeError: if connection is not started or closing :raise TypeError: if data is not :class:`bytes`, :class:`bytearray` or :class:`memoryview`. .. versionchanged:: 3.0 The method is converted into :term:`coroutine`, *compress* parameter added. .. comethod:: send_json(data, compress=None, *, dumps=json.dumps) Send *data* to peer as JSON string. :param data: data to send. :param int compress: sets specific level of compression for single message, ``None`` for not overriding per-socket setting. :param callable dumps: any :term:`callable` that accepts an object and returns a JSON string (:func:`json.dumps` by default). :raise RuntimeError: if connection is not started or closing :raise ValueError: if data is not serializable object :raise TypeError: if value returned by ``dumps`` param is not :class:`str` .. versionchanged:: 3.0 The method is converted into :term:`coroutine`, *compress* parameter added. .. comethod:: close(*, code=1000, message=b'') A :ref:`coroutine` that initiates closing handshake by sending :const:`~aiohttp.WSMsgType.CLOSE` message. It is safe to call `close()` from a different task. 
:param int code: closing code :param message: optional payload of *close* message, :class:`str` (converted to *UTF-8* encoded bytes) or :class:`bytes`. :raise RuntimeError: if connection is not started .. comethod:: receive(timeout=None) A :ref:`coroutine` that waits for an upcoming *data* message from peer and returns it. The coroutine implicitly handles :const:`~aiohttp.WSMsgType.PING`, :const:`~aiohttp.WSMsgType.PONG` and :const:`~aiohttp.WSMsgType.CLOSE` without returning the message. It processes *ping-pong game* and performs *closing handshake* internally. .. note:: Can only be called by the request handling task. :param timeout: timeout for `receive` operation. timeout value overrides response's receive_timeout attribute. :return: :class:`~aiohttp.WSMessage` :raise RuntimeError: if connection is not started .. comethod:: receive_str(*, timeout=None) A :ref:`coroutine` that calls :meth:`receive` but also asserts the message type is :const:`~aiohttp.WSMsgType.TEXT`. .. note:: Can only be called by the request handling task. :param timeout: timeout for `receive` operation. timeout value overrides response's receive_timeout attribute. :return str: peer's message content. :raise TypeError: if message is :const:`~aiohttp.WSMsgType.BINARY`. .. comethod:: receive_bytes(*, timeout=None) A :ref:`coroutine` that calls :meth:`receive` but also asserts the message type is :const:`~aiohttp.WSMsgType.BINARY`. .. note:: Can only be called by the request handling task. :param timeout: timeout for `receive` operation. timeout value overrides response's receive_timeout attribute. :return bytes: peer's message content. :raise TypeError: if message is :const:`~aiohttp.WSMsgType.TEXT`. .. comethod:: receive_json(*, loads=json.loads, timeout=None) A :ref:`coroutine` that calls :meth:`receive_str` and loads the JSON string to a Python dict. .. note:: Can only be called by the request handling task. 
:param callable loads: any :term:`callable` that accepts :class:`str` and returns :class:`dict` with parsed JSON (:func:`json.loads` by default). :param timeout: timeout for `receive` operation. timeout value overrides response`s receive_timeout attribute. :return dict: loaded JSON content :raise TypeError: if message is :const:`~aiohttp.WSMsgType.BINARY`. :raise ValueError: if message is not valid JSON. .. seealso:: :ref:`WebSockets handling` WebSocketReady ^^^^^^^^^^^^^^ .. class:: WebSocketReady A named tuple for returning result from :meth:`WebSocketResponse.can_prepare`. Has :class:`bool` check implemented, e.g.:: if not await ws.can_prepare(...): cannot_start_websocket() .. attribute:: ok ``True`` if websocket connection can be established, ``False`` otherwise. .. attribute:: protocol :class:`str` represented selected websocket sub-protocol. .. seealso:: :meth:`WebSocketResponse.can_prepare` json_response ------------- .. function:: json_response([data], *, text=None, body=None, \ status=200, reason=None, headers=None, \ content_type='application/json', \ dumps=json.dumps) Return :class:`Response` with predefined ``'application/json'`` content type and *data* encoded by ``dumps`` parameter (:func:`json.dumps` by default). .. _aiohttp-web-app-and-router: Application and Router ---------------------- Application ^^^^^^^^^^^ Application is a synonym for web-server. To get fully working example, you have to make *application*, register supported urls in *router* and create a *server socket* with :class:`~aiohttp.web.Server` as a *protocol factory*. *Server* could be constructed with :meth:`Application.make_handler`. *Application* contains a *router* instance and a list of callbacks that will be called during application finishing. 
:class:`Application` is a :obj:`dict`-like object, so you can use it for :ref:`sharing data` globally by storing arbitrary properties for later access from a :ref:`handler` via the :attr:`Request.app` property:: app = Application() app['database'] = await aiopg.create_engine(**db_config) async def handler(request): with (await request.app['database']) as conn: conn.execute("DELETE * FROM table") Although :class:`Application` is a :obj:`dict`-like object, it can't be duplicated like one using :meth:`Application.copy`. .. class:: Application(*, logger=, router=None, middlewares=(), \ handler_args=None, client_max_size=1024**2, \ loop=None, debug=...) The class inherits :class:`dict`. :param logger: :class:`logging.Logger` instance for storing application logs. By default the value is ``logging.getLogger("aiohttp.web")`` :param router: :class:`aiohttp.abc.AbstractRouter` instance, the system creates :class:`UrlDispatcher` by default if *router* is ``None``. :param middlewares: :class:`list` of middleware factories, see :ref:`aiohttp-web-middlewares` for details. :param handler_args: dict-like object that overrides keyword arguments of :meth:`Application.make_handler` :param client_max_size: client's maximum size in a request, in bytes. If a POST request exceeds this value, it raises an `HTTPRequestEntityTooLarge` exception. :param loop: event loop .. deprecated:: 2.0 The parameter is deprecated. The loop is set during the freeze stage. :param debug: Switches debug mode. .. attribute:: router Read-only property that returns *router instance*. .. attribute:: logger :class:`logging.Logger` instance for storing application logs. .. attribute:: loop :ref:`event loop` used for processing HTTP requests. .. attribute:: debug Boolean value indicating whether the debug mode is turned on or off. .. attribute:: on_response_prepare A :class:`~aiohttp.Signal` that is fired at the beginning of :meth:`StreamResponse.prepare` with parameters *request* and *response*. 
It can be used, for example, to add custom headers to each response before sending. Signal handlers should have the following signature:: async def on_prepare(request, response): pass .. attribute:: on_startup A :class:`~aiohttp.Signal` that is fired on application start-up. Subscribers may use the signal to run background tasks in the event loop along with the application's request handler just after the application start-up. Signal handlers should have the following signature:: async def on_startup(app): pass .. seealso:: :ref:`aiohttp-web-background-tasks`. .. attribute:: on_shutdown A :class:`~aiohttp.Signal` that is fired on application shutdown. Subscribers may use the signal for gracefully closing long running connections, e.g. websockets and data streaming. Signal handlers should have the following signature:: async def on_shutdown(app): pass It's up to end user to figure out which :term:`web-handler`\s are still alive and how to finish them properly. We suggest keeping a list of long running handlers in :class:`Application` dictionary. .. seealso:: :ref:`aiohttp-web-graceful-shutdown` and :attr:`on_cleanup`. .. attribute:: on_cleanup A :class:`~aiohttp.Signal` that is fired on application cleanup. Subscribers may use the signal for gracefully closing connections to database server etc. Signal handlers should have the following signature:: async def on_cleanup(app): pass .. seealso:: :ref:`aiohttp-web-graceful-shutdown` and :attr:`on_shutdown`. .. method:: make_handler(loop=None, **kwargs) Creates HTTP protocol factory for handling requests. :param loop: :ref:`event loop` used for processing HTTP requests. If param is ``None`` :func:`asyncio.get_event_loop` used for getting default event loop. .. deprecated:: 2.0 :param bool tcp_keepalive: Enable TCP Keep-Alive. Default: ``True``. :param int keepalive_timeout: Number of seconds before closing Keep-Alive connection. Default: ``75`` seconds (NGINX's default value). :param logger: Custom logger object. 
Default: :data:`aiohttp.log.server_logger`. :param access_log: Custom logging object. Default: :data:`aiohttp.log.access_logger`. :param access_log_class: class for `access_logger`. Default: :data:`aiohttp.helpers.AccessLogger`. Must to be a subclass of :class:`aiohttp.abc.AbstractAccessLogger`. :param str access_log_format: Access log format string. Default: :attr:`helpers.AccessLogger.LOG_FORMAT`. :param int max_line_size: Optional maximum header line size. Default: ``8190``. :param int max_headers: Optional maximum header size. Default: ``32768``. :param int max_field_size: Optional maximum header field size. Default: ``8190``. :param float lingering_time: maximum time during which the server reads and ignore additional data coming from the client when lingering close is on. Use ``0`` for disabling lingering on server channel closing. :param float lingering_timeout: maximum waiting time for more client data to arrive when lingering close is in effect You should pass result of the method as *protocol_factory* to :meth:`~asyncio.AbstractEventLoop.create_server`, e.g.:: loop = asyncio.get_event_loop() app = Application() # setup route table # app.router.add_route(...) await loop.create_server(app.make_handler(), '0.0.0.0', 8080) .. comethod:: startup() A :ref:`coroutine` that will be called along with the application's request handler. The purpose of the method is calling :attr:`on_startup` signal handlers. .. comethod:: shutdown() A :ref:`coroutine` that should be called on server stopping but before :meth:`cleanup()`. The purpose of the method is calling :attr:`on_shutdown` signal handlers. .. comethod:: cleanup() A :ref:`coroutine` that should be called on server stopping but after :meth:`shutdown`. The purpose of the method is calling :attr:`on_cleanup` signal handlers. .. note:: Application object has :attr:`router` attribute but has no ``add_route()`` method. 
The reason is: we want to support different router implementations (even maybe not url-matching based but traversal ones). For sake of that fact we have very trivial ABC for :class:`AbstractRouter`: it should have only :meth:`AbstractRouter.resolve` coroutine. No methods for adding routes or route reversing (getting URL by route name). All those are router implementation details (but, sure, you need to deal with those methods after choosing the router for your application). Server ^^^^^^ A protocol factory compatible with :meth:`~asyncio.AbstractEventLoop.create_server`. .. class:: Server The class is responsible for creating HTTP protocol objects that can handle HTTP connections. .. attribute:: Server.connections List of all currently opened connections. .. attribute:: requests_count Amount of processed requests. .. comethod:: Server.shutdown(timeout) A :ref:`coroutine` that should be called to close all opened connections. Router ^^^^^^ For dispatching URLs to :ref:`handlers` :mod:`aiohttp.web` uses *routers*. Router is any object that implements :class:`AbstractRouter` interface. :mod:`aiohttp.web` provides an implementation called :class:`UrlDispatcher`. :class:`Application` uses :class:`UrlDispatcher` as :meth:`router` by default. .. class:: UrlDispatcher() Straightforward url-matching router, implements :class:`collections.abc.Mapping` for access to *named routes*. Before running :class:`Application` you should fill *route table* first by calling :meth:`add_route` and :meth:`add_static`. :ref:`Handler` lookup is performed by iterating on added *routes* in FIFO order. The first matching *route* will be used to call corresponding *handler*. If on route creation you specify *name* parameter the result is *named route*. *Named route* can be retrieved by ``app.router[name]`` call, checked for existence by ``name in app.router`` etc. .. seealso:: :ref:`Route classes ` .. method:: add_resource(path, *, name=None) Append a :term:`resource` to the end of route table. 
*path* may be either *constant* string like ``'/a/b/c'`` or *variable rule* like ``'/a/{var}'`` (see :ref:`handling variable paths `) :param str path: resource path spec. :param str name: optional resource name. :return: created resource instance (:class:`PlainResource` or :class:`DynamicResource`). .. method:: add_route(method, path, handler, *, \ name=None, expect_handler=None) Append :ref:`handler` to the end of route table. *path* may be either *constant* string like ``'/a/b/c'`` or *variable rule* like ``'/a/{var}'`` (see :ref:`handling variable paths `) Pay attention please: *handler* is converted to coroutine internally when it is a regular function. :param str method: HTTP method for route. Should be one of ``'GET'``, ``'POST'``, ``'PUT'``, ``'DELETE'``, ``'PATCH'``, ``'HEAD'``, ``'OPTIONS'`` or ``'*'`` for any method. The parameter is case-insensitive, e.g. you can push ``'get'`` as well as ``'GET'``. :param str path: route path. Should be started with slash (``'/'``). :param callable handler: route handler. :param str name: optional route name. :param coroutine expect_handler: optional *expect* header handler. :returns: new :class:`PlainRoute` or :class:`DynamicRoute` instance. .. method:: add_routes(routes_table) Register route definitions from *routes_table*. The table is a :class:`list` of :class:`RouteDef` items or :class:`RouteTableDef`. .. versionadded:: 2.3 .. method:: add_get(path, handler, *, name=None, allow_head=True, **kwargs) Shortcut for adding a GET handler. Calls the :meth:`add_route` with \ ``method`` equals to ``'GET'``. If *allow_head* is ``True`` (default) the route for method HEAD is added with the same handler as for GET. If *name* is provided the name for HEAD route is suffixed with ``'-head'``. For example ``router.add_get(path, handler, name='route')`` call adds two routes: first for GET with name ``'route'`` and second for HEAD with name ``'route-head'``. .. 
method:: add_post(path, handler, **kwargs) Shortcut for adding a POST handler. Calls the :meth:`add_route` with \ ``method`` equals to ``'POST'``. .. method:: add_head(path, handler, **kwargs) Shortcut for adding a HEAD handler. Calls the :meth:`add_route` with \ ``method`` equals to ``'HEAD'``. .. method:: add_put(path, handler, **kwargs) Shortcut for adding a PUT handler. Calls the :meth:`add_route` with \ ``method`` equals to ``'PUT'``. .. method:: add_patch(path, handler, **kwargs) Shortcut for adding a PATCH handler. Calls the :meth:`add_route` with \ ``method`` equals to ``'PATCH'``. .. method:: add_delete(path, handler, **kwargs) Shortcut for adding a DELETE handler. Calls the :meth:`add_route` with \ ``method`` equals to ``'DELETE'``. .. method:: add_view(path, handler, **kwargs) Shortcut for adding a class-based view handler. Calls the \ :meth:`add_route` with ``method`` equals to ``'*'``. .. versionadded:: 3.0 .. method:: add_static(prefix, path, *, name=None, expect_handler=None, \ chunk_size=256*1024, \ response_factory=StreamResponse, \ show_index=False, \ follow_symlinks=False, \ append_version=False) Adds a router and a handler for returning static files. Useful for serving static content like images, javascript and css files. On platforms that support it, the handler will transfer files more efficiently using the ``sendfile`` system call. In some situations it might be necessary to avoid using the ``sendfile`` system call even if the platform supports it. This can be accomplished by setting environment variable ``AIOHTTP_NOSENDFILE=1``. If a gzip version of the static content exists at file path + ``.gz``, it will be used for the response. .. warning:: Use :meth:`add_static` for development only. In production, static content should be processed by web servers like *nginx* or *apache*. 
:param str prefix: URL path prefix for handled static files :param path: path to the folder in file system that contains handled static files, :class:`str` or :class:`pathlib.Path`. :param str name: optional route name. :param coroutine expect_handler: optional *expect* header handler. :param int chunk_size: size of single chunk for file downloading, 256Kb by default. Increasing *chunk_size* parameter to, say, 1Mb may increase file downloading speed but consumes more memory. :param bool show_index: flag for allowing to show indexes of a directory, by default it's not allowed and HTTP/403 will be returned on directory access. :param bool follow_symlinks: flag for allowing to follow symlinks from a directory, by default it's not allowed and HTTP/404 will be returned on access. :param bool append_version: flag for adding file version (hash) to the url query string, this value will be used as default when you call to :meth:`StaticRoute.url` and :meth:`StaticRoute.url_for` methods. :returns: new :class:`StaticRoute` instance. .. method:: add_subapp(prefix, subapp) Register nested sub-application under given path *prefix*. In resolving process if request's path starts with *prefix* then further resolving is passed to *subapp*. :param str prefix: path's prefix for the resource. :param Application subapp: nested application attached under *prefix*. :returns: a :class:`PrefixedSubAppResource` instance. .. comethod:: resolve(request) A :ref:`coroutine` that returns :class:`AbstractMatchInfo` for *request*. The method never raises exception, but returns :class:`AbstractMatchInfo` instance with: 1. :attr:`~AbstractMatchInfo.http_exception` assigned to :exc:`HTTPException` instance. 2. :attr:`~AbstractMatchInfo.handler` which raises :exc:`HTTPNotFound` or :exc:`HTTPMethodNotAllowed` on handler's execution if there is no registered route for *request*. *Middlewares* can process that exceptions to render pretty-looking error page for example. 
Used by internal machinery, end user unlikely need to call the method. .. note:: The method uses :attr:`Request.raw_path` for pattern matching against registered routes. .. method:: resources() The method returns a *view* for *all* registered resources. The view is an object that allows to: 1. Get size of the router table:: len(app.router.resources()) 2. Iterate over registered resources:: for resource in app.router.resources(): print(resource) 3. Make a check if the resources is registered in the router table:: route in app.router.resources() .. method:: routes() The method returns a *view* for *all* registered routes. .. method:: named_resources() Returns a :obj:`dict`-like :class:`types.MappingProxyType` *view* over *all* named **resources**. The view maps every named resource's **name** to the :class:`BaseResource` instance. It supports the usual :obj:`dict`-like operations, except for any mutable operations (i.e. it's **read-only**):: len(app.router.named_resources()) for name, resource in app.router.named_resources().items(): print(name, resource) "name" in app.router.named_resources() app.router.named_resources()["name"] .. _aiohttp-web-resource: Resource ^^^^^^^^ Default router :class:`UrlDispatcher` operates with :term:`resource`\s. Resource is an item in *routing table* which has a *path*, an optional unique *name* and at least one :term:`route`. :term:`web-handler` lookup is performed in the following way: 1. Router iterates over *resources* one-by-one. 2. If *resource* matches to requested URL the resource iterates over own *routes*. 3. If route matches to requested HTTP method (or ``'*'`` wildcard) the route's handler is used as found :term:`web-handler`. The lookup is finished. 4. Otherwise router tries next resource from the *routing table*. 5. 
If the end of *routing table* is reached and no *resource* / *route* pair is found the *router* returns special :class:`AbstractMatchInfo` instance with :attr:`AbstractMatchInfo.http_exception` is not ``None`` but :exc:`HTTPException` with either *HTTP 404 Not Found* or *HTTP 405 Method Not Allowed* status code. Registered :attr:`AbstractMatchInfo.handler` raises this exception on call. User should never instantiate resource classes but get them by :meth:`UrlDispatcher.add_resource` call. After that a :term:`route` may be added by calling :meth:`Resource.add_route`. :meth:`UrlDispatcher.add_route` is just shortcut for:: router.add_resource(path).add_route(method, handler) Resource with a *name* is called *named resource*. The main purpose of *named resource* is constructing URL by route name for passing it into *template engine* for example:: url = app.router['resource_name'].url_for().with_query({'a': 1, 'b': 2}) Resource classes hierarchy:: AbstractResource Resource PlainResource DynamicResource StaticResource .. class:: AbstractResource A base class for all resources. Inherited from :class:`collections.abc.Sized` and :class:`collections.abc.Iterable`. ``len(resource)`` returns amount of :term:`route`\s belonging to the resource, ``for route in resource`` allows to iterate over these routes. .. attribute:: name Read-only *name* of resource or ``None``. .. comethod:: resolve(method, path) Resolve resource by finding appropriate :term:`web-handler` for ``(method, path)`` combination. :param str method: requested HTTP method. :param str path: *path* part of request. :return: (*match_info*, *allowed_methods*) pair. *allowed_methods* is a :class:`set` of HTTP methods accepted by resource. *match_info* is either :class:`UrlMappingMatchInfo` if request is resolved or ``None`` if no :term:`route` is found. .. method:: get_info() A resource description, e.g. ``{'path': '/path/to'}`` or ``{'formatter': '/path/{to}', 'pattern': re.compile(r'^/path/(?P<to>[a-zA-Z][_a-zA-Z0-9]+)$')}`` .. 
method:: url_for(*args, **kwargs) Construct an URL for route with additional params. *args* and **kwargs** depend on a parameters list accepted by inherited resource class. :return: :class:`~yarl.URL` -- resulting URL instance. .. class:: Resource A base class for new-style resources, inherits :class:`AbstractResource`. .. method:: add_route(method, handler, *, expect_handler=None) Add a :term:`web-handler` to resource. :param str method: HTTP method for route. Should be one of ``'GET'``, ``'POST'``, ``'PUT'``, ``'DELETE'``, ``'PATCH'``, ``'HEAD'``, ``'OPTIONS'`` or ``'*'`` for any method. The parameter is case-insensitive, e.g. you can push ``'get'`` as well as ``'GET'``. The method should be unique for resource. :param callable handler: route handler. :param coroutine expect_handler: optional *expect* header handler. :returns: new :class:`ResourceRoute` instance. .. class:: PlainResource A resource, inherited from :class:`Resource`. The class corresponds to resources with plain-text matching, ``'/path/to'`` for example. .. method:: url_for() Returns a :class:`~yarl.URL` for the resource. .. class:: DynamicResource A resource, inherited from :class:`Resource`. The class corresponds to resources with :ref:`variable ` matching, e.g. ``'/path/{to}/{param}'`` etc. .. method:: url_for(**params) Returns a :class:`~yarl.URL` for the resource. :param params: -- a variable substitutions for dynamic resource. E.g. for ``'/path/{to}/{param}'`` pattern the method should be called as ``resource.url_for(to='val1', param='val2')`` .. class:: StaticResource A resource, inherited from :class:`Resource`. The class corresponds to resources for :ref:`static file serving `. .. method:: url_for(filename, append_version=None) Returns a :class:`~yarl.URL` for file path under resource prefix. :param filename: -- a file name substitution for static file handler. Accepts both :class:`str` and :class:`pathlib.Path`. E.g. 
an URL for ``'/prefix/dir/file.txt'`` should be generated as ``resource.url_for(filename='dir/file.txt')`` :param bool append_version: -- a flag for adding file version (hash) to the url query string for cache boosting By default has value from the constructor (``False`` by default) When set to ``True`` - ``v=FILE_HASH`` query string param will be added When set to ``False`` it has no impact; if the file is not found it also has no impact .. class:: PrefixedSubAppResource A resource for serving nested applications. The class instance is returned by :meth:`~aiohttp.web.Application.add_subapp` call. .. method:: url_for(**kwargs) The call is not allowed, it raises :exc:`RuntimeError`. .. _aiohttp-web-route: Route ^^^^^ Route has *HTTP method* (wildcard ``'*'`` is an option), :term:`web-handler` and optional *expect handler*. Every route belongs to some resource. Route classes hierarchy:: AbstractRoute ResourceRoute SystemRoute :class:`ResourceRoute` is the route used for resources, :class:`SystemRoute` serves URL resolving errors like *404 Not Found* and *405 Method Not Allowed*. .. class:: AbstractRoute Base class for routes served by :class:`UrlDispatcher`. .. attribute:: method HTTP method handled by the route, e.g. *GET*, *POST* etc. .. attribute:: handler :ref:`handler` that processes the route. .. attribute:: name Name of the route, always equals to name of resource which owns the route. .. attribute:: resource Resource instance which holds the route, ``None`` for :class:`SystemRoute`. .. method:: url_for(*args, **kwargs) Abstract method for constructing url handled by the route. Actually it's a shortcut for ``route.resource.url_for(...)``. .. comethod:: handle_expect_header(request) ``100-continue`` handler. .. class:: ResourceRoute The route class for handling different HTTP methods for :class:`Resource`. .. class:: SystemRoute The route class for handling URL resolution errors like *404 Not Found* and *405 Method Not Allowed*. .. attribute:: status HTTP status code .. 
attribute:: reason HTTP status reason .. _aiohttp-web-route-def: RouteDef ^^^^^^^^ Route definition, a description for not registered yet route. Could be used for filing route table by providing a list of route definitions (Django style). The definition is created by functions like :func:`get` or :func:`post`, list of definitions could be added to router by :meth:`UrlDispatcher.add_routes` call:: from aiohttp import web async def handle_get(request): ... async def handle_post(request): ... app.router.add_routes([web.get('/get', handle_get), web.post('/post', handle_post), .. class:: RouteDef A definition for not added yet route. .. attribute:: method HTTP method (``GET``, ``POST`` etc.) (:class:`str`). .. attribute:: path Path to resource, e.g. ``/path/to``. Could contain ``{}`` brackets for :ref:`variable resources ` (:class:`str`). .. attribute:: handler An async function to handle HTTP request. .. attribute:: kwargs A :class:`dict` of additional arguments. .. versionadded:: 2.3 .. function:: get(path, handler, *, name=None, allow_head=True, \ expect_handler=None) Return :class:`RouteDef` for processing ``GET`` requests. See :meth:`UrlDispatcher.add_get` for information about parameters. .. versionadded:: 2.3 .. function:: post(path, handler, *, name=None, expect_handler=None) Return :class:`RouteDef` for processing ``POST`` requests. See :meth:`UrlDispatcher.add_post` for information about parameters. .. versionadded:: 2.3 .. function:: head(path, handler, *, name=None, expect_handler=None) Return :class:`RouteDef` for processing ``HEAD`` requests. See :meth:`UrlDispatcher.add_head` for information about parameters. .. versionadded:: 2.3 .. function:: put(path, handler, *, name=None, expect_handler=None) Return :class:`RouteDef` for processing ``PUT`` requests. See :meth:`UrlDispatcher.add_put` for information about parameters. .. versionadded:: 2.3 .. 
function:: patch(path, handler, *, name=None, expect_handler=None) Return :class:`RouteDef` for processing ``PATCH`` requests. See :meth:`UrlDispatcher.add_patch` for information about parameters. .. versionadded:: 2.3 .. function:: delete(path, handler, *, name=None, expect_handler=None) Return :class:`RouteDef` for processing ``DELETE`` requests. See :meth:`UrlDispatcher.add_delete` for information about parameters. .. versionadded:: 2.3 .. function:: view(path, handler, *, name=None, expect_handler=None) Return :class:`RouteDef` for processing ``ANY`` requests. See :meth:`UrlDispatcher.add_view` for information about parameters. .. versionadded:: 3.0 .. function:: route(method, path, handler, *, name=None, expect_handler=None) Return :class:`RouteDef` for processing requests with the given HTTP *method*. See :meth:`UrlDispatcher.add_route` for information about parameters. .. versionadded:: 2.3 .. _aiohttp-web-route-table-def: RouteTableDef ^^^^^^^^^^^^^ A routes table definition used for describing routes by decorators (Flask style):: from aiohttp import web routes = web.RouteTableDef() @routes.get('/get') async def handle_get(request): ... @routes.post('/post') async def handle_post(request): ... app.router.add_routes(routes) @routes.view("/view") class MyView(web.View): async def get(self): ... async def post(self): ... .. class:: RouteTableDef() A sequence of :class:`RouteDef` instances (implements :class:`collections.abc.Sequence` protocol). In addition to all standard :class:`list` methods the class provides also methods like ``get()`` and ``post()`` for adding new route definition. .. decoratormethod:: get(path, *, allow_head=True, \ name=None, expect_handler=None) Add a new :class:`RouteDef` item for registering ``GET`` web-handler. See :meth:`UrlDispatcher.add_get` for information about parameters. .. decoratormethod:: post(path, *, name=None, expect_handler=None) Add a new :class:`RouteDef` item for registering ``POST`` web-handler. 
See :meth:`UrlDispatcher.add_post` for information about parameters. .. decoratormethod:: head(path, *, name=None, expect_handler=None) Add a new :class:`RouteDef` item for registering ``HEAD`` web-handler. See :meth:`UrlDispatcher.add_head` for information about parameters. .. decoratormethod:: put(path, *, name=None, expect_handler=None) Add a new :class:`RouteDef` item for registering ``PUT`` web-handler. See :meth:`UrlDispatcher.add_put` for information about parameters. .. decoratormethod:: patch(path, *, name=None, expect_handler=None) Add a new :class:`RouteDef` item for registering ``PATCH`` web-handler. See :meth:`UrlDispatcher.add_patch` for information about parameters. .. decoratormethod:: delete(path, *, name=None, expect_handler=None) Add a new :class:`RouteDef` item for registering ``DELETE`` web-handler. See :meth:`UrlDispatcher.add_delete` for information about parameters. .. decoratormethod:: view(path, *, name=None, expect_handler=None) Add a new :class:`RouteDef` item for registering ``ANY`` methods against a class-based view. See :meth:`UrlDispatcher.add_view` for information about parameters. .. versionadded:: 3.0 .. decoratormethod:: route(method, path, *, name=None, expect_handler=None) Add a new :class:`RouteDef` item for registering a web-handler for arbitrary HTTP method. See :meth:`UrlDispatcher.add_route` for information about parameters. .. versionadded:: 2.3 MatchInfo ^^^^^^^^^ After route matching web application calls found handler if any. Matching result can be accessible from handler as :attr:`Request.match_info` attribute. In general the result may be any object derived from :class:`AbstractMatchInfo` (:class:`UrlMappingMatchInfo` for default :class:`UrlDispatcher` router). .. class:: UrlMappingMatchInfo Inherited from :class:`dict` and :class:`AbstractMatchInfo`. Dict items are filled by matching info and is :term:`resource`\-specific. .. attribute:: expect_handler A coroutine for handling ``100-continue``. .. 
attribute:: handler A coroutine for handling request. .. attribute:: route :class:`Route` instance for url matching. View ^^^^ .. class:: View(request) Inherited from :class:`AbstractView`. Base class for class based views. Implementations should derive from :class:`View` and override methods for handling HTTP verbs like ``get()`` or ``post()``:: class MyView(View): async def get(self): resp = await get_response(self.request) return resp async def post(self): resp = await post_response(self.request) return resp app.router.add_view('/view', MyView) The view raises *405 Method Not allowed* (:class:`HTTPMethodNotAllowed`) if requested web verb is not supported. :param request: instance of :class:`Request` that has initiated a view processing. .. attribute:: request Request sent to view's constructor, read-only property. Overridable coroutine methods: ``connect()``, ``delete()``, ``get()``, ``head()``, ``options()``, ``patch()``, ``post()``, ``put()``, ``trace()``. .. seealso:: :ref:`aiohttp-web-class-based-views` .. _aiohttp-web-app-runners-reference: Running Applications -------------------- To start web application there is ``AppRunner`` and site classes. Runner is a storage for running application, sites are for running application on specific TCP or Unix socket, e.g.:: runner = web.AppRunner(app) await runner.setup() site = web.TCPSite(runner, 'localhost', 8080) await site.start() # wait for finish signal await runner.cleanup() .. versionadded:: 3.0 :class:`AppRunner` and :class:`TCPSite` / :class:`UnixSite` / :class:`SockSite` are added in aiohttp 3.0 .. class:: AppRunner(app, *, handle_signals=False, **kwargs) A runner for :class:`Application`. Used with conjunction with sites to serve on specific port. :param Application app: web application instance to serve. :param bool handle_signals: add signal handlers for :data:`signal.SIGINT` and :data:`signal.SIGTERM` (``False`` by default). :param kwargs: named parameters to pass into :meth:`Application.make_handler`. 
.. attribute:: app Read-only attribute for accessing to :class:`Application` served instance. .. attribute:: server Low-level web :class:`Server` for handling HTTP requests, read-only attribute. .. attribute:: sites A read-only :class:`set` of served sites (:class:`TCPSite` / :class:`UnixSite` / :class:`SockSite` instances). .. comethod:: setup() Initialize application. Should be called before adding sites. The method calls :attr:`Application.on_startup` registered signals. .. comethod:: cleanup() Stop handling all registered sites and cleanup used resources. :attr:`Application.on_shutdown` and :attr:`Application.on_cleanup` signals are called internally. .. class:: BaseSite An abstract class for handled sites. .. attribute:: name An identifier for site, read-only :class:`str` property. Could be an handled URL or UNIX socket path. .. comethod:: start() Start handling a site. .. comethod:: stop() Stop handling a site. .. class:: TCPSite(runner, host=None, port=None, *, \ shutdown_timeout=60.0, ssl_context=None, \ backlog=128, reuse_address=None, reuse_port=None) Serve a runner on TCP socket. :param runner: a runner to serve. :param str host: HOST to listen on, ``'0.0.0.0'`` if ``None`` (default). :param int port: PORT to listed on, ``8080`` if ``None`` (default). :param float shutdown_timeout: a timeout for closing opened connections on :meth:`BaseSite.stop` call. :param ssl_context: a :class:`ssl.SSLContext` instance for serving SSL/TLS secure server, ``None`` for plain HTTP server (default). :param int backlog: a number of unaccepted connections that the system will allow before refusing new connections, see :meth:`socket.listen` for details. ``128`` by default. :param bool reuse_address: tells the kernel to reuse a local socket in TIME_WAIT state, without waiting for its natural timeout to expire. If not specified will automatically be set to True on UNIX. 
:param bool reuse_port: tells the kernel to allow this endpoint to be bound to the same port as other existing endpoints are bound to, so long as they all set this flag when being created. This option is not supported on Windows. .. class:: UnixSite(runner, path, *, \ shutdown_timeout=60.0, ssl_context=None, \ backlog=128) Serve a runner on UNIX socket. :param runner: a runner to serve. :param str path: PATH to UNIX socket to listen. :param float shutdown_timeout: a timeout for closing opened connections on :meth:`BaseSite.stop` call. :param ssl_context: a :class:`ssl.SSLContext` instance for serving SSL/TLS secure server, ``None`` for plain HTTP server (default). :param int backlog: a number of unaccepted connections that the system will allow before refusing new connections, see :meth:`socket.listen` for details. ``128`` by default. .. class:: SockSite(runner, sock, *, \ shutdown_timeout=60.0, ssl_context=None, \ backlog=128) Serve a runner on a preexisting socket object. :param runner: a runner to serve. :param sock: :class:`socket.socket` to listen. :param float shutdown_timeout: a timeout for closing opened connections on :meth:`BaseSite.stop` call. :param ssl_context: a :class:`ssl.SSLContext` instance for serving SSL/TLS secure server, ``None`` for plain HTTP server (default). :param int backlog: a number of unaccepted connections that the system will allow before refusing new connections, see :meth:`socket.listen` for details. ``128`` by default. Utilities --------- .. class:: FileField A :class:`~collections.namedtuple` instance that is returned as multidict value by :meth:`Request.POST` if field is uploaded file. .. attribute:: name Field name .. attribute:: filename File name as specified by uploading (client) side. .. attribute:: file An :class:`io.IOBase` instance with content of uploaded file. .. attribute:: content_type *MIME type* of uploaded file, ``'text/plain'`` by default. .. seealso:: :ref:`aiohttp-web-file-upload` .. 
function:: run_app(app, *, host=None, port=None, path=None, \ sock=None, shutdown_timeout=60.0, \ ssl_context=None, print=print, backlog=128, \ access_log_class=aiohttp.helpers.AccessLogger, \ access_log_format=aiohttp.helpers.AccessLogger.LOG_FORMAT, \ access_log=aiohttp.log.access_logger, \ handle_signals=True, \ reuse_address=None, \ reuse_port=None) A utility function for running an application, serving it until keyboard interrupt and performing a :ref:`aiohttp-web-graceful-shutdown`. Suitable as handy tool for scaffolding aiohttp based projects. Perhaps production config will use more sophisticated runner but it good enough at least at very beginning stage. The server will listen on any host or Unix domain socket path you supply. If no hosts or paths are supplied, or only a port is supplied, a TCP server listening on 0.0.0.0 (all hosts) will be launched. Distributing HTTP traffic to multiple hosts or paths on the same application process provides no performance benefit as the requests are handled on the same event loop. See :doc:`deployment` for ways of distributing work for increased performance. :param app: :class:`Application` instance to run :param str host: TCP/IP host or a sequence of hosts for HTTP server. Default is ``'0.0.0.0'`` if *port* has been specified or if *path* is not supplied. :param int port: TCP/IP port for HTTP server. Default is ``8080`` for plain text HTTP and ``8443`` for HTTP via SSL (when *ssl_context* parameter is specified). :param str path: file system path for HTTP server Unix domain socket. A sequence of file system paths can be used to bind multiple domain sockets. Listening on Unix domain sockets is not supported by all operating systems. :param socket sock: a preexisting socket object to accept connections on. A sequence of socket objects can be passed. :param int shutdown_timeout: a delay to wait for graceful server shutdown before disconnecting all open client sockets hard way. 
A system with properly :ref:`aiohttp-web-graceful-shutdown` implemented never waits for this timeout but closes a server in a few milliseconds. :param ssl_context: :class:`ssl.SSLContext` for HTTPS server, ``None`` for HTTP connection. :param print: a callable compatible with :func:`print`. May be used to override STDOUT output or suppress it. Passing ``None`` disables output. :param int backlog: the number of unaccepted connections that the system will allow before refusing new connections (``128`` by default). :param access_log_class: class for ``access_logger``. Default: :data:`aiohttp.helpers.AccessLogger`. Must be a subclass of :class:`aiohttp.abc.AbstractAccessLogger`. :param access_log: :class:`logging.Logger` instance used for saving access logs. Use ``None`` for disabling logs for sake of speedup. :param access_log_format: access log format, see :ref:`aiohttp-logging-access-log-format-spec` for details. :param bool handle_signals: override signal TERM handling to gracefully exit the application. :param bool reuse_address: tells the kernel to reuse a local socket in TIME_WAIT state, without waiting for its natural timeout to expire. If not specified will automatically be set to True on UNIX. :param bool reuse_port: tells the kernel to allow this endpoint to be bound to the same port as other existing endpoints are bound to, so long as they all set this flag when being created. This option is not supported on Windows. .. versionadded:: 3.0 Support *access_log_class* parameter. Support *reuse_address*, *reuse_port* parameters. Constants --------- .. class:: ContentCoding An :class:`enum.Enum` class of available Content Codings. .. attribute:: deflate *DEFLATE compression* .. attribute:: gzip *GZIP compression* .. attribute:: identity *no compression* Middlewares ----------- Normalize path middleware ^^^^^^^^^^^^^^^^^^^^^^^^^ .. function:: normalize_path_middleware(*, \ append_slash=True, merge_slashes=True) Middleware that normalizes the path of a request. 
By normalizing it means: - Add a trailing slash to the path. - Double slashes are replaced by one. The middleware returns as soon as it finds a path that resolves correctly. The order if all enabled is: 1. *merge_slashes* 2. *append_slash* 3. both *merge_slashes* and *append_slash* If the path resolves with at least one of those conditions, it will redirect to the new path. If *append_slash* is ``True`` append slash when needed. If a resource is defined with trailing slash and the request comes without it, it will append it automatically. If *merge_slashes* is ``True``, merge multiple consecutive slashes in the path into one. aiohttp-3.0.1/docs/whats_new_1_1.rst0000666000000000000000000001046513240304665015447 0ustar 00000000000000========================= What's new in aiohttp 1.1 ========================= YARL and URL encoding ====================== Since aiohttp 1.1 the library uses :term:`yarl` for URL processing. New API ------- :class:`yarl.URL` gives handy methods for URL operations etc. Client API still accepts :class:`str` everywhere *url* is used, e.g. ``session.get('http://example.com')`` works as well as ``session.get(yarl.URL('http://example.com'))``. Internal API has been switched to :class:`yarl.URL`. :class:`aiohttp.CookieJar` accepts :class:`~yarl.URL` instances only. On server side has added :class:`web.Request.url` and :class:`web.Request.rel_url` properties for representing relative and absolute request's URL. URL using is the recommended way, already existed properties for retrieving URL parts are deprecated and will be eventually removed. Redirection web exceptions accepts :class:`yarl.URL` as *location* parameter. :class:`str` is still supported and will be supported forever. Reverse URL processing for *router* has been changed. The main API is :class:`aiohttp.web.Request.url_for(name, **kwargs)` which returns a :class:`yarl.URL` instance for named resource. 
It does not support *query args* but adding *args* is trivial: ``request.url_for('named_resource', param='a').with_query(arg='val')``. The method returns a *relative* URL, absolute URL may be constructed by ``request.url.join(request.url_for(...)`` call. URL encoding ------------ YARL encodes all non-ASCII symbols on :class:`yarl.URL` creation. Thus ``URL('https://www.python.org/путь')`` becomes ``'https://www.python.org/%D0%BF%D1%83%D1%82%D1%8C'``. On filling route table it's possible to use both non-ASCII and percent encoded paths:: app.router.add_get('/путь', handler) and:: app.router.add_get('/%D0%BF%D1%83%D1%82%D1%8C', handler) are the same. Internally ``'/путь'`` is converted into percent-encoding representation. Route matching also accepts both URL forms: raw and encoded by converting the route pattern to *canonical* (encoded) form on route registration. Sub-Applications ================ Sub applications are designed for solving the problem of the big monolithic code base. Let's assume we have a project with own business logic and tools like administration panel and debug toolbar. Administration panel is a separate application by its own nature but all toolbar URLs are served by prefix like ``/admin``. Thus we'll create a totally separate application named ``admin`` and connect it to main app with prefix:: admin = web.Application() # setup admin routes, signals and middlewares app.add_subapp('/admin/', admin) Middlewares and signals from ``app`` and ``admin`` are chained. It means that if URL is ``'/admin/something'`` middlewares from ``app`` are applied first and ``admin.middlewares`` are the next in the call chain. The same is going for :attr:`~aiohttp.web.Application.on_response_prepare` signal -- the signal is delivered to both top level ``app`` and ``admin`` if processing URL is routed to ``admin`` sub-application. 
Common signals like :attr:`~aiohttp.web.Application.on_startup`, :attr:`~aiohttp.web.Application.on_shutdown` and :attr:`~aiohttp.web.Application.on_cleanup` are delivered to all registered sub-applications. The passed parameter is sub-application instance, not top-level application. Third level sub-applications can be nested into second level ones -- there are no limitation for nesting level. Url reversing ------------- Url reversing for sub-applications should generate urls with proper prefix. But for getting URL sub-application's router should be used:: admin = web.Application() admin.add_get('/resource', handler, name='name') app.add_subapp('/admin/', admin) url = admin.router['name'].url_for() The generated ``url`` from example will have a value ``URL('/admin/resource')``. Application freezing ==================== Application can be used either as main app (``app.make_handler()``) or as sub-application -- not both cases at the same time. After connecting application by ``.add_subapp()`` call or starting serving web-server as toplevel application the application is **frozen**. It means that registering new routes, signals and middlewares is forbidden. Changing state (``app['name'] = 'value'``) of frozen application is deprecated and will be eventually removed. aiohttp-3.0.1/docs/whats_new_3_0.rst0000666000000000000000000000461413240304665015447 0ustar 00000000000000.. _aiohttp_whats_new_3_0: ========================= What's new in aiohttp 3.0 ========================= async/await everywhere ====================== The main change is dropping ``yield from`` support and using ``async``/``await`` everywhere. Farewell, Python 3.4. The minimal supported Python version is **3.5.3** now. Why not *3.5.0*? 
Because *3.5.3* has a crucial change: :func:`asyncio.get_event_loop()` returns the running loop instead of *default*, which may be different, e.g.:: loop = asyncio.new_event_loop() loop.run_until_complete(f()) Note, :func:`asyncio.set_event_loop` was not called and default loop is not equal to actually executed one. Application Runners =================== People constantly asked about ability to run aiohttp servers together with other asyncio code, but :func:`aiohttp.web.run_app` is blocking synchronous call. aiohttp had support for starting the application without ``run_app`` but the API was very low-level and cumbersome. Now application runners solve the task in a few lines of code, see :ref:`aiohttp-web-app-runners` for details. Client Tracing ============== Other long awaited feature is tracing client request life cycle to figure out when and why client request spends a time waiting for connection establishment, getting server response headers etc. Now it is possible by registering special signal handlers on every request processing stage. :ref:`aiohttp-client-tracing` provides more info about the feature. HTTPS support ============= Unfortunately asyncio has a bug with checking SSL certificates for non-ASCII site DNS names, e.g. `https://историк.рф `_ or `https://雜草工作室.香港 `_. The bug has been fixed in upcoming Python 3.7 only (the change requires breaking backward compatibility in :mod:`ssl` API). aiohttp installs a fix for older Python versions (3.5 and 3.6). Dropped obsolete API ==================== A switch to new major version is a great chance for dropping already deprecated features. The release dropped a lot, see :ref:`aiohttp_changes` for details. All removals were already marked as deprecated or related to very low level implementation details. If user code did not raise :exc:`DeprecationWarning` it is compatible with aiohttp 3.0 most likely. Summary ======= Enjoy aiohttp 3.0 release! The full change log is here: :ref:`aiohttp_changes`. 
aiohttp-3.0.1/docs/_static/0000777000000000000000000000000013240305035013666 5ustar 00000000000000aiohttp-3.0.1/docs/_static/aiohttp-icon-128x128.png0000666000000000000000000002075713240304665017750 0ustar 00000000000000‰PNG  IHDR€€Ã>aËsBIT|dˆ pHYs½=ÀžtEXtSoftwarewww.inkscape.org›î< IDATxœíyx”ÕÙð÷™I jpƒ Za&A î­‚+$¨K@fÒÚâkµjßV[«}Km]ê×Ö¥êký’ Áª8“ ¢BÕº‚ ™¡bQq«$ÉÌsÌ!óL2If&ûý®‹ë"Ï9Ï9÷̹ç<Ï9ç^„o c¼ #œ"c-ô‰r"û¡: ò¢ø-Í@Ø| l>FؤʃÙQ]»®®|c_|žL"}-@oqÏ\²ÆrpP|+CÝ}¬^ÑçDòŸ_³àŒ3ÔWVèw P:wUÞŽíOÔš"ÊW‹Ôú¸ª,0hÿçVß;¡­åéýB*+ëë ŠNr&°O_Ë”„OtÑèðö•‹Oöµ@]‘Ó 0úÜ%æÇ,Kø/‘}-OwøÐäEw¿þà”÷ûZžd䤸«Ç£üø>`zÑÔ—À[¨|ˆè‡¨| ú±@TE¿«½¢ed/ƒ5@aˆ(C988س2DÌÖÔ–½Ø‹v2BN)€Ë8C„k€ãzpûfT_Ãó–8^·Ô¬[W3ù_é+þ¢9NTŽTC)ÊwºÝðœX\ÛTçy2r¥ƒœP€boÉ*z#pB7nÛ¦ð”YꈘeÙžfKª–ŽV+z’ñ z*00å›Eþ®*W…jËþ‘9 S¥/;wÍiØ_"z+Š7EY¶~µ´¦õ Çë—–µfXÄ”(ë/ ·pº"^” bû ]¡óÅä]Ñ—KÉ>Q€ÊÊzǺüŸ×{¥pËë*rg«JýúÚ²gX¼^qÄœG¾äÏPÕ‹€ânùåªàa«î¡ºÚêºzzɺŒÕpŒ±ô.`|U-TÌŸ›êʟɆléÆ]埨*— L¥«ïZY…C. .(_•ébdM&N\áÜ:|ÛÕ¨\ 8:©ª  ªæ7¡ºò׳%_&)™p[–^â¥óÏEô†à¨Õ×ek6ÈŠŒñúuˆ©=ª«º*rN¨¦üoÙ+Û¸¼ Gˆè-À©×”'ŒqÎÊÆ»AÆÀU˜*Ê|:Öo@¹ áZàs‡CN~c~ùÛ™–­¯(öúËTäOÀ¨du>IJ¼M §þ=“²ôf“¥ T\¾Àµ¢WKÏ -¬ðwÕV|¹ø8Q•Ó¾)+‚Îpûüg‚Ì#ùÙÃ3μ¶i¯Ï;ó‹tõ™6Sõø(“wdwº¥¥sWåµ6ô$§zo„ Ž]½ø´/»Ófo(öQái`?Q=»©®¢1[}§ƒQS [gä…÷Æ´6?Ýq»×=+0Õ¨Lç뽠ЦÉeÁCW•u罫[ àö~‹ðk›¢/Œ8Ž]S3å­î´—ÆxF8DW ¨ª•©ì2æ%³nµx°ƒcK“3ê˜QugÍ™jI¼l¢õˆ,Rˈ`½Ì×®m_#\¬ñܘª)+€Ëë?ZDž'Ñ AA¦kË©¶•n\³ý‰%O£|ôÜ`mÅ#võFMi,¸·õ=&ƒŽd¨ÂŸ‰òO…¢ªwwê»”ÎõîØ&¯‰p˜Mñ6`»X,B£‹ͪü³Eå›û#ªzB¨®âåTdIIâëý—±7ãº=Xë¹$•v2I‰¯qxT­§D8XQo¨¶â¡ö²ÒÊå{… Z/Qå2ºö*RDž~¬)_ž Y]¾Àyó’V#rsWæanoÃý¶+1‘5…ûMHÅM-¥UÀ›/Ä~ð›¶å]‘J™fMmÙ+: xW…nŸ€Û×àiÍo]¯Êu¤æR&¨ž„ênoÃ#‡Ï~xp:å<²²q¨åÕQee*¶ƒ¬‹ M%­-[.OEž.gל†ý¥Mß$ñÍ3,È„¦Úò¦T:ÊqyŸF <¢p6½ÛñÜh ž5 <‰_tŠ{{«p&È Ð“gçwHEªÔøÙ‹$>š[Ô¡cBó+6uv—3€DôVì,z”[smðBóÊ·“7 تp½?ñaYnSôd°Ö“³Û¯®ªÀm¢ü,CÍ·cÎxà­-[NÇ’éˆN#öˆÜ,]dL~`Í‚3š3$CRܾÀrlLÎU99TçYawOÒ—‘¸—nB[¹²ç"f–Ãg?šú"KúÚmÍ`~ca%(€¹°UÛ n”iç¹Z¬õÌ蘙£“Í‘tó$°P”Gšê<Ÿg¡¿”qWPÊ:^W¬£CµS_éxÝ~ˆgH¸jYÖu½1ƒ(gd¥•ËsÕHU,þG…)tøq æ@Â7aàší?˜–Ø2KsÝF_#³Ñ1VVúé MužÕvç!g—ø‡w¼˜¸ ŒÊùØ:0ê-i/Óì—Né~tl¢¶DŽ¨êœŽwS€ÊÊz‡À;VÑ×ú‰ ^¦âîŽÊ ¬ôÓCšêÊŸ´Dϧºz·1ßíX(6¦ àþt ˜AZ²ÓõUvúé)¢À= Wa¤{}én¶”𸆝´Z­yÉÌ“s ¬„r¥Óýõ\ âÏ#¶7±;jvã P:wU^<ãî -žüYúEL? 
¯e¥‡¼š~zÛóÏúx,±Ä:kâÄ;W;`Çö¿‡Ýö©Åƒ‘03dÃ.pCpg]úé5*òhâUÙ÷³áÍ;Ïv*€¨5ŦÚÖ’òÉT_£á–GŒîÆ©R“ÉöÓÉ€ÖüFbQÐwÃRÙ9Ö»(‰ <Z<}[¦L7¡ÅÓ·)zW»hÚ½æ$«Ÿö%JB„krûÿ Ä"aÚFݶè3;¿ž2 <à÷f¢mA®.œÖñ7§Q‘% ×TŽpU>¾´ÏÆow³¨Ú ä2«Ÿö¥ŠÌÒMã™ü¢ýþæ63ŽÓ]£àl;v>$1D«ðIÓBOÖͼÓA̘S/4MM¾iL^eËðÆü²w€ÄUœC¾V…£*(ÏÅ7ú%ÁÚŠ»EäÂ44õ2VtbÿÍ "ªèK‰Wccn@EìBšŠ¼é2Š…¶ÛñõpãFßÚñ™9±¿=÷;b»0õ%fŒ·ñ löÐE¥ÇV°¹€Ûç?]”Ë]u°Šž‡’JÖíÀýŒ08Ú“Ðõ9…¥$ÌÀàq³ýÜN‘±jó¨´Ñœ>úíŒÃfú‡Ä‚-ÉbéTW[!˜Ì7»á°hD¿§p„ˆ E(DõKDßEͪ‚"ëéÕ÷V´YÙ8´-ß:ÌŸ*+ëÇ÷‡ô/Ép:5&ÚþD£¸œzˆYжÐ|Ï?³ [FÈ3r°Ÿ9-¸ |7‹xÒ.£¾¶¸ìw•ÿZ”?®Í/¼ÈäþBFyãÕÿr¿=¡¡`×ë¢2ÒˆèH›{Þî¯/€ÅU LSá–Ђ²§{ÓÖÝ ¬¸®}ÝÜ/©®¶Ôæ Ì2b€‰whF6R2MñLÿáªzÊ*Z[þ§·í­\9)"Ê¥À`“ß–Ûæp] †„k0  íX "ýN\•õùj¤P‡S|Ýu®HFSçITUä‚b_C* rIPU†`HBU«ÿ)€ä¼¯"—¥;Ò¸ ?"*Ú«€L}‰`mM¼Æ`C,—n‡íçÿíÄâãÈÏGB5å÷¥»ý¦:Ï{ÀP¾ëòÎNwûYAÅîPoˆÁ&ÔˆŠôÈ=©/(öö¶û¶DágªŸ‚"½Ø$˜ÛºòÌIŒÚy*bY´wCTÓ‘:“¨p70弸LFX}oE‹ W‚~;Ü,¿ÈT?ÄN l@é àªjø0á¶l$clª-[<£ð«1Þ›ÕS¤ €QÉù÷ŒÆïˆêm@p›³è7ÙéUÔŠšË€|‡Ð¥ï}.¡jÙF3Ø™ ‰m²œaâÄNqZµ@ž Þt'Q茵–½&p?è w•b¶úí-FwߌÓj«¢¹­Ÿ Ûv½Â1 ?ë‹(%ΰ¹ øBTîØÕÂ6—Q{û—ƒÄøs9‚»Ê?Q+ Özœ²Ák‹Ë>Q¸VÁµuضõ… =ÀfL¥Ù($¼9KŽ>н½Q3øÈ6?êËóŠEûß ¬¹>]É2‰Úlø~j°QÍ’“ewQÃ] Ã~üÚâ²OúR–Õ÷Nh3èO!Ëw˜Óˆý˜n5 _¤¢gA¦nQìm˜ƒr®ÀŸ‚5ž†¾–`MmÅS K€ Kf’¦}ËÎ|¶:±9&ddÆÅI¸ÙþaV›Ž ;#Žõ¯-.ûÄUµd”ªþ‘5Û?•´ç!è 1?ÇaMŽZü‘.SÂö)‡t¼ ªïÕÄcB`D¶=нþ2·¯áåhT>PcVª‘çÛò­\¾À“¢ŽÅ€ÓŠF½ë—–µfS®®.*{‘ÛNq{IRçô5*ؤ©5*ŒÁØ)@¡Ûø~IÕÒÑéHLÔ.Ÿÿw*Ò`†^NŽ@äj„ 7ßlF¸5ßWwSµlÚPï‚Ù ®Ùþƒ$*]¹UoبÂ&`£ ­(1ïç‡ó7õ&G€Ëø‰@JîV*â Õ”/ìi_™Äí T DøŸ¦Ïõ}-Ï®¸}þÓAgÛ¢ÑaÏLõ‰–Á#ò¢¨5BaÈHà ì³Z~lDdjmÙ¤èFÐbé¦dfÕñºè<³ø®lÙ#ÜrHOƒ)f·¯á9`ÑȘàƒßÏ›J·Ï%HÇŸk=Cœ±µt` °{.‘¼`MyB\ bo`oËèA #Ä’‘ #ŒèAŠŒ@­c@* ¸}íÄ^67ª°1`a£Âá¤>øû•?p2`› oÅ4\Š¥/átÞ Ììk‰Úd¼Í†Éh'¼ŒvPÕ++ëÍ¡ãqñ>Þ°ë,ž”apˆeô@cÉ–áQ庌–Ýæ(rRb›ÝUÿ‹r~ñÌÇînZ85Á3·/P°ñmˆ9‹8ý‡’pƽ×[ÇÝŠ†K/þ/‰W8¿ØÇð¨±RÑj [©ßTuîÔÏ6Fò~miÛ9jÌín_àYD¾+ªCâ;®ÏbEïÉf6ô’ª¥£-³)zâ¾"ùÏcãH©01Ý­\9)òúƒSÞoª+•nÿBŒQ»UKΰæ;/l^$æn÷TKÜù·q¼æöùo®¬¬· Å—~,+z²Ýe ;¾V€¸ãcÂK13(‚Ú…£íUs®Û8+[_`wq¯Ÿpt±Ô€\¾.¿ðÏÙG„É6—_oû´‹Aˆ.M¨¦zÊè.ÉØ”ÛTçyRDSì¤ð>èa[—_ø®»ªáò\rØpyg ¤ê‘ü“bo £;‡%³–)ØEvß9Ö;@U a“9,XðR‹ëó…DÌ©n®p°Õ›%?òOWUàž\Ø‹‘ÿîN}2j[ÕÈd``B¿–îL°S Úÿ9쎆Uì2†¤PMÅFðÐyŒ¿÷¬¨99¸¨ìÝÐâéÛBµž»‚µåcU™Œ°R”YMÅ^ÿSÅUþï÷Åã¡tx> 
î0)v_fÕé6—?Ù¾Óõ·ýþb_à^…ަÕ¬èðLûÈ—ÌZVdEÛV"IÌHe»Æœ8ŒD¼uÿ´¤Ñ9cél¹d±ª (w9óÛî{}Þ™_dRîvRÜQMÀ`¾½¦¶,1ÝKoå©||Él:nM߬õ\ÔþÇnëqA)ÒQœˆsÑ—–5ßya»ûÒƒYªõtëÙOôxéè.¹:¯Õ1G•‹E¸%Ò–Wíö5,PäöPmÙÚ ‰ÎsùV[›ô(“¹…5ßíkxQ±^u]½fÁÔô¬rò£3I|DeÑ®念ÃÛW®Ë/ú'è·w»Ëp>èí™´Àq½3¾d_#Úã\DñYâvª«ï,~çè3ÀºDÑ ½Àí <¥Âí¡Q«»æÖuÏ\²Ÿˆ9Õq:òB*ne®ÊúA&Ð÷T¬“Q&EÚ8Bl#¬wÉW .`’ X–àö>WxUÐWÕQåÕuužõÝýîµs’ÙÐtØ+Ïí^¯noÃ5Ä6h:4hÊšjËì^ÓB{¿b阦…o¦«Ý’ª¥££VôbÎöÞÑ;ó[üµ5ÇLÿÃ׿¹gÈæÂŸîšý¸ÊúÛò OP˜„0 8ŠøG•·EXºBc.KU6UùM¨®ü†_ãð¨XãRߺ=p—ªÿFä5E_5–®V‡¼:fGËÛÉ‚V¸¼IKªÝ‘«ƒµžßîz!QÎ}ôÛ8œè¨Ñ"Ö”OLõÃu·/ð04XëI0\H£|{`ýPb+ˆQÄ"‹Äæ;Påzƒ<p=å˜öà ï‹Ê `…Ãi=ýÆüŠÍí÷•V.ß«5¿õ l]î; ¼k$o\²äR®9 û›°5ÞÂŒÑñÄ2·îÚî6bÛñ¯*¬6È«ƒ7®[¹rRÄ]Õ𪚌8:rWy±ûðžÀâxÒÅÝe¶ôø¦…iDzEп4ÕV\œîöw£ºÚ¸ß9ªLÐ{:üÊløPái`…1ÖŠ®žÑ®ª%£DO™Õ}àpÈ)Ýõb>|öÃVÁx±t¼JñÄÎXÚÇq;ÐL !¼, Ö–ŸÛ±MÛCÁüA±@Å\6Å{I[žN&æ”±GÌNª«­ Š}Ïé\~aÄXS3¥[±C5ÓÖ»g.9c~Mì…:qŽ9ãùeÝvaû>.ÿb³Î޼Öñ"ŒWt¼ gb“ FÔ>ãKR³¯$I#-+j&¬}°,­aÙÝU…(Ó ŠtÈê{+²’ôÁí V4r~:Û-~û¨ÙØE|…'›êÊÇ1N§É£ÕÒ«°9&¸)]‡DBcòƆj+ê¹ý“P­ç+jŽî–µŠîŒÞ^:wUž{ý„ÚNœMn¹(IYJô: ÌÈ9+ jk^F’·ZÐÀáíÓ;úô»|þß ò+QöùOZÿ'¥ºÚ¸ß™ð¦BÞÐÍE‡~uضüÍ<”lÚGäïÛœ…“{%µ3@Œ÷çMÚáÌk›†HWñ|•_ØXZ¹|·8F¤À85'cfêjK”[F~2¬å‚Öfy"éàíùÓÒ"·× ðú¼3¿P'g$W&¶æ·¾âªòk¿ J @8’›A)û‚¯ò‹æ z˜Î7Æš'“{–gWÒ¢¡yå[œÎðIØ‘Ä8TT^(®òÿ€¸óÿ ¢pó÷él¯Däïá‚“BóÊ·¤«Ï´‡‚5¥±`à>VUñ×èE^¸Ó`Ž[S[f—Úô?†ã*ë~•?ð÷ —$«£°dÏpËÌtÇGJÛ ÐÎú¥e­‡‡[ÎmË·Gf ±½ÿèø PRåÿï¼Â×;|àž±á–³3+ƒÁ UÜU W¢\G§û rwA‘õól„æ®ÊúARPx ±¼„É<„#¢\ÝTçÉØAYÆ£{NDtaN›UôסšŠù™–'pÍôWˆ‘;ˆ…ÝKÆf#Ììé_ªd%lɬeûZVdhçÖ/ÊrË2¿L·ßA®Pì ”ªð{ºˆ),ð¸3lfg#"z㫸«— r ɧ¼vžÄÈ•Á婤{Ïy\Uþq‚\…rçDµúºîžêõ”¬'‡ryýG‹È}Ä¢fu†*<†1-˜²¢ÿ%³V)žé?Ñ2òËN6tv"ð’ˆþ¤;Æé O²ƒ•Î]•×Ú¼å à*:·hgð—‚pÁütm€dŠ#+‡¶åYç!ú#TLÝ?åʦÃVÝ—­_ý®ôiz¸XXs3èôeÙ,Ue1m-Ðâéž«g‹ÒÊå{µ„Ï=e6¹mP„ûÃQýU*Ö»™"'òÆmÛo {QC·£² ѧ-Ëzz튵Ù|L”T-m™¬*;ëN°§'-#W¯]PþR¦äK•œP€v\ÞÀ$›å!J`IDAT1r ª'õàöžUôU0Mê°Ö„æWlJ‡\£Ï]r`¾#oœJôUŽähì’nwÍ ±¬ks%†0䘴7Aÿp6½0Z!æÑôO`“À‡ª²Yáßm¶Œî0V,I¶ { ìiA‘û#AG‚ŒÀ&ØR7ˆ€ü í¹¸ªÉIhgLÕã8­Èlæbo™Ë| B­eÉ_ÖÕ•w;|\¶ÈiØIuµq¯/=d:pj“+'ÐA˪oýê³}ñVß]ú‡ìBee½ã­‚Âã-•)"ÖdU9‚¾û ¼†ò¸ª.ÙþB²È]¹J¿S€Ž>ûáÁ޶¼ãqÈ ¢ Œ2@úSQ}Ã^Ì?4ìx¾=êv¥ß+€ãfû‡E£¸DÌÁz0ªß6Êþ*2˜X ÕBb/—ížÍÿ&æçÐ |ª°ô#`“QÙ 
F68Œµ¶cˆµoÿð"©¹ODIEND®B`‚aiohttp-3.0.1/examples/0000777000000000000000000000000013240305035013126 5ustar 00000000000000aiohttp-3.0.1/examples/background_tasks.py0000666000000000000000000000360113240304665017034 0ustar 00000000000000#!/usr/bin/env python3 """Example of aiohttp.web.Application.on_startup signal handler""" import asyncio import aioredis from aiohttp.web import Application, WebSocketResponse, run_app async def websocket_handler(request): ws = WebSocketResponse() await ws.prepare(request) request.app['websockets'].append(ws) try: async for msg in ws: print(msg) await asyncio.sleep(1) finally: request.app['websockets'].remove(ws) return ws async def on_shutdown(app): for ws in app['websockets']: await ws.close(code=999, message='Server shutdown') async def listen_to_redis(app): try: sub = await aioredis.create_redis(('localhost', 6379), loop=app.loop) ch, *_ = await sub.subscribe('news') async for msg in ch.iter(encoding='utf-8'): # Forward message to all connected websockets: for ws in app['websockets']: await ws.send_str('{}: {}'.format(ch.name, msg)) print("message in {}: {}".format(ch.name, msg)) except asyncio.CancelledError: pass finally: print('Cancel Redis listener: close connection...') await sub.unsubscribe(ch.name) await sub.quit() print('Redis connection closed.') async def start_background_tasks(app): app['redis_listener'] = app.loop.create_task(listen_to_redis(app)) async def cleanup_background_tasks(app): print('cleanup background tasks...') app['redis_listener'].cancel() await app['redis_listener'] async def init(loop): app = Application() app['websockets'] = [] app.router.add_get('/news', websocket_handler) app.on_startup.append(start_background_tasks) app.on_cleanup.append(cleanup_background_tasks) app.on_shutdown.append(on_shutdown) return app loop = asyncio.get_event_loop() app = loop.run_until_complete(init(loop)) run_app(app) aiohttp-3.0.1/examples/basic_srv.py0000666000000000000000000000271113240304665015464 0ustar 
00000000000000#!/usr/bin/env python3 """Basic HTTP server with minimal setup""" import asyncio from urllib.parse import parse_qsl, urlparse import aiohttp import aiohttp.server from aiohttp import MultiDict class HttpRequestHandler(aiohttp.server.ServerHttpProtocol): async def handle_request(self, message, payload): response = aiohttp.Response( self.writer, 200, http_version=message.version) get_params = MultiDict(parse_qsl(urlparse(message.path).query)) if message.method == 'POST': post_params = await payload.read() else: post_params = None content = "

    It Works!

    " if get_params: content += "

    Get params

    " + str(get_params) + "

    " if post_params: content += "

    Post params

    " + str(post_params) + "

    " bcontent = content.encode('utf-8') response.add_header('Content-Type', 'text/html; charset=UTF-8') response.add_header('Content-Length', str(len(bcontent))) response.send_headers() response.write(bcontent) await response.write_eof() if __name__ == '__main__': loop = asyncio.get_event_loop() f = loop.create_server( lambda: HttpRequestHandler(debug=True, keep_alive=75), '0.0.0.0', 8080) srv = loop.run_until_complete(f) print('serving on', srv.sockets[0].getsockname()) try: loop.run_forever() except KeyboardInterrupt: pass aiohttp-3.0.1/examples/client_auth.py0000666000000000000000000000105013240304665016003 0ustar 00000000000000import asyncio import aiohttp async def fetch(session): print('Query http://httpbin.org/basic-auth/andrew/password') async with session.get( 'http://httpbin.org/basic-auth/andrew/password') as resp: print(resp.status) body = await resp.text() print(body) async def go(loop): async with aiohttp.ClientSession( auth=aiohttp.BasicAuth('andrew', 'password'), loop=loop) as session: await fetch(session) loop = asyncio.get_event_loop() loop.run_until_complete(go(loop)) aiohttp-3.0.1/examples/client_json.py0000666000000000000000000000070013240304665016014 0ustar 00000000000000import asyncio import aiohttp async def fetch(session): print('Query http://httpbin.org/get') async with session.get( 'http://httpbin.org/get') as resp: print(resp.status) data = await resp.json() print(data) async def go(loop): async with aiohttp.ClientSession(loop=loop) as session: await fetch(session) loop = asyncio.get_event_loop() loop.run_until_complete(go(loop)) loop.close() aiohttp-3.0.1/examples/client_ws.py0000666000000000000000000000407513240304665015505 0ustar 00000000000000#!/usr/bin/env python3 """websocket cmd client for wssrv.py example.""" import argparse import asyncio import signal import sys import aiohttp async def start_client(loop, url): name = input('Please enter your name: ') # input reader def stdin_callback(): line = 
sys.stdin.buffer.readline().decode('utf-8') if not line: loop.stop() else: ws.send_str(name + ': ' + line) loop.add_reader(sys.stdin.fileno(), stdin_callback) async def dispatch(): while True: msg = await ws.receive() if msg.type == aiohttp.WSMsgType.TEXT: print('Text: ', msg.data.strip()) elif msg.type == aiohttp.WSMsgType.BINARY: print('Binary: ', msg.data) elif msg.type == aiohttp.WSMsgType.PING: ws.pong() elif msg.type == aiohttp.WSMsgType.PONG: print('Pong received') else: if msg.type == aiohttp.WSMsgType.CLOSE: await ws.close() elif msg.type == aiohttp.WSMsgType.ERROR: print('Error during receive %s' % ws.exception()) elif msg.type == aiohttp.WSMsgType.CLOSED: pass break # send request async with aiohttp.ws_connect(url, autoclose=False, autoping=False) as ws: await dispatch() ARGS = argparse.ArgumentParser( description="websocket console client for wssrv.py example.") ARGS.add_argument( '--host', action="store", dest='host', default='127.0.0.1', help='Host name') ARGS.add_argument( '--port', action="store", dest='port', default=8080, type=int, help='Port number') if __name__ == '__main__': args = ARGS.parse_args() if ':' in args.host: args.host, port = args.host.split(':', 1) args.port = int(port) url = 'http://{}:{}'.format(args.host, args.port) loop = asyncio.get_event_loop() loop.add_signal_handler(signal.SIGINT, loop.stop) loop.create_task(start_client(loop, url)) loop.run_forever() aiohttp-3.0.1/examples/cli_app.py0000666000000000000000000000261613240304665015124 0ustar 00000000000000""" Example of serving an Application using the `aiohttp.web` CLI. Serve this app using:: $ python -m aiohttp.web -H localhost -P 8080 --repeat 10 cli_app:init \ > "Hello World" Here ``--repeat`` & ``"Hello World"`` are application specific command-line arguments. `aiohttp.web` only parses & consumes the command-line arguments it needs (i.e. ``-H``, ``-P`` & ``entry-func``) and passes on any additional arguments to the `cli_app:init` function for processing. 
""" from argparse import ArgumentParser from aiohttp.web import Application, Response def display_message(req): args = req.app["args"] text = "\n".join([args.message] * args.repeat) return Response(text=text) def init(argv): arg_parser = ArgumentParser( prog="aiohttp.web ...", description="Application CLI", add_help=False ) # Positional argument arg_parser.add_argument( "message", help="message to print" ) # Optional argument arg_parser.add_argument( "--repeat", help="number of times to repeat message", type=int, default="1" ) # Avoid conflict with -h from `aiohttp.web` CLI parser arg_parser.add_argument( "--app-help", help="show this message and exit", action="help" ) args = arg_parser.parse_args(argv) app = Application() app["args"] = args app.router.add_get('/', display_message) return app aiohttp-3.0.1/examples/curl.py0000666000000000000000000000165113240304665014460 0ustar 00000000000000#!/usr/bin/env python3 import argparse import asyncio import aiohttp async def curl(url): async with aiohttp.ClientSession() as session: async with session.request('GET', url) as response: print(repr(response)) chunk = await response.content.read() print('Downloaded: %s' % len(chunk)) if __name__ == '__main__': ARGS = argparse.ArgumentParser(description="GET url example") ARGS.add_argument('url', nargs=1, metavar='URL', help="URL to download") ARGS.add_argument('--iocp', default=False, action="store_true", help="Use ProactorEventLoop on Windows") options = ARGS.parse_args() if options.iocp: from asyncio import events, windows_events el = windows_events.ProactorEventLoop() events.set_event_loop(el) loop = asyncio.get_event_loop() loop.run_until_complete(curl(options.url[0])) aiohttp-3.0.1/examples/fake_server.py0000666000000000000000000000737113240304665016014 0ustar 00000000000000import asyncio import pathlib import socket import ssl import aiohttp from aiohttp import web from aiohttp.resolver import DefaultResolver from aiohttp.test_utils import unused_port class FakeResolver: 
_LOCAL_HOST = {0: '127.0.0.1', socket.AF_INET: '127.0.0.1', socket.AF_INET6: '::1'} def __init__(self, fakes, *, loop): """fakes -- dns -> port dict""" self._fakes = fakes self._resolver = DefaultResolver(loop=loop) async def resolve(self, host, port=0, family=socket.AF_INET): fake_port = self._fakes.get(host) if fake_port is not None: return [{'hostname': host, 'host': self._LOCAL_HOST[family], 'port': fake_port, 'family': family, 'proto': 0, 'flags': socket.AI_NUMERICHOST}] else: return await self._resolver.resolve(host, port, family) class FakeFacebook: def __init__(self, *, loop): self.loop = loop self.app = web.Application(loop=loop) self.app.router.add_routes( [web.get('/v2.7/me', self.on_me), web.get('/v2.7/me/friends', self.on_my_friends)]) self.runner = None here = pathlib.Path(__file__) ssl_cert = here.parent / 'server.crt' ssl_key = here.parent / 'server.key' self.ssl_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) self.ssl_context.load_cert_chain(str(ssl_cert), str(ssl_key)) async def start(self): port = unused_port() self.runner = web.AppRunner(self.app) await self.runner.setup() site = web.TCPSite(self.runner, '127.0.0.1', port, ssl_context=self.ssl_context) await site.start() return {'graph.facebook.com': port} async def stop(self): await self.runner.cleanup() async def on_me(self, request): return web.json_response({ "name": "John Doe", "id": "12345678901234567" }) async def on_my_friends(self, request): return web.json_response({ "data": [ { "name": "Bill Doe", "id": "233242342342" }, { "name": "Mary Doe", "id": "2342342343222" }, { "name": "Alex Smith", "id": "234234234344" }, ], "paging": { "cursors": { "before": "QVFIUjRtc2c5NEl0ajN", "after": "QVFIUlpFQWM0TmVuaDRad0dt", }, "next": ("https://graph.facebook.com/v2.7/12345678901234567/" "friends?access_token=EAACEdEose0cB") }, "summary": { "total_count": 3 }}) async def main(loop): token = "ER34gsSGGS34XCBKd7u" fake_facebook = FakeFacebook(loop=loop) info = await 
fake_facebook.start() resolver = FakeResolver(info, loop=loop) connector = aiohttp.TCPConnector(loop=loop, resolver=resolver, verify_ssl=False) async with aiohttp.ClientSession(connector=connector, loop=loop) as session: async with session.get('https://graph.facebook.com/v2.7/me', params={'access_token': token}) as resp: print(await resp.json()) async with session.get('https://graph.facebook.com/v2.7/me/friends', params={'access_token': token}) as resp: print(await resp.json()) await fake_facebook.stop() loop = asyncio.get_event_loop() loop.run_until_complete(main(loop)) aiohttp-3.0.1/examples/legacy/0000777000000000000000000000000013240305035014372 5ustar 00000000000000aiohttp-3.0.1/examples/legacy/crawl.py0000666000000000000000000000607113240304665016070 0ustar 00000000000000#!/usr/bin/env python3 import asyncio import logging import re import signal import sys import urllib.parse import aiohttp class Crawler: def __init__(self, rooturl, loop, maxtasks=100): self.rooturl = rooturl self.loop = loop self.todo = set() self.busy = set() self.done = {} self.tasks = set() self.sem = asyncio.Semaphore(maxtasks, loop=loop) # connector stores cookies between requests and uses connection pool self.session = aiohttp.ClientSession(loop=loop) async def run(self): t = asyncio.ensure_future(self.addurls([(self.rooturl, '')]), loop=self.loop) await asyncio.sleep(1, loop=self.loop) while self.busy: await asyncio.sleep(1, loop=self.loop) await t await self.session.close() self.loop.stop() async def addurls(self, urls): for url, parenturl in urls: url = urllib.parse.urljoin(parenturl, url) url, frag = urllib.parse.urldefrag(url) if (url.startswith(self.rooturl) and url not in self.busy and url not in self.done and url not in self.todo): self.todo.add(url) await self.sem.acquire() task = asyncio.ensure_future(self.process(url), loop=self.loop) task.add_done_callback(lambda t: self.sem.release()) task.add_done_callback(self.tasks.remove) self.tasks.add(task) async def process(self, 
url): print('processing:', url) self.todo.remove(url) self.busy.add(url) try: resp = await self.session.get(url) except Exception as exc: print('...', url, 'has error', repr(str(exc))) self.done[url] = False else: if (resp.status == 200 and ('text/html' in resp.headers.get('content-type'))): data = (await resp.read()).decode('utf-8', 'replace') urls = re.findall(r'(?i)href=["\']?([^\s"\'<>]+)', data) asyncio.Task(self.addurls([(u, url) for u in urls])) resp.close() self.done[url] = True self.busy.remove(url) print(len(self.done), 'completed tasks,', len(self.tasks), 'still pending, todo', len(self.todo)) def main(): loop = asyncio.get_event_loop() c = Crawler(sys.argv[1], loop) asyncio.ensure_future(c.run(), loop=loop) try: loop.add_signal_handler(signal.SIGINT, loop.stop) except RuntimeError: pass loop.run_forever() print('todo:', len(c.todo)) print('busy:', len(c.busy)) print('done:', len(c.done), '; ok:', sum(c.done.values())) print('tasks:', len(c.tasks)) if __name__ == '__main__': if '--iocp' in sys.argv: from asyncio import events, windows_events sys.argv.remove('--iocp') logging.info('using iocp') el = windows_events.ProactorEventLoop() events.set_event_loop(el) main() aiohttp-3.0.1/examples/legacy/srv.py0000666000000000000000000001235413240304665015573 0ustar 00000000000000#!/usr/bin/env python3 """Simple server written using an event loop.""" import argparse import asyncio import logging import os import sys import aiohttp import aiohttp.server try: import ssl except ImportError: # pragma: no cover ssl = None class HttpRequestHandler(aiohttp.server.ServerHttpProtocol): async def handle_request(self, message, payload): print('method = {!r}; path = {!r}; version = {!r}'.format( message.method, message.path, message.version)) path = message.path if (not (path.isprintable() and path.startswith('/')) or '/.' in path): print('bad path', repr(path)) path = None else: path = '.' 
+ path if not os.path.exists(path): print('no file', repr(path)) path = None else: isdir = os.path.isdir(path) if not path: raise aiohttp.HttpProcessingError(code=404) for hdr, val in message.headers.items(): print(hdr, val) if isdir and not path.endswith('/'): path = path + '/' raise aiohttp.HttpProcessingError( code=302, headers=(('URI', path), ('Location', path))) response = aiohttp.Response( self.writer, 200, http_version=message.version) response.add_header('Transfer-Encoding', 'chunked') # content encoding accept_encoding = message.headers.get('accept-encoding', '').lower() if 'deflate' in accept_encoding: response.add_header('Content-Encoding', 'deflate') response.add_compression_filter('deflate') elif 'gzip' in accept_encoding: response.add_header('Content-Encoding', 'gzip') response.add_compression_filter('gzip') response.add_chunking_filter(1025) if isdir: response.add_header('Content-type', 'text/html') response.send_headers() response.write(b'
      \r\n') for name in sorted(os.listdir(path)): if name.isprintable() and not name.startswith('.'): try: bname = name.encode('ascii') except UnicodeError: pass else: if os.path.isdir(os.path.join(path, name)): response.write(b'
    • ' + bname + b'/
    • \r\n') else: response.write(b'
    • ' + bname + b'
    • \r\n') response.write(b'
    ') else: response.add_header('Content-type', 'text/plain') response.send_headers() try: with open(path, 'rb') as fp: chunk = fp.read(8192) while chunk: response.write(chunk) chunk = fp.read(8192) except OSError: response.write(b'Cannot open') await response.write_eof() if response.keep_alive(): self.keep_alive(True) ARGS = argparse.ArgumentParser(description="Run simple HTTP server.") ARGS.add_argument( '--host', action="store", dest='host', default='127.0.0.1', help='Host name') ARGS.add_argument( '--port', action="store", dest='port', default=8080, type=int, help='Port number') # make iocp and ssl mutually exclusive because ProactorEventLoop is # incompatible with SSL group = ARGS.add_mutually_exclusive_group() group.add_argument( '--iocp', action="store_true", dest='iocp', help='Windows IOCP event loop') group.add_argument( '--ssl', action="store_true", dest='ssl', help='Run ssl mode.') ARGS.add_argument( '--sslcert', action="store", dest='certfile', help='SSL cert file.') ARGS.add_argument( '--sslkey', action="store", dest='keyfile', help='SSL key file.') def main(): args = ARGS.parse_args() if ':' in args.host: args.host, port = args.host.split(':', 1) args.port = int(port) if args.iocp: from asyncio import windows_events sys.argv.remove('--iocp') logging.info('using iocp') el = windows_events.ProactorEventLoop() asyncio.set_event_loop(el) if args.ssl: here = os.path.join(os.path.dirname(__file__), 'tests') if args.certfile: certfile = args.certfile or os.path.join(here, 'sample.crt') keyfile = args.keyfile or os.path.join(here, 'sample.key') else: certfile = os.path.join(here, 'sample.crt') keyfile = os.path.join(here, 'sample.key') sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23) sslcontext.load_cert_chain(certfile, keyfile) else: sslcontext = None loop = asyncio.get_event_loop() f = loop.create_server( lambda: HttpRequestHandler(debug=True, keep_alive=75), args.host, args.port, ssl=sslcontext) svr = loop.run_until_complete(f) socks = svr.sockets 
print('serving on', socks[0].getsockname()) try: loop.run_forever() except KeyboardInterrupt: pass if __name__ == '__main__': main() aiohttp-3.0.1/examples/legacy/tcp_protocol_parser.py0000666000000000000000000001145113240304665021041 0ustar 00000000000000#!/usr/bin/env python3 """Protocol parser example.""" import argparse import asyncio import collections import aiohttp try: import signal except ImportError: signal = None MSG_TEXT = b'text:' MSG_PING = b'ping:' MSG_PONG = b'pong:' MSG_STOP = b'stop:' Message = collections.namedtuple('Message', ('tp', 'data')) def my_protocol_parser(out, buf): """Parser is used with StreamParser for incremental protocol parsing. Parser is a generator function, but it is not a coroutine. Usually parsers are implemented as a state machine. more details in asyncio/parsers.py existing parsers: * HTTP protocol parsers asyncio/http/protocol.py * websocket parser asyncio/http/websocket.py """ while True: tp = yield from buf.read(5) if tp in (MSG_PING, MSG_PONG): # skip line yield from buf.skipuntil(b'\r\n') out.feed_data(Message(tp, None)) elif tp == MSG_STOP: out.feed_data(Message(tp, None)) elif tp == MSG_TEXT: # read text text = yield from buf.readuntil(b'\r\n') out.feed_data(Message(tp, text.strip().decode('utf-8'))) else: raise ValueError('Unknown protocol prefix.') class MyProtocolWriter: def __init__(self, transport): self.transport = transport def ping(self): self.transport.write(b'ping:\r\n') def pong(self): self.transport.write(b'pong:\r\n') def stop(self): self.transport.write(b'stop:\r\n') def send_text(self, text): self.transport.write( 'text:{}\r\n'.format(text.strip()).encode('utf-8')) class EchoServer(asyncio.Protocol): def connection_made(self, transport): print('Connection made') self.transport = transport self.stream = aiohttp.StreamParser() asyncio.Task(self.dispatch()) def data_received(self, data): self.stream.feed_data(data) def eof_received(self): self.stream.feed_eof() def connection_lost(self, exc): 
print('Connection lost') async def dispatch(self): reader = self.stream.set_parser(my_protocol_parser) writer = MyProtocolWriter(self.transport) while True: try: msg = await reader.read() except aiohttp.ConnectionError: # client has been disconnected break print('Message received: {}'.format(msg)) if msg.type == MSG_PING: writer.pong() elif msg.type == MSG_TEXT: writer.send_text('Re: ' + msg.data) elif msg.type == MSG_STOP: self.transport.close() break async def start_client(loop, host, port): transport, stream = await loop.create_connection( aiohttp.StreamProtocol, host, port) reader = stream.reader.set_parser(my_protocol_parser) writer = MyProtocolWriter(transport) writer.ping() message = 'This is the message. It will be echoed.' while True: try: msg = await reader.read() except aiohttp.ConnectionError: print('Server has been disconnected.') break print('Message received: {}'.format(msg)) if msg.type == MSG_PONG: writer.send_text(message) print('data sent:', message) elif msg.type == MSG_TEXT: writer.stop() print('stop sent') break transport.close() def start_server(loop, host, port): f = loop.create_server(EchoServer, host, port) srv = loop.run_until_complete(f) x = srv.sockets[0] print('serving on', x.getsockname()) loop.run_forever() ARGS = argparse.ArgumentParser(description="Protocol parser example.") ARGS.add_argument( '--server', action="store_true", dest='server', default=False, help='Run tcp server') ARGS.add_argument( '--client', action="store_true", dest='client', default=False, help='Run tcp client') ARGS.add_argument( '--host', action="store", dest='host', default='127.0.0.1', help='Host name') ARGS.add_argument( '--port', action="store", dest='port', default=9999, type=int, help='Port number') if __name__ == '__main__': args = ARGS.parse_args() if ':' in args.host: args.host, port = args.host.split(':', 1) args.port = int(port) if (not (args.server or args.client)) or (args.server and args.client): print('Please specify --server or --client\n') 
ARGS.print_help() else: loop = asyncio.get_event_loop() if signal is not None: loop.add_signal_handler(signal.SIGINT, loop.stop) if args.server: start_server(loop, args.host, args.port) else: loop.run_until_complete(start_client(loop, args.host, args.port)) aiohttp-3.0.1/examples/lowlevel_srv.py0000666000000000000000000000104413240304665016232 0ustar 00000000000000import asyncio from aiohttp import web async def handler(request): return web.Response(text="OK") async def main(loop): server = web.Server(handler) await loop.create_server(server, "127.0.0.1", 8080) print("======= Serving on http://127.0.0.1:8080/ ======") # pause here for very long time by serving HTTP requests and # waiting for keyboard interruption await asyncio.sleep(100*3600) loop = asyncio.get_event_loop() try: loop.run_until_complete(main(loop)) except KeyboardInterrupt: pass loop.close() aiohttp-3.0.1/examples/server.crt0000666000000000000000000000211713240304665015157 0ustar 00000000000000-----BEGIN CERTIFICATE----- MIIDADCCAegCCQCgevpPMuTTLzANBgkqhkiG9w0BAQsFADBCMQswCQYDVQQGEwJV QTEQMA4GA1UECAwHVWtyYWluZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0cyBQ dHkgTHRkMB4XDTE2MDgwNzIzMTMwOFoXDTI2MDgwNTIzMTMwOFowQjELMAkGA1UE BhMCVUExEDAOBgNVBAgMB1VrcmFpbmUxITAfBgNVBAoMGEludGVybmV0IFdpZGdp dHMgUHR5IEx0ZDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOUgkn3j X/sdg6GGueGDHCM+snIUVY3fM6D4jXjyBhnT3TqKG1lJwCGYR11AD+2SJYppU+w4 QaF6YZwMeZBKy+mVQ9+CrVYyKQE7j9H8XgNEHV9BQzoragT8lia8eC5aOQzUeX8A xCSSbsnyT/X+S1IKdd0txLOeZOD6pWwJoc3dpDELglk2b1tzhyN2GjQv3aRHj55P x7127MeZyRXwODFpXrpbnwih4OqkA4EYtmqFbZttGEzMhd4Y5mkbyuRbGM+IE99o QJMvnIkjAfUo0aKnDrcAIkWCkwLIci9TIG6u3R1P2Tn+HYVntzQZ4BnxanbFNQ5S 9ARd3529EmO3BzUCAwEAATANBgkqhkiG9w0BAQsFAAOCAQEAXyiw1+YUnTEDI3C/ vq1Vn9pnwZALVQPiPlTqEGkl/nbq0suMmeZZG7pwrOJp3wr+sGwRAv9sPTro6srf Vj12wTo4LrTRKEDuS+AUJl0Mut7cPGIUKo+MGeZmmnDjMqcjljN3AO47ef4eWYo5 XGe4r4NDABEk5auOD/vQW5IiIMdmWsaMJ+0mZNpAV2NhAD/6ia28VvSL/yuaNqDW TYTUYHWLH08H6M6qrQ7FdoIDyYR5siqBukQzeqlnuq45bQ3ViYttNIkzZN4jbWJV 
/MFYLuJQ/fNoalDIC+ec0EIa9NbrfpoocJ8h6HlmWOqkES4QpBSOrkVid64Cdy3P JgiEWg== -----END CERTIFICATE----- aiohttp-3.0.1/examples/server.csr0000666000000000000000000000167013240304665015161 0ustar 00000000000000-----BEGIN CERTIFICATE REQUEST----- MIIChzCCAW8CAQAwQjELMAkGA1UEBhMCVUExEDAOBgNVBAgMB1VrcmFpbmUxITAf BgNVBAoMGEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDCCASIwDQYJKoZIhvcNAQEB BQADggEPADCCAQoCggEBAOUgkn3jX/sdg6GGueGDHCM+snIUVY3fM6D4jXjyBhnT 3TqKG1lJwCGYR11AD+2SJYppU+w4QaF6YZwMeZBKy+mVQ9+CrVYyKQE7j9H8XgNE HV9BQzoragT8lia8eC5aOQzUeX8AxCSSbsnyT/X+S1IKdd0txLOeZOD6pWwJoc3d pDELglk2b1tzhyN2GjQv3aRHj55Px7127MeZyRXwODFpXrpbnwih4OqkA4EYtmqF bZttGEzMhd4Y5mkbyuRbGM+IE99oQJMvnIkjAfUo0aKnDrcAIkWCkwLIci9TIG6u 3R1P2Tn+HYVntzQZ4BnxanbFNQ5S9ARd3529EmO3BzUCAwEAAaAAMA0GCSqGSIb3 DQEBCwUAA4IBAQDO/PSd29KgisTdGXhntg7yBEhBAjsDW7uQCrdrPSZtFyN6wUHy /1yrrWe56ZuW8jpuP5tG0eTZ+0bT2RXIRot8a2Cc3eBhpoe8M3d84yXjKAoHutGE 5IK+TViQdvT3pT3a7pTmjlf8Ojq9tx+U2ckiz8Ccnjd9yM47M9NgMhrS1aBpVZSt gOD+zzrqMML4xks9id94H7bi9Tgs3AbEJIyDpBpoK6i4OvK7KTidCngCg80qmdTy bcScLapoy1Ped2BKKuxWdOOlP+mDJatc/pcfBLE13AncQjJgMerS9M5RWCBjmRow A+aB6fBEU8bOTrqCryfBeTiV6xzyDDcIXtc6 -----END CERTIFICATE REQUEST----- aiohttp-3.0.1/examples/server.key0000666000000000000000000000321313240304665015155 0ustar 00000000000000-----BEGIN RSA PRIVATE KEY----- MIIEowIBAAKCAQEA5SCSfeNf+x2DoYa54YMcIz6ychRVjd8zoPiNePIGGdPdOoob WUnAIZhHXUAP7ZIlimlT7DhBoXphnAx5kErL6ZVD34KtVjIpATuP0fxeA0QdX0FD OitqBPyWJrx4Llo5DNR5fwDEJJJuyfJP9f5LUgp13S3Es55k4PqlbAmhzd2kMQuC WTZvW3OHI3YaNC/dpEePnk/HvXbsx5nJFfA4MWleulufCKHg6qQDgRi2aoVtm20Y TMyF3hjmaRvK5FsYz4gT32hAky+ciSMB9SjRoqcOtwAiRYKTAshyL1Mgbq7dHU/Z Of4dhWe3NBngGfFqdsU1DlL0BF3fnb0SY7cHNQIDAQABAoIBAG9BJ6B03VADfrzZ vDwh+3Gpqd/2u6wNqvYIejk123yDATLBiJIMW3x0goJm7tT+V7gjeJqEnmmYEPlC nWxQxT6AOdq3iw8FgB+XGjhuAAA5/MEZ4VjHZ81QEGBytzBaosT2DqB6cMMJTz5D qEvb1Brb9WsWJCLLUFRloBkbfDOG9lMvt34ixYTTmqjsVj5WByD5BhzKH51OJ72L 00IYpvrsEOtSev1hNV4199CHPYE90T/YQVooRBiHtTcfN+/KNVJu6Rf/zcaJ3WMS 
1l3MBI8HwMimjKKkbddpoMHyFMtSNmS9Yq+4a9w7XZo1F5rt88hYSCtAF8HRAarX 0VBCJmkCgYEA9HenBBnmfDoN857femzoTHdWQQrZQ4YPAKHvKPlcgudizE5tQbs0 iTpwm+IsecgJS2Rio7zY+P7A5nKFz3N5c0IX3smYo0J2PoakkLAm25KMxFZYBuz4 MFWVdfByAU7d28BdNfyOVbA2kU2eal9lJ0yPLpMLbH8+bbvw5uBS808CgYEA7++p ftwib3DvKWMpl6G5eA1C2xprdbE0jm2fSr3LYp/vZ4QN2V6kK2YIlyUqQvhYCnxX oIP3v2MWDRHKKwJtBWR4+t23PaDaSXS2Ifm0qhRxwSm/oqpAJQXbR7VzxXp4/4FP 1SgkLe51bubc4h+cDngqBLcplCanvj52CqhqzDsCgYAEIhG8zANNjl22BLWaiETV Jh9bMifCMH4IcLRuaOjbfbX55kmKlvOobkiBGi3OUUd28teIFSVF8GiqfL0uaLFg 9XkZ1yaxe+or3HLjz1aY171xhFQwqcj4aDoCqHIE+6Rclr/8raxqXnRNuJY5DivT okO5cdr7lpsjl83W2WwNmQKBgCPXi1xWChbXqgJmu8nY8NnMMVaFpdPY+t7j5U3G +GDtP1gZU/BKwP9yqInblWqXqp82X+isjg/a/2pIZAj0vdB2Z9Qh1sOwCau7cZG1 uZVGpI+UavojsJ1XOKCHrJmtZ/HTIVfYPT9XRdehSRHGYwuOS8iUi/ODqr8ymXOS IRINAoGBAMEmhTihgFz6Y8ezRK3QTubguehHZG1zIvtgVhOk+8hRUTSJPI9nBJPC 4gOZsPx4g2oLK6PiudPR79bhxRxPACCMnXkdwZ/8FaIdmvRHsWVs8T80wID0wthI r5hW4uqi9CcKZrGWH7mx9cVJktspeGUczvKyzNMfCaojwzA/49Z1 -----END RSA PRIVATE KEY----- aiohttp-3.0.1/examples/static_files.py0000666000000000000000000000023713240304665016163 0ustar 00000000000000import pathlib from aiohttp import web app = web.Application() app.router.add_static('/', pathlib.Path(__file__).parent, show_index=True) web.run_app(app) aiohttp-3.0.1/examples/websocket.html0000666000000000000000000000447313240304665016022 0ustar 00000000000000

    Chat!

     | Status: disconnected
    aiohttp-3.0.1/examples/web_classview1.py0000666000000000000000000000277113240304665016435 0ustar 00000000000000#!/usr/bin/env python3 """Example for aiohttp.web class based views """ import asyncio import functools import json from aiohttp.web import Application, Response, View, json_response, run_app class MyView(View): async def get(self): return json_response({ 'method': 'get', 'args': dict(self.request.GET), 'headers': dict(self.request.headers), }, dumps=functools.partial(json.dumps, indent=4)) async def post(self): data = await self.request.post() return json_response({ 'method': 'post', 'args': dict(self.request.GET), 'data': dict(data), 'headers': dict(self.request.headers), }, dumps=functools.partial(json.dumps, indent=4)) async def index(request): txt = """ Class based view example

    Class based view example

    • / This page
    • /get Returns GET data.
    • /post Returns POST data.
    """ return Response(text=txt, content_type='text/html') async def init(loop): app = Application(loop=loop) app.router.add_get('/', index) app.router.add_get('/get', MyView) app.router.add_post('/post', MyView) return app loop = asyncio.get_event_loop() app = loop.run_until_complete(init(loop)) run_app(app) aiohttp-3.0.1/examples/web_cookies.py0000666000000000000000000000171513240304665016005 0ustar 00000000000000#!/usr/bin/env python3 """Example for aiohttp.web basic server with cookies. """ import asyncio from pprint import pformat from aiohttp import web tmpl = '''\ Login
    Logout
    {}
    ''' async def root(request): resp = web.Response(content_type='text/html') resp.text = tmpl.format(pformat(request.cookies)) return resp async def login(request): resp = web.HTTPFound(location='/') resp.set_cookie('AUTH', 'secret') return resp async def logout(request): resp = web.HTTPFound(location='/') resp.del_cookie('AUTH') return resp async def init(loop): app = web.Application(loop=loop) app.router.add_get('/', root) app.router.add_get('/login', login) app.router.add_get('/logout', logout) return app loop = asyncio.get_event_loop() app = loop.run_until_complete(init(loop)) web.run_app(app) aiohttp-3.0.1/examples/web_rewrite_headers_middleware.py0000666000000000000000000000145513240304665021723 0ustar 00000000000000#!/usr/bin/env python3 """ Example for rewriting response headers by middleware. """ import asyncio from aiohttp.web import Application, HTTPException, Response, run_app async def handler(request): return Response(text="Everything is fine") async def middleware_factory(app, next_handler): async def middleware(request): try: response = await next_handler(request) except HTTPException as exc: response = exc if not response.prepared: response.headers['SERVER'] = "Secured Server Software" return response return middleware def init(loop): app = Application(loop=loop, middlewares=[middleware_factory]) app.router.add_get('/', handler) return app loop = asyncio.get_event_loop() app = init(loop) run_app(app) aiohttp-3.0.1/examples/web_srv.py0000666000000000000000000000270413240304665015162 0ustar 00000000000000#!/usr/bin/env python3 """Example for aiohttp.web basic server """ import asyncio import textwrap from aiohttp.web import Application, Response, StreamResponse, run_app async def intro(request): txt = textwrap.dedent("""\ Type {url}/hello/John {url}/simple or {url}/change_body in browser url bar """).format(url='127.0.0.1:8080') binary = txt.encode('utf8') resp = StreamResponse() resp.content_length = len(binary) resp.content_type = 'text/plain' 
await resp.prepare(request) resp.write(binary) return resp async def simple(request): return Response(text="Simple answer") async def change_body(request): resp = Response() resp.body = b"Body changed" resp.content_type = 'text/plain' return resp async def hello(request): resp = StreamResponse() name = request.match_info.get('name', 'Anonymous') answer = ('Hello, ' + name).encode('utf8') resp.content_length = len(answer) resp.content_type = 'text/plain' await resp.prepare(request) resp.write(answer) await resp.write_eof() return resp async def init(loop): app = Application() app.router.add_get('/', intro) app.router.add_get('/simple', simple) app.router.add_get('/change_body', change_body) app.router.add_get('/hello/{name}', hello) app.router.add_get('/hello', hello) return app loop = asyncio.get_event_loop() app = loop.run_until_complete(init(loop)) run_app(app) aiohttp-3.0.1/examples/web_srv_route_deco.py0000666000000000000000000000261513240304665017373 0ustar 00000000000000#!/usr/bin/env python3 """Example for aiohttp.web basic server with decorator definition for routes """ import asyncio import textwrap from aiohttp import web routes = web.RouteTableDef() @routes.get('/') async def intro(request): txt = textwrap.dedent("""\ Type {url}/hello/John {url}/simple or {url}/change_body in browser url bar """).format(url='127.0.0.1:8080') binary = txt.encode('utf8') resp = web.StreamResponse() resp.content_length = len(binary) resp.content_type = 'text/plain' await resp.prepare(request) resp.write(binary) return resp @routes.get('/simple') async def simple(request): return web.Response(text="Simple answer") @routes.get('/change_body') async def change_body(request): resp = web.Response() resp.body = b"Body changed" resp.content_type = 'text/plain' return resp @routes.get('/hello') async def hello(request): resp = web.StreamResponse() name = request.match_info.get('name', 'Anonymous') answer = ('Hello, ' + name).encode('utf8') resp.content_length = len(answer) 
resp.content_type = 'text/plain' await resp.prepare(request) resp.write(answer) await resp.write_eof() return resp async def init(): app = web.Application() app.router.add_routes(routes) return app loop = asyncio.get_event_loop() app = loop.run_until_complete(init()) web.run_app(app) aiohttp-3.0.1/examples/web_srv_route_table.py0000666000000000000000000000271313240304665017547 0ustar 00000000000000#!/usr/bin/env python3 """Example for aiohttp.web basic server with table definition for routes """ import asyncio import textwrap from aiohttp import web async def intro(request): txt = textwrap.dedent("""\ Type {url}/hello/John {url}/simple or {url}/change_body in browser url bar """).format(url='127.0.0.1:8080') binary = txt.encode('utf8') resp = web.StreamResponse() resp.content_length = len(binary) resp.content_type = 'text/plain' await resp.prepare(request) resp.write(binary) return resp async def simple(request): return web.Response(text="Simple answer") async def change_body(request): resp = web.Response() resp.body = b"Body changed" resp.content_type = 'text/plain' return resp async def hello(request): resp = web.StreamResponse() name = request.match_info.get('name', 'Anonymous') answer = ('Hello, ' + name).encode('utf8') resp.content_length = len(answer) resp.content_type = 'text/plain' await resp.prepare(request) resp.write(answer) await resp.write_eof() return resp async def init(): app = web.Application() app.router.add_routes([ web.get('/', intro), web.get('/simple', simple), web.get('/change_body', change_body), web.get('/hello/{name}', hello), web.get('/hello', hello), ]) return app loop = asyncio.get_event_loop() app = loop.run_until_complete(init()) web.run_app(app) aiohttp-3.0.1/examples/web_ws.py0000666000000000000000000000300313240304665014772 0ustar 00000000000000#!/usr/bin/env python3 """Example for aiohttp.web websocket server """ import asyncio import os from aiohttp.web import (Application, Response, WebSocketResponse, WSMsgType, run_app) WS_FILE 
= os.path.join(os.path.dirname(__file__), 'websocket.html') async def wshandler(request): resp = WebSocketResponse() ok, protocol = resp.can_prepare(request) if not ok: with open(WS_FILE, 'rb') as fp: return Response(body=fp.read(), content_type='text/html') await resp.prepare(request) try: print('Someone joined.') for ws in request.app['sockets']: await ws.send_str('Someone joined') request.app['sockets'].append(resp) async for msg in resp: if msg.type == WSMsgType.TEXT: for ws in request.app['sockets']: if ws is not resp: await ws.send_str(msg.data) else: return resp return resp finally: request.app['sockets'].remove(resp) print('Someone disconnected.') for ws in request.app['sockets']: await ws.send_str('Someone disconnected.') async def on_shutdown(app): for ws in app['sockets']: await ws.close() async def init(loop): app = Application() app['sockets'] = [] app.router.add_get('/', wshandler) app.on_shutdown.append(on_shutdown) return app loop = asyncio.get_event_loop() app = loop.run_until_complete(init(loop)) run_app(app) aiohttp-3.0.1/HISTORY.rst0000666000000000000000000016377113240304665013232 0ustar 000000000000002.3.10 (2018-02-02) =================== - Fix 100% CPU usage on HTTP GET and websocket connection just after it (#1955) - Patch broken `ssl.match_hostname()` on Python<3.7 (#2674) 2.3.9 (2018-01-16) ================== - Fix colon handing in path for dynamic resources (#2670) 2.3.8 (2018-01-15) ================== - Do not use `yarl.unquote` internal function in aiohttp. Fix incorrectly unquoted path part in URL dispatcher (#2662) - Fix compatibility with `yarl==1.0.0` (#2662) 2.3.7 (2017-12-27) ================== - Fixed race-condition for iterating addresses from the DNSCache. (#2620) - Fix docstring for request.host (#2591) - Fix docstring for request.remote (#2592) 2.3.6 (2017-12-04) ================== - Correct `request.app` context (for handlers not just middlewares). 
(#2577) 2.3.5 (2017-11-30) ================== - Fix compatibility with `pytest` 3.3+ (#2565) 2.3.4 (2017-11-29) ================== - Make `request.app` point to proper application instance when using nested applications (with middlewares). (#2550) - Change base class of ClientConnectorSSLError to ClientSSLError from ClientConnectorError. (#2563) - Return client connection back to free pool on error in `connector.connect()`. (#2567) 2.3.3 (2017-11-17) ================== - Having a `;` in Response content type does not assume it contains a charset anymore. (#2197) - Use `getattr(asyncio, 'async')` for keeping compatibility with Python 3.7. (#2476) - Ignore `NotImplementedError` raised by `set_child_watcher` from `uvloop`. (#2491) - Fix warning in `ClientSession.__del__` by stopping to try to close it. (#2523) - Fixed typo's in Third-party libraries page. And added async-v20 to the list (#2510) 2.3.2 (2017-11-01) ================== - Fix passing client max size on cloning request obj. (#2385) - Fix ClientConnectorSSLError and ClientProxyConnectionError for proxy connector. (#2408) - Drop generated `_http_parser` shared object from tarball distribution. (#2414) - Fix connector convert OSError to ClientConnectorError. (#2423) - Fix connection attempts for multiple dns hosts. (#2424) - Fix ValueError for AF_INET6 sockets if a preexisting INET6 socket to the `aiohttp.web.run_app` function. (#2431) - `_SessionRequestContextManager` closes the session properly now. (#2441) - Rename `from_env` to `trust_env` in client reference. (#2451) 2.3.1 (2017-10-18) ================== - Relax attribute lookup in warning about old-styled middleware (#2340) 2.3.0 (2017-10-18) ================== Features -------- - Add SSL related params to `ClientSession.request` (#1128) - Make enable_compression work on HTTP/1.0 (#1828) - Deprecate registering synchronous web handlers (#1993) - Switch to `multidict 3.0`. All HTTP headers preserve casing now but compared in case-insensitive way. 
(#1994) - Improvement for `normalize_path_middleware`. Added possibility to handle URLs with query string. (#1995) - Use towncrier for CHANGES.txt build (#1997) - Implement `trust_env=True` param in `ClientSession`. (#1998) - Added variable to customize proxy headers (#2001) - Implement `router.add_routes` and router decorators. (#2004) - Deprecated `BaseRequest.has_body` in favor of `BaseRequest.can_read_body` Added `BaseRequest.body_exists` attribute that stays static for the lifetime of the request (#2005) - Provide `BaseRequest.loop` attribute (#2024) - Make `_CoroGuard` awaitable and fix `ClientSession.close` warning message (#2026) - Responses to redirects without Location header are returned instead of raising a RuntimeError (#2030) - Added `get_client`, `get_server`, `setUpAsync` and `tearDownAsync` methods to AioHTTPTestCase (#2032) - Add automatically a SafeChildWatcher to the test loop (#2058) - add ability to disable automatic response decompression (#2110) - Add support for throttling DNS request, avoiding the requests saturation when there is a miss in the DNS cache and many requests getting into the connector at the same time. (#2111) - Use request for getting access log information instead of message/transport pair. Add `RequestBase.remote` property for accessing to IP of client initiated HTTP request. (#2123) - json() raises a ContentTypeError exception if the content-type does not meet the requirements instead of raising a generic ClientResponseError. (#2136) - Make the HTTP client able to return HTTP chunks when chunked transfer encoding is used. (#2150) - add `append_version` arg into `StaticResource.url` and `StaticResource.url_for` methods for getting an url with hash (version) of the file. (#2157) - Fix parsing the Forwarded header. 
* commas and semicolons are allowed inside quoted-strings; * empty forwarded-pairs (as in for=_1;;by=_2) are allowed; * non-standard parameters are allowed (although this alone could be easily done in the previous parser). (#2173) - Don't require ssl module to run. aiohttp does not require SSL to function. The code paths involved with SSL will only be hit upon SSL usage. Raise `RuntimeError` if HTTPS protocol is required but ssl module is not present. (#2221) - Accept coroutine fixtures in pytest plugin (#2223) - Call `shutdown_asyncgens` before event loop closing on Python 3.6. (#2227) - Speed up Signals when there are no receivers (#2229) - Raise `InvalidURL` instead of `ValueError` on fetches with invalid URL. (#2241) - Move `DummyCookieJar` into `cookiejar.py` (#2242) - `run_app`: Make `print=None` disable printing (#2260) - Support `brotli` encoding (generic-purpose lossless compression algorithm) (#2270) - Add server support for WebSockets Per-Message Deflate. Add client option to add deflate compress header in WebSockets request header. If calling ClientSession.ws_connect() with `compress=15` the client will support deflate compress negotiation. (#2273) - Support `verify_ssl`, `fingerprint`, `ssl_context` and `proxy_headers` by `client.ws_connect`. (#2292) - Added `aiohttp.ClientConnectorSSLError` when connection fails due `ssl.SSLError` (#2294) - `aiohttp.web.Application.make_handler` support `access_log_class` (#2315) - Build HTTP parser extension in non-strict mode by default. (#2332) Bugfixes -------- - Clear auth information on redirecting to other domain (#1699) - Fix missing app.loop on startup hooks during tests (#2060) - Fix issue with synchronous session closing when using `ClientSession` as an asynchronous context manager. (#2063) - Fix issue with `CookieJar` incorrectly expiring cookies in some edge cases. 
(#2084) - Force use of IPv4 during test, this will make tests run in a Docker container (#2104) - Warnings about unawaited coroutines now correctly point to the user's code. (#2106) - Fix issue with `IndexError` being raised by the `StreamReader.iter_chunks()` generator. (#2112) - Support HTTP 308 Permanent redirect in client class. (#2114) - Fix `FileResponse` sending empty chunked body on 304. (#2143) - Do not add `Content-Length: 0` to GET/HEAD/TRACE/OPTIONS requests by default. (#2167) - Fix parsing the Forwarded header according to RFC 7239. (#2170) - Securely determining remote/scheme/host #2171 (#2171) - Fix header name parsing, if name is split into multiple lines (#2183) - Handle session close during connection, `KeyError: ` (#2193) - Fixes uncaught `TypeError` in `helpers.guess_filename` if `name` is not a string (#2201) - Raise OSError on async DNS lookup if resolved domain is an alias for another one, which does not have an A or CNAME record. (#2231) - Fix incorrect warning in `StreamReader`. (#2251) - Properly clone state of web request (#2284) - Fix C HTTP parser for cases when status line is split into different TCP packets. (#2311) - Fix `web.FileResponse` overriding user supplied Content-Type (#2317) Improved Documentation ---------------------- - Add a note about possible performance degradation in `await resp.text()` if charset was not provided by `Content-Type` HTTP header. Pass explicit encoding to solve it. (#1811) - Drop `disqus` widget from documentation pages. (#2018) - Add a graceful shutdown section to the client usage documentation. (#2039) - Document `connector_owner` parameter. (#2072) - Update the doc of web.Application (#2081) - Fix mistake about access log disabling. (#2085) - Add example usage of on_startup and on_shutdown signals by creating and disposing an aiopg connection engine. (#2131) - Document `encoded=True` for `yarl.URL`, it disables all yarl transformations. 
(#2198) - Document that all app's middleware factories are run for every request. (#2225) - Reflect the fact that default resolver is threaded one starting from aiohttp 1.1 (#2228) Deprecations and Removals ------------------------- - Drop deprecated `Server.finish_connections` (#2006) - Drop %O format from logging, use %b instead. Drop %e format from logging, environment variables are not supported anymore. (#2123) - Drop deprecated secure_proxy_ssl_header support (#2171) - Removed TimeService in favor of simple caching. TimeService also had a bug where it lost about 0.5 seconds per second. (#2176) - Drop unused response_factory from static files API (#2290) Misc ---- - #2013, #2014, #2048, #2094, #2149, #2187, #2214, #2225, #2243, #2248 2.2.5 (2017-08-03) ================== - Don't raise deprecation warning on `loop.run_until_complete(client.close())` (#2065) 2.2.4 (2017-08-02) ================== - Fix issue with synchronous session closing when using ClientSession as an asynchronous context manager. (#2063) 2.2.3 (2017-07-04) ================== - Fix `_CoroGuard` for python 3.4 2.2.2 (2017-07-03) ================== - Allow `await session.close()` along with `yield from session.close()` 2.2.1 (2017-07-02) ================== - Relax `yarl` requirement to 0.11+ - Backport #2026: `session.close` *is* a coroutine (#2029) 2.2.0 (2017-06-20) ================== - Add doc for add_head, update doc for add_get. (#1944) - Fixed consecutive calls for `Response.write_eof`. - Retain method attributes (e.g. :code:`__doc__`) when registering synchronous handlers for resources. (#1953) - Added signal TERM handling in `run_app` to gracefully exit (#1932) - Fix websocket issues caused by frame fragmentation. 
(#1962) - Raise RuntimeError is you try to set the Content Length and enable chunked encoding at the same time (#1941) - Small update for `unittest_run_loop` - Use CIMultiDict for ClientRequest.skip_auto_headers (#1970) - Fix wrong startup sequence: test server and `run_app()` are not raise `DeprecationWarning` now (#1947) - Make sure cleanup signal is sent if startup signal has been sent (#1959) - Fixed server keep-alive handler, could cause 100% cpu utilization (#1955) - Connection can be destroyed before response get processed if `await aiohttp.request(..)` is used (#1981) - MultipartReader does not work with -OO (#1969) - Fixed `ClientPayloadError` with blank `Content-Encoding` header (#1931) - Support `deflate` encoding implemented in `httpbin.org/deflate` (#1918) - Fix BadStatusLine caused by extra `CRLF` after `POST` data (#1792) - Keep a reference to `ClientSession` in response object (#1985) - Deprecate undocumented `app.on_loop_available` signal (#1978) 2.1.0 (2017-05-26) ================== - Added support for experimental `async-tokio` event loop written in Rust https://github.com/PyO3/tokio - Write to transport ``\r\n`` before closing after keepalive timeout, otherwise client can not detect socket disconnection. (#1883) - Only call `loop.close` in `run_app` if the user did *not* supply a loop. Useful for allowing clients to specify their own cleanup before closing the asyncio loop if they wish to tightly control loop behavior - Content disposition with semicolon in filename (#917) - Added `request_info` to response object and `ClientResponseError`. (#1733) - Added `history` to `ClientResponseError`. (#1741) - Allow to disable redirect url re-quoting (#1474) - Handle RuntimeError from transport (#1790) - Dropped "%O" in access logger (#1673) - Added `args` and `kwargs` to `unittest_run_loop`. Useful with other decorators, for example `@patch`. (#1803) - Added `iter_chunks` to response.content object. 
(#1805) - Avoid creating TimerContext when there is no timeout to allow compatibility with Tornado. (#1817) (#1180) - Add `proxy_from_env` to `ClientRequest` to read from environment variables. (#1791) - Add DummyCookieJar helper. (#1830) - Fix assertion errors in Python 3.4 from noop helper. (#1847) - Do not unquote `+` in match_info values (#1816) - Use Forwarded, X-Forwarded-Scheme and X-Forwarded-Host for better scheme and host resolution. (#1134) - Fix sub-application middlewares resolution order (#1853) - Fix applications comparison (#1866) - Fix static location in index when prefix is used (#1662) - Make test server more reliable (#1896) - Extend list of web exceptions, add HTTPUnprocessableEntity, HTTPFailedDependency, HTTPInsufficientStorage status codes (#1920) 2.0.7 (2017-04-12) ================== - Fix *pypi* distribution - Fix exception description (#1807) - Handle socket error in FileResponse (#1773) - Cancel websocket heartbeat on close (#1793) 2.0.6 (2017-04-04) ================== - Keeping blank values for `request.post()` and `multipart.form()` (#1765) - TypeError in data_received of ResponseHandler (#1770) - Fix ``web.run_app`` not to bind to default host-port pair if only socket is passed (#1786) 2.0.5 (2017-03-29) ================== - Memory leak with aiohttp.request (#1756) - Disable cleanup closed ssl transports by default. 
- Exception in request handling if the server responds before the body is sent (#1761) 2.0.4 (2017-03-27) ================== - Memory leak with aiohttp.request (#1756) - Encoding is always UTF-8 in POST data (#1750) - Do not add "Content-Disposition" header by default (#1755) 2.0.3 (2017-03-24) ================== - Call https website through proxy will cause error (#1745) - Fix exception on multipart/form-data post if content-type is not set (#1743) 2.0.2 (2017-03-21) ================== - Fixed Application.on_loop_available signal (#1739) - Remove debug code 2.0.1 (2017-03-21) ================== - Fix allow-head to include name on route (#1737) - Fixed AttributeError in WebSocketResponse.can_prepare (#1736) 2.0.0 (2017-03-20) ================== - Added `json` to `ClientSession.request()` method (#1726) - Added session's `raise_for_status` parameter, automatically calls raise_for_status() on any request. (#1724) - `response.json()` raises `ClientReponseError` exception if response's content type does not match (#1723) - Cleanup timer and loop handle on any client exception. - Deprecate `loop` parameter for Application's constructor `2.0.0rc1` (2017-03-15) ======================= - Properly handle payload errors (#1710) - Added `ClientWebSocketResponse.get_extra_info()` (#1717) - It is not possible to combine Transfer-Encoding and chunked parameter, same for compress and Content-Encoding (#1655) - Connector's `limit` parameter indicates total concurrent connections. New `limit_per_host` added, indicates total connections per endpoint. (#1601) - Use url's `raw_host` for name resolution (#1685) - Change `ClientResponse.url` to `yarl.URL` instance (#1654) - Add max_size parameter to web.Request reading methods (#1133) - Web Request.post() stores data in temp files (#1469) - Add the `allow_head=True` keyword argument for `add_get` (#1618) - `run_app` and the Command Line Interface now support serving over Unix domain sockets for faster inter-process communication. 
- `run_app` now supports passing a preexisting socket object. This can be useful e.g. for socket-based activated applications, when binding of a socket is done by the parent process. - Implementation for Trailer headers parser is broken (#1619) - Fix FileResponse to not fall on bad request (range out of file size) - Fix FileResponse to correct stream video to Chromes - Deprecate public low-level api (#1657) - Deprecate `encoding` parameter for ClientSession.request() method - Dropped aiohttp.wsgi (#1108) - Dropped `version` from ClientSession.request() method - Dropped websocket version 76 support (#1160) - Dropped: `aiohttp.protocol.HttpPrefixParser` (#1590) - Dropped: Servers response's `.started`, `.start()` and `.can_start()` method (#1591) - Dropped: Adding `sub app` via `app.router.add_subapp()` is deprecated use `app.add_subapp()` instead (#1592) - Dropped: `Application.finish()` and `Application.register_on_finish()` (#1602) - Dropped: `web.Request.GET` and `web.Request.POST` - Dropped: aiohttp.get(), aiohttp.options(), aiohttp.head(), aiohttp.post(), aiohttp.put(), aiohttp.patch(), aiohttp.delete(), and aiohttp.ws_connect() (#1593) - Dropped: `aiohttp.web.WebSocketResponse.receive_msg()` (#1605) - Dropped: `ServerHttpProtocol.keep_alive_timeout` attribute and `keep-alive`, `keep_alive_on`, `timeout`, `log` constructor parameters (#1606) - Dropped: `TCPConnector's`` `.resolve`, `.resolved_hosts`, `.clear_resolved_hosts()` attributes and `resolve` constructor parameter (#1607) - Dropped `ProxyConnector` (#1609) 1.3.5 (2017-03-16) ================== - Fixed None timeout support (#1720) 1.3.4 (2017-03-14) ================== - Revert timeout handling in client request - Fix StreamResponse representation after eof - Fix file_sender to not fall on bad request (range out of file size) - Fix file_sender to correct stream video to Chromes - Fix NotImplementedError server exception (#1703) - Clearer error message for URL without a host name. 
(#1691) - Silence deprecation warning in __repr__ (#1690) - IDN + HTTPS = `ssl.CertificateError` (#1685) 1.3.3 (2017-02-19) ================== - Fixed memory leak in time service (#1656) 1.3.2 (2017-02-16) ================== - Awaiting on WebSocketResponse.send_* does not work (#1645) - Fix multiple calls to client ws_connect when using a shared header dict (#1643) - Make CookieJar.filter_cookies() accept plain string parameter. (#1636) 1.3.1 (2017-02-09) ================== - Handle CLOSING in WebSocketResponse.__anext__ - Fixed AttributeError 'drain' for server websocket handler (#1613) 1.3.0 (2017-02-08) ================== - Multipart writer validates the data on append instead of on a request send (#920) - Multipart reader accepts multipart messages with or without their epilogue to consistently handle valid and legacy behaviors (#1526) (#1581) - Separate read + connect + request timeouts # 1523 - Do not swallow Upgrade header (#1587) - Fix polls demo run application (#1487) - Ignore unknown 1XX status codes in client (#1353) - Fix sub-Multipart messages missing their headers on serialization (#1525) - Do not use readline when reading the content of a part in the multipart reader (#1535) - Add optional flag for quoting `FormData` fields (#916) - 416 Range Not Satisfiable if requested range end > file size (#1588) - Having a `:` or `@` in a route does not work (#1552) - Added `receive_timeout` timeout for websocket to receive complete message. (#1325) - Added `heartbeat` parameter for websocket to automatically send `ping` message. 
(#1024) (#777) - Remove `web.Application` dependency from `web.UrlDispatcher` (#1510) - Accepting back-pressure from slow websocket clients (#1367) - Do not pause transport during set_parser stage (#1211) - Lingering close does not terminate before timeout (#1559) - `setsockopt` may raise `OSError` exception if socket is closed already (#1595) - Lots of CancelledError when requests are interrupted (#1565) - Allow users to specify what should happen to decoding errors when calling a responses `text()` method (#1542) - Back port std module `http.cookies` for python3.4.2 (#1566) - Maintain url's fragment in client response (#1314) - Allow concurrently close WebSocket connection (#754) - Gzipped responses with empty body raises ContentEncodingError (#609) - Return 504 if request handle raises TimeoutError. - Refactor how we use keep-alive and close lingering timeouts. - Close response connection if we can not consume whole http message during client response release - Abort closed ssl client transports, broken servers can keep socket open un-limit time (#1568) - Log warning instead of `RuntimeError` is websocket connection is closed. 
- Deprecated: `aiohttp.protocol.HttpPrefixParser` will be removed in 1.4 (#1590) - Deprecated: Servers response's `.started`, `.start()` and `.can_start()` method will be removed in 1.4 (#1591) - Deprecated: Adding `sub app` via `app.router.add_subapp()` is deprecated use `app.add_subapp()` instead, will be removed in 1.4 (#1592) - Deprecated: aiohttp.get(), aiohttp.options(), aiohttp.head(), aiohttp.post(), aiohttp.put(), aiohttp.patch(), aiohttp.delete(), and aiohttp.ws_connect() will be removed in 1.4 (#1593) - Deprecated: `Application.finish()` and `Application.register_on_finish()` will be removed in 1.4 (#1602) 1.2.0 (2016-12-17) ================== - Extract `BaseRequest` from `web.Request`, introduce `web.Server` (former `RequestHandlerFactory`), introduce new low-level web server which is not coupled with `web.Application` and routing (#1362) - Make `TestServer.make_url` compatible with `yarl.URL` (#1389) - Implement range requests for static files (#1382) - Support task attribute for StreamResponse (#1410) - Drop `TestClient.app` property, use `TestClient.server.app` instead (BACKWARD INCOMPATIBLE) - Drop `TestClient.handler` property, use `TestClient.server.handler` instead (BACKWARD INCOMPATIBLE) - `TestClient.server` property returns a test server instance, was `asyncio.AbstractServer` (BACKWARD INCOMPATIBLE) - Follow gunicorn's signal semantics in `Gunicorn[UVLoop]WebWorker` (#1201) - Call worker_int and worker_abort callbacks in `Gunicorn[UVLoop]WebWorker` (#1202) - Has functional tests for client proxy (#1218) - Fix bugs with client proxy target path and proxy host with port (#1413) - Fix bugs related to the use of unicode hostnames (#1444) - Preserve cookie quoting/escaping (#1453) - FileSender will send gzipped response if gzip version available (#1426) - Don't override `Content-Length` header in `web.Response` if no body was set (#1400) - Introduce `router.post_init()` for solving (#1373) - Fix raise error in case of multiple calls of 
`TimeServive.stop()` - Allow to raise web exceptions on router resolving stage (#1460) - Add a warning for session creation outside of coroutine (#1468) - Avoid a race when application might start accepting incoming requests but startup signals are not processed yet e98e8c6 - Raise a `RuntimeError` when trying to change the status of the HTTP response after the headers have been sent (#1480) - Fix bug with https proxy acquired cleanup (#1340) - Use UTF-8 as the default encoding for multipart text parts (#1484) 1.1.6 (2016-11-28) ================== - Fix `BodyPartReader.read_chunk` bug about returns zero bytes before `EOF` (#1428) 1.1.5 (2016-11-16) ================== - Fix static file serving in fallback mode (#1401) 1.1.4 (2016-11-14) ================== - Make `TestServer.make_url` compatible with `yarl.URL` (#1389) - Generate informative exception on redirects from server which does not provide redirection headers (#1396) 1.1.3 (2016-11-10) ================== - Support *root* resources for sub-applications (#1379) 1.1.2 (2016-11-08) ================== - Allow starting variables with an underscore (#1379) - Properly process UNIX sockets by gunicorn worker (#1375) - Fix ordering for `FrozenList` - Don't propagate pre and post signals to sub-application (#1377) 1.1.1 (2016-11-04) ================== - Fix documentation generation (#1120) 1.1.0 (2016-11-03) ================== - Drop deprecated `WSClientDisconnectedError` (BACKWARD INCOMPATIBLE) - Use `yarl.URL` in client API. The change is 99% backward compatible but `ClientResponse.url` is an `yarl.URL` instance now. (#1217) - Close idle keep-alive connections on shutdown (#1222) - Modify regex in AccessLogger to accept underscore and numbers (#1225) - Use `yarl.URL` in web server API. `web.Request.rel_url` and `web.Request.url` are added. URLs and templates are percent-encoded now. 
(#1224) - Accept `yarl.URL` by server redirections (#1278) - Return `yarl.URL` by `.make_url()` testing utility (#1279) - Properly format IPv6 addresses by `aiohttp.web.run_app` (#1139) - Use `yarl.URL` by server API (#1288) * Introduce `resource.url_for()`, deprecate `resource.url()`. * Implement `StaticResource`. * Inherit `SystemRoute` from `AbstractRoute` * Drop old-style routes: `Route`, `PlainRoute`, `DynamicRoute`, `StaticRoute`, `ResourceAdapter`. - Revert `resp.url` back to `str`, introduce `resp.url_obj` (#1292) - Raise ValueError if BasicAuth login has a ":" character (#1307) - Fix bug when ClientRequest send payload file with opened as open('filename', 'r+b') (#1306) - Enhancement to AccessLogger (pass *extra* dict) (#1303) - Show more verbose message on import errors (#1319) - Added save and load functionality for `CookieJar` (#1219) - Added option on `StaticRoute` to follow symlinks (#1299) - Force encoding of `application/json` content type to utf-8 (#1339) - Fix invalid invocations of `errors.LineTooLong` (#1335) - Websockets: Stop `async for` iteration when connection is closed (#1144) - Ensure TestClient HTTP methods return a context manager (#1318) - Raise `ClientDisconnectedError` to `FlowControlStreamReader` read function if `ClientSession` object is closed by client when reading data. (#1323) - Document deployment without `Gunicorn` (#1120) - Add deprecation warning for MD5 and SHA1 digests when used for fingerprint of site certs in TCPConnector. (#1186) - Implement sub-applications (#1301) - Don't inherit `web.Request` from `dict` but implement `MutableMapping` protocol. - Implement frozen signals - Don't inherit `web.Application` from `dict` but implement `MutableMapping` protocol. - Support freezing for web applications - Accept access_log parameter in `web.run_app`, use `None` to disable logging - Don't flap `tcp_cork` and `tcp_nodelay` in regular request handling. `tcp_nodelay` is still enabled by default. 
- Improve performance of web server by removing premature computing of Content-Type if the value was set by `web.Response` constructor. While the patch boosts speed of trivial `web.Response(text='OK', content_type='text/plain)` very well please don't expect significant boost of your application -- a couple DB requests and business logic is still the main bottleneck. - Boost performance by adding a custom time service (#1350) - Extend `ClientResponse` with `content_type` and `charset` properties like in `web.Request`. (#1349) - Disable aiodns by default (#559) - Don't flap `tcp_cork` in client code, use TCP_NODELAY mode by default. - Implement `web.Request.clone()` (#1361) 1.0.5 (2016-10-11) ================== - Fix StreamReader._read_nowait to return all available data up to the requested amount (#1297) 1.0.4 (2016-09-22) ================== - Fix FlowControlStreamReader.read_nowait so that it checks whether the transport is paused (#1206) 1.0.2 (2016-09-22) ================== - Make CookieJar compatible with 32-bit systems (#1188) - Add missing `WSMsgType` to `web_ws.__all__`, see (#1200) - Fix `CookieJar` ctor when called with `loop=None` (#1203) - Fix broken upper-casing in wsgi support (#1197) 1.0.1 (2016-09-16) ================== - Restore `aiohttp.web.MsgType` alias for `aiohttp.WSMsgType` for sake of backward compatibility (#1178) - Tune alabaster schema. - Use `text/html` content type for displaying index pages by static file handler. - Fix `AssertionError` in static file handling (#1177) - Fix access log formats `%O` and `%b` for static file handling - Remove `debug` setting of GunicornWorker, use `app.debug` to control its debug-mode instead 1.0.0 (2016-09-16) ================== - Change default size for client session's connection pool from unlimited to 20 (#977) - Add IE support for cookie deletion. 
(#994) - Remove deprecated `WebSocketResponse.wait_closed` method (BACKWARD INCOMPATIBLE) - Remove deprecated `force` parameter for `ClientResponse.close` method (BACKWARD INCOMPATIBLE) - Avoid using of mutable CIMultiDict kw param in make_mocked_request (#997) - Make WebSocketResponse.close a little bit faster by avoiding new task creating just for timeout measurement - Add `proxy` and `proxy_auth` params to `client.get()` and family, deprecate `ProxyConnector` (#998) - Add support for websocket send_json and receive_json, synchronize server and client API for websockets (#984) - Implement router shourtcuts for most useful HTTP methods, use `app.router.add_get()`, `app.router.add_post()` etc. instead of `app.router.add_route()` (#986) - Support SSL connections for gunicorn worker (#1003) - Move obsolete examples to legacy folder - Switch to multidict 2.0 and title-cased strings (#1015) - `{FOO}e` logger format is case-sensitive now - Fix logger report for unix socket 8e8469b - Rename aiohttp.websocket to aiohttp._ws_impl - Rename aiohttp.MsgType tp aiohttp.WSMsgType - Introduce aiohttp.WSMessage officially - Rename Message -> WSMessage - Remove deprecated decode param from resp.read(decode=True) - Use 5min default client timeout (#1028) - Relax HTTP method validation in UrlDispatcher (#1037) - Pin minimal supported asyncio version to 3.4.2+ (`loop.is_close()` should be present) - Remove aiohttp.websocket module (BACKWARD INCOMPATIBLE) Please use high-level client and server approaches - Link header for 451 status code is mandatory - Fix test_client fixture to allow multiple clients per test (#1072) - make_mocked_request now accepts dict as headers (#1073) - Add Python 3.5.2/3.6+ compatibility patch for async generator protocol change (#1082) - Improvement test_client can accept instance object (#1083) - Simplify ServerHttpProtocol implementation (#1060) - Add a flag for optional showing directory index for static file handling (#921) - Define 
`web.Application.on_startup()` signal handler (#1103) - Drop ChunkedParser and LinesParser (#1111) - Call `Application.startup` in GunicornWebWorker (#1105) - Fix client handling hostnames with 63 bytes when a port is given in the url (#1044) - Implement proxy support for ClientSession.ws_connect (#1025) - Return named tuple from WebSocketResponse.can_prepare (#1016) - Fix access_log_format in `GunicornWebWorker` (#1117) - Setup Content-Type to application/octet-stream by default (#1124) - Deprecate debug parameter from app.make_handler(), use `Application(debug=True)` instead (#1121) - Remove fragment string in request path (#846) - Use aiodns.DNSResolver.gethostbyname() if available (#1136) - Fix static file sending on uvloop when sendfile is available (#1093) - Make prettier urls if query is empty dict (#1143) - Fix redirects for HEAD requests (#1147) - Default value for `StreamReader.read_nowait` is -1 from now (#1150) - `aiohttp.StreamReader` is not inherited from `asyncio.StreamReader` from now (BACKWARD INCOMPATIBLE) (#1150) - Streams documentation added (#1150) - Add `multipart` coroutine method for web Request object (#1067) - Publish ClientSession.loop property (#1149) - Fix static file with spaces (#1140) - Fix piling up asyncio loop by cookie expiration callbacks (#1061) - Drop `Timeout` class for sake of `async_timeout` external library. 
`aiohttp.Timeout` is an alias for `async_timeout.timeout` - `use_dns_cache` parameter of `aiohttp.TCPConnector` is `True` by default (BACKWARD INCOMPATIBLE) (#1152) - `aiohttp.TCPConnector` uses asynchronous DNS resolver if available by default (BACKWARD INCOMPATIBLE) (#1152) - Conform to RFC3986 - do not include url fragments in client requests (#1174) - Drop `ClientSession.cookies` (BACKWARD INCOMPATIBLE) (#1173) - Refactor `AbstractCookieJar` public API (BACKWARD INCOMPATIBLE) (#1173) - Fix clashing cookies with have the same name but belong to different domains (BACKWARD INCOMPATIBLE) (#1125) - Support binary Content-Transfer-Encoding (#1169) 0.22.5 (08-02-2016) =================== - Pin miltidict version to >=1.2.2 0.22.3 (07-26-2016) =================== - Do not filter cookies if unsafe flag provided (#1005) 0.22.2 (07-23-2016) =================== - Suppress CancelledError when Timeout raises TimeoutError (#970) - Don't expose `aiohttp.__version__` - Add unsafe parameter to CookieJar (#968) - Use unsafe cookie jar in test client tools - Expose aiohttp.CookieJar name 0.22.1 (07-16-2016) =================== - Large cookie expiration/max-age does not break an event loop from now (fixes (#967)) 0.22.0 (07-15-2016) =================== - Fix bug in serving static directory (#803) - Fix command line arg parsing (#797) - Fix a documentation chapter about cookie usage (#790) - Handle empty body with gzipped encoding (#758) - Support 451 Unavailable For Legal Reasons http status (#697) - Fix Cookie share example and few small typos in docs (#817) - UrlDispatcher.add_route with partial coroutine handler (#814) - Optional support for aiodns (#728) - Add ServiceRestart and TryAgainLater websocket close codes (#828) - Fix prompt message for `web.run_app` (#832) - Allow to pass None as a timeout value to disable timeout logic (#834) - Fix leak of connection slot during connection error (#835) - Gunicorn worker with uvloop support `aiohttp.worker.GunicornUVLoopWebWorker` 
(#878) - Don't send body in response to HEAD request (#838) - Skip the preamble in MultipartReader (#881) - Implement BasicAuth decode classmethod. (#744) - Don't crash logger when transport is None (#889) - Use a create_future compatibility wrapper instead of creating Futures directly (#896) - Add test utilities to aiohttp (#902) - Improve Request.__repr__ (#875) - Skip DNS resolving if provided host is already an ip address (#874) - Add headers to ClientSession.ws_connect (#785) - Document that server can send pre-compressed data (#906) - Don't add Content-Encoding and Transfer-Encoding if no body (#891) - Add json() convenience methods to websocket message objects (#897) - Add client_resp.raise_for_status() (#908) - Implement cookie filter (#799) - Include an example of middleware to handle error pages (#909) - Fix error handling in StaticFileMixin (#856) - Add mocked request helper (#900) - Fix empty ALLOW Response header for cls based View (#929) - Respect CONNECT method to implement a proxy server (#847) - Add pytest_plugin (#914) - Add tutorial - Add backlog option to support more than 128 (default value in "create_server" function) concurrent connections (#892) - Allow configuration of header size limits (#912) - Separate sending file logic from StaticRoute dispatcher (#901) - Drop deprecated share_cookies connector option (BACKWARD INCOMPATIBLE) - Drop deprecated support for tuple as auth parameter. Use aiohttp.BasicAuth instead (BACKWARD INCOMPATIBLE) - Remove deprecated `request.payload` property, use `content` instead. 
(BACKWARD INCOMPATIBLE) - Drop all mentions about api changes in documentation for versions older than 0.16 - Allow to override default cookie jar (#963) - Add manylinux wheel builds - Dup a socket for sendfile usage (#964) 0.21.6 (05-05-2016) =================== - Drop initial query parameters on redirects (#853) 0.21.5 (03-22-2016) =================== - Fix command line arg parsing (#797) 0.21.4 (03-12-2016) =================== - Fix ResourceAdapter: don't add method to allowed if resource is not match (#826) - Fix Resource: append found method to returned allowed methods 0.21.2 (02-16-2016) =================== - Fix a regression: support for handling ~/path in static file routes was broken (#782) 0.21.1 (02-10-2016) =================== - Make new resources classes public (#767) - Add `router.resources()` view - Fix cmd-line parameter names in doc 0.21.0 (02-04-2016) =================== - Introduce on_shutdown signal (#722) - Implement raw input headers (#726) - Implement web.run_app utility function (#734) - Introduce on_cleanup signal - Deprecate Application.finish() / Application.register_on_finish() in favor of on_cleanup. - Get rid of bare aiohttp.request(), aiohttp.get() and family in docs (#729) - Deprecate bare aiohttp.request(), aiohttp.get() and family (#729) - Refactor keep-alive support (#737): - Enable keepalive for HTTP 1.0 by default - Disable it for HTTP 0.9 (who cares about 0.9, BTW?) - For keepalived connections - Send `Connection: keep-alive` for HTTP 1.0 only - don't send `Connection` header for HTTP 1.1 - For non-keepalived connections - Send `Connection: close` for HTTP 1.1 only - don't send `Connection` header for HTTP 1.0 - Add version parameter to ClientSession constructor, deprecate it for session.request() and family (#736) - Enable access log by default (#735) - Deprecate app.router.register_route() (the method was not documented intentionally BTW). 
- Deprecate app.router.named_routes() in favor of app.router.named_resources() - route.add_static accepts pathlib.Path now (#743) - Add command line support: `$ python -m aiohttp.web package.main` (#740) - FAQ section was added to docs. Enjoy and fill free to contribute new topics - Add async context manager support to ClientSession - Document ClientResponse's host, method, url properties - Use CORK/NODELAY in client API (#748) - ClientSession.close and Connector.close are coroutines now - Close client connection on exception in ClientResponse.release() - Allow to read multipart parts without content-length specified (#750) - Add support for unix domain sockets to gunicorn worker (#470) - Add test for default Expect handler (#601) - Add the first demo project - Rename `loader` keyword argument in `web.Request.json` method. (#646) - Add local socket binding for TCPConnector (#678) 0.20.2 (01-07-2016) =================== - Enable use of `await` for a class based view (#717) - Check address family to fill wsgi env properly (#718) - Fix memory leak in headers processing (thanks to Marco Paolini) (#723) 0.20.1 (12-30-2015) =================== - Raise RuntimeError is Timeout context manager was used outside of task context. 
- Add number of bytes to stream.read_nowait (#700) - Use X-FORWARDED-PROTO for wsgi.url_scheme when available 0.20.0 (12-28-2015) =================== - Extend list of web exceptions, add HTTPMisdirectedRequest, HTTPUpgradeRequired, HTTPPreconditionRequired, HTTPTooManyRequests, HTTPRequestHeaderFieldsTooLarge, HTTPVariantAlsoNegotiates, HTTPNotExtended, HTTPNetworkAuthenticationRequired status codes (#644) - Do not remove AUTHORIZATION header by WSGI handler (#649) - Fix broken support for https proxies with authentication (#617) - Get REMOTE_* and SEVER_* http vars from headers when listening on unix socket (#654) - Add HTTP 308 support (#663) - Add Tf format (time to serve request in seconds, %06f format) to access log (#669) - Remove one and a half years long deprecated ClientResponse.read_and_close() method - Optimize chunked encoding: use a single syscall instead of 3 calls on sending chunked encoded data - Use TCP_CORK and TCP_NODELAY to optimize network latency and throughput (#680) - Websocket XOR performance improved (#687) - Avoid sending cookie attributes in Cookie header (#613) - Round server timeouts to seconds for grouping pending calls. That leads to less amount of poller syscalls e.g. epoll.poll(). 
(#702) - Close connection on websocket handshake error (#703) - Implement class based views (#684) - Add *headers* parameter to ws_connect() (#709) - Drop unused function `parse_remote_addr()` (#708) - Close session on exception (#707) - Store http code and headers in WSServerHandshakeError (#706) - Make some low-level message properties readonly (#710) 0.19.0 (11-25-2015) =================== - Memory leak in ParserBuffer (#579) - Support gunicorn's `max_requests` settings in gunicorn worker - Fix wsgi environment building (#573) - Improve access logging (#572) - Drop unused host and port from low-level server (#586) - Add Python 3.5 `async for` implementation to server websocket (#543) - Add Python 3.5 `async for` implementation to client websocket - Add Python 3.5 `async with` implementation to client websocket - Add charset parameter to web.Response constructor (#593) - Forbid passing both Content-Type header and content_type or charset params into web.Response constructor - Forbid duplicating of web.Application and web.Request (#602) - Add an option to pass Origin header in ws_connect (#607) - Add json_response function (#592) - Make concurrent connections respect limits (#581) - Collect history of responses if redirects occur (#614) - Enable passing pre-compressed data in requests (#621) - Expose named routes via UrlDispatcher.named_routes() (#622) - Allow disabling sendfile by environment variable AIOHTTP_NOSENDFILE (#629) - Use ensure_future if available - Always quote params for Content-Disposition (#641) - Support async for in multipart reader (#640) - Add Timeout context manager (#611) 0.18.4 (13-11-2015) =================== - Relax rule for router names again by adding dash to allowed characters: they may contain identifiers, dashes, dots and columns 0.18.3 (25-10-2015) =================== - Fix formatting for _RequestContextManager helper (#590) 0.18.2 (22-10-2015) =================== - Fix regression for OpenSSL < 1.0.0 (#583) 0.18.1 (20-10-2015) 
=================== - Relax rule for router names: they may contain dots and columns starting from now 0.18.0 (19-10-2015) =================== - Use errors.HttpProcessingError.message as HTTP error reason and message (#459) - Optimize cythonized multidict a bit - Change repr's of multidicts and multidict views - default headers in ClientSession are now case-insensitive - Make '=' char and 'wss://' schema safe in urls (#477) - `ClientResponse.close()` forces connection closing by default from now (#479) N.B. Backward incompatible change: was `.close(force=False) Using `force` parameter for the method is deprecated: use `.release()` instead. - Properly requote URL's path (#480) - add `skip_auto_headers` parameter for client API (#486) - Properly parse URL path in aiohttp.web.Request (#489) - Raise RuntimeError when chunked enabled and HTTP is 1.0 (#488) - Fix a bug with processing io.BytesIO as data parameter for client API (#500) - Skip auto-generation of Content-Type header (#507) - Use sendfile facility for static file handling (#503) - Default `response_factory` in `app.router.add_static` now is `StreamResponse`, not `None`. The functionality is not changed if default is not specified. - Drop `ClientResponse.message` attribute, it was always implementation detail. - Streams are optimized for speed and mostly memory in case of a big HTTP message sizes (#496) - Fix a bug for server-side cookies for dropping cookie and setting it again without Max-Age parameter. - Don't trim redirect URL in client API (#499) - Extend precision of access log "D" to milliseconds (#527) - Deprecate `StreamResponse.start()` method in favor of `StreamResponse.prepare()` coroutine (#525) `.start()` is still supported but responses begun with `.start()` does not call signal for response preparing to be sent. 
- Add `StreamReader.__repr__` - Drop Python 3.3 support, from now minimal required version is Python 3.4.1 (#541) - Add `async with` support for `ClientSession.request()` and family (#536) - Ignore message body on 204 and 304 responses (#505) - `TCPConnector` processed both IPv4 and IPv6 by default (#559) - Add `.routes()` view for urldispatcher (#519) - Route name should be a valid identifier name from now (#567) - Implement server signals (#562) - Drop a year-old deprecated *files* parameter from client API. - Added `async for` support for aiohttp stream (#542) 0.17.4 (09-29-2015) =================== - Properly parse URL path in aiohttp.web.Request (#489) - Add missing coroutine decorator, the client api is await-compatible now 0.17.3 (08-28-2015) =================== - Remove Content-Length header on compressed responses (#450) - Support Python 3.5 - Improve performance of transport in-use list (#472) - Fix connection pooling (#473) 0.17.2 (08-11-2015) =================== - Don't forget to pass `data` argument forward (#462) - Fix multipart read bytes count (#463) 0.17.1 (08-10-2015) =================== - Fix multidict comparison to arbitrary abc.Mapping 0.17.0 (08-04-2015) =================== - Make StaticRoute support Last-Modified and If-Modified-Since headers (#386) - Add Request.if_modified_since and Stream.Response.last_modified properties - Fix deflate compression when writing a chunked response (#395) - Request`s content-length header is cleared now after redirect from POST method (#391) - Return a 400 if server received a non HTTP content (#405) - Fix keep-alive support for aiohttp clients (#406) - Allow gzip compression in high-level server response interface (#403) - Rename TCPConnector.resolve and family to dns_cache (#415) - Make UrlDispatcher ignore quoted characters during url matching (#414) Backward-compatibility warning: this may change the url matched by your queries if they send quoted character (like %2F for /) (#414) - Use optional cchardet 
accelerator if present (#418) - Borrow loop from Connector in ClientSession if loop is not set - Add context manager support to ClientSession for session closing. - Add toplevel get(), post(), put(), head(), delete(), options(), patch() coroutines. - Fix IPv6 support for client API (#425) - Pass SSL context through proxy connector (#421) - Make the rule: path for add_route should start with slash - Don't process request finishing by low-level server on closed event loop - Don't override data if multiple files are uploaded with same key (#433) - Ensure multipart.BodyPartReader.read_chunk read all the necessary data to avoid false assertions about malformed multipart payload - Don't send body for 204, 205 and 304 http exceptions (#442) - Correctly skip Cython compilation in MSVC not found (#453) - Add response factory to StaticRoute (#456) - Don't append trailing CRLF for multipart.BodyPartReader (#454) 0.16.6 (07-15-2015) =================== - Skip compilation on Windows if vcvarsall.bat cannot be found (#438) 0.16.5 (06-13-2015) =================== - Get rid of all comprehensions and yielding in _multidict (#410) 0.16.4 (06-13-2015) =================== - Don't clear current exception in multidict's `__repr__` (cythonized versions) (#410) 0.16.3 (05-30-2015) =================== - Fix StaticRoute vulnerability to directory traversal attacks (#380) 0.16.2 (05-27-2015) =================== - Update python version required for `__del__` usage: it's actually 3.4.1 instead of 3.4.0 - Add check for presence of loop.is_closed() method before call the former (#378) 0.16.1 (05-27-2015) =================== - Fix regression in static file handling (#377) 0.16.0 (05-26-2015) =================== - Unset waiter future after cancellation (#363) - Update request url with query parameters (#372) - Support new `fingerprint` param of TCPConnector to enable verifying SSL certificates via MD5, SHA1, or SHA256 digest (#366) - Setup uploaded filename if field value is binary and transfer 
encoding is not specified (#349) - Implement `ClientSession.close()` method - Implement `connector.closed` readonly property - Implement `ClientSession.closed` readonly property - Implement `ClientSession.connector` readonly property - Implement `ClientSession.detach` method - Add `__del__` to client-side objects: sessions, connectors, connections, requests, responses. - Refactor connections cleanup by connector (#357) - Add `limit` parameter to connector constructor (#358) - Add `request.has_body` property (#364) - Add `response_class` parameter to `ws_connect()` (#367) - `ProxyConnector` does not support keep-alive requests by default starting from now (#368) - Add `connector.force_close` property - Add ws_connect to ClientSession (#374) - Support optional `chunk_size` parameter in `router.add_static()` 0.15.3 (04-22-2015) =================== - Fix graceful shutdown handling - Fix `Expect` header handling for not found and not allowed routes (#340) 0.15.2 (04-19-2015) =================== - Flow control subsystem refactoring - HTTP server performance optimizations - Allow to match any request method with `*` - Explicitly call drain on transport (#316) - Make chardet module dependency mandatory (#318) - Support keep-alive for HTTP 1.0 (#325) - Do not chunk single file during upload (#327) - Add ClientSession object for cookie storage and default headers (#328) - Add `keep_alive_on` argument for HTTP server handler. 
0.15.1 (03-31-2015) =================== - Pass Autobahn Testsuite tests - Fixed websocket fragmentation - Fixed websocket close procedure - Fixed parser buffer limits - Added `timeout` parameter to WebSocketResponse ctor - Added `WebSocketResponse.close_code` attribute 0.15.0 (03-27-2015) =================== - Client WebSockets support - New Multipart system (#273) - Support for "Except" header (#287) (#267) - Set default Content-Type for post requests (#184) - Fix issue with construction dynamic route with regexps and trailing slash (#266) - Add repr to web.Request - Add repr to web.Response - Add repr for NotFound and NotAllowed match infos - Add repr for web.Application - Add repr to UrlMappingMatchInfo (#217) - Gunicorn 19.2.x compatibility 0.14.4 (01-29-2015) =================== - Fix issue with error during constructing of url with regex parts (#264) 0.14.3 (01-28-2015) =================== - Use path='/' by default for cookies (#261) 0.14.2 (01-23-2015) =================== - Connections leak in BaseConnector (#253) - Do not swallow websocket reader exceptions (#255) - web.Request's read, text, json are memorized (#250) 0.14.1 (01-15-2015) =================== - HttpMessage._add_default_headers does not overwrite existing headers (#216) - Expose multidict classes at package level - add `aiohttp.web.WebSocketResponse` - According to RFC 6455 websocket subprotocol preference order is provided by client, not by server - websocket's ping and pong accept optional message parameter - multidict views do not accept `getall` parameter anymore, it returns the full body anyway. - multidicts have optional Cython optimization, cythonized version of multidicts is about 5 times faster than pure Python. - multidict.getall() returns `list`, not `tuple`. - Backward incompatible change: now there are two mutable multidicts (`MultiDict`, `CIMultiDict`) and two immutable multidict proxies (`MultiDictProxy` and `CIMultiDictProxy`). 
Previous edition of multidicts was not a part of public API BTW. - Router refactoring to push Not Allowed and Not Found in middleware processing - Convert `ConnectionError` to `aiohttp.DisconnectedError` and don't eat `ConnectionError` exceptions from web handlers. - Remove hop headers from Response class, wsgi response still uses hop headers. - Allow to send raw chunked encoded response. - Allow to encode output bytes stream into chunked encoding. - Allow to compress output bytes stream with `deflate` encoding. - Server has 75 seconds keepalive timeout now, was non-keepalive by default. - Application does not accept `**kwargs` anymore ((#243)). - Request is inherited from dict now for making per-request storage to middlewares ((#242)). 0.13.1 (12-31-2014) =================== - Add `aiohttp.web.StreamResponse.started` property (#213) - HTML escape traceback text in `ServerHttpProtocol.handle_error` - Mention handler and middlewares in `aiohttp.web.RequestHandler.handle_request` on error ((#218)) 0.13.0 (12-29-2014) =================== - `StreamResponse.charset` converts value to lower-case on assigning. - Chain exceptions when raise `ClientRequestError`. - Support custom regexps in route variables (#204) - Fixed graceful shutdown, disable keep-alive on connection closing. - Decode HTTP message with `utf-8` encoding, some servers send headers in utf-8 encoding (#207) - Support `aiohtt.web` middlewares (#209) - Add ssl_context to TCPConnector (#206) 0.12.0 (12-12-2014) =================== - Deep refactoring of `aiohttp.web` in backward-incompatible manner. Sorry, we have to do this. - Automatically force aiohttp.web handlers to coroutines in `UrlDispatcher.add_route()` (#186) - Rename `Request.POST()` function to `Request.post()` - Added POST attribute - Response processing refactoring: constructor does not accept Request instance anymore. 
- Pass application instance to finish callback - Exceptions refactoring - Do not unquote query string in `aiohttp.web.Request` - Fix concurrent access to payload in `RequestHandle.handle_request()` - Add access logging to `aiohttp.web` - Gunicorn worker for `aiohttp.web` - Removed deprecated `AsyncGunicornWorker` - Removed deprecated HttpClient 0.11.0 (11-29-2014) =================== - Support named routes in `aiohttp.web.UrlDispatcher` (#179) - Make websocket subprotocols conform to spec (#181) 0.10.2 (11-19-2014) =================== - Don't unquote `environ['PATH_INFO']` in wsgi.py (#177) 0.10.1 (11-17-2014) =================== - aiohttp.web.HTTPException and descendants now files response body with string like `404: NotFound` - Fix multidict `__iter__`, the method should iterate over keys, not (key, value) pairs. 0.10.0 (11-13-2014) =================== - Add aiohttp.web subpackage for highlevel HTTP server support. - Add *reason* optional parameter to aiohttp.protocol.Response ctor. - Fix aiohttp.client bug for sending file without content-type. - Change error text for connection closed between server responses from 'Can not read status line' to explicit 'Connection closed by server' - Drop closed connections from connector (#173) - Set server.transport to None on .closing() (#172) 0.9.3 (10-30-2014) ================== - Fix compatibility with asyncio 3.4.1+ (#170) 0.9.2 (10-16-2014) ================== - Improve redirect handling (#157) - Send raw files as is (#153) - Better websocket support (#150) 0.9.1 (08-30-2014) ================== - Added MultiDict support for client request params and data (#114). - Fixed parameter type for IncompleteRead exception (#118). - Strictly require ASCII headers names and values (#137) - Keep port in ProxyConnector (#128). - Python 3.4.1 compatibility (#131). 0.9.0 (07-08-2014) ================== - Better client basic authentication support (#112). - Fixed incorrect line splitting in HttpRequestParser (#97). 
- Support StreamReader and DataQueue as request data. - Client files handling refactoring (#20). - Backward incompatible: Replace DataQueue with StreamReader for request payload (#87). 0.8.4 (07-04-2014) ================== - Change ProxyConnector authorization parameters. 0.8.3 (07-03-2014) ================== - Publish TCPConnector properties: verify_ssl, family, resolve, resolved_hosts. - Don't parse message body for HEAD responses. - Refactor client response decoding. 0.8.2 (06-22-2014) ================== - Make ProxyConnector.proxy immutable property. - Make UnixConnector.path immutable property. - Fix resource leak for aiohttp.request() with implicit connector. - Rename Connector's reuse_timeout to keepalive_timeout. 0.8.1 (06-18-2014) ================== - Use case insensitive multidict for server request/response headers. - MultiDict.getall() accepts default value. - Catch server ConnectionError. - Accept MultiDict (and derived) instances in aiohttp.request header argument. - Proxy 'CONNECT' support. 0.8.0 (06-06-2014) ================== - Add support for utf-8 values in HTTP headers - Allow to use custom response class instead of HttpResponse - Use MultiDict for client request headers - Use MultiDict for server request/response headers - Store response headers in ClientResponse.headers attribute - Get rid of timeout parameter in aiohttp.client API - Exceptions refactoring 0.7.3 (05-20-2014) ================== - Simple HTTP proxy support. 0.7.2 (05-14-2014) ================== - Get rid of `__del__` methods - Use ResourceWarning instead of logging warning record. 0.7.1 (04-28-2014) ================== - Do not unquote client request urls. - Allow multiple waiters on transport drain. - Do not return client connection to pool in case of exceptions. - Rename SocketConnector to TCPConnector and UnixSocketConnector to UnixConnector. 0.7.0 (04-16-2014) ================== - Connection flow control. - HTTP client session/connection pool refactoring. 
- Better handling for bad server requests. 0.6.5 (03-29-2014) ================== - Added client session reuse timeout. - Better client request cancellation support. - Better handling responses without content length. - Added HttpClient verify_ssl parameter support. 0.6.4 (02-27-2014) ================== - Log content-length missing warning only for put and post requests. 0.6.3 (02-27-2014) ================== - Better support for server exit. - Read response body until EOF if content-length is not defined (#14) 0.6.2 (02-18-2014) ================== - Fix trailing char in allowed_methods. - Start slow request timer for first request. 0.6.1 (02-17-2014) ================== - Added utility method HttpResponse.read_and_close() - Added slow request timeout. - Enable socket SO_KEEPALIVE if available. 0.6.0 (02-12-2014) ================== - Better handling for process exit. 0.5.0 (01-29-2014) ================== - Allow to use custom HttpRequest client class. - Use gunicorn keepalive setting for asynchronous worker. - Log leaking responses. - python 3.4 compatibility 0.4.4 (11-15-2013) ================== - Resolve only AF_INET family, because it is not clear how to pass extra info to asyncio. 0.4.3 (11-15-2013) ================== - Allow to wait completion of request with `HttpResponse.wait_for_close()` 0.4.2 (11-14-2013) ================== - Handle exception in client request stream. - Prevent host resolving for each client request. 0.4.1 (11-12-2013) ================== - Added client support for `expect: 100-continue` header. 
0.4 (11-06-2013) ================ - Added custom wsgi application close procedure - Fixed concurrent host failure in HttpClient 0.3 (11-04-2013) ================ - Added PortMapperWorker - Added HttpClient - Added TCP connection timeout to HTTP client - Better client connection errors handling - Gracefully handle process exit 0.2 === - Fix packaging aiohttp-3.0.1/LICENSE.txt0000666000000000000000000002610413240304665013146 0ustar 00000000000000Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. 
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. 
Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "{}" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 
Copyright 2013-2018 Nikolay Kim and Andrew Svetlov Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. aiohttp-3.0.1/Makefile0000666000000000000000000000615313240304665012765 0ustar 00000000000000# Some simple testing tasks (sorry, UNIX only). all: test .install-deps: $(shell find requirements -type f) @pip install -U -r requirements/dev.txt @touch .install-deps isort: isort -rc aiohttp isort -rc tests isort -rc examples isort -rc demos flake: .flake .flake: .install-deps $(shell find aiohttp -type f) \ $(shell find tests -type f) \ $(shell find examples -type f) \ $(shell find demos -type f) @flake8 aiohttp --exclude=aiohttp/backport_cookies.py @flake8 examples tests demos python setup.py check -rms @if ! isort -c -rc aiohttp tests examples; then \ echo "Import sort errors, run 'make isort' to fix them!!!"; \ isort --diff -rc aiohttp tests examples; \ false; \ fi @touch .flake check_changes: @./tools/check_changes.py .develop: .install-deps $(shell find aiohttp -type f) .flake check_changes @pip install -e . 
@touch .develop test: .develop @pytest -q ./tests vtest: .develop @pytest -s -v ./tests cov cover coverage: tox cov-dev: .develop @echo "Run without extensions" @AIOHTTP_NO_EXTENSIONS=1 pytest --cov=aiohttp tests @pytest --cov=aiohttp --cov-report=term --cov-report=html --cov-append tests @echo "open file://`pwd`/htmlcov/index.html" cov-ci-no-ext: .develop @echo "Run without extensions" @AIOHTTP_NO_EXTENSIONS=1 pytest --cov=aiohttp tests cov-ci-aio-debug: .develop @echo "Run in debug mode" @PYTHONASYNCIODEBUG=1 pytest --cov=aiohttp --cov-append tests cov-ci-run: .develop @echo "Regular run" @pytest --cov=aiohttp --cov-report=term --cov-report=html --cov-append tests cov-dev-full: cov-ci-no-ext cov-ci-aio-debug cov-ci-run @echo "open file://`pwd`/htmlcov/index.html" clean: @rm -rf `find . -name __pycache__` @rm -f `find . -type f -name '*.py[co]' ` @rm -f `find . -type f -name '*~' ` @rm -f `find . -type f -name '.*~' ` @rm -f `find . -type f -name '@*' ` @rm -f `find . -type f -name '#*#' ` @rm -f `find . -type f -name '*.orig' ` @rm -f `find . 
-type f -name '*.rej' ` @rm -f .coverage @rm -rf htmlcov @rm -rf build @rm -rf cover @make -C docs clean @python setup.py clean @rm -f aiohttp/_frozenlist.html @rm -f aiohttp/_frozenlist.c @rm -f aiohttp/_frozenlist.*.so @rm -f aiohttp/_frozenlist.*.pyd @rm -f aiohttp/_http_parser.html @rm -f aiohttp/_http_parser.c @rm -f aiohttp/_http_parser.*.so @rm -f aiohttp/_http_parser.*.pyd @rm -f aiohttp/_multidict.html @rm -f aiohttp/_multidict.c @rm -f aiohttp/_multidict.*.so @rm -f aiohttp/_multidict.*.pyd @rm -f aiohttp/_websocket.html @rm -f aiohttp/_websocket.c @rm -f aiohttp/_websocket.*.so @rm -f aiohttp/_websocket.*.pyd @rm -f aiohttp/_parser.html @rm -f aiohttp/_parser.c @rm -f aiohttp/_parser.*.so @rm -f aiohttp/_parser.*.pyd @rm -rf .tox @rm -f .develop @rm -f .flake @rm -f .install-deps @rm -rf aiohttp.egg-info doc: @make -C docs html SPHINXOPTS="-W -E" @echo "open file://`pwd`/docs/_build/html/index.html" doc-spelling: @make -C docs spelling SPHINXOPTS="-W -E" install: @pip install -U pip @pip install -Ur requirements/dev.txt .PHONY: all build flake test vtest cov clean doc aiohttp-3.0.1/MANIFEST.in0000666000000000000000000000051413240304665013056 0ustar 00000000000000include LICENSE.txt include CHANGES.rst include README.rst include CONTRIBUTORS.txt include Makefile graft aiohttp graft docs graft examples graft tests global-exclude *.pyc global-exclude *.pyd global-exclude *.so exclude aiohttp/_frozenlist.html exclude aiohttp/_http_parser.html exclude aiohttp/_websocket.html prune docs/_build aiohttp-3.0.1/PKG-INFO0000666000000000000000000003472513240305035012420 0ustar 00000000000000Metadata-Version: 1.2 Name: aiohttp Version: 3.0.1 Summary: Async http client/server framework (asyncio) Home-page: https://github.com/aio-libs/aiohttp/ Author: Nikolay Kim , Andrew Svetlov Author-email: aio-libs@googlegroups.com License: Apache 2 Description-Content-Type: UNKNOWN Description: ================================== Async http client/server framework 
================================== .. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/_static/aiohttp-icon-128x128.png :height: 64px :width: 64px :alt: aiohttp logo .. image:: https://travis-ci.org/aio-libs/aiohttp.svg?branch=master :target: https://travis-ci.org/aio-libs/aiohttp :align: right :alt: Travis status for master branch .. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg :target: https://codecov.io/gh/aio-libs/aiohttp :alt: codecov.io status for master branch .. image:: https://badge.fury.io/py/aiohttp.svg :target: https://badge.fury.io/py/aiohttp :alt: Latest PyPI package version .. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest :target: http://docs.aiohttp.org/ :alt: Latest Read The Docs .. image:: https://badges.gitter.im/Join%20Chat.svg :target: https://gitter.im/aio-libs/Lobby :alt: Chat on Gitter Key Features ============ - Supports both client and server side of HTTP protocol. - Supports both client and server Web-Sockets out-of-the-box. - Web-server has middlewares and pluggable routing. Getting started =============== Client ------ To retrieve something from the web: .. code-block:: python import aiohttp import asyncio import async_timeout async def fetch(session, url): async with async_timeout.timeout(10): async with session.get(url) as response: return await response.text() async def main(): async with aiohttp.ClientSession() as session: html = await fetch(session, 'http://python.org') print(html) if __name__ == '__main__': loop = asyncio.get_event_loop() loop.run_until_complete(main()) Server ------ This is simple usage example: .. 
code-block:: python from aiohttp import web async def handle(request): name = request.match_info.get('name', "Anonymous") text = "Hello, " + name return web.Response(text=text) async def wshandler(request): ws = web.WebSocketResponse() await ws.prepare(request) async for msg in ws: if msg.type == web.MsgType.text: await ws.send_str("Hello, {}".format(msg.data)) elif msg.type == web.MsgType.binary: await ws.send_bytes(msg.data) elif msg.type == web.MsgType.close: break return ws app = web.Application() app.router.add_get('/echo', wshandler) app.router.add_get('/', handle) app.router.add_get('/{name}', handle) web.run_app(app) Documentation ============= https://aiohttp.readthedocs.io/ External links ============== * `Third party libraries `_ * `Built with aiohttp `_ * `Powered by aiohttp `_ Feel free to make a Pull Request for adding your link to these pages! Communication channels ====================== *aio-libs* google group: https://groups.google.com/forum/#!forum/aio-libs Feel free to post your questions and ideas here. *gitter chat* https://gitter.im/aio-libs/Lobby We support `Stack Overflow `_. Please add *aiohttp* tag to your question there. Requirements ============ - Python >= 3.5.3 - async-timeout_ - chardet_ - multidict_ - yarl_ Optionally you may install the cChardet_ and aiodns_ libraries (highly recommended for sake of speed). .. _chardet: https://pypi.python.org/pypi/chardet .. _aiodns: https://pypi.python.org/pypi/aiodns .. _multidict: https://pypi.python.org/pypi/multidict .. _yarl: https://pypi.python.org/pypi/yarl .. _async-timeout: https://pypi.python.org/pypi/async_timeout .. _cChardet: https://pypi.python.org/pypi/cchardet License ======= ``aiohttp`` is offered under the Apache 2 license. Keepsafe ======== The aiohttp community would like to thank Keepsafe (https://www.getkeepsafe.com) for it's support in the early days of the project. 
Source code =========== The latest developer version is available in a github repository: https://github.com/aio-libs/aiohttp Benchmarks ========== If you are interested in by efficiency, AsyncIO community maintains a list of benchmarks on the official wiki: https://github.com/python/asyncio/wiki/Benchmarks ========= Changelog ========= .. You should *NOT* be adding new change log entries to this file, this file is managed by towncrier. You *may* edit previous change logs to fix problems like typo corrections or such. To add a new change log entry, please see https://pip.pypa.io/en/latest/development/#adding-a-news-entry we named the news folder "changes". WARNING: Don't drop the next directive! .. towncrier release notes start 3.0.0 (2018-02-12) ================== Features -------- - Speed up the `PayloadWriter.write` method for large request bodies. (#2126) - StreamResponse and Response are now MutableMappings. (#2246) - ClientSession publishes a set of signals to track the HTTP request execution. (#2313) - Content-Disposition fast access in ClientResponse (#2455) - Added support to Flask-style decorators with class-based Views. (#2472) - Signal handlers (registered callbacks) should be coroutines. (#2480) - Support ``async with test_client.ws_connect(...)`` (#2525) - Introduce *site* and *application runner* as underlying API for `web.run_app` implementation. (#2530) - Only quote multipart boundary when necessary and sanitize input (#2544) - Make the `aiohttp.ClientResponse.get_encoding` method public with the processing of invalid charset while detecting content encoding. (#2549) - Add optional configurable per message compression for `ClientWebSocketResponse` and `WebSocketResponse`. (#2551) - Add hysteresis to `StreamReader` to prevent flipping between paused and resumed states too often. 
(#2555) - Support `.netrc` by `trust_env` (#2581) - Avoid to create a new resource when adding a route with the same name and path of the last added resource (#2586) - `MultipartWriter.boundary` is `str` now. (#2589) - Allow a custom port to be used by `TestServer` (and associated pytest fixtures) (#2613) - Add param access_log_class to web.run_app function (#2615) - Add ``ssl`` parameter to client API (#2626) - Fixes performance issue introduced by #2577. When there are no middlewares installed by the user, no additional and useless code is executed. (#2629) - Rename PayloadWriter to StreamWriter (#2654) - New options *reuse_port*, *reuse_address* are added to `run_app` and `TCPSite`. (#2679) - Use custom classes to pass client signals parameters (#2686) - Use ``attrs`` library for data classes, replace `namedtuple`. (#2690) - Pytest fixtures renaming, add ``aiohttp_`` prefix (#2578) - Add ``aiohttp-`` prefix for ``pytest-aiohttp`` command line parameters (#2578) Bugfixes -------- - Correctly process upgrade request from server to HTTP2. ``aiohttp`` does not support HTTP2 yet, the protocol is not upgraded but response is handled correctly. (#2277) - Fix ClientConnectorSSLError and ClientProxyConnectionError for proxy connector (#2408) - Fix connector convert OSError to ClientConnectorError (#2423) - Fix connection attempts for multiple dns hosts (#2424) - Fix writing to closed transport by raising `asyncio.CancelledError` (#2499) - Fix warning in `ClientSession.__del__` by stopping to try to close it. (#2523) - Fixed race-condition for iterating addresses from the DNSCache. 
(#2620) - Fix default value of `access_log_format` argument in `web.run_app` (#2649) - Freeze sub-application on adding to parent app (#2656) - Do percent encoding for `.url_for()` parameters (#2668) - Correctly process request start time and multiple request/response headers in access log extra (#2641) Improved Documentation ---------------------- - Improve tutorial docs, using `literalinclude` to link to the actual files. (#2396) - Small improvement docs: better example for file uploads. (#2401) - Rename `from_env` to `trust_env` in client reference. (#2451) - Fixed mistype in `Proxy Support` section where `trust_env` parameter was used in `session.get("http://python.org", trust_env=True)` method instead of aiohttp.ClientSession constructor as follows: `aiohttp.ClientSession(trust_env=True)`. (#2688) - Fix issue with unittest example not compiling in testing docs. (#2717) Deprecations and Removals ------------------------- - Simplify HTTP pipelining implementation (#2109) - Drop `StreamReaderPayload` and `DataQueuePayload`. (#2257) - Drop `md5` and `sha1` finger-prints (#2267) - Drop WSMessage.tp (#2321) - Drop Python 3.4 and Python 3.5.0, 3.5.1, 3.5.2. Minimal supported Python versions are 3.5.3 and 3.6.0. `yield from` is gone, use `async/await` syntax. (#2343) - Drop `aiohttp.Timeout` and use `async_timeout.timeout` instead. (#2348) - Drop `resolve` param from TCPConnector. (#2377) - Add DeprecationWarning for returning HTTPException (#2415) - `send_str()`, `send_bytes()`, `send_json()`, `ping()` and `pong()` are genuine async functions now. (#2475) - Drop undocumented `app.on_pre_signal` and `app.on_post_signal`. Signal handlers should be coroutines, support for regular functions is dropped. (#2480) - `StreamResponse.drain()` is not a part of public API anymore, just use `await StreamResponse.write()`. `StreamResponse.write` is converted to async function. (#2483) - Drop deprecated `slow_request_timeout` param and `**kwargs`` from `RequestHandler`. 
(#2500) - Drop deprecated `resource.url()`. (#2501) - Remove `%u` and `%l` format specifiers from access log format. (#2506) - Drop deprecated `request.GET` property. (#2547) - Simplify stream classes: drop `ChunksQueue` and `FlowControlChunksQueue`, merge `FlowControlStreamReader` functionality into `StreamReader`, drop `FlowControlStreamReader` name. (#2555) - Do not create a new resource on `router.add_get(..., allow_head=True)` (#2585) - Drop access to TCP tuning options from PayloadWriter and Response classes (#2604) - Drop deprecated `encoding` parameter from client API (#2606) - Deprecate ``verify_ssl``, ``ssl_context`` and ``fingerprint`` parameters in client API (#2626) - Get rid of the legacy class StreamWriter. (#2651) - Forbid non-strings in `resource.url_for()` parameters. (#2668) - Deprecate inheritance from ``ClientSession`` and ``web.Application`` and custom user attributes for ``ClientSession``, ``web.Request`` and ``web.Application`` (#2691) - Drop `resp = await aiohttp.request(...)` syntax for sake of `async with aiohttp.request(...) as resp:`. (#2540) - Forbid synchronous context managers for `ClientSession` and test server/client. 
(#2362) Misc ---- - #2552 Platform: UNKNOWN Classifier: License :: OSI Approved :: Apache Software License Classifier: Intended Audience :: Developers Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Development Status :: 5 - Production/Stable Classifier: Operating System :: POSIX Classifier: Operating System :: MacOS :: MacOS X Classifier: Operating System :: Microsoft :: Windows Classifier: Topic :: Internet :: WWW/HTTP Classifier: Framework :: AsyncIO Requires-Python: >=3.5.3 aiohttp-3.0.1/pyproject.toml0000666000000000000000000000020113240304665014225 0ustar 00000000000000[tool.towncrier] package = "aiohttp" filename = "CHANGES.rst" directory = "CHANGES/" title_format = "{version} ({project_date})" aiohttp-3.0.1/pytest.ini0000666000000000000000000000007713240304665013355 0ustar 00000000000000[pytest] addopts= --aiohttp-loop=all filterwarnings= error aiohttp-3.0.1/README.rst0000666000000000000000000001063313240304665013012 0ustar 00000000000000================================== Async http client/server framework ================================== .. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/_static/aiohttp-icon-128x128.png :height: 64px :width: 64px :alt: aiohttp logo .. image:: https://travis-ci.org/aio-libs/aiohttp.svg?branch=master :target: https://travis-ci.org/aio-libs/aiohttp :align: right :alt: Travis status for master branch .. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg :target: https://codecov.io/gh/aio-libs/aiohttp :alt: codecov.io status for master branch .. image:: https://badge.fury.io/py/aiohttp.svg :target: https://badge.fury.io/py/aiohttp :alt: Latest PyPI package version .. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest :target: http://docs.aiohttp.org/ :alt: Latest Read The Docs .. 
image:: https://badges.gitter.im/Join%20Chat.svg :target: https://gitter.im/aio-libs/Lobby :alt: Chat on Gitter Key Features ============ - Supports both client and server side of HTTP protocol. - Supports both client and server Web-Sockets out-of-the-box. - Web-server has middlewares and pluggable routing. Getting started =============== Client ------ To retrieve something from the web: .. code-block:: python import aiohttp import asyncio import async_timeout async def fetch(session, url): async with async_timeout.timeout(10): async with session.get(url) as response: return await response.text() async def main(): async with aiohttp.ClientSession() as session: html = await fetch(session, 'http://python.org') print(html) if __name__ == '__main__': loop = asyncio.get_event_loop() loop.run_until_complete(main()) Server ------ This is simple usage example: .. code-block:: python from aiohttp import web async def handle(request): name = request.match_info.get('name', "Anonymous") text = "Hello, " + name return web.Response(text=text) async def wshandler(request): ws = web.WebSocketResponse() await ws.prepare(request) async for msg in ws: if msg.type == web.MsgType.text: await ws.send_str("Hello, {}".format(msg.data)) elif msg.type == web.MsgType.binary: await ws.send_bytes(msg.data) elif msg.type == web.MsgType.close: break return ws app = web.Application() app.router.add_get('/echo', wshandler) app.router.add_get('/', handle) app.router.add_get('/{name}', handle) web.run_app(app) Documentation ============= https://aiohttp.readthedocs.io/ External links ============== * `Third party libraries `_ * `Built with aiohttp `_ * `Powered by aiohttp `_ Feel free to make a Pull Request for adding your link to these pages! Communication channels ====================== *aio-libs* google group: https://groups.google.com/forum/#!forum/aio-libs Feel free to post your questions and ideas here. *gitter chat* https://gitter.im/aio-libs/Lobby We support `Stack Overflow `_. 
Please add *aiohttp* tag to your question there. Requirements ============ - Python >= 3.5.3 - async-timeout_ - chardet_ - multidict_ - yarl_ Optionally you may install the cChardet_ and aiodns_ libraries (highly recommended for sake of speed). .. _chardet: https://pypi.python.org/pypi/chardet .. _aiodns: https://pypi.python.org/pypi/aiodns .. _multidict: https://pypi.python.org/pypi/multidict .. _yarl: https://pypi.python.org/pypi/yarl .. _async-timeout: https://pypi.python.org/pypi/async_timeout .. _cChardet: https://pypi.python.org/pypi/cchardet License ======= ``aiohttp`` is offered under the Apache 2 license. Keepsafe ======== The aiohttp community would like to thank Keepsafe (https://www.getkeepsafe.com) for it's support in the early days of the project. Source code =========== The latest developer version is available in a github repository: https://github.com/aio-libs/aiohttp Benchmarks ========== If you are interested in by efficiency, AsyncIO community maintains a list of benchmarks on the official wiki: https://github.com/python/asyncio/wiki/Benchmarks aiohttp-3.0.1/requirements/0000777000000000000000000000000013240305035014033 5ustar 00000000000000aiohttp-3.0.1/requirements/ci-wheel.txt0000666000000000000000000000125313240304665016302 0ustar 00000000000000attrs==17.4.0 async-timeout==2.0.0 brotlipy==0.7.0 cchardet==2.1.1 chardet==3.0.4 coverage==4.4.2 cython==0.27.3 flake8==3.5.0 gunicorn==19.7.1 isort==4.3.3 pip==9.0.1 pyflakes==1.6.0 multidict==4.1.0 pytest==3.4.0 pytest-cov==2.5.1 pytest-mock==1.6.3 pytest-xdist==1.22.0 towncrier==17.8.0 tox==2.9.1 twine==1.9.1 yarl==1.1.0 # Using PEP 508 env markers to control dependency on runtimes: aiodns==1.1.1; platform_system!="Windows" # required c-ares will not build on windows codecov==2.0.14; platform_system!="Windows" # We only use it in Travis CI uvloop==0.9.1; python_version>="3.5" and platform_system!="Windows" # MagicStack/uvloop#14 idna-ssl==1.0.0; python_version<"3.7" 
aiohttp-3.0.1/requirements/ci.txt0000666000000000000000000000006513240304665015200 0ustar 00000000000000setuptools-git==1.2 -r doc.txt -r ci-wheel.txt -e . aiohttp-3.0.1/requirements/dev.txt0000666000000000000000000000007213240304665015361 0ustar 00000000000000-r ci.txt ipdb==0.10.3 pytest-sugar==0.9.1 ipython==6.2.1 aiohttp-3.0.1/requirements/doc-spelling.txt0000666000000000000000000000013113240304665017157 0ustar 00000000000000sphinxcontrib-spelling==4.0.1; platform_system!="Windows" # We only use it in Travis CI aiohttp-3.0.1/requirements/doc.txt0000666000000000000000000000011613240304665015347 0ustar 00000000000000sphinx==1.6.6 sphinxcontrib-asyncio==0.2.0 pygments>=2.1 aiohttp-theme==0.1.4 aiohttp-3.0.1/requirements/wheel.txt0000666000000000000000000000005213240304665015705 0ustar 00000000000000cython==0.27.3 pytest==3.3.2 twine==1.9.1 aiohttp-3.0.1/setup.cfg0000666000000000000000000000075613240305035013141 0ustar 00000000000000[pep8] max-line-length = 79 [easy_install] zip_ok = false [flake8] ignore = N801,N802,N803,E226 max-line-length = 79 [tool:pytest] testpaths = tests addopts = --loop all [isort] known_third_party = jinja2 known_first_party = aiohttp,aiohttp_jinja2,aiopg lines_after_imports = 2 [report] exclude_lines = @abc.abstractmethod @abstractmethod [coverage:run] branch = True source = aiohttp, tests omit = site-packages [egg_info] tag_build = tag_date = 0 aiohttp-3.0.1/setup.py0000666000000000000000000000774613240304665013050 0ustar 00000000000000import codecs import pathlib import re import sys from distutils.command.build_ext import build_ext from distutils.errors import (CCompilerError, DistutilsExecError, DistutilsPlatformError) from setuptools import Extension, setup from setuptools.command.test import test as TestCommand try: from Cython.Build import cythonize USE_CYTHON = True except ImportError: USE_CYTHON = False ext = '.pyx' if USE_CYTHON else '.c' extensions = [Extension('aiohttp._websocket', ['aiohttp/_websocket' + ext]), 
Extension('aiohttp._http_parser', ['aiohttp/_http_parser' + ext, 'vendor/http-parser/http_parser.c'], define_macros=[('HTTP_PARSER_STRICT', 0)], ), Extension('aiohttp._frozenlist', ['aiohttp/_frozenlist' + ext])] if USE_CYTHON: extensions = cythonize(extensions) class BuildFailed(Exception): pass class ve_build_ext(build_ext): # This class allows C extension building to fail. def run(self): try: build_ext.run(self) except (DistutilsPlatformError, FileNotFoundError): raise BuildFailed() def build_extension(self, ext): try: build_ext.build_extension(self, ext) except (CCompilerError, DistutilsExecError, DistutilsPlatformError, ValueError): raise BuildFailed() here = pathlib.Path(__file__).parent txt = (here / 'aiohttp' / '__init__.py').read_text('utf-8') try: version = re.findall(r"^__version__ = '([^']+)'\r?$", txt, re.M)[0] except IndexError: raise RuntimeError('Unable to determine version.') install_requires = ['attrs>=17.4.0', 'chardet>=2.0,<4.0', 'multidict>=4.0,<5.0', 'async_timeout>=1.2,<3.0', 'yarl>=1.0,<2.0'] if sys.version_info < (3, 7): install_requires.append('idna-ssl>=1.0') def read(f): return (here / f).read_text('utf-8').strip() class PyTest(TestCommand): user_options = [] def run(self): import subprocess errno = subprocess.call([sys.executable, '-m', 'pytest', 'tests']) raise SystemExit(errno) tests_require = install_requires + ['pytest', 'gunicorn', 'pytest-timeout'] args = dict( name='aiohttp', version=version, description='Async http client/server framework (asyncio)', long_description='\n\n'.join((read('README.rst'), read('CHANGES.rst'))), classifiers=[ 'License :: OSI Approved :: Apache Software License', 'Intended Audience :: Developers', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Development Status :: 5 - Production/Stable', 'Operating System :: POSIX', 'Operating System :: MacOS :: MacOS X', 'Operating System :: Microsoft :: 
Windows', 'Topic :: Internet :: WWW/HTTP', 'Framework :: AsyncIO', ], author='Nikolay Kim', author_email='fafhrd91@gmail.com', maintainer=', '.join(('Nikolay Kim ', 'Andrew Svetlov ')), maintainer_email='aio-libs@googlegroups.com', url='https://github.com/aio-libs/aiohttp/', license='Apache 2', packages=['aiohttp'], python_requires='>=3.5.3', install_requires=install_requires, tests_require=tests_require, include_package_data=True, ext_modules=extensions, cmdclass=dict(build_ext=ve_build_ext, test=PyTest)) try: setup(**args) except BuildFailed: print("************************************************************") print("Cannot compile C accelerator module, use pure python version") print("************************************************************") del args['ext_modules'] del args['cmdclass'] setup(**args) aiohttp-3.0.1/tests/0000777000000000000000000000000013240305035012452 5ustar 00000000000000aiohttp-3.0.1/tests/aiohttp.jpg0000666000000000000000000001574613240304665014651 0ustar 00000000000000ÿØÿàJFIFHHÿáXExifMM*‡i&  € €ÿí8Photoshop 3.08BIM8BIM%ÔŒÙ²é€ ˜ìøB~ÿÀ€€"ÿÄ ÿĵ}!1AQa"q2‘¡#B±ÁRÑð$3br‚ %&'()*456789:CDEFGHIJSTUVWXYZcdefghijstuvwxyzƒ„…†‡ˆ‰Š’“”•–—˜™š¢£¤¥¦§¨©ª²³´µ¶·¸¹ºÂÃÄÅÆÇÈÉÊÒÓÔÕÖרÙÚáâãäåæçèéêñòóôõö÷øùúÿÄ ÿĵw!1AQaq"2B‘¡±Á #3RðbrÑ $4á%ñ&'()*56789:CDEFGHIJSTUVWXYZcdefghijstuvwxyz‚ƒ„…†‡ˆ‰Š’“”•–—˜™š¢£¤¥¦§¨©ª²³´µ¶·¸¹ºÂÃÄÅÆÇÈÉÊÒÓÔÕÖרÙÚâãäåæçèéêòóôõö÷øùúÿÛC ÿÛC  ÿÝÿÚ ?ýü¢Š(¢‚qɬö¹–áŒVxÀ8iÝÃÔйgŠÝ+õ_íÿǼ$ïIòË“úWâè^I u;ôë°óeSþÞ8Œ}qô5óo‹¾2ø×V™­´‡þʶÇ"ÝwJsØÈÃ#ì…¯ŽâN:ʲUlLܧü±Wä¾lúŒ‹„3ÕÞ„mæ–‹üßÜ};< æÞÞGw< ‹çfñ_ƒmإLj­£íqd~F¾–óRÔUŸP{‹Ë’xi™åcŸ÷‰éT- ž9Â_,ví ÎO¶:×凎ӄÔp¸;§ÕÉþ‹õ?@ÃøKWõŒN«²ÿ6}ÿ‹<;ƒÄV…Aö¸¿©®Š Ây¶W‘Ü!èFÅM~}ÜhÐÛ7™®3!vÒsþù?,Ž[ýšK]fêÉöiÑá [;$®GBòd3}8_jéÁxÙ5>L~—ÉK_šjËæÓò0­ádgàñõZ|šzü“^gègÚ'‹ý|GÞŒî—_Ò¬E4S Ѱaí_øsã?‰ô‡j7ê¶à}Ùð%ã°‘FIÿx÷¿ üC𷈎ÑÞÃRÆM´øIN?º~ìƒéϰ¯Ór=Éóv©Ð©ËQý™hþ]ÉŸœðvg–§:мÚZ¯ŸUó=6Ф³É îÀç…}Óõô5v¾Èù`¢Š(ÿÐýü œrh¬Ë¹yVÆ‚Ã20þÿÐP3µó²!)n‡àãq@>ž¦¾|ø‡ñq”ÉáïH#Ž<Ç5òzŽ Ãôîÿ—­[øÅãÿì»Gðž†û®Ë©ò õJ}qËŸN;šù¦Êa$[[†NÒ¿ ñ3Ä:”*<Ÿ+§öä·_Ý]ŸwÓcõ¾à˜Ö¦³<|o³×ÍùvFS‰à¹iDŽ$f.dÜw±=Ëg$û×Iaâ+÷ÿGÔ 
_'ð‹¥óÐ??¬sràä|‡Çë[qØÀ–ßoÔI†×$(@<Ù˜uXóú±á}Î7aq8lêP¨ì·×Fº·ÒÞ§í¥IÁF¬=;ü¿à[ü$Ñ4H|5¹ij‰s|]‰ó1]ŠÇ3SÎj¿Åm.Ê×LMzÍ#·¾YR2*‰7+»¨=ò9Ç+ç¯ |Yñ…¥{=>ÚÞ]>Cû»7 #ê®>lž¬[9<ñ]©ñ'^ñ-â‹Ñ VêKd]ÑõÝ»%‰ìxÇlWô-;ÈkpärújÕyR²’—Y§¢ÕëÑëfȧÂ9Å,íã¦ïNíÝÊ÷òµ«ÛNÞgê6æ9<á÷\óõÿëÖÌBd#¸äWa¬=¶§*ÛèÌ"ºŒî’ÑH˜õ?ì} ¬t·–@ÆÛÛ€›~}Þ˜ëÅ~+˜á%*­Swuªoúé£òØýG ˆå‚”´}žÿמÇ«ƒž†·­¦Þ‹*’²FANXt ŽA÷ÖYxJþXó4q£uýãsúf¨j~Ô¢9ŽÊ8Ì'wéÖ¹©äy¦}kÙÊÏÉéçò çXDý‹š¿ª=ÃáÏÅ™¦’/ ø½¼á.#‚õ†I'¢MõìÿŸ­}¬Ön¨çtpŽy*O@O§¡¯ü†ÑQìcm÷r.Û©ÈOü±Cêå¡ÿ€Ž3Ÿ¡þxäÞľÖßyÚVÒW<°ýQ> r§ÓŽÂ¿¡ü9ãªÓ”r|Þw©ödÿô–ú¿?–çã¼mÁôéÆY–]sª_úR]—Ìú Š¡jí†ÎS’£(Çø—üGCWëöãò£ÿÑý÷žU‚'•΂k…ñ7ˆàð‡†.5ÛÇÙqrË9±$¼ Çpƒæaè u™ó¤·±òÙþo÷“ùãóÇmuYxu$ÄZ|^l€tó§éŸ¢÷«äx爿±2j¸èü{GüOE÷oò>‹…ru™æTðÒøw—¢ÿ=¾g]Åt÷ó[ëf¹ÌÒŸ0¹ÜX᳸àÕ9Ò b-÷¹?v·¦·z:]?hÓ1œrÖì~Cÿo—èÀv¬› E½’[Ûæ1ÙÛá¦qÔçîÆŸí·Aè2O¿‹1Ú¬ù!¬§ï&í·[¿îÙ¦ü®MaêF4û(éeߥ—ž/;f‰e$k¨^©6û¶ÇbwÂeÄ݇“F·¹¹ò»B"¯ ¯DQØßãVl/žþc,Ê#U]–ñ/ÝŽ!ÑWùç©9'­MªÍ lÒó’Žù¯qPÃ<#PŠKw¦íu¢éës…N²Ä''w·§õÕþ‡(ð{~­S[Ë À‡Áô²X óY¢Þ)¤}ŒFLWÍâ%Ršµ=/²G­J¤Z|ú—l­!½q1RÏžy9,kè_ è ¨©¤+À€Ïxq£øXž¤â<àsšñÀ±j6é»;äÉþè$WñkV»Ð¾ \ɧ1ŽMNém¦uàˆÝˆaŸuM¿~Íáf_Nµ9â«Æü»®öýuZô׹𵴘­çoÀä¡SµÈôá‡\Wæí^Ó5½#Q¶Õ,$h®m%I¢u8!ÐäWê°Ì2S§^Ö_ƒÞçßã< Ê¥ƒtðÓš­m&äÝßšÚϪItxßÃói,ñ(‘±:ÁÏGåpßÞ[N®“4m««§ÊÊÈw+=5õ‡‹.ÍΗg¨”D{Ë1ÁEb @ÃïÓu|å.«ª3[„ÁãȈý¿ñ+¥–æ xy¸·®‰zÿ2î¿áx?3«‰ÁÊ•x§m5ð™ö7…Œ>aìEw1H²Æ²)Èa‘_-üñmÇü$·>¼hÌW°")Gûèzð€”'þù¯¥´ÿÝ4ÖgþY?Ëþër?ž+ú;‚x†ÖQKåð˧¼´}^ûïÔüS‰ò‰e¹Œðö²Ýz=»m·ÈÿÒýãŒùÚÛžÐCõsþ _ øÆYõk–AÞÊ“üŸ-ñÕ÷%‹ÄÃQ~ê#c_ Úm½¹#p%™½y$šü3Æöªáp˜Im)IüÒI~lýOÂèòâ+×í¾÷ÿì~øz={ÄQéz¬ž]¬ö“yˆ§ 0AìGÞÏ^)ÿ<1aá}RÓFÒd?aX~ÐГºE‘Ø‚X÷$ŒôóšH͆˜$„ùww12œ4Q)êä#þè>µËø‡|º¼ó³3ùû&‰bD¨©ú×äøÕ‚Âd/¨^»’|÷ÕEëËéîÝ­µ¾çè¬ÕÍ–-Tµ4šäèÚÒÿŽû騿ÓP‚2¥7½8ÿëÓµ &Õd%U'¿|Õ­Y%;yæ¬Z³ÂÄH¤!êHàøGŽ›—±ŸÂ÷ý®ä„mRžâÇÊBaùrzôéY\¬«°)$ãëš¶n!';×ó®«ÃÚx¾v¹g c/!ä"÷>äô¹®¬.8Úêš½ÖÚô9q·Fœ§%¡™¢Ø_5ù¹ù#ŽÕÕä‘ÎpA8êG@95í7ºFã¯ßxâO-/Wͳ¸nz>`@í†ÇR ¯/×®<ÁµM´M„‹<–?ÆßÞsÜöè8¥Ñu{ˆ,™GP2¹<?Zý7ƒóÌ6IˆxFœ£-ß{öòüt¾Ú/ŒÏ08œW&:ƒåœq·FµOÕ?—æ|¯â†^6ð•ûØêºUÉ Ä$ðÆÒà ìU”Ï¡Á¯BøSð7ľ-Õíõ zÎ];C¶u–y.TÆÓ*œìEl›¡n€{×Ö‹ñ[i^Ý.ÒDˆ˜ÇÚ#,~N3¸c9#=MgêÌ  ™ÌËÐÃùq°î =Zý:YæEJN¤«].—_£×ä®z¸Ÿx—ƒxZxxÆ£VsWûÔ^ÏÕ´‹~>Öc¸I¡$ìð€ö${Óé^ ö{¢%1€UG=+§ñ û6 
Q¥WGPИŸ•#ÜŽ¾õ=µºÃCÉ<·¾kñŽ+ÇG;ÌgRW\·_×õ²G?àÿ³°qŽîZÿ_Öæ'ƒåŸGñn©dìAÈ?Á!òÛÿc_v·îõeôšÕøø>å ­À~ã£lE}ÛrßéÖ/Ý„ƒó kõo-K ŠÂFöŒ¢þm4ÿô”|Š+›B¿W¾çÿÿÓýÞ±ÀÕuñ,Mÿ¡ ø—HÒÌ:¤–Ò‚ ¦3·÷c…›wè0=Í}°Çìþ!ŒžæOøá‡éšù#â.œÞ×õ»U8}RïÏL–͉ýõ)Çü¿ñ‹ ¾§‡ÇM^4äïóZ}í%ó?Iðßþ³[ g4­òzýÉ·ò1|I­…i"ùZàaTË8ÇÅR¿œn¶W8"ÆÙ³ê`JijŸÊ~wtÏ<Ö߉eòLf {eo¨‰I™¯æú¹§·ÃÔÅÎ[¸«yêÿàŸ²Ã U!‡‚èÿC·Ÿæž‡Œ{T·räÔñÔšÙ´…nÙBƒÉã ï[k¡\j7ÐYé°‰f¸a õõ>ƒMqPÉkשK™É«YjïÛÔÖ®6•9^zY}Çok%ÕÂA,Î@uæ½§N ¡Û‘åÆ~ã£ÏÐóýÔû£ñ=ëÒ×àÖ¹¦ÚI¨Û\ÛÏ},Ѐ¨‘€C¥A$p9Å|ã$wJð͹^6*ÊÙ08 ûƒ_E˜dù‡Q×é8Ê}]¾m4¾¯ª½Ò·sÎÁæxLæRúµDãžo¯M;t¿¡§¬ß´÷B8[ä„ñîÝÏô­máEÀ½ãý)gÇ£F„¯þ=ŠË¶t’,¾2½xýkCJ˜-®©9ÀSP¯<Éþ¡MxØlW5xÖæÕÝúr¦ÿKø˜¯`é(íe÷»~·1ìî¥Üú‚Z©s4“¹,Ço@=«¯°²Is+¨Ú8MR¸¶ò˜ÙG:ŠåžW]QMÔÓúó.ª^ÑÚ:²‡íú0‘ÆeÓ }`”ÿìÿ¡Ö”7©£¼ÇýPüÇoðªpPHØíŠíZÖ_@%U¿à-ƒøW<¢h'1¾K«e<òÅvË1Th·Wî¿Uk?¹¯¸ÍPUe(·¦ézî¾û¿™=¤Ó^j‘£|ÆîxãÇ»¸Qùf¿@n€:•”CøVCÿ Šù/À¾½¾ñ^Že¶háYÅË]¹Xþ‡üÀ=kë$>v·!ˆ"Tÿ6Xþ˜¯èÏòê´0ŒE[ûòV¾›+ßñ?ñ7J¶.*f/ñðÿÔýÜ×ÑÒ¯â{WêQøŒŠñŽZ$wiž.·]ñ-®vI>h›è‘ÿôTÑ,Ñ´mц+·´†òÒ÷ÂzÜdo%»ùlx+Ÿâ¹œzWÏqVC ç*­—ÏNe£ìÖ©ýûù¶GšK.ÇSÅÇ£×ÑèÎcáG„ô7Ãvú³[#^Þîw’EË¢†!Tg ÀÉõ&°>4hz%–—‰’Ƽ¤ »r¬Šàœ°B»ŠãŒö®{F×¼QðÔj^¨Vú%bC<’œ‡Gäídˆ á¸ëšá¼kã-_ÆrDo!·¶%¢¶%wy˜òÇtzWãYÞ’àøUdÓ¡Ë^1K‘Å{³NÎmÚÛ¦ï»í©ú>U”æ5óÏíkzMß™Ië¯-·Ú˲3ômfâ“ɵ‡ÌèÞ>=²A?™®çþ3¶Ð¼Ccq¬¼ko#4lâ$_/zà9* €3ìkÆ~ß < È÷©åS©·Ÿ+ÀmÚ: ~O•ñF#Rœðò¼¢ÓIí£¾Çè8쎆&2…xÚ2M6·ÔûÚë[Ò,¬©sy Z„ßæïJã<yÏlu¯ŠõOìëWÚŽ0Š[©ä•mî”HcŸ•òTŸfÚj¦¹f©¨KhŒBD±D:yH«ú‘Y–úfù•csœúWÒq÷â3ú”ðo •:mßÞ»rÚéém6Ñï­ÏŸá~¥•ÆuãY¹M.–Iofµ¿áäSÔ-ÞÂVµ’6†Q÷•¡«6HÏ¢^ªõ{›Uü6Ê¥w~ðýljµxü5|LÐaŸÍ§»7»kŸ‰²Jù=o¨cZr²k—íG£»Ù]4÷w[[S«#Í(f4þµ‡Ñm¯G×E¿å®ý “Ǥii¾êSw0òsÿëWµmMÒñåÓ’;4¹T¸Ý íùÊæC—êOBrïh%Àg?(Àâ» O ^øŠ]'OÓØ‰nmÄE±‹Žϲ¦>¼ ð0tñ¸ÙK €¦•ÜyTwzÛ}õæïm6;±RÃáÒ­‰Ò½ï·}¶éêzwÁ- …Ô¼e|X¡ÖÝÜ–%î•ò}Xÿ5î:*;@÷’Œ=Ë™ô üc¦›i¥ØXxCJ]°Á‰=DkëþÓ·'ךëãA^Š1_ØÜ)‘G'ʨ໊÷Ÿy=[ûöò±üñžfo0ÇTÅ=›ÓÉ-áøŸÿÕýü¬ jÂY•/lÎË«s½×Ôb85¿Ey‡Š|?oãíMkˆu;L€¬q†ïûªZùZ{k‹i䵺¡ž)$N0Èñç5ö†£¦ÜÚ\ÿjé8‰#?vUô?Ðö®SÄ>Ñ> Û›ˆOØuxnò>aþÌ‹üièGNǵ~U⇑ΣõÌ•Öëe%þk£ûϹá.,ysú¶'ZOÿ%ÿ€|‡5¶éÈŒŒã$TÇ%«ù½ÍkëÞñ…õ³jѱc²eù¡”°øÇàpG¥e]G<„(FsÓ¹¯äŒv„ÄJ8ŠR…H½šgî¸|]¤WÓ5ðƵs%´’ZX‚Ó²i×øW¼qŸý ‡^ƒŽ½†þ%øÎm¤½È˜9òƒÏwBAwpHÝŽ¹¯Ùø+Ä\SMåxØ¿‹G}]´z®ºÝ~—?9ânÄc¤±øi-µMöê¿ËüÏvøŸ©[éþ 
Ô"•‡›y·…Iå™úÿß#$ׯ«1µO*^}6ó]–£«ë^'¶’÷Y’Iï,˜îãhòd=”Çààrg¥`èž×üWzm´‹V”çJß,1öŸ§à2O¥~}ÇYö'‰óŠUrêk—– ÞóWwm-ôßKn}/ e´²\ ጨ–·“¾›+[åù”m$’öê++(džâw h2Ìǰþ¾‚¾»ð¾ƒô8Úð,ڜ˳ êÇw–§û òÇ¿_JÏð¯ƒ4‡6ÂyÛõ›…Ûæcç?ìÆ¼ìOSÔ÷=vú~Ÿqqqý§ªa¦#ƒîƾƒúžõû†þ¼’›ÅãëÉmÒ+·›îöíÜüçŒx²9Œþ­„Ò’ëÖOü‹:M”«Ý]×3ÎßÐ{‚¶h¢¿W> ÿÖýü¢Š(®RТ»qujÍorœ¬ˆpGøcÅtP> ëiÞ*±[›giV?1·Î>£?…qZŸÂÏk ÞxgP6­õ`ùÐý6’>=+Ú¤Š9WlŠ{×;uá}:y<è‚^Ï(ߘ דšäx ÊŸ³ÇRS^{¯Fµ_yßÌñX9sá¦ãù}Û?ëÿ |_äÀ°Á ͡IØÇiÛ&ßá uí\|~ ñvjÒ&‹z÷ÓîEhãÞ ¡ ©?;ô²ûž>§þÌñ¯š“H£¢Ì«'êFZ<ßÇÁ’{”aüš¿=Æø=“V«*Ô§8I«hÓ·K««ÞÝo罬Ãxƒ˜Ò§ì§Éz5¹íÿ ±òƒ¼e"yØwÙ7G°ňÐé ü{sv·3Cž¹tóaŽ˜X÷r>¢¾Œó|]'´Ü#æÔfx†ë‹½E‘OU…V?Ô þµÏƒð[%¤àëT©>]µKﲿâiˆñ2šjœcù7ù³³øqáýæ}SÄÆcq»t9ò¡ÃòÊíÏN êíï]¡]?ÂöKklƒ +G±±úœ} iZxgO·“Δ¥îòì“[éF6  =«ôì»'Á`#É„¦£é¾¾{ŸŒÌq8§Íˆ›‘§h±Z¹¹¸c=ËýéäŸðÕ¹EéAEPÿÙaiohttp-3.0.1/tests/aiohttp.png0000666000000000000000000016726413240304665014660 0ustar 00000000000000‰PNG  IHDR€€“®½ˆgAMA± üasRGB®Îé cHRMz&€„ú€èu0ê`:˜pœºQ<bKGDÿÿÿÿÿÿ X÷Ü pHYsHHFÉk>€IDATxÚìuœVU×÷¿ûœsåôCwwKƒ”"" ˆb‰ˆˆJ™H‡4R"‚tÃôÌ'ÖûÇ5Þ‚·¾w=ÏûÞ¿?<~øÌY×ÞkïµÎÞ+•äÿâà—¸æ0‡­le+°šÕ¬–±Œe ‹e±,V²’•ÀR–²8ÈA‚¬—õ²8ÏyÎðãÇT£Õ@5P T õ¨ÜÎíÜ´£í@µUmUÛëþ½ÍhÔ¦6µR”¢K,±ÿifþ7ƒñŸÀÿ·¸*€8À® ì'|Â' ¯Ê«ò*Hé#}€§xЧ@$A@ŽÉ19Î~g¿³d¹,—å WäŠ\©(¥"H i!-@‚” ð?ñpœã皨JUª•©LeP¨Ô ’U²J•_åWùA)¥”ÍÐ ÍåVnåu^WçAíSûÔ>P«ÕjµøŒÏø ÔËêeõ2ð ò Д¦4ŠSœâ×ã¿ø·â¿ à_A®}q¿ç{¾^á^¹N05ÑDç+ç+ç+pžvžvž¾ö´“ì$; œ$'ÉIç%ç%ç% è¼È‹¼LcÓ@T@@«¤UÒ*]PÞå]Þ½N0Ÿæiž¾nœÛØÆ¶ëG?é'ýÀ ;a' ’*©’ R\ŠKqwå]y—_˜Z©Vª• &ª‰j"è;ôúЪkÕµê 7Ôê AûQûQûT!UHµAmP@MWÓÕôëÆu ·p M4ÑÿéÅüê¿W€.r‘‹Àt¦3¤³t–Î nq‹d²L–ÉàÌrf9³ÀúÚúÚúìÖvk»5Ø=ìvÒCz€ö„ö„öè¦nê&èUõªzUÐz@€v«v«v+hóµùÚ|P¢D 0–±ŒåÚÑýŸ…ílg;ð<Ïó<ˆ!†àÜïÜïÜÎ^g¯³ìòvy»<Ø©vª v3»™Ý d¬‘5×…¶X[¬-c®1ט z½‰Þôúz}½>¨6ªjê¸:®Ž£Íh®]5|øðý§ý?þ«þ*LLL`=ëYô¦7½A8O9O9OÝÔnj7k³µÙÚ Ök‡µœ²NY§,h µ…ÚB0:ŒNàšîšîšú }>´l-[ˆ0„!ÿéIÿÁF€SÂ)á”{¶=Ûž æXs¬9¬²B oËÛò6hº¦k:åŒrF90¢Œ(# ôôô@õU}Ußëë¼Ê«@ÊPæ?=Ùÿ}ø¯ø{È&›l`*S™ RBJH q2NÆý¥ý¥ý%Xß[ß[߃yÜ+öAÐ7ØMÓ_­¿/}ËgùÀÕÓÕÓÕܵܵܵ@¯¢WÑ«€6Tª ±ˆE@iJSú?½9ÿýøÿGœä$'Æ4¦18ÝœnN7°/Øì z;ôvèm0™ƒÌA`ÜmÜmÜ ¾:¾:¾:`x ¯áÊSžòáwÇ%O•žÝ e+œ{joq&ªºµX§.Þ· Ôwfvæí`¬0š¨÷À¥Õ–!`XúVíaÐÏjií²*¢îUÕ™2@Í />ÿ>ÐWÖ˜Ј+ÌÊÈFéª°Ž·6àvÏŽ‰àn&n„Ó×3Œ¢8sy…ïÉL¹hë¡}À ÕX-6QC 
ÆË›êÈv/o1®´¸Ö % ½Sg8U ÚµkA`Hú®qjÉÖ¦`}“›šj‚tŒ­_l2U Œ­xx“‹î®=¼û ­¼Œ˜¨Å‰ƒþœTÏý~çLÙuì:v k 5ÐîQîQîQà¶Ý¶Û}º>]Ÿª‹ê¢ºð«›öÿ/Ãÿ» ÀÂÂÆ1Žq ÓdšLûMûMûM0G›£ÍÑœœœÚm‰¶üAÐWUWUWU®¹þ$œífVðs5»Tì`/ȽåpËU…À¬zfÄöLÐvæxΗ×›êMÓ÷ózWFš«¿Ñ´j¾bëƒ*¤Õöo2O‚ªÈNyH”…‘Õ2ßMdX}3ŸLЍþ€Âbc’JSíØ¸]ŸÇÞÐÛzzB2K®¸¢$í@ùì•¡™ !³wæT@d9-Ø$¯AÉrÆjã¾ÓT±ÂÞ¦ƒ*\¸Gãl`Bp\Ú`p&œZþã3 ÛÒª¯ ²‡ŽZ9pŽû*%ü V<¯éƒ <ƪìó-Ùc§–¯zÁà:\¸}ÏÁ¿±ô⦂秔ïZmWQïëaä>™UͪfU\ \ \9#gä x’d_s_s_sð.õ.õ.v±‹]âwÎqŒ]` tOÿmÎôÛ]‚WN5ܺœ%ç^Üš®‚¹ÝϯÏ£†zŒ;õ¹ ´¢´”Û@c’ÓTk#ç÷:ÿ@š§T´  U‡’ å¶ëå>b…ó]œX¼+ðQöoW7ÂY Ê£¥º~±Ý]ãæ–ÊÒ¢®û[CüN(#œéÉç–ÇU¯~y$Ý™dïüê‚hÄ«ö ¼N¶Ç,Ùí~Ð}Ëãæ€õî¡‘ù±à7ó½úÿ‚ÊÀ&ˆÆ‡ä2qTu›±×ç€Lõ>šo „¢2ë_~r/§»° ì1Ñ/”h¾Ô*“îšÑÅ*.¾½#èø &<ú'¸ó)Ÿò)„2B¡Œk©®Ÿ\?¹~OOO“knÝ_“¡nåVnýwlø þ÷)€«n¶JT¢Ø[ì-ö°îµîµî½vÇc7»Ù 1b:Åt}¾F_ó÷ÉËh(wA¶ç?…ì'~ywÎ*ÐÖ_8³u5ø6:ÏeNw²jfm›>Hû´ª¢î™™Ýór{dz;›ˆ§ÔUnÿ©†È4T{At·ñ=(ÇH?rÊ9~ ¸Â ¦q4a ¨|ê[u;`ÑE- ãü‚Ñq6Ó¤¨hm »<ð‚¿lÁù º—¸¿u>PïÄì-ydÍéök&ƒ„/´Ü²ä˜Š¢”G¿Pä§f„eØx8ÊÍ ÚIÀÏáÉtïŽöŽº—'“û—V¢v”®Øx¨‘ú÷ÿGí‘öH{$ä89NŽ\æ2—Áûº÷uïë`¤éF:h3´Ú ®ådü/s+þïQÉ$“ rNÎÉ9pz;½Þ׎ø= tpssƒ¨6Qm¢Ú€ºOݧîûCº{Y áú)#޶‚Œƒ?9Ï 2Òê˜þ„ÄaÅ&wRt¹„8ÐÆØzà4–õÂé9 w¤:²x>pþ\ÈûkÒÅåt|j®öàV#Ui@‘ Πƣa*FHsâ„>1u+Úlÿk…¾º¸¿ŽÔ´f†Åo^žjlø‰ì§Áj¹Àá^µÈ–x'TEc†çKPÏ]ÕèCPu =Y»Èꔟ.Yqæí A–‡Çe¾ D«¢Ú[€FUõæ 9iSÐUG§)(Ý>p;H0Ôȼp˜,óðâùK+oç)²ÂÌ•¨·pÞ,÷ˆ8? «n®Ò /1@Ýv3æk] ´Èì$í pÈ9áv@{¹ØÐFŸC\ŸÏv­î[–:§Êy¯æ0änÏÝž»Ìâfq³8ø>ô}èû\I®$WÒµìÈ«£¸páú׊Ä?ÿó³æg~™+se.Øb‹-êêêÁZÁZÁZà·ý¶ßŸá3|Ð….t¹9Y©mÅ?„¬FûS–öÜ’»/O]îÃ¥ç[‚²«;+À(xá• ª†kˆ¿HÅèãI¯€š?ºthõŠ¥6ì4öX1ïÑv{H³ŒO-¹%}Û±À{¹ËS¶€œv›±€OÍ×^ÜŒV•ƒJ™|+ÁNW hÌNšÚäiЛ—ÝÓi hção/[Ôã%ï` óœÙ$%ëËóÀ>œ|dmŸì¬.2“r>µ?1£|SÐ:—ê{ë¤&¥Üv¨?øÇU=Ü¥Ĭ-_®õ P›ža¿ÿqÕJµR­ *+*+* ‚ÛƒÛƒÛ!w]îºÜuàëë îêîêîê×â ®îÛ_ë,üÅÿÜÀF6²ñZÚ©5ËšeÍ‚P\(.¡9¡9¡9Ó/¦_L?pŸqŸqŸùûdÍq™+Îôû¶=ðÅEpä輨‘ÆTËž/ô²ôuˆçí JÊXÉÉ`–<äJ3Y ¸ÔVëPqÆ“ÖÓ@[oŸØ_€¹q+\õs¡ž ƒ}±hà˜µ-T¤NúëǤD꧇2€¹‰—¯€¤; ­àÜ­R÷‘Æuž î>UÖÞw 8` ôý‰£+›ä=§8íO]úqH·ÔØ#›\ô5 ê¥;÷lõfæ‡É{@Br™24œ–=œWBd4|中ⷰ©ÊO[{*`©Y¹@B4“à|ö§`â³@LI•HèpÄ6rs„ˆ墿 \ú/Á’ ¹j»%€¡êÑ \îz…Ÿ h­¼/Š“¼* ä÷¨} UTm„úXýÙ 9C¬»µ¦ %ý¹ayˆÿ¢Î¶^#ÀõXìCE>ýûlÏÏφœÛrn˹ <ƒ<ƒ<ƒÀå‰òD>YŸ¬ONsšÓ@ $ü ääÄÿ¼À*V± d€ ` ±†XC X6X6Xöší/öbìÅØ‹à:ã:ãú#Á_ÄcbB®ïL÷ ³àŽÙïWBÖSÇ~˜»}'ä30ÎPA^vò”\΃dᢠýÔ. 
F»› \úç°êaŸ¬àûi-œ@Üæ UScŽœuØ;%©/ðMŒ]ì=P—ò½SéÐ&UHì|¨l^Ìùìç/íÞ{ œ/ì Ú=ª”zý¨« (*¨³\ãý…@M°Ž…’A´o7·>U‘y´û™¢Eœ´,Їš€„Õ rõÊU$¤§hg@¹]gôvY£!)ï t³;³É1Д• ÔêÙ‘ÐeS­,9a¿ØVZúÓ€î>T0?`«ZEL‚œ zIÐ,ŠØËÁ›idhÙ`ôÔÇØË çû³9+›Â•BÛŽÞ±;êÞÞïð_(övÝ@{>½ÑðÜ]Ý]Ý]A‹×âµxÈ®˜]1»âµtno9o9o9ÐlÍÖlPWÔuÈG¾<Åú?ÿsNW¿øÉCòX[[C :ˆë=ë=ë=ˆíÛ;¶7C¡Æ†vn1MÈêupÞ’9›³·ç7À·IÞÏ( ¾Ñî·”´f‰ð6P¥£B5A2™Ì\ü¿)D¡È¥(¯¶Ã: Ôš;‚Á˜ÈOæõ.Ê P5Èý,ˆí©QxÎWú$@ØÇ Z¹F•VÅ7/½¬uù¬UÁèQú§ÛŸ=U)ÿÙ.þ$dS ã•gÁ¾uã˜dÐæ) sU¤ Hdz~ªÑœsf±ÀÇ e‚vúT‹Lã.@Ãû›/¶Â¦&(¯zÔ\ bbàaÀÁÍçyüˆDÒ• ”Ç©&9€~ M ;?åÅ/”ý›d¢l’@yÔNgà臃/˜<ãD¼Á¼x“@§)GAyÜC ¬ÉÕŸÏ‹x<ÍÇ¿e¨8Ó@«¡Xp>¦¸SχgHk4WoÇàÏWcQïîóUÅ¦í«ƒ*§­1²nÎgë¨uÔ: Ù½³{g÷W–+Ë•Þ\o®7´]Ú.m×µŠJÄCÌ?¾¾ÿ(þó `{Øòº¼.¯ƒÕËêeõ‚€+à ¸À¬`V0+@ܱ¸cqÇÀè`t0:Üœœ³Ã’{ÒÕîJÓƒùÖ‘çæî¨i®,ëð,Ó»Ê~кË») ´“¾€ ,2‡€¬¬ˆ¯zžõ>GååQ³œS€¦ ~àC»!¹TüÛAŠKDÜy¸8+¾"€C!Õ ”—-2¤¬3Þ aì̲ðUoýÂÈž ¾qíŠúêŸÀ߃lâ{°Þ{fÊ/ <;n½Ô«ê5ý  ”F4ANÑ_.‚S00)ý(HÈú>°ðSCÝŸ7»" <”wšZ”[$@u; [U½Á8å) ÊkN²> 5_l6KÄ[bàtŠâ€ò鯅ç€ä¨×Âõ!ƒÄ_i]ÿ<Ç$PmƒçG@Ü= [ aUÀˆ(ðTVæý}MPQ`]+Ðp¨ÁRp¦«·õºÓž£ª@N/³k#¸†–ŸÐ¹+Äëµvôü´j®W|öÍÙmÇÚ±v,d¦g¦g¦ƒûa÷Ãî‡Á{Å{Å{åº4ðSœâÿqÁNä1@¾’¯ä+°^·^·^‡àÐàÐàPo o oƒ¸@\ .ÆÆÆ7'g'‡´Ì÷!íâ¶“Ê€£%ûV¬€è’›‰à¾W;kµ]^pŽƒdr…ó€CuʃòاˆÈ  زQ"GÔ²”åÑ{…뀄ÕÌð@ÈäêF¸qzi6{CÍÐ^¥¹Ë*ÖóU•ŸTVÓÖ:ájf×9QK›' 3Ÿ4—¾…ì·Ž$}¿´ŽÉ÷5@ÕLý9l¿Zõ”Ÿªtig Ÿçb@O[²Rö9{W$2QyÔèÐ!0/„¼D|W7òù‡0¤±|J Þc– [•íÈ—°mA¹TO».ˆèã‚S‹òr*2òß®`9¡åq=?$dÌNœ™E G.ƒrA ÿ€=а) *Žüi¢ÆjU ü“_ Ù'C:Ï€V²d³;6B‚»þè¾_€žàÙ[åæü¶f[³­ÙU,«XV1ð<äyÈóxú{ú{úƒ>UŸªOåZÆÿPÝ‚ft:餃\ rl˶l BãBãBã T3T3TbÏÄž‰=ó'¿G`Xú.Hݽqí'ÏÍ’«. ±wygó¸gk;¬c ¶È£Î é\⑬Àˆ›æ’AÂú~m5(ËÒ?iDjêiÎ –zÈôüȆ4óî°f^¤Ýß>=T,ia×tëHÆ“€A?ç`à’V€›¤pKŸíÞÁ0ÈkßöOäwÞ±UÚY£Ãý€\9̺›ŒÛ °—ùÀLýŠ{6¨*®_¤"Q¥A¹©cǃXô3/ýùïñ#…lÀR'Õ`@w_1ІÊd? 
ÓV”¶ÃXT“Mó®7£ù¯+Ûœ5”ljý( ð8(¦™Æ´Ïæ½_6o i\ä¨uÒÇY‘·oŽClïL&e“³—©[6üd<Ø}ƒí3»ßœíFW£«Ñ¢Ó£Ó£Ó!T&T&TÂ3Â3Â3ÀÞ`o°7ÈÀâzÿEüûN666¿¦]Úº­Û:„¢BQ¡(È-[ ·Ä$Å$Å$gšgšgÚ¬—©ç6ùôPÍÎYÑU½§9 F–šfÕŽ÷9 9 "_š›ŠÊåt•¢€;P*ô OArTs³™¿;òÿ1,ÒMUU3Ay]åïÄi¾ Må`n{$?úü3ZO;ôÔ˜g‹|ûOà{k§£¥À|}{çq?‚±9uØ/çA½¦ÙÆR0ê׬·ëvrSœ† _:-íà\¸RÝ Ï:U2{–êo–ˆ,…ªý—FäB4ÙG”7ô™ÙЭ:> ¹ZÕÐã€!SòFåý“”#ëšÌ» ¼zÅèâ€á¾˜ÿÀT¯o r®Æ!”¸!É; AÈbªö`¹åíaÈ™ã[ñ4HÍrïo )%2j¤‚þ¢·MÌù›-œ/œ/œrŒ#Çßxßxßxpïqïqï­¤VR+ɵªÉÿ&üû¼yÅ$íö {„K„K„K@î”Ü)¹SÀ¿Å¿Å¿<_x¾ð|qs2N•pÅœ®~nçƒÓK‚æ\8¹å6ˆúÀ7Àý8‡Õ{9¨;åm/H€Ï)”÷òß ™ ˜ZåAÅ{8»€ùÞºÛùò[œç¯AŸ`@0 È1ïs‚úAê:£Aw‚ç/êzõR׃cÁO Eþ l·ë¦Wu$±W…b X[3¶ƒ„²Ož‹„8JŠ~êX„Ř a„Í üZ }¨Áºnô J{U 0øÂÓó/ßapÔîp+í< zÇpCÀa©ï>@ã KóV%€ ‘)åññª"7òÖÕKS_a‹¢RT´åÎì’##ñ‚ÎÕÜŒ?bn^…-—ìúàZX°U]{®ÄÚ;êCf‡ƒ-g$Aúà”¿œ„©õöêÚ÷þ¨|¸ÜWÜWÜWÀ~É~É~ €\3º>r}äúTMUSÕäZ…1þõ ¯´“SÛ©íÔë”uÊ:>>>ànãnãnþzþzþ?¨b+…­¡¡ ýÖ=Qs»Ï¹²þyˆj+É9Á8,ó¼E@5â>½3H4ZE^ý“#£! K1{ ëL ¸e˜îZZ]wÄÈu-öÏ…|Füè*Ï­e™—s¶‚d‡ËdŽu›öfüYðì(ÿp§È=±àüÇ.xþàž[À})©Rÿ‹ˆ2g|î§W€uËÞºSÁý¬«ŸQx·Ä®–;A=“ýÁ…~ÀÔ s·Ýr!”y0Ôm(·1d©3Úª’ž;óJ_@Ó~6j)îeìeá_äGÄØ·‡³€c÷Ë)¦'Ý€T×±è@†z]9@6ƒT# ƒÇ” „TU5 pãpˆ•õÒˆçI¢™-å»vè RY›@Ö»žE &«È:F‘óë•%ûí@íËÿs•Ç@;PbpÛo€¦‡GÌ=Q%ƒ®”†œQfׇôm{ŠùëAB©ZŸt{ÔIc€ç^*_)_)_)°Ÿ³Ÿ³ŸƒÀÝ»wƒzW½«Þc±ÇتŒ*£ÊqÄ÷××ÿÏâ_§.p óe¾Ìû²}Ù¾ ÁÁÁ@zÐbÆ,ŒYȯހߡŒ±@Æ€÷.}œ‡N×þ©5Dï·R/±Õœê!mž| r—ð·%’ã– Ü,äöT^$›”Ç—uÄ´ËåŽåaƒý0(—»ë ˆÉ(q[åréïð ¢zŠÓ”×¹b>’i=³ÔQ¹·€–YÑzøýš©GÁ=ô—#S wÞ¡ù?{d½QOï±Ò5›ú@M2~ñ޼Áoµtl{ ˜­RÞ=Ø ¬9Ï;îʹÃO—•©Þ£#ðDæÊ“Ù@÷­ë6F–{ý®S :^ZñóJ`Å•ØÅ‘~ö2¹#® érg=Êm4ö}­ë>SÅ«®¤ÿáÉH'È%"…Gš—œæ¡¶ ó LFi%íÀ)ÝôÖ~Ñy¢3êU¸À9uu\]™@¼\’2@YL©T·]¨ïœ±f‚,ÑÖoj‰áŒl»‘þ `°W^„âêj:u~ [ÎØ³A-JØ\v-h;*þpœ~‡§ÎsÕ3>8ÙŒbÞ[ãJ@T’¶#в‡œé·á dÔñäÆÎ„ø ÕÊtè VÇõë¯ó8CÔʨ•Q+!£]F»Œvz:ôtèiÐÎjgµ³ ÑÇèc€MlbÓ¿B@ó6ÿ¿ÌDI`}g}g}A JP H¸7áÞ„{ÁÈ6²ì¼ŸŸ¬´c+׆àì_JÌì1?ÛÝ/ßF³ìN§‚zVLûÀÔ6Å|Mâ&€˜Zwã GYu¶ó‘YáaP.鞨ÐWR@BrÈüйE-åv·‹ù$¨/óFbâϲú9ñ›k|.€p¨bz P³b¶–hÚÚŠÓzÎQGï: 2åxñuÓ°|í”;Á^î<Á)¬ð.™_àDR ,|g-ziñü€¼àì —'~úØRP'S2ö>9#Áx2jM) Fjºê€ä0T²a=@U.Q¤&ðp‘®·ì,g¥é‰?Ùö‡×ÀYt6u«ò³Í2?#•”Ûû~ÌÏ A×K¾HRÐ~¼nö:¹@ˆdv?«¡ê`¹ºb°Ù™õ*È‘ÐÀ"5^ûج}ëq€Ÿ]Yѧ€ Tâ- ‡£ll²¹h耋ü”ò©f< T±Ž„Æõ­Ù €»8A4¨e½ôº˜ºÖhl®4_J;鿀-[œ ÞŽ~¨H hY5®<ØœO´X~?HàâÆÝ=AÅDÝ‘xÐÕXý#±ê]m%XëÒñç 
«¾Ó9z&xªV½›bTÙ|ÍZÁ¯.Ì¿5Ìf ƒÌ²™e3Ë‚/è ú‚à™ï™ï™­2=éIϺ”þ N_ò%_^+½gõ°zX= °#°#°¢~ˆú!ê‡?ü<k]Š:ô ¸ü} ˆ™§õ ¤1){åå8P}åuç H˜FÊà`OåßÈ4ïÔ˜â€C3µø}š‹D@c©ø,•1$,¿XŸPLy‡$XV\ €¶Ü} °T+=b¸,{CW)ȹ#Yw†€ÊÎ÷h¡ í.Û»Ó,/Nl\ü:8ÙÝ2˜¬nó}8ò²«èIú&møáQ³ØC.ßµµ؉óoý$ZS.¨³þq±÷‚{‡þˆ¾´Çô~úTPóÕ§ôùÜœ’S8çÉßJ§·6 Ä¡+ñ {³ÛŸëÊ´Ç÷ÃVÏÔ˜|Rªço€+´ý@üÙ’›†ƒd8EíT+Ü+R@/îXZX/hü,Óý$ª—€)Ú}®w€¹Fa߃ Så`6p ÚSŠôÕ¾NK5ºƒœµïýÓ‚®ûŸõáÞRÀYµT‹då=¥br8”ãy™ ”uZ˜e€2VûÜײ^†*ÊÇjHµL{Hp•5F€ê¬½éôµÞõ¹±Ì“ZåjÃ{| rñÌÇü ¹ç÷îü ˆöí‹ÛVßÃØÇ2Pniç¬ã[»sv¢îsUs'@öN|¿ \bZô€wWÁuß@_3^3^ßßß~ü0ø!èíõöz{pÍwÍwÍç×F)¢Ð¯6­þy'€sœãH/é%½Àb1‡@Î’œ%9K€^ô¢$ K–0 ˜ÃæüžŒ58gï•tHm·iß„ãàï|ótiðÎ ~ráYP rÖ^x$@Õ=/»,“ €®V¨ ÜÞñqS@FÀ= €=ÌÿÍO•âP>»VîC a3ßňÿÿ=‰äwûòMH^`OÇ@y2¾ ëJtsÀ!.ÏMÑxgXè²Ë~´Æîª]T½"Ë›Ô§í¡—¦§ƒ4I›zð,­Šk]@yég$b—óv Ë“*ÊG<é@¢p€DjÐpyûż]Ó|C+`3pt ¹¤sT5F5-dz=á¨\ã¼wHû‰XÙFžÿ¿-ˆ/Ø5­&8ß:½éÚ‹å7uLYÞšòÐáœù³A¦îL= ªÇŒé’å)ÌUû´VÀÆNïÉz¬g+0Jnu†+Ù—º€ìuæï&Ò]{ä9ãÿ0ØS;a0Þ—RðÈ_ÍB%úóÞá@ŽV[ßäãˆ|ôp^²R@ OÍxx't)Ô`‡+ G¨gú& µs[¨$¨ª”PA ¡³Éó 0^+á ú˜Êƒï.f~wj8ÓÕ™¨†ížöM› ØLV‘ÏfvžqÒÁå#ÌIMî§cFB°3Uc€Ü™Þ·‹+H\uKõÇò1$*6ñà ä'ïJœ12cdÆHP–²”þËþËþË`ô4z=£åè?OüóâÚІ6`¿f¿f¿á%á%á%`­±ÖXk æX̱˜c7|Yç4¶€Ìû'/^®Ú¹½NWo{ô€ê±ÀÚ” 9yþ{w^à„žç N `Kç]@Âñ9]r&äÏ3«ÇS0hf'X§ÒlyÙ‰\¢)ùº¿:£Ä´¿ ¦r9—ÍHDb)Z6e (¬ü ½T¼Ê­}A%ÞÀNü/='i’6÷`£ÊèŸ —jf†]ÐÕ*ó5"þkH äd°×¨ïl7Æ{ƒìd?û@’I%$„CЉ¥È^éî çstvä’ó9(ƒ”ePæ «Ã*ÎÔœwR :é;Ž'}×¾XWʃV°ö¹~gAÈß³êc §Â rNo;EÊ@_×3ѱ ÝõÖÞŽ@;YGA/¬œÌOAj;ÂCAº{ýïçx.õ»ƒóD±·õ{_’Õò!°I¸T}Xm}ýŠÆ‚ÕÆ;µÐ7`Õsß—ïs°jº?*P¬XOÇB€%QO—Êv›üêoÛ_ô[ׂS¬Xç[Û„‹V» äqWlÔdYšeå€Ê)ýÀmÈyk^ð]p¶n±AfºÆ4I÷¬‹>8ÌRoC€ ¿Ù.H!‚ 7GåLo;½¬Õ\esO} ™K÷/X|d“s§uµ‚ÓõÈëí]'ºNt°··s¹¹Ü\N#§‘ÓXÈÂ_®ÿüãW€5¬a 8·;·;·ƒeZ¦eBàõÀë×Á¿Ë¿Ë¿ Œ{Œ{Œ{nN&giò'[Ç‚ýÝùÛ·-¸oÝCÕ' šft¾Ð䧤µÐTOqíþ‘È9ÆTm@BÎKfcPºÕ=ø,HØýƒ¿ È"(eÝ—ý8ˆ%Â¥Q©ê}¶ÞÔ]X°8éä޽9÷)@´R1û@Åj)ÆPŸ•¼ØîVP“}£ó 8%öþ0¾<È‹Á®ÔüZ¦¹3[¿ÒÕÁÅa°ÞǼ J£¥a˜rJsšÚ¯±Üóü…K›£EröüZÝ÷F6¶ê ÒÎIW™f•ÍmJw-€PŒÒ '‚ÓR €ô4ëç| ˜Ú9}>ÈQ§Wè óÈ„…ÍA¥d ;q ´!ŠuÊç©‹ KxÀyõòæ½—ãjž ¼D5ùäiú¤—ÖS›΂]êÏù8&±tKô¡Þ4Òò¨tY!CäI $Ç牢ÝÀÝÌ ÈTéZU@c¹óm76Y,Eíb€‹VLUNM2ún÷‚W@{,ßêü£A+;¤Lh.ãAÏ7€ò,'ù—¶Ó? 
ÊP»1p»¯\|yPO¹Ö½¬ÛsE¥©S©rŸ5  ÈzöÚãA-¶öe~Q_zWæK‡ŒQçÓ¶~ 9?&¯/Û¢o)Í nè)zŠžÞ÷¼ïy߃`L0&Æ"c‘±ˆk]˜sU®Êå%þ¿¿äUÝ•m²M¶uÒ:i„ÜE¹‹r9ÅœbN|ƒó Î7Ôûêý<û Ì2kχԸõ¯¾? bjs‚-Àû­Õ)µ(ˆ?ãçäm gämÊÑþS#<)[A¹4ŒÇåë™òDçG@B/bÊ3"ðTçëôæ° Çe¨âž;›ú®ü‘{ºWïã=N¹ýi_,ù!œy ðª ÚÖÈ”oâ~ºŠ89Ê+ë]é –ÝÔ“Êp5ô?öN984âàÏVØ q–  jÓôI Uò­ËwÐì­Vkpö§?> ä#{@¨2ࡱz&°¼ÿ¸\’ï퀣ÑÞ"A5­Rêþb@#kIð#pbNÏZ7$G¶Z“ÁiâéÕœ—ãï¯0 ¤¡ûtBH®dIÈAù\º!vó0Nî/ðs]{ÞxxLë= ´7^òÜ ä—5vg`«Õ5¸Xîì7Ë>YiŸºR€¥@C†« €õò hqÞZ‰Y k‰e*®õÞ™«Š‚:tjÄÆRÀ0«v®ª‘¯VÜ[ÀwÚ½ú ¾·[‡×Y·æt Ék²„HNdÉëöC˜,PÅhÄS ²½O'†`OIóU‡¬ãÁì…ÄôÖM^ž ®ñÛ‹øýrÉGò‘|é1é1é1àªëªëª ¾|?ø~¸®ãÔp†3üOÉà ñ¯&1‰I`7¶Û!Ü>Ü>ܲîϺ?ë~ˆ½3öÎØ;ÁÛÂÛÂÛâïçsŽØÝኳ)s|_`Õ©?샸±1i1aP ³:Ÿ›òÕ"°ðp£òØügGù»Síè« (-’ewËŒ”äçªàÿ±[ÇXò•ÔUToæ ƒZUÑÛs¨Ö®gc–‚SdÿêÏc@¶˜ms®õ­ñ[²–F ËA>#éþ=@г:aHPïfÌcòÓŸ÷oä4ë@«ïZêo ¨ðϹ-÷ü•¯Í”Gõ·tS8ŒP‹´;Az¸jF>uÕ¾ªy©ø6QÑ5’.€å¾’pà28Ǽu[ƒs§toM%ù°å0ß )!:È8­¶ëcÐÂÞuùZÞ(êóõAáý8î0¨‡Œ|ÞÓ >×:E€ é.™ :{ÌÝ ¬qÁÛÁþ8˜™~ œj9#/M§GÐÖè¥}kô­W¾~ÕJƒ<‘UúT?Pf¨ÿ•6 WÉú´9©ŸLÆš/f} ª™z[Vñ ŸƒúÞ~#(ÀƒæªÀÝ@ ÇäK"'¸«'G„8Î"P_è/¸nY£†zŸ‡Œ.W87 èQ6ÔáNÈh^ä™êÀy-A¿"}ú$ô äŒÎ3¢¶GmÚîÛÝ·»oµL-SË€‚ÌK[ÿKøëW€rȹ$—äØ%í’vIööö¿Ö@ûллhA n rï8Ýq[0?¿ä4Ľe´ ~ |—ÝþJ) Ñõž1ð¸[KˆåæJß¿8ÖˆHT÷aç•Àã€F9c5€»x‘L®ä¥ÿÞL´<ÁŸ&ÍAÕ*»¾µ¸ÜA tuŠÙRïøúÅÍÔ»ñ]·§[ÂÕ£yÔß<ÿ’›Ú<„¥‡y°ÃvvO) wW|üU¸¹L"È…à¼Ô%€æÊñ àöŸIˆ”*›/W‹h*À@ÈpI=ò’ñš?xYÄ› |Hig8ŸŸKÜМWJN¿c%ÈGeJwê ÎóÓ’Ç<ŠËPÀWt§ÈZÐÏ‚~"ºVÁLp­Ž^l ènïâ¸6 ^Óvé µ(¬bßc2XD7Þ%‡:@ uxŒ†ÑFáó µª•­öŽÀâ´º`¥Ú™vup¢ƒÕÒ;ƒ´ ÎM)¥ÍñTó]ðWêhãÍ­û¹"[ŠlÊ} e¨»Õh• àYéT6_Êí´²?= h‘Ðé_á!—X`#å} Å5Eþ.±#ãÚBÆÄóã¶V…ÜÙçìN?Å–Õ½Áry’=Éžäk^´p»p»p;0ŽG£ ÔêïøŽïþúvøë'€1Œa Øùíüv~­ ­ ­¬aYò†AB“„& MÀìNv'ÿþug¼SÂ)_­l9¬xÒÎØ?,ÿ Ou'ZuírÎ4 à*—4î‰ rˆ¸ÛäºÛó!šê \ôt>´ðº ýM¶«m ¦ûÂI€Å­#¹ìûÍû‘ˆ0‹yÜ*I»lµºì›…ôn?°òÔm?lURëd'702=¤©½1¼ÈË®ËùŸ•—g»ý`óenÀî.=nÞÂyãüs¦ÜÑ \<Γ€ÒWçÊçvƒØî"…:_kÝEû€ËìäG ±ÖÌ}äMw먞ÀÓ²—º Ý(,{À¾Åשpp*$¯\äoóÄn fŹʕ§_öî ÍAf˜UYக¯a¹®àJŒYà$¨/ÔZp†ùÜ8JÔq•j/€R“'A=#Ý(2ŸÒ8¬ò©Þê{5Tag—sÈ¡ õ×µÙ®ûAzÓBUsBêÇ¿³ZjãEœ÷ÃP—·íé ÝBŽlý©Üw/ß ÚŠ³µ7Ü L2_ÎîjÊUñ ÎH>«;0*ôRæI ‚l²oòQ Pèª$(¯V*ü:à×wY]ÁÙå9\´ äÞa…<ŸAðH¼Q}ÒrÒ‹Ù  w—ôß@Ã{Â{Â{ ëǬ³~„èmÑÛ¢·{{{¨_Ô/ê Å(ö§¥ù/œB„„%,ákGÿС'BO\ë¤ã~Ûý¶û훓ɹãl¹=eAõ¬´Þîé#cgƒVÆ^zaˆ© 9³@évNæK 1?BÖ™'‡‰²6O€iÌXÊÐø˜ª£+R½vµÜʧ<ÚP_•zñöË 
:z^Šýœª‡‹Ì½Ô,#Æó ¨2Zo£9à0o'žû@Usóž ™µs¿üTVP¤°)(õ¤õ"HϬ¾€NQSmQ@y<5Œò€L·y*O!ún’5—M ©ÎÓ@H;`ž$ÂdÙ˜ö3™A¹ô'üå@Nª®² ojÕ]·´ÝÆv`?EX2Ÿzœò±uÊgˆñ±·'H+»nèià1L\à4±‹›À·Õ»Ó;Œ*þŸ–í*æ ëœÖ0°:_XxqX ×{7f¸~Þ´÷ HlÚçéÑ@¬ýŒ= (#ä"wn7ÐV}«Z©®°Ë«fÁÊùo½|Ö*‚ëö[Ž7èjSÌÝþf %‹TpU‰^P`ð-]¬m^wùÒ¾ö@a§ˆ9 äÕŽ>`ÿsGñ/A?ŸÝüÜýÀÞ¬VÉ{@õÅRpYÊ“4° ù€:rÒ8Úr³#H˜g$RÊì’ŠÜvãœ- ß¡ÈÚÞÁƈ˜·!÷¡#—­Ýâë-½“·Rì×­búÐüüü «IV“¬&`·Æ[ãA£ÑÆ€JU©*H$ñfч×ãOÇHY)+eÁãŒqÆ@øÍð›á7Ao¬7Öƒg‹g‹ç Xät;ñòúñ îO›vh,¸Í^u/¨û⪖üØšøJÕ“@ŽØvÄÏ™¨:X¶™=”Ç~>ð‰ÜQ­<¿z!bÒ”a%et1åÓðyô"¯î1eN¸ à²Çeäk¤zž‰5¤øØ–i FF½Zð[p 3¯(°­2TØ5×WrˆU Ù¤ƒJâYuTw÷ƨª Ö>2â0§Øù›ñ^}†) ÊEqç.@x"´°Ñx5ïï"éºG8b ú— <úqý¸xº Q’ó  m£é1èìÈS ‘n¸ùÔ= ¦3&üೊe< Ì"•À}š»°Ûi 2ƒ^g§k}< ¹~½Ð3@G§Zøà"»X βU?îæ²+/c¿7!æ[ §œ ²\­Ñoƒð…}ùí‡ð{“Þþª8w¯j·ö)³WÞN}('¯É œVFó¥õ‘úS@}˜þPV;¡m<öL{Hå³#ÏŸ+kþ !T_å€=ê´}æ!ªJÒ™é £AôˆB•Àè7´T+` ‡˜²™±èà\ÑC®£ cî)ïFjÇÜ¿eˆ<ò‘ÊÑ?6ê+ܽ¥šÚALUÜ\©y‚¨Q$“Ùr ÔrkTZxÖéÝœ îÍ\{ÜœN§VlùƒŽCn¿Ûïöƒ–¨%j‰nnn N=§žS_{aþYü}°…-l)#e$Xc¬1Öíííÿ!$‚k3›Ùüûצ¡™Ùõ 0àPµ¥ÓÁ[]Ëžú³ý¥ _žztñ,О/åë°Tm£­¿8`³I†¶ v€cK ”‹v¤7[ 5ò~$RѦ‚ò9kƒ¯Ž½!;ÒãÏÿkþ~¤‚ÊKòP$dKV"àØæ¦ƒÖ±ÈÅ[ªƒÚ•еl;pŠ8o5g—ôíI÷+Ñ_‚ªÈ uˆMg¯Ñ—GX êí5×~PÍ\·ù« Sl¿ª¬ˆ¥á<àQϘ}@,j:­ó¨•ûHçl¾¡#à¸/q \ê À%.ÿæï³(Ê§êØ½Gµ·zäý»ý~Ç?¥AÒíñÙ¯_ÊOæ{À.m¿«:LŸ¬”‰j-8S£|EûƒÔPǨ âÈÙrŸÓÇy´:ÑùŠT÷ÂØ¨"s,–ÃàLä~û!åßÝx×°FO{jÆ ðÙ1çÎ¥µ‘Ú`@WMUd}Kç•fûcï¢ €Xím,Pˆãìç…—]‚ðãSžù.ìågÞ8{+Hªê¬^U„ÔW‘¸áÅç€zÁûIÂb`1I.PKeó%8µ|çòÅ‚´ôO/xd—¼l›ÀûÎÇÎ`žÔúUJèM MfXS€tùÈZ¤Ë7ÖO@ºpI·%ð蛬⩵À[\O´ûAàñc•Vw§MøÞœÅþ¯f5«ÁÛÉÛÉÛ ÂIá¤pØíŽvGI®5Çý;øûW€ÇxŒÇÀ‰sâœ80ç˜sÌ9 ^V/«—Á;Þ;Þ;þæ¯NŸ9´ãcàÙTçÐ{àà)Æ P9šÏª¢._Ü5X\ðëúO‚êYôB‹¹ ¹ÉU–öbTq}1ˆéœ>ÊmÍÊ^`xâ'F¶¯œ4ú:aÀ¶ôŒ± ¦tv"_ÈhuµÕoý¥Qjáô6ûƒ–”oVÙ¨‡ m©ÝãàœYû@^ åfŒíϱ¯ôËî> Š …ó„ø·Ýc¯Öºõ«j ª¸Êú_õ­µ1ô6Hg;_øiÀO^¥ÓÌN Ŭ—·ÍGåQJº!KEz êa- ”ÛUÑØ ¨°aö„ 1€F?Þlm§©˜Ô‘Ndr£š}:±j!°GÊÚ‹@–9{s¼À`n•M Çù^Íg¾¾Û; ¸ÛÛ'¾p@~°Êqñ 8uµ>n/øj$š¥£@-P/èÏ|D ™ æ çbÎgmÍ·¿÷w¤f§-];þ¼ÏŸw÷þN^èv*Eµ9{jØiÌäõ•Ó§o! ›Q¿øK€ÖD?áš .Wl³â_A˜PíÌ8 Z½¬5Ye|ëÿØTÀU³=¨Ž® þn oy>…ñ™r€x'&ë1PH“@¦¬‘Ë@¦¬²5à‚ó]x;Èakxð4ðŒSÂ|Ìú\Õ]Õ]Õ!n^ܼ¸y¿',G»­'!å¥ ®Ïm^“Ñ]}‹C‚Òé|ä‚T— >ŽÙ^"´“•ï|¨(8©{ï÷%È7ÁÃWnžà (—ºÛuÐ<. 
R_x*‚òØu#É<á¥)1€pþSÞÀdKºý¨- Í+mMÅÖ=v‚S÷hËÅ—@šewHij’ÞÎýh%ýõç€zD}¯ß æ påù•Á~ªs/8¹Ö€@ BÁŸ2Úƒœ“Oi Üj^° H˜nfĨiQëOmô¢‘ Fò ´Ümá© !ç çP-ÚZ Ö*‡æ‰Óô<Áº­@¹Ñ°@>b U@ʹî-¼éµ“^i¢Õ‹z¬ Þw 4ç­ü3ªÔi-ëXò¡´u¤›oBáhˆ~£xÉÃ@mÔÑ¢ÁzߪlµƒÐ£s¦/l¬_»pCA °ö©öFÞ¨þlåŸÿëØ\PÕ `dÝ÷E—¸«´¼Ô¼¨OÙ¨j€,wâí0Lõ\WL™i-õíT-Ðqî Ï£YFÎñÒÀeË•ý¨ÉÄ[‚úÑžx èã¬Û@4ð «JéS€|Z¬ë ¨²úz÷P Ž~Å3$¿Þ*jd®ä: áɞɅÚ@¾‡ËkÔTA5NkòûieuÎêœÕùZi½è]Ñ»¢wa†a'9ÉÉ›³åæ'€ùÌg>8ýþN°ZZ-­–`¥YiVÄî‹Ý»ïæ„ÃcÓVxìr)ãö< ы݇¤p…—ÂEAü, `«õêQY¥’ÓA–¥÷<´TV±Z­›ƒ˜Gî˜ñàVgµË –¼eÎåµr3?å7^Š+(«|†„½9’æjÒED½Ê¤ŠSÔ[QûŠè …+¼Ð}8¯¿0ï èÔi¿X Êº¿-vÔ&÷Ѝ{@UQ+õ0‡Xü6^ìuÑÈñr¨P:ˆa¶Éb“k> Ø„%Ò÷^ÔŸ‹8 –š¥Jò¸W!PþPñÐÏ€RO„kÂ(© @ OÜtɸl¤ºö"àS—µDà²ý`Öã µîK ™Æ1o ð-¥Ô:Pwã—¢`/—\²Áõ}TÙ‚ÃA¥i‡õf “˜#CÁjq!xi-HÚžÆû– ÞRÉ ™”ø¿øŠÖ4<ö‡ö°Ën+´«%Èm Ž40A5öçxË‚2Ù¡‚zÃî—3 œ73Jœ¼˜ÂXg48ç ³د$^³T(wcòdP'µ+æ{ t«Un] ²5"{K gEy5Ðh×@†ú@+ *^å×^\Z ÷Hp-ó´-±rëÉ…Ä%þ8á‹üÙà–?¯£Üoqµ­}Æ«¯f¼z­W¦ÐzTºJWé@<ñ7jÄrS ÷ʽr/899¹Î\g®íqíqíqpåºr]Ù–ûæ™ÀöNà*¦­ ¬#ÉXË; ¦c»G‚¤ð@Þ‘\¡/8+AŸÏ·ácТª®zԵ瓛»ƒŒ,¿˜ xxS›XŒ3¢ÍäôÍ éö-2»Žp„H`$',oH9P­Ý£bUéó>AÜgwoâ»øåÞ×QߨQÀxº™…@e»öûj&Ûyà¯u‡!T´:¢ž¸R¼¨ÊÖéÝÜbœ ’Zrµà„¿òÄØ®Þ€O¾3ïIgk- tqÉûÛ?¨:+&)üTVk[€*Fmßó@®Z¢€$Ye²÷‚ŒÕ«º“€FrF;rŽéò5.½ è­¼;ã¾v0— ñ a°Nœø9y ¨ù1™?d«zêNÀù 5ÿþ1DjR&¨ÑjHÚٷΆÁÎ>½ÿÔ› *¿´ìã@™!ïæÓW{«¯-vxUmÃù^Źvû›ÄÍ:™«õ-ÀiUÍà‚vÞPl“>@9ô&ÒœýªbÀ Êdd<¨|dH0›û/Íã|ö© 5 pÛÉí?ý§îøý¤ŒsÆ9㨣bÀT¦2EŒ"FÐê õ…@ozÓûïÿî_RI%D]tpF;£Ñ`†Ìï»Þw½ï‚:­N«Ó¿'èôÿ›áŒsŸþœþ/ôÃvP¦QŽÙ@?ýµØwä¥5F4¿›f”ùÀ63@¦f~|¢¨„;nbžè¶À<ê ÞÀ©`zÜ'g 9Oyö6 UÄ?î!8l“AåWõxPÉå3{UÙœùÊÅÅà´;µkãÀfŸï+ië€ *Õè ÄÊEó;À­tGÜŽ‡8ž7Ù?‚IWòK@:‹e™§­ÏݽËY…tVåñÃùSt¯žlŠEªÊ:o‡¹êVk? 
¹$z6`«ñZ$é+™œ’ÀYå_)î¡1Ÿ&U,È-¡UÊÀJ­´¯*pIF3€eÒÉ> ¼‡Û=´òÆLß $u¹œhûIG@v­¡¨NömvYÀÑŠhêºõÿwA©WÔsÀÉ`ÕPp/¶¿4dQ…­å.ƒ&oÚë®Áþf@uõ¤ûi †ãoSTyC_j N¸Z{“óÚ:÷9à‚ó“ÙÈQ‡¸Plçͼ}}ã¡€Q< jŒu!8<_ë÷Ú¥!·÷éÔíµÁy¢†sßFÐ>soŒÚpÝˉ*Q%^«9h6›‡ÁÓÌÓÌÓ ô;õ;õ;ù `%+Y rRNÊI°æ[ó­ù`³ŠYÅ ÖëŽýƒo`hpZÇ“õz™ŽÝ®»¹”5ÔSæ³3 ZCc€š”×ÙçÂoîÒõ–v/ÈÞ‹]¶| šYñÜA-ðó}25xê€nM Ì #æ PmšöHÀ)ì:HDŒÖa!PýJ/lST-£¿ïEpÔϾ{8ÊB©¸h¢:ƒò¹Vy׃\Ñ*èàèá³Ï‚–â}!a(aªˆCnžûífˆT—õ©v¼ ò¬Y9°d}î-)‡Arä[»(¯ñ˜[ jéÀõ•uþXÁx( Ì¡€nµÌè¶çæƒòèóAúï³€pþ†Œ®FüÑU%“4Wù€¡Ä+€m„¾ž—wÍ@!m˜Z d°Bf°d‡ÖUêkí¨þ¦GÁùÀiè¼ ôÉNÎ9ê~¶Rä2%ÿÆxúïB¤#î´qz€dd9ÙW€o¥-£›4iÉÆóÀI5BõlyˆL`Ÿ¬0‡3™ç¼É'ù@e«l• D}½"ú½ðU^åUp:;Î`-³–YË@ë¡õÐz€»®»®».7E0íüý{Ž‚ñŒ™téMпÇ2\2Éš`Å"Vûy'o Fòû#G47¯ªÊ ïò_þ ä‘죧A%ØYç À´ n1펡÷›Ò²°´Yæg  µNfYrÔ^*§àÆš“Ae\Pó9pš´f69h½|0¸E½ ÊÐ èÃÛõ±ÿQÀCo5(j v‰±Êç^Šçå6\ýnÝð©4b)rNjË™9/Oùܾ.(g ý Â·åv4¾’ÈÕ—Þyõ$ðûgÄ}Wˆû@¹ìª9]AL»]îZÀæ¸üXVlî"P†,±«üš¾jç½ÿÛ§é @ƒÀ-»íGËz#8Ø/ŸI3à(MÀG­•@$ĘÏå3F‡e'+ˆ4ºp³d§lù\½ÂÓDRªþ'´ÍŠPd­šª>RÈ$ÐóêMg«Ìp”S€›AŽ$è´oŽóTÒœOÃ%—=È,8yŠ9L6‘\ç†ü¾ºï5Öñ ,_ààvFXσ¾\`W£?d,‚`ÖÙ˜Wn>×g®Ï\Ÿ£Íh°:Z­Žàlv6;›ùÕÿ·¸¦®¦÷Ö—úRœ'Ɖ³’Yɬ®¢®¢®¢×ò‘ÇÏá{!|Ë…”w€{”š˜›ªir"$dŸ½ Jw> Gî¦I4ÏcL8arQrìÒùm]A]LH©ô,¨ÚÜ®–&Ïȧ€ÂT5AÂŒp¾ * = jXÔȤ{AQve»×ÀyàÈšùÕAÆf¥¸¯š«½(Zò€û ÿpÔ;ç8rŠQâ€L5ÇgßòŒ±¯€r“Ÿêyã4ó´>u‡” ÍìB`nz à¡¾Š´Ù.¬Êƒ„¬BO€Ò­ç‘Q>OÁ\åÇßÒ¡1 KȪbZ©_6Ÿ;Š¢ª#HØyÑn(«N ) 8ÊO\=‰ÁG~ 8S¥:`ZÏ¿4ùÁþèÅ»ò,pÖ‰7£QÙÎi ¹ÇIà¹Í^rÜYfõâ)F]Pçõ{ô†@¿˜pt]ót§Sžü•,ÆWHÒ´ÒšÔ“1¿Ä”ÚЇž€—Bª*H–=É:ì—NÀí<n *‘[ìF€×>j~ “oå8¶ܲѮ ÄÈ‹‹qòæym#ëZ˜æ tglx$HÐ>zçšœ¨Æ’’àm”´ÙàâAZXŸ†&Þ`Z?©ŸÔO`T4*ÁœlN6'ƒ¼#ïÈ;À'|Â'¿ïÚ ¯Ô8âˆÎ"g‘³¬+ÁJ€˜ù1ócæßœ¯f¿ìúVŒOû怌¡Ú`yDoÙ ò-Ô(À‘²r Ь—É \î!®2 &STÄ žÍyÀ§NiÇG³ žù(jMÈM¶Æ/,÷"`_Z¾ã.P~U\/`¹´UÞ•n´-ªÒ¨.¥A*œ_°5¤ö• ‡Òh¥éËØG!Pã 7 ŽkŸ·9`Sšˆ±%j€Zj.H?»ƒy È03”{”å.);^"/ ,RÊPåÔí ë°Uœôìí—Ÿi*ߨ;{U3àêÝבò= ™wƒréÝ&ˆ© ©HsËÔ¼‚W¿ÞH.ƒýVÖ óx(’%©ç9µ¼HÉ,N€8ö¡n  ½¦g*HPk는™>ÅòëÐMsJƒJt^6ïé VÀÃõ¨)TvÞÎØŸå¾D±L>ºiµ˜¼eö5¿û9óû@ 06x{Ç´-¨}¬½ØEûi ¼fhÆ‹À»Ni' 0©ø­ücÐe€<ˆªî_ ª{Ò¡B† c°ˆÂ'× ü@iç!s# Ë ³Еõ²Ȱâr£€2êž✋V.`e"õÂræubºª¢IåâAYhV¾ÀqÀ‘Q QLc¤"¸^×ãÈý ³Ú‘ÇÀŒË’ Qà&’ý~?=÷Wî¯Ü_A`E`E`8ƒÁÎ`¯xÅѺ×ëÝk'€«m¹wÊNÙ vi»´]ä>¹Oî×a×a×anŠðþ”­½ùJÊ3 oÕêò¨B¼pÔj&€:ªNƒ˜Î³ à²ß 5PÛÔµV}(uY ©vþ`]|ßÜ:Ô™‚v½}À#”ÂÒ“NN~`3ImP¯–z 
]{­V—l?ȊӫוüÚ8í5õ½Ú ­  ¹—G­lµ:’¨æ«×õ£šXj±zd‡EîÝ ÊVGP>uZ]PëÔT@ck@’ƒRW‚´3åt|*Fk Ö¨¯òè  .¨s ¦=ʬ¸,ü@íT+3ŽÃÊå–Üð9À¶Kf¥¢ñ|Þ8¿ûÍßïP?Î瀲¿ ĺjéf¬¶¨ÅמʧÚñ-PÛªž³ðI¬ýàR‡ÕPÖÉ€n’ò«žÒTE5ÇiêNµØÖØÜZéuQ+Ô' =áêï­Ú°²­ÊϧbÂÖ¨’r@"mÝÿ]'°e„LR‹½ZÐ_*ô}¡·@Õ’€øA&J+g;Ø Á”Œe >–M¡Èø†ˆ —cÖ$àq뉬X ’Òµ8 ´Z IVµÜ™@!Ùo7DQÉ¿Y—È>wÛ¯‡š€˜Î›f¥<¹8sMNTÞÃýg­Akd-Jû ÂR;ü}\ñ®xW<8…œBN!p¾u¾u¾©%µ¤FZ^+·¿Qñ]{Áo·ÇƒŠVÑ*Œ‹ÆEãŒ^áø”1º‚‘À·f7P¨GdÆ"’›/Ï UàpNÞ Yõ¿†Ì±Ó¨As@±Hn•ß>N eÃÏ%ÀYX:x´Æèg*‚ Õè÷lPÝ*Ž{pp4þhÅGAº+ñÝ4,b··¨aXr”2&{猷]‘ØøËyVøÄ¼ y5•"òtS‰Z ßÉJ» ÈÔðÔìÛ@’+) üªcA†Xƒ‡@–çMYä°X:Ф¼¿¥ ‰T>Äg !sD`(`8Ø#é-Ü(VÈbËòd, ‹fEŽ’eU«ÒMÌ»JUWÍA‚öÛáŸAév‘PK*ЀòÔ4»dèN ŽÝ,´Ðäk!à'ž(P.µÛ€×ƒŸ\Ù˜NœùI1±@ë®×ã2˜ïšO†¿‰Òº¸ Pk¤ƒL—?qXÜ—`7©¹©Z]ùLf<àâÜ_î´ô×Q0nùHÆ“]Ó\÷‚êÕ §îfЖû¿ò®âø…Xp°V„V‚Ý/ðÕåÞÀ\Ríõ ª©Á*Ôdël7¨oÌY~PÅxŽ­@u.ò5Ïy#< ˆ¶¼¹/Š%Òöº}­ËLû2HЪØ 8œ‘×ÉÅU9 a² Ô(ÕS>£ÓÍ.ÎyÅþ¡7Ÿì¯YC°ãíx;Ä-nq{ÙËuU¬Uò¼U°åëUP†U< s‰dPìátpö[iÁs`±+”ý³Â; F«ªõ½úwU<œúœÚ­ž¿ ¤ƒ–î’l.ÊÞ*VËoFºÎwF‚ri{ŒÖî¹¾i€ÐME’'ì¼Øé«U†oüôRVµ‰µ[OÅ­þÁ– ¹ò&A¾Ïñ_¶A>¶î%ê©Ç0óZZ™„nø,J Àvlk<(Ãì‘û* ³‹K .ÊBg!ÈbË£Jûéw£€¿ˆ¯ P’Õ$ƒ=Î\Ø æŠôûOìmƒzËž4Wšö6¨ÃÎøpqàDÎ’3ÑÀê0x‘[ÔY º”±:Y²ÔI±ÙÉPºU=Д—ÅT±ì`(ÈÖHE"U£ÙðûIˆ™×ù(• @߯½¡ÎcÙçÏ;=›Œ4F,p} UÐŒõÆzc=X­ƒÖAé2]¦“™Ìd`£JvËnÙ-â<ê<ê< ¡K¡K¡K>!}BúˆÙ³%f D¿ýzôë¿hËi‡CFãÕ§‡¼q³\Z`¸ª©ÅÖ )üÌ,@ò"ò~”‡¢ÔÎúU,WЂàOzÍØÀ;ªåÚÑuAÿ:¶N±úÜ9Íe0¿Ú>|L[PÁä[–Î5^Û¡½XVk HЙ$MXô×î¸B.P1ŒåE ¦1Qò“j牉ܖ7¿ð_¤­å±ö,¨(­ŠóHHÞ—.€Å:51o£þÕÈ1È/2Ô7zYÏ]DÊŸH/ûhè{P±TU÷‚t”ITÉçjàØlÔò%s ÞU瘅ã÷W_Γæí¡t`–U'·0ÈxFHC?N´êƒ»ZþÌÊu *µ@Ù2 @23ä!0+YíÙóí‘SGG‚:µ¤ÎÊ`œ<1ë䇠>tò;ÍcZ[­ H&ݸ<\ÕÈ"T¬ “wD+_ƒÜå*îºÎC5«Ô( Æ¡ößܾ< ±§cOÇžW7W7W7PªGÕ£`°‹]ìi$¤8çóÎyäyŒVF+£ÕÍw•“³ïÊ' Î²[=Ú{Æ@ÿ @—3Ö0@hËàë6ào' ±§/u5m½ï¹„hZá-Ù‰@­Pm»Ø[ƒK3C [‡3°Í{Ù©á3àli9O€þ±¾+j €Ö*|PrÖ|ÐÍáLÀ–^ò1ºf_dÙ U 0µGÌÈfûûGP†ç™ÂÅ@BÚJO?9'Ÿ_7ß?ƒ  <øœþ€°% ƒró¬ûQ€Œö¼ºú ãÍk©¥ª©Iª*h¶÷ã„.Wýçà“iµ@Âò´¬6ˆW>‚Ú¤¨A ­(Ý»ý$ ¯»\l1Ð:ž(¿´7ØÇ]ï%U!ßĪ٠žQoKqpµwMwÖA°ëå*‡F€þ²{°ï xÛÆ=Qø!p-7’´G!zJ…ÌrÙHÍW-±8˜?ìxaÏ÷ =º«Þîö Ýyù³”¦ Ö†ë…ßšÊf9 )L!`™6_ûœµ¾'½A¾Eú'õ2¯×ÜëIýMà¾5ÌïÕÒ©o.ÙIqÌNM;½¬ƒ™é§O€ž¤­Ð+€ŠWΠwüáá ªfµ=³T%íS÷ÃÀ~ÐwÈ}VÀTï†÷ Ú = Èá[A%ºçE5µÞý\ÔHpÆ ¦e»³¹ðQ‰yktýINò®DˆmM ˜“¹¬êÙSš‚‡ü”¿Ár}Œ>Fprr'Þ‰wâ¹V£3¯QÁZÖ²¤•´’Vàlw¶;ÛA“Çä1зê[õ­7ßWÖC9'/-ýüöFPõ¨ 
EòÖwäÕ ß0Ò!H:¨Òª?€:åöF½ª¨ÖGS\»¢€þNpïå¬|4¸,\1‰›Êî•eÜâ©~½_"íÌÜsÏn{ èzö`uP[Õl;RZë¸ù à[õá ÜÆh}HÐVèûM %ZQÁý µoË©N–/Xà+P«¢žÍ7´ÙÚíZ&¸sÑVcÈyðì»{*ƒt”€4ßúø&I³Àؤ·×AôÑ‚¼ Ìám?¾c„îª7©æ2°ì³ú¹ÀÚKÛ.ßTÏ™“SÔýj¼š Ž[×´ÉI¡ÂÙ` .rw‘VàÓ5z1èóôjzpVû@¯RËÁ Á™©³Oõ€ð©%ŽôCiÅíí@EÖ¨÷ÝlqnõVæÉ“ó@kz7­¨nê„J†0CòƒjiͶÉÎ —,õšÐ÷jçzÄ¿ÔZk½ ¨Ón=jP:85ý=sò%·¾F Ú$“ ÊVÏKô޼äô«_NÆ¥Û(”GóèKô%ú~­ Å¥¸yJž’§@]TÕE0XÊR–‚ –Á2œd'ÙIõŒzF=s­ÔÐÍ`—|{Ñ­†–ß²@½Ì×(ÀTÏrµÌñß*Åÿáí=ã¬(¶öíkUwï4™8äœs$¨€‚EŠŠ9¢âQ1 ¨bFs"ƒ€HTrΙ˜¼S‡z?ì ‚‚á<çÿ®´?§÷êêªUÕU+Ü7Dô:ý#°×ü48ä¸98PÈç%ž¹ÆxÒl¾ºÁ½©ç‚·#6íXc(>Ïâ_>«’ï ÿ z§_Qõ¸ïï{na6èÅ»gÏê¾[½Î%%€£Zǯª'±ójq>àún1óÃ]í]8Þ5z:€T: ¼²‹Uqyõ”íŽÉOÞJ@ó¤Ìw)þÄ4†:€Ž¯¤$ˆ@v'3Ͼ¨ ¢ÀéÚ{à€tmcxÇAü´'Îîšx0C’‰&œøY 2Dž7*€\꫟R¤œ¼)«’ H¸Ñ7%ôÈZkqJ.(·–Õç&Ðå î)oÀ¾›½|n¶ ö¹G]g„À|Ùû® ñ ò¾X›º‹Õ¸å[}w¦ßêk³—ùø.ñZÚ/@xëÖ¿V7-æ/ƒPnéw«ÍµÈ?=õfð`¢ÜÖ‹eš”þx–ñ•¾ ¼^4÷ªvôqýˆŸ¼RUeÇ@ óŒOAº°„6 Ÿ³Ž@c}‡nn^܈ €èÅEÇî§Ú±È¶J`Þ£û1àC~Aþ#kd3HÓð§{U¡èÒeAºI.ÚÈtú»ßÛÝÓû0þ5PU&a€4`Èw¾‹Sf€ô3žO"Bµc,Hyw@ƒÞgN "ö€È@Š4‘«’cvêNÀ¢ ð‚~Ô£ê#çApÝÈ¢#‰ð;g2%U¤ŠTЂ´¯¿×ß뺟î§û›ÙÌfo™·Ì[¦µÓËéåô‚âw‹ß-~¢#£#£#!;3;3;dµ¬–Õ§è£ÜKIvkZr¢$y¥È Æç9% #±ÚvÐØä''Æég³êƒøåJ· hÛx+ú.àQGç$ˆVá^ýý㣒Ï'¯*ÙŽ«l1`[–q9ˆÏؤ_êIà À¬¡dæÖHÀTìÿM’¬ÀqJ'ïO¬ÜÙr3踷)Z˜ÎäâÁ$¼í T`/ÙŽD¿.A®Ý´‹ë:É¿×;M¯CiÀ!U¯´ŒŠOLJë:É¿§Ÿ¢7Â&¾™n,°®if•m~ʽg@jøŸ.Õx®ZQ72ù5·ßºï‘ÊËE¬×Ó€‰¼#oï»círnQì{ í7A]ª7ÀʰßÊ鼚“¿òkÐ#b}ò§åÕj¨ÍÏŒ‘à_c)c=˜™O‡ègõÞ ƒ¶^< ›X±¦„ËÿÏî+)®uô=ˆ_^’WâݯŸ ± EŽÔ‡p½c3wN‡¢¢ƒSÖ‚¢5{­ vûœO¶ôã§IÑ·`äD»m’’Ò8»3Èg ójé%÷µ¥Ž5X†ãeGÖ\ÆÛNGà>¾w¿z¹U£¯ëIN) •r”9GÖªw@Ýh¤>rž¤ªLйìø}\t4içÓŒ=¾AY…Á‰€œ‚qé2Ž'æQg.¿ê-èVýN¼EÑ {{ýÜõžŸ²VÖÊÚß篞¨'ꉠÖë‡Mlb(æ0‡9 ëÁz0x“¼IÞ$¦ÒTšãÏø??€s¼\Ç]ÍιPŠh T—¡ñ¯E+ïÇä‹T"ìàGçÕæ# í}ÃBW‚ø©Íù$J]¢€Ë ÞF¡ß²?꥞[yHÛÜåkŠ@vš=ƒ}A¯ôš:™ _‰vÈ[2$´ ¬t>FìkÐq*è{“S½l²Í§Ãf%´-—Ë@ùŽ›õ%$!¹J(Jþ¾`ªWœº £rÐ @ÑIJ¨Óõ&òæc ¨â®pW¸+´Ž—Š—Š—‚´´Â4 ,e){vÊ1OâªXAÎÃs^¿÷zH½)>p—šy¡·¤”~tD7 69n¾ò2¨¢àø¬ó@êòµ LžE®®í¥ƒ\XêåúË€Þ¥ßnظç9ßÜ :î t;øhÃÝà½Ë/lú¢”‰j€jRþ¹&ܼ-³¦î„LI$ÿq¦>Aü^+ï*€H$þ)`r úè°šOTÏžÔ÷Ï@ÕÃlLy^•1}©å µÓ?tŒc‘C€úæ[Æ?Ò›8ÆGƒ1¾Ç@úʧ¡¨rA¯Ô ï©/¬ ÀU/¯T}´[Yà˜S2ôkz, hfð*Wçb èz›m/'*ÇÇßéfá%€Ç³±Ç@ÿÌlv€v©ÀýÆãÁ›€˜±Ü°òò,è/yEßÞ,ïmï2p ƒc*wýLå½r@_x¤ãê¯ûKSÆ—šå‚6H¹Ãèøe¸ì<²éXÉ£°i@@ÒÓÓË´/ Ý çu 
ݾ«¤Hq8uwOPcâÍs›‚ñ…NP/W»¶×}@žû`ä.Ð+öOš×èç]ýd¦{l-0š¶< âc½Ñ °ô%¡ SüsÒg‚ºÎW)u2èxÂÇÿ ØE Ò©z+ëÁ EFå-í\^ÒÄ'K³@çÊPÄ;?çBñg¾õ5ºAÙ±>ùÜÏ "¾c©g`â:¦écÔ'êõ ¤êTªÁºÍºÍº ÌÞ@|øð YÈÂß·'¢²½¹Úô.¾r—:m@úzc½ À2gŸûè(/èß@Ë·±² Ò}–Õ (”™ ]–3ЬÐãA2ÌÂ`=`\ÙšÞ9ôÖ/wޏì]²@]™4ú4VÃPià|gwÑf ±9/t¤Šª¼ô>o·] ÊŸÆÙvvÙ‚W“ŒR Y¾½.@±÷”$Ðv¯õ%Œœ“GŠ*8´Òï¦[\Ü ½Û}pXá%ò, ¶ýK½ 3Ó¢ÔŒ)ã?˜Y dŠZk íp„Ï@nÉþ¬]. üBxëÐèŸë¯²]W„vrÅ#lfˆaìµböòu/lW+(s9`q>åAv³žbàg·m,twÝ‹¦@ªqoàëåIóP;¤¿û1nFvÕÅ ·E6ïM§Ε?-o¯±%° ô³VAzèV¹´0Ph^ eÔ«P…ùº ÐÞ[Ï;Ë¿,o+Üê!÷¢’W@ÕÓ¬ jƒì÷¾Ëbù äN½’«@›‡û/;Æe5‡õ{¼ ¥æÂí@ýØî’ pŸÌQƒ‡_H]À w ÈÇFçÀjJÖ®Pp¥âɪБX¦]òÉIe·ÌÕвC{À+vW> :FšÑˆ£ÅéG{u.ÐDÏE@£kyÏÿ…éeK¶dÕ¨F5àb.æbлô.½ Lö°‡=@Yd[ØÂ` kXó-ÏÐàN¬«³8ëŸxjºû`0^oúŠÍó@¿ç­pƒ ç$ )Ê`ž¾U?ô.=½áBàplEAôÊ¢¼}ÈR•¨B̤`Ê ¾wŒ/ýTxLÑQäßVÀo<™Yð¼¥Å%€AFý£±H9oè±@˜GJºÚòB½Ô—f¢håð?A>ƒd³ˆ;—Dg»¦(àÍÄ(˜„Hûç’ØZna ÕƒfeÐ}wºÇA?¤»¹@z—Ú¸#pQÊž Ÿïªý]eÐQ¯—„^òð;pEBµ¢,µAïÕ¼  ËG»m<ùƼõ°õ:à°]ÿT¥ (¡ E {¯2tuÏvÚ3ŒF}sQÁ• Âiãk}$g׊¹`0¢ÀÛF†zô…4):úÞø ¢w€KãŸÓô ØÒˆ,Ê æ‚¼ÌíôY%ŸË|`šñƒ<rŽ=ªäFÀïv1/ú²žlà]Çì.ºyëc ¤Å6µõPæÄæeÁëu¬Û7ߎTR‰"²br‹Gäq`~h˜×ج¯wïL©¦eÛëOŸ6Ž®T"zB|[Éã@w»’Û PVÕ`Â'TZÞŽ¡Ô …¾þ«€2KÁÙUë¥z©^ ÒRZJK #éìd';Á$H ‰p‘²É&t#ÝH7ú‹6ß ÍU;àfõcè‹ùM?àèž •TãÐEVÍÀ ÀÃÎ×%~Éá¯âSÏi¾sÒ4ðLVõºðܾ§æ®—7õÒ¼ çƒ(Ò¨ú8GôM ‡·ä<Lo.i4uëÙ£€fV¿ÀÀuv D&ð×’ØJWã¿+Iw7õñˈÀ*À0®H»pi”ô‰8ÿ0·,MAüz¦[ÐnÓH€|­_ñI'u'è¸>_%Vò”¿;!Ñ8÷3ÄïË Ý „¼‚Ø‹@š¿VfGॲ‹[ŒØ™Ýt®“Rr> ¤~2¿cîŸ$4`€<:5º¿ ½'¯¤À¼Ë?´¸Æ¸t—›%€/aöòBrAØÂuº;ð¡{kä&ГŒé¾EÀ¯Æ^³)èHÁ²MÃ@:y[¢ˆ˜ ý£@ÆÉ:c*àqË͕ձù…D0×®ÆJPŸá£"FCüÀ0»\¤=Ðo¢û¦:ËX Ôq¾ö¹Ñ¯ ¼¬g9Aà±=m)iôöàÞ ƒt\Ü ô'±·ó§>ºòHª5"0 t¾ÚkT]ÇÎ/®\šQô—,бd8ù/íNü´çÐÜÆñÚ ¿ßuôKÐ9Þvç!€y¯o1èý²Ã, úu¹‹A@‘jk\r“ÔU%pÖÒ£<Ê£@MjR“D¥„ 'æ½I-jQë”?ÜÍÝÜ z¬«Ç{ÙË^ *UO#¡§®Tƒ€:êóz “^Í6`¹ —"ÀôB/†ºÇ4A?â-q^}S¥¤PdM›rG™ÿ4ý Ø~âp1è-‘º9€bžt2ÈÕò"P'Z£` èP4«à —¦ô¹oÇF»ã—;u‚¤´µ<Å_IALtj†³³`*àr@Ÿ Úv+Æfƒ˜ÆOþG@GÕ=¾ÄÙ÷ɪª3¯ô H,!ŸÓ¹tXàVê‰<pšâ^ã« hŽ&} ê¬gÈÄóÖéA|¦h:f¶õMid އŸ{.ðHÎÓ«»€Þ^t$ ¿¨D”£Å©_ü“飃@O÷zųA\rëÑž wëK¼%@º³%öˆé¼ï tÔôü ßÅZ¾âÄbŠ>‘”¤h•¤÷®¡—ú&´¯ìW %±Ú9WýŠÞØÕ 8,Ï©ÍÀ&¯µ=ˆù«¥'àÝ ŒÑ@L–n€Agy°ˆë#€Á—œ”Ñ/yó î“Ñå@Vìé¼@¯Ÿý ÈýÞíÔZ¹BÀn>– À'­Õë {–Ô:\ô–¢*ûžy#»æ9Ï‚ïΙ 2æYŸ¦¯vh?`Ê:Ém¸C£€Ôu–ÅöA3/(þÚpåùLö!øÉ]ÂV.=¨äËœ §:ÏD€¢¡¬ {F8ˆú¥·FÒ˜¾À 
µÃhT“ûÕÒ3èOîäuŠNÑ)À­ÜÊ­ÀAr¤žÔ“z h@ÝèF7äù¼§½§½§wxçL¹FòµÊ7ïyÉ\Ø:®ëë»#Æ­¾ý@Ìœê_ hrÙ ø( z—]?|0ÄÌ­:¦¿R­ ðyÎCkÖB•$"Ʊ¨ÅE g¹óìOÀ›RüâQ?èõúu÷RÐamxËÎ#±C@S}™üxúnLêI$ôü™+1A [o÷¦¢»@Ǽº±D1O ypôÞÀëì _t× |Æ3é=™@WÀð¶Ù炎¸ïE£ÉÖM16è¨\i·1¥Šû ùIJ°?ëMÈvJZ¥AÛ¾C¡(•'e>ÈèJ};_úîèÇêƒwõáwV„‘oÔÉ÷VgÔ+‡üzy8~l>èavA¤b€|8º‰÷àÆSÂ;Cçz ŒÈŒdXôt½63M÷Y“6®r Š‹÷,1õÕñþ —é}ö ÄUþ¯ ™q³`(ÈFgdÑÓ ³íÂÂQÀ;ö’ÂJÀ§öꢆ Ëì2…³@Ù?5 D;ç*¦n$²¤œné¼ò¹ìuß¹@•²µhD /1ûXzõÁ_—¾r¼ìMt¶îL= ÚµÜÀ £ê³/ öä­ 7è+¼WÁ›j?^\ô,½Í›b‘Iƒ3ö³ƒ”‹epQ¼BñÇ wEÞ;žØ‰§%?´©”l»u4a߻탠cúV‚¼aŒtùF-6΄љ$óÕËô2½ äCùP>ä÷Z€äN_ ’Çåqyœ“øâú:}¾tÝCŸ\«Úæe]ZÞ{ª:À&_qh SÇ^˜¨5AÏö>uÓA¿œ5½Þl`YÑØ}½AïŽ=t|& )ѤA&eRɽ/2õ¸€¾$^®d6ä™ĸMϽÏ{Îþ ¨é}é¾ X^;ÁR\‹ŽÉ…À=íZ! >ïœX;ÐÚ¹®°? )Ò'2õ G%é ÚqçØÿñ9s¢ã’Ó¥ZRßé ²“ûõâ2èJmJIDATAÃÙ¾ðt+ÏKlulÊíõG€¡ÖÙ÷&—ëLj'Nèõp!NÄo\ ¨F-5}kÙ ³ÒëU?ôˆ}ÎÍý“=±¸)0Ì{Í^¤f2|©ONQ$(=z¸}<|èßÂÑck‡zI²7í©/€vÜ_í×@|Nûh—:.Ò§µÛ  PJ¾\ Á–¥îÝ»ÐØñ-`p‘|Ô§· Î{Þ@OœëÂoìb¿žbÓ‡k@2i¢DÑD§€ìgÏ‚ÌvkGïã5·W¸K^R©%m@DZÙÚV·Û;A|õ;ŒBªY¨>=¬ðå½ã€ïbw~r´ü¸æ•€b³¡ II:Ø~AjKÐ}Üóâ€ÞíŒ ×!qD9½úUãdÓôzï9·x”üçh è9Þ\çÀ¤Ž$ªH³ Þë•6ÙµÂWƒ7ÕkáukQh-ÈB•kÞÿçùy"±ï´Ÿ¤HФ€ ‘!2„“ìÁ&}èCN¢K‘IxÅ^±WÌIˆ0i'í¤Ý)O˜!³ÕvPÎÏø¼qfž¿p½¹ßß 0ô^‡¼õ8ÏéBÑÁ”ÒW˜é «>ú·=#f ð¸LmSŽº ¹@žýˆý]äjÐsÂ/æ–} ê‡?í%pô](öJœö •ä*U:¯® Áw}ý—+¥(iðAÀ`ŽÞ˜Îí%€£SœD{«Êˆä;žŠU§9Ê€ëöŒñ#|™ cr‹‘Èá>B"·¾ øÝ ± ÷ùxÂ\–'Á@­?P¼UAÇE9Ã@|j‚9tØki&| Á$‚ËvøÕ`ÓíX‘à2zV›àJPÕ+¿×å^ð>;0dÑÐvìò—S6¨ÏAgÇŸ(¾Øx&3Dqc2L ô]Iw]«d[N=ÐÜöñÏÝäñdKëžÖÛY¸ñ'"µA|†çÿtLµ1eã»ø…Äþ(Hgy‚é@W£–Ux$|Û¡7WÄS)€Çýɧ (÷†øË ÷îØ0жÑ<°—Ä—úGÀ¥­Ñ¯»…@ÄQ c¼Å—É¿ÿ1úSp¸E? 
ê;0Ü~;1>\ äëO\?x‡ïY¾TëjCÎ?R¦hÿ®k@—èã^M@(G=~ßÞ—p€ŸAϱ˔ ý°áî ¨6æ; £ìâGÀ/}åÐ*Ò/0P7æ½ãzIDÒOëg“Nr#0Çiaïïuù%ÖÔCþyi_Éuï€|¯óA£¿Ñ߀ԗúRhIKZrа'=é 2M¦É4P[Ôµôúýxã¼qÞ¸ß}„'%Yƒ¨F¤ÜYîZðÖ™U‚@¿ }åBoY ý’¦Rˆëþ:¤MùÏ[Wmæî~t¹¢Þ{Wº34 l€´R7€Î¥¦~t§ð†Ü™ ¹Íã•¿ÄUs~JxO™Ål ˆ=È”ŒÑ@ÄÙrÙ­@Ûæa¢ÍÔQnvøeЮ·'\ ðd³d°[qfÑ ãúZ÷RŸ{0z Ök©‰³oâlfèÝ® cî‚hSÀ‘û’à›eÍïzþ$AÝt\=h'òÜk™#W_"‰#ÉE \ÿC!€|u“Êé™=¼]-Ð÷ÅÏÏ’wûæ‡W­5¶:yÖLwØ Ü ±Æ@ÀÜLÀ'|(å£} Ž€.ÛTЈH"Åù'þ—ý±Ã â³oŒl$pOj_@s®l¯u¼ p­ïíÀJà=ýº]Xošÿ0QŠ$–|Ή<ˆD ì 踻8ÚÐÆßÀ•*Æt@˜Î€ëì‹^ÚÖƒÜÖáæ$¸ìfÎ<ŽŠ"ÐQÉsf€(j›¥AÛÞ`ói E¾5Þ=±çv ¾5"˜ <šújÅ%@¤¨Æ¾Ï|¥^?Ek£@¿ª+¸õAwrÆ”\äú+dd‚¸MZžâNŒ¿zZøšœlЩ¬¿”¬‘YÉ^ÝyŠ^Mè§åuV‚÷k|{ôiP·øw¥÷jeh.Ó—éË@Ö£õh¡2T†u©K]à)žâ)0OÄå6¹Mnõ«úUý ú}‰¾ÜÎng·óï>Â?ŠÑ<ýÓ S þŽŒ³Æ 8(‰¢†6Ôl&èÚ 7¾Í¼$+£GÍà…¶œQôSºµ[ôv›’ ÊUÆ­@›è—…Ç@»Ñ‹órHâe«$±ìNH ¥M9€’ å*Àç5 ñçÃ]Ó0­Kä:½À™žÿ&àñƒ—ïµ>¹Ãùë²àêÒ´ë¾}Ä2|þ¹ ãªµuˆå Œ¾:êÕpް”d|]ÎÂö{R¤ àÊ<¿:j_:îVò= b™3|¥A‡ÍʾkA ÝWÖ±KMnøxÏngúd`,éW€/IO8µ¥·2EÞ·öìâ~ *øoDÏó€·¿dÉÑQ {ë^â(Ö_¦$ ò¯û£²´í:ÏF±Üíþ@ÇG|Uí.8ræÒJ>пÅJò>徟ˆ,SyÉþ9ý9ä Àõ:86ˆß:nmNébyóíù =77z9àÒ;I'žBþ^6ãOö‘`Ì PèBÉï<ï~¯ øn,˼=yßÂ?¿žû¤û¤û$p/÷r/¨yjžšwÊN>¹˜ÒWúJ_NâýË\™+s9YGì6r¹€]ì:Sݹ¹6åér³!ºI7[€÷«þ;T%*±ˆè«½[Aª–éÐÌ= ¸Âª ›Gëæ4L^W×€Þl· w]Aù­ûÁû¸dRÎù kzç¸R€‚“W0¨$ç©2[Æùº¢×²¹´í·×ƒì²Ñ¯QOÄhÇ‹Äzš¬$ÅYøf ªÄÎCwõJÚ™¾Äg¶ îí¸wD¿*êDüVIݤ÷Ä;ÙúhG.µ‚„T¶5P¾…¡€H¥@†UøðÜ} §æuÚt>èûÃßæLR¤®Ij:5Ф)׃¾Òmmß úu§Cx) bÛJÞ}sÜ\ŽKbœóÿ!@ŠÀ¸+ò:ÄSÃ… ©¾ nèCîEݧSŽUj<^òùÁ{q ñ €CÔ¿ì"½ ´ã>=Ä4^µú8í#×®>ßKY‰ØÉqÜñÚÆm˧îùÆÙ4«¤: ™½|ÍA‡ŽíÝô ¨Ïkí;äEktJkÐ?9³ÃûƒŽòŸS´„(z¥îª/o»ýxñ{ .w³ï½%üø±7àÓ$ò[J%\xÂGS™zà­Ñ“e,x½ä2ë8˜ËR”»þð1<)î$w’; Å(Ft‘.Òd€ À7|Ã7`Òƒôîàîõ–zK½2JFÉ(°_¶_¶_>{/š%¡ÌÒëÁûÎ55 ëE®.ªÊ.§4HkWÊ(éÇj^ ÞªmK>?h-Sô `ÑH­ý#ź€#ÜpÚ£nèëõ* Óz8” :×\æËt -} e‡š8|í¾ó¤ ­;ËöÀþ‡q÷?ô0ùŒêH{UxBÕõU<3Í—ØÊgtòzÉ¿Ô=p8'±Ö—ëz  ì‘W@>ž›ýÈÿجµàmÙvÁ5€ =³$a0}å Ä'Ϫ6tèâwÕDµ1ž†¦] ˜¤ÿK¨±R‘¥ m"z-HЛÿXÑ2W€Ü’RP­#x‡þökwÀ´Ú—Ë]ÆßõÏÝ€G窫>à1£\`7àªãÞ}€GŸÿ¢Õ:Y¼eó‹¾PÞt¯+`Eß/úôÓÑ”âK€ôðÈcOÇ‚Ÿdoжþu.åŒÉ'uý.AÒð€®Ò­èvð.6~UA¿ï{5˜8 – &xŽ2ì íRxÄ€j1tmý´Ù¼Ýf©´`¾–æd¯ç¬bnnª®ª«ê‚šª¦ª©À¼Áœ ë›t 8Yï¯&¨ jq#nÄ!>*>*þ™tÆ¡*™ A:›W¥¼n‡HìðS`Þ®–Û A -3´ùÐï…3Ž>ÚÈ»kû6@HOÂAã>Ÿ¬ÐC@»ºmìGŸopé»1ÿ“ö*€Þ«_8íÑ6àãARÖçÁÐ Õ 
¹¨é#x.`ðT´ýzŽˆ÷•Ø€²•{ðh,?àè¿Cì9Uâlä+ǪZ²Æ·(å(x?GZæµý”g;ÕÕéô¯LÒ•š –´tÂá± ™n«h‚ìamÊ€žzð¢_~ýuẸ́ߦXê{^KV3þQLÀO^f¼KDq*g~ë¢2Ÿ.m$ù{£ÓDªÊ] ¦óBQ {ŠÆƒÔó_¥ÐÅ÷yé¡À›v4¶¸Ë\™à@|!‰Ê|f‰²›E *¿¹TJÐÈZÚ‹ŸWⳆoB4LWýSÑøITŠ| s£·8G#`ÆÒã㺩³t™£sÛr4ëç:÷ƒ.uàŠ¥Íä‰$N†qV¡P,"õ‘=ôG:ÇÛbø¿È¶@ÇÕM¾!ú¨þôäo~ÿ¹K!èM2H¾÷Aç2ÙraàîìWÀ˜ô•Zuö—sÚ;íö žRO©§@öÈÙòм"¯p²ÈϤ! i2U¦ÊT×åuyŒjF5£Ø—Û—Û—oñoÁ Áb óMNÝêùô¢*+À9\òúö‹Àÿ½ï…€RºüM-g€·u_Õy ªnì…€TñÃÊĿڟh:9Cí|[Ðħfö‚ŽËÆ$ðE `»•4Ù¥< –Úm}ltvÇÚuh(CAnËð; ]÷XÉ/ ý´ñy=S¦€Ž¨ÇSÑý\ÿçø“Ä9È*Õx#r™¯Yèc Æçf ¾ ¡o¬è×…›A‡ÙD‚ÂÌ:cŽx)Q|I aêl Ýâ}AdS ä‹ÒÖJ¬Xº%xG·<0½.*{Ts±½ 毤F õ‚û蘌ræ‚vݧŠÛƒøT‹ :bìN9‘èôö?èÈ¢+ˆßûÑ~PÎôü2 ‹¼xt;ȹ'þ,Gš?ØûŠ¿î£ ¼üíØÙ˜ž6RÔäB™¾ËBÕ@ú{Ík€›|½C[Ant7Ä®=Å»Ë}ðQ‘Vüqi «œÏA»|_ h³½‘ â3wƒ.t tЭóîÝšêŽJßvädVýEÌ_0 0œ\8ÔñËdÝ0Tiû\ÀÕ_ºß†³«À(ë2%€Ç/|šlOñ)­óãq xž½² œžnªžª}Vù Ï€q½o}ÊàC*ñî)¿kK[Ú‚=Ôj«¢UѪR]ªKuà2.ã2p7(*Q‰J ßÊ·òíïGóóópª;Õêàw»ÇÏÐÓå2Õ¬U¥g×n Î 7WÌÊøµv !Jèš÷ØöÙ¯¼äD® bJk·“nÎÕ€&$Ã@ǽ&±ª€áÞT4”DÂÎÆdG%âщª«²êic-+ÍúÀZ÷ÃèÍ€¥»—®³³àvÀÑOxW?ã  Ëóµ—¸ád—' çôêûßDbÉG9Z´·z„Z€<¯F[³b½œ© ¥ÍyÁrÀ³½¯fƒþ&©G~¿J&sƒ7ôõú!÷ð øÄù¼}Üá| \¬?æApù¡m[€6s?Y“ z¡ývño€Á †¡³´ŠI yT×\Y¿p¨M¢?îñÊÚÙŸÀ2¤¹› µ þe€—‡¥A|¤‚6Ø/¥À™ç=BE°Öe}YãWàCi#á3¬jmݶn[p»‡ÝÃ`´3Úí@Γó䥿¯×¡¼ÅÍ€½ÆxÕ¤©W] mêýÁ+­“m×”1ÔÎ/ m ¹ÐtM†rÏ“ c:+ÚÄï÷±*d¼h—ŒÿþîTLðWfˆå¥Å&ƒv ï`ú¾õOyJÕD—°‰„ëø|ðJéG¹œËÑÖFÖ,Õ¢æ_8«íªvU»*'y>Œ<#ÏÈÕIuR8™êÿûp¢ož’§ä©ßÁO`†É^Ù+{!º+º+ºë/€eY#«Õž(sKƒ²àòÝŸu;¨ßr·®k d¨7ÕR@ˆP$¨:;ÍA{2Ô¹‡D¢äÇòõ‰“.€Ã^çE@œw Þ ß[e·1¹Ÿ@[ëÊà·ÀJ ½4`ŸS®h'ˆáV*¬Ú–WuѨŒœêlÒŒ×wŽÛ´ *ˆe”„R@Çe©/QŸÃ$ÅrAêËÒTŽ¿cêu ¦zQÍmó6Ÿün`:‚o"s¾oHg«j°)´Ÿ/ù¼ªº„2 +gÏk·Ìwë7»ª¨hš®\xŠ0«S몟ë_Ý¥À±&½nÏïüõ#Þj úÇ£Õ6ä‚X\¥.mãKf Q$ ‡¼‹QçÛ‰²Õ/“ 4±“ †/ÙOŽ»»`ˆeÖÖ—e¾D†ßádn» €Éeú-Àç\Ÿ$,ù<”$Õ¤3àò‰žr©¿EÖàÁغ£kÎn—Âɀ߼5« åkN§Ûla H¦Õ-8˜o¶÷­Jg4ñ“µëAB*U}*î“Ú¼rÑ_ò‚Þ£ŸÕ¹€"t²­ˆŸ–Þ÷€H•x ù–Ÿ“Ë}¢Jõ 4 e5 p|¦9Hw:@Ÿ_0eûP÷TXÜö½¾ñ©ó@¯´,é øŒ#ñ7WZëDtbÆIÜîä^F€Ž{]#oƒX^¯âKAG¹éÛ-7éZ —àg*8uÝAnEÐ?úÊHߊÌ*_Ô$ΪcÍcÍcÍÁho´7ÚƒÑÌhf4)/å¥

    çsþï÷ÿžÛs-×r-È¥r©\ Æ3Æ3Æ3`ccÑ¢D?æ3ÿTðÁ“+ÉÁãYãÀXXëíÞ@¼UÞÜ]ýAÇcñ‚Æ i\+‡@LY¬'ŽúÞ~pIK:­¬dÎwŠY‚A5¹zÍJj‚”s«—4úïm`°ïÓôRÀw|ôe`|onh[o±ï ¢ôNêÓ§]+HжÞ`o ç…‚€b³N`†È¢ì×óA Ì!pÐôüS(û˜“Ô'§´WE e.߀ ó­O €w„òj¸ Ë4huX£šm¹i1¨žiïVÎêr÷_âô—A]]ö‚VãÀ¨Ø20z&èêéVoz¿nž¤Ø²ðBÖ¢öØûAǹË'[Xíýœè$­¸žcW §bA @±:‰9JÂVWãZËíþtÌmQÒ€ªIT[ßɽG!ÐÏ×9c,ðMlFÞ Àq.ÉÏ1u¡ˆ–dÓˆq”_@nU+ŒÛAúúæ¦hÒ •o~ï×ÓÚ-§ŒÃ!Óò¿Rh–öÏ"ì×s“÷%ú#ƒ/%Äs@Çià%œœfrâŸnwC>è¸|% ¯¿¯%ðvdÑ1Øâ>½x9µn¥n€+?Ú @Ǥš“ÇÙ§éó£HìlW%º‚7ôñøòã›@Lýƒ È@ßÂô›@ÚgúëÖ÷Þò+:N3TëÝKƃùaðÙRgâéHâvÄÄÄ€YÎ,g–•©2U&H‰”H P›ÚÉZ“?,IJ!©%µ¤¨+Õ•êJ°ŽXG¬#ÏŒgÆ3Á9Ç9Ç93ÏbÕÇœ þé¥+×ñsÛo˜ ÞÊ©B‹D‚†Zf×å /±òG©™Ô‘¨$$ ½À|]ì.o«÷¥·¼õçkЭQ©o]CºBÐ^¸ÓÁ×@/·Íâ'-™Ê+'ßRÿášøïTi :î5-þÄçŽ,bì¦ärilT¹Ìz$åJ˜,mÓÛ ¸\ìõ\zz}—K¼k@Géêõ.‘®ª;xÓƒ=³€U¿îÖ7€ÜßÌ-²&c]óAM¨wååw UM³ H ¿±ÄT]Ü: £ÒÖyÐýiëv¦þ€Li :îî*©âóÞ$N¥ä]ÀÐYî} mçpþÓ€ÃKÞÕI¯tŠ>ƒn2èhå¥õæØ?¸ž×€á~Y80Ñ€Ë@ºX£C7<¦úš7‚޲#¹Ðž±ÝÚKîЪò²”è;Úd Ün|ØÀ1éæ$Æ¥¿àJtOҨ꿼îe?è˜u‘ :G]ã½ zPaß½KAæe>]ëE ,×ÇÇÙ´/˜òMü:À#ä%úyÇ“ý½ã/¯yX€«Ïw°çô±hïžp_/Š®—K,à5$±ø’_h‹B ïÉ}@s³T(ô»nñ@q®<¸nIÑG€ë¥G‚üh¤ùZÔ±Ž„^#áÌK؉„HsÈ:˵ Té_@¦¨Ï¬þ ­|eCut À”;âO’ðQIê=]ð—×|r9\ ˜¾.ÖJЗöÛyäýtUq ¢6²È×_º¥Aªªþ¾ ³3öÖ ú°Æ#úƒjÚäŽÛ§€ú¡VøÊÒÀ5þ—Jooß¡‚ùýÁÛ±¹ø½À¾~ÓŠ÷ºsï¡s—®ÿè´¥ÙÁ1‹Çâ±8'1ÿÌcæ1óÈ ™!3N ÿýAþ”Ý+_Èò¨jªšªæds²9ÌŠfE³"”ÜTrSÉMBÊŬö© Ê7ãÁ¬ûÜñ rÎ;¾ ¬‹U×èvÀ¢±JdB-4©›’¨‹ç¨žîL]F ¼ZmMŸ‡ÁjÜøçAºÜ2§ÒH_y’óÉ„’JtÄ-ý}“§À·¡êWƒÞ¼ûµ¯¶€”‘æ²èî{.ã àþЮòËAžO¹§òÏÀ¡+Ì)í«’ypµï‹R—‚üæËM« ÜÃéÄô¯n] ªk¸é€Ö7éD®1Óä\b@ ©š ?÷"Œ® ]Uµ2ðŠªgVå&²'Ð5«)0?ý‰ÊÅÀÆ’F{ꀶÕM’ ¡,mÝ–¼9û_©vx,áŶ׃d9  Ä)î h&'3Kszµ¨Mb¬ñHeãÁ@è©ÎøØ*à ›@Çô.í©ã./Þª0ÅWñ9¸Z`<Úa' ®þ)²€IEZ‚´¶&¤ý²ó[ñ Ðýõ«RÅ(IÞ»æ_è“u€kv•¯€{bãŽtÚ[­S—‚ì)E“Π省®^¤qÖ« ²—ñÑt‚ضûÁ¶§ò7{‹{íº£{Ad"ÎÇ@q8ýÄÏÇ@0V”ÖmõPÆwUîà¶35.<;<;<Ì^f/³A#hAÝ¥îRwñ;0ÈŸ^ê2’‘ŒÕGõQ}À(c”1Ê€ï°ï°ï0DºGºGºƒ»Ê]宣•ÑêÔzd©ª›ý 0¨RøÜç rõ‘›~Y ÁþÉÁ2`øŒ¯BSÑ˨Rš RÀ»ÑËrÓÁ •›Ô©øi»³H+kQêþ±ˆX\f˜ÿi²úŽ ¸+BÅÕÏ™)mñ@^2sƒW=ì%qÐãÃfNu oþ³~ïõȸµ“€jÆ·r ¨ñ™•A†±–Z@XotÛ¶õÝz"?è+’†Ÿˆã~‚¤©¼"ô&÷gï.ÐïTy÷â€2Oÿ›Ìà¿•,>SÕ@ß#¿Z«@Ö˜ƒõ ¡5$£3ÝD%ÛþWšOä-¬Õ…@Ä^ Úu›÷ŽþÔJL¨&ò+­ ôNTõ ùPWÖV[íÊm½•¡ €ð¦ŒŠæ¾: ·JªnR•IbŒ5gKs=«$ޏ8¤{:{€v÷Ø«æåÁ„¾oå0@Áè &”\p"äîð>\ýW€\ꛟ¶¤$ã¾j&ÈæRê_ ê·Pý2­@Wú˜·ÎÞ ÚÕõ<æëë!…t@Ȥ" ”°ˆêõR`nâî/.ÉÙ¼ÏX®û?¼×m~€.§7ÝC 
ÊyÇz3õ^ñZ8(‘&œ hÙ öHŽJ UÙg¹–K] â×¹Ë@ǽ]Ñ Kvï³µz’´Г¥/©²@º|mLÒ)2*{Ü^Î{€ÒŸÈµ >ÕÒ<ú ååžf—-é ú#ýŒ»ÄG%š`$sâÿú*¤ º„÷éznɸÜÍ Ÿˆ¾Qø.HšóŽØ©5”$§¢$‘Léhr¥ P¬è„Ó8KF€L í®´ ÔwÕö÷ܪZŸ¯X|[®t™Ü~ûôêœÇWúÁë±§ñ×@÷*.·ëNЇôv×2Uc«-’ïÌ,@IºqˆO"ª!è/è®úÝ4¾+ZÞ/)cÊ]Áƒ•–·:•óù9òsägNX1+fÅ@µT-UKßä7ù3”ó&äÏ;€d^€rŒÆc˜o˜o˜o€u‡u‡uÏ,žY<RÏI='õ 5¨qŠâgRz•)ß[Ë´ßÑwvtÙ9|·øvÅûT0s}qÐïo%è·Ó7Õì Æ%™ÕNæþkÌ3ˆ´ô7Ï(Õ÷y&ðx|ZÁÀ“€ÙÈ”ŽÆ1K®U>ÀòoM- º@Ú© ÷9³"߃\l¶ ä,1êúW&3ü>>ñÓ @.6é "ê'õ0xÿ9üÛ¯¿€Û©úžîƒqmj’(èÿ&nÆñg7¬IOý ZÕ¿wXwПÅ~:žlÏËZç:VÞµô~OÇ©²ÆÈ„ÊÉŒ2tÇØ{E³@Dæžnç}…wXÆa«&蘨ù@‰®ìƒTQ]¬"`iF¥ÚE õÊÒBƒ,ó×K_ºN~xëð*l¾vÒÐFñ¤]£A¾/S³ÕL”Š£»¤¢Z›£‹/$±à¨d}ÁŠ”v’F€º Ÿqž‹÷†è”x={ø>¯îø*˜÷§xeÏT>ždö)ÙS²§dXë¬uÖ:0ß1ß1ßµP-T wy÷´LÁ?ÈÙ‘íÇ1Žq ö«ýj?˜“ÌIæ$ð—øKü%y0ò`äAˆWŒWŒŸ©\4{%å­U»=ö#˜ù8ûí‘Ñ·<ý…Ûô<Ñ=€A3ûã]ÿÿ`⟔'͆#@Ù”Y7ŸèåÄA,‚'©´l«0p5舷,޲ô\íºËÁ›j?Z’ z-]ôÍ ©'½Þ4H ØržŒy×™ ªIî®ßvB¤ÑÖÁ_n×?xÿºpâ!ׯÿSԜΛҧ¬ºlÛðÎ+àíÚ¸}âƒÀEù5¶Üêžò?ušªr]÷:Ä+Ý·Ù¹ eÔhcP¬ëz›HD uè¡D˜È™}tÄ[l¿Ù2p訿\µÝ~P Ôãˆ'øÅ‚²EL†Ê& È›àLµÊ\:bÕ ~ hÒ(xsô*»kÉ oð²ß Pûd‘Ï&‚T èµÞ³n𦔼zÔ=Û[î,,úÊK #îö ‹âÕÂ7‚TV»Ía J•No<Tõz®šꦊƒÎ}hP0m» Þ¶-÷~ô>x÷î^úuЗ¬ÝÛ ÉNÕx!òÕ‘@.5;¥Ø ŒýÀãð™ÊtOŠIYƒÞ¥ßu[€³Ç}ìiFͬŸ!åÃe/|’ŽÎ?‰ý¾ý¾ý>IJbY±,ð½ê{Õ÷*FQr­\+×íhG»³7ãì À‰°àp.ÃÁÈ2²Œ,ð-ò-ò-I•TI…âêÅÕ‹«Ÿý~¯LãºÙ`NÏîÑþ D‡ÅÆ;ÓÀ›í”‹m¦éÜÁ +R,Í€’3’‰ÿ÷RÂn~}«n¨öq,yVÜÉŸêaì­}·ƒ ’N-‡ ȧK€úÎgÑÅ Å)ŠœG"æ gTQ”¢&è:æ…@Ï(YóXÊmö[ànµ>k%6MýâeÐÜ_íêÿôeÀ[û¥pD«¬{îÍŠ Ͻ÷ç* .T}Ì"ÐSããòwƒ÷õÁ×çmoËúØ+ëÁ[¤ÕÒM Ú•ëÜ®-¨ u¶ydofZýq€ÁVžgÛ׃ÖNÍÈn@³Q'œq'VЍ Ú•<§5ˆO=äö‡Ú$uŠ‹å€Ô”~@œbÞ,ßPpå6•pga3`QŽf ?Õ_º5A¥¸ èÝܯwƒ(‚IòÕ?˜´‘›A«èù~ Aì@ÑÝ@6Üë¨ûH9õ©±TVÖÞ @Õ¯»ûªž õËoQ¼%G6¬Œ€·mKÓ·üh§UåA¯²'ôÒ½ÚÅó—ò0`q»dŸÇ'äçÊë—Àá[ÝýŒChIg¶¾ôLçœX£ä¼ø̙ٗµÏqé`ݼ³ÛAñcÅ?j…Z¡V€UêaÕ5BP#@öË~I”+ÿ‘äö49ûü¡–ÃrŒGGGÁiŽ4G‚¿»¿»¿;5)jRÔÜ îwÃôì‘cÆ8H¹ªŽqI_ˆÕ’!©ÕÁ±ÝO½r ª•ÝÓú07ëÕfß‚žîfØóùŸ‰nàôŸú×â‡Ì®Çå@s5“Ë7+ô-àÈr#ÑûNcïUøÈ}ˆh úÓø×Å€ž¢oq_ñ$„H¤Ê¤‡< TŠŽ-˜Úfåïuž<¤¾€à\¶„ß·õo·¾^…wZAìæÜ7¶­wD´UAmðºÄʼΡ"ïÐùýuÏÞ…= ÒqÙ5O¿ê™Ýïö ½Ô`¯ èÊKoÀäjÙ dJ£è]î”Ès ½£/,k^Ζwß©z[n÷oƒSéú >UP+ïªK@žöy™‹|oz¼%àq$™)wãN$Qž{ Õ·ñ§@2C/ÉH€R†b½põ;º-°×xÎì5|Öc€¦½×$•mzˆ…ªË… ÓÜžÑ@Eg`´< ÆI°Øzqk†‚žâ¬‹í=½dçÑAïð®q^ É\õ>Èñ2ëš< ªb=Ü©U~qëŠà}¹÷®ù#Á;°uÅÇ/ƒvoÙx è^9ç ”ìQkAÎàÂ"Àñ^‹¿@9(êȵ 78S¢Ï‹ìíE`„9$õïj'‘qW: 
šØÁ˜”§èÃÞb§8ÅÎuö"ˆ5’Cé@Ê%uòû´¶Énã U¸^ª—ê¥BI´$Z†?ßæ÷æ÷æ÷ öª½j/0†1Œùûùñ÷äVÉÜaÙ'ûd˜óÍùæ|ðgù³üYàŽqǸc ø¹â犟;»šÀeå¿nÒÌ«=Ù{ Ä?«ÚóªW@RJÕlºt=ó¾| âOæ®\ûß-î’Üö›î¶æÿgÇ· Ã业hóYŸ:jÝHÀ‡Jfâé¤gä÷«©N/Ðwyk‹A/²Í’ ’™ì @|4æÐ_¹صÀ›ZòÉѦ ·êŸ½Ï@CQÔF5W½¡Œ:v-ð—?øÑÜàìÓ/»âÕŠ\&K'‰¾û{»½$ zC¹ðy³â_‚Îw/Š\^Nî'¿5oû–{¦|ÜWòö¾i ŽÕùôšÖ n¨ìëÞ ¤¼ÚnM¢\î LÚL*‰”îÆ@*¯z@ÿ" beÀ[¦0¼ÆæiŸ‚çúnÉø ¼ŸËku!è¬] —‚ÞÜPñnð"ª®ÿ;ÐãõoÞÏ€Éd]ô.}¯ÞÞ{ö‹ÅA­gz½’ nGÐ1–ñèÙá¹ÍAß_Þ 23ã—õAYVi jxÕ—Ï¿¼_¾³T·ý˜·‚6L^Ut¡óVt .Õ£€¢?â-Œ¼ :êÎ,þ€ 4Löï‰Õt P¯sË‚~'zqî wÞ*]°xä@8ÊAÒ© ’AGïrЯ9 c!º.ö›[Œn6ž[W–÷7ý#\û)R¼¤xIñÐ3ô =ü÷ùïó߆eX†'~NÂýÿ˜{G’DPÕQuT0J¥Òà»ÀwïðÿäÿÉÿl.Ø\°Ò&¥MJ›j¸®NÁ’“:ê¸5 Rk¼|p%(X¼2ør]°žÙ[铇Á\&Ÿxéݳmܧ«À:7S×HµÓ_ùïêéÖïE·ÿ ì”Í/¿÷1˜íÜEKÓ(´¦øžOY¸’•$}ádNú_Å5ÇôFЛœr‘A®2óƒY@À˜å+½9rôxзإÃõ-%ákx‹‹A'ÑŒÄÏ—Ò¬ÛBõ3¾ÿ¼”çSîw[ñØ-¯ƒ+c¼7AÞò×K­ò†:Kд_É]"™ò! Rð/ÿ¦3؉ô· >gFt$hÏçK€Þèî‹>:¶gÐ7‚Œ=z÷²KAÍ­>¨ï+ VæÈú?€÷È®Çf”ýDøƒ-‹·dŽë߼͠±_» §/Ý_¼ÎG6®ˆƒŽ~aég âå—®÷ Ûý‚s4üˆÏ7;mÐ;Öªèsн¼>Þ>P ëÞ~ÙO Ò'×hºÙáO–ƒÞväõ@—8"Õ€<ÉVÝ‘wÅQñ_"·€ö‚?ZÓM/F‚(gLA?Ðqzykäã“N¼Ó2Iõ7Ì^ŒŽ8Úx?¿xÛTÐïï»gÁ} ?mÈ_ ¤Š§:ж$2éNDâ„E+¹$x;­% j§Ýt8þFîTÀc½¾HÛÿYеå= r Tݦ«Aµ©Ð­Óppó7^7q-HÐhæŸd˜­BëÀýܾ5r9- ?é,.«QþÊó ô›mÓî\ Œ—úê Ž¿")’"¼õyëóÖCfß̾™}!˜Ì æù–ù–ùÖ);€³xþÑ”ÿñpÂÖ’w;yNž“ÑEÑEÑEPx°ð`áApžwžwž‡Ês*Ï©<ŒF £ÆŸõØ5 fíkÇú-¼ïáöºÙ]xø5´ ôò-øÑ¡áu?¼£Ú•`Mi|×ÍCÁlX6 Ôò‚Þ.{vI¸ãŽ•Ùô)xCŒŸÓ¬#áÏvW£ƒ1IMÉd¿$R\³ÍΠ#l‘„wYÄü7âQB1HM™#+UP pmwaòŽø_ü¾ÀÛíÞªr¹Q-_vfeÔk^ÊÖœi­|­æ¤©%t„ëâG?­NÿÙ<ÄgmÕvŸ‚\©²F“H{Ñ §yOÚ/ xlÔ€¨nëòUFAÝà]ReüÅ[¼¬Ý`}—}Y»7€‡Õ«VÎÑg³³c19÷Š5Ñ7ê€l+¾p êîúât`š÷±ûx×ï½aÖù ²ç*ÀâYXr›$}Ü¿ôA}$ ¦© %µœª Ãz•›ÀŸ(•ÌG8óBå6ï C?UjN}ßoÓ¦ogiz…› zžÑ+Ø¢ƒ¢Çö@qãæìï¡ô÷ÿóäZ°6dÔ­òìÌ,ËËò²ààóŸ?ø<ø–ø–ø–@Úò´åiËÁ7Õ7Õ7Œ Æcp×ý›<“°FüÁ†É"YÆ4cš1 ,miKCpApApä'ÈN)ø©à§‚Ÿ TR5Ja°vft¯²B7ÖßvuÂ#6ØìÿýÁ×€ÿzsGAK°Þ7n÷…À}:úâáͯ¼î²×3ãîâøK éŒv¿sžn¯+Û¨ ]AuöGÊ´Ë@.â2-Z 0íiáÅ€ŠŽ±Ç›ub ­ÕŠBi z¿¼â´<ë>ÝÄð}ýh[#AÇ‚3üÞÓËÜ&ÀðÔ2Þ>vkEàZ_ßÐ ]Æ«¶@œ­‘W@,Ê¥ÍÑçZ -S¯ú' MÊ „4Ê ?§(\ =úN¡:Ìv=P´a HM|÷ÚâËsk€ÎŠÍ GÀ¿»fø’¡ÀÃÔJ$cÿoEJûu©O7‰Þ4¼C9£ë zØ–ß Ú̵c$è’ï« „ü3ҩɉªÄM»32E ¯c ¶ñÆu€Òw ¡s2¿Å>£Ž§h,e„/È0M9Ôôo®ð$0M?èVÝÑú!ý9OŒ–*¸ B+ê?{uÙäÄñìM,lRؤ° xý½þ^|ø,ð˜[Í­æVPkÕZµÄ ýû~þ× çqçꢺ¨.`Ì€_3_3_3î î 
î„ü¼ü¼üeù¦¤Þ²[½­ž>eªù¦¤ŽÏõ†ä}”ÓôDp‡y}hjpÅÏ;lµ'øqé¿æ ø"F‹r;ÏÉïÚ¬Ò;€^rüÛÕ£AZ»Ý‹Ú˜Ó27${ÃHöÆÙ¼éi€Ý>Ñgñ™ mй€ƒ‘̈¬0˜iœ¸ä&áÒå4 E”fÀ!½Íþ(­û;uA&G”yb%%×ç¥A4¨êg,€ŒÍ Ê_rT)kPÐw†¶½ÄK¼yÁ¼`^TÕFµÀŒÀŒÀ 0³Íl3ÔuD2ÿ}¿þ÷ @’bHT#ÕH0¦ÓéàÛáÛáÛ¡¡¡P4·hnÑ\(Î-Î-Î…TRϘé㯑9­ê…lSóÍnoBIøø»;ö‚ñvÉg¹À¸6¾§`  dŽlmsMI jø‚K%%·ÙmA;Lu’æ%Â8>ö‚Žš+TŸ™k^:jOp€¦îY€"*¦ñº‚Ž©õö€{J4.Ý@Ûz¯ó-}Zú4°:Y¬N`ü`ü`üÒZZKkÀÂú××SäïóþNf0ƒ`Œ2F£ÀúÙúÙú+++ÀßÓßÓßr'äNÈÎJg¥³òìêR3«ïð5¨6åÚ´ž %ë¯[x7XÿI}$Cž‘f€P”tÞ8™Å¨â£Vw€[âðfò¾Ä©„õVªžïCsˆ©6I‚dqGNÓÅ °į~µs—Áž™ÔW-yb!*%W‚¶½ª%€Xî…á„/  CM‡ A²Õ»¦ØéÖ '˜|ê9ƒ’újþáýj.Ù^> ä–x‚FÝä+àw2Q;y÷N~¥–‚\îû*ÅR†+øДPôûUÊp_\雞 Õ¼×}Ã3€*šSö¯R[ÿ‰§‡j}•\c-üúcÛ”cÔó“F:éµ?Ñ6•@üØÞ@ÀÀŽ_@Â72ñ4ûH\5šsÔ°øf“Ú^dÏV=E¯PL.H iÞåÎKöyP²/ÞBÅ@õ+ÿqëÏ Õ«º²Ý¢³¿—›éfº™p^:[îÆÞ cž?ö`¸Ï%QJؤÊ/* X2Š€Cz¡„2ôæ¯DS pHÓaÀãžøxÀ¤‚¶Yò•¡}߃T´ŠBw‘pF†“¿÷΢×Kþ©l•„ÞÒ_½ú-½Þû?³–ÿ^^öÒœ·Ao`¸þP”&½Á}ðœÓK½!¤00À­ : öñäpÊžmH²?tŒ«í÷@ütu `Ð ô6il6†Ø"Îî…È+ñ—1¨ææóúƒ±?øZÖ®³¿NÑÔ¢©ES¡¤]I»’v²€?Jò,r|PíSûÔ>ðµòµòµ÷+÷+÷+°÷Ûûíý[?·~n}ðçøsü9‚3À ûÓ3祥 ©Ï4xµÿ{PüÀº'?œ òÛc{À–«KúªÀ-±Ö€Gçd=·w’:ï/[~Œ<À•¦ò |Ì‘€]ÿÄ/8?‚ŽÈ7ñÅp“@šýÿ _4ûô‡€ç´+¼°Tóà ƒgüY@€^ XwÖÏÂê$–aÖiDg’„£§ÓÄìP•òþ‡@ùj¥H‰tS瀶Y›@,â¯3ÃfÈRµÇª­»Ò{t{|ɇÀLߌÿmµÖ¿ïõpÉá çyìJ€+If¢A;îG‘[@,§fÑý€c†3.Lüñá #hýRRYÓøÐ4À£”þÈ”WbóÀ;Ä‹) >Œ)·!œ-fÖZHYåŶÇÁWèêv•±1±1±1pl± Ç&@¨c¨c¨#R©Ôßöê3õ™úŒ“8ÿkùß-'¤ hêIõ¤zòw§…¿·¿·¿7¸¿º¿º¿‚s¯s¯s/xdà‘P9¯r^å<0~2~2~ú³Úоò?4Þ^÷øãý:BÉò½>9 ¼á ßþTã#ýHоH€S€wFý÷܄:fN3ßI³ú¹÷®wEt4àÉKIÚðXÒ§ðEç'w4ˆÏmZTè¤=ÿAÀÖÏë ­“ðåz›® À¶…塨:LÃX*¨FÖ´ýÀvs¹ÿS ‹*I((ï,  §Kœ8ä!¦É>PÕ½s¢.8¯M?4 R(ÿÍÿØfþ‰¼GUݼÇÃ뀱™ž:‘Òû³J»Fño™¸î½ÅWiÆy>t:¯.}URgðs¿¹ßÜA_ÐôuÜ:nã9ã9ã9crLŽYdýëįòzÿ6ð‹‹‹ d’I&888@üÁøƒñ! ‡Â!((_P¾ <Í 4¡B¨B¨BäCùP><ƒÞ×é¦WBÑâ¥>áâ5ö¤ášâoé|þeæFý:Èýœ·t!Ç9Fb!øko©"ûy $èÔ+¨:®žÖKG´JÀX°û_öE†œæ –{«œ£ŸÌU®Á…7*ðÒ¶LüÀ=%ÚÿxCÀb¨úGÚ²ˆ«Õ ú¤9-_–~°’@GÿÓTg@êK© öoö=ö¹{0u|RßoqßMÙ@ŽzÔ|éf-+^•â”}-!þõ²õc&Y&¾ãH- ’LVwrêò–AuÀÐu=$`?j?:ì3j:QÎLâÿH– ²È]^¦¨!ÖÚ­­€âBctÙ…ªÞ¼ÔWCZ³ñóÖ#X(gHU×wé»ô]p$ëHÖ‘,ˆ555…ŒÛ3nϸýw§¹¯Ÿ¯Ÿ¯¨¡j¨ e(Cÿßõëÿ~pBNÔ!oe+[ÁoŒ7ƃÕÇêcõÀÖÀÖÀVp¯r¯r¯‚Âù…ó çCÎÀœ9¡ÜrCÊ ¦2•©§è½…ÙÒÒVÕxæ¼Z ž,|Š>Ø2çÓo²ÒÒû | n *­ÞjÐÅa‰/æ™U¦SÑ >ï»hh×Ý]ôˆ):p/è˜qg¨! 
‰œ<xüI¥"`r•þ´¶¯+>¤wzß.­¼R@eU@{CõçòÀŸøâO‡„¡€¹M~ô¤Å«oꔌ Ó™&>×s=×ñ Ç*«‘ББŠN:éü^Ôc]f]f]ÊR–²ø>ñOÈÿ»à„œˆô–ÞÒŒ²FY£,øžð=á{<¿ç÷üà•òJy¥ Kþ–ü-`.6›‹¡Ô²RËJ-ã$éáI™,÷«— ý¦ú9½»¹æµRP4mSïiAŠ¡tÀXk’ñ$(¿^ç>:Îv¾\Œ¤?HuÀä%o`8ÕóŽ>Ç9p–‘ ~ã2ßQÐQùÅL$˜à„Wül*AEU– gQ´èbo‹= ÈUÛòt°WTÈjxRO'hŸß’¡¡6é™ c·µp­ûƒCAçéÆÞ.ð ì·K²@ÅŒc¾: ïÉÐ.Å8ûp‰A6Í@oõ>p—õÂrüà_î\üVˆ¾·÷ÆŸžãp¹'›-3Ûw}ðþ÷²#ä|½àj ía³†ú^½+•@áÖ.)É„ŸJt èvNðœ"Õ‡¸×•jߘîõêMÀá Iùøˆ&E >2¼gä)#¢q§´…’¯½õ); tq‹w‡Cz£º…¥¥“*u†ö/cË /;/;/ Ë–),iii87pnà\°LË´LPãÕx5¨G½ä‚öÿ‹üï¢''Rˆgª™j&˜yfž™þOýŸú?…P~(?”)µRj¥Ô‚ÜN¹r;AþÕùWç_ çøÉŒ·SåMyQÍ‚ô9uÞïö¤nm6ü†K änodº‘Éñ . îFþ¿öÎ;<«*kû¿}ÊÓRH¡BUšCKEDDQ^Š£ðÚ+¨8£3RÞTAGEQäPF -´¡CBž”§œ¶¾?ˆeÆq|gF×ûŸsåºr}ž}ö^gíµîu¯¸žĸœW@ùÏRtS¹”rŠU 1÷בh¥¦ƒØrÐÍ<Ç‹.ïHe?w½ŠÑtÆøæ5,@“)îN˜“Í„òpZ¶º²Ñçë7ù><ÙÈ‹ßyßrNJMSàø…Š@,5R Ià<à|gQì Ï)Œ^FeSÓößyßÊ«OµW#@œØË%Ÿ±ËçÀ|­~Ëì:È«i7~*nÞ8éùS`O+Yš_)?þÏÁí2Ì] ñ’Ãç¯xœ+w5xi!õät¤;¨ŒTû©ró¾ò‹äÀqâÑ… –ìw+]]õ‰«R§;(M›äVËX4åÇ¢¨àjÞw›m̆è‹Ö<êBÅD¦¤n‚Ä#í»Ý‡ä¹-îîù>ð'ÕXûkGÊB )„ðýáûÃ÷Ãé÷N¿wú=Hè•Ð+¡kkkožožoè)zŠž*U¥ªTþ6Sî_„] àjÉáŽpG€½ÙÞlo†XA¬ VåÍÊ›•7ƒÈË‘—#/Cíaµ‡Õ)ÝSº§tç¬gñ-f ÚùÄÄ­«!üâoN#ðií8¶B⯧¿z;µß} Tïö[@™5ëD ÄöºÆï CUnô€¦F«º |æ¾äý 1ý„¯2¸vð,£®òË\Ùæ[ã(ÛA™ö®òŽ q§Qté´\ÖËpГÏkþ_×€·ïIJMÅ I…v ê€^)°¢}ƒÒºM–€ò›ÏVØŽÉ€ÇxU|ŠPDÙχ ¦k;Ak|¢æª¦ë·‚X㓪;žQØ2ÇýÒÚ^nñŸ÷µ Ö»\꺌ÄËW±û7oÌ«Kø¾Ã7C|Gò’ó>ß[Œëÿ4¤þñKîmÿÙË€6\Á­ß±n—qn8Ãcó_»<2¢è>`íÞñ¯ü7˜«¢ùû5P–ÖÄpAŠ)äXõsè ÊïÖ²ž±íf¥ O^ÊêÄéÙ Êǯµw$·UðNwY¯M÷SùPÿD\k˜\ñ+‚  Æ˜öÓFG!¸¸~“¬uÀ9œÿ5O´zÝæ0„W„W„W@Q“¢&EM ¡(¡(¡’’’ÀŸèOô'‚ÑÎhg´«–ô¢#ùºÿ[üxÀ©"ÚTQŠÝ‹Ý‹Ý‹ÁþÈþÈþ¢ë¢ë¢ë ¢ME›Š6Ñ#zD‡ôµékÓ×BêþÔý©û븎ëþö0Ö]%ûí…c˪¹ @Ë/Ù¼ý0$øü3´L0 ¼™§Úf_S|;H9Œ³t•û|* ]á›xþ‹“‡ƒX¬Ñþ€RÐ^ BÞ${ˆe ¿8r‡T–epí@³iC.ú—>w`xÛŽmÎ|ì÷U¶éNf'ìåÓ®Ñ.´`JÁ 1=l–ÈN>úÚóVrámN± ´1¾E‰ýAýÉ7:ù •!ÏÊ¡c|rGiÚQ x;qi£] ԟѱ1x w_õç™ ÷•8ÖTŠÚ¤û zS7 ~³ïã”àz¡/Ný•š¯¶Úú”û‚vÈ÷«„KAM M«s¸Oþéó·Àë[¸ns!|êíÍËÁ·1õ/Íã`¼[{í…ýA®;0g¾’§¦è• Ð *7vç‚òÑÙ»Ð⟕Ήy[pn•ì7UR\.Å iEgÖ²)ð'°ë8¬VÜ'xÍk³–@Šuqï­À÷§ÔüÌšß±Ž³˜ÅPÒ²¤eIK85çÔœSs ᙄgž„¬„¬„,ðgú3ý™`ô1ú}@SšÒ0ˆA ú·ï¾³ø×ÇþöÈ•‚Ug%½¶^[¯ œÃ9œ²WöÊ^`3˜ÁÙöå%%%àÍöf{³¡æÄškNä¬`É7á{)¥É¹Í æŒKî© á ;§.Z¥ >½t„î‘Ýçÿrý:#´Ã¤»GA5g‰„AÊq Pù•ì—"»5(Ÿ;<ÞÀø$XÙv<Žht“÷Çu#‡^'õ…:ÓH‡¥rð¾ö7^¬Ñ°õa– 
¼‰f&àñªº»êþIæ¾ÀŸ@,}•YIð©}T뾋¨,Š9…Ùå´Ž$‚ºÙH ´µD?àË„æª#H'ëžÒ…I5ZꦌW$€×dOÛ7ꀌ¨ØvìrÀÔÞ×ÚƒD+Å?õsä¶€à]*#Ö¼¸=éàcà¶;zGþð:I%N¶}u±úªfnIµû¸¶²ÌK‹së>I¿Áªn _ì56ã©ìÅ %öm8 ˆUô˪±Æ„ÊW[iÀœÛâ@,Y`w˜VÓÜÞ…záŃR@ÈOù®ßžö´‡â!ÅCЇÀéN¿púH8˜p0á „ö†ö†ö‚¿µ¿µ¿5ÎFgЊµb­ø5¿þë=~þ½øñ<€o¢JH@\qů×ÎkÖ2k™µ âãã¡"©"©" Ê'”O(Ÿ)ñ”xJj?Tû¡Ú*U¥ªôo#ÞÇn]¨¸ ¿øãP~ëö‘/} ¾:ñÔ÷@hªï-õ Úp·)¨\™ã-¡²¨æ58Ų0T'£!(Íg2K½¥¿Êï¶Š ±¬e¿„=ÌÀøµ·D>wƒvq1o%•d®¯IÞþyuA¥ÉÁHÜ+3Ïeê3Œ‹@¼à5ž\õ¼^IL:Æö¿÷– rå5P~C®­8ðeê^ Ì{Î=‚~$©¨iu‡e·¯ÝÞoné_:4ÿr Qyz€Ê(ùWp˜/@µµz ¨ìQã E܈“Þìÿ÷ÉPo7êÚï(çÞòÈáK>QÕÎèrRºMAkVûWÙy Üšu³òÀ‹ï_òz_2ýj;P¦ qñâ³ÃÀ•ç8¨i*0iÁ­ -ÔmÚµà”{3õ=¹×ê+wUáOhpßjõÆ/@Âê̺$‚JÔþh<ôë'S2%Š¢EÑ¢(”&–&–&B†„  ¡kBׄ®àßãßã߯ c1´ºZ]­.0™ÉLæ'ƒÏø&ªøgˆZž–§å¯³¯³¯3g»óñ¨íj»Úááá`µÇÚc¡î亓ëNc‚1¡êKñ5¨­‹~÷4¦àÛ–Ö½Ùj(µ½áœã¾ÿ؇«€Ðï 3Þü´! á:÷bP5yó€rùg ;§# üÆe¡Î€8áÈL@˜#•UiuÕƒUƒÕÌš\ÌV—â]Úõ5—¶ûÔQí:=@*ìKA%ÈãuÀôE®\m§Q@V¹ü ¿×üi§uÝ!ñÏ@vÛ‹¢ÿ ê‹PqFPùé.ž Þ yXi_º8? $ªÕz¥Ð†w¶Þü›uç@¢^Ä] *hIô- ÐŸ*¯®³'\ RDcÊšïŸ Zÿôy—,¯Dݯמ¬\µM{$­Øûr6¨–u_ìø_ÀA«îË€Û\Ð0ÜhìKPºüÁÉ’ÈR-@þLoõð¦°\oñ¶¢";ûv‚þFFbç¦6­õç·å‚ѓ€âß5}ÎÇÎÇÎÇPо }A{ˆŽŠŽŠŽ‚¤QI£’FAp]p]pÝW\ýF£ÇWÒzOñOýÛvÓ÷ÆOÇø&"Dˆp–ûìÞåÞåÞö`{°=âvÜŽÛ]]]e×—]_v=èez™^õŽÖ;Zï(‡‡‡üýád¼{S|Tä,_s%”ÿyWÛ7çƒñëH‡C@p¯­Ú ¾Qzª< jº÷PýÕ›ª­¿à;2Ïy2V;ð8ã‰üµ4¡ÍL±@ ¬®µ´Â&o ÜޑݯϚ²Éi¹T óÍ@Tu ¼b3Z} @œ’hF+=Ór®WJ%jõ?­¼é¼þ-ÀK'Îâü&ÚÐcTLpjx•Á­¶ß“®ã⌓õ úùö%}š:粫qNþem1ÈÒòýšÁàÖ”Jf²ù{Ý¿šHUÙ“07ú2`8cJ¯e˜›Òo±xMOÈ3ª=áýY-%m]ˆ†Ûì›$ªÍÚi ÄJÙ,M>² ´ÍM:\ú ¨M)šN~$¡ð1°G[£¬ˆî³\7öƒúkiYà¸EÓNË!eCýªµ`,Ý›ö=úMH‰”H „§…§…§Á©ŒS§2ÀL3ÓÌ4Hžš<5y*òy<ðíõíõí½¿Þ_ïÚ~m¿¶ŸŸüÆ?ƒà‡âŒ!XËZÖ‚>QŸ¨Os­¹Ö\ þgüÏøŸPv(;” 5ž®ñt§!08080NÞ|òæ“7ñnǺëöóöóöóXcHèÖ´HµÚ|Ô?iñW+Šëu?¥ç¹M“ò¡|¿S7p)XªY` x Íþ‰Ÿƒ<Ì õ2¨D~ÅpPiø‰òáQhÔæ"=ÞÖ€,°:”^ ê©Àô€Æþø[µsÏð4;º*¨*-¡ à[ ÿjoN\>’Û@ej¿÷ù@[ß´Eß~åWï9ÞÚ£ã—5UC íÊEc*Yð•W¾óZżã¯0>wû–nZÙ…€F í@NxÃ,Áï­ j]rŸF;5rGÕ<ÅAÕdºêÒ >>ÜÜa±Ú% ¬PÂÖ¦£¡|»=ÝçBi·qÒ ¨~´{Ò*:twR·ùC¿Áèêö}6¾½ÁÞ`o€ã[Žo9¾ŠfÍ*šÁŠ`E°’#É‘ä„ú„ú„ú€ÿÿ#þGÀ8j5Ž‚¶EÛ¢mnåÖïL{þÄðÓ=ü=Teä^¹WîgŸ³ÏÙv©]j—B¼V¼V¼ÄÄÄ@ù3åÏ”?ê÷ê÷ê÷Þ!½CzH“<&y ¨ Õ…êÂï1îÙìAüùð“GCä¾£›F€­ž¿m+…öÿ;u>˜Ï;‹Ã`&j'½í ªõ—ëAP2T7ù\Î)‘7Úõ–u¸ èmÞjÞÕ‡·¿³ð«æj ³‰tPþبý;§2Ö´*÷u§ØÇe#h¯e^Õ; ÌL<$=¯÷+Q¯…s¨D}R’€Ä}³ÒÛ=ª<%‹ß9#ç0TÈ[ö HÜšY8 pX.Å üª•YÐ|o×Û 
R¡Þ4z‚ú$q}þ =^ÿž+ë‚ûòÞ´ùÍ@z3½Öà^*9*ì2o¸zì·S¬¶ãÁ¹¡Æµ­?óï‘Åšr΀ö—ƒÿó:0EeýUfÞ7§g‡ìPÖ­¬[Y7(º±èÆ¢QŒb$ÞxwâÝ ¾®¾®¾®`ö7û›ý¿¢É—¦ÒTÐŒf4û±7Å?ŽŸ¯8ƒ*mÂ3iCo¬7Ö vŽc瀷âVâ÷Åï‹ß‘‘‘ý]ôwÑßAÂÉ„“ '!ý®ô»Òï‚@@@ ô¿º±¾‰Ùô’ÏÀ®]QTôDï;qlë!ˆdý'õ@. Ÿ·çQðÍ$l½¾¦æSšÆ%zT5­%ÙÞ }šÔáÜi }–Ñõ’{Á;–·öh Htï! FK.ð6ÉÀ‹¬¿÷šÉdÕ°Š¥Ð2ñ¹KA»°ÎæKË@µÉèzQx§wͲÌúM¸ð©ûµJ%Aê:P>_NíwAbzZ»äóÛ3«¥êZ¹Zjr5(ÓÛáüЬy' Í»>þ*TÔ€WþXúoRꃼjέ• ÒJß T4þíM©`yù±€uuéÉãe`æÅÀÌmþüP!p;‹T£ïñžNp‚ooo……………… ÁµÁµÁµ²BVÈŠ?ÅŸæPs¨9Œ———A34C3€¬`gƒ×?Wüü ÀœÑ#ïBº€ô‚^œ—œ—œ—ÀŽØ;ÖXk¬5b›c›c›¡b@Å€Šà^ë^ë^ )-RZ¤´€šÙ5³kfƒQj”¥ÿÀsT‰LxØ÷FN@좢&{o‡(ÇÒ>; ö¹E}vlU/Þ¹ð÷`®Wûí•à›ìßë| šê;˜qxÃÛ‡@Å⯟Z j™!…À-ø¹ÔùV…½P±¿ØylÂÒÔ³7’G5\ÚîfçßÔ¼úûB¯2¨|ôƒ Bj³Þ 8“ÖS–i@P«8ˆ¯KÝ…€¨ƒzÀ ÂgT†kSÉw žscñ• ;ì]%7Ô¯´z ½ª+A^–™ª!¸$Y{OKHÿìw½õþ·Á_wa§wÀ{P)Ÿ Æ1õ”ׂfýuí/‚À—µö4´'ͧƒÿpÃ'ËÉr² xVñ¬âYÎ g†3Á¸Ç¸Ç¸BSCSCS!°3°3°|†Ïð`Ž5ÇšcAÏÓóôîü#C_©_Ø/ç­°Ã:h Žë‰@Mt9ø•ç;EŒ:ÀiÎQ€½Æi ÀBó‚”›@^æ$=@n”j+H©÷ëøëw‡÷±ÜN¦w?ÇÁm/Ž!àæèÓ“€ªta³D0¤û/jGêý&+üzZ&¥ _kŽ÷5“«&þãÓììwö;û!üÛðoÿ…’=%{Jö€Œ—ñ2BBB ð`àÁÀƒà¿Áƒÿ0]Ó5]Ð'é“ôI 5Òi¾Â+9ó8ïÇ^Üÿ|üç€oâG8ÂÙ>é^k¯µ×ܸwã`ϱçØsÀh´B¼k¼k¼+D»G»G»Cì‰Ø±'@{H{H{RÂ)á”0¤|šòiʧ`Ž6G›£ùáRM‡ÙÅzðpºGçƒó58V¬ŒpíƒCÁªU°âóóÀiS;´¤K´ àsव%ÜT«°¢ÔòøºØ60iôzÏ7@[ÉL»$çHxQOP>Uß{Ô«jŒºÔºP)wíá¼ÈjÖ‚Ü.ÏÊ&G 4¢ 4‹joïZú'w®”’ÒÃy­´dõå ¯cj]P«B-ê½Æ—5^È4Á÷ZÚìæ5Áw*e{“kÀ˜’”Vï Ð,c}p Ѐ–tþóv€{½Î^á}á}á}vÃnØïJïJïJ. . .…àôàôàtðÝá»Ãw˜Ùf¶™]-¶©?¯?¯?j¬«Æ˜À~°ÞþÏÿùà Δ¼Ã;¼rXËaðy‡¼Cà®w׻뫳ö8{œ=âÆ? 
±ÜXn,b'b'b'@-UKÕRHj›Ô6©-¤lMÙš²333A]¦.S—ýž»‚1ÓÞg xóí;+v»/Ö­äRp&Gž;•îîXç²…à¥v×Ù n“_þE@ÚÅ®9>$É™»dšôwv¿ñ2J@PÀ8-ßÈu¯zÓh ªÌx#x9¨º ZÓÀÄôf 虼 ôîþ‰‰=À¸?ôDÍ@o–6´¾Pb¨dívã) À·ô~d£l”›››áÜpn8Ê—-.[ ìe/{!Ø.Ø.ØŒ@ø®ð]á»Ì&f³ A#hAߤoÒ7¶B[¡­6²‘@sšÿ ÙÿHüß1ßD)¥”™ÈD%²D–€»ÑÝènw“»ÉÝÎ{Î{Î{`²Ù‡ÀªgÕ³êA|^|^|ÄܘsÁàp@`n`n`.$·OnŸÜ‡'O¾_Ž/‡o œü«±‡l>`"«d„— |ʲºêp /«N@g5C;èÅãlZpþ÷ÖÎûg ªJÔi´FB¹”K¹@ÙȲ‘e#!.q‰ èIz’žZZZàïáïáï¾ù¾ù¾ù`î6w›»Á8iœ4N‚¶MÛ¦míˆvD;ÊQŽr€žô¤'ÿ1gúÿw À7qæ¨PE4’¥²T–‚›ïæ»ùàF܈·©ÛÔmZÍH´·ÛÛíí`í³öYû öNìØ;`¥X)V ¨ijššaaaÔ1©cRG  ÿ ÿ ÿ PëÔ:µîÇž„¤£t”Ž`m¶6[›¡âÁŠ+„ò¬ò¬ò,ˆµŒµŒµrÉ%|5}5}5!Ð/Ð/Ð|¦Ïô™`j¦fj`ÜnÜnÜÆcŠ1´eÚ2mh_j_j_‚RJ) e(CÀ?Ç3ù¹ãð·pÆ TÉËçò¹|^c¯±×ÜQî(w¸‹ÜEî"p‚NÐ ‚=ÞaÏ'ÓÉt2Áêdu²:A|Z|Z|ØžíÙg«Í^f/³Sƒ©ÁT­ ­ ­‚@n 7 æ5æ5æ5 /×—ëËùÉ“| ãÏxp¯q¯q¯©6±›c7Çn†è´è´è4ˆfG³£Ù`×±ëØu@-WËÕr0›™ÍÌfàëë æJs¥¹Ì5æs WWW‚±ÚXm¬=YOÖ“A Ð€š­f«Ù "*¢"À†0„jBÙ/ø~1ßUJ/La S@’$I’@“Çä1p ÝB·¼[½[½[ÁÝãîq÷€3Ôê ç\ç\ç\p;‡Ã`wµ»Ú]Á^m¯¶WƒagØànq·¸[€ãç8è=¢GÀìfv3»œœøfúfúf‚ÙÖlk¶óRóRóRÐÛëíõö 5ÖkAV£ÕhP…ªP‚zE½¢^zÑ‹^_ù}ò!‚ “a2 ¤–Ô’Z 3e¦ÌüJ¬$×ÍusÁγóì<°ÚGí£`M±¦XSª«7íö û¸½Ü^n/ }èF?£ŸÑ Ïð |=}=}=Á\f.3—‘j¤©Õgtcœ1κ£;ºSÝŠN;¨Ô‚º^]¯®çl×곿ˇïkÔë_ðWñ‹ø¡ˆ% ¬d%+9KõN{§½Ó “e²L¯–W˫ޫޫޫàöuûº}Á ¹!7nž›çæ»ÝÝîng±³ØY\­”ätp:8ÀéíôvzWÇ(¼åÞro9È&Ù$›¨”ó¶€rÈíííPmU[ÕT?ÕOõu\WÇ©®©ð)ŸòÌ—ù2¤…´ Ëe¹,)’")9)'å$È`,ƒ9›;“.ÕîÔîÔîýýý0ÖkŒ5`ì2v»À˜cÌ1æ€~£~£~#è­ôVz+ÐÓô4= ô½ú^}/è/è/è/€v™v™vh9ZŽ–êFu£ºT‘*REÀ£<Ê£œ•¡ÿ¿z†ÿßâðÏÆOa‹Xr§Ü)wA‚ÁËñr¼‡åay¼l/ÛËO÷tOo§·ÓÛ Þfo³·Ü5îwMõßÞ>oŸ·¼r¯Ü+OyÊSÕ¢ªgÒ_2A&ÈÕ²ZVƒ$J¢$‚ 2˜ÍlfSÍd{˜‡yÔ µB­¨6 j ¨‚6K›¥Í-WËÕr«÷›–¤%iI ehZh-µ–ZKÐ:h´ÕW½­ÞVo ZC­¡Ö´­@+õŽzG½SýeW-T ÕÔNµSíçq²É&›_Îîÿdübþ]8#jZõ…f:Ó™2PÊ@æÒ\šÃ™·q&M)ke­¬Y&Ëdx ¼ÞŲXƒ¬”•²d§ì” R! 
u¥®Ôé,¥3È8'ã€Wy•W£å(ÕÊLq£ÍhPsÕ\5Ô'êõ ¨¸Š«8¨t•®ÒAe©,•ª§ê©z‚ÖWë«õýÊ—újuµº´,-KËUKÕRµ¾r6ÿ„Oø¤:Hzö¬ÞšÖ´æ?>ÿþ“ü‚®¸âŠÈq9.ÇEäù@>‘á2\†‹ˆ%JÄ{Ò{Ò{RÄ-qKÜw°;Ø,â¬wÖ;ëE*)¯"öÛöÛöÛ"v–eg‰X[¬-Ö‘J ´ˆu‹u‹u‹ˆÕËêeõ±n°n°n±n³n³n±žµžµž±¢VÔŠŠØìö{¯½×Þ+âÜåÜåÜ%âXŽåX"îTwª;UÄkî5÷š‹xïyïyï‰HKi)-EäiyZž©<ªˆH©”Jé=é¿à ~ñ~.¡ºæ¡*HHyä;ÙÉN`;Ø|Îç|P@ÈVÙ*[©þòŸeUU‚g\ëª3µÊT™*hLcmiK[  mhCµ~}CÒ¨CêðK´ýg†ÿpú#_÷câ–%tEXtdate:create2014-12-31T13:30:38+02:00d·QÛ%tEXtdate:modify2014-10-17T01:10:21+03:00¢BÙaGtEXtsvg:base-urifile:///home/andrew/projects/aiohttp/docs/aiohttp-icon.svg!hËïIEND®B`‚aiohttp-3.0.1/tests/autobahn/0000777000000000000000000000000013240305035014253 5ustar 00000000000000aiohttp-3.0.1/tests/autobahn/client.py0000666000000000000000000000250713240304665016117 0ustar 00000000000000#!/usr/bin/env python3 import asyncio import aiohttp async def client(loop, url, name): ws = await aiohttp.ws_connect(url + '/getCaseCount') num_tests = int((await ws.receive()).data) print('running %d cases' % num_tests) await ws.close() for i in range(1, num_tests + 1): print('running test case:', i) text_url = url + '/runCase?case=%d&agent=%s' % (i, name) ws = await aiohttp.ws_connect(text_url) while True: msg = await ws.receive() if msg.type == aiohttp.WSMsgType.text: await ws.send_str(msg.data) elif msg.type == aiohttp.WSMsgType.binary: await ws.send_bytes(msg.data) elif msg.type == aiohttp.WSMsgType.close: await ws.close() break else: break url = url + '/updateReports?agent=%s' % name ws = await aiohttp.ws_connect(url) await ws.close() async def run(loop, url, name): try: await client(loop, url, name) except Exception: import traceback traceback.print_exc() if __name__ == '__main__': loop = asyncio.get_event_loop() try: loop.run_until_complete(run(loop, 'http://localhost:9001', 'aiohttp')) except KeyboardInterrupt: pass finally: loop.close() aiohttp-3.0.1/tests/autobahn/fuzzingclient.json0000666000000000000000000000042413240304665020051 0ustar 00000000000000{ "options": {"failByDrop": false}, "outdir": 
"./reports/servers", "servers": [{"agent": "AutobahnServer", "url": "ws://localhost:9001", "options": {"version": 18}}], "cases": ["*"], "exclude-cases": ["12.*", "13.*"], "exclude-agent-cases": {} } aiohttp-3.0.1/tests/autobahn/fuzzingserver.json0000666000000000000000000000033113240304665020076 0ustar 00000000000000 { "url": "ws://localhost:9001", "options": {"failByDrop": false}, "outdir": "./reports/clients", "webport": 8080, "cases": ["*"], "exclude-cases": ["12.*", "13.*"], "exclude-agent-cases": {} } aiohttp-3.0.1/tests/autobahn/server.py0000666000000000000000000000264013240304665016145 0ustar 00000000000000#!/usr/bin/env python3 import asyncio import logging from aiohttp import web async def wshandler(request): ws = web.WebSocketResponse(autoclose=False) ok, protocol = ws.can_start(request) if not ok: return web.HTTPBadRequest() await ws.prepare(request) while True: msg = await ws.receive() if msg.type == web.WSMsgType.text: await ws.send_str(msg.data) elif msg.type == web.WSMsgType.binary: await ws.send_bytes(msg.data) elif msg.type == web.WSMsgType.close: await ws.close() break else: break return ws async def main(loop): app = web.Application() app.router.add_route('GET', '/', wshandler) handler = app.make_handler() srv = await loop.create_server(handler, '127.0.0.1', 9001) print("Server started at http://127.0.0.1:9001") return app, srv, handler async def finish(app, srv, handler): srv.close() await handler.shutdown() await srv.wait_closed() if __name__ == '__main__': loop = asyncio.get_event_loop() logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(message)s') loop = asyncio.get_event_loop() app, srv, handler = loop.run_until_complete(main(loop)) try: loop.run_forever() except KeyboardInterrupt: loop.run_until_complete(finish(app, srv, handler)) aiohttp-3.0.1/tests/conftest.py0000666000000000000000000000052213240304665014660 0ustar 00000000000000import tempfile import pytest from py import path pytest_plugins = 
['aiohttp.pytest_plugin', 'pytester'] @pytest.fixture def shorttmpdir(): """Provides a temporary directory with a shorter file system path than the tmpdir fixture. """ tmpdir = path.local(tempfile.mkdtemp()) yield tmpdir tmpdir.remove(rec=1) aiohttp-3.0.1/tests/data.unknown_mime_type0000666000000000000000000000001513240304665017060 0ustar 00000000000000file content aiohttp-3.0.1/tests/hello.txt.gz0000666000000000000000000000005413240304665014744 0ustar 00000000000000‹Ð1%Uhello.txtËHÍÉÉWHÌÌÏ())à6¨‚aiohttp-3.0.1/tests/sample.crt0000666000000000000000000000146613240304665014464 0ustar 00000000000000-----BEGIN CERTIFICATE----- MIICMzCCAZwCCQDFl4ys0fU7iTANBgkqhkiG9w0BAQUFADBeMQswCQYDVQQGEwJV UzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuLUZyYW5jaXNjbzEi MCAGA1UECgwZUHl0aG9uIFNvZnR3YXJlIEZvbmRhdGlvbjAeFw0xMzAzMTgyMDA3 MjhaFw0yMzAzMTYyMDA3MjhaMF4xCzAJBgNVBAYTAlVTMRMwEQYDVQQIDApDYWxp Zm9ybmlhMRYwFAYDVQQHDA1TYW4tRnJhbmNpc2NvMSIwIAYDVQQKDBlQeXRob24g U29mdHdhcmUgRm9uZGF0aW9uMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCn t3s+J7L0xP/YdAQOacpPi9phlrzKZhcXL3XMu2LCUg2fNJpx/47Vc5TZSaO11uO7 gdwVz3Z7Q2epAgwo59JLffLt5fia8+a/SlPweI/j4+wcIIIiqusnLfpqR8cIAavg Z06cLYCDvb9wMlheIvSJY12skc1nnphWS2YJ0Xm6uQIDAQABMA0GCSqGSIb3DQEB BQUAA4GBAE9PknG6pv72+5z/gsDGYy8sK5UNkbWSNr4i4e5lxVsF03+/M71H+3AB MxVX4+A+Vlk2fmU+BrdHIIUE0r1dDcO3josQ9hc9OJpp5VLSQFP8VeuJCmzYPp9I I8WbW93cnXnChTrYQVdgVoFdv7GE9YgU7NYkrGIM0nZl1/f/bHPB -----END CERTIFICATE----- aiohttp-3.0.1/tests/sample.crt.der0000666000000000000000000000106713240304665015232 0ustar 000000000000000‚30‚œ Å—Œ¬Ñõ;‰0  *†H†÷ 0^1 0 UUS10U California10U San-Francisco1"0 U Python Software Fondation0 130318200728Z 230316200728Z0^1 0 UUS10U California10U San-Francisco1"0 U Python Software Fondation0Ÿ0  *†H†÷ 0‰§·{>'²ôÄÿØtiÊO‹Úa–¼Êf/uÌ»bÂR Ÿ4šqÿŽÕs”ÙI£µÖã»ÜÏv{Cg© (çÒK}òíåøšóæ¿JSðxããì ‚"ªë'-újGÇ«àgNœ-€ƒ½¿p2X^"ô‰c]¬‘Ígž˜VKf Ñyº¹0  *†H†÷ OO’qº¦þöûœÿ‚ÀÆc/,+• ‘µ’6¾"áîeÅ[Ó¿3½Gûp3Wãà>VY6~e>·G …Ò½] Ã·Ž‹ö=8šiåRÒ@SüUë‰ lØ>ŸH#Å›[ÝÜyÂ…:ØAW`V]¿±„õˆìÖ$¬b 
Òve×÷ÿlsÁaiohttp-3.0.1/tests/sample.key0000666000000000000000000000156713240304665014466 0ustar 00000000000000-----BEGIN RSA PRIVATE KEY----- MIICXQIBAAKBgQCnt3s+J7L0xP/YdAQOacpPi9phlrzKZhcXL3XMu2LCUg2fNJpx /47Vc5TZSaO11uO7gdwVz3Z7Q2epAgwo59JLffLt5fia8+a/SlPweI/j4+wcIIIi qusnLfpqR8cIAavgZ06cLYCDvb9wMlheIvSJY12skc1nnphWS2YJ0Xm6uQIDAQAB AoGABfm8k19Yue3W68BecKEGS0VBV57GRTPT+MiBGvVGNIQ15gk6w3sGfMZsdD1y bsUkQgcDb2d/4i5poBTpl/+Cd41V+c20IC/sSl5X1IEreHMKSLhy/uyjyiyfXlP1 iXhToFCgLWwENWc8LzfUV8vuAV5WG6oL9bnudWzZxeqx8V0CQQDR7xwVj6LN70Eb DUhSKLkusmFw5Gk9NJ/7wZ4eHg4B8c9KNVvSlLCLhcsVTQXuqYeFpOqytI45SneP lr0vrvsDAkEAzITYiXu6ox5huDCG7imX2W9CAYuX638urLxBqBXMS7GqBzojD6RL 21Q8oPwJWJquERa3HDScq1deiQbM9uKIkwJBAIa1PLslGN216Xv3UPHPScyKD/aF ynXIv+OnANPoiyp6RH4ksQ/18zcEGiVH8EeNpvV9tlAHhb+DZibQHgNr74sCQQC0 zhToplu/bVKSlUQUNO0rqrI9z30FErDewKeCw5KSsIRSU1E/uM3fHr9iyq4wiL6u GNjUtKZ0y46lsT9uW6LFAkB5eqeEQnshAdr3X5GykWHJ8DDGBXPPn6Rce1NX4RSq V9khG2z1bFyfo+hMqpYnF2k32hVq3E54RS8YYnwBsVof -----END RSA PRIVATE KEY----- aiohttp-3.0.1/tests/test_classbasedview.py0000666000000000000000000000237713240304665017103 0ustar 00000000000000from unittest import mock import pytest from aiohttp import web from aiohttp.web_urldispatcher import View def test_ctor(): request = mock.Mock() view = View(request) assert view.request is request async def test_render_ok(): resp = web.Response(text='OK') class MyView(View): async def get(self): return resp request = mock.Mock() request.method = 'GET' resp2 = await MyView(request) assert resp is resp2 async def test_render_unknown_method(): class MyView(View): async def get(self): return web.Response(text='OK') options = get request = mock.Mock() request.method = 'UNKNOWN' with pytest.raises(web.HTTPMethodNotAllowed) as ctx: await MyView(request) assert ctx.value.headers['allow'] == 'GET,OPTIONS' assert ctx.value.status == 405 async def test_render_unsupported_method(): class MyView(View): async def get(self): return web.Response(text='OK') options = delete = get request = 
mock.Mock() request.method = 'POST' with pytest.raises(web.HTTPMethodNotAllowed) as ctx: await MyView(request) assert ctx.value.headers['allow'] == 'DELETE,GET,OPTIONS' assert ctx.value.status == 405 aiohttp-3.0.1/tests/test_client_connection.py0000666000000000000000000001014413240304665017570 0ustar 00000000000000import gc from unittest import mock import pytest from aiohttp.connector import Connection @pytest.fixture def key(): return object() @pytest.fixture def request(): return mock.Mock() @pytest.fixture def loop(): return mock.Mock() @pytest.fixture def connector(): return mock.Mock() @pytest.fixture def protocol(): return mock.Mock(should_close=False) def test_ctor(connector, key, protocol, loop): conn = Connection(connector, key, protocol, loop) assert conn.loop is loop assert conn.protocol is protocol assert conn.writer is protocol.writer conn.close() def test_callbacks_on_close(connector, key, protocol, loop): conn = Connection(connector, key, protocol, loop) notified = False def cb(): nonlocal notified notified = True conn.add_callback(cb) conn.close() assert notified def test_callbacks_on_release(connector, key, protocol, loop): conn = Connection(connector, key, protocol, loop) notified = False def cb(): nonlocal notified notified = True conn.add_callback(cb) conn.release() assert notified def test_callbacks_on_detach(connector, key, protocol, loop): conn = Connection(connector, key, protocol, loop) notified = False def cb(): nonlocal notified notified = True conn.add_callback(cb) conn.detach() assert notified def test_callbacks_exception(connector, key, protocol, loop): conn = Connection(connector, key, protocol, loop) notified = False def cb1(): raise Exception def cb2(): nonlocal notified notified = True conn.add_callback(cb1) conn.add_callback(cb2) conn.close() assert notified def test_del(connector, key, protocol, loop): loop.is_closed.return_value = False conn = Connection(connector, key, protocol, loop) exc_handler = mock.Mock() 
loop.set_exception_handler(exc_handler) with pytest.warns(ResourceWarning): del conn gc.collect() connector._release.assert_called_with(key, protocol, should_close=True) msg = {'client_connection': mock.ANY, # conn was deleted 'message': 'Unclosed connection'} if loop.get_debug(): msg['source_traceback'] = mock.ANY loop.call_exception_handler.assert_called_with(msg) def test_close(connector, key, protocol, loop): conn = Connection(connector, key, protocol, loop) assert not conn.closed conn.close() assert conn._protocol is None connector._release.assert_called_with(key, protocol, should_close=True) assert conn.closed def test_release(connector, key, protocol, loop): conn = Connection(connector, key, protocol, loop) assert not conn.closed conn.release() assert not protocol.transport.close.called assert conn._protocol is None connector._release.assert_called_with(key, protocol, should_close=False) assert conn.closed def test_release_proto_should_close(connector, key, protocol, loop): protocol.should_close = True conn = Connection(connector, key, protocol, loop) assert not conn.closed conn.release() assert not protocol.transport.close.called assert conn._protocol is None connector._release.assert_called_with(key, protocol, should_close=True) assert conn.closed def test_release_released(connector, key, protocol, loop): conn = Connection(connector, key, protocol, loop) conn.release() connector._release.reset_mock() conn.release() assert not protocol.transport.close.called assert conn._protocol is None assert not connector._release.called def test_detach(connector, key, protocol, loop): conn = Connection(connector, key, protocol, loop) assert not conn.closed conn.detach() assert conn._protocol is None assert connector._release_acquired.called assert not connector._release.called assert conn.closed def test_detach_closed(connector, key, protocol, loop): conn = Connection(connector, key, protocol, loop) conn.release() conn.detach() assert not 
connector._release_acquired.called assert conn._protocol is None aiohttp-3.0.1/tests/test_client_exceptions.py0000666000000000000000000000103413240304665017610 0ustar 00000000000000"""Tests for http_exceptions.py""" from yarl import URL from aiohttp import client def test_fingerprint_mismatch(): err = client.ServerFingerprintMismatch('exp', 'got', 'host', 8888) expected = ('') assert expected == repr(err) def test_invalid_url(): url = URL('http://example.com') err = client.InvalidURL(url) assert err.args[0] is url assert err.url is url assert repr(err) == "" aiohttp-3.0.1/tests/test_client_fingerprint.py0000666000000000000000000000457213240304665017770 0ustar 00000000000000import hashlib from unittest import mock import pytest import aiohttp from aiohttp.client_reqrep import _merge_ssl_params ssl = pytest.importorskip('ssl') def test_fingerprint_sha256(): sha256 = hashlib.sha256(b'12345678'*64).digest() fp = aiohttp.Fingerprint(sha256) assert fp.fingerprint == sha256 def test_fingerprint_sha1(): sha1 = hashlib.sha1(b'12345678'*64).digest() with pytest.raises(ValueError): aiohttp.Fingerprint(sha1) def test_fingerprint_md5(): md5 = hashlib.md5(b'12345678'*64).digest() with pytest.raises(ValueError): aiohttp.Fingerprint(md5) def test_fingerprint_check_no_ssl(): sha256 = hashlib.sha256(b'12345678'*64).digest() fp = aiohttp.Fingerprint(sha256) transport = mock.Mock() transport.get_extra_info.return_value = None assert fp.check(transport) is None def test__merge_ssl_params_verify_ssl(): with pytest.warns(DeprecationWarning): assert _merge_ssl_params(None, False, None, None) is False def test__merge_ssl_params_verify_ssl_conflict(): ctx = ssl.SSLContext() with pytest.warns(DeprecationWarning): with pytest.raises(ValueError): _merge_ssl_params(ctx, False, None, None) def test__merge_ssl_params_ssl_context(): ctx = ssl.SSLContext() with pytest.warns(DeprecationWarning): assert _merge_ssl_params(None, None, ctx, None) is ctx def test__merge_ssl_params_ssl_context_conflict(): 
ctx1 = ssl.SSLContext() ctx2 = ssl.SSLContext() with pytest.warns(DeprecationWarning): with pytest.raises(ValueError): _merge_ssl_params(ctx1, None, ctx2, None) def test__merge_ssl_params_fingerprint(): digest = hashlib.sha256(b'123').digest() with pytest.warns(DeprecationWarning): ret = _merge_ssl_params(None, None, None, digest) assert ret.fingerprint == digest def test__merge_ssl_params_fingerprint_conflict(): fingerprint = aiohttp.Fingerprint(hashlib.sha256(b'123').digest()) ctx = ssl.SSLContext() with pytest.warns(DeprecationWarning): with pytest.raises(ValueError): _merge_ssl_params(ctx, None, None, fingerprint) def test__merge_ssl_params_ssl(): ctx = ssl.SSLContext() assert ctx is _merge_ssl_params(ctx, None, None, None) def test__merge_ssl_params_invlid(): with pytest.raises(TypeError): _merge_ssl_params(object(), None, None, None) aiohttp-3.0.1/tests/test_client_functional.py0000666000000000000000000021423313240304665017600 0ustar 00000000000000"""HTTP client functional tests against aiohttp.web server""" import asyncio import http.cookies import io import json import pathlib import socket import ssl from unittest import mock import pytest from multidict import MultiDict import aiohttp from aiohttp import Fingerprint, ServerFingerprintMismatch, hdrs, web from aiohttp.abc import AbstractResolver from aiohttp.test_utils import unused_port @pytest.fixture def here(): return pathlib.Path(__file__).parent @pytest.fixture def ssl_ctx(here): ssl_ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ssl_ctx.load_cert_chain( str(here / 'sample.crt'), str(here / 'sample.key')) return ssl_ctx @pytest.fixture def fname(here): return here / 'sample.key' def ceil(val): return val async def test_keepalive_two_requests_success(aiohttp_client): async def handler(request): body = await request.read() assert b'' == body return web.Response(body=b'OK') app = web.Application() app.router.add_route('GET', '/', handler) connector = aiohttp.TCPConnector(limit=1) client = await 
aiohttp_client(app, connector=connector) resp1 = await client.get('/') await resp1.read() resp2 = await client.get('/') await resp2.read() assert 1 == len(client._session.connector._conns) async def test_keepalive_response_released(aiohttp_client): async def handler(request): body = await request.read() assert b'' == body return web.Response(body=b'OK') app = web.Application() app.router.add_route('GET', '/', handler) connector = aiohttp.TCPConnector(limit=1) client = await aiohttp_client(app, connector=connector) resp1 = await client.get('/') resp1.release() resp2 = await client.get('/') resp2.release() assert 1 == len(client._session.connector._conns) async def test_keepalive_server_force_close_connection(aiohttp_client): async def handler(request): body = await request.read() assert b'' == body response = web.Response(body=b'OK') response.force_close() return response app = web.Application() app.router.add_route('GET', '/', handler) connector = aiohttp.TCPConnector(limit=1) client = await aiohttp_client(app, connector=connector) resp1 = await client.get('/') resp1.close() resp2 = await client.get('/') resp2.close() assert 0 == len(client._session.connector._conns) async def test_release_early(aiohttp_client): async def handler(request): await request.read() return web.Response(body=b'OK') app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert resp.closed assert 1 == len(client._session.connector._conns) async def test_HTTP_304(aiohttp_client): async def handler(request): body = await request.read() assert b'' == body return web.Response(status=304) app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert resp.status == 304 content = await resp.read() assert content == b'' async def test_HTTP_304_WITH_BODY(aiohttp_client): async def handler(request): body = await request.read() assert b'' == body return 
web.Response(body=b'test', status=304) app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert resp.status == 304 content = await resp.read() assert content == b'' async def test_auto_header_user_agent(aiohttp_client): async def handler(request): assert 'aiohttp' in request.headers['user-agent'] return web.Response() app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert 200, resp.status async def test_skip_auto_headers_user_agent(aiohttp_client): async def handler(request): assert hdrs.USER_AGENT not in request.headers return web.Response() app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.get('/', skip_auto_headers=['user-agent']) assert 200 == resp.status async def test_skip_default_auto_headers_user_agent(aiohttp_client): async def handler(request): assert hdrs.USER_AGENT not in request.headers return web.Response() app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app, skip_auto_headers=['user-agent']) resp = await client.get('/') assert 200 == resp.status async def test_skip_auto_headers_content_type(aiohttp_client): async def handler(request): assert hdrs.CONTENT_TYPE not in request.headers return web.Response() app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.get('/', skip_auto_headers=['content-type']) assert 200 == resp.status async def test_post_data_bytesio(aiohttp_client): data = b'some buffer' async def handler(request): assert len(data) == request.content_length val = await request.read() assert data == val return web.Response() app = web.Application() app.router.add_route('POST', '/', handler) client = await aiohttp_client(app) resp = await client.post('/', data=io.BytesIO(data)) assert 200 == 
resp.status async def test_post_data_with_bytesio_file(aiohttp_client): data = b'some buffer' async def handler(request): post_data = await request.post() assert ['file'] == list(post_data.keys()) assert data == post_data['file'].file.read() return web.Response() app = web.Application() app.router.add_route('POST', '/', handler) client = await aiohttp_client(app) resp = await client.post('/', data={'file': io.BytesIO(data)}) assert 200 == resp.status async def test_post_data_stringio(aiohttp_client): data = 'some buffer' async def handler(request): assert len(data) == request.content_length assert request.headers['CONTENT-TYPE'] == 'text/plain; charset=utf-8' val = await request.text() assert data == val return web.Response() app = web.Application() app.router.add_route('POST', '/', handler) client = await aiohttp_client(app) resp = await client.post('/', data=io.StringIO(data)) assert 200 == resp.status async def test_post_data_textio_encoding(aiohttp_client): data = 'текÑÑ‚' async def handler(request): assert request.headers['CONTENT-TYPE'] == 'text/plain; charset=koi8-r' val = await request.text() assert data == val return web.Response() app = web.Application() app.router.add_route('POST', '/', handler) client = await aiohttp_client(app) pl = aiohttp.TextIOPayload(io.StringIO(data), encoding='koi8-r') resp = await client.post('/', data=pl) assert 200 == resp.status async def test_ssl_client(ssl_ctx, aiohttp_server, aiohttp_client): connector = aiohttp.TCPConnector(ssl=False) async def handler(request): return web.Response(text='Test message') app = web.Application() app.router.add_route('GET', '/', handler) server = await aiohttp_server(app, ssl=ssl_ctx) client = await aiohttp_client(server, connector=connector) resp = await client.get('/') assert 200 == resp.status txt = await resp.text() assert txt == 'Test message' async def test_tcp_connector_fingerprint_ok(aiohttp_server, aiohttp_client, ssl_ctx): fingerprint = 
(b'0\x9a\xc9D\x83\xdc\x91\'\x88\x91\x11\xa1d\x97\xfd' b'\xcb~7U\x14D@L' b'\x11\xab\x99\xa8\xae\xb7\x14\xee\x8b') async def handler(request): return web.Response(text='Test message') connector = aiohttp.TCPConnector(ssl=Fingerprint(fingerprint)) app = web.Application() app.router.add_route('GET', '/', handler) server = await aiohttp_server(app, ssl=ssl_ctx) client = await aiohttp_client(server, connector=connector) resp = await client.get('/') assert resp.status == 200 resp.close() async def test_tcp_connector_fingerprint_fail(aiohttp_server, aiohttp_client, ssl_ctx): fingerprint = (b'0\x9a\xc9D\x83\xdc\x91\'\x88\x91\x11\xa1d\x97\xfd' b'\xcb~7U\x14D@L' b'\x11\xab\x99\xa8\xae\xb7\x14\xee\x8b') async def handler(request): return web.Response(text='Test message') bad_fingerprint = b'\x00' * len(fingerprint) connector = aiohttp.TCPConnector(ssl=Fingerprint(bad_fingerprint)) app = web.Application() app.router.add_route('GET', '/', handler) server = await aiohttp_server(app, ssl=ssl_ctx) client = await aiohttp_client(server, connector=connector) with pytest.raises(ServerFingerprintMismatch) as cm: await client.get('/') exc = cm.value assert exc.expected == bad_fingerprint assert exc.got == fingerprint async def test_format_task_get(aiohttp_server, loop): async def handler(request): return web.Response(body=b'OK') app = web.Application() app.router.add_route('GET', '/', handler) server = await aiohttp_server(app) client = aiohttp.ClientSession() task = loop.create_task(client.get(server.make_url('/'))) assert "{}".format(task).startswith("'\ in repr(response) def test_repr_non_ascii_url(): response = ClientResponse('get', URL('http://fake-host.org/\u03bb')) assert ""\ in repr(response) def test_repr_non_ascii_reason(): response = ClientResponse('get', URL('http://fake-host.org/path')) response.reason = '\u03bb' assert ""\ in repr(response) def test_url_obj_deprecated(): response = ClientResponse('get', URL('http://fake-host.org/')) with pytest.warns(DeprecationWarning): 
response.url_obj async def test_read_and_release_connection(loop, session): response = ClientResponse('get', URL('http://def-cl-resp.org')) response._post_init(loop, session) def side_effect(*args, **kwargs): fut = loop.create_future() fut.set_result(b'payload') return fut content = response.content = mock.Mock() content.read.side_effect = side_effect res = await response.read() assert res == b'payload' assert response._connection is None async def test_read_and_release_connection_with_error(loop, session): response = ClientResponse('get', URL('http://def-cl-resp.org')) response._post_init(loop, session) content = response.content = mock.Mock() content.read.return_value = loop.create_future() content.read.return_value.set_exception(ValueError) with pytest.raises(ValueError): await response.read() assert response._closed async def test_release(loop, session): response = ClientResponse('get', URL('http://def-cl-resp.org')) response._post_init(loop, session) fut = loop.create_future() fut.set_result(b'') content = response.content = mock.Mock() content.readany.return_value = fut response.release() assert response._connection is None async def test_release_on_del(loop, session): connection = mock.Mock() connection.protocol.upgraded = False def run(conn): response = ClientResponse('get', URL('http://def-cl-resp.org')) response._post_init(loop, session) response._closed = False response._connection = conn run(connection) assert connection.release.called async def test_response_eof(loop, session): response = ClientResponse('get', URL('http://def-cl-resp.org')) response._post_init(loop, session) response._closed = False conn = response._connection = mock.Mock() conn.protocol.upgraded = False response._response_eof() assert conn.release.called assert response._connection is None async def test_response_eof_upgraded(loop, session): response = ClientResponse('get', URL('http://def-cl-resp.org')) response._post_init(loop, session) conn = response._connection = mock.Mock() 
conn.protocol.upgraded = True response._response_eof() assert not conn.release.called assert response._connection is conn async def test_response_eof_after_connection_detach(loop, session): response = ClientResponse('get', URL('http://def-cl-resp.org')) response._post_init(loop, session) response._closed = False conn = response._connection = mock.Mock() conn.protocol = None response._response_eof() assert conn.release.called assert response._connection is None async def test_text(loop, session): response = ClientResponse('get', URL('http://def-cl-resp.org')) response._post_init(loop, session) def side_effect(*args, **kwargs): fut = loop.create_future() fut.set_result('{"теÑÑ‚": "пройден"}'.encode('cp1251')) return fut response.headers = { 'Content-Type': 'application/json;charset=cp1251'} content = response.content = mock.Mock() content.read.side_effect = side_effect res = await response.text() assert res == '{"теÑÑ‚": "пройден"}' assert response._connection is None async def test_text_bad_encoding(loop, session): response = ClientResponse('get', URL('http://def-cl-resp.org')) response._post_init(loop, session) def side_effect(*args, **kwargs): fut = loop.create_future() fut.set_result('{"теÑÑ‚key": "пройденvalue"}'.encode('cp1251')) return fut # lie about the encoding response.headers = { 'Content-Type': 'application/json;charset=utf-8'} content = response.content = mock.Mock() content.read.side_effect = side_effect with pytest.raises(UnicodeDecodeError): await response.text() # only the valid utf-8 characters will be returned res = await response.text(errors='ignore') assert res == '{"key": "value"}' assert response._connection is None async def test_text_custom_encoding(loop, session): response = ClientResponse('get', URL('http://def-cl-resp.org')) response._post_init(loop, session) def side_effect(*args, **kwargs): fut = loop.create_future() fut.set_result('{"теÑÑ‚": "пройден"}'.encode('cp1251')) return fut response.headers = { 'Content-Type': 
'application/json'} content = response.content = mock.Mock() content.read.side_effect = side_effect response.get_encoding = mock.Mock() res = await response.text(encoding='cp1251') assert res == '{"теÑÑ‚": "пройден"}' assert response._connection is None assert not response.get_encoding.called async def test_text_detect_encoding(loop, session): response = ClientResponse('get', URL('http://def-cl-resp.org')) response._post_init(loop, session) def side_effect(*args, **kwargs): fut = loop.create_future() fut.set_result('{"теÑÑ‚": "пройден"}'.encode('cp1251')) return fut response.headers = {'Content-Type': 'text/plain'} content = response.content = mock.Mock() content.read.side_effect = side_effect await response.read() res = await response.text() assert res == '{"теÑÑ‚": "пройден"}' assert response._connection is None async def test_text_detect_encoding_if_invalid_charset(loop, session): response = ClientResponse('get', URL('http://def-cl-resp.org')) response._post_init(loop, session) def side_effect(*args, **kwargs): fut = loop.create_future() fut.set_result('{"теÑÑ‚": "пройден"}'.encode('cp1251')) return fut response.headers = {'Content-Type': 'text/plain;charset=invalid'} content = response.content = mock.Mock() content.read.side_effect = side_effect await response.read() res = await response.text() assert res == '{"теÑÑ‚": "пройден"}' assert response._connection is None assert response.get_encoding().lower() in ('windows-1251', 'maccyrillic') async def test_text_after_read(loop, session): response = ClientResponse('get', URL('http://def-cl-resp.org')) response._post_init(loop, session) def side_effect(*args, **kwargs): fut = loop.create_future() fut.set_result('{"теÑÑ‚": "пройден"}'.encode('cp1251')) return fut response.headers = { 'Content-Type': 'application/json;charset=cp1251'} content = response.content = mock.Mock() content.read.side_effect = side_effect res = await response.text() assert res == '{"теÑÑ‚": "пройден"}' assert response._connection is None async 
def test_json(loop, session): response = ClientResponse('get', URL('http://def-cl-resp.org')) response._post_init(loop, session) def side_effect(*args, **kwargs): fut = loop.create_future() fut.set_result('{"теÑÑ‚": "пройден"}'.encode('cp1251')) return fut response.headers = { 'Content-Type': 'application/json;charset=cp1251'} content = response.content = mock.Mock() content.read.side_effect = side_effect res = await response.json() assert res == {'теÑÑ‚': 'пройден'} assert response._connection is None async def test_json_custom_loader(loop, session): response = ClientResponse('get', URL('http://def-cl-resp.org')) response._post_init(loop, session) response.headers = { 'Content-Type': 'application/json;charset=cp1251'} response._content = b'data' def custom(content): return content + '-custom' res = await response.json(loads=custom) assert res == 'data-custom' async def test_json_invalid_content_type(loop, session): response = ClientResponse('get', URL('http://def-cl-resp.org')) response._post_init(loop, session) response.headers = { 'Content-Type': 'data/octet-stream'} response._content = b'' with pytest.raises(aiohttp.ContentTypeError) as info: await response.json() assert info.value.request_info == response.request_info async def test_json_no_content(loop, session): response = ClientResponse('get', URL('http://def-cl-resp.org')) response._post_init(loop, session) response.headers = { 'Content-Type': 'data/octet-stream'} response._content = b'' res = await response.json(content_type=None) assert res is None async def test_json_override_encoding(loop, session): response = ClientResponse('get', URL('http://def-cl-resp.org')) response._post_init(loop, session) def side_effect(*args, **kwargs): fut = loop.create_future() fut.set_result('{"теÑÑ‚": "пройден"}'.encode('cp1251')) return fut response.headers = { 'Content-Type': 'application/json;charset=utf8'} content = response.content = mock.Mock() content.read.side_effect = side_effect response.get_encoding = 
mock.Mock() res = await response.json(encoding='cp1251') assert res == {'теÑÑ‚': 'пройден'} assert response._connection is None assert not response.get_encoding.called @pytest.mark.xfail def test_override_flow_control(loop, session): class MyResponse(ClientResponse): flow_control_class = aiohttp.StreamReader response = MyResponse('get', URL('http://my-cl-resp.org')) response._post_init(loop, session) response._connection = mock.Mock() assert isinstance(response.content, aiohttp.StreamReader) response.close() def test_get_encoding_unknown(loop, session): response = ClientResponse('get', URL('http://def-cl-resp.org')) response._post_init(loop, session) response.headers = {'Content-Type': 'application/json'} with mock.patch('aiohttp.client_reqrep.chardet') as m_chardet: m_chardet.detect.return_value = {'encoding': None} assert response.get_encoding() == 'utf-8' def test_raise_for_status_2xx(): response = ClientResponse('get', URL('http://def-cl-resp.org')) response.status = 200 response.reason = 'OK' response.raise_for_status() # should not raise def test_raise_for_status_4xx(): response = ClientResponse('get', URL('http://def-cl-resp.org')) response.status = 409 response.reason = 'CONFLICT' with pytest.raises(aiohttp.ClientResponseError) as cm: response.raise_for_status() assert str(cm.value.code) == '409' assert str(cm.value.message) == "CONFLICT" def test_resp_host(): response = ClientResponse('get', URL('http://del-cl-resp.org')) assert 'del-cl-resp.org' == response.host def test_content_type(): response = ClientResponse('get', URL('http://def-cl-resp.org')) response.headers = {'Content-Type': 'application/json;charset=cp1251'} assert 'application/json' == response.content_type def test_content_type_no_header(): response = ClientResponse('get', URL('http://def-cl-resp.org')) response.headers = {} assert 'application/octet-stream' == response.content_type def test_charset(): response = ClientResponse('get', URL('http://def-cl-resp.org')) response.headers = 
{'Content-Type': 'application/json;charset=cp1251'} assert 'cp1251' == response.charset def test_charset_no_header(): response = ClientResponse('get', URL('http://def-cl-resp.org')) response.headers = {} assert response.charset is None def test_charset_no_charset(): response = ClientResponse('get', URL('http://def-cl-resp.org')) response.headers = {'Content-Type': 'application/json'} assert response.charset is None def test_content_disposition_full(): response = ClientResponse('get', URL('http://def-cl-resp.org')) response.headers = {'Content-Disposition': 'attachment; filename="archive.tar.gz"; foo=bar'} assert 'attachment' == response.content_disposition.type assert 'bar' == response.content_disposition.parameters["foo"] assert 'archive.tar.gz' == response.content_disposition.filename with pytest.raises(TypeError): response.content_disposition.parameters["foo"] = "baz" def test_content_disposition_no_parameters(): response = ClientResponse('get', URL('http://def-cl-resp.org')) response.headers = {'Content-Disposition': 'attachment'} assert 'attachment' == response.content_disposition.type assert response.content_disposition.filename is None assert {} == response.content_disposition.parameters def test_content_disposition_no_header(): response = ClientResponse('get', URL('http://def-cl-resp.org')) response.headers = {} assert response.content_disposition is None def test_content_disposition_cache(): response = ClientResponse('get', URL('http://def-cl-resp.org')) response.headers = {'Content-Disposition': 'attachment'} cd = response.content_disposition ClientResponse.headers = {'Content-Disposition': 'spam'} assert cd is response.content_disposition def test_response_request_info(): url = 'http://def-cl-resp.org' headers = {'Content-Type': 'application/json;charset=cp1251'} response = ClientResponse( 'get', URL(url), request_info=RequestInfo( url, 'get', headers ) ) assert url == response.request_info.url assert 'get' == response.request_info.method assert headers 
== response.request_info.headers def test_response_request_info_empty(): url = 'http://def-cl-resp.org' response = ClientResponse( 'get', URL(url), ) assert response.request_info is None def test_request_info_in_exception(): url = 'http://def-cl-resp.org' headers = {'Content-Type': 'application/json;charset=cp1251'} response = ClientResponse( 'get', URL(url), request_info=RequestInfo( url, 'get', headers ) ) response.status = 409 response.reason = 'CONFLICT' with pytest.raises(aiohttp.ClientResponseError) as cm: response.raise_for_status() assert cm.value.request_info == response.request_info def test_no_redirect_history_in_exception(): url = 'http://def-cl-resp.org' headers = {'Content-Type': 'application/json;charset=cp1251'} response = ClientResponse( 'get', URL(url), request_info=RequestInfo( url, 'get', headers ) ) response.status = 409 response.reason = 'CONFLICT' with pytest.raises(aiohttp.ClientResponseError) as cm: response.raise_for_status() assert () == cm.value.history def test_redirect_history_in_exception(): hist_url = 'http://def-cl-resp.org' url = 'http://def-cl-resp.org/index.htm' hist_headers = {'Content-Type': 'application/json;charset=cp1251', 'Location': url } headers = {'Content-Type': 'application/json;charset=cp1251'} response = ClientResponse( 'get', URL(url), request_info=RequestInfo( url, 'get', headers ) ) response.status = 409 response.reason = 'CONFLICT' hist_response = ClientResponse( 'get', URL(hist_url), request_info=RequestInfo( url, 'get', headers ) ) hist_response.headers = hist_headers hist_response.status = 301 hist_response.reason = 'REDIRECT' response._history = [hist_response] with pytest.raises(aiohttp.ClientResponseError) as cm: response.raise_for_status() assert [hist_response] == cm.value.history aiohttp-3.0.1/tests/test_client_session.py0000666000000000000000000004565613240304665017134 0ustar 00000000000000import asyncio import contextlib import gc import re from http.cookies import SimpleCookie from unittest import 
mock import pytest from multidict import CIMultiDict, MultiDict from yarl import URL import aiohttp from aiohttp import hdrs, web from aiohttp.client import ClientSession from aiohttp.client_reqrep import ClientRequest from aiohttp.connector import BaseConnector, TCPConnector from aiohttp.helpers import PY_36 @pytest.fixture def connector(loop): conn = BaseConnector(loop=loop) proto = mock.Mock() conn._conns['a'] = [(proto, 123)] yield conn conn.close() @pytest.fixture def create_session(loop): session = None def maker(*args, **kwargs): nonlocal session session = ClientSession(*args, loop=loop, **kwargs) return session yield maker if session is not None: loop.run_until_complete(session.close()) @pytest.fixture def session(create_session): return create_session() @pytest.fixture def params(): return dict( headers={"Authorization": "Basic ..."}, max_redirects=2, encoding="latin1", version=aiohttp.HttpVersion10, compress="deflate", chunked=True, expect100=True, read_until_eof=False) def test_close_coro(create_session, loop): session = create_session() loop.run_until_complete(session.close()) def test_init_headers_simple_dict(create_session): session = create_session(headers={"h1": "header1", "h2": "header2"}) assert (sorted(session._default_headers.items()) == ([("h1", "header1"), ("h2", "header2")])) def test_init_headers_list_of_tuples(create_session): session = create_session(headers=[("h1", "header1"), ("h2", "header2"), ("h3", "header3")]) assert (session._default_headers == CIMultiDict([("h1", "header1"), ("h2", "header2"), ("h3", "header3")])) def test_init_headers_MultiDict(create_session): session = create_session(headers=MultiDict([("h1", "header1"), ("h2", "header2"), ("h3", "header3")])) assert (session._default_headers == CIMultiDict([("H1", "header1"), ("H2", "header2"), ("H3", "header3")])) def test_init_headers_list_of_tuples_with_duplicates(create_session): session = create_session(headers=[("h1", "header11"), ("h2", "header21"), ("h1", "header12")]) 
assert (session._default_headers == CIMultiDict([("H1", "header11"), ("H2", "header21"), ("H1", "header12")])) def test_init_cookies_with_simple_dict(create_session): session = create_session(cookies={"c1": "cookie1", "c2": "cookie2"}) cookies = session.cookie_jar.filter_cookies() assert set(cookies) == {'c1', 'c2'} assert cookies['c1'].value == 'cookie1' assert cookies['c2'].value == 'cookie2' def test_init_cookies_with_list_of_tuples(create_session): session = create_session(cookies=[("c1", "cookie1"), ("c2", "cookie2")]) cookies = session.cookie_jar.filter_cookies() assert set(cookies) == {'c1', 'c2'} assert cookies['c1'].value == 'cookie1' assert cookies['c2'].value == 'cookie2' def test_merge_headers(create_session): # Check incoming simple dict session = create_session(headers={"h1": "header1", "h2": "header2"}) headers = session._prepare_headers({"h1": "h1"}) assert isinstance(headers, CIMultiDict) assert headers == {"h1": "h1", "h2": "header2"} def test_merge_headers_with_multi_dict(create_session): session = create_session(headers={"h1": "header1", "h2": "header2"}) headers = session._prepare_headers(MultiDict([("h1", "h1")])) assert isinstance(headers, CIMultiDict) assert headers == {"h1": "h1", "h2": "header2"} def test_merge_headers_with_list_of_tuples(create_session): session = create_session(headers={"h1": "header1", "h2": "header2"}) headers = session._prepare_headers([("h1", "h1")]) assert isinstance(headers, CIMultiDict) assert headers == {"h1": "h1", "h2": "header2"} def test_merge_headers_with_list_of_tuples_duplicated_names(create_session): session = create_session(headers={"h1": "header1", "h2": "header2"}) headers = session._prepare_headers([("h1", "v1"), ("h1", "v2")]) assert isinstance(headers, CIMultiDict) assert list(sorted(headers.items())) == [("h1", "v1"), ("h1", "v2"), ("h2", "header2")] def test_http_GET(session, params): with mock.patch("aiohttp.client.ClientSession._request") as patched: session.get("http://test.example.com", 
params={"x": 1}, **params) assert patched.called, "`ClientSession._request` not called" assert list(patched.call_args) == [("GET", "http://test.example.com",), dict( params={"x": 1}, allow_redirects=True, **params)] def test_http_OPTIONS(session, params): with mock.patch("aiohttp.client.ClientSession._request") as patched: session.options("http://opt.example.com", params={"x": 2}, **params) assert patched.called, "`ClientSession._request` not called" assert list(patched.call_args) == [("OPTIONS", "http://opt.example.com",), dict( params={"x": 2}, allow_redirects=True, **params)] def test_http_HEAD(session, params): with mock.patch("aiohttp.client.ClientSession._request") as patched: session.head("http://head.example.com", params={"x": 2}, **params) assert patched.called, "`ClientSession._request` not called" assert list(patched.call_args) == [("HEAD", "http://head.example.com",), dict( params={"x": 2}, allow_redirects=False, **params)] def test_http_POST(session, params): with mock.patch("aiohttp.client.ClientSession._request") as patched: session.post("http://post.example.com", params={"x": 2}, data="Some_data", **params) assert patched.called, "`ClientSession._request` not called" assert list(patched.call_args) == [("POST", "http://post.example.com",), dict( params={"x": 2}, data="Some_data", **params)] def test_http_PUT(session, params): with mock.patch("aiohttp.client.ClientSession._request") as patched: session.put("http://put.example.com", params={"x": 2}, data="Some_data", **params) assert patched.called, "`ClientSession._request` not called" assert list(patched.call_args) == [("PUT", "http://put.example.com",), dict( params={"x": 2}, data="Some_data", **params)] def test_http_PATCH(session, params): with mock.patch("aiohttp.client.ClientSession._request") as patched: session.patch("http://patch.example.com", params={"x": 2}, data="Some_data", **params) assert patched.called, "`ClientSession._request` not called" assert list(patched.call_args) == [("PATCH", 
"http://patch.example.com",), dict( params={"x": 2}, data="Some_data", **params)] def test_http_DELETE(session, params): with mock.patch("aiohttp.client.ClientSession._request") as patched: session.delete("http://delete.example.com", params={"x": 2}, **params) assert patched.called, "`ClientSession._request` not called" assert list(patched.call_args) == [("DELETE", "http://delete.example.com",), dict( params={"x": 2}, **params)] async def test_close(create_session, connector): session = create_session(connector=connector) await session.close() assert session.connector is None assert connector.closed async def test_closed(session): assert not session.closed await session.close() assert session.closed async def test_connector(create_session, loop, mocker): connector = TCPConnector(loop=loop) mocker.spy(connector, 'close') session = create_session(connector=connector) assert session.connector is connector await session.close() assert connector.close.called connector.close() async def test_create_connector(create_session, loop, mocker): session = create_session() connector = session.connector mocker.spy(session.connector, 'close') await session.close() assert connector.close.called def test_connector_loop(loop): with contextlib.ExitStack() as stack: another_loop = asyncio.new_event_loop() stack.enter_context(contextlib.closing(another_loop)) connector = TCPConnector(loop=another_loop) stack.enter_context(contextlib.closing(connector)) with pytest.raises(RuntimeError) as ctx: ClientSession(connector=connector, loop=loop) assert re.match("Session and connector has to use same event loop", str(ctx.value)) def test_detach(session): conn = session.connector try: assert not conn.closed session.detach() assert session.connector is None assert session.closed assert not conn.closed finally: conn.close() async def test_request_closed_session(session): await session.close() with pytest.raises(RuntimeError): await session.request('get', '/') def 
test_close_flag_for_closed_connector(session): conn = session.connector assert not session.closed conn.close() assert session.closed async def test_double_close(connector, create_session): session = create_session(connector=connector) await session.close() assert session.connector is None await session.close() assert session.closed assert connector.closed def test_del(connector, loop): # N.B. don't use session fixture, it stores extra reference internally session = ClientSession(connector=connector, loop=loop) loop.set_exception_handler(lambda loop, ctx: None) with pytest.warns(ResourceWarning): del session gc.collect() def test_context_manager(connector, loop): with pytest.raises(TypeError): with ClientSession(loop=loop, connector=connector) as session: pass assert session.closed async def test_borrow_connector_loop(connector, create_session, loop): session = ClientSession(connector=connector, loop=None) try: assert session._loop, loop finally: await session.close() async def test_reraise_os_error(create_session): err = OSError(1, "permission error") req = mock.Mock() req_factory = mock.Mock(return_value=req) req.send = mock.Mock(side_effect=err) session = create_session(request_class=req_factory) async def create_connection(req, traces=None): # return self.transport, self.protocol return mock.Mock() session._connector._create_connection = create_connection with pytest.raises(aiohttp.ClientOSError) as ctx: await session.request('get', 'http://example.com') e = ctx.value assert e.errno == err.errno assert e.strerror == err.strerror async def test_cookie_jar_usage(loop, aiohttp_client): req_url = None jar = mock.Mock() jar.filter_cookies.return_value = None async def handler(request): nonlocal req_url req_url = "http://%s/" % request.host resp = web.Response() resp.set_cookie("response", "resp_value") return resp app = web.Application() app.router.add_route('GET', '/', handler) session = await aiohttp_client( app, cookies={"request": "req_value"}, cookie_jar=jar ) # 
Updating the cookie jar with initial user defined cookies jar.update_cookies.assert_called_with({"request": "req_value"}) jar.update_cookies.reset_mock() resp = await session.get("/") await resp.release() # Filtering the cookie jar before sending the request, # getting the request URL as only parameter jar.filter_cookies.assert_called_with(URL(req_url)) # Updating the cookie jar with the response cookies assert jar.update_cookies.called resp_cookies = jar.update_cookies.call_args[0][0] assert isinstance(resp_cookies, SimpleCookie) assert "response" in resp_cookies assert resp_cookies["response"].value == "resp_value" def test_session_default_version(loop): session = aiohttp.ClientSession(loop=loop) assert session.version == aiohttp.HttpVersion11 async def test_session_loop(loop): session = aiohttp.ClientSession(loop=loop) assert session.loop is loop await session.close() def test_proxy_str(session, params): with mock.patch("aiohttp.client.ClientSession._request") as patched: session.get("http://test.example.com", proxy='http://proxy.com', **params) assert patched.called, "`ClientSession._request` not called" assert list(patched.call_args) == [("GET", "http://test.example.com",), dict( allow_redirects=True, proxy='http://proxy.com', **params)] def test_client_session_implicit_loop_warn(): loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) with pytest.warns(UserWarning): session = aiohttp.ClientSession() assert session._loop is loop loop.run_until_complete(session.close()) asyncio.set_event_loop(None) loop.close() async def test_request_tracing(loop, aiohttp_client): async def handler(request): return web.Response() app = web.Application() app.router.add_get('/', handler) trace_config_ctx = mock.Mock() trace_request_ctx = {} on_request_start = mock.Mock(side_effect=asyncio.coroutine(mock.Mock())) on_request_redirect = mock.Mock(side_effect=asyncio.coroutine(mock.Mock())) on_request_end = mock.Mock(side_effect=asyncio.coroutine(mock.Mock())) trace_config = 
aiohttp.TraceConfig( trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx) ) trace_config.on_request_start.append(on_request_start) trace_config.on_request_end.append(on_request_end) trace_config.on_request_redirect.append(on_request_redirect) session = await aiohttp_client(app, trace_configs=[trace_config]) async with session.get('/', trace_request_ctx=trace_request_ctx) as resp: on_request_start.assert_called_once_with( session.session, trace_config_ctx, aiohttp.TraceRequestStartParams( hdrs.METH_GET, session.make_url('/'), CIMultiDict() ) ) on_request_end.assert_called_once_with( session.session, trace_config_ctx, aiohttp.TraceRequestEndParams( hdrs.METH_GET, session.make_url('/'), CIMultiDict(), resp ) ) assert not on_request_redirect.called async def test_request_tracing_exception(loop): on_request_end = mock.Mock(side_effect=asyncio.coroutine(mock.Mock())) on_request_exception = mock.Mock( side_effect=asyncio.coroutine(mock.Mock()) ) trace_config = aiohttp.TraceConfig() trace_config.on_request_end.append(on_request_end) trace_config.on_request_exception.append(on_request_exception) with mock.patch("aiohttp.client.TCPConnector.connect") as connect_patched: error = Exception() f = loop.create_future() f.set_exception(error) connect_patched.return_value = f session = aiohttp.ClientSession( loop=loop, trace_configs=[trace_config] ) try: await session.get('http://example.com') except Exception: pass on_request_exception.assert_called_once_with( session, mock.ANY, aiohttp.TraceRequestExceptionParams( hdrs.METH_GET, URL("http://example.com"), CIMultiDict(), error ) ) assert not on_request_end.called async def test_request_tracing_interpose_headers(loop, aiohttp_client): async def handler(request): return web.Response() app = web.Application() app.router.add_get('/', handler) class MyClientRequest(ClientRequest): headers = None def __init__(self, *args, **kwargs): super(MyClientRequest, self).__init__(*args, **kwargs) MyClientRequest.headers = 
self.headers async def new_headers( session, trace_config_ctx, data): data.headers['foo'] = 'bar' trace_config = aiohttp.TraceConfig() trace_config.on_request_start.append(new_headers) session = await aiohttp_client( app, request_class=MyClientRequest, trace_configs=[trace_config] ) await session.get('/') assert MyClientRequest.headers['foo'] == 'bar' @pytest.mark.skipif(not PY_36, reason="Python 3.6+ required") def test_client_session_inheritance(): with pytest.warns(DeprecationWarning): class A(ClientSession): pass def test_client_session_custom_attr(loop): session = ClientSession(loop=loop) with pytest.warns(DeprecationWarning): session.custom = None aiohttp-3.0.1/tests/test_client_ws.py0000666000000000000000000005644713240304665016102 0ustar 00000000000000import asyncio import base64 import hashlib import os from unittest import mock import pytest import aiohttp from aiohttp import client, hdrs from aiohttp.http import WS_KEY from aiohttp.log import ws_logger from aiohttp.test_utils import make_mocked_coro @pytest.fixture def key_data(): return os.urandom(16) @pytest.fixture def key(key_data): return base64.b64encode(key_data) @pytest.fixture def ws_key(key): return base64.b64encode(hashlib.sha1(key + WS_KEY).digest()).decode() async def test_ws_connect(ws_key, loop, key_data): resp = mock.Mock() resp.status = 101 resp.headers = { hdrs.UPGRADE: hdrs.WEBSOCKET, hdrs.CONNECTION: hdrs.UPGRADE, hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, hdrs.SEC_WEBSOCKET_PROTOCOL: 'chat' } with mock.patch('aiohttp.client.os') as m_os: with mock.patch('aiohttp.client.ClientSession.get') as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) res = await aiohttp.ClientSession(loop=loop).ws_connect( 'http://test.org', protocols=('t1', 't2', 'chat')) assert isinstance(res, client.ClientWebSocketResponse) assert res.protocol == 'chat' assert hdrs.ORIGIN not in m_req.call_args[1]["headers"] async def 
test_ws_connect_with_origin(key_data, loop): resp = mock.Mock() resp.status = 403 with mock.patch('aiohttp.client.os') as m_os: with mock.patch('aiohttp.client.ClientSession.get') as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) origin = 'https://example.org/page.html' with pytest.raises(client.WSServerHandshakeError): await aiohttp.ClientSession(loop=loop).ws_connect( 'http://test.org', origin=origin) assert hdrs.ORIGIN in m_req.call_args[1]["headers"] assert m_req.call_args[1]["headers"][hdrs.ORIGIN] == origin async def test_ws_connect_custom_response(loop, ws_key, key_data): class CustomResponse(client.ClientWebSocketResponse): def read(self, decode=False): return 'customized!' resp = mock.Mock() resp.status = 101 resp.headers = { hdrs.UPGRADE: hdrs.WEBSOCKET, hdrs.CONNECTION: hdrs.UPGRADE, hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } with mock.patch('aiohttp.client.os') as m_os: with mock.patch('aiohttp.client.ClientSession.get') as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) res = await aiohttp.ClientSession( ws_response_class=CustomResponse, loop=loop).ws_connect( 'http://test.org') assert res.read() == 'customized!' 
async def test_ws_connect_err_status(loop, ws_key, key_data): resp = mock.Mock() resp.status = 500 resp.headers = { hdrs.UPGRADE: hdrs.WEBSOCKET, hdrs.CONNECTION: hdrs.UPGRADE, hdrs.SEC_WEBSOCKET_ACCEPT: ws_key } with mock.patch('aiohttp.client.os') as m_os: with mock.patch('aiohttp.client.ClientSession.get') as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) with pytest.raises(client.WSServerHandshakeError) as ctx: await aiohttp.ClientSession(loop=loop).ws_connect( 'http://test.org', protocols=('t1', 't2', 'chat')) assert ctx.value.message == 'Invalid response status' async def test_ws_connect_err_upgrade(loop, ws_key, key_data): resp = mock.Mock() resp.status = 101 resp.headers = { hdrs.UPGRADE: 'test', hdrs.CONNECTION: hdrs.UPGRADE, hdrs.SEC_WEBSOCKET_ACCEPT: ws_key } with mock.patch('aiohttp.client.os') as m_os: with mock.patch('aiohttp.client.ClientSession.get') as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) with pytest.raises(client.WSServerHandshakeError) as ctx: await aiohttp.ClientSession(loop=loop).ws_connect( 'http://test.org', protocols=('t1', 't2', 'chat')) assert ctx.value.message == 'Invalid upgrade header' async def test_ws_connect_err_conn(loop, ws_key, key_data): resp = mock.Mock() resp.status = 101 resp.headers = { hdrs.UPGRADE: hdrs.WEBSOCKET, hdrs.CONNECTION: 'close', hdrs.SEC_WEBSOCKET_ACCEPT: ws_key } with mock.patch('aiohttp.client.os') as m_os: with mock.patch('aiohttp.client.ClientSession.get') as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) with pytest.raises(client.WSServerHandshakeError) as ctx: await aiohttp.ClientSession(loop=loop).ws_connect( 'http://test.org', protocols=('t1', 't2', 'chat')) assert ctx.value.message == 'Invalid connection header' async def test_ws_connect_err_challenge(loop, ws_key, 
key_data): resp = mock.Mock() resp.status = 101 resp.headers = { hdrs.UPGRADE: hdrs.WEBSOCKET, hdrs.CONNECTION: hdrs.UPGRADE, hdrs.SEC_WEBSOCKET_ACCEPT: 'asdfasdfasdfasdfasdfasdf' } with mock.patch('aiohttp.client.os') as m_os: with mock.patch('aiohttp.client.ClientSession.get') as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) with pytest.raises(client.WSServerHandshakeError) as ctx: await aiohttp.ClientSession(loop=loop).ws_connect( 'http://test.org', protocols=('t1', 't2', 'chat')) assert ctx.value.message == 'Invalid challenge response' async def test_ws_connect_common_headers(ws_key, loop, key_data): """Emulate a headers dict being reused for a second ws_connect. In this scenario, we need to ensure that the newly generated secret key is sent to the server, not the stale key. """ headers = {} async def test_connection(): async def mock_get(*args, **kwargs): resp = mock.Mock() resp.status = 101 key = kwargs.get('headers').get(hdrs.SEC_WEBSOCKET_KEY) accept = base64.b64encode( hashlib.sha1(base64.b64encode(base64.b64decode(key)) + WS_KEY) .digest()).decode() resp.headers = { hdrs.UPGRADE: hdrs.WEBSOCKET, hdrs.CONNECTION: hdrs.UPGRADE, hdrs.SEC_WEBSOCKET_ACCEPT: accept, hdrs.SEC_WEBSOCKET_PROTOCOL: 'chat' } return resp with mock.patch('aiohttp.client.os') as m_os: with mock.patch('aiohttp.client.ClientSession.get', side_effect=mock_get) as m_req: m_os.urandom.return_value = key_data res = await aiohttp.ClientSession(loop=loop).ws_connect( 'http://test.org', protocols=('t1', 't2', 'chat'), headers=headers) assert isinstance(res, client.ClientWebSocketResponse) assert res.protocol == 'chat' assert hdrs.ORIGIN not in m_req.call_args[1]["headers"] await test_connection() # Generate a new ws key key_data = os.urandom(16) await test_connection() async def test_close(loop, ws_key, key_data): resp = mock.Mock() resp.status = 101 resp.headers = { hdrs.UPGRADE: hdrs.WEBSOCKET, hdrs.CONNECTION: 
hdrs.UPGRADE, hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } with mock.patch('aiohttp.client.WebSocketWriter') as WebSocketWriter: with mock.patch('aiohttp.client.os') as m_os: with mock.patch('aiohttp.client.ClientSession.get') as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) writer = WebSocketWriter.return_value = mock.Mock() session = aiohttp.ClientSession(loop=loop) resp = await session.ws_connect( 'http://test.org') assert not resp.closed resp._reader.feed_data( aiohttp.WSMessage(aiohttp.WSMsgType.CLOSE, b'', b''), 0) res = await resp.close() writer.close.assert_called_with(1000, b'') assert resp.closed assert res assert resp.exception() is None # idempotent res = await resp.close() assert not res assert writer.close.call_count == 1 await session.close() async def test_close_exc(loop, ws_key, key_data): resp = mock.Mock() resp.status = 101 resp.headers = { hdrs.UPGRADE: hdrs.WEBSOCKET, hdrs.CONNECTION: hdrs.UPGRADE, hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } with mock.patch('aiohttp.client.WebSocketWriter') as WebSocketWriter: with mock.patch('aiohttp.client.os') as m_os: with mock.patch('aiohttp.client.ClientSession.get') as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) WebSocketWriter.return_value = mock.Mock() session = aiohttp.ClientSession(loop=loop) resp = await session.ws_connect('http://test.org') assert not resp.closed exc = ValueError() resp._reader.set_exception(exc) await resp.close() assert resp.closed assert resp.exception() is exc await session.close() async def test_close_exc2(loop, ws_key, key_data): resp = mock.Mock() resp.status = 101 resp.headers = { hdrs.UPGRADE: hdrs.WEBSOCKET, hdrs.CONNECTION: hdrs.UPGRADE, hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } with mock.patch('aiohttp.client.WebSocketWriter') as WebSocketWriter: with mock.patch('aiohttp.client.os') as m_os: with 
mock.patch('aiohttp.client.ClientSession.get') as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) writer = WebSocketWriter.return_value = mock.Mock() resp = await aiohttp.ClientSession(loop=loop).ws_connect( 'http://test.org') assert not resp.closed exc = ValueError() writer.close.side_effect = exc await resp.close() assert resp.closed assert resp.exception() is exc resp._closed = False writer.close.side_effect = asyncio.CancelledError() with pytest.raises(asyncio.CancelledError): await resp.close() async def test_send_data_after_close(ws_key, key_data, loop, mocker): resp = mock.Mock() resp.status = 101 resp.headers = { hdrs.UPGRADE: hdrs.WEBSOCKET, hdrs.CONNECTION: hdrs.UPGRADE, hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } with mock.patch('aiohttp.client.os') as m_os: with mock.patch('aiohttp.client.ClientSession.get') as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) resp = await aiohttp.ClientSession(loop=loop).ws_connect( 'http://test.org') resp._writer._closing = True mocker.spy(ws_logger, 'warning') for meth, args in ((resp.ping, ()), (resp.pong, ()), (resp.send_str, ('s',)), (resp.send_bytes, (b'b',)), (resp.send_json, ({},))): await meth(*args) assert ws_logger.warning.called ws_logger.warning.reset_mock() async def test_send_data_type_errors(ws_key, key_data, loop): resp = mock.Mock() resp.status = 101 resp.headers = { hdrs.UPGRADE: hdrs.WEBSOCKET, hdrs.CONNECTION: hdrs.UPGRADE, hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } with mock.patch('aiohttp.client.WebSocketWriter') as WebSocketWriter: with mock.patch('aiohttp.client.os') as m_os: with mock.patch('aiohttp.client.ClientSession.get') as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) WebSocketWriter.return_value = mock.Mock() resp = await aiohttp.ClientSession(loop=loop).ws_connect( 
'http://test.org') with pytest.raises(TypeError): await resp.send_str(b's') with pytest.raises(TypeError): await resp.send_bytes('b') with pytest.raises(TypeError): await resp.send_json(set()) async def test_reader_read_exception(ws_key, key_data, loop): hresp = mock.Mock() hresp.status = 101 hresp.headers = { hdrs.UPGRADE: hdrs.WEBSOCKET, hdrs.CONNECTION: hdrs.UPGRADE, hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } with mock.patch('aiohttp.client.WebSocketWriter') as WebSocketWriter: with mock.patch('aiohttp.client.os') as m_os: with mock.patch('aiohttp.client.ClientSession.get') as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(hresp) WebSocketWriter.return_value = mock.Mock() session = aiohttp.ClientSession(loop=loop) resp = await session.ws_connect('http://test.org') exc = ValueError() resp._reader.set_exception(exc) msg = await resp.receive() assert msg.type == aiohttp.WSMsgType.ERROR assert resp.exception() is exc await session.close() async def test_receive_runtime_err(loop): resp = client.ClientWebSocketResponse( mock.Mock(), mock.Mock(), mock.Mock(), mock.Mock(), 10.0, True, True, loop) resp._waiting = True with pytest.raises(RuntimeError): await resp.receive() async def test_ws_connect_close_resp_on_err(loop, ws_key, key_data): resp = mock.Mock() resp.status = 500 resp.headers = { hdrs.UPGRADE: hdrs.WEBSOCKET, hdrs.CONNECTION: hdrs.UPGRADE, hdrs.SEC_WEBSOCKET_ACCEPT: ws_key } with mock.patch('aiohttp.client.os') as m_os: with mock.patch('aiohttp.client.ClientSession.get') as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) with pytest.raises(client.WSServerHandshakeError): await aiohttp.ClientSession(loop=loop).ws_connect( 'http://test.org', protocols=('t1', 't2', 'chat')) resp.close.assert_called_with() async def test_ws_connect_non_overlapped_protocols(ws_key, loop, key_data): resp = mock.Mock() resp.status = 101 
resp.headers = { hdrs.UPGRADE: hdrs.WEBSOCKET, hdrs.CONNECTION: hdrs.UPGRADE, hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, hdrs.SEC_WEBSOCKET_PROTOCOL: 'other,another' } with mock.patch('aiohttp.client.os') as m_os: with mock.patch('aiohttp.client.ClientSession.get') as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) res = await aiohttp.ClientSession(loop=loop).ws_connect( 'http://test.org', protocols=('t1', 't2', 'chat')) assert res.protocol is None async def test_ws_connect_non_overlapped_protocols_2(ws_key, loop, key_data): resp = mock.Mock() resp.status = 101 resp.headers = { hdrs.UPGRADE: hdrs.WEBSOCKET, hdrs.CONNECTION: hdrs.UPGRADE, hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, hdrs.SEC_WEBSOCKET_PROTOCOL: 'other,another' } with mock.patch('aiohttp.client.os') as m_os: with mock.patch('aiohttp.client.ClientSession.get') as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) connector = aiohttp.TCPConnector(loop=loop, force_close=True) res = await aiohttp.ClientSession( connector=connector, loop=loop).ws_connect( 'http://test.org', protocols=('t1', 't2', 'chat')) assert res.protocol is None del res async def test_ws_connect_deflate(loop, ws_key, key_data): resp = mock.Mock() resp.status = 101 resp.headers = { hdrs.UPGRADE: hdrs.WEBSOCKET, hdrs.CONNECTION: hdrs.UPGRADE, hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, hdrs.SEC_WEBSOCKET_EXTENSIONS: 'permessage-deflate', } with mock.patch('aiohttp.client.os') as m_os: with mock.patch('aiohttp.client.ClientSession.get') as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) res = await aiohttp.ClientSession(loop=loop).ws_connect( 'http://test.org', compress=15) assert res.compress == 15 assert res.client_notakeover is False async def test_ws_connect_deflate_per_message(loop, ws_key, key_data): resp = mock.Mock() resp.status = 101 
resp.headers = { hdrs.UPGRADE: hdrs.WEBSOCKET, hdrs.CONNECTION: hdrs.UPGRADE, hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, hdrs.SEC_WEBSOCKET_EXTENSIONS: 'permessage-deflate', } with mock.patch('aiohttp.client.WebSocketWriter') as WebSocketWriter: with mock.patch('aiohttp.client.os') as m_os: with mock.patch('aiohttp.client.ClientSession.get') as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) writer = WebSocketWriter.return_value = mock.Mock() send = writer.send = make_mocked_coro() session = aiohttp.ClientSession(loop=loop) resp = await session.ws_connect('http://test.org') await resp.send_str('string', compress=-1) send.assert_called_with('string', binary=False, compress=-1) await resp.send_bytes(b'bytes', compress=15) send.assert_called_with(b'bytes', binary=True, compress=15) await resp.send_json([{}], compress=-9) send.assert_called_with('[{}]', binary=False, compress=-9) await session.close() async def test_ws_connect_deflate_server_not_support(loop, ws_key, key_data): resp = mock.Mock() resp.status = 101 resp.headers = { hdrs.UPGRADE: hdrs.WEBSOCKET, hdrs.CONNECTION: hdrs.UPGRADE, hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, } with mock.patch('aiohttp.client.os') as m_os: with mock.patch('aiohttp.client.ClientSession.get') as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) res = await aiohttp.ClientSession(loop=loop).ws_connect( 'http://test.org', compress=15) assert res.compress == 0 assert res.client_notakeover is False async def test_ws_connect_deflate_notakeover(loop, ws_key, key_data): resp = mock.Mock() resp.status = 101 resp.headers = { hdrs.UPGRADE: hdrs.WEBSOCKET, hdrs.CONNECTION: hdrs.UPGRADE, hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, hdrs.SEC_WEBSOCKET_EXTENSIONS: 'permessage-deflate; ' 'client_no_context_takeover', } with mock.patch('aiohttp.client.os') as m_os: with mock.patch('aiohttp.client.ClientSession.get') as m_req: 
m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) res = await aiohttp.ClientSession(loop=loop).ws_connect( 'http://test.org', compress=15) assert res.compress == 15 assert res.client_notakeover is True async def test_ws_connect_deflate_client_wbits(loop, ws_key, key_data): resp = mock.Mock() resp.status = 101 resp.headers = { hdrs.UPGRADE: hdrs.WEBSOCKET, hdrs.CONNECTION: hdrs.UPGRADE, hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, hdrs.SEC_WEBSOCKET_EXTENSIONS: 'permessage-deflate; ' 'client_max_window_bits=10', } with mock.patch('aiohttp.client.os') as m_os: with mock.patch('aiohttp.client.ClientSession.get') as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) res = await aiohttp.ClientSession(loop=loop).ws_connect( 'http://test.org', compress=15) assert res.compress == 10 assert res.client_notakeover is False async def test_ws_connect_deflate_client_wbits_bad(loop, ws_key, key_data): resp = mock.Mock() resp.status = 101 resp.headers = { hdrs.UPGRADE: hdrs.WEBSOCKET, hdrs.CONNECTION: hdrs.UPGRADE, hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, hdrs.SEC_WEBSOCKET_EXTENSIONS: 'permessage-deflate; ' 'client_max_window_bits=6', } with mock.patch('aiohttp.client.os') as m_os: with mock.patch('aiohttp.client.ClientSession.get') as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) with pytest.raises(client.WSServerHandshakeError): await aiohttp.ClientSession(loop=loop).ws_connect( 'http://test.org', compress=15) async def test_ws_connect_deflate_server_ext_bad(loop, ws_key, key_data): resp = mock.Mock() resp.status = 101 resp.headers = { hdrs.UPGRADE: hdrs.WEBSOCKET, hdrs.CONNECTION: hdrs.UPGRADE, hdrs.SEC_WEBSOCKET_ACCEPT: ws_key, hdrs.SEC_WEBSOCKET_EXTENSIONS: 'permessage-deflate; bad', } with mock.patch('aiohttp.client.os') as m_os: with mock.patch('aiohttp.client.ClientSession.get') 
as m_req: m_os.urandom.return_value = key_data m_req.return_value = loop.create_future() m_req.return_value.set_result(resp) with pytest.raises(client.WSServerHandshakeError): await aiohttp.ClientSession(loop=loop).ws_connect( 'http://test.org', compress=15) aiohttp-3.0.1/tests/test_client_ws_functional.py0000666000000000000000000004670413240304665020317 0ustar 00000000000000import asyncio import async_timeout import pytest import aiohttp from aiohttp import hdrs, web @pytest.fixture def ceil(mocker): def ceil(val): return val mocker.patch('aiohttp.helpers.ceil').side_effect = ceil async def test_send_recv_text(loop, aiohttp_client): async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) msg = await ws.receive_str() await ws.send_str(msg+'/answer') await ws.close() return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.ws_connect('/') await resp.send_str('ask') assert resp.get_extra_info('socket') is not None data = await resp.receive_str() assert data == 'ask/answer' await resp.close() assert resp.get_extra_info('socket') is None async def test_send_recv_bytes_bad_type(loop, aiohttp_client): async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) msg = await ws.receive_str() await ws.send_str(msg+'/answer') await ws.close() return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.ws_connect('/') await resp.send_str('ask') with pytest.raises(TypeError): await resp.receive_bytes() await resp.close() async def test_send_recv_bytes(loop, aiohttp_client): async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) msg = await ws.receive_bytes() await ws.send_bytes(msg+b'/answer') await ws.close() return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.ws_connect('/') 
await resp.send_bytes(b'ask') data = await resp.receive_bytes() assert data == b'ask/answer' await resp.close() async def test_send_recv_text_bad_type(loop, aiohttp_client): async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) msg = await ws.receive_bytes() await ws.send_bytes(msg+b'/answer') await ws.close() return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.ws_connect('/') await resp.send_bytes(b'ask') with pytest.raises(TypeError): await resp.receive_str() await resp.close() async def test_send_recv_json(loop, aiohttp_client): async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) data = await ws.receive_json() await ws.send_json({'response': data['request']}) await ws.close() return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.ws_connect('/') payload = {'request': 'test'} await resp.send_json(payload) data = await resp.receive_json() assert data['response'] == payload['request'] await resp.close() async def test_ping_pong(loop, aiohttp_client): closed = loop.create_future() async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) msg = await ws.receive_bytes() await ws.ping() await ws.send_bytes(msg+b'/answer') try: await ws.close() finally: closed.set_result(1) return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.ws_connect('/') await resp.ping() await resp.send_bytes(b'ask') msg = await resp.receive() assert msg.type == aiohttp.WSMsgType.BINARY assert msg.data == b'ask/answer' msg = await resp.receive() assert msg.type == aiohttp.WSMsgType.CLOSE await resp.close() await closed async def test_ping_pong_manual(loop, aiohttp_client): closed = loop.create_future() async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) 
msg = await ws.receive_bytes() await ws.ping() await ws.send_bytes(msg+b'/answer') try: await ws.close() finally: closed.set_result(1) return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.ws_connect('/', autoping=False) await resp.ping() await resp.send_bytes(b'ask') msg = await resp.receive() assert msg.type == aiohttp.WSMsgType.PONG msg = await resp.receive() assert msg.type == aiohttp.WSMsgType.PING await resp.pong() msg = await resp.receive() assert msg.data == b'ask/answer' msg = await resp.receive() assert msg.type == aiohttp.WSMsgType.CLOSE await closed async def test_close(loop, aiohttp_client): async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) await ws.receive_bytes() await ws.send_str('test') await ws.receive() return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.ws_connect('/') await resp.send_bytes(b'ask') closed = await resp.close() assert closed assert resp.closed assert resp.close_code == 1000 msg = await resp.receive() assert msg.type == aiohttp.WSMsgType.CLOSED async def test_concurrent_close(loop, aiohttp_client): client_ws = None async def handler(request): nonlocal client_ws ws = web.WebSocketResponse() await ws.prepare(request) await ws.receive_bytes() await ws.send_str('test') await client_ws.close() msg = await ws.receive() assert msg.type == aiohttp.WSMsgType.CLOSE return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) ws = client_ws = await client.ws_connect('/') await ws.send_bytes(b'ask') msg = await ws.receive() assert msg.type == aiohttp.WSMsgType.CLOSING await asyncio.sleep(0.01, loop=loop) msg = await ws.receive() assert msg.type == aiohttp.WSMsgType.CLOSED async def test_close_from_server(loop, aiohttp_client): closed = loop.create_future() async def handler(request): ws = 
web.WebSocketResponse() await ws.prepare(request) try: await ws.receive_bytes() await ws.close() finally: closed.set_result(1) return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.ws_connect('/') await resp.send_bytes(b'ask') msg = await resp.receive() assert msg.type == aiohttp.WSMsgType.CLOSE assert resp.closed msg = await resp.receive() assert msg.type == aiohttp.WSMsgType.CLOSED await closed async def test_close_manual(loop, aiohttp_client): closed = loop.create_future() async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) await ws.receive_bytes() await ws.send_str('test') try: await ws.close() finally: closed.set_result(1) return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.ws_connect('/', autoclose=False) await resp.send_bytes(b'ask') msg = await resp.receive() assert msg.data == 'test' msg = await resp.receive() assert msg.type == aiohttp.WSMsgType.CLOSE assert msg.data == 1000 assert msg.extra == '' assert not resp.closed await resp.close() await closed assert resp.closed async def test_close_timeout(loop, aiohttp_client): async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) await ws.receive_bytes() await ws.send_str('test') await asyncio.sleep(1, loop=loop) return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.ws_connect('/', timeout=0.2, autoclose=False) await resp.send_bytes(b'ask') msg = await resp.receive() assert msg.data == 'test' assert msg.type == aiohttp.WSMsgType.TEXT msg = await resp.close() assert resp.closed assert isinstance(resp.exception(), asyncio.TimeoutError) async def test_close_cancel(loop, aiohttp_client): async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) await ws.receive_bytes() await ws.send_str('test') await 
asyncio.sleep(10, loop=loop) app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.ws_connect('/', autoclose=False) await resp.send_bytes(b'ask') text = await resp.receive() assert text.data == 'test' t = loop.create_task(resp.close()) await asyncio.sleep(0.1, loop=loop) t.cancel() await asyncio.sleep(0.1, loop=loop) assert resp.closed assert resp.exception() is None async def test_override_default_headers(loop, aiohttp_client): async def handler(request): assert request.headers[hdrs.SEC_WEBSOCKET_VERSION] == '8' ws = web.WebSocketResponse() await ws.prepare(request) await ws.send_str('answer') await ws.close() return ws app = web.Application() app.router.add_route('GET', '/', handler) headers = {hdrs.SEC_WEBSOCKET_VERSION: '8'} client = await aiohttp_client(app) resp = await client.ws_connect('/', headers=headers) msg = await resp.receive() assert msg.data == 'answer' await resp.close() async def test_additional_headers(loop, aiohttp_client): async def handler(request): assert request.headers['x-hdr'] == 'xtra' ws = web.WebSocketResponse() await ws.prepare(request) await ws.send_str('answer') await ws.close() return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.ws_connect('/', headers={'x-hdr': 'xtra'}) msg = await resp.receive() assert msg.data == 'answer' await resp.close() async def test_recv_protocol_error(loop, aiohttp_client): async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) await ws.receive_str() ws._writer.transport.write(b'01234' * 100) await ws.close() return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.ws_connect('/') await resp.send_str('ask') msg = await resp.receive() assert msg.type == aiohttp.WSMsgType.ERROR assert type(msg.data) is aiohttp.WebSocketError assert msg.data.args[0] == 'Received 
frame with non-zero reserved bits' assert msg.extra is None await resp.close() async def test_recv_timeout(loop, aiohttp_client): async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) await ws.receive_str() await asyncio.sleep(0.1, loop=request.app.loop) await ws.close() return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.ws_connect('/') await resp.send_str('ask') with pytest.raises(asyncio.TimeoutError): with async_timeout.timeout(0.01, loop=app.loop): await resp.receive() await resp.close() async def test_receive_timeout(loop, aiohttp_client): async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) await ws.receive() await ws.close() return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.ws_connect('/', receive_timeout=0.1) with pytest.raises(asyncio.TimeoutError): await resp.receive(0.05) await resp.close() async def test_custom_receive_timeout(loop, aiohttp_client): async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) await ws.receive() await ws.close() return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.ws_connect('/') with pytest.raises(asyncio.TimeoutError): await resp.receive(0.05) await resp.close() async def test_heartbeat(loop, aiohttp_client, ceil): ping_received = False async def handler(request): nonlocal ping_received ws = web.WebSocketResponse(autoping=False) await ws.prepare(request) msg = await ws.receive() if msg.type == aiohttp.WSMsgType.ping: ping_received = True await ws.close() return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.ws_connect('/', heartbeat=0.01) await resp.receive() await resp.close() assert ping_received async def 
test_heartbeat_no_pong(loop, aiohttp_client, ceil): ping_received = False async def handler(request): nonlocal ping_received ws = web.WebSocketResponse(autoping=False) await ws.prepare(request) msg = await ws.receive() if msg.type == aiohttp.WSMsgType.ping: ping_received = True await ws.receive() return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.ws_connect('/', heartbeat=0.05) await resp.receive() await resp.receive() assert ping_received async def test_send_recv_compress(loop, aiohttp_client): async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) msg = await ws.receive_str() await ws.send_str(msg+'/answer') await ws.close() return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.ws_connect('/', compress=15) await resp.send_str('ask') assert resp.compress == 15 data = await resp.receive_str() assert data == 'ask/answer' await resp.close() assert resp.get_extra_info('socket') is None async def test_send_recv_compress_wbits(loop, aiohttp_client): async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) msg = await ws.receive_str() await ws.send_str(msg+'/answer') await ws.close() return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.ws_connect('/', compress=9) await resp.send_str('ask') # Client indicates supports wbits 15 # Server supports wbit 15 for decode assert resp.compress == 15 data = await resp.receive_str() assert data == 'ask/answer' await resp.close() assert resp.get_extra_info('socket') is None async def test_send_recv_compress_wbit_error(loop, aiohttp_client): async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) msg = await ws.receive_bytes() await ws.send_bytes(msg+b'/answer') await ws.close() return ws app = web.Application() 
app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) with pytest.raises(ValueError): await client.ws_connect('/', compress=1) async def test_ws_client_async_for(loop, aiohttp_client): items = ['q1', 'q2', 'q3'] async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) for i in items: await ws.send_str(i) await ws.close() return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.ws_connect('/') it = iter(items) async for msg in resp: assert msg.data == next(it) with pytest.raises(StopIteration): next(it) assert resp.closed async def test_ws_async_with(loop, aiohttp_server): async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) msg = await ws.receive() await ws.send_str(msg.data + '/answer') await ws.close() return ws app = web.Application() app.router.add_route('GET', '/', handler) server = await aiohttp_server(app) async with aiohttp.ClientSession(loop=loop) as client: async with client.ws_connect(server.make_url('/')) as ws: await ws.send_str('request') msg = await ws.receive() assert msg.data == 'request/answer' assert ws.closed async def test_ws_async_with_send(loop, aiohttp_server): # send_xxx methods have to return awaitable objects async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) msg = await ws.receive() await ws.send_str(msg.data + '/answer') await ws.close() return ws app = web.Application() app.router.add_route('GET', '/', handler) server = await aiohttp_server(app) async with aiohttp.ClientSession(loop=loop) as client: async with client.ws_connect(server.make_url('/')) as ws: await ws.send_str('request') msg = await ws.receive() assert msg.data == 'request/answer' assert ws.closed async def test_ws_async_with_shortcut(loop, aiohttp_server): async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) msg = await ws.receive() await ws.send_str(msg.data 
+ '/answer') await ws.close() return ws app = web.Application() app.router.add_route('GET', '/', handler) server = await aiohttp_server(app) async with aiohttp.ClientSession(loop=loop) as client: async with client.ws_connect(server.make_url('/')) as ws: await ws.send_str('request') msg = await ws.receive() assert msg.data == 'request/answer' assert ws.closed async def test_closed_async_for(loop, aiohttp_client): closed = loop.create_future() async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) try: await ws.send_bytes(b'started') await ws.receive_bytes() finally: closed.set_result(1) return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.ws_connect('/') messages = [] async for msg in resp: messages.append(msg) if b'started' == msg.data: await resp.send_bytes(b'ask') await resp.close() assert 1 == len(messages) assert messages[0].type == aiohttp.WSMsgType.BINARY assert messages[0].data == b'started' assert resp.closed await closed aiohttp-3.0.1/tests/test_connector.py0000666000000000000000000016625713240304665016106 0ustar 00000000000000"""Tests of http client with custom Connector""" import asyncio import gc import hashlib import os.path import platform import shutil import socket import ssl import tempfile import unittest import uuid from unittest import mock import pytest from yarl import URL import aiohttp from aiohttp import client, web from aiohttp.client import ClientRequest from aiohttp.connector import Connection, _DNSCacheTable from aiohttp.test_utils import make_mocked_coro, unused_port from aiohttp.tracing import Trace @pytest.fixture() def key(): """Connection key""" return ('localhost1', 80, False) @pytest.fixture def key2(): """Connection key""" return ('localhost2', 80, False) @pytest.fixture def ssl_key(): """Connection key""" return ('localhost', 80, True) def test_del(loop): conn = aiohttp.BaseConnector(loop=loop) proto = 
mock.Mock(should_close=False) conn._release('a', proto) conns_impl = conn._conns exc_handler = mock.Mock() loop.set_exception_handler(exc_handler) with pytest.warns(ResourceWarning): del conn gc.collect() assert not conns_impl proto.close.assert_called_with() msg = {'connector': mock.ANY, # conn was deleted 'connections': mock.ANY, 'message': 'Unclosed connector'} if loop.get_debug(): msg['source_traceback'] = mock.ANY exc_handler.assert_called_with(loop, msg) @pytest.mark.xfail async def test_del_with_scheduled_cleanup(loop): loop.set_debug(True) conn = aiohttp.BaseConnector(loop=loop, keepalive_timeout=0.01) transp = mock.Mock() conn._conns['a'] = [(transp, 'proto', 123)] conns_impl = conn._conns exc_handler = mock.Mock() loop.set_exception_handler(exc_handler) with pytest.warns(ResourceWarning): # obviously doesn't deletion because loop has a strong # reference to connector's instance method, isn't it? del conn await asyncio.sleep(0.01, loop=loop) gc.collect() assert not conns_impl transp.close.assert_called_with() msg = {'connector': mock.ANY, # conn was deleted 'message': 'Unclosed connector'} if loop.get_debug(): msg['source_traceback'] = mock.ANY exc_handler.assert_called_with(loop, msg) def test_del_with_closed_loop(loop): conn = aiohttp.BaseConnector(loop=loop) transp = mock.Mock() conn._conns['a'] = [(transp, 'proto', 123)] conns_impl = conn._conns exc_handler = mock.Mock() loop.set_exception_handler(exc_handler) loop.close() with pytest.warns(ResourceWarning): del conn gc.collect() assert not conns_impl assert not transp.close.called assert exc_handler.called def test_del_empty_conector(loop): conn = aiohttp.BaseConnector(loop=loop) exc_handler = mock.Mock() loop.set_exception_handler(exc_handler) del conn assert not exc_handler.called async def test_create_conn(loop): conn = aiohttp.BaseConnector(loop=loop) with pytest.raises(NotImplementedError): await conn._create_connection(object()) def test_context_manager(loop): conn = 
aiohttp.BaseConnector(loop=loop) conn.close = mock.Mock() with conn as c: assert conn is c assert conn.close.called def test_ctor_loop(): with mock.patch('aiohttp.connector.asyncio') as m_asyncio: session = aiohttp.BaseConnector() assert session._loop is m_asyncio.get_event_loop.return_value def test_close(loop): proto = mock.Mock() conn = aiohttp.BaseConnector(loop=loop) assert not conn.closed conn._conns[('host', 8080, False)] = [(proto, object())] conn.close() assert not conn._conns assert proto.close.called assert conn.closed def test_get(loop): conn = aiohttp.BaseConnector(loop=loop) assert conn._get(1) is None proto = mock.Mock() conn._conns[1] = [(proto, loop.time())] assert conn._get(1) == proto conn.close() def test_get_expired(loop): conn = aiohttp.BaseConnector(loop=loop) assert conn._get(('localhost', 80, False)) is None proto = mock.Mock() conn._conns[('localhost', 80, False)] = [(proto, loop.time() - 1000)] assert conn._get(('localhost', 80, False)) is None assert not conn._conns conn.close() def test_get_expired_ssl(loop): conn = aiohttp.BaseConnector(loop=loop, enable_cleanup_closed=True) assert conn._get(('localhost', 80, True)) is None proto = mock.Mock() conn._conns[('localhost', 80, True)] = [(proto, loop.time() - 1000)] assert conn._get(('localhost', 80, True)) is None assert not conn._conns assert conn._cleanup_closed_transports == [proto.close.return_value] conn.close() def test_release_acquired(loop, key): proto = mock.Mock() conn = aiohttp.BaseConnector(loop=loop, limit=5) conn._release_waiter = mock.Mock() conn._acquired.add(proto) conn._acquired_per_host[key].add(proto) conn._release_acquired(key, proto) assert 0 == len(conn._acquired) assert 0 == len(conn._acquired_per_host) assert conn._release_waiter.called conn._release_acquired(key, proto) assert 0 == len(conn._acquired) assert 0 == len(conn._acquired_per_host) conn.close() def test_release_acquired_closed(loop, key): proto = mock.Mock() conn = aiohttp.BaseConnector(loop=loop, 
limit=5) conn._release_waiter = mock.Mock() conn._acquired.add(proto) conn._acquired_per_host[key].add(proto) conn._closed = True conn._release_acquired(key, proto) assert 1 == len(conn._acquired) assert 1 == len(conn._acquired_per_host[key]) assert not conn._release_waiter.called conn.close() def test_release(loop, key): conn = aiohttp.BaseConnector(loop=loop) conn._release_waiter = mock.Mock() proto = mock.Mock(should_close=False) conn._acquired.add(proto) conn._acquired_per_host[key].add(proto) conn._release(key, proto) assert conn._release_waiter.called assert conn._conns[key][0][0] == proto assert conn._conns[key][0][1] == pytest.approx(loop.time(), abs=0.1) assert not conn._cleanup_closed_transports conn.close() def test_release_ssl_transport(loop, ssl_key): conn = aiohttp.BaseConnector(loop=loop, enable_cleanup_closed=True) conn._release_waiter = mock.Mock() proto = mock.Mock() conn._acquired.add(proto) conn._acquired_per_host[ssl_key].add(proto) conn._release(ssl_key, proto, should_close=True) assert conn._cleanup_closed_transports == [proto.close.return_value] conn.close() def test_release_already_closed(loop): conn = aiohttp.BaseConnector(loop=loop) proto = mock.Mock() key = 1 conn._acquired.add(proto) conn.close() conn._release_waiters = mock.Mock() conn._release_acquired = mock.Mock() conn._release(key, proto) assert not conn._release_waiters.called assert not conn._release_acquired.called def test_release_waiter(loop, key, key2): # limit is 0 conn = aiohttp.BaseConnector(limit=0, loop=loop) w = mock.Mock() w.done.return_value = False conn._waiters[key].append(w) conn._release_waiter() assert len(conn._waiters) == 1 assert not w.done.called conn.close() # release first available conn = aiohttp.BaseConnector(loop=loop) w1, w2 = mock.Mock(), mock.Mock() w1.done.return_value = False w2.done.return_value = False conn._waiters[key].append(w2) conn._waiters[key2].append(w1) conn._release_waiter() assert (w1.set_result.called and not w2.set_result.called or 
not w1.set_result.called and w2.set_result.called) conn.close() # limited available conn = aiohttp.BaseConnector(loop=loop, limit=1) w1, w2 = mock.Mock(), mock.Mock() w1.done.return_value = False w2.done.return_value = False conn._waiters[key] = [w1, w2] conn._release_waiter() assert w1.set_result.called assert not w2.set_result.called conn.close() # limited available conn = aiohttp.BaseConnector(loop=loop, limit=1) w1, w2 = mock.Mock(), mock.Mock() w1.done.return_value = True w2.done.return_value = False conn._waiters[key] = [w1, w2] conn._release_waiter() assert not w1.set_result.called assert not w2.set_result.called conn.close() def test_release_waiter_per_host(loop, key, key2): # no limit conn = aiohttp.BaseConnector(loop=loop, limit=0, limit_per_host=2) w1, w2 = mock.Mock(), mock.Mock() w1.done.return_value = False w2.done.return_value = False conn._waiters[key] = [w1] conn._waiters[key2] = [w2] conn._release_waiter() assert ((w1.set_result.called and not w2.set_result.called) or (not w1.set_result.called and w2.set_result.called)) conn.close() def test_release_close(loop): conn = aiohttp.BaseConnector(loop=loop) proto = mock.Mock(should_close=True) key = ('localhost', 80, False) conn._acquired.add(proto) conn._release(key, proto) assert not conn._conns assert proto.close.called def test__drop_acquire_per_host1(loop): conn = aiohttp.BaseConnector(loop=loop) conn._drop_acquired_per_host(123, 456) assert len(conn._acquired_per_host) == 0 def test__drop_acquire_per_host2(loop): conn = aiohttp.BaseConnector(loop=loop) conn._acquired_per_host[123].add(456) conn._drop_acquired_per_host(123, 456) assert len(conn._acquired_per_host) == 0 def test__drop_acquire_per_host3(loop): conn = aiohttp.BaseConnector(loop=loop) conn._acquired_per_host[123].add(456) conn._acquired_per_host[123].add(789) conn._drop_acquired_per_host(123, 456) assert len(conn._acquired_per_host) == 1 assert conn._acquired_per_host[123] == {789} async def test_tcp_connector_certificate_error(loop): 
req = ClientRequest('GET', URL('https://127.0.0.1:443'), loop=loop) async def certificate_error(*args, **kwargs): raise ssl.CertificateError conn = aiohttp.TCPConnector(loop=loop) conn._loop.create_connection = certificate_error with pytest.raises(aiohttp.ClientConnectorCertificateError) as ctx: await conn.connect(req) assert isinstance(ctx.value, ssl.CertificateError) assert isinstance(ctx.value.certificate_error, ssl.CertificateError) assert isinstance(ctx.value, aiohttp.ClientSSLError) assert str(ctx.value) == ('Cannot connect to host 127.0.0.1:443 ssl:True ' '[CertificateError: ()]') async def test_tcp_connector_multiple_hosts_errors(loop): conn = aiohttp.TCPConnector(loop=loop) ip1 = '192.168.1.1' ip2 = '192.168.1.2' ip3 = '192.168.1.3' ip4 = '192.168.1.4' ip5 = '192.168.1.5' ips = [ip1, ip2, ip3, ip4, ip5] ips_tried = [] fingerprint = hashlib.sha256(b'foo').digest() req = ClientRequest('GET', URL('https://mocked.host'), ssl=aiohttp.Fingerprint(fingerprint), loop=loop) async def _resolve_host(host, port, traces=None): return [{ 'hostname': host, 'host': ip, 'port': port, 'family': socket.AF_INET, 'proto': 0, 'flags': socket.AI_NUMERICHOST} for ip in ips] conn._resolve_host = _resolve_host os_error = certificate_error = ssl_error = fingerprint_error = False connected = False async def create_connection(*args, **kwargs): nonlocal os_error, certificate_error, ssl_error, fingerprint_error nonlocal connected ip = args[1] ips_tried.append(ip) if ip == ip1: os_error = True raise OSError if ip == ip2: certificate_error = True raise ssl.CertificateError if ip == ip3: ssl_error = True raise ssl.SSLError if ip == ip4: fingerprint_error = True tr, pr = mock.Mock(), None def get_extra_info(param): if param == 'sslcontext': return True if param == 'ssl_object': s = mock.Mock() s.getpeercert.return_value = b'not foo' return s if param == 'peername': return ('192.168.1.5', 12345) assert False, param tr.get_extra_info = get_extra_info return tr, pr if ip == ip5: connected = 
True tr, pr = mock.Mock(), None def get_extra_info(param): if param == 'sslcontext': return True if param == 'ssl_object': s = mock.Mock() s.getpeercert.return_value = b'foo' return s assert False tr.get_extra_info = get_extra_info return tr, pr assert False conn._loop.create_connection = create_connection await conn.connect(req) assert ips == ips_tried assert os_error assert certificate_error assert ssl_error assert fingerprint_error assert connected async def test_tcp_connector_resolve_host(loop): conn = aiohttp.TCPConnector(loop=loop, use_dns_cache=True) res = await conn._resolve_host('localhost', 8080) assert res for rec in res: if rec['family'] == socket.AF_INET: assert rec['host'] == '127.0.0.1' assert rec['hostname'] == 'localhost' assert rec['port'] == 8080 elif rec['family'] == socket.AF_INET6: assert rec['hostname'] == 'localhost' assert rec['port'] == 8080 if platform.system() == 'Darwin': assert rec['host'] in ('::1', 'fe80::1', 'fe80::1%lo0') else: assert rec['host'] == '::1' @pytest.fixture def dns_response(loop): async def coro(): # simulates a network operation await asyncio.sleep(0, loop=loop) return ["127.0.0.1"] return coro async def test_tcp_connector_dns_cache_not_expired(loop, dns_response): with mock.patch('aiohttp.connector.DefaultResolver') as m_resolver: conn = aiohttp.TCPConnector( loop=loop, use_dns_cache=True, ttl_dns_cache=10 ) m_resolver().resolve.return_value = dns_response() await conn._resolve_host('localhost', 8080) await conn._resolve_host('localhost', 8080) m_resolver().resolve.assert_called_once_with( 'localhost', 8080, family=0 ) async def test_tcp_connector_dns_cache_forever(loop, dns_response): with mock.patch('aiohttp.connector.DefaultResolver') as m_resolver: conn = aiohttp.TCPConnector( loop=loop, use_dns_cache=True, ttl_dns_cache=10 ) m_resolver().resolve.return_value = dns_response() await conn._resolve_host('localhost', 8080) await conn._resolve_host('localhost', 8080) m_resolver().resolve.assert_called_once_with( 
'localhost', 8080, family=0 ) async def test_tcp_connector_use_dns_cache_disabled(loop, dns_response): with mock.patch('aiohttp.connector.DefaultResolver') as m_resolver: conn = aiohttp.TCPConnector(loop=loop, use_dns_cache=False) m_resolver().resolve.side_effect = [dns_response(), dns_response()] await conn._resolve_host('localhost', 8080) await conn._resolve_host('localhost', 8080) m_resolver().resolve.assert_has_calls([ mock.call('localhost', 8080, family=0), mock.call('localhost', 8080, family=0) ]) async def test_tcp_connector_dns_throttle_requests(loop, dns_response): with mock.patch('aiohttp.connector.DefaultResolver') as m_resolver: conn = aiohttp.TCPConnector( loop=loop, use_dns_cache=True, ttl_dns_cache=10 ) m_resolver().resolve.return_value = dns_response() loop.create_task(conn._resolve_host('localhost', 8080)) loop.create_task(conn._resolve_host('localhost', 8080)) await asyncio.sleep(0, loop=loop) m_resolver().resolve.assert_called_once_with( 'localhost', 8080, family=0 ) async def test_tcp_connector_dns_throttle_requests_exception_spread(loop): with mock.patch('aiohttp.connector.DefaultResolver') as m_resolver: conn = aiohttp.TCPConnector( loop=loop, use_dns_cache=True, ttl_dns_cache=10 ) e = Exception() m_resolver().resolve.side_effect = e r1 = loop.create_task(conn._resolve_host('localhost', 8080)) r2 = loop.create_task(conn._resolve_host('localhost', 8080)) await asyncio.sleep(0, loop=loop) assert r1.exception() == e assert r2.exception() == e async def test_tcp_connector_dns_throttle_requests_cancelled_when_close( loop, dns_response): with mock.patch('aiohttp.connector.DefaultResolver') as m_resolver: conn = aiohttp.TCPConnector( loop=loop, use_dns_cache=True, ttl_dns_cache=10 ) m_resolver().resolve.return_value = dns_response() loop.create_task(conn._resolve_host('localhost', 8080)) f = loop.create_task(conn._resolve_host('localhost', 8080)) await asyncio.sleep(0, loop=loop) conn.close() with pytest.raises(asyncio.futures.CancelledError): await 
f async def test_tcp_connector_dns_tracing(loop, dns_response): session = mock.Mock() trace_config_ctx = mock.Mock() on_dns_resolvehost_start = mock.Mock( side_effect=asyncio.coroutine(mock.Mock()) ) on_dns_resolvehost_end = mock.Mock( side_effect=asyncio.coroutine(mock.Mock()) ) on_dns_cache_hit = mock.Mock( side_effect=asyncio.coroutine(mock.Mock()) ) on_dns_cache_miss = mock.Mock( side_effect=asyncio.coroutine(mock.Mock()) ) trace_config = aiohttp.TraceConfig( trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx) ) trace_config.on_dns_resolvehost_start.append(on_dns_resolvehost_start) trace_config.on_dns_resolvehost_end.append(on_dns_resolvehost_end) trace_config.on_dns_cache_hit.append(on_dns_cache_hit) trace_config.on_dns_cache_miss.append(on_dns_cache_miss) trace_config.freeze() traces = [ Trace( session, trace_config, trace_config.trace_config_ctx() ) ] with mock.patch('aiohttp.connector.DefaultResolver') as m_resolver: conn = aiohttp.TCPConnector( loop=loop, use_dns_cache=True, ttl_dns_cache=10 ) m_resolver().resolve.return_value = dns_response() await conn._resolve_host( 'localhost', 8080, traces=traces ) on_dns_resolvehost_start.assert_called_once_with( session, trace_config_ctx, aiohttp.TraceDnsResolveHostStartParams('localhost') ) on_dns_resolvehost_end.assert_called_once_with( session, trace_config_ctx, aiohttp.TraceDnsResolveHostEndParams('localhost') ) on_dns_cache_miss.assert_called_once_with( session, trace_config_ctx, aiohttp.TraceDnsCacheMissParams('localhost') ) assert not on_dns_cache_hit.called await conn._resolve_host( 'localhost', 8080, traces=traces ) on_dns_cache_hit.assert_called_once_with( session, trace_config_ctx, aiohttp.TraceDnsCacheHitParams('localhost') ) async def test_tcp_connector_dns_tracing_cache_disabled(loop, dns_response): session = mock.Mock() trace_config_ctx = mock.Mock() on_dns_resolvehost_start = mock.Mock( side_effect=asyncio.coroutine(mock.Mock()) ) on_dns_resolvehost_end = mock.Mock( 
side_effect=asyncio.coroutine(mock.Mock()) ) trace_config = aiohttp.TraceConfig( trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx) ) trace_config.on_dns_resolvehost_start.append(on_dns_resolvehost_start) trace_config.on_dns_resolvehost_end.append(on_dns_resolvehost_end) trace_config.freeze() traces = [ Trace( session, trace_config, trace_config.trace_config_ctx() ) ] with mock.patch('aiohttp.connector.DefaultResolver') as m_resolver: conn = aiohttp.TCPConnector( loop=loop, use_dns_cache=False ) m_resolver().resolve.side_effect = [ dns_response(), dns_response() ] await conn._resolve_host( 'localhost', 8080, traces=traces ) await conn._resolve_host( 'localhost', 8080, traces=traces ) on_dns_resolvehost_start.assert_has_calls([ mock.call( session, trace_config_ctx, aiohttp.TraceDnsResolveHostStartParams('localhost') ), mock.call( session, trace_config_ctx, aiohttp.TraceDnsResolveHostStartParams('localhost') ) ]) on_dns_resolvehost_end.assert_has_calls([ mock.call( session, trace_config_ctx, aiohttp.TraceDnsResolveHostEndParams('localhost') ), mock.call( session, trace_config_ctx, aiohttp.TraceDnsResolveHostEndParams('localhost') ) ]) async def test_tcp_connector_dns_tracing_throttle_requests(loop, dns_response): session = mock.Mock() trace_config_ctx = mock.Mock() on_dns_cache_hit = mock.Mock( side_effect=asyncio.coroutine(mock.Mock()) ) on_dns_cache_miss = mock.Mock( side_effect=asyncio.coroutine(mock.Mock()) ) trace_config = aiohttp.TraceConfig( trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx) ) trace_config.on_dns_cache_hit.append(on_dns_cache_hit) trace_config.on_dns_cache_miss.append(on_dns_cache_miss) trace_config.freeze() traces = [ Trace( session, trace_config, trace_config.trace_config_ctx() ) ] with mock.patch('aiohttp.connector.DefaultResolver') as m_resolver: conn = aiohttp.TCPConnector( loop=loop, use_dns_cache=True, ttl_dns_cache=10 ) m_resolver().resolve.return_value = dns_response() 
loop.create_task(conn._resolve_host('localhost', 8080, traces=traces)) loop.create_task(conn._resolve_host('localhost', 8080, traces=traces)) await asyncio.sleep(0, loop=loop) on_dns_cache_hit.assert_called_once_with( session, trace_config_ctx, aiohttp.TraceDnsCacheHitParams('localhost') ) on_dns_cache_miss.assert_called_once_with( session, trace_config_ctx, aiohttp.TraceDnsCacheMissParams('localhost') ) def test_dns_error(loop): connector = aiohttp.TCPConnector(loop=loop) connector._resolve_host = make_mocked_coro( raise_exception=OSError('dont take it serious')) req = ClientRequest( 'GET', URL('http://www.python.org'), loop=loop, ) with pytest.raises(aiohttp.ClientConnectorError): loop.run_until_complete(connector.connect(req)) def test_get_pop_empty_conns(loop): # see issue #473 conn = aiohttp.BaseConnector(loop=loop) key = ('127.0.0.1', 80, False) conn._conns[key] = [] proto = conn._get(key) assert proto is None assert not conn._conns def test_release_close_do_not_add_to_pool(loop): # see issue #473 conn = aiohttp.BaseConnector(loop=loop) key = ('127.0.0.1', 80, False) proto = mock.Mock(should_close=True) conn._acquired.add(proto) conn._release(key, proto) assert not conn._conns def test_release_close_do_not_delete_existing_connections(loop): key = ('127.0.0.1', 80, False) proto1 = mock.Mock() conn = aiohttp.BaseConnector(loop=loop) conn._conns[key] = [(proto1, 1)] proto = mock.Mock(should_close=True) conn._acquired.add(proto) conn._release(key, proto) assert conn._conns[key] == [(proto1, 1)] assert proto.close.called conn.close() def test_release_not_started(loop): conn = aiohttp.BaseConnector(loop=loop) proto = mock.Mock(should_close=False) key = 1 conn._acquired.add(proto) conn._release(key, proto) # assert conn._conns == {1: [(proto, 10)]} rec = conn._conns[1] assert rec[0][0] == proto assert rec[0][1] == pytest.approx(loop.time(), abs=0.05) assert not proto.close.called conn.close() def test_release_not_opened(loop): conn = aiohttp.BaseConnector(loop=loop) 
proto = mock.Mock() key = ('localhost', 80, False) conn._acquired.add(proto) conn._release(key, proto) assert proto.close.called async def test_connect(loop): proto = mock.Mock() proto.is_connected.return_value = True req = ClientRequest('GET', URL('http://host:80'), loop=loop) conn = aiohttp.BaseConnector(loop=loop) key = ('host', 80, False) conn._conns[key] = [(proto, loop.time())] conn._create_connection = mock.Mock() conn._create_connection.return_value = loop.create_future() conn._create_connection.return_value.set_result(proto) connection = await conn.connect(req) assert not conn._create_connection.called assert connection._protocol is proto assert connection.transport is proto.transport assert isinstance(connection, Connection) connection.close() async def test_connect_tracing(loop): session = mock.Mock() trace_config_ctx = mock.Mock() on_connection_create_start = mock.Mock( side_effect=asyncio.coroutine(mock.Mock()) ) on_connection_create_end = mock.Mock( side_effect=asyncio.coroutine(mock.Mock()) ) trace_config = aiohttp.TraceConfig( trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx) ) trace_config.on_connection_create_start.append(on_connection_create_start) trace_config.on_connection_create_end.append(on_connection_create_end) trace_config.freeze() traces = [ Trace( session, trace_config, trace_config.trace_config_ctx() ) ] proto = mock.Mock() proto.is_connected.return_value = True req = ClientRequest('GET', URL('http://host:80'), loop=loop) conn = aiohttp.BaseConnector(loop=loop) conn._create_connection = mock.Mock() conn._create_connection.return_value = loop.create_future() conn._create_connection.return_value.set_result(proto) await conn.connect(req, traces=traces) on_connection_create_start.assert_called_with( session, trace_config_ctx, aiohttp.TraceConnectionCreateStartParams() ) on_connection_create_end.assert_called_with( session, trace_config_ctx, aiohttp.TraceConnectionCreateEndParams() ) async def 
test_close_during_connect(loop): proto = mock.Mock() proto.is_connected.return_value = True fut = loop.create_future() req = ClientRequest('GET', URL('http://host:80'), loop=loop) conn = aiohttp.BaseConnector(loop=loop) conn._create_connection = mock.Mock() conn._create_connection.return_value = fut task = loop.create_task(conn.connect(req)) await asyncio.sleep(0, loop=loop) conn.close() fut.set_result(proto) with pytest.raises(aiohttp.ClientConnectionError): await task assert proto.close.called def test_ctor_cleanup(): loop = mock.Mock() loop.time.return_value = 1.5 conn = aiohttp.BaseConnector( loop=loop, keepalive_timeout=10, enable_cleanup_closed=True) assert conn._cleanup_handle is None assert conn._cleanup_closed_handle is not None def test_cleanup(): key = ('localhost', 80, False) testset = { key: [(mock.Mock(), 10), (mock.Mock(), 300)], } testset[key][0][0].is_connected.return_value = True testset[key][1][0].is_connected.return_value = False loop = mock.Mock() loop.time.return_value = 300 conn = aiohttp.BaseConnector(loop=loop) conn._conns = testset existing_handle = conn._cleanup_handle = mock.Mock() conn._cleanup() assert existing_handle.cancel.called assert conn._conns == {} assert conn._cleanup_handle is not None def test_cleanup_close_ssl_transport(): proto = mock.Mock() key = ('localhost', 80, True) testset = {key: [(proto, 10)]} loop = mock.Mock() loop.time.return_value = 300 conn = aiohttp.BaseConnector(loop=loop, enable_cleanup_closed=True) conn._conns = testset existing_handle = conn._cleanup_handle = mock.Mock() conn._cleanup() assert existing_handle.cancel.called assert conn._conns == {} assert conn._cleanup_closed_transports == [proto.close.return_value] def test_cleanup2(): testset = {1: [(mock.Mock(), 300)]} testset[1][0][0].is_connected.return_value = True loop = mock.Mock() loop.time.return_value = 300 conn = aiohttp.BaseConnector(loop=loop, keepalive_timeout=10) conn._conns = testset conn._cleanup() assert conn._conns == testset assert 
conn._cleanup_handle is not None loop.call_at.assert_called_with(310, mock.ANY, mock.ANY) conn.close() def test_cleanup3(): key = ('localhost', 80, False) testset = {key: [(mock.Mock(), 290.1), (mock.Mock(), 305.1)]} testset[key][0][0].is_connected.return_value = True loop = mock.Mock() loop.time.return_value = 308.5 conn = aiohttp.BaseConnector(loop=loop, keepalive_timeout=10) conn._conns = testset conn._cleanup() assert conn._conns == {key: [testset[key][1]]} assert conn._cleanup_handle is not None loop.call_at.assert_called_with(319, mock.ANY, mock.ANY) conn.close() def test_cleanup_closed(loop, mocker): if not hasattr(loop, '__dict__'): pytest.skip("can not override loop attributes") mocker.spy(loop, 'call_at') conn = aiohttp.BaseConnector(loop=loop, enable_cleanup_closed=True) tr = mock.Mock() conn._cleanup_closed_handle = cleanup_closed_handle = mock.Mock() conn._cleanup_closed_transports = [tr] conn._cleanup_closed() assert tr.abort.called assert not conn._cleanup_closed_transports assert loop.call_at.called assert cleanup_closed_handle.cancel.called def test_cleanup_closed_disabled(loop, mocker): conn = aiohttp.BaseConnector( loop=loop, enable_cleanup_closed=False) tr = mock.Mock() conn._cleanup_closed_transports = [tr] conn._cleanup_closed() assert tr.abort.called assert not conn._cleanup_closed_transports def test_tcp_connector_ctor(loop): conn = aiohttp.TCPConnector(loop=loop) assert conn._ssl is None assert conn.use_dns_cache assert conn.family == 0 def test_tcp_connector_ctor_fingerprint_valid(loop): valid = aiohttp.Fingerprint(hashlib.sha256(b"foo").digest()) conn = aiohttp.TCPConnector(ssl=valid, loop=loop) assert conn._ssl is valid def test_insecure_fingerprint_md5(loop): with pytest.raises(ValueError): aiohttp.TCPConnector( ssl=aiohttp.Fingerprint(hashlib.md5(b"foo").digest()), loop=loop) def test_insecure_fingerprint_sha1(loop): with pytest.raises(ValueError): aiohttp.TCPConnector( ssl=aiohttp.Fingerprint(hashlib.sha1(b"foo").digest()), loop=loop) 
def test_tcp_connector_clear_dns_cache(loop): conn = aiohttp.TCPConnector(loop=loop) hosts = ['a', 'b'] conn._cached_hosts.add(('localhost', 123), hosts) conn._cached_hosts.add(('localhost', 124), hosts) conn.clear_dns_cache('localhost', 123) with pytest.raises(KeyError): conn._cached_hosts.next_addrs(('localhost', 123)) assert conn._cached_hosts.next_addrs(('localhost', 124)) == hosts # Remove removed element is OK conn.clear_dns_cache('localhost', 123) with pytest.raises(KeyError): conn._cached_hosts.next_addrs(('localhost', 123)) conn.clear_dns_cache() with pytest.raises(KeyError): conn._cached_hosts.next_addrs(('localhost', 124)) def test_tcp_connector_clear_dns_cache_bad_args(loop): conn = aiohttp.TCPConnector(loop=loop) with pytest.raises(ValueError): conn.clear_dns_cache('localhost') def test_dont_recreate_ssl_context(loop): conn = aiohttp.TCPConnector(loop=loop) ctx = conn._make_ssl_context(True) assert ctx is conn._make_ssl_context(True) def test_dont_recreate_ssl_context2(loop): conn = aiohttp.TCPConnector(loop=loop) ctx = conn._make_ssl_context(False) assert ctx is conn._make_ssl_context(False) def test___get_ssl_context1(loop): conn = aiohttp.TCPConnector(loop=loop) req = mock.Mock() req.is_ssl.return_value = False assert conn._get_ssl_context(req) is None def test___get_ssl_context2(loop): ctx = ssl.SSLContext() conn = aiohttp.TCPConnector(loop=loop) req = mock.Mock() req.is_ssl.return_value = True req.ssl = ctx assert conn._get_ssl_context(req) is ctx def test___get_ssl_context3(loop): ctx = ssl.SSLContext() conn = aiohttp.TCPConnector(loop=loop, ssl=ctx) req = mock.Mock() req.is_ssl.return_value = True req.ssl = None assert conn._get_ssl_context(req) is ctx def test___get_ssl_context4(loop): ctx = ssl.SSLContext() conn = aiohttp.TCPConnector(loop=loop, ssl=ctx) req = mock.Mock() req.is_ssl.return_value = True req.ssl = False assert conn._get_ssl_context(req) is conn._make_ssl_context(False) def test___get_ssl_context5(loop): ctx = ssl.SSLContext() 
conn = aiohttp.TCPConnector(loop=loop, ssl=ctx) req = mock.Mock() req.is_ssl.return_value = True req.ssl = aiohttp.Fingerprint(hashlib.sha256(b'1').digest()) assert conn._get_ssl_context(req) is conn._make_ssl_context(False) def test___get_ssl_context6(loop): conn = aiohttp.TCPConnector(loop=loop) req = mock.Mock() req.is_ssl.return_value = True req.ssl = None assert conn._get_ssl_context(req) is conn._make_ssl_context(True) def test_close_twice(loop): proto = mock.Mock() conn = aiohttp.BaseConnector(loop=loop) conn._conns[1] = [(proto, object())] conn.close() assert not conn._conns assert proto.close.called assert conn.closed conn._conns = 'Invalid' # fill with garbage conn.close() assert conn.closed def test_close_cancels_cleanup_handle(loop): conn = aiohttp.BaseConnector(loop=loop) conn._release(1, mock.Mock(should_close=False)) assert conn._cleanup_handle is not None conn.close() assert conn._cleanup_handle is None def test_close_abort_closed_transports(loop): tr = mock.Mock() conn = aiohttp.BaseConnector(loop=loop) conn._cleanup_closed_transports.append(tr) conn.close() assert not conn._cleanup_closed_transports assert tr.abort.called assert conn.closed def test_close_cancels_cleanup_closed_handle(loop): conn = aiohttp.BaseConnector(loop=loop, enable_cleanup_closed=True) assert conn._cleanup_closed_handle is not None conn.close() assert conn._cleanup_closed_handle is None def test_ctor_with_default_loop(): loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) conn = aiohttp.BaseConnector() assert loop is conn._loop loop.close() async def test_connect_with_limit(loop, key): proto = mock.Mock() proto.is_connected.return_value = True req = ClientRequest('GET', URL('http://localhost1:80'), loop=loop, response_class=mock.Mock()) conn = aiohttp.BaseConnector(loop=loop, limit=1) conn._conns[key] = [(proto, loop.time())] conn._create_connection = mock.Mock() conn._create_connection.return_value = loop.create_future() 
conn._create_connection.return_value.set_result(proto) connection1 = await conn.connect(req) assert connection1._protocol == proto assert 1 == len(conn._acquired) assert proto in conn._acquired assert key in conn._acquired_per_host assert proto in conn._acquired_per_host[key] acquired = False async def f(): nonlocal acquired connection2 = await conn.connect(req) acquired = True assert 1 == len(conn._acquired) assert 1 == len(conn._acquired_per_host[key]) connection2.release() task = loop.create_task(f()) await asyncio.sleep(0.01, loop=loop) assert not acquired connection1.release() await asyncio.sleep(0, loop=loop) assert acquired await task conn.close() async def test_connect_queued_operation_tracing(loop, key): session = mock.Mock() trace_config_ctx = mock.Mock() on_connection_queued_start = mock.Mock( side_effect=asyncio.coroutine(mock.Mock()) ) on_connection_queued_end = mock.Mock( side_effect=asyncio.coroutine(mock.Mock()) ) trace_config = aiohttp.TraceConfig( trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx) ) trace_config.on_connection_queued_start.append(on_connection_queued_start) trace_config.on_connection_queued_end.append(on_connection_queued_end) trace_config.freeze() traces = [ Trace( session, trace_config, trace_config.trace_config_ctx() ) ] proto = mock.Mock() proto.is_connected.return_value = True req = ClientRequest('GET', URL('http://localhost1:80'), loop=loop, response_class=mock.Mock()) conn = aiohttp.BaseConnector(loop=loop, limit=1) conn._conns[key] = [(proto, loop.time())] conn._create_connection = mock.Mock() conn._create_connection.return_value = loop.create_future() conn._create_connection.return_value.set_result(proto) connection1 = await conn.connect(req, traces=traces) async def f(): connection2 = await conn.connect( req, traces=traces ) on_connection_queued_start.assert_called_with( session, trace_config_ctx, aiohttp.TraceConnectionQueuedStartParams() ) on_connection_queued_end.assert_called_with( session, 
trace_config_ctx, aiohttp.TraceConnectionQueuedEndParams() ) connection2.release() task = asyncio.ensure_future(f(), loop=loop) await asyncio.sleep(0.01, loop=loop) connection1.release() await task conn.close() async def test_connect_reuseconn_tracing(loop, key): session = mock.Mock() trace_config_ctx = mock.Mock() on_connection_reuseconn = mock.Mock( side_effect=asyncio.coroutine(mock.Mock()) ) trace_config = aiohttp.TraceConfig( trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx) ) trace_config.on_connection_reuseconn.append(on_connection_reuseconn) trace_config.freeze() traces = [ Trace( session, trace_config, trace_config.trace_config_ctx() ) ] proto = mock.Mock() proto.is_connected.return_value = True req = ClientRequest('GET', URL('http://localhost1:80'), loop=loop, response_class=mock.Mock()) conn = aiohttp.BaseConnector(loop=loop, limit=1) conn._conns[key] = [(proto, loop.time())] await conn.connect(req, traces=traces) on_connection_reuseconn.assert_called_with( session, trace_config_ctx, aiohttp.TraceConnectionReuseconnParams() ) conn.close() async def test_connect_with_limit_and_limit_per_host(loop, key): proto = mock.Mock() proto.is_connected.return_value = True req = ClientRequest('GET', URL('http://localhost1:80'), loop=loop) conn = aiohttp.BaseConnector(loop=loop, limit=1000, limit_per_host=1) conn._conns[key] = [(proto, loop.time())] conn._create_connection = mock.Mock() conn._create_connection.return_value = loop.create_future() conn._create_connection.return_value.set_result(proto) acquired = False connection1 = await conn.connect(req) async def f(): nonlocal acquired connection2 = await conn.connect(req) acquired = True assert 1 == len(conn._acquired) assert 1 == len(conn._acquired_per_host[key]) connection2.release() task = loop.create_task(f()) await asyncio.sleep(0.01, loop=loop) assert not acquired connection1.release() await asyncio.sleep(0, loop=loop) assert acquired await task conn.close() async def 
test_connect_with_no_limit_and_limit_per_host(loop, key): proto = mock.Mock() proto.is_connected.return_value = True req = ClientRequest('GET', URL('http://localhost1:80'), loop=loop) conn = aiohttp.BaseConnector(loop=loop, limit=0, limit_per_host=1) conn._conns[key] = [(proto, loop.time())] conn._create_connection = mock.Mock() conn._create_connection.return_value = loop.create_future() conn._create_connection.return_value.set_result(proto) acquired = False connection1 = await conn.connect(req) async def f(): nonlocal acquired connection2 = await conn.connect(req) acquired = True connection2.release() task = loop.create_task(f()) await asyncio.sleep(0.01, loop=loop) assert not acquired connection1.release() await asyncio.sleep(0, loop=loop) assert acquired await task conn.close() async def test_connect_with_no_limits(loop, key): proto = mock.Mock() proto.is_connected.return_value = True req = ClientRequest('GET', URL('http://localhost1:80'), loop=loop) conn = aiohttp.BaseConnector(loop=loop, limit=0, limit_per_host=0) conn._conns[key] = [(proto, loop.time())] conn._create_connection = mock.Mock() conn._create_connection.return_value = loop.create_future() conn._create_connection.return_value.set_result(proto) acquired = False connection1 = await conn.connect(req) async def f(): nonlocal acquired connection2 = await conn.connect(req) acquired = True assert 1 == len(conn._acquired) assert 1 == len(conn._acquired_per_host[key]) connection2.release() task = loop.create_task(f()) await asyncio.sleep(0.01, loop=loop) assert acquired connection1.release() await task conn.close() async def test_connect_with_limit_cancelled(loop): proto = mock.Mock() proto.is_connected.return_value = True req = ClientRequest('GET', URL('http://host:80'), loop=loop) conn = aiohttp.BaseConnector(loop=loop, limit=1) key = ('host', 80, False) conn._conns[key] = [(proto, loop.time())] conn._create_connection = mock.Mock() conn._create_connection.return_value = loop.create_future() 
conn._create_connection.return_value.set_result(proto) connection = await conn.connect(req) assert connection._protocol == proto assert connection.transport == proto.transport assert 1 == len(conn._acquired) with pytest.raises(asyncio.TimeoutError): # limit exhausted await asyncio.wait_for(conn.connect(req), 0.01, loop=loop) connection.close() async def test_connect_with_capacity_release_waiters(loop): def check_with_exc(err): conn = aiohttp.BaseConnector(limit=1, loop=loop) conn._create_connection = mock.Mock() conn._create_connection.return_value = \ loop.create_future() conn._create_connection.return_value.set_exception(err) with pytest.raises(Exception): req = mock.Mock() yield from conn.connect(req) assert not conn._waiters check_with_exc(OSError(1, 'permission error')) check_with_exc(RuntimeError()) check_with_exc(asyncio.TimeoutError()) async def test_connect_with_limit_concurrent(loop): proto = mock.Mock() proto.should_close = False proto.is_connected.return_value = True req = ClientRequest('GET', URL('http://host:80'), loop=loop) max_connections = 2 num_connections = 0 conn = aiohttp.BaseConnector(limit=max_connections, loop=loop) # Use a real coroutine for _create_connection; a mock would mask # problems that only happen when the method yields. async def create_connection(req, traces=None): nonlocal num_connections num_connections += 1 await asyncio.sleep(0, loop=loop) # Make a new transport mock each time because acquired # transports are stored in a set. Reusing the same object # messes with the count. proto = mock.Mock(should_close=False) proto.is_connected.return_value = True return proto conn._create_connection = create_connection # Simulate something like a crawler. It opens a connection, does # something with it, closes it, then creates tasks that make more # connections and waits for them to finish. The crawler is started # with multiple concurrent requests and stops when it hits a # predefined maximum number of requests. 
max_requests = 10 num_requests = 0 start_requests = max_connections + 1 async def f(start=True): nonlocal num_requests if num_requests == max_requests: return num_requests += 1 if not start: connection = await conn.connect(req) await asyncio.sleep(0, loop=loop) connection.release() tasks = [ loop.create_task(f(start=False)) for i in range(start_requests) ] await asyncio.wait(tasks, loop=loop) await f() conn.close() assert max_connections == num_connections async def test_close_with_acquired_connection(loop): proto = mock.Mock() proto.is_connected.return_value = True req = ClientRequest('GET', URL('http://host:80'), loop=loop) conn = aiohttp.BaseConnector(loop=loop, limit=1) key = ('host', 80, False) conn._conns[key] = [(proto, loop.time())] conn._create_connection = mock.Mock() conn._create_connection.return_value = loop.create_future() conn._create_connection.return_value.set_result(proto) connection = await conn.connect(req) assert 1 == len(conn._acquired) conn.close() assert 0 == len(conn._acquired) assert conn.closed proto.close.assert_called_with() assert not connection.closed connection.close() assert connection.closed def test_default_force_close(loop): connector = aiohttp.BaseConnector(loop=loop) assert not connector.force_close def test_limit_property(loop): conn = aiohttp.BaseConnector(loop=loop, limit=15) assert 15 == conn.limit conn.close() def test_limit_per_host_property(loop): conn = aiohttp.BaseConnector(loop=loop, limit_per_host=15) assert 15 == conn.limit_per_host conn.close() def test_limit_property_default(loop): conn = aiohttp.BaseConnector(loop=loop) assert conn.limit == 100 conn.close() def test_limit_per_host_property_default(loop): conn = aiohttp.BaseConnector(loop=loop) assert conn.limit_per_host == 0 conn.close() def test_force_close_and_explicit_keep_alive(loop): with pytest.raises(ValueError): aiohttp.BaseConnector(loop=loop, keepalive_timeout=30, force_close=True) conn = aiohttp.BaseConnector(loop=loop, force_close=True, 
keepalive_timeout=None) assert conn conn = aiohttp.BaseConnector(loop=loop, force_close=True) assert conn async def test_error_on_connection(loop): conn = aiohttp.BaseConnector(limit=1, loop=loop) req = mock.Mock() req.connection_key = 'key' proto = mock.Mock() i = 0 fut = loop.create_future() exc = OSError() async def create_connection(req, traces=None): nonlocal i i += 1 if i == 1: await fut raise exc elif i == 2: return proto conn._create_connection = create_connection t1 = loop.create_task(conn.connect(req)) t2 = loop.create_task(conn.connect(req)) await asyncio.sleep(0, loop=loop) assert not t1.done() assert not t2.done() assert len(conn._acquired_per_host['key']) == 1 fut.set_result(None) with pytest.raises(OSError): await t1 ret = await t2 assert len(conn._acquired_per_host['key']) == 1 assert ret._key == 'key' assert ret.protocol == proto assert proto in conn._acquired async def test_error_on_connection_with_cancelled_waiter(loop): conn = aiohttp.BaseConnector(limit=1, loop=loop) req = mock.Mock() req.connection_key = 'key' proto = mock.Mock() i = 0 fut1 = loop.create_future() fut2 = loop.create_future() exc = OSError() async def create_connection(req, traces=None): nonlocal i i += 1 if i == 1: await fut1 raise exc if i == 2: await fut2 elif i == 3: return proto conn._create_connection = create_connection t1 = loop.create_task(conn.connect(req)) t2 = loop.create_task(conn.connect(req)) t3 = loop.create_task(conn.connect(req)) await asyncio.sleep(0, loop=loop) assert not t1.done() assert not t2.done() assert len(conn._acquired_per_host['key']) == 1 fut1.set_result(None) fut2.cancel() with pytest.raises(OSError): await t1 with pytest.raises(asyncio.CancelledError): await t2 ret = await t3 assert len(conn._acquired_per_host['key']) == 1 assert ret._key == 'key' assert ret.protocol == proto assert proto in conn._acquired async def test_tcp_connector(aiohttp_client, loop): async def handler(request): return web.Response() app = web.Application() 
app.router.add_get('/', handler) client = await aiohttp_client(app) r = await client.get('/') assert r.status == 200 @pytest.mark.skipif(not hasattr(socket, 'AF_UNIX'), reason="requires unix socket") def test_unix_connector_not_found(loop): connector = aiohttp.UnixConnector('/' + uuid.uuid4().hex, loop=loop) req = ClientRequest( 'GET', URL('http://www.python.org'), loop=loop, ) with pytest.raises(aiohttp.ClientConnectorError): loop.run_until_complete(connector.connect(req)) @pytest.mark.skipif(not hasattr(socket, 'AF_UNIX'), reason="requires unix socket") def test_unix_connector_permission(loop): loop.create_unix_connection = make_mocked_coro( raise_exception=PermissionError()) connector = aiohttp.UnixConnector('/' + uuid.uuid4().hex, loop=loop) req = ClientRequest( 'GET', URL('http://www.python.org'), loop=loop, ) with pytest.raises(aiohttp.ClientConnectorError): loop.run_until_complete(connector.connect(req)) def test_default_use_dns_cache(loop): conn = aiohttp.TCPConnector(loop=loop) assert conn.use_dns_cache class TestHttpClientConnector(unittest.TestCase): def setUp(self): self.handler = None self.loop = asyncio.new_event_loop() asyncio.set_event_loop(None) def tearDown(self): if self.handler: self.loop.run_until_complete(self.handler.shutdown()) self.loop.stop() self.loop.run_forever() self.loop.close() gc.collect() async def create_server(self, method, path, handler, ssl_context=None): app = web.Application() app.router.add_route(method, path, handler) port = unused_port() self.handler = app.make_handler(loop=self.loop, tcp_keepalive=False) srv = await self.loop.create_server( self.handler, '127.0.0.1', port, ssl=ssl_context) scheme = 's' if ssl_context is not None else '' url = "http{}://127.0.0.1:{}".format(scheme, port) + path self.addCleanup(srv.close) return app, srv, url async def create_unix_server(self, method, path, handler): tmpdir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, tmpdir) app = web.Application() app.router.add_route(method, path, 
handler) self.handler = app.make_handler( loop=self.loop, tcp_keepalive=False, access_log=None) sock_path = os.path.join(tmpdir, 'socket.sock') srv = await self.loop.create_unix_server( self.handler, sock_path) url = "http://127.0.0.1" + path self.addCleanup(srv.close) return app, srv, url, sock_path def test_tcp_connector_raise_connector_ssl_error(self): async def handler(request): return web.Response() here = os.path.join(os.path.dirname(__file__), '..', 'tests') keyfile = os.path.join(here, 'sample.key') certfile = os.path.join(here, 'sample.crt') sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23) sslcontext.load_cert_chain(certfile, keyfile) app, srv, url = self.loop.run_until_complete( self.create_server('get', '/', handler, ssl_context=sslcontext) ) port = unused_port() conn = aiohttp.TCPConnector(loop=self.loop, local_addr=('127.0.0.1', port)) session = aiohttp.ClientSession(connector=conn) with pytest.raises(aiohttp.ClientConnectorSSLError) as ctx: self.loop.run_until_complete(session.request('get', url)) self.assertIsInstance(ctx.value.os_error, ssl.SSLError) self.assertIsInstance(ctx.value, aiohttp.ClientSSLError) self.loop.run_until_complete(session.close()) conn.close() def test_tcp_connector_do_not_raise_connector_ssl_error(self): async def handler(request): return web.Response() here = os.path.join(os.path.dirname(__file__), '..', 'tests') keyfile = os.path.join(here, 'sample.key') certfile = os.path.join(here, 'sample.crt') sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23) sslcontext.load_cert_chain(certfile, keyfile) app, srv, url = self.loop.run_until_complete( self.create_server('get', '/', handler, ssl_context=sslcontext) ) port = unused_port() conn = aiohttp.TCPConnector(loop=self.loop, local_addr=('127.0.0.1', port)) session = aiohttp.ClientSession(connector=conn) r = self.loop.run_until_complete( session.request('get', url, ssl=sslcontext)) r.release() first_conn = next(iter(conn._conns.values()))[0][0] try: _sslcontext = 
first_conn.transport._ssl_protocol._sslcontext except AttributeError: _sslcontext = first_conn.transport._sslcontext self.assertIs(_sslcontext, sslcontext) r.close() self.loop.run_until_complete(session.close()) conn.close() def test_tcp_connector_uses_provided_local_addr(self): async def handler(request): return web.Response() app, srv, url = self.loop.run_until_complete( self.create_server('get', '/', handler) ) port = unused_port() conn = aiohttp.TCPConnector(loop=self.loop, local_addr=('127.0.0.1', port)) session = aiohttp.ClientSession(connector=conn) r = self.loop.run_until_complete( session.request('get', url) ) r.release() first_conn = next(iter(conn._conns.values()))[0][0] self.assertEqual( first_conn.transport._sock.getsockname(), ('127.0.0.1', port)) r.close() self.loop.run_until_complete(session.close()) conn.close() @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'requires unix') def test_unix_connector(self): async def handler(request): return web.Response() app, srv, url, sock_path = self.loop.run_until_complete( self.create_unix_server('get', '/', handler)) connector = aiohttp.UnixConnector(sock_path, loop=self.loop) self.assertEqual(sock_path, connector.path) session = client.ClientSession( connector=connector, loop=self.loop) r = self.loop.run_until_complete( session.request('get', url)) self.assertEqual(r.status, 200) r.close() self.loop.run_until_complete(session.close()) def test_resolver_not_called_with_address_is_ip(self): resolver = mock.MagicMock() connector = aiohttp.TCPConnector(resolver=resolver, loop=self.loop) req = ClientRequest('GET', URL('http://127.0.0.1:{}'.format(unused_port())), loop=self.loop, response_class=mock.Mock()) with self.assertRaises(OSError): self.loop.run_until_complete(connector.connect(req)) resolver.resolve.assert_not_called() class TestDNSCacheTable: @pytest.fixture def dns_cache_table(self): return _DNSCacheTable() def test_next_addrs_basic(self, dns_cache_table): dns_cache_table.add('localhost', 
['127.0.0.1']) dns_cache_table.add('foo', ['127.0.0.2']) addrs = dns_cache_table.next_addrs('localhost') assert addrs == ['127.0.0.1'] addrs = dns_cache_table.next_addrs('foo') assert addrs == ['127.0.0.2'] with pytest.raises(KeyError): dns_cache_table.next_addrs('no-such-host') def test_remove(self, dns_cache_table): dns_cache_table.add('localhost', ['127.0.0.1']) dns_cache_table.remove('localhost') with pytest.raises(KeyError): dns_cache_table.next_addrs('localhost') def test_clear(self, dns_cache_table): dns_cache_table.add('localhost', ['127.0.0.1']) dns_cache_table.clear() with pytest.raises(KeyError): dns_cache_table.next_addrs('localhost') def test_not_expired_ttl_None(self, dns_cache_table): dns_cache_table.add('localhost', ['127.0.0.1']) assert not dns_cache_table.expired('localhost') def test_not_expired_ttl(self): dns_cache_table = _DNSCacheTable(ttl=0.1) dns_cache_table.add('localhost', ['127.0.0.1']) assert not dns_cache_table.expired('localhost') async def test_expired_ttl(self, loop): dns_cache_table = _DNSCacheTable(ttl=0.01) dns_cache_table.add('localhost', ['127.0.0.1']) await asyncio.sleep(0.01, loop=loop) assert dns_cache_table.expired('localhost') def test_next_addrs(self, dns_cache_table): dns_cache_table.add('foo', ['127.0.0.1', '127.0.0.2', '127.0.0.3']) # Each calls to next_addrs return the hosts using # a round robin strategy. 
addrs = dns_cache_table.next_addrs('foo') assert addrs == ['127.0.0.1', '127.0.0.2', '127.0.0.3'] addrs = dns_cache_table.next_addrs('foo') assert addrs == ['127.0.0.2', '127.0.0.3', '127.0.0.1'] addrs = dns_cache_table.next_addrs('foo') assert addrs == ['127.0.0.3', '127.0.0.1', '127.0.0.2'] addrs = dns_cache_table.next_addrs('foo') assert addrs == ['127.0.0.1', '127.0.0.2', '127.0.0.3'] def test_next_addrs_single(self, dns_cache_table): dns_cache_table.add('foo', ['127.0.0.1']) addrs = dns_cache_table.next_addrs('foo') assert addrs == ['127.0.0.1'] addrs = dns_cache_table.next_addrs('foo') assert addrs == ['127.0.0.1'] aiohttp-3.0.1/tests/test_cookiejar.py0000666000000000000000000005023713240304665016050 0ustar 00000000000000import asyncio import datetime import itertools import os import tempfile import unittest from http.cookies import SimpleCookie from unittest import mock import pytest from yarl import URL from aiohttp import CookieJar, DummyCookieJar @pytest.fixture def cookies_to_send(): return SimpleCookie( "shared-cookie=first; " "domain-cookie=second; Domain=example.com; " "subdomain1-cookie=third; Domain=test1.example.com; " "subdomain2-cookie=fourth; Domain=test2.example.com; " "dotted-domain-cookie=fifth; Domain=.example.com; " "different-domain-cookie=sixth; Domain=different.org; " "secure-cookie=seventh; Domain=secure.com; Secure; " "no-path-cookie=eighth; Domain=pathtest.com; " "path1-cookie=nineth; Domain=pathtest.com; Path=/; " "path2-cookie=tenth; Domain=pathtest.com; Path=/one; " "path3-cookie=eleventh; Domain=pathtest.com; Path=/one/two; " "path4-cookie=twelfth; Domain=pathtest.com; Path=/one/two/; " "expires-cookie=thirteenth; Domain=expirestest.com; Path=/;" " Expires=Tue, 1 Jan 1980 12:00:00 GMT; " "max-age-cookie=fourteenth; Domain=maxagetest.com; Path=/;" " Max-Age=60; " "invalid-max-age-cookie=fifteenth; Domain=invalid-values.com; " " Max-Age=string; " "invalid-expires-cookie=sixteenth; Domain=invalid-values.com; " " Expires=string;" ) 
@pytest.fixture def cookies_to_receive(): return SimpleCookie( "unconstrained-cookie=first; Path=/; " "domain-cookie=second; Domain=example.com; Path=/; " "subdomain1-cookie=third; Domain=test1.example.com; Path=/; " "subdomain2-cookie=fourth; Domain=test2.example.com; Path=/; " "dotted-domain-cookie=fifth; Domain=.example.com; Path=/; " "different-domain-cookie=sixth; Domain=different.org; Path=/; " "no-path-cookie=seventh; Domain=pathtest.com; " "path-cookie=eighth; Domain=pathtest.com; Path=/somepath; " "wrong-path-cookie=nineth; Domain=pathtest.com; Path=somepath;" ) def test_date_parsing(): parse_func = CookieJar._parse_date utc = datetime.timezone.utc assert parse_func("") is None # 70 -> 1970 assert parse_func("Tue, 1 Jan 70 00:00:00 GMT") == \ datetime.datetime(1970, 1, 1, tzinfo=utc) # 10 -> 2010 assert parse_func("Tue, 1 Jan 10 00:00:00 GMT") == \ datetime.datetime(2010, 1, 1, tzinfo=utc) # No day of week string assert parse_func("1 Jan 1970 00:00:00 GMT") == \ datetime.datetime(1970, 1, 1, tzinfo=utc) # No timezone string assert parse_func("Tue, 1 Jan 1970 00:00:00") == \ datetime.datetime(1970, 1, 1, tzinfo=utc) # No year assert parse_func("Tue, 1 Jan 00:00:00 GMT") is None # No month assert parse_func("Tue, 1 1970 00:00:00 GMT") is None # No day of month assert parse_func("Tue, Jan 1970 00:00:00 GMT") is None # No time assert parse_func("Tue, 1 Jan 1970 GMT") is None # Invalid day of month assert parse_func("Tue, 0 Jan 1970 00:00:00 GMT") is None # Invalid year assert parse_func("Tue, 1 Jan 1500 00:00:00 GMT") is None # Invalid time assert parse_func("Tue, 1 Jan 1970 77:88:99 GMT") is None def test_domain_matching(): test_func = CookieJar._is_domain_match assert test_func("test.com", "test.com") assert test_func("test.com", "sub.test.com") assert not test_func("test.com", "") assert not test_func("test.com", "test.org") assert not test_func("diff-test.com", "test.com") assert not test_func("test.com", "diff-test.com") assert not test_func("test.com", 
"127.0.0.1") def test_path_matching(): test_func = CookieJar._is_path_match assert test_func("/", "") assert test_func("", "/") assert test_func("/file", "") assert test_func("/folder/file", "") assert test_func("/", "/") assert test_func("/file", "/") assert test_func("/file", "/file") assert test_func("/folder/", "/folder/") assert test_func("/folder/", "/") assert test_func("/folder/file", "/") assert not test_func("/", "/file") assert not test_func("/", "/folder/") assert not test_func("/file", "/folder/file") assert not test_func("/folder/", "/folder/file") assert not test_func("/different-file", "/file") assert not test_func("/different-folder/", "/folder/") def test_constructor(loop, cookies_to_send, cookies_to_receive): jar = CookieJar(loop=loop) jar.update_cookies(cookies_to_send) jar_cookies = SimpleCookie() for cookie in jar: dict.__setitem__(jar_cookies, cookie.key, cookie) expected_cookies = cookies_to_send assert jar_cookies == expected_cookies assert jar._loop is loop def test_save_load(loop, cookies_to_send, cookies_to_receive): file_path = tempfile.mkdtemp() + '/aiohttp.test.cookie' # export cookie jar jar_save = CookieJar(loop=loop) jar_save.update_cookies(cookies_to_receive) jar_save.save(file_path=file_path) jar_load = CookieJar(loop=loop) jar_load.load(file_path=file_path) jar_test = SimpleCookie() for cookie in jar_load: jar_test[cookie.key] = cookie os.unlink(file_path) assert jar_test == cookies_to_receive def test_update_cookie_with_unicode_domain(loop): cookies = ( "idna-domain-first=first; Domain=xn--9caa.com; Path=/;", "idna-domain-second=second; Domain=xn--9caa.com; Path=/;", ) jar = CookieJar(loop=loop) jar.update_cookies(SimpleCookie(cookies[0]), URL("http://éé.com/")) jar.update_cookies(SimpleCookie(cookies[1]), URL("http://xn--9caa.com/")) jar_test = SimpleCookie() for cookie in jar: jar_test[cookie.key] = cookie assert jar_test == SimpleCookie(" ".join(cookies)) def test_filter_cookie_with_unicode_domain(loop): jar = 
CookieJar(loop=loop) jar.update_cookies(SimpleCookie( "idna-domain-first=first; Domain=xn--9caa.com; Path=/; " )) assert len(jar.filter_cookies(URL("http://éé.com"))) == 1 assert len(jar.filter_cookies(URL("http://xn--9caa.com"))) == 1 def test_ctor_ith_default_loop(loop): asyncio.set_event_loop(loop) jar = CookieJar() assert jar._loop is loop def test_domain_filter_ip_cookie_send(loop): jar = CookieJar(loop=loop) cookies = SimpleCookie( "shared-cookie=first; " "domain-cookie=second; Domain=example.com; " "subdomain1-cookie=third; Domain=test1.example.com; " "subdomain2-cookie=fourth; Domain=test2.example.com; " "dotted-domain-cookie=fifth; Domain=.example.com; " "different-domain-cookie=sixth; Domain=different.org; " "secure-cookie=seventh; Domain=secure.com; Secure; " "no-path-cookie=eighth; Domain=pathtest.com; " "path1-cookie=nineth; Domain=pathtest.com; Path=/; " "path2-cookie=tenth; Domain=pathtest.com; Path=/one; " "path3-cookie=eleventh; Domain=pathtest.com; Path=/one/two; " "path4-cookie=twelfth; Domain=pathtest.com; Path=/one/two/; " "expires-cookie=thirteenth; Domain=expirestest.com; Path=/;" " Expires=Tue, 1 Jan 1980 12:00:00 GMT; " "max-age-cookie=fourteenth; Domain=maxagetest.com; Path=/;" " Max-Age=60; " "invalid-max-age-cookie=fifteenth; Domain=invalid-values.com; " " Max-Age=string; " "invalid-expires-cookie=sixteenth; Domain=invalid-values.com; " " Expires=string;" ) jar.update_cookies(cookies) cookies_sent = jar.filter_cookies(URL("http://1.2.3.4/")).output( header='Cookie:') assert cookies_sent == 'Cookie: shared-cookie=first' def test_domain_filter_ip_cookie_receive(loop, cookies_to_receive): jar = CookieJar(loop=loop) jar.update_cookies(cookies_to_receive, URL("http://1.2.3.4/")) assert len(jar) == 0 def test_preserving_ip_domain_cookies(loop): jar = CookieJar(loop=loop, unsafe=True) jar.update_cookies(SimpleCookie( "shared-cookie=first; " "ip-cookie=second; Domain=127.0.0.1;" )) cookies_sent = 
jar.filter_cookies(URL("http://127.0.0.1/")).output( header='Cookie:') assert cookies_sent == ('Cookie: ip-cookie=second\r\n' 'Cookie: shared-cookie=first') def test_preserving_quoted_cookies(loop): jar = CookieJar(loop=loop, unsafe=True) jar.update_cookies(SimpleCookie( "ip-cookie=\"second\"; Domain=127.0.0.1;" )) cookies_sent = jar.filter_cookies(URL("http://127.0.0.1/")).output( header='Cookie:') assert cookies_sent == 'Cookie: ip-cookie=\"second\"' def test_ignore_domain_ending_with_dot(loop): jar = CookieJar(loop=loop, unsafe=True) jar.update_cookies(SimpleCookie("cookie=val; Domain=example.com.;"), URL("http://www.example.com")) cookies_sent = jar.filter_cookies(URL("http://www.example.com/")) assert cookies_sent.output(header='Cookie:') == "Cookie: cookie=val" cookies_sent = jar.filter_cookies(URL("http://example.com/")) assert cookies_sent.output(header='Cookie:') == "" class TestCookieJarBase(unittest.TestCase): def setUp(self): self.loop = asyncio.new_event_loop() asyncio.set_event_loop(None) # N.B. 
those need to be overridden in child test cases self.jar = CookieJar(loop=self.loop) def tearDown(self): self.loop.close() def request_reply_with_same_url(self, url): self.jar.update_cookies(self.cookies_to_send) cookies_sent = self.jar.filter_cookies(URL(url)) self.jar.clear() self.jar.update_cookies(self.cookies_to_receive, URL(url)) cookies_received = SimpleCookie() for cookie in self.jar: dict.__setitem__(cookies_received, cookie.key, cookie) self.jar.clear() return cookies_sent, cookies_received class TestCookieJarSafe(TestCookieJarBase): def setUp(self): super().setUp() self.cookies_to_send = SimpleCookie( "shared-cookie=first; " "domain-cookie=second; Domain=example.com; " "subdomain1-cookie=third; Domain=test1.example.com; " "subdomain2-cookie=fourth; Domain=test2.example.com; " "dotted-domain-cookie=fifth; Domain=.example.com; " "different-domain-cookie=sixth; Domain=different.org; " "secure-cookie=seventh; Domain=secure.com; Secure; " "no-path-cookie=eighth; Domain=pathtest.com; " "path1-cookie=nineth; Domain=pathtest.com; Path=/; " "path2-cookie=tenth; Domain=pathtest.com; Path=/one; " "path3-cookie=eleventh; Domain=pathtest.com; Path=/one/two; " "path4-cookie=twelfth; Domain=pathtest.com; Path=/one/two/; " "expires-cookie=thirteenth; Domain=expirestest.com; Path=/;" " Expires=Tue, 1 Jan 1980 12:00:00 GMT; " "max-age-cookie=fourteenth; Domain=maxagetest.com; Path=/;" " Max-Age=60; " "invalid-max-age-cookie=fifteenth; Domain=invalid-values.com; " " Max-Age=string; " "invalid-expires-cookie=sixteenth; Domain=invalid-values.com; " " Expires=string;" ) self.cookies_to_receive = SimpleCookie( "unconstrained-cookie=first; Path=/; " "domain-cookie=second; Domain=example.com; Path=/; " "subdomain1-cookie=third; Domain=test1.example.com; Path=/; " "subdomain2-cookie=fourth; Domain=test2.example.com; Path=/; " "dotted-domain-cookie=fifth; Domain=.example.com; Path=/; " "different-domain-cookie=sixth; Domain=different.org; Path=/; " "no-path-cookie=seventh; 
Domain=pathtest.com; " "path-cookie=eighth; Domain=pathtest.com; Path=/somepath; " "wrong-path-cookie=nineth; Domain=pathtest.com; Path=somepath;" ) self.jar = CookieJar(loop=self.loop) def timed_request(self, url, update_time, send_time): with mock.patch.object(self.loop, 'time', return_value=update_time): self.jar.update_cookies(self.cookies_to_send) with mock.patch.object(self.loop, 'time', return_value=send_time): cookies_sent = self.jar.filter_cookies(URL(url)) self.jar.clear() return cookies_sent def test_domain_filter_same_host(self): cookies_sent, cookies_received = ( self.request_reply_with_same_url("http://example.com/")) self.assertEqual(set(cookies_sent.keys()), { "shared-cookie", "domain-cookie", "dotted-domain-cookie" }) self.assertEqual(set(cookies_received.keys()), { "unconstrained-cookie", "domain-cookie", "dotted-domain-cookie" }) def test_domain_filter_same_host_and_subdomain(self): cookies_sent, cookies_received = ( self.request_reply_with_same_url("http://test1.example.com/")) self.assertEqual(set(cookies_sent.keys()), { "shared-cookie", "domain-cookie", "subdomain1-cookie", "dotted-domain-cookie" }) self.assertEqual(set(cookies_received.keys()), { "unconstrained-cookie", "domain-cookie", "subdomain1-cookie", "dotted-domain-cookie" }) def test_domain_filter_same_host_diff_subdomain(self): cookies_sent, cookies_received = ( self.request_reply_with_same_url("http://different.example.com/")) self.assertEqual(set(cookies_sent.keys()), { "shared-cookie", "domain-cookie", "dotted-domain-cookie" }) self.assertEqual(set(cookies_received.keys()), { "unconstrained-cookie", "domain-cookie", "dotted-domain-cookie" }) def test_domain_filter_diff_host(self): cookies_sent, cookies_received = ( self.request_reply_with_same_url("http://different.org/")) self.assertEqual(set(cookies_sent.keys()), { "shared-cookie", "different-domain-cookie" }) self.assertEqual(set(cookies_received.keys()), { "unconstrained-cookie", "different-domain-cookie" }) def 
test_domain_filter_host_only(self): self.jar.update_cookies(self.cookies_to_receive, URL("http://example.com/")) cookies_sent = self.jar.filter_cookies(URL("http://example.com/")) self.assertIn("unconstrained-cookie", set(cookies_sent.keys())) cookies_sent = self.jar.filter_cookies(URL("http://different.org/")) self.assertNotIn("unconstrained-cookie", set(cookies_sent.keys())) def test_secure_filter(self): cookies_sent, _ = ( self.request_reply_with_same_url("http://secure.com/")) self.assertEqual(set(cookies_sent.keys()), { "shared-cookie" }) cookies_sent, _ = ( self.request_reply_with_same_url("https://secure.com/")) self.assertEqual(set(cookies_sent.keys()), { "shared-cookie", "secure-cookie" }) def test_path_filter_root(self): cookies_sent, _ = ( self.request_reply_with_same_url("http://pathtest.com/")) self.assertEqual(set(cookies_sent.keys()), { "shared-cookie", "no-path-cookie", "path1-cookie" }) def test_path_filter_folder(self): cookies_sent, _ = ( self.request_reply_with_same_url("http://pathtest.com/one/")) self.assertEqual(set(cookies_sent.keys()), { "shared-cookie", "no-path-cookie", "path1-cookie", "path2-cookie" }) def test_path_filter_file(self): cookies_sent, _ = self.request_reply_with_same_url( "http://pathtest.com/one/two") self.assertEqual(set(cookies_sent.keys()), { "shared-cookie", "no-path-cookie", "path1-cookie", "path2-cookie", "path3-cookie" }) def test_path_filter_subfolder(self): cookies_sent, _ = self.request_reply_with_same_url( "http://pathtest.com/one/two/") self.assertEqual(set(cookies_sent.keys()), { "shared-cookie", "no-path-cookie", "path1-cookie", "path2-cookie", "path3-cookie", "path4-cookie" }) def test_path_filter_subsubfolder(self): cookies_sent, _ = self.request_reply_with_same_url( "http://pathtest.com/one/two/three/") self.assertEqual(set(cookies_sent.keys()), { "shared-cookie", "no-path-cookie", "path1-cookie", "path2-cookie", "path3-cookie", "path4-cookie" }) def test_path_filter_different_folder(self): cookies_sent, _ 
= ( self.request_reply_with_same_url("http://pathtest.com/hundred/")) self.assertEqual(set(cookies_sent.keys()), { "shared-cookie", "no-path-cookie", "path1-cookie" }) def test_path_value(self): _, cookies_received = ( self.request_reply_with_same_url("http://pathtest.com/")) self.assertEqual(set(cookies_received.keys()), { "unconstrained-cookie", "no-path-cookie", "path-cookie", "wrong-path-cookie" }) self.assertEqual(cookies_received["no-path-cookie"]["path"], "/") self.assertEqual(cookies_received["path-cookie"]["path"], "/somepath") self.assertEqual(cookies_received["wrong-path-cookie"]["path"], "/") def test_expires(self): ts_before = datetime.datetime( 1975, 1, 1, tzinfo=datetime.timezone.utc).timestamp() ts_after = datetime.datetime( 2115, 1, 1, tzinfo=datetime.timezone.utc).timestamp() cookies_sent = self.timed_request( "http://expirestest.com/", ts_before, ts_before) self.assertEqual(set(cookies_sent.keys()), { "shared-cookie", "expires-cookie" }) cookies_sent = self.timed_request( "http://expirestest.com/", ts_before, ts_after) self.assertEqual(set(cookies_sent.keys()), { "shared-cookie" }) def test_max_age(self): cookies_sent = self.timed_request( "http://maxagetest.com/", 1000, 1000) self.assertEqual(set(cookies_sent.keys()), { "shared-cookie", "max-age-cookie" }) cookies_sent = self.timed_request( "http://maxagetest.com/", 1000, 2000) self.assertEqual(set(cookies_sent.keys()), { "shared-cookie" }) def test_invalid_values(self): cookies_sent, cookies_received = ( self.request_reply_with_same_url("http://invalid-values.com/")) self.assertEqual(set(cookies_sent.keys()), { "shared-cookie", "invalid-max-age-cookie", "invalid-expires-cookie" }) cookie = cookies_sent["invalid-max-age-cookie"] self.assertEqual(cookie["max-age"], "") cookie = cookies_sent["invalid-expires-cookie"] self.assertEqual(cookie["expires"], "") def test_cookie_not_expired_when_added_after_removal(self): """Test case for https://github.com/aio-libs/aiohttp/issues/2084""" timestamps = 
[533588.993, 533588.993, 533588.993, 533588.993, 533589.093, 533589.093] loop = mock.Mock() loop.time.side_effect = itertools.chain( timestamps, itertools.cycle([timestamps[-1]])) jar = CookieJar(unsafe=True, loop=loop) # Remove `foo` cookie. jar.update_cookies(SimpleCookie('foo=""; Max-Age=0')) # Set `foo` cookie to `bar`. jar.update_cookies(SimpleCookie('foo="bar"')) # Assert that there is a cookie. assert len(jar) == 1 def test_dummy_cookie_jar(loop): cookie = SimpleCookie('foo=bar; Domain=example.com;') dummy_jar = DummyCookieJar(loop=loop) assert len(dummy_jar) == 0 dummy_jar.update_cookies(cookie) assert len(dummy_jar) == 0 with pytest.raises(StopIteration): next(iter(dummy_jar)) assert dummy_jar.filter_cookies(URL("http://example.com/")) is None dummy_jar.clear() aiohttp-3.0.1/tests/test_flowcontrol_streams.py0000666000000000000000000001205613240304665020205 0ustar 00000000000000import asyncio import unittest from unittest import mock from aiohttp import streams class TestFlowControlStreamReader(unittest.TestCase): def setUp(self): self.protocol = mock.Mock(_reading_paused=False) self.transp = self.protocol.transport self.loop = asyncio.new_event_loop() asyncio.set_event_loop(None) def tearDown(self): self.loop.close() def _make_one(self, allow_pause=True, *args, **kwargs): out = streams.StreamReader( self.protocol, limit=1, loop=self.loop, *args, **kwargs) out._allow_pause = allow_pause return out def test_read(self): r = self._make_one() r.feed_data(b'da', 2) res = self.loop.run_until_complete(r.read(1)) self.assertEqual(res, b'd') self.assertFalse(r._protocol.resume_reading.called) def test_read_resume_paused(self): r = self._make_one() r.feed_data(b'test', 4) r._protocol._reading_paused = True res = self.loop.run_until_complete(r.read(1)) self.assertEqual(res, b't') self.assertTrue(r._protocol.pause_reading.called) def test_readline(self): r = self._make_one() r.feed_data(b'd\n', 5) res = self.loop.run_until_complete(r.readline()) self.assertEqual(res, 
b'd\n') self.assertFalse(r._protocol.resume_reading.called) def test_readline_resume_paused(self): r = self._make_one() r._protocol._reading_paused = True r.feed_data(b'd\n', 5) res = self.loop.run_until_complete(r.readline()) self.assertEqual(res, b'd\n') self.assertTrue(r._protocol.resume_reading.called) def test_readany(self): r = self._make_one() r.feed_data(b'data', 4) res = self.loop.run_until_complete(r.readany()) self.assertEqual(res, b'data') self.assertFalse(r._protocol.resume_reading.called) def test_readany_resume_paused(self): r = self._make_one() r._protocol._reading_paused = True r.feed_data(b'data', 4) res = self.loop.run_until_complete(r.readany()) self.assertEqual(res, b'data') self.assertTrue(r._protocol.resume_reading.called) def test_readchunk(self): r = self._make_one() r.feed_data(b'data', 4) res, end_of_http_chunk = self.loop.run_until_complete(r.readchunk()) self.assertEqual(res, b'data') self.assertFalse(end_of_http_chunk) self.assertFalse(r._protocol.resume_reading.called) def test_readchunk_resume_paused(self): r = self._make_one() r._protocol._reading_paused = True r.feed_data(b'data', 4) res, end_of_http_chunk = self.loop.run_until_complete(r.readchunk()) self.assertEqual(res, b'data') self.assertFalse(end_of_http_chunk) self.assertTrue(r._protocol.resume_reading.called) def test_readexactly(self): r = self._make_one() r.feed_data(b'data', 4) res = self.loop.run_until_complete(r.readexactly(3)) self.assertEqual(res, b'dat') self.assertFalse(r._protocol.resume_reading.called) def test_feed_data(self): r = self._make_one() r._protocol._reading_paused = False r.feed_data(b'datadata', 8) self.assertTrue(r._protocol.pause_reading.called) def test_read_nowait(self): r = self._make_one() r._protocol._reading_paused = True r.feed_data(b'data1', 5) r.feed_data(b'data2', 5) r.feed_data(b'data3', 5) res = self.loop.run_until_complete(r.read(5)) self.assertTrue(res == b'data1') self.assertTrue(r._protocol.resume_reading.call_count == 0) res = 
r.read_nowait(5) self.assertTrue(res == b'data2') self.assertTrue(r._protocol.resume_reading.call_count == 0) res = r.read_nowait(5) self.assertTrue(res == b'data3') self.assertTrue(r._protocol.resume_reading.call_count == 1) r._protocol._reading_paused = False res = r.read_nowait(5) self.assertTrue(res == b'') self.assertTrue(r._protocol.resume_reading.call_count == 1) class FlowControlMixin: def test_feed_pause(self): out = self._make_one() out._protocol._reading_paused = False out.feed_data(object(), 100) self.assertTrue(out._protocol.pause_reading.called) def test_resume_on_read(self): out = self._make_one() out.feed_data(object(), 100) out._protocol._reading_paused = True self.loop.run_until_complete(out.read()) self.assertTrue(out._protocol.resume_reading.called) class TestFlowControlDataQueue(unittest.TestCase, FlowControlMixin): def setUp(self): self.protocol = mock.Mock() self.loop = asyncio.new_event_loop() asyncio.set_event_loop(None) def tearDown(self): self.loop.close() def _make_one(self, *args, **kwargs): out = streams.FlowControlDataQueue( self.protocol, limit=1, loop=self.loop, *args, **kwargs) out._allow_pause = True return out aiohttp-3.0.1/tests/test_formdata.py0000666000000000000000000000434513240304665015676 0ustar 00000000000000from unittest import mock import pytest from aiohttp.formdata import FormData @pytest.fixture def buf(): return bytearray() @pytest.fixture def writer(buf): writer = mock.Mock() async def write(chunk): buf.extend(chunk) writer.write.side_effect = write return writer def test_formdata_multipart(buf, writer): form = FormData() assert not form.is_multipart form.add_field('test', b'test', filename='test.txt') assert form.is_multipart def test_invalid_formdata_payload(): form = FormData() form.add_field('test', object(), filename='test.txt') with pytest.raises(TypeError): form() def test_invalid_formdata_params(): with pytest.raises(TypeError): FormData('asdasf') def test_invalid_formdata_params2(): with 
pytest.raises(TypeError): FormData('as') # 2-char str is not allowed def test_invalid_formdata_content_type(): form = FormData() invalid_vals = [0, 0.1, {}, [], b'foo'] for invalid_val in invalid_vals: with pytest.raises(TypeError): form.add_field('foo', 'bar', content_type=invalid_val) def test_invalid_formdata_filename(): form = FormData() invalid_vals = [0, 0.1, {}, [], b'foo'] for invalid_val in invalid_vals: with pytest.raises(TypeError): form.add_field('foo', 'bar', filename=invalid_val) def test_invalid_formdata_content_transfer_encoding(): form = FormData() invalid_vals = [0, 0.1, {}, [], b'foo'] for invalid_val in invalid_vals: with pytest.raises(TypeError): form.add_field('foo', 'bar', content_transfer_encoding=invalid_val) async def test_formdata_field_name_is_quoted(buf, writer): form = FormData(charset="ascii") form.add_field("emails[]", "xxx@x.co", content_type="multipart/form-data") payload = form() await payload.write(writer) assert b'name="emails%5B%5D"' in buf async def test_formdata_field_name_is_not_quoted(buf, writer): form = FormData(quote_fields=False, charset="ascii") form.add_field("emails[]", "xxx@x.co", content_type="multipart/form-data") payload = form() await payload.write(writer) assert b'name="emails[]"' in buf aiohttp-3.0.1/tests/test_frozenlist.py0000666000000000000000000001327513240304665016302 0ustar 00000000000000from collections.abc import MutableSequence import pytest from aiohttp.frozenlist import FrozenList, PyFrozenList class FrozenListMixin: FrozenList = None SKIP_METHODS = {'__abstractmethods__', '__slots__'} def test_subclass(self): assert issubclass(self.FrozenList, MutableSequence) def test_iface(self): for name in set(dir(MutableSequence)) - self.SKIP_METHODS: if name.startswith('_') and not name.endswith('_'): continue assert hasattr(self.FrozenList, name) def test_ctor_default(self): _list = self.FrozenList([]) assert not _list.frozen def test_ctor(self): _list = self.FrozenList([1]) assert not _list.frozen def 
test_ctor_copy_list(self): orig = [1] _list = self.FrozenList(orig) del _list[0] assert _list != orig def test_freeze(self): _list = self.FrozenList() _list.freeze() assert _list.frozen def test_repr(self): _list = self.FrozenList([1]) assert repr(_list) == '' _list.freeze() assert repr(_list) == '' def test_getitem(self): _list = self.FrozenList([1, 2]) assert _list[1] == 2 def test_setitem(self): _list = self.FrozenList([1, 2]) _list[1] = 3 assert _list[1] == 3 def test_delitem(self): _list = self.FrozenList([1, 2]) del _list[0] assert len(_list) == 1 assert _list[0] == 2 def test_len(self): _list = self.FrozenList([1]) assert len(_list) == 1 def test_iter(self): _list = self.FrozenList([1, 2]) assert list(iter(_list)) == [1, 2] def test_reversed(self): _list = self.FrozenList([1, 2]) assert list(reversed(_list)) == [2, 1] def test_eq(self): _list = self.FrozenList([1]) assert _list == [1] def test_ne(self): _list = self.FrozenList([1]) assert _list != [2] def test_le(self): _list = self.FrozenList([1]) assert _list <= [1] def test_lt(self): _list = self.FrozenList([1]) assert _list <= [3] def test_ge(self): _list = self.FrozenList([1]) assert _list >= [1] def test_gt(self): _list = self.FrozenList([2]) assert _list > [1] def test_insert(self): _list = self.FrozenList([2]) _list.insert(0, 1) assert _list == [1, 2] def test_frozen_setitem(self): _list = self.FrozenList([1]) _list.freeze() with pytest.raises(RuntimeError): _list[0] = 2 def test_frozen_delitem(self): _list = self.FrozenList([1]) _list.freeze() with pytest.raises(RuntimeError): del _list[0] def test_frozen_insert(self): _list = self.FrozenList([1]) _list.freeze() with pytest.raises(RuntimeError): _list.insert(0, 2) def test_contains(self): _list = self.FrozenList([2]) assert 2 in _list def test_iadd(self): _list = self.FrozenList([1]) _list += [2] assert _list == [1, 2] def test_iadd_frozen(self): _list = self.FrozenList([1]) _list.freeze() with pytest.raises(RuntimeError): _list += [2] assert _list 
== [1] def test_index(self): _list = self.FrozenList([1]) assert _list.index(1) == 0 def test_remove(self): _list = self.FrozenList([1]) _list.remove(1) assert len(_list) == 0 def test_remove_frozen(self): _list = self.FrozenList([1]) _list.freeze() with pytest.raises(RuntimeError): _list.remove(1) assert _list == [1] def test_clear(self): _list = self.FrozenList([1]) _list.clear() assert len(_list) == 0 def test_clear_frozen(self): _list = self.FrozenList([1]) _list.freeze() with pytest.raises(RuntimeError): _list.clear() assert _list == [1] def test_extend(self): _list = self.FrozenList([1]) _list.extend([2]) assert _list == [1, 2] def test_extend_frozen(self): _list = self.FrozenList([1]) _list.freeze() with pytest.raises(RuntimeError): _list.extend([2]) assert _list == [1] def test_reverse(self): _list = self.FrozenList([1, 2]) _list.reverse() assert _list == [2, 1] def test_reverse_frozen(self): _list = self.FrozenList([1, 2]) _list.freeze() with pytest.raises(RuntimeError): _list.reverse() assert _list == [1, 2] def test_pop(self): _list = self.FrozenList([1, 2]) assert _list.pop(0) == 1 assert _list == [2] def test_pop_default(self): _list = self.FrozenList([1, 2]) assert _list.pop() == 2 assert _list == [1] def test_pop_frozen(self): _list = self.FrozenList([1, 2]) _list.freeze() with pytest.raises(RuntimeError): _list.pop() assert _list == [1, 2] def test_append(self): _list = self.FrozenList([1, 2]) _list.append(3) assert _list == [1, 2, 3] def test_append_frozen(self): _list = self.FrozenList([1, 2]) _list.freeze() with pytest.raises(RuntimeError): _list.append(3) assert _list == [1, 2] def test_count(self): _list = self.FrozenList([1, 2]) assert _list.count(1) == 1 class TestFrozenList(FrozenListMixin): FrozenList = FrozenList class TestFrozenListPy(FrozenListMixin): FrozenList = PyFrozenList aiohttp-3.0.1/tests/test_helpers.py0000666000000000000000000003775713240304665015560 0ustar 00000000000000import asyncio import datetime import gc import os import 
tempfile from unittest import mock import pytest from yarl import URL from aiohttp import helpers from aiohttp.abc import AbstractAccessLogger # ------------------- parse_mimetype ---------------------------------- @pytest.mark.parametrize('mimetype, expected', [ ('', helpers.MimeType('', '', '', {})), ('*', helpers.MimeType('*', '*', '', {})), ('application/json', helpers.MimeType('application', 'json', '', {})), ( 'application/json; charset=utf-8', helpers.MimeType('application', 'json', '', {'charset': 'utf-8'}) ), ( '''application/json; charset=utf-8;''', helpers.MimeType('application', 'json', '', {'charset': 'utf-8'}) ), ( 'ApPlIcAtIoN/JSON;ChaRseT="UTF-8"', helpers.MimeType('application', 'json', '', {'charset': 'UTF-8'}) ), ('application/rss+xml', helpers.MimeType('application', 'rss', 'xml', {})), ('text/plain;base64', helpers.MimeType('text', 'plain', '', {'base64': ''})) ]) def test_parse_mimetype(mimetype, expected): result = helpers.parse_mimetype(mimetype) assert isinstance(result, helpers.MimeType) assert result == expected # ------------------- guess_filename ---------------------------------- def test_guess_filename_with_tempfile(): with tempfile.TemporaryFile() as fp: assert (helpers.guess_filename(fp, 'no-throw') is not None) # ------------------- BasicAuth ----------------------------------- def test_basic_auth1(): # missing password here with pytest.raises(ValueError): helpers.BasicAuth(None) def test_basic_auth2(): with pytest.raises(ValueError): helpers.BasicAuth('nkim', None) def test_basic_with_auth_colon_in_login(): with pytest.raises(ValueError): helpers.BasicAuth('nkim:1', 'pwd') def test_basic_auth3(): auth = helpers.BasicAuth('nkim') assert auth.login == 'nkim' assert auth.password == '' def test_basic_auth4(): auth = helpers.BasicAuth('nkim', 'pwd') assert auth.login == 'nkim' assert auth.password == 'pwd' assert auth.encode() == 'Basic bmtpbTpwd2Q=' def test_basic_auth_decode(): auth = helpers.BasicAuth.decode('Basic bmtpbTpwd2Q=') 
assert auth.login == 'nkim' assert auth.password == 'pwd' def test_basic_auth_invalid(): with pytest.raises(ValueError): helpers.BasicAuth.decode('bmtpbTpwd2Q=') def test_basic_auth_decode_not_basic(): with pytest.raises(ValueError): helpers.BasicAuth.decode('Complex bmtpbTpwd2Q=') def test_basic_auth_decode_bad_base64(): with pytest.raises(ValueError): helpers.BasicAuth.decode('Basic bmtpbTpwd2Q') def test_basic_auth_from_url(): url = URL('http://user:pass@example.com') auth = helpers.BasicAuth.from_url(url) assert auth.login == 'user' assert auth.password == 'pass' def test_basic_auth_from_not_url(): with pytest.raises(TypeError): helpers.BasicAuth.from_url('http://user:pass@example.com') # ------------- access logger ------------------------- def test_access_logger_format(): log_format = '%T "%{ETag}o" %X {X} %%P' mock_logger = mock.Mock() access_logger = helpers.AccessLogger(mock_logger, log_format) expected = '%s "%s" %%X {X} %%%s' assert expected == access_logger._log_format def test_access_logger_atoms(mocker): utcnow = datetime.datetime(1843, 1, 1, 0, 30) mock_datetime = mocker.patch("aiohttp.helpers.datetime.datetime") mock_getpid = mocker.patch("os.getpid") mock_datetime.utcnow.return_value = utcnow mock_getpid.return_value = 42 log_format = '%a %t %P %r %s %b %T %Tf %D "%{H1}i" "%{H2}i"' mock_logger = mock.Mock() access_logger = helpers.AccessLogger(mock_logger, log_format) request = mock.Mock(headers={'H1': 'a', 'H2': 'b'}, method="GET", path_qs="/path", version=(1, 1), remote="127.0.0.2") response = mock.Mock(headers={}, body_length=42, status=200) access_logger.log(request, response, 3.1415926) assert not mock_logger.exception.called expected = ('127.0.0.2 [01/Jan/1843:00:29:56 +0000] <42> ' 'GET /path HTTP/1.1 200 42 3 3.141593 3141593 "a" "b"') extra = { 'first_request_line': 'GET /path HTTP/1.1', 'process_id': '<42>', 'remote_address': '127.0.0.2', 'request_start_time': '[01/Jan/1843:00:29:56 +0000]', 'request_time': 3, 'request_time_frac': 
'3.141593', 'request_time_micro': 3141593, 'response_size': 42, 'response_status': 200, 'request_header': {'H1': 'a', 'H2': 'b'}, } mock_logger.info.assert_called_with(expected, extra=extra) def test_access_logger_dicts(): log_format = '%{User-Agent}i %{Content-Length}o %{None}i' mock_logger = mock.Mock() access_logger = helpers.AccessLogger(mock_logger, log_format) request = mock.Mock(headers={"User-Agent": "Mock/1.0"}, version=(1, 1), remote="127.0.0.2") response = mock.Mock(headers={"Content-Length": 123}) access_logger.log(request, response, 0.0) assert not mock_logger.error.called expected = 'Mock/1.0 123 -' extra = { 'request_header': {"User-Agent": "Mock/1.0", 'None': '-'}, 'response_header': {'Content-Length': 123} } mock_logger.info.assert_called_with(expected, extra=extra) def test_access_logger_unix_socket(): log_format = '|%a|' mock_logger = mock.Mock() access_logger = helpers.AccessLogger(mock_logger, log_format) request = mock.Mock(headers={"User-Agent": "Mock/1.0"}, version=(1, 1), remote="") response = mock.Mock() access_logger.log(request, response, 0.0) assert not mock_logger.error.called expected = '||' mock_logger.info.assert_called_with(expected, extra={'remote_address': ''}) def test_logger_no_message(): mock_logger = mock.Mock() access_logger = helpers.AccessLogger(mock_logger, "%r %{content-type}i") extra_dict = { 'first_request_line': '-', 'request_header': {'content-type': '(no headers)'} } access_logger.log(None, None, 0.0) mock_logger.info.assert_called_with("- (no headers)", extra=extra_dict) def test_logger_internal_error(): mock_logger = mock.Mock() access_logger = helpers.AccessLogger(mock_logger, "%D") access_logger.log(None, None, 'invalid') mock_logger.exception.assert_called_with("Error in logging") def test_logger_no_transport(): mock_logger = mock.Mock() access_logger = helpers.AccessLogger(mock_logger, "%a") access_logger.log(None, None, 0) mock_logger.info.assert_called_with("-", extra={'remote_address': '-'}) def 
test_logger_abc(): class Logger(AbstractAccessLogger): def log(self, request, response, time): 1 / 0 mock_logger = mock.Mock() access_logger = Logger(mock_logger, None) with pytest.raises(ZeroDivisionError): access_logger.log(None, None, None) class Logger(AbstractAccessLogger): def log(self, request, response, time): self.logger.info(self.log_format.format( request=request, response=response, time=time )) mock_logger = mock.Mock() access_logger = Logger(mock_logger, '{request} {response} {time}') access_logger.log('request', 'response', 1) mock_logger.info.assert_called_with('request response 1') class TestReify: def test_reify(self): class A: def __init__(self): self._cache = {} @helpers.reify def prop(self): return 1 a = A() assert 1 == a.prop def test_reify_class(self): class A: def __init__(self): self._cache = {} @helpers.reify def prop(self): """Docstring.""" return 1 assert isinstance(A.prop, helpers.reify) assert 'Docstring.' == A.prop.__doc__ def test_reify_assignment(self): class A: def __init__(self): self._cache = {} @helpers.reify def prop(self): return 1 a = A() with pytest.raises(AttributeError): a.prop = 123 # ----------------------------------- is_ip_address() ---------------------- def test_is_ip_address(): assert helpers.is_ip_address("127.0.0.1") assert helpers.is_ip_address("::1") assert helpers.is_ip_address("FE80:0000:0000:0000:0202:B3FF:FE1E:8329") # Hostnames assert not helpers.is_ip_address("localhost") assert not helpers.is_ip_address("www.example.com") # Out of range assert not helpers.is_ip_address("999.999.999.999") # Contain a port assert not helpers.is_ip_address("127.0.0.1:80") assert not helpers.is_ip_address("[2001:db8:0:1]:80") # Too many "::" assert not helpers.is_ip_address("1200::AB00:1234::2552:7777:1313") def test_is_ip_address_bytes(): assert helpers.is_ip_address(b"127.0.0.1") assert helpers.is_ip_address(b"::1") assert helpers.is_ip_address(b"FE80:0000:0000:0000:0202:B3FF:FE1E:8329") # Hostnames assert not 
helpers.is_ip_address(b"localhost") assert not helpers.is_ip_address(b"www.example.com") # Out of range assert not helpers.is_ip_address(b"999.999.999.999") # Contain a port assert not helpers.is_ip_address(b"127.0.0.1:80") assert not helpers.is_ip_address(b"[2001:db8:0:1]:80") # Too many "::" assert not helpers.is_ip_address(b"1200::AB00:1234::2552:7777:1313") def test_ip_addresses(): ip_addresses = [ '0.0.0.0', '127.0.0.1', '255.255.255.255', '0:0:0:0:0:0:0:0', 'FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF', '00AB:0002:3008:8CFD:00AB:0002:3008:8CFD', '00ab:0002:3008:8cfd:00ab:0002:3008:8cfd', 'AB:02:3008:8CFD:AB:02:3008:8CFD', 'AB:02:3008:8CFD::02:3008:8CFD', '::', '1::1', ] for address in ip_addresses: assert helpers.is_ip_address(address) def test_host_addresses(): hosts = [ 'www.four.part.host' 'www.python.org', 'foo.bar', 'localhost', ] for host in hosts: assert not helpers.is_ip_address(host) def test_is_ip_address_invalid_type(): with pytest.raises(TypeError): helpers.is_ip_address(123) with pytest.raises(TypeError): helpers.is_ip_address(object()) # ----------------------------------- TimeoutHandle ------------------- def test_timeout_handle(loop): handle = helpers.TimeoutHandle(loop, 10.2) cb = mock.Mock() handle.register(cb) assert cb == handle._callbacks[0][0] handle.close() assert not handle._callbacks def test_timeout_handle_cb_exc(loop): handle = helpers.TimeoutHandle(loop, 10.2) cb = mock.Mock() handle.register(cb) cb.side_effect = ValueError() handle() assert cb.called assert not handle._callbacks def test_timer_context_cancelled(): with mock.patch('aiohttp.helpers.asyncio') as m_asyncio: m_asyncio.TimeoutError = asyncio.TimeoutError loop = mock.Mock() ctx = helpers.TimerContext(loop) ctx.timeout() with pytest.raises(asyncio.TimeoutError): with ctx: pass assert m_asyncio.Task.current_task.return_value.cancel.called def test_timer_context_no_task(loop): with pytest.raises(RuntimeError): with helpers.TimerContext(loop): pass # 
-------------------------------- CeilTimeout -------------------------- async def test_weakref_handle(loop): cb = mock.Mock() helpers.weakref_handle(cb, 'test', 0.01, loop, False) await asyncio.sleep(0.1, loop=loop) assert cb.test.called async def test_weakref_handle_weak(loop): cb = mock.Mock() helpers.weakref_handle(cb, 'test', 0.01, loop, False) del cb gc.collect() await asyncio.sleep(0.1, loop=loop) def test_ceil_call_later(): cb = mock.Mock() loop = mock.Mock() loop.time.return_value = 10.1 helpers.call_later(cb, 10.1, loop) loop.call_at.assert_called_with(21.0, cb) def test_ceil_call_later_no_timeout(): cb = mock.Mock() loop = mock.Mock() helpers.call_later(cb, 0, loop) assert not loop.call_at.called async def test_ceil_timeout(loop): with helpers.CeilTimeout(None, loop=loop) as timeout: assert timeout._timeout is None assert timeout._cancel_handler is None def test_ceil_timeout_no_task(loop): with pytest.raises(RuntimeError): with helpers.CeilTimeout(10, loop=loop): pass # -------------------------------- ContentDisposition ------------------- def test_content_disposition(): assert (helpers.content_disposition_header('attachment', foo='bar') == 'attachment; foo="bar"') def test_content_disposition_bad_type(): with pytest.raises(ValueError): helpers.content_disposition_header('foo bar') with pytest.raises(ValueError): helpers.content_disposition_header('—Ç–µ—Å—Ç') with pytest.raises(ValueError): helpers.content_disposition_header('foo\x00bar') with pytest.raises(ValueError): helpers.content_disposition_header('') def test_set_content_disposition_bad_param(): with pytest.raises(ValueError): helpers.content_disposition_header('inline', **{'foo bar': 'baz'}) with pytest.raises(ValueError): helpers.content_disposition_header('inline', **{'—Ç–µ—Å—Ç': 'baz'}) with pytest.raises(ValueError): helpers.content_disposition_header('inline', **{'': 'baz'}) with pytest.raises(ValueError): helpers.content_disposition_header('inline', **{'foo\x00bar': 'baz'}) # 
--------------------- proxies_from_env ------------------------------ def test_proxies_from_env_http(mocker): url = URL('http://aiohttp.io/path') mocker.patch.dict(os.environ, {'http_proxy': str(url)}) ret = helpers.proxies_from_env() assert ret.keys() == {'http'} assert ret['http'].proxy == url assert ret['http'].proxy_auth is None def test_proxies_from_env_http_proxy_for_https_proto(mocker): url = URL('http://aiohttp.io/path') mocker.patch.dict(os.environ, {'https_proxy': str(url)}) ret = helpers.proxies_from_env() assert ret.keys() == {'https'} assert ret['https'].proxy == url assert ret['https'].proxy_auth is None def test_proxies_from_env_https_proxy_skipped(mocker): url = URL('https://aiohttp.io/path') mocker.patch.dict(os.environ, {'https_proxy': str(url)}) log = mocker.patch('aiohttp.log.client_logger.warning') assert helpers.proxies_from_env() == {} log.assert_called_with('HTTPS proxies %s are not supported, ignoring', URL('https://aiohttp.io/path')) def test_proxies_from_env_http_with_auth(mocker): url = URL('http://user:pass@aiohttp.io/path') mocker.patch.dict(os.environ, {'http_proxy': str(url)}) ret = helpers.proxies_from_env() assert ret.keys() == {'http'} assert ret['http'].proxy == url.with_user(None) proxy_auth = ret['http'].proxy_auth assert proxy_auth.login == 'user' assert proxy_auth.password == 'pass' assert proxy_auth.encoding == 'latin1' # ------------- set_result / set_exception ---------------------- async def test_set_result(loop): fut = loop.create_future() helpers.set_result(fut, 123) assert 123 == await fut async def test_set_result_cancelled(loop): fut = loop.create_future() fut.cancel() helpers.set_result(fut, 123) with pytest.raises(asyncio.CancelledError): await fut async def test_set_exception(loop): fut = loop.create_future() helpers.set_exception(fut, RuntimeError()) with pytest.raises(RuntimeError): await fut async def test_set_exception_cancelled(loop): fut = loop.create_future() fut.cancel() helpers.set_exception(fut, 
RuntimeError())
    with pytest.raises(asyncio.CancelledError):
        await fut
aiohttp-3.0.1/tests/test_http_exceptions.py0000666000000000000000000000074213240304665017316 0ustar 00000000000000"""Tests for http_exceptions.py"""
from aiohttp import http_exceptions


def test_bad_status_line1():
    # A bytes argument is stringified via repr(), keeping the b'' prefix.
    err = http_exceptions.BadStatusLine(b'')
    assert str(err) == "b''"


def test_bad_status_line2():
    # A str argument is rendered verbatim.
    err = http_exceptions.BadStatusLine('Test')
    assert str(err) == 'Test'


def test_http_error_exception():
    # code and message passed to the constructor are exposed as attributes.
    exc = http_exceptions.HttpProcessingError(
        code=500, message='Internal error')
    assert exc.code == 500
    assert exc.message == 'Internal error'
aiohttp-3.0.1/tests/test_http_parser.py0000666000000000000000000005712213240304665016425 0ustar 00000000000000"""Tests for aiohttp/protocol.py"""

import asyncio
import unittest
import zlib
from unittest import mock

import pytest
from multidict import CIMultiDict
from yarl import URL

import aiohttp
from aiohttp import http_exceptions, streams
from aiohttp.http_parser import (DeflateBuffer, HttpPayloadParser,
                                 HttpRequestParserPy, HttpResponseParserPy)

# brotli support is optional; tests depending on it are skipped when absent.
try:
    import brotli
except ImportError:
    brotli = None

# Both the pure-Python parsers and, when importable, the C-accelerated ones
# are exercised through the parametrized fixtures below.
REQUEST_PARSERS = [HttpRequestParserPy]
RESPONSE_PARSERS = [HttpResponseParserPy]

try:
    from aiohttp import _http_parser
    REQUEST_PARSERS.append(_http_parser.HttpRequestParserC)
    RESPONSE_PARSERS.append(_http_parser.HttpResponseParserC)
except ImportError:  # pragma: no cover
    pass


@pytest.fixture
def protocol():
    return mock.Mock()


@pytest.fixture(params=REQUEST_PARSERS)
def parser(loop, protocol, request):
    """Parser implementations"""
    return request.param(protocol, loop, 8190, 32768, 8190)


@pytest.fixture(params=REQUEST_PARSERS)
def request_cls(request):
    """Request Parser class"""
    return request.param


@pytest.fixture(params=RESPONSE_PARSERS)
def response(loop, protocol, request):
    """Parser implementations"""
    return request.param(protocol, loop, 8190, 32768, 8190)


@pytest.fixture(params=RESPONSE_PARSERS)
def response_cls(request):
    """Parser implementations"""
    return request.param


def test_parse_headers(parser):
    # Continuation lines (leading space) are folded into the previous header.
    text = b'''GET /test HTTP/1.1\r
test: line\r
 continue\r
test2: data\r
\r
'''
    messages, upgrade, tail = parser.feed_data(text)
    assert len(messages) == 1
    msg = messages[0][0]

    assert list(msg.headers.items()) == [('test', 'line continue'),
                                         ('test2', 'data')]
    assert msg.raw_headers == ((b'test', b'line continue'),
                               (b'test2', b'data'))
    assert not msg.should_close
    assert msg.compression is None
    assert not msg.upgrade


def test_parse(parser):
    text = b'GET /test HTTP/1.1\r\n\r\n'
    messages, upgrade, tail = parser.feed_data(text)
    assert len(messages) == 1
    msg, _ = messages[0]
    assert msg.compression is None
    assert not msg.upgrade
    assert msg.method == 'GET'
    assert msg.path == '/test'
    assert msg.version == (1, 1)


async def test_parse_body(parser):
    text = b'GET /test HTTP/1.1\r\nContent-Length: 4\r\n\r\nbody'
    messages, upgrade, tail = parser.feed_data(text)
    assert len(messages) == 1
    _, payload = messages[0]
    body = await payload.read(4)
    assert body == b'body'


async def test_parse_body_with_CRLF(parser):
    # A leading CRLF before the request line must be tolerated.
    text = b'\r\nGET /test HTTP/1.1\r\nContent-Length: 4\r\n\r\nbody'
    messages, upgrade, tail = parser.feed_data(text)
    assert len(messages) == 1
    _, payload = messages[0]
    body = await payload.read(4)
    assert body == b'body'


def test_parse_delayed(parser):
    # No message is emitted until the terminating blank line arrives.
    text = b'GET /test HTTP/1.1\r\n'
    messages, upgrade, tail = parser.feed_data(text)
    assert len(messages) == 0
    assert not upgrade

    messages, upgrade, tail = parser.feed_data(b'\r\n')
    assert len(messages) == 1
    msg = messages[0][0]
    assert msg.method == 'GET'


def test_headers_multi_feed(parser):
    # One header split across three feeds, including a folded continuation.
    text1 = b'GET /test HTTP/1.1\r\n'
    text2 = b'test: line\r'
    text3 = b'\n continue\r\n\r\n'

    messages, upgrade, tail = parser.feed_data(text1)
    assert len(messages) == 0

    messages, upgrade, tail = parser.feed_data(text2)
    assert len(messages) == 0

    messages, upgrade, tail = parser.feed_data(text3)
    assert len(messages) == 1

    msg = messages[0][0]
    assert list(msg.headers.items()) == [('test', 'line continue')]
    assert msg.raw_headers == ((b'test', b'line continue'),)
    assert not msg.should_close
    assert msg.compression is None
    assert not msg.upgrade


def test_headers_split_field(parser):
    # A header name dribbled in a few bytes at a time is still assembled.
    text1 = b'GET /test HTTP/1.1\r\n'
    text2 = b't'
    text3 = b'es'
    text4 = b't: value\r\n\r\n'

    messages, upgrade, tail = parser.feed_data(text1)
    messages, upgrade, tail = parser.feed_data(text2)
    messages, upgrade, tail = parser.feed_data(text3)
    assert len(messages) == 0

    messages, upgrade, tail = parser.feed_data(text4)
    assert len(messages) == 1
    msg = messages[0][0]
    assert list(msg.headers.items()) == [('test', 'value')]
    assert msg.raw_headers == ((b'test', b'value'),)
    assert not msg.should_close
    assert msg.compression is None
    assert not msg.upgrade


def test_parse_headers_multi(parser):
    # Repeated header names are preserved as separate entries.
    text = (b'GET /test HTTP/1.1\r\n'
            b'Set-Cookie: c1=cookie1\r\n'
            b'Set-Cookie: c2=cookie2\r\n\r\n')

    messages, upgrade, tail = parser.feed_data(text)
    assert len(messages) == 1
    msg = messages[0][0]

    assert list(msg.headers.items()) == [('Set-Cookie', 'c1=cookie1'),
                                         ('Set-Cookie', 'c2=cookie2')]
    assert msg.raw_headers == ((b'Set-Cookie', b'c1=cookie1'),
                               (b'Set-Cookie', b'c2=cookie2'))
    assert not msg.should_close
    assert msg.compression is None


def test_conn_default_1_0(parser):
    # HTTP/1.0 defaults to closing the connection.
    text = b'GET /test HTTP/1.0\r\n\r\n'
    messages, upgrade, tail = parser.feed_data(text)
    msg = messages[0][0]
    assert msg.should_close


def test_conn_default_1_1(parser):
    # HTTP/1.1 defaults to keep-alive.
    text = b'GET /test HTTP/1.1\r\n\r\n'
    messages, upgrade, tail = parser.feed_data(text)
    msg = messages[0][0]
    assert not msg.should_close


def test_conn_close(parser):
    text = (b'GET /test HTTP/1.1\r\n'
            b'connection: close\r\n\r\n')
    messages, upgrade, tail = parser.feed_data(text)
    msg = messages[0][0]
    assert msg.should_close


def test_conn_close_1_0(parser):
    text = (b'GET /test HTTP/1.0\r\n'
            b'connection: close\r\n\r\n')
    messages, upgrade, tail = parser.feed_data(text)
    msg = messages[0][0]
    assert msg.should_close


def test_conn_keep_alive_1_0(parser):
    text = (b'GET /test HTTP/1.0\r\n'
            b'connection: keep-alive\r\n\r\n')
    messages, upgrade, tail = parser.feed_data(text)
    msg = messages[0][0]
    assert not msg.should_close


def test_conn_keep_alive_1_1(parser):
    text = (b'GET /test HTTP/1.1\r\n'
            b'connection: keep-alive\r\n\r\n')
    messages, upgrade, tail = parser.feed_data(text)
    msg = messages[0][0]
    assert not msg.should_close


def test_conn_other_1_0(parser):
    # Unknown connection tokens fall back to the HTTP/1.0 default (close).
    text = (b'GET /test HTTP/1.0\r\n'
            b'connection: test\r\n\r\n')
    messages, upgrade, tail = parser.feed_data(text)
    msg = messages[0][0]
    assert msg.should_close


def test_conn_other_1_1(parser):
    # Unknown connection tokens fall back to the HTTP/1.1 default (keep-alive).
    text = (b'GET /test HTTP/1.1\r\n'
            b'connection: test\r\n\r\n')
    messages, upgrade, tail = parser.feed_data(text)
    msg = messages[0][0]
    assert not msg.should_close


def test_request_chunked(parser):
    text = (b'GET /test HTTP/1.1\r\n'
            b'transfer-encoding: chunked\r\n\r\n')
    messages, upgrade, tail = parser.feed_data(text)
    msg, payload = messages[0]
    assert msg.chunked
    assert not upgrade
    assert isinstance(payload, streams.StreamReader)


def test_conn_upgrade(parser):
    text = (b'GET /test HTTP/1.1\r\n'
            b'connection: upgrade\r\n'
            b'upgrade: websocket\r\n\r\n')
    messages, upgrade, tail = parser.feed_data(text)
    msg = messages[0][0]
    assert not msg.should_close
    assert msg.upgrade
    assert upgrade


def test_compression_empty(parser):
    # An empty content-encoding value means no compression.
    text = (b'GET /test HTTP/1.1\r\n'
            b'content-encoding: \r\n\r\n')
    messages, upgrade, tail = parser.feed_data(text)
    msg = messages[0][0]
    assert msg.compression is None


def test_compression_deflate(parser):
    text = (b'GET /test HTTP/1.1\r\n'
            b'content-encoding: deflate\r\n\r\n')
    messages, upgrade, tail = parser.feed_data(text)
    msg = messages[0][0]
    assert msg.compression == 'deflate'


def test_compression_gzip(parser):
    text = (b'GET /test HTTP/1.1\r\n'
            b'content-encoding: gzip\r\n\r\n')
    messages, upgrade, tail = parser.feed_data(text)
    msg = messages[0][0]
    assert msg.compression == 'gzip'


@pytest.mark.skipif(brotli is None, reason="brotli is not installed")
def test_compression_brotli(parser):
    text = (b'GET /test HTTP/1.1\r\n'
            b'content-encoding: br\r\n\r\n')
    messages, upgrade, tail = parser.feed_data(text)
    msg = messages[0][0]
    assert msg.compression == 'br'


def test_compression_unknown(parser):
    # Unrecognized encodings are ignored rather than rejected.
    text = (b'GET /test HTTP/1.1\r\n'
            b'content-encoding: compress\r\n\r\n')
    messages, upgrade, tail = parser.feed_data(text)
    msg = messages[0][0]
    assert msg.compression is None


def test_headers_connect(parser):
    # CONNECT switches the parser into upgrade (tunnelling) mode.
    text = (b'CONNECT www.google.com HTTP/1.1\r\n'
            b'content-length: 0\r\n\r\n')
    messages, upgrade, tail = parser.feed_data(text)
    msg, payload = messages[0]
    assert upgrade
    assert isinstance(payload, streams.StreamReader)


def test_headers_old_websocket_key1(parser):
    # The obsolete hixie-76 websocket handshake header is rejected.
    text = (b'GET /test HTTP/1.1\r\n'
            b'SEC-WEBSOCKET-KEY1: line\r\n\r\n')
    with pytest.raises(http_exceptions.BadHttpMessage):
        parser.feed_data(text)


def test_headers_content_length_err_1(parser):
    # Non-numeric content-length is malformed.
    text = (b'GET /test HTTP/1.1\r\n'
            b'content-length: line\r\n\r\n')
    with pytest.raises(http_exceptions.BadHttpMessage):
        parser.feed_data(text)


def test_headers_content_length_err_2(parser):
    # Negative content-length is malformed.
    text = (b'GET /test HTTP/1.1\r\n'
            b'content-length: -1\r\n\r\n')
    with pytest.raises(http_exceptions.BadHttpMessage):
        parser.feed_data(text)


def test_invalid_header(parser):
    # A header line without a colon is malformed.
    text = (b'GET /test HTTP/1.1\r\n'
            b'test line\r\n\r\n')
    with pytest.raises(http_exceptions.BadHttpMessage):
        parser.feed_data(text)


def test_invalid_name(parser):
    # Brackets are not valid in header names.
    text = (b'GET /test HTTP/1.1\r\n'
            b'test[]: line\r\n\r\n')
    with pytest.raises(http_exceptions.BadHttpMessage):
        parser.feed_data(text)


def test_max_header_field_size(parser):
    name = b'test' * 10 * 1024
    text = (b'GET /test HTTP/1.1\r\n' + name + b':data\r\n\r\n')
    with pytest.raises(http_exceptions.LineTooLong):
        parser.feed_data(text)


def test_max_header_value_size(parser):
    name = b'test' * 10 * 1024
    text = (b'GET /test HTTP/1.1\r\n'
            b'data:' + name + b'\r\n\r\n')
    with pytest.raises(http_exceptions.LineTooLong):
        parser.feed_data(text)


def test_max_header_value_size_continuation(parser):
    name = b'test' * 10 * 1024
    text =
(b'GET /test HTTP/1.1\r\n' b'data: test\r\n ' + name + b'\r\n\r\n') with pytest.raises(http_exceptions.LineTooLong): parser.feed_data(text) def test_http_request_parser(parser): text = b'GET /path HTTP/1.1\r\n\r\n' messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg == ('GET', '/path', (1, 1), CIMultiDict(), (), False, None, False, False, URL('/path')) def test_http_request_bad_status_line(parser): text = b'getpath \r\n\r\n' with pytest.raises(http_exceptions.BadStatusLine): parser.feed_data(text) def test_http_request_upgrade(parser): text = (b'GET /test HTTP/1.1\r\n' b'connection: upgrade\r\n' b'upgrade: websocket\r\n\r\n' b'some raw data') messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert not msg.should_close assert msg.upgrade assert upgrade assert tail == b'some raw data' def test_http_request_parser_utf8(parser): text = 'GET /path HTTP/1.1\r\nx-test:теÑÑ‚\r\n\r\n'.encode('utf-8') messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg == ('GET', '/path', (1, 1), CIMultiDict([('X-TEST', 'теÑÑ‚')]), ((b'x-test', 'теÑÑ‚'.encode('utf-8')),), False, None, False, False, URL('/path')) def test_http_request_parser_non_utf8(parser): text = 'GET /path HTTP/1.1\r\nx-test:теÑÑ‚\r\n\r\n'.encode('cp1251') msg = parser.feed_data(text)[0][0][0] assert msg == ('GET', '/path', (1, 1), CIMultiDict([('X-TEST', 'теÑÑ‚'.encode('cp1251').decode( 'utf-8', 'surrogateescape'))]), ((b'x-test', 'теÑÑ‚'.encode('cp1251')),), False, None, False, False, URL('/path')) def test_http_request_parser_two_slashes(parser): text = b'GET //path HTTP/1.1\r\n\r\n' msg = parser.feed_data(text)[0][0][0] assert msg[:-1] == ('GET', '//path', (1, 1), CIMultiDict(), (), False, None, False, False) def test_http_request_parser_bad_method(parser): with pytest.raises(http_exceptions.BadStatusLine): parser.feed_data(b'!12%()+=~$ /get HTTP/1.1\r\n\r\n') def test_http_request_parser_bad_version(parser): with 
pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(b'GET //get HT/11\r\n\r\n') def test_http_request_max_status_line(parser): with pytest.raises(http_exceptions.LineTooLong): parser.feed_data( b'GET /path' + b'test' * 10 * 1024 + b' HTTP/1.1\r\n\r\n') def test_http_response_parser_utf8(response): text = 'HTTP/1.1 200 Ok\r\nx-test:теÑÑ‚\r\n\r\n'.encode('utf-8') messages, upgraded, tail = response.feed_data(text) assert len(messages) == 1 msg = messages[0][0] assert msg.version == (1, 1) assert msg.code == 200 assert msg.reason == 'Ok' assert msg.headers == CIMultiDict([('X-TEST', 'теÑÑ‚')]) assert msg.raw_headers == ((b'x-test', 'теÑÑ‚'.encode('utf-8')),) assert not upgraded assert not tail def test_http_response_parser_bad_status_line_too_long(response): with pytest.raises(http_exceptions.LineTooLong): response.feed_data( b'HTTP/1.1 200 Ok' + b'test' * 10 * 1024 + b'\r\n\r\n') def test_http_response_parser_bad_version(response): with pytest.raises(http_exceptions.BadHttpMessage): response.feed_data(b'HT/11 200 Ok\r\n\r\n') def test_http_response_parser_no_reason(response): msg = response.feed_data(b'HTTP/1.1 200\r\n\r\n')[0][0][0] assert msg.version == (1, 1) assert msg.code == 200 assert not msg.reason def test_http_response_parser_bad(response): with pytest.raises(http_exceptions.BadHttpMessage): response.feed_data(b'HTT/1\r\n\r\n') def test_http_response_parser_code_under_100(response): msg = response.feed_data(b'HTTP/1.1 99 test\r\n\r\n')[0][0][0] assert msg.code == 99 def test_http_response_parser_code_above_999(response): with pytest.raises(http_exceptions.BadHttpMessage): response.feed_data(b'HTTP/1.1 9999 test\r\n\r\n') def test_http_response_parser_code_not_int(response): with pytest.raises(http_exceptions.BadHttpMessage): response.feed_data(b'HTTP/1.1 ttt test\r\n\r\n') def test_http_request_chunked_payload(parser): text = (b'GET /test HTTP/1.1\r\n' b'transfer-encoding: chunked\r\n\r\n') msg, payload = parser.feed_data(text)[0][0] assert 
msg.chunked assert not payload.is_eof() assert isinstance(payload, streams.StreamReader) parser.feed_data(b'4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n') assert b'dataline' == b''.join(d for d in payload._buffer) assert [4, 8] == payload._http_chunk_splits assert payload.is_eof() def test_http_request_chunked_payload_and_next_message(parser): text = (b'GET /test HTTP/1.1\r\n' b'transfer-encoding: chunked\r\n\r\n') msg, payload = parser.feed_data(text)[0][0] messages, upgraded, tail = parser.feed_data( b'4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n' b'POST /test2 HTTP/1.1\r\n' b'transfer-encoding: chunked\r\n\r\n') assert b'dataline' == b''.join(d for d in payload._buffer) assert [4, 8] == payload._http_chunk_splits assert payload.is_eof() assert len(messages) == 1 msg2, payload2 = messages[0] assert msg2.method == 'POST' assert msg2.chunked assert not payload2.is_eof() def test_http_request_chunked_payload_chunks(parser): text = (b'GET /test HTTP/1.1\r\n' b'transfer-encoding: chunked\r\n\r\n') msg, payload = parser.feed_data(text)[0][0] parser.feed_data(b'4\r\ndata\r') parser.feed_data(b'\n4') parser.feed_data(b'\r') parser.feed_data(b'\n') parser.feed_data(b'li') parser.feed_data(b'ne\r\n0\r\n') parser.feed_data(b'test: test\r\n') assert b'dataline' == b''.join(d for d in payload._buffer) assert [4, 8] == payload._http_chunk_splits assert not payload.is_eof() parser.feed_data(b'\r\n') assert b'dataline' == b''.join(d for d in payload._buffer) assert [4, 8] == payload._http_chunk_splits assert payload.is_eof() def test_parse_chunked_payload_chunk_extension(parser): text = (b'GET /test HTTP/1.1\r\n' b'transfer-encoding: chunked\r\n\r\n') msg, payload = parser.feed_data(text)[0][0] parser.feed_data( b'4;test\r\ndata\r\n4\r\nline\r\n0\r\ntest: test\r\n\r\n') assert b'dataline' == b''.join(d for d in payload._buffer) assert [4, 8] == payload._http_chunk_splits assert payload.is_eof() def _test_parse_no_length_or_te_on_post(loop, protocol, request_cls): parser = request_cls(protocol, 
loop, readall=True) text = b'POST /test HTTP/1.1\r\n\r\n' msg, payload = parser.feed_data(text)[0][0] assert payload.is_eof() def test_parse_payload_response_without_body(loop, protocol, response_cls): parser = response_cls(protocol, loop, response_with_body=False) text = (b'HTTP/1.1 200 Ok\r\n' b'content-length: 10\r\n\r\n') msg, payload = parser.feed_data(text)[0][0] assert payload.is_eof() def test_parse_length_payload(response): text = (b'HTTP/1.1 200 Ok\r\n' b'content-length: 4\r\n\r\n') msg, payload = response.feed_data(text)[0][0] assert not payload.is_eof() response.feed_data(b'da') response.feed_data(b't') response.feed_data(b'aHT') assert payload.is_eof() assert b'data' == b''.join(d for d in payload._buffer) def test_parse_no_length_payload(parser): text = b'PUT / HTTP/1.1\r\n\r\n' msg, payload = parser.feed_data(text)[0][0] assert payload.is_eof() def test_partial_url(parser): messages, upgrade, tail = parser.feed_data(b'GET /te') assert len(messages) == 0 messages, upgrade, tail = parser.feed_data(b'st HTTP/1.1\r\n\r\n') assert len(messages) == 1 msg, payload = messages[0] assert msg.method == 'GET' assert msg.path == '/test' assert msg.version == (1, 1) assert payload.is_eof() def test_url_parse_non_strict_mode(parser): payload = 'GET /test/теÑÑ‚ HTTP/1.1\r\n\r\n'.encode('utf-8') messages, upgrade, tail = parser.feed_data(payload) assert len(messages) == 1 msg, payload = messages[0] assert msg.method == 'GET' assert msg.path == '/test/теÑÑ‚' assert msg.version == (1, 1) assert payload.is_eof() class TestParsePayload(unittest.TestCase): def setUp(self): self.stream = mock.Mock() asyncio.set_event_loop(None) def test_parse_eof_payload(self): out = aiohttp.FlowControlDataQueue(self.stream) p = HttpPayloadParser(out, readall=True) p.feed_data(b'data') p.feed_eof() self.assertTrue(out.is_eof()) self.assertEqual([(bytearray(b'data'), 4)], list(out._buffer)) def test_parse_no_body(self): out = aiohttp.FlowControlDataQueue(self.stream) p = 
HttpPayloadParser(out, method='PUT') self.assertTrue(out.is_eof()) self.assertTrue(p.done) def test_parse_length_payload_eof(self): out = aiohttp.FlowControlDataQueue(self.stream) p = HttpPayloadParser(out, length=4) p.feed_data(b'da') with pytest.raises(http_exceptions.ContentLengthError): p.feed_eof() def test_parse_chunked_payload_size_error(self): out = aiohttp.FlowControlDataQueue(self.stream) p = HttpPayloadParser(out, chunked=True) self.assertRaises( http_exceptions.TransferEncodingError, p.feed_data, b'blah\r\n') self.assertIsInstance( out.exception(), http_exceptions.TransferEncodingError) def test_http_payload_parser_length(self): out = aiohttp.FlowControlDataQueue(self.stream) p = HttpPayloadParser(out, length=2) eof, tail = p.feed_data(b'1245') self.assertTrue(eof) self.assertEqual(b'12', b''.join(d for d, _ in out._buffer)) self.assertEqual(b'45', tail) _comp = zlib.compressobj(wbits=-zlib.MAX_WBITS) _COMPRESSED = b''.join([_comp.compress(b'data'), _comp.flush()]) def test_http_payload_parser_deflate(self): length = len(self._COMPRESSED) out = aiohttp.FlowControlDataQueue(self.stream) p = HttpPayloadParser( out, length=length, compression='deflate') p.feed_data(self._COMPRESSED) self.assertEqual(b'data', b''.join(d for d, _ in out._buffer)) self.assertTrue(out.is_eof()) def test_http_payload_parser_deflate_no_wbits(self): comp = zlib.compressobj() COMPRESSED = b''.join([comp.compress(b'data'), comp.flush()]) length = len(COMPRESSED) out = aiohttp.FlowControlDataQueue(self.stream) p = HttpPayloadParser( out, length=length, compression='deflate') p.feed_data(COMPRESSED) self.assertEqual(b'data', b''.join(d for d, _ in out._buffer)) self.assertTrue(out.is_eof()) def test_http_payload_parser_length_zero(self): out = aiohttp.FlowControlDataQueue(self.stream) p = HttpPayloadParser(out, length=0) self.assertTrue(p.done) self.assertTrue(out.is_eof()) @pytest.mark.skipif(brotli is None, reason="brotli is not installed") def test_http_payload_brotli(self): 
compressed = brotli.compress(b'brotli data') out = aiohttp.FlowControlDataQueue(self.stream) p = HttpPayloadParser( out, length=len(compressed), compression='br') p.feed_data(compressed) self.assertEqual(b'brotli data', b''.join(d for d, _ in out._buffer)) self.assertTrue(out.is_eof()) class TestDeflateBuffer(unittest.TestCase): def setUp(self): self.stream = mock.Mock() asyncio.set_event_loop(None) def test_feed_data(self): buf = aiohttp.FlowControlDataQueue(self.stream) dbuf = DeflateBuffer(buf, 'deflate') dbuf.decompressor = mock.Mock() dbuf.decompressor.decompress.return_value = b'line' dbuf.feed_data(b'data', 4) self.assertEqual([b'line'], list(d for d, _ in buf._buffer)) def test_feed_data_err(self): buf = aiohttp.FlowControlDataQueue(self.stream) dbuf = DeflateBuffer(buf, 'deflate') exc = ValueError() dbuf.decompressor = mock.Mock() dbuf.decompressor.decompress.side_effect = exc self.assertRaises( http_exceptions.ContentEncodingError, dbuf.feed_data, b'data', 4) def test_feed_eof(self): buf = aiohttp.FlowControlDataQueue(self.stream) dbuf = DeflateBuffer(buf, 'deflate') dbuf.decompressor = mock.Mock() dbuf.decompressor.flush.return_value = b'line' dbuf.feed_eof() self.assertEqual([b'line'], list(d for d, _ in buf._buffer)) self.assertTrue(buf._eof) def test_feed_eof_err(self): buf = aiohttp.FlowControlDataQueue(self.stream) dbuf = DeflateBuffer(buf, 'deflate') dbuf.decompressor = mock.Mock() dbuf.decompressor.flush.return_value = b'line' dbuf.decompressor.eof = False self.assertRaises(http_exceptions.ContentEncodingError, dbuf.feed_eof) def test_empty_body(self): buf = aiohttp.FlowControlDataQueue(self.stream) dbuf = DeflateBuffer(buf, 'deflate') dbuf.feed_eof() self.assertTrue(buf.at_eof()) aiohttp-3.0.1/tests/test_http_writer.py0000666000000000000000000001156113240304665016452 0ustar 00000000000000"""Tests for aiohttp/http_writer.py""" import asyncio import zlib from unittest import mock import pytest from aiohttp import http @pytest.fixture def buf(): 
    return bytearray()


@pytest.fixture
def transport(buf):
    # Transport stub that records every written chunk into ``buf``.
    transport = mock.Mock()

    def write(chunk):
        buf.extend(chunk)

    transport.write.side_effect = write
    transport.is_closing.return_value = False
    return transport


@pytest.fixture
def protocol(loop, transport):
    # Protocol stub whose drain helper resolves immediately.
    protocol = mock.Mock(transport=transport)
    protocol._drain_helper.return_value = loop.create_future()
    protocol._drain_helper.return_value.set_result(None)
    return protocol


def test_payloadwriter_properties(transport, protocol, loop):
    writer = http.StreamWriter(protocol, transport, loop)
    assert writer.protocol == protocol
    assert writer.transport == transport


async def test_write_payload_eof(transport, protocol, loop):
    write = transport.write = mock.Mock()
    msg = http.StreamWriter(protocol, transport, loop)

    msg.write(b'data1')
    msg.write(b'data2')
    await msg.write_eof()

    # Everything after the header separator is the body.
    content = b''.join([c[1][0] for c in list(write.mock_calls)])
    assert b'data1data2' == content.split(b'\r\n\r\n', 1)[-1]


async def test_write_payload_chunked(buf, protocol, transport, loop):
    msg = http.StreamWriter(protocol, transport, loop)
    msg.enable_chunking()
    msg.write(b'data')
    await msg.write_eof()

    assert b'4\r\ndata\r\n0\r\n\r\n' == buf


async def test_write_payload_chunked_multiple(buf, protocol, transport, loop):
    msg = http.StreamWriter(protocol, transport, loop)
    msg.enable_chunking()
    msg.write(b'data1')
    msg.write(b'data2')
    await msg.write_eof()

    assert b'5\r\ndata1\r\n5\r\ndata2\r\n0\r\n\r\n' == buf


async def test_write_payload_length(protocol, transport, loop):
    # Output is truncated to the declared content length.
    write = transport.write = mock.Mock()

    msg = http.StreamWriter(protocol, transport, loop)
    msg.length = 2
    msg.write(b'd')
    msg.write(b'ata')
    await msg.write_eof()

    content = b''.join([c[1][0] for c in list(write.mock_calls)])
    assert b'da' == content.split(b'\r\n\r\n', 1)[-1]


async def test_write_payload_chunked_filter(protocol, transport, loop):
    write = transport.write = mock.Mock()

    msg = http.StreamWriter(protocol, transport, loop)
    msg.enable_chunking()
    msg.write(b'da')
    msg.write(b'ta')
    await msg.write_eof()

    content = b''.join([c[1][0] for c in list(write.mock_calls)])
    assert content.endswith(b'2\r\nda\r\n2\r\nta\r\n0\r\n\r\n')


async def test_write_payload_chunked_filter_mutiple_chunks(
        protocol, transport, loop):
    write = transport.write = mock.Mock()
    msg = http.StreamWriter(protocol, transport, loop)
    msg.enable_chunking()
    msg.write(b'da')
    msg.write(b'ta')
    msg.write(b'1d')
    msg.write(b'at')
    msg.write(b'a2')
    await msg.write_eof()
    content = b''.join([c[1][0] for c in list(write.mock_calls)])
    assert content.endswith(
        b'2\r\nda\r\n2\r\nta\r\n2\r\n1d\r\n2\r\nat\r\n'
        b'2\r\na2\r\n0\r\n\r\n')


# Module-level raw-deflate stream used by the compression test below.
compressor = zlib.compressobj(wbits=-zlib.MAX_WBITS)
COMPRESSED = b''.join([compressor.compress(b'data'), compressor.flush()])


async def test_write_payload_deflate_compression(protocol, transport, loop):
    write = transport.write = mock.Mock()
    msg = http.StreamWriter(protocol, transport, loop)
    msg.enable_compression('deflate')
    msg.write(b'data')
    await msg.write_eof()

    chunks = [c[1][0] for c in list(write.mock_calls)]
    assert all(chunks)
    content = b''.join(chunks)
    assert COMPRESSED == content.split(b'\r\n\r\n', 1)[-1]


async def test_write_payload_deflate_and_chunked(
        buf, protocol, transport, loop):
    msg = http.StreamWriter(protocol, transport, loop)
    msg.enable_compression('deflate')
    msg.enable_chunking()

    msg.write(b'da')
    msg.write(b'ta')
    await msg.write_eof()

    assert b'6\r\nKI,I\x04\x00\r\n0\r\n\r\n' == buf


def test_write_drain(protocol, transport, loop):
    # drain() is only triggered when requested and the buffer is large enough.
    msg = http.StreamWriter(protocol, transport, loop)
    msg.drain = mock.Mock()
    msg.write(b'1' * (64 * 1024 * 2), drain=False)
    assert not msg.drain.called

    msg.write(b'1', drain=True)
    assert msg.drain.called
    assert msg.buffer_size == 0


def test_write_to_closing_transport(protocol, transport, loop):
    # Writing after the transport starts closing raises CancelledError.
    msg = http.StreamWriter(protocol, transport, loop)

    msg.write(b'Before closing')
    transport.is_closing.return_value = True

    with pytest.raises(asyncio.CancelledError):
        msg.write(b'After closing')


async def test_drain(protocol, transport,
loop): msg = http.StreamWriter(protocol, transport, loop) await msg.drain() assert protocol._drain_helper.called async def test_drain_no_transport(protocol, transport, loop): msg = http.StreamWriter(protocol, transport, loop) msg._protocol.transport = None await msg.drain() assert not protocol._drain_helper.called aiohttp-3.0.1/tests/test_locks.py0000666000000000000000000000237313240304665015213 0ustar 00000000000000"""Tests of custom aiohttp locks implementations""" import asyncio import pytest from aiohttp.locks import EventResultOrError class TestEventResultOrError: async def test_set_exception(self, loop): ev = EventResultOrError(loop=loop) async def c(): try: await ev.wait() except Exception as e: return e return 1 t = loop.create_task(c()) await asyncio.sleep(0, loop=loop) e = Exception() ev.set(exc=e) assert (await t) == e async def test_set(self, loop): ev = EventResultOrError(loop=loop) async def c(): await ev.wait() return 1 t = loop.create_task(c()) await asyncio.sleep(0, loop=loop) ev.set() assert (await t) == 1 async def test_cancel_waiters(self, loop): ev = EventResultOrError(loop=loop) async def c(): await ev.wait() t1 = loop.create_task(c()) t2 = loop.create_task(c()) await asyncio.sleep(0, loop=loop) ev.cancel() ev.set() with pytest.raises(asyncio.CancelledError): await t1 with pytest.raises(asyncio.CancelledError): await t2 aiohttp-3.0.1/tests/test_loop.py0000666000000000000000000000170013240304665015042 0ustar 00000000000000import asyncio import platform import threading import pytest from aiohttp import web from aiohttp.test_utils import AioHTTPTestCase, unittest_run_loop @pytest.mark.skipif(platform.system() == "Windows", reason="the test is not valid for Windows") async def test_subprocess_co(loop): assert isinstance(threading.current_thread(), threading._MainThread) proc = await asyncio.create_subprocess_shell( "exit 0", loop=loop, stdin=asyncio.subprocess.DEVNULL, stdout=asyncio.subprocess.DEVNULL, stderr=asyncio.subprocess.DEVNULL) await 
proc.wait() class TestCase(AioHTTPTestCase): async def get_application(self): app = web.Application() app.on_startup.append(self.on_startup_hook) return app async def on_startup_hook(self, app): self.startup_loop = app.loop @unittest_run_loop async def test_on_startup_hook(self): assert self.startup_loop is not None aiohttp-3.0.1/tests/test_multipart.py0000666000000000000000000021065513240304665016125 0ustar 00000000000000import asyncio import functools import io import json import unittest import zlib from unittest import mock import pytest import aiohttp.multipart from aiohttp import payload from aiohttp.hdrs import (CONTENT_DISPOSITION, CONTENT_ENCODING, CONTENT_TRANSFER_ENCODING, CONTENT_TYPE) from aiohttp.helpers import parse_mimetype from aiohttp.multipart import (content_disposition_filename, parse_content_disposition) from aiohttp.streams import DEFAULT_LIMIT as stream_reader_default_limit from aiohttp.streams import StreamReader @pytest.fixture def buf(): return bytearray() @pytest.fixture def stream(buf): writer = mock.Mock() async def write(chunk): buf.extend(chunk) writer.write.side_effect = write return writer @pytest.fixture def writer(): return aiohttp.multipart.MultipartWriter(boundary=':') def run_in_loop(f): @functools.wraps(f) def wrapper(testcase, *args, **kwargs): coro = asyncio.coroutine(f) future = asyncio.wait_for(coro(testcase, *args, **kwargs), timeout=5) return testcase.loop.run_until_complete(future) return wrapper class MetaAioTestCase(type): def __new__(cls, name, bases, attrs): for key, obj in attrs.items(): if key.startswith('test_'): attrs[key] = run_in_loop(obj) return super().__new__(cls, name, bases, attrs) class TestCase(unittest.TestCase, metaclass=MetaAioTestCase): def setUp(self): self.loop = asyncio.new_event_loop() asyncio.set_event_loop(self.loop) def tearDown(self): self.loop.close() def future(self, obj): fut = self.loop.create_future() fut.set_result(obj) return fut class Response: def __init__(self, headers, content): 
self.headers = headers self.content = content class Stream: def __init__(self, content): self.content = io.BytesIO(content) async def read(self, size=None): return self.content.read(size) def at_eof(self): return self.content.tell() == len(self.content.getbuffer()) async def readline(self): return self.content.readline() def unread_data(self, data): self.content = io.BytesIO(data + self.content.read()) class StreamWithShortenRead(Stream): def __init__(self, content): self._first = True super().__init__(content) async def read(self, size=None): if size is not None and self._first: self._first = False size = size // 2 return await super().read(size) class MultipartResponseWrapperTestCase(TestCase): def setUp(self): super().setUp() wrapper = aiohttp.multipart.MultipartResponseWrapper(mock.Mock(), mock.Mock()) self.wrapper = wrapper def test_at_eof(self): self.wrapper.at_eof() self.assertTrue(self.wrapper.resp.content.at_eof.called) async def test_next(self): self.wrapper.stream.next.return_value = self.future(b'') self.wrapper.stream.at_eof.return_value = False await self.wrapper.next() self.assertTrue(self.wrapper.stream.next.called) async def test_release(self): self.wrapper.resp.release.return_value = self.future(None) await self.wrapper.release() self.assertTrue(self.wrapper.resp.release.called) async def test_release_when_stream_at_eof(self): self.wrapper.resp.release.return_value = self.future(None) self.wrapper.stream.next.return_value = self.future(b'') self.wrapper.stream.at_eof.return_value = True await self.wrapper.next() self.assertTrue(self.wrapper.stream.next.called) self.assertTrue(self.wrapper.resp.release.called) class PartReaderTestCase(TestCase): def setUp(self): super().setUp() self.boundary = b'--:' async def test_next(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {}, Stream(b'Hello, world!\r\n--:')) result = await obj.next() self.assertEqual(b'Hello, world!', result) self.assertTrue(obj.at_eof()) async def test_next_next(self): 
obj = aiohttp.multipart.BodyPartReader( self.boundary, {}, Stream(b'Hello, world!\r\n--:')) result = await obj.next() self.assertEqual(b'Hello, world!', result) self.assertTrue(obj.at_eof()) result = await obj.next() self.assertIsNone(result) async def test_read(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {}, Stream(b'Hello, world!\r\n--:')) result = await obj.read() self.assertEqual(b'Hello, world!', result) self.assertTrue(obj.at_eof()) async def test_read_chunk_at_eof(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {}, Stream(b'--:')) obj._at_eof = True result = await obj.read_chunk() self.assertEqual(b'', result) async def test_read_chunk_without_content_length(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {}, Stream(b'Hello, world!\r\n--:')) c1 = await obj.read_chunk(8) c2 = await obj.read_chunk(8) c3 = await obj.read_chunk(8) self.assertEqual(c1 + c2, b'Hello, world!') self.assertEqual(c3, b'') async def test_read_incomplete_chunk(self): stream = Stream(b'') def prepare(data): f = self.loop.create_future() f.set_result(data) return f with mock.patch.object(stream, 'read', side_effect=[ prepare(b'Hello, '), prepare(b'World'), prepare(b'!\r\n--:'), prepare(b'') ]): obj = aiohttp.multipart.BodyPartReader( self.boundary, {}, stream) c1 = await obj.read_chunk(8) self.assertEqual(c1, b'Hello, ') c2 = await obj.read_chunk(8) self.assertEqual(c2, b'World') c3 = await obj.read_chunk(8) self.assertEqual(c3, b'!') async def test_read_all_at_once(self): stream = Stream(b'Hello, World!\r\n--:--\r\n') obj = aiohttp.multipart.BodyPartReader(self.boundary, {}, stream) result = await obj.read_chunk() self.assertEqual(b'Hello, World!', result) result = await obj.read_chunk() self.assertEqual(b'', result) self.assertTrue(obj.at_eof()) async def test_read_incomplete_body_chunked(self): stream = Stream(b'Hello, World!\r\n-') obj = aiohttp.multipart.BodyPartReader(self.boundary, {}, stream) result = b'' with 
self.assertRaises(AssertionError): for _ in range(4): result += await obj.read_chunk(7) self.assertEqual(b'Hello, World!\r\n-', result) async def test_read_boundary_with_incomplete_chunk(self): stream = Stream(b'') def prepare(data): f = self.loop.create_future() f.set_result(data) return f with mock.patch.object(stream, 'read', side_effect=[ prepare(b'Hello, World'), prepare(b'!\r\n'), prepare(b'--:'), prepare(b'') ]): obj = aiohttp.multipart.BodyPartReader( self.boundary, {}, stream) c1 = await obj.read_chunk(12) self.assertEqual(c1, b'Hello, World') c2 = await obj.read_chunk(8) self.assertEqual(c2, b'!') c3 = await obj.read_chunk(8) self.assertEqual(c3, b'') async def test_multi_read_chunk(self): stream = Stream(b'Hello,\r\n--:\r\n\r\nworld!\r\n--:--') obj = aiohttp.multipart.BodyPartReader(self.boundary, {}, stream) result = await obj.read_chunk(8) self.assertEqual(b'Hello,', result) result = await obj.read_chunk(8) self.assertEqual(b'', result) self.assertTrue(obj.at_eof()) async def test_read_chunk_properly_counts_read_bytes(self): expected = b'.' * 10 size = len(expected) obj = aiohttp.multipart.BodyPartReader( self.boundary, {'CONTENT-LENGTH': size}, StreamWithShortenRead(expected + b'\r\n--:--')) result = bytearray() while True: chunk = await obj.read_chunk() if not chunk: break result.extend(chunk) self.assertEqual(size, len(result)) self.assertEqual(b'.' 
* size, result) self.assertTrue(obj.at_eof()) async def test_read_does_not_read_boundary(self): stream = Stream(b'Hello, world!\r\n--:') obj = aiohttp.multipart.BodyPartReader( self.boundary, {}, stream) result = await obj.read() self.assertEqual(b'Hello, world!', result) self.assertEqual(b'--:', (await stream.read())) async def test_multiread(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {}, Stream(b'Hello,\r\n--:\r\n\r\nworld!\r\n--:--')) result = await obj.read() self.assertEqual(b'Hello,', result) result = await obj.read() self.assertEqual(b'', result) self.assertTrue(obj.at_eof()) async def test_read_multiline(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {}, Stream(b'Hello\n,\r\nworld!\r\n--:--')) result = await obj.read() self.assertEqual(b'Hello\n,\r\nworld!', result) result = await obj.read() self.assertEqual(b'', result) self.assertTrue(obj.at_eof()) async def test_read_respects_content_length(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {'CONTENT-LENGTH': 100500}, Stream(b'.' * 100500 + b'\r\n--:--')) result = await obj.read() self.assertEqual(b'.' 
* 100500, result) self.assertTrue(obj.at_eof()) async def test_read_with_content_encoding_gzip(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {CONTENT_ENCODING: 'gzip'}, Stream(b'\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\x03\x0b\xc9\xccMU' b'(\xc9W\x08J\xcdI\xacP\x04\x00$\xfb\x9eV\x0e\x00\x00\x00' b'\r\n--:--')) result = await obj.read(decode=True) self.assertEqual(b'Time to Relax!', result) async def test_read_with_content_encoding_deflate(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {CONTENT_ENCODING: 'deflate'}, Stream(b'\x0b\xc9\xccMU(\xc9W\x08J\xcdI\xacP\x04\x00\r\n--:--')) result = await obj.read(decode=True) self.assertEqual(b'Time to Relax!', result) async def test_read_with_content_encoding_identity(self): thing = (b'\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\x03\x0b\xc9\xccMU' b'(\xc9W\x08J\xcdI\xacP\x04\x00$\xfb\x9eV\x0e\x00\x00\x00' b'\r\n') obj = aiohttp.multipart.BodyPartReader( self.boundary, {CONTENT_ENCODING: 'identity'}, Stream(thing + b'--:--')) result = await obj.read(decode=True) self.assertEqual(thing[:-2], result) async def test_read_with_content_encoding_unknown(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {CONTENT_ENCODING: 'snappy'}, Stream(b'\x0e4Time to Relax!\r\n--:--')) with self.assertRaises(RuntimeError): await obj.read(decode=True) async def test_read_with_content_transfer_encoding_base64(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {CONTENT_TRANSFER_ENCODING: 'base64'}, Stream(b'VGltZSB0byBSZWxheCE=\r\n--:--')) result = await obj.read(decode=True) self.assertEqual(b'Time to Relax!', result) async def test_read_with_content_transfer_encoding_quoted_printable(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {CONTENT_TRANSFER_ENCODING: 'quoted-printable'}, Stream(b'=D0=9F=D1=80=D0=B8=D0=B2=D0=B5=D1=82,' b' =D0=BC=D0=B8=D1=80!\r\n--:--')) result = await obj.read(decode=True) self.assertEqual(b'\xd0\x9f\xd1\x80\xd0\xb8\xd0\xb2\xd0\xb5\xd1\x82,' b' 
\xd0\xbc\xd0\xb8\xd1\x80!', result) @pytest.mark.parametrize('encoding', []) async def test_read_with_content_transfer_encoding_binary(self): data = b'\xd0\x9f\xd1\x80\xd0\xb8\xd0\xb2\xd0\xb5\xd1\x82,' \ b' \xd0\xbc\xd0\xb8\xd1\x80!' for encoding in ('binary', '8bit', '7bit'): with self.subTest(encoding): obj = aiohttp.multipart.BodyPartReader( self.boundary, {CONTENT_TRANSFER_ENCODING: encoding}, Stream(data + b'\r\n--:--')) result = await obj.read(decode=True) self.assertEqual(data, result) async def test_read_with_content_transfer_encoding_unknown(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {CONTENT_TRANSFER_ENCODING: 'unknown'}, Stream(b'\x0e4Time to Relax!\r\n--:--')) with self.assertRaises(RuntimeError): await obj.read(decode=True) async def test_read_text(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {}, Stream(b'Hello, world!\r\n--:--')) result = await obj.text() self.assertEqual('Hello, world!', result) async def test_read_text_default_encoding(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {}, Stream('Привет, Мир!\r\n--:--'.encode('utf-8'))) result = await obj.text() self.assertEqual('Привет, Мир!', result) async def test_read_text_encoding(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {}, Stream('Привет, Мир!\r\n--:--'.encode('cp1251'))) result = await obj.text(encoding='cp1251') self.assertEqual('Привет, Мир!', result) async def test_read_text_guess_encoding(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {CONTENT_TYPE: 'text/plain;charset=cp1251'}, Stream('Привет, Мир!\r\n--:--'.encode('cp1251'))) result = await obj.text() self.assertEqual('Привет, Мир!', result) async def test_read_text_compressed(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {CONTENT_ENCODING: 'deflate', CONTENT_TYPE: 'text/plain'}, Stream(b'\x0b\xc9\xccMU(\xc9W\x08J\xcdI\xacP\x04\x00\r\n--:--')) result = await obj.text() self.assertEqual('Time to Relax!', result) async def 
test_read_text_while_closed(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {CONTENT_TYPE: 'text/plain'}, Stream(b'')) obj._at_eof = True result = await obj.text() self.assertEqual('', result) async def test_read_json(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {CONTENT_TYPE: 'application/json'}, Stream(b'{"test": "passed"}\r\n--:--')) result = await obj.json() self.assertEqual({'test': 'passed'}, result) async def test_read_json_encoding(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {CONTENT_TYPE: 'application/json'}, Stream('{"теÑÑ‚": "паÑÑед"}\r\n--:--'.encode('cp1251'))) result = await obj.json(encoding='cp1251') self.assertEqual({'теÑÑ‚': 'паÑÑед'}, result) async def test_read_json_guess_encoding(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {CONTENT_TYPE: 'application/json; charset=cp1251'}, Stream('{"теÑÑ‚": "паÑÑед"}\r\n--:--'.encode('cp1251'))) result = await obj.json() self.assertEqual({'теÑÑ‚': 'паÑÑед'}, result) async def test_read_json_compressed(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {CONTENT_ENCODING: 'deflate', CONTENT_TYPE: 'application/json'}, Stream(b'\xabV*I-.Q\xb2RP*H,.NMQ\xaa\x05\x00\r\n--:--')) result = await obj.json() self.assertEqual({'test': 'passed'}, result) async def test_read_json_while_closed(self): stream = Stream(b'') obj = aiohttp.multipart.BodyPartReader( self.boundary, {CONTENT_TYPE: 'application/json'}, stream) obj._at_eof = True result = await obj.json() self.assertEqual(None, result) async def test_read_form(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {CONTENT_TYPE: 'application/x-www-form-urlencoded'}, Stream(b'foo=bar&foo=baz&boo=\r\n--:--')) result = await obj.form() self.assertEqual([('foo', 'bar'), ('foo', 'baz'), ('boo', '')], result) async def test_read_form_encoding(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {CONTENT_TYPE: 'application/x-www-form-urlencoded'}, 
Stream('foo=bar&foo=baz&boo=\r\n--:--'.encode('cp1251'))) result = await obj.form(encoding='cp1251') self.assertEqual([('foo', 'bar'), ('foo', 'baz'), ('boo', '')], result) async def test_read_form_guess_encoding(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {CONTENT_TYPE: 'application/x-www-form-urlencoded; charset=utf-8'}, Stream('foo=bar&foo=baz&boo=\r\n--:--'.encode('utf-8'))) result = await obj.form() self.assertEqual([('foo', 'bar'), ('foo', 'baz'), ('boo', '')], result) async def test_read_form_while_closed(self): stream = Stream(b'') obj = aiohttp.multipart.BodyPartReader( self.boundary, {CONTENT_TYPE: 'application/x-www-form-urlencoded'}, stream) obj._at_eof = True result = await obj.form() self.assertEqual(None, result) async def test_readline(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {}, Stream(b'Hello\n,\r\nworld!\r\n--:--')) result = await obj.readline() self.assertEqual(b'Hello\n', result) result = await obj.readline() self.assertEqual(b',\r\n', result) result = await obj.readline() self.assertEqual(b'world!', result) result = await obj.readline() self.assertEqual(b'', result) self.assertTrue(obj.at_eof()) async def test_release(self): stream = Stream(b'Hello,\r\n--:\r\n\r\nworld!\r\n--:--') obj = aiohttp.multipart.BodyPartReader( self.boundary, {}, stream) await obj.release() self.assertTrue(obj.at_eof()) self.assertEqual(b'--:\r\n\r\nworld!\r\n--:--', stream.content.read()) async def test_release_respects_content_length(self): obj = aiohttp.multipart.BodyPartReader( self.boundary, {'CONTENT-LENGTH': 100500}, Stream(b'.' 
* 100500 + b'\r\n--:--')) result = await obj.release() self.assertIsNone(result) self.assertTrue(obj.at_eof()) async def test_release_release(self): stream = Stream(b'Hello,\r\n--:\r\n\r\nworld!\r\n--:--') obj = aiohttp.multipart.BodyPartReader( self.boundary, {}, stream) await obj.release() await obj.release() self.assertEqual(b'--:\r\n\r\nworld!\r\n--:--', stream.content.read()) async def test_filename(self): part = aiohttp.multipart.BodyPartReader( self.boundary, {CONTENT_DISPOSITION: 'attachment; filename=foo.html'}, None) self.assertEqual('foo.html', part.filename) async def test_reading_long_part(self): size = 2 * stream_reader_default_limit protocol = mock.Mock(_reading_paused=False) stream = StreamReader(protocol) stream.feed_data(b'0' * size + b'\r\n--:--') stream.feed_eof() obj = aiohttp.multipart.BodyPartReader( self.boundary, {}, stream) data = await obj.read() self.assertEqual(len(data), size) class MultipartReaderTestCase(TestCase): def test_from_response(self): resp = Response({CONTENT_TYPE: 'multipart/related;boundary=":"'}, Stream(b'--:\r\n\r\nhello\r\n--:--')) res = aiohttp.multipart.MultipartReader.from_response(resp) self.assertIsInstance(res, aiohttp.multipart.MultipartResponseWrapper) self.assertIsInstance(res.stream, aiohttp.multipart.MultipartReader) def test_bad_boundary(self): resp = Response( {CONTENT_TYPE: 'multipart/related;boundary=' + 'a' * 80}, Stream(b'')) with self.assertRaises(ValueError): aiohttp.multipart.MultipartReader.from_response(resp) def test_dispatch(self): reader = aiohttp.multipart.MultipartReader( {CONTENT_TYPE: 'multipart/related;boundary=":"'}, Stream(b'--:\r\n\r\necho\r\n--:--')) res = reader._get_part_reader({CONTENT_TYPE: 'text/plain'}) self.assertIsInstance(res, reader.part_reader_cls) def test_dispatch_bodypart(self): reader = aiohttp.multipart.MultipartReader( {CONTENT_TYPE: 'multipart/related;boundary=":"'}, Stream(b'--:\r\n\r\necho\r\n--:--')) res = reader._get_part_reader({CONTENT_TYPE: 'text/plain'}) 
self.assertIsInstance(res, reader.part_reader_cls) def test_dispatch_multipart(self): reader = aiohttp.multipart.MultipartReader( {CONTENT_TYPE: 'multipart/related;boundary=":"'}, Stream(b'----:--\r\n' b'\r\n' b'test\r\n' b'----:--\r\n' b'\r\n' b'passed\r\n' b'----:----\r\n' b'--:--')) res = reader._get_part_reader( {CONTENT_TYPE: 'multipart/related;boundary=--:--'}) self.assertIsInstance(res, reader.__class__) def test_dispatch_custom_multipart_reader(self): class CustomReader(aiohttp.multipart.MultipartReader): pass reader = aiohttp.multipart.MultipartReader( {CONTENT_TYPE: 'multipart/related;boundary=":"'}, Stream(b'----:--\r\n' b'\r\n' b'test\r\n' b'----:--\r\n' b'\r\n' b'passed\r\n' b'----:----\r\n' b'--:--')) reader.multipart_reader_cls = CustomReader res = reader._get_part_reader( {CONTENT_TYPE: 'multipart/related;boundary=--:--'}) self.assertIsInstance(res, CustomReader) async def test_emit_next(self): reader = aiohttp.multipart.MultipartReader( {CONTENT_TYPE: 'multipart/related;boundary=":"'}, Stream(b'--:\r\n\r\necho\r\n--:--')) res = await reader.next() self.assertIsInstance(res, reader.part_reader_cls) async def test_invalid_boundary(self): reader = aiohttp.multipart.MultipartReader( {CONTENT_TYPE: 'multipart/related;boundary=":"'}, Stream(b'---:\r\n\r\necho\r\n---:--')) with self.assertRaises(ValueError): await reader.next() async def test_release(self): reader = aiohttp.multipart.MultipartReader( {CONTENT_TYPE: 'multipart/mixed;boundary=":"'}, Stream(b'--:\r\n' b'Content-Type: multipart/related;boundary=--:--\r\n' b'\r\n' b'----:--\r\n' b'\r\n' b'test\r\n' b'----:--\r\n' b'\r\n' b'passed\r\n' b'----:----\r\n' b'\r\n' b'--:--')) await reader.release() self.assertTrue(reader.at_eof()) async def test_release_release(self): reader = aiohttp.multipart.MultipartReader( {CONTENT_TYPE: 'multipart/related;boundary=":"'}, Stream(b'--:\r\n\r\necho\r\n--:--')) await reader.release() self.assertTrue(reader.at_eof()) await reader.release() 
self.assertTrue(reader.at_eof()) async def test_release_next(self): reader = aiohttp.multipart.MultipartReader( {CONTENT_TYPE: 'multipart/related;boundary=":"'}, Stream(b'--:\r\n\r\necho\r\n--:--')) await reader.release() self.assertTrue(reader.at_eof()) res = await reader.next() self.assertIsNone(res) async def test_second_next_releases_previous_object(self): reader = aiohttp.multipart.MultipartReader( {CONTENT_TYPE: 'multipart/related;boundary=":"'}, Stream(b'--:\r\n' b'\r\n' b'test\r\n' b'--:\r\n' b'\r\n' b'passed\r\n' b'--:--')) first = await reader.next() self.assertIsInstance(first, aiohttp.multipart.BodyPartReader) second = await reader.next() self.assertTrue(first.at_eof()) self.assertFalse(second.at_eof()) async def test_release_without_read_the_last_object(self): reader = aiohttp.multipart.MultipartReader( {CONTENT_TYPE: 'multipart/related;boundary=":"'}, Stream(b'--:\r\n' b'\r\n' b'test\r\n' b'--:\r\n' b'\r\n' b'passed\r\n' b'--:--')) first = await reader.next() second = await reader.next() third = await reader.next() self.assertTrue(first.at_eof()) self.assertTrue(second.at_eof()) self.assertTrue(second.at_eof()) self.assertIsNone(third) async def test_read_chunk_by_length_doesnt_breaks_reader(self): reader = aiohttp.multipart.MultipartReader( {CONTENT_TYPE: 'multipart/related;boundary=":"'}, Stream(b'--:\r\n' b'Content-Length: 4\r\n\r\n' b'test' b'\r\n--:\r\n' b'Content-Length: 6\r\n\r\n' b'passed' b'\r\n--:--')) body_parts = [] while True: read_part = b'' part = await reader.next() if part is None: break while not part.at_eof(): read_part += await part.read_chunk(3) body_parts.append(read_part) self.assertListEqual(body_parts, [b'test', b'passed']) async def test_read_chunk_from_stream_doesnt_breaks_reader(self): reader = aiohttp.multipart.MultipartReader( {CONTENT_TYPE: 'multipart/related;boundary=":"'}, Stream(b'--:\r\n' b'\r\n' b'chunk' b'\r\n--:\r\n' b'\r\n' b'two_chunks' b'\r\n--:--')) body_parts = [] while True: read_part = b'' part = await 
reader.next() if part is None: break while not part.at_eof(): chunk = await part.read_chunk(5) self.assertTrue(chunk) read_part += chunk body_parts.append(read_part) self.assertListEqual(body_parts, [b'chunk', b'two_chunks']) async def test_reading_skips_prelude(self): reader = aiohttp.multipart.MultipartReader( {CONTENT_TYPE: 'multipart/related;boundary=":"'}, Stream(b'Multi-part data is not supported.\r\n' b'\r\n' b'--:\r\n' b'\r\n' b'test\r\n' b'--:\r\n' b'\r\n' b'passed\r\n' b'--:--')) first = await reader.next() self.assertIsInstance(first, aiohttp.multipart.BodyPartReader) second = await reader.next() self.assertTrue(first.at_eof()) self.assertFalse(second.at_eof()) async def test_writer(writer): assert writer.size == 0 assert writer.boundary == ':' async def test_writer_serialize_io_chunk(buf, stream, writer): flo = io.BytesIO(b'foobarbaz') writer.append(flo) await writer.write(stream) assert (buf == b'--:\r\nContent-Type: application/octet-stream' b'\r\nContent-Length: 9\r\n\r\nfoobarbaz\r\n--:--\r\n') async def test_writer_serialize_json(buf, stream, writer): writer.append_json({'привет': 'мир'}) await writer.write(stream) assert (b'{"\\u043f\\u0440\\u0438\\u0432\\u0435\\u0442":' b' "\\u043c\\u0438\\u0440"}' in buf) async def test_writer_serialize_form(buf, stream, writer): data = [('foo', 'bar'), ('foo', 'baz'), ('boo', 'zoo')] writer.append_form(data) await writer.write(stream) assert (b'foo=bar&foo=baz&boo=zoo' in buf) async def test_writer_serialize_form_dict(buf, stream, writer): data = {'hello': 'мир'} writer.append_form(data) await writer.write(stream) assert (b'hello=%D0%BC%D0%B8%D1%80' in buf) async def test_writer_write(buf, stream, writer): writer.append('foo-bar-baz') writer.append_json({'test': 'passed'}) writer.append_form({'test': 'passed'}) writer.append_form([('one', 1), ('two', 2)]) sub_multipart = aiohttp.multipart.MultipartWriter(boundary='::') sub_multipart.append('nested content') sub_multipart.headers['X-CUSTOM'] = 'test' 
writer.append(sub_multipart) await writer.write(stream) assert ( (b'--:\r\n' b'Content-Type: text/plain; charset=utf-8\r\n' b'Content-Length: 11\r\n\r\n' b'foo-bar-baz' b'\r\n' b'--:\r\n' b'Content-Type: application/json\r\n' b'Content-Length: 18\r\n\r\n' b'{"test": "passed"}' b'\r\n' b'--:\r\n' b'Content-Type: application/x-www-form-urlencoded\r\n' b'Content-Length: 11\r\n\r\n' b'test=passed' b'\r\n' b'--:\r\n' b'Content-Type: application/x-www-form-urlencoded\r\n' b'Content-Length: 11\r\n\r\n' b'one=1&two=2' b'\r\n' b'--:\r\n' b'Content-Type: multipart/mixed; boundary="::"\r\n' b'X-CUSTOM: test\r\nContent-Length: 93\r\n\r\n' b'--::\r\n' b'Content-Type: text/plain; charset=utf-8\r\n' b'Content-Length: 14\r\n\r\n' b'nested content\r\n' b'--::--\r\n' b'\r\n' b'--:--\r\n') == bytes(buf)) async def test_writer_serialize_with_content_encoding_gzip(buf, stream, writer): writer.append('Time to Relax!', {CONTENT_ENCODING: 'gzip'}) await writer.write(stream) headers, message = bytes(buf).split(b'\r\n\r\n', 1) assert (b'--:\r\nContent-Encoding: gzip\r\n' b'Content-Type: text/plain; charset=utf-8' == headers) decompressor = zlib.decompressobj(wbits=16+zlib.MAX_WBITS) data = decompressor.decompress(message.split(b'\r\n')[0]) data += decompressor.flush() assert b'Time to Relax!' 
== data async def test_writer_serialize_with_content_encoding_deflate(buf, stream, writer): writer.append('Time to Relax!', {CONTENT_ENCODING: 'deflate'}) await writer.write(stream) headers, message = bytes(buf).split(b'\r\n\r\n', 1) assert (b'--:\r\nContent-Encoding: deflate\r\n' b'Content-Type: text/plain; charset=utf-8' == headers) thing = b'\x0b\xc9\xccMU(\xc9W\x08J\xcdI\xacP\x04\x00\r\n--:--\r\n' assert thing == message async def test_writer_serialize_with_content_encoding_identity(buf, stream, writer): thing = b'\x0b\xc9\xccMU(\xc9W\x08J\xcdI\xacP\x04\x00' writer.append(thing, {CONTENT_ENCODING: 'identity'}) await writer.write(stream) headers, message = bytes(buf).split(b'\r\n\r\n', 1) assert (b'--:\r\nContent-Encoding: identity\r\n' b'Content-Type: application/octet-stream\r\n' b'Content-Length: 16' == headers) assert thing == message.split(b'\r\n')[0] def test_writer_serialize_with_content_encoding_unknown(buf, stream, writer): with pytest.raises(RuntimeError): writer.append('Time to Relax!', {CONTENT_ENCODING: 'snappy'}) async def test_writer_with_content_transfer_encoding_base64(buf, stream, writer): writer.append('Time to Relax!', {CONTENT_TRANSFER_ENCODING: 'base64'}) await writer.write(stream) headers, message = bytes(buf).split(b'\r\n\r\n', 1) assert (b'--:\r\nContent-Transfer-Encoding: base64\r\n' b'Content-Type: text/plain; charset=utf-8' == headers) assert b'VGltZSB0byBSZWxheCE=' == message.split(b'\r\n')[0] async def test_writer_content_transfer_encoding_quote_printable(buf, stream, writer): writer.append('Привет, мир!', {CONTENT_TRANSFER_ENCODING: 'quoted-printable'}) await writer.write(stream) headers, message = bytes(buf).split(b'\r\n\r\n', 1) assert (b'--:\r\nContent-Transfer-Encoding: quoted-printable\r\n' b'Content-Type: text/plain; charset=utf-8' == headers) assert (b'=D0=9F=D1=80=D0=B8=D0=B2=D0=B5=D1=82,' b' =D0=BC=D0=B8=D1=80!' 
== message.split(b'\r\n')[0]) def test_writer_content_transfer_encoding_unknown(buf, stream, writer): with pytest.raises(RuntimeError): writer.append('Time to Relax!', {CONTENT_TRANSFER_ENCODING: 'unknown'}) class MultipartWriterTestCase(TestCase): def setUp(self): super().setUp() self.buf = bytearray() self.stream = mock.Mock() async def write(chunk): self.buf.extend(chunk) self.stream.write.side_effect = write self.writer = aiohttp.multipart.MultipartWriter(boundary=':') def test_default_subtype(self): mimetype = parse_mimetype(self.writer.headers.get(CONTENT_TYPE)) self.assertEqual('multipart', mimetype.type) self.assertEqual('mixed', mimetype.subtype) def test_unquoted_boundary(self): writer = aiohttp.multipart.MultipartWriter(boundary='abc123') self.assertEqual({CONTENT_TYPE: 'multipart/mixed; boundary=abc123'}, writer.headers) def test_quoted_boundary(self): writer = aiohttp.multipart.MultipartWriter(boundary=R'\"') self.assertEqual({CONTENT_TYPE: R'multipart/mixed; boundary="\\\""'}, writer.headers) def test_bad_boundary(self): with self.assertRaises(ValueError): aiohttp.multipart.MultipartWriter(boundary='теÑÑ‚') with self.assertRaises(ValueError): aiohttp.multipart.MultipartWriter(boundary='test\n') def test_default_headers(self): self.assertEqual({CONTENT_TYPE: 'multipart/mixed; boundary=":"'}, self.writer.headers) def test_iter_parts(self): self.writer.append('foo') self.writer.append('bar') self.writer.append('baz') self.assertEqual(3, len(list(self.writer))) def test_append(self): self.assertEqual(0, len(self.writer)) self.writer.append('hello, world!') self.assertEqual(1, len(self.writer)) self.assertIsInstance(self.writer._parts[0][0], payload.Payload) def test_append_with_headers(self): self.writer.append('hello, world!', {'x-foo': 'bar'}) self.assertEqual(1, len(self.writer)) self.assertIn('x-foo', self.writer._parts[0][0].headers) self.assertEqual(self.writer._parts[0][0].headers['x-foo'], 'bar') def test_append_json(self): 
self.writer.append_json({'foo': 'bar'}) self.assertEqual(1, len(self.writer)) part = self.writer._parts[0][0] self.assertEqual(part.headers[CONTENT_TYPE], 'application/json') def test_append_part(self): part = payload.get_payload( 'test', headers={CONTENT_TYPE: 'text/plain'}) self.writer.append(part, {CONTENT_TYPE: 'test/passed'}) self.assertEqual(1, len(self.writer)) part = self.writer._parts[0][0] self.assertEqual(part.headers[CONTENT_TYPE], 'test/passed') def test_append_json_overrides_content_type(self): self.writer.append_json({'foo': 'bar'}, {CONTENT_TYPE: 'test/passed'}) self.assertEqual(1, len(self.writer)) part = self.writer._parts[0][0] self.assertEqual(part.headers[CONTENT_TYPE], 'test/passed') def test_append_form(self): self.writer.append_form({'foo': 'bar'}, {CONTENT_TYPE: 'test/passed'}) self.assertEqual(1, len(self.writer)) part = self.writer._parts[0][0] self.assertEqual(part.headers[CONTENT_TYPE], 'test/passed') def test_append_multipart(self): subwriter = aiohttp.multipart.MultipartWriter(boundary=':') subwriter.append_json({'foo': 'bar'}) self.writer.append(subwriter, {CONTENT_TYPE: 'test/passed'}) self.assertEqual(1, len(self.writer)) part = self.writer._parts[0][0] self.assertEqual(part.headers[CONTENT_TYPE], 'test/passed') async def test_write(self): await self.writer.write(self.stream) def test_with(self): with aiohttp.multipart.MultipartWriter(boundary=':') as writer: writer.append('foo') writer.append(b'bar') writer.append_json({'baz': True}) self.assertEqual(3, len(writer)) def test_append_int_not_allowed(self): with self.assertRaises(TypeError): with aiohttp.multipart.MultipartWriter(boundary=':') as writer: writer.append(1) def test_append_float_not_allowed(self): with self.assertRaises(TypeError): with aiohttp.multipart.MultipartWriter(boundary=':') as writer: writer.append(1.1) def test_append_none_not_allowed(self): with self.assertRaises(TypeError): with aiohttp.multipart.MultipartWriter(boundary=':') as writer: writer.append(None) 
class ParseContentDispositionTestCase(unittest.TestCase): # http://greenbytes.de/tech/tc2231/ def test_parse_empty(self): disptype, params = parse_content_disposition(None) self.assertEqual(None, disptype) self.assertEqual({}, params) def test_inlonly(self): disptype, params = parse_content_disposition('inline') self.assertEqual('inline', disptype) self.assertEqual({}, params) def test_inlonlyquoted(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition('"inline"') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_semicolon(self): disptype, params = parse_content_disposition( 'form-data; name="data"; filename="file ; name.mp4"') self.assertEqual(disptype, 'form-data') self.assertEqual( params, {'name': 'data', 'filename': 'file ; name.mp4'}) def test_inlwithasciifilename(self): disptype, params = parse_content_disposition( 'inline; filename="foo.html"') self.assertEqual('inline', disptype) self.assertEqual({'filename': 'foo.html'}, params) def test_inlwithfnattach(self): disptype, params = parse_content_disposition( 'inline; filename="Not an attachment!"') self.assertEqual('inline', disptype) self.assertEqual({'filename': 'Not an attachment!'}, params) def test_attonly(self): disptype, params = parse_content_disposition('attachment') self.assertEqual('attachment', disptype) self.assertEqual({}, params) def test_attonlyquoted(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition('"attachment"') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_attonlyucase(self): disptype, params = parse_content_disposition('ATTACHMENT') self.assertEqual('attachment', disptype) self.assertEqual({}, params) def test_attwithasciifilename(self): disptype, params = parse_content_disposition( 'attachment; filename="foo.html"') self.assertEqual('attachment', disptype) self.assertEqual({'filename': 'foo.html'}, 
params) def test_inlwithasciifilenamepdf(self): disptype, params = parse_content_disposition( 'attachment; filename="foo.pdf"') self.assertEqual('attachment', disptype) self.assertEqual({'filename': 'foo.pdf'}, params) def test_attwithasciifilename25(self): disptype, params = parse_content_disposition( 'attachment; filename="0000000000111111111122222"') self.assertEqual('attachment', disptype) self.assertEqual({'filename': '0000000000111111111122222'}, params) def test_attwithasciifilename35(self): disptype, params = parse_content_disposition( 'attachment; filename="00000000001111111111222222222233333"') self.assertEqual('attachment', disptype) self.assertEqual({'filename': '00000000001111111111222222222233333'}, params) def test_attwithasciifnescapedchar(self): disptype, params = parse_content_disposition( r'attachment; filename="f\oo.html"') self.assertEqual('attachment', disptype) self.assertEqual({'filename': 'foo.html'}, params) def test_attwithasciifnescapedquote(self): disptype, params = parse_content_disposition( 'attachment; filename="\"quoting\" tested.html"') self.assertEqual('attachment', disptype) self.assertEqual({'filename': '"quoting" tested.html'}, params) @unittest.skip('need more smart parser which respects quoted text') def test_attwithquotedsemicolon(self): disptype, params = parse_content_disposition( 'attachment; filename="Here\'s a semicolon;.html"') self.assertEqual('attachment', disptype) self.assertEqual({'filename': 'Here\'s a semicolon;.html'}, params) def test_attwithfilenameandextparam(self): disptype, params = parse_content_disposition( 'attachment; foo="bar"; filename="foo.html"') self.assertEqual('attachment', disptype) self.assertEqual({'filename': 'foo.html', 'foo': 'bar'}, params) def test_attwithfilenameandextparamescaped(self): disptype, params = parse_content_disposition( 'attachment; foo="\"\\";filename="foo.html"') self.assertEqual('attachment', disptype) self.assertEqual({'filename': 'foo.html', 'foo': '"\\'}, params) def 
test_attwithasciifilenameucase(self): disptype, params = parse_content_disposition( 'attachment; FILENAME="foo.html"') self.assertEqual('attachment', disptype) self.assertEqual({'filename': 'foo.html'}, params) def test_attwithasciifilenamenq(self): disptype, params = parse_content_disposition( 'attachment; filename=foo.html') self.assertEqual('attachment', disptype) self.assertEqual({'filename': 'foo.html'}, params) def test_attwithtokfncommanq(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition( 'attachment; filename=foo,bar.html') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_attwithasciifilenamenqs(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition( 'attachment; filename=foo.html ;') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_attemptyparam(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition( 'attachment; ;filename=foo') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_attwithasciifilenamenqws(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition( 'attachment; filename=foo bar.html') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_attwithfntokensq(self): disptype, params = parse_content_disposition( "attachment; filename='foo.html'") self.assertEqual('attachment', disptype) self.assertEqual({'filename': "'foo.html'"}, params) def test_attwithisofnplain(self): disptype, params = parse_content_disposition( 'attachment; filename="foo-ä.html"') self.assertEqual('attachment', disptype) self.assertEqual({'filename': 'foo-ä.html'}, params) def test_attwithutf8fnplain(self): disptype, params = parse_content_disposition( 'attachment; filename="foo-ä.html"') self.assertEqual('attachment', disptype) 
self.assertEqual({'filename': 'foo-ä.html'}, params) def test_attwithfnrawpctenca(self): disptype, params = parse_content_disposition( 'attachment; filename="foo-%41.html"') self.assertEqual('attachment', disptype) self.assertEqual({'filename': 'foo-%41.html'}, params) def test_attwithfnusingpct(self): disptype, params = parse_content_disposition( 'attachment; filename="50%.html"') self.assertEqual('attachment', disptype) self.assertEqual({'filename': '50%.html'}, params) def test_attwithfnrawpctencaq(self): disptype, params = parse_content_disposition( r'attachment; filename="foo-%\41.html"') self.assertEqual('attachment', disptype) self.assertEqual({'filename': r'foo-%41.html'}, params) def test_attwithnamepct(self): disptype, params = parse_content_disposition( 'attachment; filename="foo-%41.html"') self.assertEqual('attachment', disptype) self.assertEqual({'filename': 'foo-%41.html'}, params) def test_attwithfilenamepctandiso(self): disptype, params = parse_content_disposition( 'attachment; filename="ä-%41.html"') self.assertEqual('attachment', disptype) self.assertEqual({'filename': 'ä-%41.html'}, params) def test_attwithfnrawpctenclong(self): disptype, params = parse_content_disposition( 'attachment; filename="foo-%c3%a4-%e2%82%ac.html"') self.assertEqual('attachment', disptype) self.assertEqual({'filename': 'foo-%c3%a4-%e2%82%ac.html'}, params) def test_attwithasciifilenamews1(self): disptype, params = parse_content_disposition( 'attachment; filename ="foo.html"') self.assertEqual('attachment', disptype) self.assertEqual({'filename': 'foo.html'}, params) def test_attwith2filenames(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition( 'attachment; filename="foo.html"; filename="bar.html"') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_attfnbrokentoken(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = 
parse_content_disposition( 'attachment; filename=foo[1](2).html') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_attfnbrokentokeniso(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition( 'attachment; filename=foo-ä.html') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_attfnbrokentokenutf(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition( 'attachment; filename=foo-ä.html') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_attmissingdisposition(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition( 'filename=foo.html') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_attmissingdisposition2(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition( 'x=y; filename=foo.html') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_attmissingdisposition3(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition( '"foo; filename=bar;baz"; filename=qux') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_attmissingdisposition4(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition( 'filename=foo.html, filename=bar.html') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_emptydisposition(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition( '; filename=foo.html') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_doublecolon(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition( 
': inline; attachment; filename=foo.html') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_attandinline(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition( 'inline; attachment; filename=foo.html') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_attandinline2(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition( 'attachment; inline; filename=foo.html') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_attbrokenquotedfn(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition( 'attachment; filename="foo.html".txt') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_attbrokenquotedfn2(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition( 'attachment; filename="bar') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_attbrokenquotedfn3(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition( 'attachment; filename=foo"bar;baz"qux') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_attmultinstances(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition( 'attachment; filename=foo.html, attachment; filename=bar.html') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_attmissingdelim(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition( 'attachment; foo=foo filename=bar') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_attmissingdelim2(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = 
parse_content_disposition( 'attachment; filename=bar foo=foo') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_attmissingdelim3(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition( 'attachment filename=bar') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_attreversed(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition( 'filename=foo.html; attachment') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_attconfusedparam(self): disptype, params = parse_content_disposition( 'attachment; xfilename=foo.html') self.assertEqual('attachment', disptype) self.assertEqual({'xfilename': 'foo.html'}, params) def test_attabspath(self): disptype, params = parse_content_disposition( 'attachment; filename="/foo.html"') self.assertEqual('attachment', disptype) self.assertEqual({'filename': 'foo.html'}, params) def test_attabspathwin(self): disptype, params = parse_content_disposition( 'attachment; filename="\\foo.html"') self.assertEqual('attachment', disptype) self.assertEqual({'filename': 'foo.html'}, params) def test_attcdate(self): disptype, params = parse_content_disposition( 'attachment; creation-date="Wed, 12 Feb 1997 16:29:51 -0500"') self.assertEqual('attachment', disptype) self.assertEqual({'creation-date': 'Wed, 12 Feb 1997 16:29:51 -0500'}, params) def test_attmdate(self): disptype, params = parse_content_disposition( 'attachment; modification-date="Wed, 12 Feb 1997 16:29:51 -0500"') self.assertEqual('attachment', disptype) self.assertEqual( {'modification-date': 'Wed, 12 Feb 1997 16:29:51 -0500'}, params) def test_dispext(self): disptype, params = parse_content_disposition('foobar') self.assertEqual('foobar', disptype) self.assertEqual({}, params) def test_dispextbadfn(self): disptype, params = parse_content_disposition( 'attachment; example="filename=example.txt"') 
self.assertEqual('attachment', disptype) self.assertEqual({'example': 'filename=example.txt'}, params) def test_attwithisofn2231iso(self): disptype, params = parse_content_disposition( "attachment; filename*=iso-8859-1''foo-%E4.html") self.assertEqual('attachment', disptype) self.assertEqual({'filename*': 'foo-ä.html'}, params) def test_attwithfn2231utf8(self): disptype, params = parse_content_disposition( "attachment; filename*=UTF-8''foo-%c3%a4-%e2%82%ac.html") self.assertEqual('attachment', disptype) self.assertEqual({'filename*': 'foo-ä-€.html'}, params) def test_attwithfn2231noc(self): disptype, params = parse_content_disposition( "attachment; filename*=''foo-%c3%a4-%e2%82%ac.html") self.assertEqual('attachment', disptype) self.assertEqual({'filename*': 'foo-ä-€.html'}, params) def test_attwithfn2231utf8comp(self): disptype, params = parse_content_disposition( "attachment; filename*=UTF-8''foo-a%cc%88.html") self.assertEqual('attachment', disptype) self.assertEqual({'filename*': 'foo-ä.html'}, params) @unittest.skip('should raise decoding error: %82 is invalid for latin1') def test_attwithfn2231utf8_bad(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionParam): disptype, params = parse_content_disposition( "attachment; filename*=iso-8859-1''foo-%c3%a4-%e2%82%ac.html") self.assertEqual('attachment', disptype) self.assertEqual({}, params) @unittest.skip('should raise decoding error: %E4 is invalid for utf-8') def test_attwithfn2231iso_bad(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionParam): disptype, params = parse_content_disposition( "attachment; filename*=utf-8''foo-%E4.html") self.assertEqual('attachment', disptype) self.assertEqual({}, params) def test_attwithfn2231ws1(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionParam): disptype, params = parse_content_disposition( "attachment; filename *=UTF-8''foo-%c3%a4.html") self.assertEqual('attachment', disptype) self.assertEqual({}, params) def 
test_attwithfn2231ws2(self): disptype, params = parse_content_disposition( "attachment; filename*= UTF-8''foo-%c3%a4.html") self.assertEqual('attachment', disptype) self.assertEqual({'filename*': 'foo-ä.html'}, params) def test_attwithfn2231ws3(self): disptype, params = parse_content_disposition( "attachment; filename* =UTF-8''foo-%c3%a4.html") self.assertEqual('attachment', disptype) self.assertEqual({'filename*': 'foo-ä.html'}, params) def test_attwithfn2231quot(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionParam): disptype, params = parse_content_disposition( "attachment; filename*=\"UTF-8''foo-%c3%a4.html\"") self.assertEqual('attachment', disptype) self.assertEqual({}, params) def test_attwithfn2231quot2(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionParam): disptype, params = parse_content_disposition( "attachment; filename*=\"foo%20bar.html\"") self.assertEqual('attachment', disptype) self.assertEqual({}, params) def test_attwithfn2231singleqmissing(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionParam): disptype, params = parse_content_disposition( "attachment; filename*=UTF-8'foo-%c3%a4.html") self.assertEqual('attachment', disptype) self.assertEqual({}, params) @unittest.skip('urllib.parse.unquote is tolerate to standalone % chars') def test_attwithfn2231nbadpct1(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionParam): disptype, params = parse_content_disposition( "attachment; filename*=UTF-8''foo%") self.assertEqual('attachment', disptype) self.assertEqual({}, params) @unittest.skip('urllib.parse.unquote is tolerate to standalone % chars') def test_attwithfn2231nbadpct2(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionParam): disptype, params = parse_content_disposition( "attachment; filename*=UTF-8''f%oo.html") self.assertEqual('attachment', disptype) self.assertEqual({}, params) def test_attwithfn2231dpct(self): disptype, params = 
parse_content_disposition( "attachment; filename*=UTF-8''A-%2541.html") self.assertEqual('attachment', disptype) self.assertEqual({'filename*': 'A-%41.html'}, params) def test_attwithfn2231abspathdisguised(self): disptype, params = parse_content_disposition( "attachment; filename*=UTF-8''%5cfoo.html") self.assertEqual('attachment', disptype) self.assertEqual({'filename*': '\\foo.html'}, params) def test_attfncont(self): disptype, params = parse_content_disposition( 'attachment; filename*0="foo."; filename*1="html"') self.assertEqual('attachment', disptype) self.assertEqual({'filename*0': 'foo.', 'filename*1': 'html'}, params) def test_attfncontqs(self): disptype, params = parse_content_disposition( r'attachment; filename*0="foo"; filename*1="\b\a\r.html"') self.assertEqual('attachment', disptype) self.assertEqual({'filename*0': 'foo', 'filename*1': 'bar.html'}, params) def test_attfncontenc(self): disptype, params = parse_content_disposition( 'attachment; filename*0*=UTF-8''foo-%c3%a4; filename*1=".html"') self.assertEqual('attachment', disptype) self.assertEqual({'filename*0*': 'UTF-8''foo-%c3%a4', 'filename*1': '.html'}, params) def test_attfncontlz(self): disptype, params = parse_content_disposition( 'attachment; filename*0="foo"; filename*01="bar"') self.assertEqual('attachment', disptype) self.assertEqual({'filename*0': 'foo', 'filename*01': 'bar'}, params) def test_attfncontnc(self): disptype, params = parse_content_disposition( 'attachment; filename*0="foo"; filename*2="bar"') self.assertEqual('attachment', disptype) self.assertEqual({'filename*0': 'foo', 'filename*2': 'bar'}, params) def test_attfnconts1(self): disptype, params = parse_content_disposition( 'attachment; filename*0="foo."; filename*2="html"') self.assertEqual('attachment', disptype) self.assertEqual({'filename*0': 'foo.', 'filename*2': 'html'}, params) def test_attfncontord(self): disptype, params = parse_content_disposition( 'attachment; filename*1="bar"; filename*0="foo"') 
self.assertEqual('attachment', disptype) self.assertEqual({'filename*0': 'foo', 'filename*1': 'bar'}, params) def test_attfnboth(self): disptype, params = parse_content_disposition( 'attachment; filename="foo-ae.html";' " filename*=UTF-8''foo-%c3%a4.html") self.assertEqual('attachment', disptype) self.assertEqual({'filename': 'foo-ae.html', 'filename*': 'foo-ä.html'}, params) def test_attfnboth2(self): disptype, params = parse_content_disposition( "attachment; filename*=UTF-8''foo-%c3%a4.html;" ' filename="foo-ae.html"') self.assertEqual('attachment', disptype) self.assertEqual({'filename': 'foo-ae.html', 'filename*': 'foo-ä.html'}, params) def test_attfnboth3(self): disptype, params = parse_content_disposition( "attachment; filename*0*=ISO-8859-15''euro-sign%3d%a4;" " filename*=ISO-8859-1''currency-sign%3d%a4") self.assertEqual('attachment', disptype) self.assertEqual({'filename*': 'currency-sign=¤', 'filename*0*': "ISO-8859-15''euro-sign%3d%a4"}, params) def test_attnewandfn(self): disptype, params = parse_content_disposition( 'attachment; foobar=x; filename="foo.html"') self.assertEqual('attachment', disptype) self.assertEqual({'foobar': 'x', 'filename': 'foo.html'}, params) def test_attrfc2047token(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionHeader): disptype, params = parse_content_disposition( 'attachment; filename==?ISO-8859-1?Q?foo-=E4.html?=') self.assertEqual(None, disptype) self.assertEqual({}, params) def test_attrfc2047quoted(self): disptype, params = parse_content_disposition( 'attachment; filename="=?ISO-8859-1?Q?foo-=E4.html?="') self.assertEqual('attachment', disptype) self.assertEqual({'filename': '=?ISO-8859-1?Q?foo-=E4.html?='}, params) def test_bad_continuous_param(self): with self.assertWarns(aiohttp.multipart.BadContentDispositionParam): disptype, params = parse_content_disposition( 'attachment; filename*0=foo bar') self.assertEqual('attachment', disptype) self.assertEqual({}, params) class 
ContentDispositionFilenameTestCase(unittest.TestCase): # http://greenbytes.de/tech/tc2231/ def test_no_filename(self): self.assertIsNone(content_disposition_filename({})) self.assertIsNone(content_disposition_filename({'foo': 'bar'})) def test_filename(self): params = {'filename': 'foo.html'} self.assertEqual('foo.html', content_disposition_filename(params)) def test_filename_ext(self): params = {'filename*': 'файл.html'} self.assertEqual('файл.html', content_disposition_filename(params)) def test_attfncont(self): params = {'filename*0': 'foo.', 'filename*1': 'html'} self.assertEqual('foo.html', content_disposition_filename(params)) def test_attfncontqs(self): params = {'filename*0': 'foo', 'filename*1': 'bar.html'} self.assertEqual('foobar.html', content_disposition_filename(params)) def test_attfncontenc(self): params = {'filename*0*': "UTF-8''foo-%c3%a4", 'filename*1': '.html'} self.assertEqual('foo-ä.html', content_disposition_filename(params)) def test_attfncontlz(self): params = {'filename*0': 'foo', 'filename*01': 'bar'} self.assertEqual('foo', content_disposition_filename(params)) def test_attfncontnc(self): params = {'filename*0': 'foo', 'filename*2': 'bar'} self.assertEqual('foo', content_disposition_filename(params)) def test_attfnconts1(self): params = {'filename*1': 'foo', 'filename*2': 'bar'} self.assertEqual(None, content_disposition_filename(params)) def test_attfnboth(self): params = {'filename': 'foo-ae.html', 'filename*': 'foo-ä.html'} self.assertEqual('foo-ä.html', content_disposition_filename(params)) def test_attfnboth3(self): params = {'filename*0*': "ISO-8859-15''euro-sign%3d%a4", 'filename*': 'currency-sign=¤'} self.assertEqual('currency-sign=¤', content_disposition_filename(params)) def test_attrfc2047quoted(self): params = {'filename': '=?ISO-8859-1?Q?foo-=E4.html?='} self.assertEqual('=?ISO-8859-1?Q?foo-=E4.html?=', content_disposition_filename(params)) async def test_async_for_reader(loop): data = [ {"test": "passed"}, 42, b'plain 
text', b'aiohttp\n', b'no epilogue'] reader = aiohttp.MultipartReader( headers={CONTENT_TYPE: 'multipart/mixed; boundary=":"'}, content=Stream(b'\r\n'.join([ b'--:', b'Content-Type: application/json', b'', json.dumps(data[0]).encode(), b'--:', b'Content-Type: application/json', b'', json.dumps(data[1]).encode(), b'--:', b'Content-Type: multipart/related; boundary="::"', b'', b'--::', b'Content-Type: text/plain', b'', data[2], b'--::', b'Content-Disposition: attachment; filename="aiohttp"', b'Content-Type: text/plain', b'Content-Length: 28', b'Content-Encoding: gzip', b'', b'\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\x03K\xcc\xcc\xcf())' b'\xe0\x02\x00\xd6\x90\xe2O\x08\x00\x00\x00', b'--::', b'Content-Type: multipart/related; boundary=":::"', b'', b'--:::', b'Content-Type: text/plain', b'', data[4], b'--:::--', b'--::--', b'', b'--:--', b'']))) idata = iter(data) async def check(reader): async for part in reader: if isinstance(part, aiohttp.BodyPartReader): if part.headers[CONTENT_TYPE] == 'application/json': assert next(idata) == (await part.json()) else: assert next(idata) == await part.read(decode=True) else: await check(part) await check(reader) async def test_async_for_bodypart(loop): part = aiohttp.BodyPartReader( boundary=b'--:', headers={}, content=Stream(b'foobarbaz\r\n--:--')) async for data in part: assert data == b'foobarbaz' aiohttp-3.0.1/tests/test_payload.py0000666000000000000000000000316313240304665015527 0ustar 00000000000000from io import StringIO import pytest from aiohttp import payload @pytest.fixture def registry(): old = payload.PAYLOAD_REGISTRY reg = payload.PAYLOAD_REGISTRY = payload.PayloadRegistry() yield reg payload.PAYLOAD_REGISTRY = old class Payload(payload.Payload): async def write(self, writer): pass def test_register_type(registry): class TestProvider: pass payload.register_payload(Payload, TestProvider) p = payload.get_payload(TestProvider()) assert isinstance(p, Payload) def test_payload_ctor(): p = Payload('test', encoding='utf-8', 
filename='test.txt') assert p._value == 'test' assert p._encoding == 'utf-8' assert p.size is None assert p.filename == 'test.txt' assert p.content_type == 'text/plain' def test_payload_content_type(): p = Payload('test', headers={'content-type': 'application/json'}) assert p.content_type == 'application/json' def test_string_payload(): p = payload.StringPayload('test') assert p.encoding == 'utf-8' assert p.content_type == 'text/plain; charset=utf-8' p = payload.StringPayload('test', encoding='koi8-r') assert p.encoding == 'koi8-r' assert p.content_type == 'text/plain; charset=koi8-r' p = payload.StringPayload( 'test', content_type='text/plain; charset=koi8-r') assert p.encoding == 'koi8-r' assert p.content_type == 'text/plain; charset=koi8-r' def test_string_io_payload(): s = StringIO('ű' * 5000) p = payload.StringIOPayload(s) assert p.encoding == 'utf-8' assert p.content_type == 'text/plain; charset=utf-8' assert p.size == 10000 aiohttp-3.0.1/tests/test_proxy.py0000666000000000000000000005250713240304665015265 0ustar 00000000000000import asyncio import gc import socket import ssl import unittest from unittest import mock from yarl import URL import aiohttp from aiohttp.client_reqrep import ClientRequest, ClientResponse from aiohttp.test_utils import make_mocked_coro class TestProxy(unittest.TestCase): response_mock_attrs = { 'status': 200, } mocked_response = mock.Mock(**response_mock_attrs) clientrequest_mock_attrs = { 'return_value.send.return_value.start': make_mocked_coro(mocked_response), } def setUp(self): self.loop = asyncio.new_event_loop() asyncio.set_event_loop(None) def tearDown(self): # just in case if we have transport close callbacks self.loop.stop() self.loop.run_forever() self.loop.close() gc.collect() @mock.patch('aiohttp.connector.ClientRequest') def test_connect(self, ClientRequestMock): req = ClientRequest( 'GET', URL('http://www.python.org'), proxy=URL('http://proxy.example.com'), loop=self.loop ) self.assertEqual(str(req.proxy), 
'http://proxy.example.com') # mock all the things! connector = aiohttp.TCPConnector(loop=self.loop) connector._resolve_host = make_mocked_coro([mock.MagicMock()]) proto = mock.Mock(**{ 'transport.get_extra_info.return_value': False, }) self.loop.create_connection = make_mocked_coro( (proto.transport, proto)) conn = self.loop.run_until_complete(connector.connect(req)) self.assertEqual(req.url, URL('http://www.python.org')) self.assertIs(conn._protocol, proto) self.assertIs(conn.transport, proto.transport) ClientRequestMock.assert_called_with( 'GET', URL('http://proxy.example.com'), auth=None, headers={'Host': 'www.python.org'}, loop=self.loop, ssl=None) @mock.patch('aiohttp.connector.ClientRequest') def test_proxy_headers(self, ClientRequestMock): req = ClientRequest( 'GET', URL('http://www.python.org'), proxy=URL('http://proxy.example.com'), proxy_headers={'Foo': 'Bar'}, loop=self.loop) self.assertEqual(str(req.proxy), 'http://proxy.example.com') # mock all the things! connector = aiohttp.TCPConnector(loop=self.loop) connector._resolve_host = make_mocked_coro([mock.MagicMock()]) proto = mock.Mock(**{ 'transport.get_extra_info.return_value': False, }) self.loop.create_connection = make_mocked_coro( (proto.transport, proto)) conn = self.loop.run_until_complete(connector.connect(req)) self.assertEqual(req.url, URL('http://www.python.org')) self.assertIs(conn._protocol, proto) self.assertIs(conn.transport, proto.transport) ClientRequestMock.assert_called_with( 'GET', URL('http://proxy.example.com'), auth=None, headers={'Host': 'www.python.org', 'Foo': 'Bar'}, loop=self.loop, ssl=None) @mock.patch('aiohttp.connector.ClientRequest', **clientrequest_mock_attrs) def test_connect_req_verify_ssl_true(self, ClientRequestMock): req = ClientRequest( 'GET', URL('https://www.python.org'), proxy=URL('http://proxy.example.com'), loop=self.loop, ssl=True, ) proto = mock.Mock() connector = aiohttp.TCPConnector(loop=self.loop) connector._create_proxy_connection = mock.MagicMock( 
side_effect=connector._create_proxy_connection) connector._create_direct_connection = mock.MagicMock( side_effect=connector._create_direct_connection) connector._resolve_host = make_mocked_coro([mock.MagicMock()]) self.loop.create_connection = make_mocked_coro( (proto.transport, proto)) self.loop.run_until_complete(connector.connect(req)) connector._create_proxy_connection.assert_called_with( req, traces=None) ((proxy_req,), _) = connector._create_direct_connection.call_args proxy_req.send.assert_called_with(mock.ANY) @mock.patch('aiohttp.connector.ClientRequest', **clientrequest_mock_attrs) def test_connect_req_verify_ssl_false(self, ClientRequestMock): req = ClientRequest( 'GET', URL('https://www.python.org'), proxy=URL('http://proxy.example.com'), loop=self.loop, ssl=False, ) proto = mock.Mock() connector = aiohttp.TCPConnector(loop=self.loop) connector._create_proxy_connection = mock.MagicMock( side_effect=connector._create_proxy_connection) connector._create_direct_connection = mock.MagicMock( side_effect=connector._create_direct_connection) connector._resolve_host = make_mocked_coro([mock.MagicMock()]) self.loop.create_connection = make_mocked_coro( (proto.transport, proto)) self.loop.run_until_complete(connector.connect(req)) connector._create_proxy_connection.assert_called_with( req, traces=None) ((proxy_req,), _) = connector._create_direct_connection.call_args proxy_req.send.assert_called_with(mock.ANY) def test_proxy_auth(self): with self.assertRaises(ValueError) as ctx: ClientRequest( 'GET', URL('http://python.org'), proxy=URL('http://proxy.example.com'), proxy_auth=('user', 'pass'), loop=mock.Mock()) self.assertEqual( ctx.exception.args[0], "proxy_auth must be None or BasicAuth() tuple", ) def test_proxy_dns_error(self): connector = aiohttp.TCPConnector(loop=self.loop) connector._resolve_host = make_mocked_coro( raise_exception=OSError('dont take it serious')) req = ClientRequest( 'GET', URL('http://www.python.org'), 
proxy=URL('http://proxy.example.com'), loop=self.loop, ) expected_headers = dict(req.headers) with self.assertRaises(aiohttp.ClientConnectorError): self.loop.run_until_complete(connector.connect(req)) self.assertEqual(req.url.path, '/') self.assertEqual(dict(req.headers), expected_headers) def test_proxy_connection_error(self): connector = aiohttp.TCPConnector(loop=self.loop) connector._resolve_host = make_mocked_coro([{ 'hostname': 'www.python.org', 'host': '127.0.0.1', 'port': 80, 'family': socket.AF_INET, 'proto': 0, 'flags': socket.AI_NUMERICHOST}]) connector._loop.create_connection = make_mocked_coro( raise_exception=OSError('dont take it serious')) req = ClientRequest( 'GET', URL('http://www.python.org'), proxy=URL('http://proxy.example.com'), loop=self.loop, ) with self.assertRaises(aiohttp.ClientProxyConnectionError): self.loop.run_until_complete(connector.connect(req)) @mock.patch('aiohttp.connector.ClientRequest') def test_https_connect(self, ClientRequestMock): proxy_req = ClientRequest('GET', URL('http://proxy.example.com'), loop=self.loop) ClientRequestMock.return_value = proxy_req proxy_resp = ClientResponse('get', URL('http://proxy.example.com')) proxy_resp._loop = self.loop proxy_req.send = send_mock = mock.Mock() send_mock.return_value = proxy_resp proxy_resp.start = make_mocked_coro(mock.Mock(status=200)) connector = aiohttp.TCPConnector(loop=self.loop) connector._resolve_host = make_mocked_coro( [{'hostname': 'hostname', 'host': '127.0.0.1', 'port': 80, 'family': socket.AF_INET, 'proto': 0, 'flags': 0}]) tr, proto = mock.Mock(), mock.Mock() self.loop.create_connection = make_mocked_coro((tr, proto)) req = ClientRequest( 'GET', URL('https://www.python.org'), proxy=URL('http://proxy.example.com'), loop=self.loop, ) self.loop.run_until_complete(connector._create_connection(req)) self.assertEqual(req.url.path, '/') self.assertEqual(proxy_req.method, 'CONNECT') self.assertEqual(proxy_req.url, URL('https://www.python.org')) 
tr.close.assert_called_once_with() tr.get_extra_info.assert_called_with('socket', default=None) self.loop.run_until_complete(proxy_req.close()) proxy_resp.close() self.loop.run_until_complete(req.close()) @mock.patch('aiohttp.connector.ClientRequest') def test_https_connect_certificate_error(self, ClientRequestMock): proxy_req = ClientRequest('GET', URL('http://proxy.example.com'), loop=self.loop) ClientRequestMock.return_value = proxy_req proxy_resp = ClientResponse('get', URL('http://proxy.example.com')) proxy_resp._loop = self.loop proxy_req.send = send_mock = mock.Mock() send_mock.return_value = proxy_resp proxy_resp.start = make_mocked_coro(mock.Mock(status=200)) connector = aiohttp.TCPConnector(loop=self.loop) connector._resolve_host = make_mocked_coro( [{'hostname': 'hostname', 'host': '127.0.0.1', 'port': 80, 'family': socket.AF_INET, 'proto': 0, 'flags': 0}]) seq = 0 @asyncio.coroutine def create_connection(*args, **kwargs): nonlocal seq seq += 1 # connection to http://proxy.example.com if seq == 1: return mock.Mock(), mock.Mock() # connection to https://www.python.org elif seq == 2: raise ssl.CertificateError else: assert False self.loop.create_connection = create_connection req = ClientRequest( 'GET', URL('https://www.python.org'), proxy=URL('http://proxy.example.com'), loop=self.loop, ) with self.assertRaises(aiohttp.ClientConnectorCertificateError): self.loop.run_until_complete(connector._create_connection(req)) @mock.patch('aiohttp.connector.ClientRequest') def test_https_connect_ssl_error(self, ClientRequestMock): proxy_req = ClientRequest('GET', URL('http://proxy.example.com'), loop=self.loop) ClientRequestMock.return_value = proxy_req proxy_resp = ClientResponse('get', URL('http://proxy.example.com')) proxy_resp._loop = self.loop proxy_req.send = send_mock = mock.Mock() send_mock.return_value = proxy_resp proxy_resp.start = make_mocked_coro(mock.Mock(status=200)) connector = aiohttp.TCPConnector(loop=self.loop) connector._resolve_host = 
make_mocked_coro( [{'hostname': 'hostname', 'host': '127.0.0.1', 'port': 80, 'family': socket.AF_INET, 'proto': 0, 'flags': 0}]) seq = 0 @asyncio.coroutine def create_connection(*args, **kwargs): nonlocal seq seq += 1 # connection to http://proxy.example.com if seq == 1: return mock.Mock(), mock.Mock() # connection to https://www.python.org elif seq == 2: raise ssl.SSLError else: assert False self.loop.create_connection = create_connection req = ClientRequest( 'GET', URL('https://www.python.org'), proxy=URL('http://proxy.example.com'), loop=self.loop, ) with self.assertRaises(aiohttp.ClientConnectorSSLError): self.loop.run_until_complete(connector._create_connection(req)) @mock.patch('aiohttp.connector.ClientRequest') def test_https_connect_runtime_error(self, ClientRequestMock): proxy_req = ClientRequest('GET', URL('http://proxy.example.com'), loop=self.loop) ClientRequestMock.return_value = proxy_req proxy_resp = ClientResponse('get', URL('http://proxy.example.com')) proxy_resp._loop = self.loop proxy_req.send = send_mock = mock.Mock() send_mock.return_value = proxy_resp proxy_resp.start = make_mocked_coro(mock.Mock(status=200)) connector = aiohttp.TCPConnector(loop=self.loop) connector._resolve_host = make_mocked_coro( [{'hostname': 'hostname', 'host': '127.0.0.1', 'port': 80, 'family': socket.AF_INET, 'proto': 0, 'flags': 0}]) tr, proto = mock.Mock(), mock.Mock() tr.get_extra_info.return_value = None self.loop.create_connection = make_mocked_coro((tr, proto)) req = ClientRequest( 'GET', URL('https://www.python.org'), proxy=URL('http://proxy.example.com'), loop=self.loop, ) with self.assertRaisesRegex( RuntimeError, "Transport does not expose socket instance"): self.loop.run_until_complete(connector._create_connection(req)) self.loop.run_until_complete(proxy_req.close()) proxy_resp.close() self.loop.run_until_complete(req.close()) @mock.patch('aiohttp.connector.ClientRequest') def test_https_connect_http_proxy_error(self, ClientRequestMock): proxy_req = 
ClientRequest('GET', URL('http://proxy.example.com'), loop=self.loop) ClientRequestMock.return_value = proxy_req proxy_resp = ClientResponse('get', URL('http://proxy.example.com')) proxy_resp._loop = self.loop proxy_req.send = send_mock = mock.Mock() send_mock.return_value = proxy_resp proxy_resp.start = make_mocked_coro( mock.Mock(status=400, reason='bad request')) connector = aiohttp.TCPConnector(loop=self.loop) connector._resolve_host = make_mocked_coro( [{'hostname': 'hostname', 'host': '127.0.0.1', 'port': 80, 'family': socket.AF_INET, 'proto': 0, 'flags': 0}]) tr, proto = mock.Mock(), mock.Mock() tr.get_extra_info.return_value = None self.loop.create_connection = make_mocked_coro((tr, proto)) req = ClientRequest( 'GET', URL('https://www.python.org'), proxy=URL('http://proxy.example.com'), loop=self.loop, ) with self.assertRaisesRegex( aiohttp.ClientHttpProxyError, "400, message='bad request'"): self.loop.run_until_complete(connector._create_connection(req)) self.loop.run_until_complete(proxy_req.close()) proxy_resp.close() self.loop.run_until_complete(req.close()) @mock.patch('aiohttp.connector.ClientRequest') def test_https_connect_resp_start_error(self, ClientRequestMock): proxy_req = ClientRequest('GET', URL('http://proxy.example.com'), loop=self.loop) ClientRequestMock.return_value = proxy_req proxy_resp = ClientResponse('get', URL('http://proxy.example.com')) proxy_resp._loop = self.loop proxy_req.send = send_mock = mock.Mock() send_mock.return_value = proxy_resp proxy_resp.start = make_mocked_coro( raise_exception=OSError("error message")) connector = aiohttp.TCPConnector(loop=self.loop) connector._resolve_host = make_mocked_coro( [{'hostname': 'hostname', 'host': '127.0.0.1', 'port': 80, 'family': socket.AF_INET, 'proto': 0, 'flags': 0}]) tr, proto = mock.Mock(), mock.Mock() tr.get_extra_info.return_value = None self.loop.create_connection = make_mocked_coro((tr, proto)) req = ClientRequest( 'GET', URL('https://www.python.org'), 
proxy=URL('http://proxy.example.com'), loop=self.loop, ) with self.assertRaisesRegex(OSError, "error message"): self.loop.run_until_complete(connector._create_connection(req)) @mock.patch('aiohttp.connector.ClientRequest') def test_request_port(self, ClientRequestMock): proxy_req = ClientRequest('GET', URL('http://proxy.example.com'), loop=self.loop) ClientRequestMock.return_value = proxy_req connector = aiohttp.TCPConnector(loop=self.loop) connector._resolve_host = make_mocked_coro( [{'hostname': 'hostname', 'host': '127.0.0.1', 'port': 80, 'family': socket.AF_INET, 'proto': 0, 'flags': 0}]) tr, proto = mock.Mock(), mock.Mock() tr.get_extra_info.return_value = None self.loop.create_connection = make_mocked_coro((tr, proto)) req = ClientRequest( 'GET', URL('http://localhost:1234/path'), proxy=URL('http://proxy.example.com'), loop=self.loop, ) self.loop.run_until_complete(connector._create_connection(req)) self.assertEqual(req.url, URL('http://localhost:1234/path')) def test_proxy_auth_property(self): req = aiohttp.ClientRequest( 'GET', URL('http://localhost:1234/path'), proxy=URL('http://proxy.example.com'), proxy_auth=aiohttp.helpers.BasicAuth('user', 'pass'), loop=self.loop) self.assertEqual(('user', 'pass', 'latin1'), req.proxy_auth) def test_proxy_auth_property_default(self): req = aiohttp.ClientRequest( 'GET', URL('http://localhost:1234/path'), proxy=URL('http://proxy.example.com'), loop=self.loop) self.assertIsNone(req.proxy_auth) @mock.patch('aiohttp.connector.ClientRequest') def test_https_connect_pass_ssl_context(self, ClientRequestMock): proxy_req = ClientRequest('GET', URL('http://proxy.example.com'), loop=self.loop) ClientRequestMock.return_value = proxy_req proxy_resp = ClientResponse('get', URL('http://proxy.example.com')) proxy_resp._loop = self.loop proxy_req.send = send_mock = mock.Mock() send_mock.return_value = proxy_resp proxy_resp.start = make_mocked_coro(mock.Mock(status=200)) connector = aiohttp.TCPConnector(loop=self.loop) 
connector._resolve_host = make_mocked_coro( [{'hostname': 'hostname', 'host': '127.0.0.1', 'port': 80, 'family': socket.AF_INET, 'proto': 0, 'flags': 0}]) tr, proto = mock.Mock(), mock.Mock() self.loop.create_connection = make_mocked_coro((tr, proto)) req = ClientRequest( 'GET', URL('https://www.python.org'), proxy=URL('http://proxy.example.com'), loop=self.loop, ) self.loop.run_until_complete(connector._create_connection(req)) self.loop.create_connection.assert_called_with( mock.ANY, ssl=connector._make_ssl_context(True), sock=mock.ANY, server_hostname='www.python.org') self.assertEqual(req.url.path, '/') self.assertEqual(proxy_req.method, 'CONNECT') self.assertEqual(proxy_req.url, URL('https://www.python.org')) tr.close.assert_called_once_with() tr.get_extra_info.assert_called_with('socket', default=None) self.loop.run_until_complete(proxy_req.close()) proxy_resp.close() self.loop.run_until_complete(req.close()) @mock.patch('aiohttp.connector.ClientRequest') def test_https_auth(self, ClientRequestMock): proxy_req = ClientRequest('GET', URL('http://proxy.example.com'), auth=aiohttp.helpers.BasicAuth('user', 'pass'), loop=self.loop) ClientRequestMock.return_value = proxy_req proxy_resp = ClientResponse('get', URL('http://proxy.example.com')) proxy_resp._loop = self.loop proxy_req.send = send_mock = mock.Mock() send_mock.return_value = proxy_resp proxy_resp.start = make_mocked_coro(mock.Mock(status=200)) connector = aiohttp.TCPConnector(loop=self.loop) connector._resolve_host = make_mocked_coro( [{'hostname': 'hostname', 'host': '127.0.0.1', 'port': 80, 'family': socket.AF_INET, 'proto': 0, 'flags': 0}]) tr, proto = mock.Mock(), mock.Mock() self.loop.create_connection = make_mocked_coro((tr, proto)) self.assertIn('AUTHORIZATION', proxy_req.headers) self.assertNotIn('PROXY-AUTHORIZATION', proxy_req.headers) req = ClientRequest( 'GET', URL('https://www.python.org'), proxy=URL('http://proxy.example.com'), loop=self.loop ) self.assertNotIn('AUTHORIZATION', req.headers) 
self.assertNotIn('PROXY-AUTHORIZATION', req.headers) self.loop.run_until_complete(connector._create_connection(req)) self.assertEqual(req.url.path, '/') self.assertNotIn('AUTHORIZATION', req.headers) self.assertNotIn('PROXY-AUTHORIZATION', req.headers) self.assertNotIn('AUTHORIZATION', proxy_req.headers) self.assertIn('PROXY-AUTHORIZATION', proxy_req.headers) connector._resolve_host.assert_called_with( 'proxy.example.com', 80, traces=None) self.loop.run_until_complete(proxy_req.close()) proxy_resp.close() self.loop.run_until_complete(req.close()) aiohttp-3.0.1/tests/test_proxy_functional.py0000666000000000000000000005020213240304665017475 0ustar 00000000000000import asyncio import os import pathlib from unittest import mock import pytest from yarl import URL import aiohttp from aiohttp import web @pytest.fixture def proxy_test_server(aiohttp_raw_server, loop, monkeypatch): """Handle all proxy requests and imitate remote server response.""" _patch_ssl_transport(monkeypatch) default_response = dict( status=200, headers=None, body=None) proxy_mock = mock.Mock() async def proxy_handler(request): proxy_mock.request = request proxy_mock.requests_list.append(request) response = default_response.copy() if isinstance(proxy_mock.return_value, dict): response.update(proxy_mock.return_value) headers = response['headers'] if not headers: headers = {} if request.method == 'CONNECT': response['body'] = None response['headers'] = headers resp = web.Response(**response) await resp.prepare(request) await resp.write_eof() return resp async def proxy_server(): proxy_mock.request = None proxy_mock.auth = None proxy_mock.requests_list = [] server = await aiohttp_raw_server(proxy_handler) proxy_mock.server = server proxy_mock.url = server.make_url('/') return proxy_mock return proxy_server @pytest.fixture() def get_request(loop): async def _request(method='GET', *, url, trust_env=False, **kwargs): connector = aiohttp.TCPConnector(ssl=False, loop=loop) client = 
aiohttp.ClientSession(connector=connector, trust_env=trust_env) try: resp = await client.request(method, url, **kwargs) await resp.release() return resp finally: await client.close() return _request async def test_proxy_http_absolute_path(proxy_test_server, get_request): url = 'http://aiohttp.io/path?query=yes' proxy = await proxy_test_server() await get_request(url=url, proxy=proxy.url) assert len(proxy.requests_list) == 1 assert proxy.request.method == 'GET' assert proxy.request.host == 'aiohttp.io' assert proxy.request.path_qs == 'http://aiohttp.io/path?query=yes' async def test_proxy_http_raw_path(proxy_test_server, get_request): url = 'http://aiohttp.io:2561/space sheep?q=can:fly' raw_url = 'http://aiohttp.io:2561/space%20sheep?q=can:fly' proxy = await proxy_test_server() await get_request(url=url, proxy=proxy.url) assert proxy.request.host == 'aiohttp.io:2561' assert proxy.request.path_qs == raw_url async def test_proxy_http_idna_support(proxy_test_server, get_request): url = 'http://éé.com/' raw_url = 'http://xn--9caa.com/' proxy = await proxy_test_server() await get_request(url=url, proxy=proxy.url) assert proxy.request.host == 'xn--9caa.com' assert proxy.request.path_qs == raw_url async def test_proxy_http_connection_error(get_request): url = 'http://aiohttp.io/path' proxy_url = 'http://localhost:2242/' with pytest.raises(aiohttp.ClientConnectorError): await get_request(url=url, proxy=proxy_url) async def test_proxy_http_bad_response(proxy_test_server, get_request): url = 'http://aiohttp.io/path' proxy = await proxy_test_server() proxy.return_value = dict( status=502, headers={'Proxy-Agent': 'TestProxy'}) resp = await get_request(url=url, proxy=proxy.url) assert resp.status == 502 assert resp.headers['Proxy-Agent'] == 'TestProxy' async def test_proxy_http_auth(proxy_test_server, get_request): url = 'http://aiohttp.io/path' proxy = await proxy_test_server() await get_request(url=url, proxy=proxy.url) assert 'Authorization' not in proxy.request.headers 
assert 'Proxy-Authorization' not in proxy.request.headers auth = aiohttp.BasicAuth('user', 'pass') await get_request(url=url, auth=auth, proxy=proxy.url) assert 'Authorization' in proxy.request.headers assert 'Proxy-Authorization' not in proxy.request.headers await get_request(url=url, proxy_auth=auth, proxy=proxy.url) assert 'Authorization' not in proxy.request.headers assert 'Proxy-Authorization' in proxy.request.headers await get_request(url=url, auth=auth, proxy_auth=auth, proxy=proxy.url) assert 'Authorization' in proxy.request.headers assert 'Proxy-Authorization' in proxy.request.headers async def test_proxy_http_auth_utf8(proxy_test_server, get_request): url = 'http://aiohttp.io/path' auth = aiohttp.BasicAuth('юзер', 'паÑÑ', 'utf-8') proxy = await proxy_test_server() await get_request(url=url, auth=auth, proxy=proxy.url) assert 'Authorization' in proxy.request.headers assert 'Proxy-Authorization' not in proxy.request.headers async def test_proxy_http_auth_from_url(proxy_test_server, get_request): url = 'http://aiohttp.io/path' proxy = await proxy_test_server() auth_url = URL(url).with_user('user').with_password('pass') await get_request(url=auth_url, proxy=proxy.url) assert 'Authorization' in proxy.request.headers assert 'Proxy-Authorization' not in proxy.request.headers proxy_url = URL(proxy.url).with_user('user').with_password('pass') await get_request(url=url, proxy=proxy_url) assert 'Authorization' not in proxy.request.headers assert 'Proxy-Authorization' in proxy.request.headers async def test_proxy_http_acquired_cleanup(proxy_test_server, loop): url = 'http://aiohttp.io/path' conn = aiohttp.TCPConnector(loop=loop) sess = aiohttp.ClientSession(connector=conn, loop=loop) proxy = await proxy_test_server() assert 0 == len(conn._acquired) resp = await sess.get(url, proxy=proxy.url) assert resp.closed assert 0 == len(conn._acquired) await sess.close() @pytest.mark.skip('we need to reconsider how we test this') async def 
test_proxy_http_acquired_cleanup_force(proxy_test_server, loop): url = 'http://aiohttp.io/path' conn = aiohttp.TCPConnector(force_close=True, loop=loop) sess = aiohttp.ClientSession(connector=conn, loop=loop) proxy = await proxy_test_server() assert 0 == len(conn._acquired) async def request(): resp = await sess.get(url, proxy=proxy.url) assert 1 == len(conn._acquired) await resp.release() await request() assert 0 == len(conn._acquired) await sess.close() @pytest.mark.skip('we need to reconsider how we test this') async def test_proxy_http_multi_conn_limit(proxy_test_server, loop): url = 'http://aiohttp.io/path' limit, multi_conn_num = 1, 5 conn = aiohttp.TCPConnector(limit=limit, loop=loop) sess = aiohttp.ClientSession(connector=conn, loop=loop) proxy = await proxy_test_server() current_pid = None async def request(pid): # process requests only one by one nonlocal current_pid resp = await sess.get(url, proxy=proxy.url) current_pid = pid await asyncio.sleep(0.2, loop=loop) assert current_pid == pid await resp.release() return resp requests = [request(pid) for pid in range(multi_conn_num)] responses = await asyncio.gather(*requests, loop=loop) assert len(responses) == multi_conn_num assert set(resp.status for resp in responses) == {200} await sess.close() @pytest.mark.xfail async def xtest_proxy_https_connect(proxy_test_server, get_request): proxy = await proxy_test_server() url = 'https://www.google.com.ua/search?q=aiohttp proxy' await get_request(url=url, proxy=proxy.url) connect = proxy.requests_list[0] assert connect.method == 'CONNECT' assert connect.path == 'www.google.com.ua:443' assert connect.host == 'www.google.com.ua' assert proxy.request.host == 'www.google.com.ua' assert proxy.request.path_qs == '/search?q=aiohttp+proxy' @pytest.mark.xfail async def xtest_proxy_https_connect_with_port(proxy_test_server, get_request): proxy = await proxy_test_server() url = 'https://secure.aiohttp.io:2242/path' await get_request(url=url, proxy=proxy.url) connect = 
proxy.requests_list[0] assert connect.method == 'CONNECT' assert connect.path == 'secure.aiohttp.io:2242' assert connect.host == 'secure.aiohttp.io:2242' assert proxy.request.host == 'secure.aiohttp.io:2242' assert proxy.request.path_qs == '/path' @pytest.mark.xfail async def xtest_proxy_https_send_body(proxy_test_server, loop): sess = aiohttp.ClientSession(loop=loop) proxy = await proxy_test_server() proxy.return_value = {'status': 200, 'body': b'1'*(2**20)} url = 'https://www.google.com.ua/search?q=aiohttp proxy' resp = await sess.get(url, proxy=proxy.url) body = await resp.read() await resp.release() await sess.close() assert body == b'1'*(2**20) @pytest.mark.xfail async def xtest_proxy_https_idna_support(proxy_test_server, get_request): url = 'https://éé.com/' proxy = await proxy_test_server() await get_request(url=url, proxy=proxy.url) connect = proxy.requests_list[0] assert connect.method == 'CONNECT' assert connect.path == 'xn--9caa.com:443' assert connect.host == 'xn--9caa.com' async def test_proxy_https_connection_error(get_request): url = 'https://secure.aiohttp.io/path' proxy_url = 'http://localhost:2242/' with pytest.raises(aiohttp.ClientConnectorError): await get_request(url=url, proxy=proxy_url) async def test_proxy_https_bad_response(proxy_test_server, get_request): url = 'https://secure.aiohttp.io/path' proxy = await proxy_test_server() proxy.return_value = dict( status=502, headers={'Proxy-Agent': 'TestProxy'}) with pytest.raises(aiohttp.ClientHttpProxyError): await get_request(url=url, proxy=proxy.url) assert len(proxy.requests_list) == 1 assert proxy.request.method == 'CONNECT' assert proxy.request.path == 'secure.aiohttp.io:443' @pytest.mark.xfail async def xtest_proxy_https_auth(proxy_test_server, get_request): url = 'https://secure.aiohttp.io/path' auth = aiohttp.BasicAuth('user', 'pass') proxy = await proxy_test_server() await get_request(url=url, proxy=proxy.url) connect = proxy.requests_list[0] assert 'Authorization' not in connect.headers 
assert 'Proxy-Authorization' not in connect.headers assert 'Authorization' not in proxy.request.headers assert 'Proxy-Authorization' not in proxy.request.headers proxy = await proxy_test_server() await get_request(url=url, auth=auth, proxy=proxy.url) connect = proxy.requests_list[0] assert 'Authorization' not in connect.headers assert 'Proxy-Authorization' not in connect.headers assert 'Authorization' in proxy.request.headers assert 'Proxy-Authorization' not in proxy.request.headers proxy = await proxy_test_server() await get_request(url=url, proxy_auth=auth, proxy=proxy.url) connect = proxy.requests_list[0] assert 'Authorization' not in connect.headers assert 'Proxy-Authorization' in connect.headers assert 'Authorization' not in proxy.request.headers assert 'Proxy-Authorization' not in proxy.request.headers proxy = await proxy_test_server() await get_request(url=url, auth=auth, proxy_auth=auth, proxy=proxy.url) connect = proxy.requests_list[0] assert 'Authorization' not in connect.headers assert 'Proxy-Authorization' in connect.headers assert 'Authorization' in proxy.request.headers assert 'Proxy-Authorization' not in proxy.request.headers @pytest.mark.xfail async def xtest_proxy_https_acquired_cleanup(proxy_test_server, loop): url = 'https://secure.aiohttp.io/path' conn = aiohttp.TCPConnector(loop=loop) sess = aiohttp.ClientSession(connector=conn, loop=loop) proxy = await proxy_test_server() assert 0 == len(conn._acquired) async def request(): resp = await sess.get(url, proxy=proxy.url) assert 1 == len(conn._acquired) await resp.release() await request() assert 0 == len(conn._acquired) await sess.close() @pytest.mark.xfail async def xtest_proxy_https_acquired_cleanup_force(proxy_test_server, loop): url = 'https://secure.aiohttp.io/path' conn = aiohttp.TCPConnector(force_close=True, loop=loop) sess = aiohttp.ClientSession(connector=conn, loop=loop) proxy = await proxy_test_server() assert 0 == len(conn._acquired) async def request(): resp = await sess.get(url, 
proxy=proxy.url) assert 1 == len(conn._acquired) await resp.release() await request() assert 0 == len(conn._acquired) await sess.close() @pytest.mark.xfail async def xtest_proxy_https_multi_conn_limit(proxy_test_server, loop): url = 'https://secure.aiohttp.io/path' limit, multi_conn_num = 1, 5 conn = aiohttp.TCPConnector(limit=limit, loop=loop) sess = aiohttp.ClientSession(connector=conn, loop=loop) proxy = await proxy_test_server() current_pid = None async def request(pid): # process requests only one by one nonlocal current_pid resp = await sess.get(url, proxy=proxy.url) current_pid = pid await asyncio.sleep(0.2, loop=loop) assert current_pid == pid await resp.release() return resp requests = [request(pid) for pid in range(multi_conn_num)] responses = await asyncio.gather(*requests, loop=loop) assert len(responses) == multi_conn_num assert set(resp.status for resp in responses) == {200} await sess.close() def _patch_ssl_transport(monkeypatch): """Make ssl transport substitution to prevent ssl handshake.""" def _make_ssl_transport_dummy(self, rawsock, protocol, sslcontext, waiter=None, **kwargs): return self._make_socket_transport(rawsock, protocol, waiter, extra=kwargs.get('extra'), server=kwargs.get('server')) monkeypatch.setattr( "asyncio.selector_events.BaseSelectorEventLoop._make_ssl_transport", _make_ssl_transport_dummy) original_is_file = pathlib.Path.is_file def mock_is_file(self): """ make real netrc file invisible in home dir """ if self.name in ['_netrc', '.netrc'] and self.parent == self.home(): return False else: return original_is_file(self) async def test_proxy_from_env_http(proxy_test_server, get_request, mocker): url = 'http://aiohttp.io/path' proxy = await proxy_test_server() mocker.patch.dict(os.environ, {'http_proxy': str(proxy.url)}) mocker.patch('pathlib.Path.is_file', mock_is_file) await get_request(url=url, trust_env=True) assert len(proxy.requests_list) == 1 assert proxy.request.method == 'GET' assert proxy.request.host == 'aiohttp.io' 
assert proxy.request.path_qs == 'http://aiohttp.io/path' assert 'Proxy-Authorization' not in proxy.request.headers async def test_proxy_from_env_http_with_auth(proxy_test_server, get_request, mocker): url = 'http://aiohttp.io/path' proxy = await proxy_test_server() auth = aiohttp.BasicAuth('user', 'pass') mocker.patch.dict(os.environ, {'http_proxy': str(proxy.url .with_user(auth.login) .with_password(auth.password))}) await get_request(url=url, trust_env=True) assert len(proxy.requests_list) == 1 assert proxy.request.method == 'GET' assert proxy.request.host == 'aiohttp.io' assert proxy.request.path_qs == 'http://aiohttp.io/path' assert proxy.request.headers['Proxy-Authorization'] == auth.encode() async def test_proxy_from_env_http_with_auth_from_netrc( proxy_test_server, get_request, tmpdir, mocker): url = 'http://aiohttp.io/path' proxy = await proxy_test_server() auth = aiohttp.BasicAuth('user', 'pass') netrc_file = tmpdir.join('test_netrc') netrc_file_data = 'machine 127.0.0.1 login %s password %s' % ( auth.login, auth.password) with open(str(netrc_file), 'w') as f: f.write(netrc_file_data) mocker.patch.dict(os.environ, {'http_proxy': str(proxy.url), 'NETRC': str(netrc_file)}) await get_request(url=url, trust_env=True) assert len(proxy.requests_list) == 1 assert proxy.request.method == 'GET' assert proxy.request.host == 'aiohttp.io' assert proxy.request.path_qs == 'http://aiohttp.io/path' assert proxy.request.headers['Proxy-Authorization'] == auth.encode() async def test_proxy_from_env_http_without_auth_from_netrc( proxy_test_server, get_request, tmpdir, mocker): url = 'http://aiohttp.io/path' proxy = await proxy_test_server() auth = aiohttp.BasicAuth('user', 'pass') netrc_file = tmpdir.join('test_netrc') netrc_file_data = 'machine 127.0.0.2 login %s password %s' % ( auth.login, auth.password) with open(str(netrc_file), 'w') as f: f.write(netrc_file_data) mocker.patch.dict(os.environ, {'http_proxy': str(proxy.url), 'NETRC': str(netrc_file)}) await 
get_request(url=url, trust_env=True) assert len(proxy.requests_list) == 1 assert proxy.request.method == 'GET' assert proxy.request.host == 'aiohttp.io' assert proxy.request.path_qs == 'http://aiohttp.io/path' assert 'Proxy-Authorization' not in proxy.request.headers async def test_proxy_from_env_http_without_auth_from_wrong_netrc( proxy_test_server, get_request, tmpdir, mocker): url = 'http://aiohttp.io/path' proxy = await proxy_test_server() auth = aiohttp.BasicAuth('user', 'pass') netrc_file = tmpdir.join('test_netrc') invalid_data = 'machine 127.0.0.1 %s pass %s' % ( auth.login, auth.password) with open(str(netrc_file), 'w') as f: f.write(invalid_data) mocker.patch.dict(os.environ, {'http_proxy': str(proxy.url), 'NETRC': str(netrc_file)}) await get_request(url=url, trust_env=True) assert len(proxy.requests_list) == 1 assert proxy.request.method == 'GET' assert proxy.request.host == 'aiohttp.io' assert proxy.request.path_qs == 'http://aiohttp.io/path' assert 'Proxy-Authorization' not in proxy.request.headers @pytest.mark.xfail async def xtest_proxy_from_env_https(proxy_test_server, get_request, mocker): url = 'https://aiohttp.io/path' proxy = await proxy_test_server() mocker.patch.dict(os.environ, {'https_proxy': str(proxy.url)}) mock.patch('pathlib.Path.is_file', mock_is_file) await get_request(url=url, trust_env=True) assert len(proxy.requests_list) == 2 assert proxy.request.method == 'GET' assert proxy.request.host == 'aiohttp.io' assert proxy.request.path_qs == 'https://aiohttp.io/path' assert 'Proxy-Authorization' not in proxy.request.headers @pytest.mark.xfail async def xtest_proxy_from_env_https_with_auth(proxy_test_server, get_request, mocker): url = 'https://aiohttp.io/path' proxy = await proxy_test_server() auth = aiohttp.BasicAuth('user', 'pass') mocker.patch.dict(os.environ, {'https_proxy': str(proxy.url .with_user(auth.login) .with_password(auth.password))}) await get_request(url=url, trust_env=True) assert len(proxy.requests_list) == 2 assert 
proxy.request.method == 'GET' assert proxy.request.host == 'aiohttp.io' assert proxy.request.path_qs == '/path' assert 'Proxy-Authorization' not in proxy.request.headers r2 = proxy.requests_list[0] assert r2.method == 'CONNECT' assert r2.host == 'aiohttp.io' assert r2.path_qs == '/path' assert r2.headers['Proxy-Authorization'] == auth.encode() async def test_proxy_auth(): async with aiohttp.ClientSession() as session: with pytest.raises( ValueError, message="proxy_auth must be None or BasicAuth() tuple"): await session.get('http://python.org', proxy='http://proxy.example.com', proxy_auth=('user', 'pass')) aiohttp-3.0.1/tests/test_pytest_plugin.py0000666000000000000000000001373313240304665017010 0ustar 00000000000000import sys import pytest pytest_plugins = 'pytester' CONFTEST = ''' pytest_plugins = 'aiohttp.pytest_plugin' ''' def test_aiohttp_plugin(testdir): testdir.makepyfile("""\ import pytest from unittest import mock from aiohttp import web async def hello(request): return web.Response(body=b'Hello, world') def create_app(loop=None): app = web.Application() app.router.add_route('GET', '/', hello) return app async def test_hello(aiohttp_client): client = await aiohttp_client(create_app) resp = await client.get('/') assert resp.status == 200 text = await resp.text() assert 'Hello, world' in text async def test_hello_from_app(aiohttp_client, loop): app = web.Application() app.router.add_get('/', hello) client = await aiohttp_client(app) resp = await client.get('/') assert resp.status == 200 text = await resp.text() assert 'Hello, world' in text async def test_hello_with_loop(aiohttp_client, loop): client = await aiohttp_client(create_app) resp = await client.get('/') assert resp.status == 200 text = await resp.text() assert 'Hello, world' in text async def test_set_args(aiohttp_client, loop): with pytest.raises(AssertionError): app = web.Application() await aiohttp_client(app, 1, 2, 3) async def test_set_keyword_args(aiohttp_client, loop): app = web.Application() 
with pytest.raises(TypeError): await aiohttp_client(app, param=1) async def test_noop(): pass async def previous(request): if request.method == 'POST': with pytest.warns(DeprecationWarning): request.app['value'] = (await request.post())['value'] return web.Response(body=b'thanks for the data') else: v = request.app.get('value', 'unknown') return web.Response(body='value: {}'.format(v).encode()) def create_stateful_app(loop): app = web.Application() app.router.add_route('*', '/', previous) return app @pytest.fixture def cli(loop, aiohttp_client): return loop.run_until_complete(aiohttp_client(create_stateful_app)) async def test_set_value(cli): resp = await cli.post('/', data={'value': 'foo'}) assert resp.status == 200 text = await resp.text() assert text == 'thanks for the data' assert cli.server.app['value'] == 'foo' async def test_get_value(cli): resp = await cli.get('/') assert resp.status == 200 text = await resp.text() assert text == 'value: unknown' with pytest.warns(DeprecationWarning): cli.server.app['value'] = 'bar' resp = await cli.get('/') assert resp.status == 200 text = await resp.text() assert text == 'value: bar' def test_noncoro(): assert True async def test_failed_to_create_client(aiohttp_client): def make_app(loop): raise RuntimeError() with pytest.raises(RuntimeError): await aiohttp_client(make_app) async def test_custom_port_aiohttp_client(aiohttp_client, aiohttp_unused_port): port = aiohttp_unused_port() client = await aiohttp_client(create_app, server_kwargs={'port': port}) assert client.port == port resp = await client.get('/') assert resp.status == 200 text = await resp.text() assert 'Hello, world' in text async def test_custom_port_test_server(aiohttp_server, aiohttp_unused_port): app = create_app() port = aiohttp_unused_port() server = await aiohttp_server(app, port=port) assert server.port == port """) testdir.makeconftest(CONFTEST) result = testdir.runpytest('-p', 'no:sugar', '--aiohttp-loop=pyloop') result.assert_outcomes(passed=12) def 
test_warning_checks(testdir): testdir.makepyfile("""\ async def foobar(): return 123 async def test_good(): v = await foobar() assert v == 123 async def test_bad(): foobar() """) testdir.makeconftest(CONFTEST) result = testdir.runpytest('-p', 'no:sugar', '-s', '-W', 'default', '--aiohttp-loop=pyloop') result.assert_outcomes(passed=1, failed=1) def test_aiohttp_plugin_async_fixture(testdir, capsys): testdir.makepyfile("""\ import pytest from aiohttp import web async def hello(request): return web.Response(body=b'Hello, world') def create_app(loop): app = web.Application() app.router.add_route('GET', '/', hello) return app @pytest.fixture async def cli(aiohttp_client): client = await aiohttp_client(create_app) return client @pytest.fixture async def foo(): return 42 @pytest.fixture async def bar(request): # request should be accessible in async fixtures if needed return request.function async def test_hello(cli): resp = await cli.get('/') assert resp.status == 200 def test_foo(loop, foo): assert foo == 42 def test_foo_without_loop(foo): # will raise an error because there is no loop pass def test_bar(loop, bar): assert bar is test_bar """) testdir.makeconftest(CONFTEST) result = testdir.runpytest('-p', 'no:sugar', '--aiohttp-loop=pyloop') result.assert_outcomes(passed=3, error=1) result.stdout.fnmatch_lines( "*Asynchronous fixtures must depend on the 'loop' fixture " "or be used in tests depending from it." 
) @pytest.mark.skipif(sys.version_info < (3, 6), reason='old python') def test_aiohttp_plugin_async_gen_fixture(testdir): testdir.makepyfile("""\ import pytest from unittest import mock from aiohttp import web canary = mock.Mock() async def hello(request): return web.Response(body=b'Hello, world') def create_app(loop): app = web.Application() app.router.add_route('GET', '/', hello) return app @pytest.fixture async def cli(aiohttp_client): yield await aiohttp_client(create_app) canary() async def test_hello(cli): resp = await cli.get('/') assert resp.status == 200 def test_finalized(): assert canary.called is True """) testdir.makeconftest(CONFTEST) result = testdir.runpytest('-p', 'no:sugar', '--aiohttp-loop=pyloop') result.assert_outcomes(passed=2) aiohttp-3.0.1/tests/test_resolver.py0000666000000000000000000001662013240304665015741 0ustar 00000000000000import asyncio import ipaddress import socket from unittest.mock import Mock, patch import pytest from aiohttp.resolver import AsyncResolver, DefaultResolver, ThreadedResolver try: import aiodns gethostbyname = hasattr(aiodns.DNSResolver, 'gethostbyname') except ImportError: aiodns = None gethostbyname = False class FakeResult: def __init__(self, addresses): self.addresses = addresses class FakeQueryResult: def __init__(self, host): self.host = host async def fake_result(addresses): return FakeResult(addresses=tuple(addresses)) async def fake_query_result(result): return [FakeQueryResult(host=h) for h in result] def fake_addrinfo(hosts): async def fake(*args, **kwargs): if not hosts: raise socket.gaierror return list([(None, None, None, None, [h, 0]) for h in hosts]) return fake @pytest.mark.skipif(not gethostbyname, reason="aiodns 1.1 required") async def test_async_resolver_positive_lookup(loop): with patch('aiodns.DNSResolver') as mock: mock().gethostbyname.return_value = fake_result(['127.0.0.1']) resolver = AsyncResolver(loop=loop) real = await resolver.resolve('www.python.org') 
ipaddress.ip_address(real[0]['host']) mock().gethostbyname.assert_called_with('www.python.org', socket.AF_INET) @pytest.mark.skipif(aiodns is None, reason="aiodns required") async def test_async_resolver_query_positive_lookup(loop): with patch('aiodns.DNSResolver') as mock: del mock().gethostbyname mock().query.return_value = fake_query_result(['127.0.0.1']) resolver = AsyncResolver(loop=loop) real = await resolver.resolve('www.python.org') ipaddress.ip_address(real[0]['host']) mock().query.assert_called_with('www.python.org', 'A') @pytest.mark.skipif(not gethostbyname, reason="aiodns 1.1 required") async def test_async_resolver_multiple_replies(loop): with patch('aiodns.DNSResolver') as mock: ips = ['127.0.0.1', '127.0.0.2', '127.0.0.3', '127.0.0.4'] mock().gethostbyname.return_value = fake_result(ips) resolver = AsyncResolver(loop=loop) real = await resolver.resolve('www.google.com') ips = [ipaddress.ip_address(x['host']) for x in real] assert len(ips) > 3, "Expecting multiple addresses" @pytest.mark.skipif(aiodns is None, reason="aiodns required") async def test_async_resolver_query_multiple_replies(loop): with patch('aiodns.DNSResolver') as mock: del mock().gethostbyname ips = ['127.0.0.1', '127.0.0.2', '127.0.0.3', '127.0.0.4'] mock().query.return_value = fake_query_result(ips) resolver = AsyncResolver(loop=loop) real = await resolver.resolve('www.google.com') ips = [ipaddress.ip_address(x['host']) for x in real] @pytest.mark.skipif(not gethostbyname, reason="aiodns 1.1 required") async def test_async_resolver_negative_lookup(loop): with patch('aiodns.DNSResolver') as mock: mock().gethostbyname.side_effect = aiodns.error.DNSError() resolver = AsyncResolver(loop=loop) with pytest.raises(OSError): await resolver.resolve('doesnotexist.bla') @pytest.mark.skipif(aiodns is None, reason="aiodns required") async def test_async_resolver_query_negative_lookup(loop): with patch('aiodns.DNSResolver') as mock: del mock().gethostbyname mock().query.side_effect = 
aiodns.error.DNSError() resolver = AsyncResolver(loop=loop) with pytest.raises(OSError): await resolver.resolve('doesnotexist.bla') @pytest.mark.skipif(aiodns is None, reason="aiodns required") async def test_async_resolver_no_hosts_in_query(loop): with patch('aiodns.DNSResolver') as mock: del mock().gethostbyname mock().query.return_value = fake_query_result([]) resolver = AsyncResolver(loop=loop) with pytest.raises(OSError): await resolver.resolve('doesnotexist.bla') @pytest.mark.skipif(not gethostbyname, reason="aiodns 1.1 required") async def test_async_resolver_no_hosts_in_gethostbyname(loop): with patch('aiodns.DNSResolver') as mock: mock().gethostbyname.return_value = fake_result([]) resolver = AsyncResolver(loop=loop) with pytest.raises(OSError): await resolver.resolve('doesnotexist.bla') async def test_threaded_resolver_positive_lookup(): loop = Mock() loop.getaddrinfo = fake_addrinfo(["127.0.0.1"]) resolver = ThreadedResolver(loop=loop) real = await resolver.resolve('www.python.org') ipaddress.ip_address(real[0]['host']) async def test_threaded_resolver_multiple_replies(): loop = Mock() ips = ['127.0.0.1', '127.0.0.2', '127.0.0.3', '127.0.0.4'] loop.getaddrinfo = fake_addrinfo(ips) resolver = ThreadedResolver(loop=loop) real = await resolver.resolve('www.google.com') ips = [ipaddress.ip_address(x['host']) for x in real] assert len(ips) > 3, "Expecting multiple addresses" async def test_threaded_negative_lookup(): loop = Mock() ips = [] loop.getaddrinfo = fake_addrinfo(ips) resolver = ThreadedResolver(loop=loop) with pytest.raises(socket.gaierror): await resolver.resolve('doesnotexist.bla') async def test_close_for_threaded_resolver(loop): resolver = ThreadedResolver(loop=loop) await resolver.close() @pytest.mark.skipif(aiodns is None, reason="aiodns required") async def test_close_for_async_resolver(loop): resolver = AsyncResolver(loop=loop) await resolver.close() def test_default_loop_for_threaded_resolver(loop): asyncio.set_event_loop(loop) resolver = 
ThreadedResolver() assert resolver._loop is loop @pytest.mark.skipif(aiodns is None, reason="aiodns required") def test_default_loop_for_async_resolver(loop): asyncio.set_event_loop(loop) resolver = AsyncResolver() assert resolver._loop is loop @pytest.mark.skipif(not gethostbyname, reason="aiodns 1.1 required") async def test_async_resolver_ipv6_positive_lookup(loop): with patch('aiodns.DNSResolver') as mock: mock().gethostbyname.return_value = fake_result(['::1']) resolver = AsyncResolver(loop=loop) real = await resolver.resolve('www.python.org', family=socket.AF_INET6) ipaddress.ip_address(real[0]['host']) mock().gethostbyname.assert_called_with('www.python.org', socket.AF_INET6) @pytest.mark.skipif(aiodns is None, reason="aiodns required") async def test_async_resolver_query_ipv6_positive_lookup(loop): with patch('aiodns.DNSResolver') as mock: del mock().gethostbyname mock().query.return_value = fake_query_result(['::1']) resolver = AsyncResolver(loop=loop) real = await resolver.resolve('www.python.org', family=socket.AF_INET6) ipaddress.ip_address(real[0]['host']) mock().query.assert_called_with('www.python.org', 'AAAA') def test_async_resolver_aiodns_not_present(loop, monkeypatch): monkeypatch.setattr("aiohttp.resolver.aiodns", None) with pytest.raises(RuntimeError): AsyncResolver(loop=loop) def test_default_resolver(): # if gethostbyname: # assert DefaultResolver is AsyncResolver # else: # assert DefaultResolver is ThreadedResolver assert DefaultResolver is ThreadedResolver aiohttp-3.0.1/tests/test_route_def.py0000666000000000000000000001311413240304665016047 0ustar 00000000000000import pytest from aiohttp import web from aiohttp.web_urldispatcher import UrlDispatcher @pytest.fixture def router(): return UrlDispatcher() def test_get(router): async def handler(request): pass router.add_routes([web.get('/', handler)]) assert len(router.routes()) == 2 # GET and HEAD route = list(router.routes())[1] assert route.handler is handler assert route.method == 'GET' 
assert str(route.url_for()) == '/' route2 = list(router.routes())[0] assert route2.handler is handler assert route2.method == 'HEAD' def test_head(router): async def handler(request): pass router.add_routes([web.head('/', handler)]) assert len(router.routes()) == 1 route = list(router.routes())[0] assert route.handler is handler assert route.method == 'HEAD' assert str(route.url_for()) == '/' def test_post(router): async def handler(request): pass router.add_routes([web.post('/', handler)]) route = list(router.routes())[0] assert route.handler is handler assert route.method == 'POST' assert str(route.url_for()) == '/' def test_put(router): async def handler(request): pass router.add_routes([web.put('/', handler)]) assert len(router.routes()) == 1 route = list(router.routes())[0] assert route.handler is handler assert route.method == 'PUT' assert str(route.url_for()) == '/' def test_patch(router): async def handler(request): pass router.add_routes([web.patch('/', handler)]) assert len(router.routes()) == 1 route = list(router.routes())[0] assert route.handler is handler assert route.method == 'PATCH' assert str(route.url_for()) == '/' def test_delete(router): async def handler(request): pass router.add_routes([web.delete('/', handler)]) assert len(router.routes()) == 1 route = list(router.routes())[0] assert route.handler is handler assert route.method == 'DELETE' assert str(route.url_for()) == '/' def test_route(router): async def handler(request): pass router.add_routes([web.route('OTHER', '/', handler)]) assert len(router.routes()) == 1 route = list(router.routes())[0] assert route.handler is handler assert route.method == 'OTHER' assert str(route.url_for()) == '/' def test_head_deco(router): routes = web.RouteTableDef() @routes.head('/path') async def handler(request): pass router.add_routes(routes) assert len(router.routes()) == 1 route = list(router.routes())[0] assert route.method == 'HEAD' assert str(route.url_for()) == '/path' def test_get_deco(router): 
routes = web.RouteTableDef() @routes.get('/path') async def handler(request): pass router.add_routes(routes) assert len(router.routes()) == 2 route1 = list(router.routes())[0] assert route1.method == 'HEAD' assert str(route1.url_for()) == '/path' route2 = list(router.routes())[1] assert route2.method == 'GET' assert str(route2.url_for()) == '/path' def test_post_deco(router): routes = web.RouteTableDef() @routes.post('/path') async def handler(request): pass router.add_routes(routes) assert len(router.routes()) == 1 route = list(router.routes())[0] assert route.method == 'POST' assert str(route.url_for()) == '/path' def test_put_deco(router): routes = web.RouteTableDef() @routes.put('/path') async def handler(request): pass router.add_routes(routes) assert len(router.routes()) == 1 route = list(router.routes())[0] assert route.method == 'PUT' assert str(route.url_for()) == '/path' def test_patch_deco(router): routes = web.RouteTableDef() @routes.patch('/path') async def handler(request): pass router.add_routes(routes) assert len(router.routes()) == 1 route = list(router.routes())[0] assert route.method == 'PATCH' assert str(route.url_for()) == '/path' def test_delete_deco(router): routes = web.RouteTableDef() @routes.delete('/path') async def handler(request): pass router.add_routes(routes) assert len(router.routes()) == 1 route = list(router.routes())[0] assert route.method == 'DELETE' assert str(route.url_for()) == '/path' def test_route_deco(router): routes = web.RouteTableDef() @routes.route('OTHER', '/path') async def handler(request): pass router.add_routes(routes) assert len(router.routes()) == 1 route = list(router.routes())[0] assert route.method == 'OTHER' assert str(route.url_for()) == '/path' def test_routedef_sequence_protocol(): routes = web.RouteTableDef() @routes.delete('/path') async def handler(request): pass assert len(routes) == 1 info = routes[0] assert isinstance(info, web.RouteDef) assert info in routes assert list(routes)[0] is info def 
test_repr_route_def(): routes = web.RouteTableDef() @routes.get('/path') async def handler(request): pass rd = routes[0] assert repr(rd) == " 'handler'>" def test_repr_route_def_with_extra_info(): routes = web.RouteTableDef() @routes.get('/path', extra='info') async def handler(request): pass rd = routes[0] assert repr(rd) == " 'handler', extra='info'>" def test_repr_route_table_def(): routes = web.RouteTableDef() @routes.get('/path') async def handler(request): pass assert repr(routes) == "" aiohttp-3.0.1/tests/test_run_app.py0000666000000000000000000004046513240304665015550 0ustar 00000000000000import asyncio import contextlib import os import platform import signal import socket import ssl import subprocess import sys from unittest import mock from uuid import uuid4 import pytest from aiohttp import web from aiohttp.test_utils import make_mocked_coro # Test for features of OS' socket support _has_unix_domain_socks = hasattr(socket, 'AF_UNIX') if _has_unix_domain_socks: _abstract_path_sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) try: _abstract_path_sock.bind(b"\x00" + uuid4().hex.encode('ascii')) except FileNotFoundError: _abstract_path_failed = True else: _abstract_path_failed = False finally: _abstract_path_sock.close() del _abstract_path_sock else: _abstract_path_failed = True skip_if_no_abstract_paths = pytest.mark.skipif( _abstract_path_failed, reason="Linux-style abstract paths are not supported." ) skip_if_no_unix_socks = pytest.mark.skipif( not _has_unix_domain_socks, reason="Unix domain sockets are not supported" ) del _has_unix_domain_socks, _abstract_path_failed HAS_IPV6 = socket.has_ipv6 if HAS_IPV6: # The socket.has_ipv6 flag may be True if Python was built with IPv6 # support, but the target system still may not have it. # So let's ensure that we really have IPv6 support. 
try: socket.socket(socket.AF_INET6, socket.SOCK_STREAM) except OSError: HAS_IPV6 = False # tokio event loop does not allow to override attributes def skip_if_no_dict(loop): if not hasattr(loop, '__dict__'): pytest.skip("can not override loop attributes") def skip_if_on_windows(): if platform.system() == "Windows": pytest.skip("the test is not valid for Windows") @pytest.fixture def patched_loop(loop): skip_if_no_dict(loop) server = mock.Mock() server.wait_closed = make_mocked_coro(None) loop.create_server = make_mocked_coro(server) unix_server = mock.Mock() unix_server.wait_closed = make_mocked_coro(None) loop.create_unix_server = make_mocked_coro(unix_server) asyncio.set_event_loop(loop) return loop def stopper(loop): def raiser(): raise KeyboardInterrupt def f(*args): loop.call_soon(raiser) return f def test_run_app_http(patched_loop): app = web.Application() startup_handler = make_mocked_coro() app.on_startup.append(startup_handler) cleanup_handler = make_mocked_coro() app.on_cleanup.append(cleanup_handler) web.run_app(app, print=stopper(patched_loop)) patched_loop.create_server.assert_called_with(mock.ANY, '0.0.0.0', 8080, ssl=None, backlog=128, reuse_address=None, reuse_port=None) startup_handler.assert_called_once_with(app) cleanup_handler.assert_called_once_with(app) def test_run_app_close_loop(patched_loop): app = web.Application() web.run_app(app, print=stopper(patched_loop)) patched_loop.create_server.assert_called_with(mock.ANY, '0.0.0.0', 8080, ssl=None, backlog=128, reuse_address=None, reuse_port=None) assert patched_loop.is_closed() mock_unix_server_single = [ mock.call(mock.ANY, '/tmp/testsock1.sock', ssl=None, backlog=128), ] mock_unix_server_multi = [ mock.call(mock.ANY, '/tmp/testsock1.sock', ssl=None, backlog=128), mock.call(mock.ANY, '/tmp/testsock2.sock', ssl=None, backlog=128), ] mock_server_single = [ mock.call(mock.ANY, '127.0.0.1', 8080, ssl=None, backlog=128, reuse_address=None, reuse_port=None), ] mock_server_multi = [ mock.call(mock.ANY, 
'127.0.0.1', 8080, ssl=None, backlog=128, reuse_address=None, reuse_port=None), mock.call(mock.ANY, '192.168.1.1', 8080, ssl=None, backlog=128, reuse_address=None, reuse_port=None), ] mock_server_default_8989 = [ mock.call(mock.ANY, '0.0.0.0', 8989, ssl=None, backlog=128, reuse_address=None, reuse_port=None) ] mock_socket = mock.Mock(getsockname=lambda: ('mock-socket', 123)) mixed_bindings_tests = ( ( "Nothing Specified", {}, [mock.call(mock.ANY, '0.0.0.0', 8080, ssl=None, backlog=128, reuse_address=None, reuse_port=None)], [] ), ( "Port Only", {'port': 8989}, mock_server_default_8989, [] ), ( "Multiple Hosts", {'host': ('127.0.0.1', '192.168.1.1')}, mock_server_multi, [] ), ( "Multiple Paths", {'path': ('/tmp/testsock1.sock', '/tmp/testsock2.sock')}, [], mock_unix_server_multi ), ( "Multiple Paths, Port", {'path': ('/tmp/testsock1.sock', '/tmp/testsock2.sock'), 'port': 8989}, mock_server_default_8989, mock_unix_server_multi, ), ( "Multiple Paths, Single Host", {'path': ('/tmp/testsock1.sock', '/tmp/testsock2.sock'), 'host': '127.0.0.1'}, mock_server_single, mock_unix_server_multi ), ( "Single Path, Single Host", {'path': '/tmp/testsock1.sock', 'host': '127.0.0.1'}, mock_server_single, mock_unix_server_single ), ( "Single Path, Multiple Hosts", {'path': '/tmp/testsock1.sock', 'host': ('127.0.0.1', '192.168.1.1')}, mock_server_multi, mock_unix_server_single ), ( "Single Path, Port", {'path': '/tmp/testsock1.sock', 'port': 8989}, mock_server_default_8989, mock_unix_server_single ), ( "Multiple Paths, Multiple Hosts, Port", {'path': ('/tmp/testsock1.sock', '/tmp/testsock2.sock'), 'host': ('127.0.0.1', '192.168.1.1'), 'port': 8000}, [mock.call(mock.ANY, '127.0.0.1', 8000, ssl=None, backlog=128, reuse_address=None, reuse_port=None), mock.call(mock.ANY, '192.168.1.1', 8000, ssl=None, backlog=128, reuse_address=None, reuse_port=None)], mock_unix_server_multi ), ( "Only socket", {"sock": [mock_socket]}, [mock.call(mock.ANY, ssl=None, sock=mock_socket, backlog=128)], [], ), 
( "Socket, port", {"sock": [mock_socket], "port": 8765}, [mock.call(mock.ANY, '0.0.0.0', 8765, ssl=None, backlog=128, reuse_address=None, reuse_port=None), mock.call(mock.ANY, sock=mock_socket, ssl=None, backlog=128)], [], ), ( "Socket, Host, No port", {"sock": [mock_socket], "host": 'localhost'}, [mock.call(mock.ANY, 'localhost', 8080, ssl=None, backlog=128, reuse_address=None, reuse_port=None), mock.call(mock.ANY, sock=mock_socket, ssl=None, backlog=128)], [], ), ( "reuse_port", {"reuse_port": True}, [mock.call(mock.ANY, '0.0.0.0', 8080, ssl=None, backlog=128, reuse_address=None, reuse_port=True)], [] ), ( "reuse_address", {"reuse_address": False}, [mock.call(mock.ANY, '0.0.0.0', 8080, ssl=None, backlog=128, reuse_address=False, reuse_port=None)], [] ), ( "reuse_port, reuse_address", {"reuse_address": True, "reuse_port": True}, [mock.call(mock.ANY, '0.0.0.0', 8080, ssl=None, backlog=128, reuse_address=True, reuse_port=True)], [] ), ( "Port, reuse_port", {'port': 8989, "reuse_port": True}, [mock.call(mock.ANY, '0.0.0.0', 8989, ssl=None, backlog=128, reuse_address=None, reuse_port=True)], [] ), ( "Multiple Hosts, reuse_port", {'host': ('127.0.0.1', '192.168.1.1'), "reuse_port": True}, [ mock.call(mock.ANY, '127.0.0.1', 8080, ssl=None, backlog=128, reuse_address=None, reuse_port=True), mock.call(mock.ANY, '192.168.1.1', 8080, ssl=None, backlog=128, reuse_address=None, reuse_port=True), ], [] ), ( "Multiple Paths, Port, reuse_address", {'path': ('/tmp/testsock1.sock', '/tmp/testsock2.sock'), 'port': 8989, 'reuse_address': False}, [mock.call(mock.ANY, '0.0.0.0', 8989, ssl=None, backlog=128, reuse_address=False, reuse_port=None)], mock_unix_server_multi, ), ( "Multiple Paths, Single Host, reuse_address, reuse_port", {'path': ('/tmp/testsock1.sock', '/tmp/testsock2.sock'), 'host': '127.0.0.1', 'reuse_address': True, 'reuse_port': True}, [ mock.call(mock.ANY, '127.0.0.1', 8080, ssl=None, backlog=128, reuse_address=True, reuse_port=True), ], mock_unix_server_multi ), ) 
mixed_bindings_test_ids = [test[0] for test in mixed_bindings_tests] mixed_bindings_test_params = [test[1:] for test in mixed_bindings_tests] @pytest.mark.parametrize( 'run_app_kwargs, expected_server_calls, expected_unix_server_calls', mixed_bindings_test_params, ids=mixed_bindings_test_ids ) def test_run_app_mixed_bindings(run_app_kwargs, expected_server_calls, expected_unix_server_calls, patched_loop): app = web.Application() web.run_app(app, print=stopper(patched_loop), **run_app_kwargs) assert (patched_loop.create_unix_server.mock_calls == expected_unix_server_calls) assert (patched_loop.create_server.mock_calls == expected_server_calls) def test_run_app_https(patched_loop): app = web.Application() ssl_context = ssl.create_default_context() web.run_app(app, ssl_context=ssl_context, print=stopper(patched_loop)) patched_loop.create_server.assert_called_with( mock.ANY, '0.0.0.0', 8443, ssl=ssl_context, backlog=128, reuse_address=None, reuse_port=None) def test_run_app_nondefault_host_port(patched_loop, aiohttp_unused_port): port = aiohttp_unused_port() host = '127.0.0.1' app = web.Application() web.run_app(app, host=host, port=port, print=stopper(patched_loop)) patched_loop.create_server.assert_called_with(mock.ANY, host, port, ssl=None, backlog=128, reuse_address=None, reuse_port=None) def test_run_app_custom_backlog(patched_loop): app = web.Application() web.run_app(app, backlog=10, print=stopper(patched_loop)) patched_loop.create_server.assert_called_with( mock.ANY, '0.0.0.0', 8080, ssl=None, backlog=10, reuse_address=None, reuse_port=None) def test_run_app_custom_backlog_unix(patched_loop): app = web.Application() web.run_app(app, path='/tmp/tmpsock.sock', backlog=10, print=stopper(patched_loop)) patched_loop.create_unix_server.assert_called_with( mock.ANY, '/tmp/tmpsock.sock', ssl=None, backlog=10) @skip_if_no_unix_socks def test_run_app_http_unix_socket(patched_loop, shorttmpdir): app = web.Application() sock_path = str(shorttmpdir.join('socket.sock')) 
printer = mock.Mock(wraps=stopper(patched_loop)) web.run_app(app, path=sock_path, print=printer) patched_loop.create_unix_server.assert_called_with(mock.ANY, sock_path, ssl=None, backlog=128) assert "http://unix:{}:".format(sock_path) in printer.call_args[0][0] @skip_if_no_unix_socks def test_run_app_https_unix_socket(patched_loop, shorttmpdir): app = web.Application() sock_path = str(shorttmpdir.join('socket.sock')) ssl_context = ssl.create_default_context() printer = mock.Mock(wraps=stopper(patched_loop)) web.run_app(app, path=sock_path, ssl_context=ssl_context, print=printer) patched_loop.create_unix_server.assert_called_with( mock.ANY, sock_path, ssl=ssl_context, backlog=128) assert "https://unix:{}:".format(sock_path) in printer.call_args[0][0] @skip_if_no_unix_socks @skip_if_no_abstract_paths def test_run_app_abstract_linux_socket(patched_loop): sock_path = b"\x00" + uuid4().hex.encode('ascii') app = web.Application() web.run_app( app, path=sock_path.decode('ascii', 'ignore'), print=stopper(patched_loop)) patched_loop.create_unix_server.assert_called_with( mock.ANY, sock_path.decode('ascii'), ssl=None, backlog=128 ) def test_run_app_preexisting_inet_socket(patched_loop, mocker): app = web.Application() sock = socket.socket() with contextlib.closing(sock): sock.bind(('0.0.0.0', 0)) _, port = sock.getsockname() printer = mock.Mock(wraps=stopper(patched_loop)) web.run_app(app, sock=sock, print=printer) patched_loop.create_server.assert_called_with( mock.ANY, sock=sock, backlog=128, ssl=None ) assert "http://0.0.0.0:{}".format(port) in printer.call_args[0][0] @pytest.mark.skipif(not HAS_IPV6, reason="IPv6 is not available") def test_run_app_preexisting_inet6_socket(patched_loop): app = web.Application() sock = socket.socket(socket.AF_INET6) with contextlib.closing(sock): sock.bind(('::', 0)) port = sock.getsockname()[1] printer = mock.Mock(wraps=stopper(patched_loop)) web.run_app(app, sock=sock, print=printer) patched_loop.create_server.assert_called_with( 
mock.ANY, sock=sock, backlog=128, ssl=None ) assert "http://[::]:{}".format(port) in printer.call_args[0][0] @skip_if_no_unix_socks def test_run_app_preexisting_unix_socket(patched_loop, mocker): app = web.Application() sock_path = '/tmp/test_preexisting_sock1' sock = socket.socket(socket.AF_UNIX) with contextlib.closing(sock): sock.bind(sock_path) os.unlink(sock_path) printer = mock.Mock(wraps=stopper(patched_loop)) web.run_app(app, sock=sock, print=printer) patched_loop.create_server.assert_called_with( mock.ANY, sock=sock, backlog=128, ssl=None ) assert "http://unix:{}:".format(sock_path) in printer.call_args[0][0] def test_run_app_multiple_preexisting_sockets(patched_loop): app = web.Application() sock1 = socket.socket() sock2 = socket.socket() with contextlib.closing(sock1), contextlib.closing(sock2): sock1.bind(('0.0.0.0', 0)) _, port1 = sock1.getsockname() sock2.bind(('0.0.0.0', 0)) _, port2 = sock2.getsockname() printer = mock.Mock(wraps=stopper(patched_loop)) web.run_app(app, sock=(sock1, sock2), print=printer) patched_loop.create_server.assert_has_calls([ mock.call(mock.ANY, sock=sock1, backlog=128, ssl=None), mock.call(mock.ANY, sock=sock2, backlog=128, ssl=None) ]) assert "http://0.0.0.0:{}".format(port1) in printer.call_args[0][0] assert "http://0.0.0.0:{}".format(port2) in printer.call_args[0][0] _script_test_signal = """ from aiohttp import web app = web.Application() web.run_app(app, host=()) """ def test_sigint(): skip_if_on_windows() proc = subprocess.Popen([sys.executable, "-u", "-c", _script_test_signal], stdout=subprocess.PIPE) for line in proc.stdout: if line.startswith(b"======== Running on"): break proc.send_signal(signal.SIGINT) assert proc.wait() == 0 def test_sigterm(): skip_if_on_windows() proc = subprocess.Popen([sys.executable, "-u", "-c", _script_test_signal], stdout=subprocess.PIPE) for line in proc.stdout: if line.startswith(b"======== Running on"): break proc.terminate() assert proc.wait() == 0 def 
test_startup_cleanup_signals_even_on_failure(patched_loop): patched_loop.create_server = mock.Mock(side_effect=RuntimeError()) app = web.Application() startup_handler = make_mocked_coro() app.on_startup.append(startup_handler) cleanup_handler = make_mocked_coro() app.on_cleanup.append(cleanup_handler) with pytest.raises(RuntimeError): web.run_app(app, print=stopper(patched_loop)) startup_handler.assert_called_once_with(app) cleanup_handler.assert_called_once_with(app) aiohttp-3.0.1/tests/test_signals.py0000666000000000000000000000704513240304665015541 0ustar 00000000000000import re from unittest import mock import pytest from multidict import CIMultiDict from aiohttp.signals import Signal from aiohttp.test_utils import make_mocked_coro, make_mocked_request from aiohttp.web import Application, Response @pytest.fixture def app(): return Application() def make_request(app, method, path, headers=CIMultiDict()): return make_mocked_request(method, path, headers, app=app) async def test_add_signal_handler_not_a_callable(app): callback = True app.on_response_prepare.append(callback) app.on_response_prepare.freeze() with pytest.raises(TypeError): await app.on_response_prepare(None, None) async def test_function_signal_dispatch(app): signal = Signal(app) kwargs = {'foo': 1, 'bar': 2} callback_mock = mock.Mock() async def callback(**kwargs): callback_mock(**kwargs) signal.append(callback) signal.freeze() await signal.send(**kwargs) callback_mock.assert_called_once_with(**kwargs) async def test_function_signal_dispatch2(app): signal = Signal(app) args = {'a', 'b'} kwargs = {'foo': 1, 'bar': 2} callback_mock = mock.Mock() async def callback(*args, **kwargs): callback_mock(*args, **kwargs) signal.append(callback) signal.freeze() await signal.send(*args, **kwargs) callback_mock.assert_called_once_with(*args, **kwargs) async def test_response_prepare(app): callback = mock.Mock() async def cb(*args, **kwargs): callback(*args, **kwargs) app.on_response_prepare.append(cb) 
app.on_response_prepare.freeze() request = make_request(app, 'GET', '/') response = Response(body=b'') await response.prepare(request) callback.assert_called_once_with(request, response) async def test_non_coroutine(app): signal = Signal(app) kwargs = {'foo': 1, 'bar': 2} callback = mock.Mock() signal.append(callback) signal.freeze() with pytest.raises(TypeError): await signal.send(**kwargs) def test_setitem(app): signal = Signal(app) m1 = mock.Mock() signal.append(m1) assert signal[0] is m1 m2 = mock.Mock() signal[0] = m2 assert signal[0] is m2 def test_delitem(app): signal = Signal(app) m1 = mock.Mock() signal.append(m1) assert len(signal) == 1 del signal[0] assert len(signal) == 0 def test_cannot_append_to_frozen_signal(app): signal = Signal(app) m1 = mock.Mock() m2 = mock.Mock() signal.append(m1) signal.freeze() with pytest.raises(RuntimeError): signal.append(m2) assert list(signal) == [m1] def test_cannot_setitem_in_frozen_signal(app): signal = Signal(app) m1 = mock.Mock() m2 = mock.Mock() signal.append(m1) signal.freeze() with pytest.raises(RuntimeError): signal[0] = m2 assert list(signal) == [m1] def test_cannot_delitem_in_frozen_signal(app): signal = Signal(app) m1 = mock.Mock() signal.append(m1) signal.freeze() with pytest.raises(RuntimeError): del signal[0] assert list(signal) == [m1] async def test_cannot_send_non_frozen_signal(app): signal = Signal(app) callback = make_mocked_coro() signal.append(callback) with pytest.raises(RuntimeError): await signal.send() assert not callback.called async def test_repr(app): signal = Signal(app) callback = make_mocked_coro() signal.append(callback) assert re.match(r", frozen=False, " r"\[\]>", repr(signal)) aiohttp-3.0.1/tests/test_streams.py0000666000000000000000000010761713240304665015565 0ustar 00000000000000"""Tests for streams.py""" import asyncio import unittest from unittest import mock import pytest from aiohttp import streams DATA = b'line1\nline2\nline3\n' def run_briefly(loop): async def once(): pass t = 
loop.create_task(once()) loop.run_until_complete(t) def chunkify(seq, n): for i in range(0, len(seq), n): yield seq[i:i+n] def create_stream(loop): protocol = mock.Mock(_reading_paused=False) stream = streams.StreamReader(protocol, loop=loop) stream.feed_data(DATA) stream.feed_eof() return stream @pytest.fixture def protocol(): return mock.Mock(_reading_paused=False) class TestStreamReader(unittest.TestCase): DATA = b'line1\nline2\nline3\n' def setUp(self): self.protocol = mock.Mock(_reading_paused=False) self.loop = asyncio.new_event_loop() asyncio.set_event_loop(None) def tearDown(self): self.loop.close() def _make_one(self, *args, **kwargs): return streams.StreamReader(self.protocol, loop=self.loop, *args, **kwargs) def test_create_waiter(self): stream = self._make_one() stream._waiter = self.loop.create_future with self.assertRaises(RuntimeError): self.loop.run_until_complete(stream._wait('test')) @mock.patch('aiohttp.streams.asyncio') def test_ctor_global_loop(self, m_asyncio): stream = streams.StreamReader(self.protocol) self.assertIs(stream._loop, m_asyncio.get_event_loop.return_value) def test_at_eof(self): stream = self._make_one() self.assertFalse(stream.at_eof()) stream.feed_data(b'some data\n') self.assertFalse(stream.at_eof()) self.loop.run_until_complete(stream.readline()) self.assertFalse(stream.at_eof()) stream.feed_data(b'some data\n') stream.feed_eof() self.loop.run_until_complete(stream.readline()) self.assertTrue(stream.at_eof()) def test_wait_eof(self): stream = self._make_one() wait_task = asyncio.Task(stream.wait_eof(), loop=self.loop) async def cb(): await asyncio.sleep(0.1, loop=self.loop) stream.feed_eof() asyncio.Task(cb(), loop=self.loop) self.loop.run_until_complete(wait_task) self.assertTrue(stream.is_eof()) self.assertIsNone(stream._eof_waiter) def test_wait_eof_eof(self): stream = self._make_one() stream.feed_eof() wait_task = asyncio.Task(stream.wait_eof(), loop=self.loop) self.loop.run_until_complete(wait_task) 
self.assertTrue(stream.is_eof()) def test_feed_empty_data(self): stream = self._make_one() stream.feed_data(b'') stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(b'', data) def test_feed_nonempty_data(self): stream = self._make_one() stream.feed_data(self.DATA) stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(self.DATA, data) def test_read_zero(self): # Read zero bytes. stream = self._make_one() stream.feed_data(self.DATA) data = self.loop.run_until_complete(stream.read(0)) self.assertEqual(b'', data) stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(self.DATA, data) def test_read(self): # Read bytes. stream = self._make_one() read_task = asyncio.Task(stream.read(30), loop=self.loop) def cb(): stream.feed_data(self.DATA) self.loop.call_soon(cb) data = self.loop.run_until_complete(read_task) self.assertEqual(self.DATA, data) stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(b'', data) def test_read_line_breaks(self): # Read bytes without line breaks. stream = self._make_one() stream.feed_data(b'line1') stream.feed_data(b'line2') data = self.loop.run_until_complete(stream.read(5)) self.assertEqual(b'line1', data) data = self.loop.run_until_complete(stream.read(5)) self.assertEqual(b'line2', data) def test_read_all(self): # Read all avaliable buffered bytes stream = self._make_one() stream.feed_data(b'line1') stream.feed_data(b'line2') stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(b'line1line2', data) def test_read_up_to(self): # Read available buffered bytes up to requested amount stream = self._make_one() stream.feed_data(b'line1') stream.feed_data(b'line2') data = self.loop.run_until_complete(stream.read(8)) self.assertEqual(b'line1lin', data) data = self.loop.run_until_complete(stream.read(8)) self.assertEqual(b'e2', data) def test_read_eof(self): # Read bytes, stop at eof. 
stream = self._make_one() read_task = asyncio.Task(stream.read(1024), loop=self.loop) def cb(): stream.feed_eof() self.loop.call_soon(cb) data = self.loop.run_until_complete(read_task) self.assertEqual(b'', data) data = self.loop.run_until_complete(stream.read()) self.assertEqual(data, b'') @mock.patch('aiohttp.streams.internal_logger') def test_read_eof_infinit(self, internal_logger): # Read bytes. stream = self._make_one() stream.feed_eof() self.loop.run_until_complete(stream.read()) self.loop.run_until_complete(stream.read()) self.loop.run_until_complete(stream.read()) self.loop.run_until_complete(stream.read()) self.loop.run_until_complete(stream.read()) self.loop.run_until_complete(stream.read()) self.assertTrue(internal_logger.warning.called) @mock.patch('aiohttp.streams.internal_logger') def test_read_eof_unread_data_no_warning(self, internal_logger): # Read bytes. stream = self._make_one() stream.feed_eof() self.loop.run_until_complete(stream.read()) self.loop.run_until_complete(stream.read()) self.loop.run_until_complete(stream.read()) self.loop.run_until_complete(stream.read()) self.loop.run_until_complete(stream.read()) stream.unread_data(b'data') self.loop.run_until_complete(stream.read()) self.loop.run_until_complete(stream.read()) self.assertFalse(internal_logger.warning.called) def test_read_until_eof(self): # Read all bytes until eof. 
stream = self._make_one() read_task = asyncio.Task(stream.read(-1), loop=self.loop) def cb(): stream.feed_data(b'chunk1\n') stream.feed_data(b'chunk2') stream.feed_eof() self.loop.call_soon(cb) data = self.loop.run_until_complete(read_task) self.assertEqual(b'chunk1\nchunk2', data) data = self.loop.run_until_complete(stream.read()) self.assertEqual(b'', data) def test_read_exception(self): stream = self._make_one() stream.feed_data(b'line\n') data = self.loop.run_until_complete(stream.read(2)) self.assertEqual(b'li', data) stream.set_exception(ValueError()) self.assertRaises( ValueError, self.loop.run_until_complete, stream.read(2)) def test_readline(self): # Read one line. 'readline' will need to wait for the data # to come from 'cb' stream = self._make_one() stream.feed_data(b'chunk1 ') read_task = asyncio.Task(stream.readline(), loop=self.loop) def cb(): stream.feed_data(b'chunk2 ') stream.feed_data(b'chunk3 ') stream.feed_data(b'\n chunk4') self.loop.call_soon(cb) line = self.loop.run_until_complete(read_task) self.assertEqual(b'chunk1 chunk2 chunk3 \n', line) stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(b' chunk4', data) def test_readline_limit_with_existing_data(self): # Read one line. The data is in StreamReader's buffer # before the event loop is run. stream = self._make_one(limit=2) stream.feed_data(b'li') stream.feed_data(b'ne1\nline2\n') self.assertRaises( ValueError, self.loop.run_until_complete, stream.readline()) # The buffer should contain the remaining data after exception stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(b'line2\n', data) def test_readline_limit(self): # Read one line. StreamReaders are fed with data after # their 'readline' methods are called. 
stream = self._make_one(limit=4) def cb(): stream.feed_data(b'chunk1') stream.feed_data(b'chunk2\n') stream.feed_data(b'chunk3\n') stream.feed_eof() self.loop.call_soon(cb) self.assertRaises( ValueError, self.loop.run_until_complete, stream.readline()) data = self.loop.run_until_complete(stream.read()) self.assertEqual(b'chunk3\n', data) def test_readline_nolimit_nowait(self): # All needed data for the first 'readline' call will be # in the buffer. stream = self._make_one() stream.feed_data(self.DATA[:6]) stream.feed_data(self.DATA[6:]) line = self.loop.run_until_complete(stream.readline()) self.assertEqual(b'line1\n', line) stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(b'line2\nline3\n', data) def test_readline_eof(self): stream = self._make_one() stream.feed_data(b'some data') stream.feed_eof() line = self.loop.run_until_complete(stream.readline()) self.assertEqual(b'some data', line) def test_readline_empty_eof(self): stream = self._make_one() stream.feed_eof() line = self.loop.run_until_complete(stream.readline()) self.assertEqual(b'', line) def test_readline_read_byte_count(self): stream = self._make_one() stream.feed_data(self.DATA) self.loop.run_until_complete(stream.readline()) data = self.loop.run_until_complete(stream.read(7)) self.assertEqual(b'line2\nl', data) stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(b'ine3\n', data) def test_readline_exception(self): stream = self._make_one() stream.feed_data(b'line\n') data = self.loop.run_until_complete(stream.readline()) self.assertEqual(b'line\n', data) stream.set_exception(ValueError()) self.assertRaises( ValueError, self.loop.run_until_complete, stream.readline()) def test_readexactly_zero_or_less(self): # Read exact number of bytes (zero or less). 
stream = self._make_one() stream.feed_data(self.DATA) data = self.loop.run_until_complete(stream.readexactly(0)) self.assertEqual(b'', data) stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(self.DATA, data) stream = self._make_one() stream.feed_data(self.DATA) data = self.loop.run_until_complete(stream.readexactly(-1)) self.assertEqual(b'', data) stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(self.DATA, data) def test_readexactly(self): # Read exact number of bytes. stream = self._make_one() n = 2 * len(self.DATA) read_task = asyncio.Task(stream.readexactly(n), loop=self.loop) def cb(): stream.feed_data(self.DATA) stream.feed_data(self.DATA) stream.feed_data(self.DATA) self.loop.call_soon(cb) data = self.loop.run_until_complete(read_task) self.assertEqual(self.DATA + self.DATA, data) stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(self.DATA, data) def test_readexactly_eof(self): # Read exact number of bytes (eof). 
stream = self._make_one() n = 2 * len(self.DATA) read_task = asyncio.Task(stream.readexactly(n), loop=self.loop) def cb(): stream.feed_data(self.DATA) stream.feed_eof() self.loop.call_soon(cb) with self.assertRaises(asyncio.IncompleteReadError) as cm: self.loop.run_until_complete(read_task) self.assertEqual(cm.exception.partial, self.DATA) self.assertEqual(cm.exception.expected, n) self.assertEqual(str(cm.exception), '18 bytes read on a total of 36 expected bytes') data = self.loop.run_until_complete(stream.read()) self.assertEqual(b'', data) def test_readexactly_exception(self): stream = self._make_one() stream.feed_data(b'line\n') data = self.loop.run_until_complete(stream.readexactly(2)) self.assertEqual(b'li', data) stream.set_exception(ValueError()) self.assertRaises( ValueError, self.loop.run_until_complete, stream.readexactly(2)) def test_unread_data(self): stream = self._make_one() stream.feed_data(b'line1') stream.feed_data(b'line2') stream.feed_data(b'onemoreline') data = self.loop.run_until_complete(stream.read(5)) self.assertEqual(b'line1', data) stream.unread_data(data) data = self.loop.run_until_complete(stream.read(5)) self.assertEqual(b'line1', data) data = self.loop.run_until_complete(stream.read(4)) self.assertEqual(b'line', data) stream.unread_data(b'line1line') data = b'' while len(data) < 10: data += self.loop.run_until_complete(stream.read(10)) self.assertEqual(b'line1line2', data) data = self.loop.run_until_complete(stream.read(7)) self.assertEqual(b'onemore', data) stream.unread_data(data) data = b'' while len(data) < 11: data += self.loop.run_until_complete(stream.read(11)) self.assertEqual(b'onemoreline', data) stream.unread_data(b'line') data = self.loop.run_until_complete(stream.read(4)) self.assertEqual(b'line', data) stream.feed_eof() stream.unread_data(b'at_eof') data = self.loop.run_until_complete(stream.read(6)) self.assertEqual(b'at_eof', data) def test_exception(self): stream = self._make_one() 
self.assertIsNone(stream.exception()) exc = ValueError() stream.set_exception(exc) self.assertIs(stream.exception(), exc) def test_exception_waiter(self): stream = self._make_one() async def set_err(): stream.set_exception(ValueError()) t1 = asyncio.Task(stream.readline(), loop=self.loop) t2 = asyncio.Task(set_err(), loop=self.loop) self.loop.run_until_complete(asyncio.wait([t1, t2], loop=self.loop)) self.assertRaises(ValueError, t1.result) def test_exception_cancel(self): stream = self._make_one() async def read_a_line(): await stream.readline() t = asyncio.Task(read_a_line(), loop=self.loop) run_briefly(self.loop) t.cancel() run_briefly(self.loop) # The following line fails if set_exception() isn't careful. stream.set_exception(RuntimeError('message')) run_briefly(self.loop) self.assertIs(stream._waiter, None) def test_readany_eof(self): stream = self._make_one() read_task = asyncio.Task(stream.readany(), loop=self.loop) self.loop.call_soon(stream.feed_data, b'chunk1\n') data = self.loop.run_until_complete(read_task) self.assertEqual(b'chunk1\n', data) stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(b'', data) def test_readany_empty_eof(self): stream = self._make_one() stream.feed_eof() read_task = asyncio.Task(stream.readany(), loop=self.loop) data = self.loop.run_until_complete(read_task) self.assertEqual(b'', data) def test_readany_exception(self): stream = self._make_one() stream.feed_data(b'line\n') data = self.loop.run_until_complete(stream.readany()) self.assertEqual(b'line\n', data) stream.set_exception(ValueError()) self.assertRaises( ValueError, self.loop.run_until_complete, stream.readany()) def test_read_nowait(self): stream = self._make_one() stream.feed_data(b'line1\nline2\n') self.assertEqual(stream.read_nowait(), b'line1\nline2\n') self.assertEqual(stream.read_nowait(), b'') stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(b'', data) def test_read_nowait_n(self): stream = 
self._make_one() stream.feed_data(b'line1\nline2\n') self.assertEqual( stream.read_nowait(4), b'line') self.assertEqual( stream.read_nowait(), b'1\nline2\n') self.assertEqual(stream.read_nowait(), b'') stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(b'', data) def test_read_nowait_exception(self): stream = self._make_one() stream.feed_data(b'line\n') stream.set_exception(ValueError()) self.assertRaises(ValueError, stream.read_nowait) def test_read_nowait_waiter(self): stream = self._make_one() stream.feed_data(b'line\n') stream._waiter = self.loop.create_future() self.assertRaises(RuntimeError, stream.read_nowait) def test_readchunk(self): stream = self._make_one() def cb(): stream.feed_data(b'chunk1') stream.feed_data(b'chunk2') stream.feed_eof() self.loop.call_soon(cb) data, end_of_chunk = self.loop.run_until_complete(stream.readchunk()) self.assertEqual(b'chunk1', data) self.assertFalse(end_of_chunk) data, end_of_chunk = self.loop.run_until_complete(stream.readchunk()) self.assertEqual(b'chunk2', data) self.assertFalse(end_of_chunk) data, end_of_chunk = self.loop.run_until_complete(stream.readchunk()) self.assertEqual(b'', data) self.assertFalse(end_of_chunk) def test_readchunk_wait_eof(self): stream = self._make_one() async def cb(): await asyncio.sleep(0.1, loop=self.loop) stream.feed_eof() asyncio.Task(cb(), loop=self.loop) data, end_of_chunk = self.loop.run_until_complete(stream.readchunk()) self.assertEqual(b"", data) self.assertFalse(end_of_chunk) self.assertTrue(stream.is_eof()) def test_begin_and_end_chunk_receiving(self): stream = self._make_one() stream.begin_http_chunk_receiving() stream.feed_data(b'part1') stream.feed_data(b'part2') stream.end_http_chunk_receiving() data, end_of_chunk = self.loop.run_until_complete(stream.readchunk()) self.assertEqual(b'part1part2', data) self.assertTrue(end_of_chunk) stream.begin_http_chunk_receiving() stream.feed_data(b'part3') data, end_of_chunk = 
self.loop.run_until_complete(stream.readchunk()) self.assertEqual(b'part3', data) self.assertFalse(end_of_chunk) stream.end_http_chunk_receiving() data, end_of_chunk = self.loop.run_until_complete(stream.readchunk()) self.assertEqual(b'', data) self.assertTrue(end_of_chunk) stream.feed_eof() data, end_of_chunk = self.loop.run_until_complete(stream.readchunk()) self.assertEqual(b'', data) self.assertFalse(end_of_chunk) def test_end_chunk_receiving_without_begin(self): stream = self._make_one() self.assertRaises(RuntimeError, stream.end_http_chunk_receiving) def test_readchunk_with_unread(self): """Test that stream.unread does not break controlled chunk receiving. """ stream = self._make_one() # Send 2 chunks stream.begin_http_chunk_receiving() stream.feed_data(b'part1') stream.end_http_chunk_receiving() stream.begin_http_chunk_receiving() stream.feed_data(b'part2') stream.end_http_chunk_receiving() # Read only one chunk data, end_of_chunk = self.loop.run_until_complete(stream.readchunk()) # Try to unread a part of the first chunk stream.unread_data(b'rt1') # The end_of_chunk signal was already received for the first chunk, # so we receive up to the second one data, end_of_chunk = self.loop.run_until_complete(stream.readchunk()) self.assertEqual(b'rt1part2', data) self.assertTrue(end_of_chunk) # Unread a part of the second chunk stream.unread_data(b'rt2') data, end_of_chunk = self.loop.run_until_complete(stream.readchunk()) self.assertEqual(b'rt2', data) # end_of_chunk was already received for this chunk self.assertFalse(end_of_chunk) stream.feed_eof() data, end_of_chunk = self.loop.run_until_complete(stream.readchunk()) self.assertEqual(b'', data) self.assertFalse(end_of_chunk) def test_readchunk_with_other_read_calls(self): """Test that stream.readchunk works when other read calls are made on the stream. 
""" stream = self._make_one() stream.begin_http_chunk_receiving() stream.feed_data(b'part1') stream.end_http_chunk_receiving() stream.begin_http_chunk_receiving() stream.feed_data(b'part2') stream.end_http_chunk_receiving() data = self.loop.run_until_complete(stream.read(7)) self.assertEqual(b'part1pa', data) data, end_of_chunk = self.loop.run_until_complete(stream.readchunk()) self.assertEqual(b'rt2', data) self.assertTrue(end_of_chunk) stream.feed_eof() data, end_of_chunk = self.loop.run_until_complete(stream.readchunk()) self.assertEqual(b'', data) self.assertFalse(end_of_chunk) def test___repr__(self): stream = self._make_one() self.assertEqual("", repr(stream)) def test___repr__nondefault_limit(self): stream = self._make_one(limit=123) self.assertEqual("", repr(stream)) def test___repr__eof(self): stream = self._make_one() stream.feed_eof() self.assertEqual("", repr(stream)) def test___repr__data(self): stream = self._make_one() stream.feed_data(b'data') self.assertEqual("", repr(stream)) def test___repr__exception(self): stream = self._make_one() exc = RuntimeError() stream.set_exception(exc) self.assertEqual("", repr(stream)) def test___repr__waiter(self): stream = self._make_one() stream._waiter = self.loop.create_future() self.assertRegex( repr(stream), ">") stream._waiter.set_result(None) self.loop.run_until_complete(stream._waiter) stream._waiter = None self.assertEqual("", repr(stream)) def test_unread_empty(self): stream = self._make_one() stream.feed_data(b'line1') stream.feed_eof() stream.unread_data(b'') data = self.loop.run_until_complete(stream.read(5)) self.assertEqual(b'line1', data) self.assertTrue(stream.at_eof()) class TestEmptyStreamReader(unittest.TestCase): def setUp(self): self.loop = asyncio.new_event_loop() asyncio.set_event_loop(None) def tearDown(self): self.loop.close() def test_empty_stream_reader(self): s = streams.EmptyStreamReader() self.assertIsNone(s.set_exception(ValueError())) self.assertIsNone(s.exception()) 
self.assertIsNone(s.feed_eof()) self.assertIsNone(s.feed_data(b'data')) self.assertTrue(s.at_eof()) self.assertIsNone( self.loop.run_until_complete(s.wait_eof())) self.assertEqual( self.loop.run_until_complete(s.read()), b'') self.assertEqual( self.loop.run_until_complete(s.readline()), b'') self.assertEqual( self.loop.run_until_complete(s.readany()), b'') self.assertEqual( self.loop.run_until_complete(s.readchunk()), (b'', False)) self.assertRaises( asyncio.IncompleteReadError, self.loop.run_until_complete, s.readexactly(10)) self.assertEqual(s.read_nowait(), b'') class DataQueueMixin: def test_is_eof(self): self.assertFalse(self.buffer.is_eof()) self.buffer.feed_eof() self.assertTrue(self.buffer.is_eof()) def test_at_eof(self): self.assertFalse(self.buffer.at_eof()) self.buffer.feed_eof() self.assertTrue(self.buffer.at_eof()) self.buffer._buffer.append(object()) self.assertFalse(self.buffer.at_eof()) def test_feed_data(self): item = object() self.buffer.feed_data(item, 1) self.assertEqual([(item, 1)], list(self.buffer._buffer)) def test_feed_eof(self): self.buffer.feed_eof() self.assertTrue(self.buffer._eof) def test_read(self): item = object() read_task = asyncio.Task(self.buffer.read(), loop=self.loop) def cb(): self.buffer.feed_data(item, 1) self.loop.call_soon(cb) data = self.loop.run_until_complete(read_task) self.assertIs(item, data) def test_read_eof(self): read_task = asyncio.Task(self.buffer.read(), loop=self.loop) def cb(): self.buffer.feed_eof() self.loop.call_soon(cb) self.assertRaises( streams.EofStream, self.loop.run_until_complete, read_task) def test_read_cancelled(self): read_task = asyncio.Task(self.buffer.read(), loop=self.loop) run_briefly(self.loop) waiter = self.buffer._waiter self.assertTrue(asyncio.isfuture(waiter)) read_task.cancel() self.assertRaises( asyncio.CancelledError, self.loop.run_until_complete, read_task) self.assertTrue(waiter.cancelled()) self.assertIsNone(self.buffer._waiter) self.buffer.feed_data(b'test', 4) 
self.assertIsNone(self.buffer._waiter) def test_read_until_eof(self): item = object() self.buffer.feed_data(item, 1) self.buffer.feed_eof() data = self.loop.run_until_complete(self.buffer.read()) self.assertIs(data, item) self.assertRaises( streams.EofStream, self.loop.run_until_complete, self.buffer.read()) def test_read_exc(self): item = object() self.buffer.feed_data(item) self.buffer.set_exception(ValueError) read_task = asyncio.Task(self.buffer.read(), loop=self.loop) data = self.loop.run_until_complete(read_task) self.assertIs(item, data) self.assertRaises( ValueError, self.loop.run_until_complete, self.buffer.read()) def test_read_exception(self): self.buffer.set_exception(ValueError()) self.assertRaises( ValueError, self.loop.run_until_complete, self.buffer.read()) def test_read_exception_with_data(self): val = object() self.buffer.feed_data(val, 1) self.buffer.set_exception(ValueError()) self.assertIs(val, self.loop.run_until_complete(self.buffer.read())) self.assertRaises( ValueError, self.loop.run_until_complete, self.buffer.read()) def test_read_exception_on_wait(self): read_task = asyncio.Task(self.buffer.read(), loop=self.loop) run_briefly(self.loop) self.assertTrue(asyncio.isfuture(self.buffer._waiter)) self.buffer.feed_eof() self.buffer.set_exception(ValueError()) self.assertRaises( ValueError, self.loop.run_until_complete, read_task) def test_exception(self): self.assertIsNone(self.buffer.exception()) exc = ValueError() self.buffer.set_exception(exc) self.assertIs(self.buffer.exception(), exc) def test_exception_waiter(self): async def set_err(): self.buffer.set_exception(ValueError()) t1 = asyncio.Task(self.buffer.read(), loop=self.loop) t2 = asyncio.Task(set_err(), loop=self.loop) self.loop.run_until_complete(asyncio.wait([t1, t2], loop=self.loop)) self.assertRaises(ValueError, t1.result) class TestDataQueue(unittest.TestCase, DataQueueMixin): def setUp(self): self.loop = asyncio.new_event_loop() asyncio.set_event_loop(None) self.buffer = 
streams.DataQueue(loop=self.loop) def tearDown(self): self.loop.close() def test_feed_data_waiters(loop, protocol): reader = streams.StreamReader(protocol, loop=loop) waiter = reader._waiter = loop.create_future() eof_waiter = reader._eof_waiter = loop.create_future() reader.feed_data(b'1') assert list(reader._buffer) == [b'1'] assert reader._size == 1 assert reader.total_bytes == 1 assert waiter.done() assert not eof_waiter.done() assert reader._waiter is None assert reader._eof_waiter is eof_waiter def test_feed_data_completed_waiters(loop, protocol): reader = streams.StreamReader(protocol, loop=loop) waiter = reader._waiter = loop.create_future() waiter.set_result(1) reader.feed_data(b'1') assert reader._waiter is None def test_feed_eof_waiters(loop, protocol): reader = streams.StreamReader(protocol, loop=loop) waiter = reader._waiter = loop.create_future() eof_waiter = reader._eof_waiter = loop.create_future() reader.feed_eof() assert reader._eof assert waiter.done() assert eof_waiter.done() assert reader._waiter is None assert reader._eof_waiter is None def test_feed_eof_cancelled(loop, protocol): reader = streams.StreamReader(protocol, loop=loop) waiter = reader._waiter = loop.create_future() eof_waiter = reader._eof_waiter = loop.create_future() waiter.set_result(1) eof_waiter.set_result(1) reader.feed_eof() assert waiter.done() assert eof_waiter.done() assert reader._waiter is None assert reader._eof_waiter is None def test_on_eof(loop, protocol): reader = streams.StreamReader(protocol, loop=loop) on_eof = mock.Mock() reader.on_eof(on_eof) assert not on_eof.called reader.feed_eof() assert on_eof.called def test_on_eof_empty_reader(loop): reader = streams.EmptyStreamReader() on_eof = mock.Mock() reader.on_eof(on_eof) assert on_eof.called def test_on_eof_exc_in_callback(loop, protocol): reader = streams.StreamReader(protocol, loop=loop) on_eof = mock.Mock() on_eof.side_effect = ValueError reader.on_eof(on_eof) assert not on_eof.called reader.feed_eof() assert 
on_eof.called assert not reader._eof_callbacks def test_on_eof_exc_in_callback_empty_stream_reader(loop): reader = streams.EmptyStreamReader() on_eof = mock.Mock() on_eof.side_effect = ValueError reader.on_eof(on_eof) assert on_eof.called def test_on_eof_eof_is_set(loop, protocol): reader = streams.StreamReader(protocol, loop=loop) reader.feed_eof() on_eof = mock.Mock() reader.on_eof(on_eof) assert on_eof.called assert not reader._eof_callbacks def test_on_eof_eof_is_set_exception(loop, protocol): reader = streams.StreamReader(protocol, loop=loop) reader.feed_eof() on_eof = mock.Mock() on_eof.side_effect = ValueError reader.on_eof(on_eof) assert on_eof.called assert not reader._eof_callbacks def test_set_exception(loop, protocol): reader = streams.StreamReader(protocol, loop=loop) waiter = reader._waiter = loop.create_future() eof_waiter = reader._eof_waiter = loop.create_future() exc = ValueError() reader.set_exception(exc) assert waiter.exception() is exc assert eof_waiter.exception() is exc assert reader._waiter is None assert reader._eof_waiter is None def test_set_exception_cancelled(loop, protocol): reader = streams.StreamReader(protocol, loop=loop) waiter = reader._waiter = loop.create_future() eof_waiter = reader._eof_waiter = loop.create_future() waiter.set_result(1) eof_waiter.set_result(1) exc = ValueError() reader.set_exception(exc) assert waiter.exception() is None assert eof_waiter.exception() is None assert reader._waiter is None assert reader._eof_waiter is None def test_set_exception_eof_callbacks(loop, protocol): reader = streams.StreamReader(protocol, loop=loop) on_eof = mock.Mock() reader.on_eof(on_eof) reader.set_exception(ValueError()) assert not on_eof.called assert not reader._eof_callbacks async def test_stream_reader_lines(loop): line_iter = iter(DATA.splitlines(keepends=True)) async for line in create_stream(loop): assert line == next(line_iter, None) pytest.raises(StopIteration, next, line_iter) async def 
test_stream_reader_chunks_complete(loop): """Tests if chunked iteration works if the chunking works out (i.e. the data is divisible by the chunk size) """ chunk_iter = chunkify(DATA, 9) async for data in create_stream(loop).iter_chunked(9): assert data == next(chunk_iter, None) pytest.raises(StopIteration, next, chunk_iter) async def test_stream_reader_chunks_incomplete(loop): """Tests if chunked iteration works if the last chunk is incomplete""" chunk_iter = chunkify(DATA, 8) async for data in create_stream(loop).iter_chunked(8): assert data == next(chunk_iter, None) pytest.raises(StopIteration, next, chunk_iter) async def test_data_queue_empty(loop): """Tests that async looping yields nothing if nothing is there""" buffer = streams.DataQueue(loop=loop) buffer.feed_eof() async for _ in buffer: # NOQA assert False async def test_data_queue_items(loop): """Tests that async looping yields objects identically""" buffer = streams.DataQueue(loop=loop) items = [object(), object()] buffer.feed_data(items[0], 1) buffer.feed_data(items[1], 1) buffer.feed_eof() item_iter = iter(items) async for item in buffer: assert item is next(item_iter, None) pytest.raises(StopIteration, next, item_iter) async def test_stream_reader_iter_any(loop): it = iter([b'line1\nline2\nline3\n']) async for raw in create_stream(loop).iter_any(): assert raw == next(it) pytest.raises(StopIteration, next, it) async def test_stream_reader_iter(loop): it = iter([b'line1\n', b'line2\n', b'line3\n']) async for raw in create_stream(loop): assert raw == next(it) pytest.raises(StopIteration, next, it) async def test_stream_reader_iter_chunks_no_chunked_encoding(loop): it = iter([b'line1\nline2\nline3\n']) async for data, end_of_chunk in create_stream(loop).iter_chunks(): assert (data, end_of_chunk) == (next(it), False) pytest.raises(StopIteration, next, it) async def test_stream_reader_iter_chunks_chunked_encoding(loop, protocol): stream = streams.StreamReader(protocol, loop=loop) for line in 
DATA.splitlines(keepends=True): stream.begin_http_chunk_receiving() stream.feed_data(line) stream.end_http_chunk_receiving() stream.feed_eof() it = iter([b'line1\n', b'line2\n', b'line3\n']) async for data, end_of_chunk in stream.iter_chunks(): assert (data, end_of_chunk) == (next(it), True) pytest.raises(StopIteration, next, it) aiohttp-3.0.1/tests/test_tcp_helpers.py0000666000000000000000000001073013240304665016404 0ustar 00000000000000import socket from unittest import mock import pytest from aiohttp.tcp_helpers import CORK, tcp_cork, tcp_nodelay has_ipv6 = socket.has_ipv6 if has_ipv6: # The socket.has_ipv6 flag may be True if Python was built with IPv6 # support, but the target system still may not have it. # So let's ensure that we really have IPv6 support. try: socket.socket(socket.AF_INET6, socket.SOCK_STREAM) except OSError: has_ipv6 = False # nodelay def test_tcp_nodelay_exception(): transport = mock.Mock() s = mock.Mock() s.setsockopt = mock.Mock() s.family = socket.AF_INET s.setsockopt.side_effect = OSError transport.get_extra_info.return_value = s tcp_nodelay(transport, True) s.setsockopt.assert_called_with( socket.IPPROTO_TCP, socket.TCP_NODELAY, True ) def test_tcp_nodelay_enable(): transport = mock.Mock() s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) transport.get_extra_info.return_value = s tcp_nodelay(transport, True) assert s.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY) def test_tcp_nodelay_enable_and_disable(): transport = mock.Mock() s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) transport.get_extra_info.return_value = s tcp_nodelay(transport, True) assert s.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY) tcp_nodelay(transport, False) assert not s.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY) @pytest.mark.skipif(not has_ipv6, reason="IPv6 is not available") def test_tcp_nodelay_enable_ipv6(): transport = mock.Mock() s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) transport.get_extra_info.return_value = s 
tcp_nodelay(transport, True) assert s.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY) @pytest.mark.skipif(not hasattr(socket, 'AF_UNIX'), reason="requires unix sockets") def test_tcp_nodelay_enable_unix(): # do not set nodelay for unix socket transport = mock.Mock() s = mock.Mock(family=socket.AF_UNIX, type=socket.SOCK_STREAM) transport.get_extra_info.return_value = s tcp_nodelay(transport, True) assert not s.setsockopt.called def test_tcp_nodelay_enable_no_socket(): transport = mock.Mock() transport.get_extra_info.return_value = None tcp_nodelay(transport, True) # cork @pytest.mark.skipif(CORK is None, reason="TCP_CORK or TCP_NOPUSH required") def test_tcp_cork_enable(): transport = mock.Mock() s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) transport.get_extra_info.return_value = s tcp_cork(transport, True) assert s.getsockopt(socket.IPPROTO_TCP, CORK) @pytest.mark.skipif(CORK is None, reason="TCP_CORK or TCP_NOPUSH required") def test_set_cork_enable_and_disable(): transport = mock.Mock() s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) transport.get_extra_info.return_value = s tcp_cork(transport, True) assert s.getsockopt(socket.IPPROTO_TCP, CORK) tcp_cork(transport, False) assert not s.getsockopt(socket.IPPROTO_TCP, CORK) @pytest.mark.skipif(not has_ipv6, reason="IPv6 is not available") @pytest.mark.skipif(CORK is None, reason="TCP_CORK or TCP_NOPUSH required") def test_set_cork_enable_ipv6(): transport = mock.Mock() s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) transport.get_extra_info.return_value = s tcp_cork(transport, True) assert s.getsockopt(socket.IPPROTO_TCP, CORK) @pytest.mark.skipif(not hasattr(socket, 'AF_UNIX'), reason="requires unix sockets") @pytest.mark.skipif(CORK is None, reason="TCP_CORK or TCP_NOPUSH required") def test_set_cork_enable_unix(): transport = mock.Mock() s = mock.Mock(family=socket.AF_UNIX, type=socket.SOCK_STREAM) transport.get_extra_info.return_value = s tcp_cork(transport, True) assert not 
s.setsockopt.called @pytest.mark.skipif(CORK is None, reason="TCP_CORK or TCP_NOPUSH required") def test_set_cork_enable_no_socket(): transport = mock.Mock() transport.get_extra_info.return_value = None tcp_cork(transport, True) @pytest.mark.skipif(CORK is None, reason="TCP_CORK or TCP_NOPUSH required") def test_set_cork_exception(): transport = mock.Mock() s = mock.Mock() s.setsockopt = mock.Mock() s.family = socket.AF_INET s.setsockopt.side_effect = OSError transport.get_extra_info.return_value = s tcp_cork(transport, True) s.setsockopt.assert_called_with( socket.IPPROTO_TCP, CORK, True ) aiohttp-3.0.1/tests/test_test_utils.py0000666000000000000000000002062313240304665016275 0ustar 00000000000000import gzip from unittest import mock import pytest from multidict import CIMultiDict from yarl import URL import aiohttp from aiohttp import web from aiohttp.test_utils import AioHTTPTestCase from aiohttp.test_utils import TestClient as _TestClient from aiohttp.test_utils import TestServer as _TestServer from aiohttp.test_utils import (loop_context, make_mocked_request, setup_test_loop, teardown_test_loop, unittest_run_loop) _hello_world_str = "Hello, world" _hello_world_bytes = _hello_world_str.encode('utf-8') _hello_world_gz = gzip.compress(_hello_world_bytes) def _create_example_app(): async def hello(request): return web.Response(body=_hello_world_bytes) async def websocket_handler(request): ws = web.WebSocketResponse() await ws.prepare(request) msg = await ws.receive() if msg.type == aiohttp.WSMsgType.TEXT: if msg.data == 'close': await ws.close() else: await ws.send_str(msg.data + '/answer') return ws async def cookie_handler(request): resp = web.Response(body=_hello_world_bytes) resp.set_cookie('cookie', 'val') return resp app = web.Application() app.router.add_route('*', '/', hello) app.router.add_route('*', '/websocket', websocket_handler) app.router.add_route('*', '/cookie', cookie_handler) return app # these exist to test the pytest scenario @pytest.fixture 
def loop(): with loop_context() as loop: yield loop @pytest.fixture def app(): return _create_example_app() @pytest.fixture def test_client(loop, app): client = _TestClient(_TestServer(app, loop=loop), loop=loop) loop.run_until_complete(client.start_server()) yield client loop.run_until_complete(client.close()) def test_with_test_server_fails(loop): app = _create_example_app() with pytest.raises(TypeError): with _TestServer(app, loop=loop): pass def test_with_client_fails(loop): app = _create_example_app() with pytest.raises(TypeError): with _TestClient(_TestServer(app, loop=loop), loop=loop): pass def test_aiohttp_client_close_is_idempotent(): """ a test client, called multiple times, should not attempt to close the server again. """ loop = setup_test_loop() app = _create_example_app() client = _TestClient(_TestServer(app, loop=loop), loop=loop) loop.run_until_complete(client.close()) loop.run_until_complete(client.close()) teardown_test_loop(loop) class TestAioHTTPTestCase(AioHTTPTestCase): def get_app(self): return _create_example_app() @unittest_run_loop async def test_example_with_loop(self): request = await self.client.request("GET", "/") assert request.status == 200 text = await request.text() assert _hello_world_str == text def test_example(self): async def test_get_route(): resp = await self.client.request("GET", "/") assert resp.status == 200 text = await resp.text() assert _hello_world_str == text self.loop.run_until_complete(test_get_route()) def test_get_route(loop, test_client): async def test_get_route(): resp = await test_client.request("GET", "/") assert resp.status == 200 text = await resp.text() assert _hello_world_str == text loop.run_until_complete(test_get_route()) async def test_client_websocket(loop, test_client): resp = await test_client.ws_connect("/websocket") await resp.send_str("foo") msg = await resp.receive() assert msg.type == aiohttp.WSMsgType.TEXT assert "foo" in msg.data await resp.send_str("close") msg = await resp.receive() 
assert msg.type == aiohttp.WSMsgType.CLOSE async def test_client_cookie(loop, test_client): assert not test_client.session.cookie_jar await test_client.get("/cookie") cookies = list(test_client.session.cookie_jar) assert cookies[0].key == 'cookie' assert cookies[0].value == 'val' @pytest.mark.parametrize("method", [ "get", "post", "options", "post", "put", "patch", "delete" ]) async def test_test_client_methods(method, loop, test_client): resp = await getattr(test_client, method)("/") assert resp.status == 200 text = await resp.text() assert _hello_world_str == text async def test_test_client_head(loop, test_client): resp = await test_client.head("/") assert resp.status == 200 @pytest.mark.parametrize( "headers", [{'token': 'x'}, CIMultiDict({'token': 'x'}), {}]) def test_make_mocked_request(headers): req = make_mocked_request('GET', '/', headers=headers) assert req.method == "GET" assert req.path == "/" assert isinstance(req, web.Request) assert isinstance(req.headers, CIMultiDict) def test_make_mocked_request_sslcontext(): req = make_mocked_request('GET', '/') assert req.transport.get_extra_info('sslcontext') is None def test_make_mocked_request_unknown_extra_info(): req = make_mocked_request('GET', '/') assert req.transport.get_extra_info('unknown_extra_info') is None def test_make_mocked_request_app(): app = mock.Mock() req = make_mocked_request('GET', '/', app=app) assert req.app is app def test_make_mocked_request_match_info(): req = make_mocked_request('GET', '/', match_info={'a': '1', 'b': '2'}) assert req.match_info == {'a': '1', 'b': '2'} def test_make_mocked_request_content(): payload = mock.Mock() req = make_mocked_request('GET', '/', payload=payload) assert req.content is payload def test_make_mocked_request_transport(): transport = mock.Mock() req = make_mocked_request('GET', '/', transport=transport) assert req.transport is transport async def test_test_client_props(loop): app = _create_example_app() client = _TestClient(_TestServer(app, 
host='127.0.0.1', loop=loop), loop=loop) assert client.host == '127.0.0.1' assert client.port is None async with client: assert isinstance(client.port, int) assert client.server is not None assert client.port is None async def test_test_server_context_manager(loop): app = _create_example_app() async with _TestServer(app, loop=loop) as server: client = aiohttp.ClientSession(loop=loop) resp = await client.head(server.make_url('/')) assert resp.status == 200 resp.close() await client.close() def test_client_unsupported_arg(): with pytest.raises(TypeError): _TestClient('string') async def test_server_make_url_yarl_compatibility(loop): app = _create_example_app() async with _TestServer(app, loop=loop) as server: make_url = server.make_url assert make_url(URL('/foo')) == make_url('/foo') with pytest.raises(AssertionError): make_url('http://foo.com') with pytest.raises(AssertionError): make_url(URL('http://foo.com')) def test_testcase_no_app(testdir, loop): testdir.makepyfile( """ from aiohttp.test_utils import AioHTTPTestCase class InvalidTestCase(AioHTTPTestCase): def test_noop(self): pass """) result = testdir.runpytest() result.stdout.fnmatch_lines(["*RuntimeError*"]) async def test_server_context_manager(app, loop): async with _TestServer(app, loop=loop) as server: async with aiohttp.ClientSession(loop=loop) as client: async with client.head(server.make_url('/')) as resp: assert resp.status == 200 @pytest.mark.parametrize("method", [ "head", "get", "post", "options", "post", "put", "patch", "delete" ]) async def test_client_context_manager_response(method, app, loop): async with _TestClient(_TestServer(app), loop=loop) as client: async with getattr(client, method)('/') as resp: assert resp.status == 200 if method != 'head': text = await resp.text() assert "Hello, world" in text async def test_custom_port(loop, app, aiohttp_unused_port): port = aiohttp_unused_port() client = _TestClient(_TestServer(app, loop=loop, port=port), loop=loop) await client.start_server() 
assert client.server.port == port resp = await client.get('/') assert resp.status == 200 text = await resp.text() assert _hello_world_str == text await client.close() aiohttp-3.0.1/tests/test_tracing.py0000666000000000000000000001121113240304665015516 0ustar 00000000000000import asyncio from types import SimpleNamespace from unittest.mock import Mock import pytest from aiohttp.tracing import (Trace, TraceConfig, TraceConnectionCreateEndParams, TraceConnectionCreateStartParams, TraceConnectionQueuedEndParams, TraceConnectionQueuedStartParams, TraceConnectionReuseconnParams, TraceDnsCacheHitParams, TraceDnsCacheMissParams, TraceDnsResolveHostEndParams, TraceDnsResolveHostStartParams, TraceRequestEndParams, TraceRequestExceptionParams, TraceRequestRedirectParams, TraceRequestStartParams) class TestTraceConfig: def test_trace_config_ctx_default(self): trace_config = TraceConfig() assert isinstance(trace_config.trace_config_ctx(), SimpleNamespace) def test_trace_config_ctx_factory(self): trace_config = TraceConfig(trace_config_ctx_factory=dict) assert isinstance(trace_config.trace_config_ctx(), dict) def test_trace_config_ctx_request_ctx(self): trace_request_ctx = Mock() trace_config = TraceConfig() trace_config_ctx = trace_config.trace_config_ctx( trace_request_ctx=trace_request_ctx) assert trace_config_ctx.trace_request_ctx is trace_request_ctx def test_freeze(self): trace_config = TraceConfig() trace_config.freeze() assert trace_config.on_request_start.frozen assert trace_config.on_request_end.frozen assert trace_config.on_request_exception.frozen assert trace_config.on_request_redirect.frozen assert trace_config.on_connection_queued_start.frozen assert trace_config.on_connection_queued_end.frozen assert trace_config.on_connection_create_start.frozen assert trace_config.on_connection_create_end.frozen assert trace_config.on_connection_reuseconn.frozen assert trace_config.on_dns_resolvehost_start.frozen assert trace_config.on_dns_resolvehost_end.frozen assert 
trace_config.on_dns_cache_hit.frozen assert trace_config.on_dns_cache_miss.frozen class TestTrace: @pytest.mark.parametrize('signal,params,param_obj', [ ( 'request_start', (Mock(), Mock(), Mock()), TraceRequestStartParams ), ( 'request_end', (Mock(), Mock(), Mock(), Mock()), TraceRequestEndParams ), ( 'request_exception', (Mock(), Mock(), Mock(), Mock()), TraceRequestExceptionParams ), ( 'request_redirect', (Mock(), Mock(), Mock(), Mock()), TraceRequestRedirectParams ), ( 'connection_queued_start', (), TraceConnectionQueuedStartParams ), ( 'connection_queued_end', (), TraceConnectionQueuedEndParams ), ( 'connection_create_start', (), TraceConnectionCreateStartParams ), ( 'connection_create_end', (), TraceConnectionCreateEndParams ), ( 'connection_reuseconn', (), TraceConnectionReuseconnParams ), ( 'dns_resolvehost_start', (Mock(),), TraceDnsResolveHostStartParams ), ( 'dns_resolvehost_end', (Mock(),), TraceDnsResolveHostEndParams ), ( 'dns_cache_hit', (Mock(),), TraceDnsCacheHitParams ), ( 'dns_cache_miss', (Mock(),), TraceDnsCacheMissParams ) ]) async def test_send(self, loop, signal, params, param_obj): session = Mock() trace_request_ctx = Mock() callback = Mock(side_effect=asyncio.coroutine(Mock())) trace_config = TraceConfig() getattr(trace_config, "on_%s" % signal).append(callback) trace_config.freeze() trace = Trace( session, trace_config, trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx) ) await getattr(trace, "send_%s" % signal)(*params) callback.assert_called_once_with( session, SimpleNamespace(trace_request_ctx=trace_request_ctx), param_obj(*params) ) aiohttp-3.0.1/tests/test_urldispatch.py0000666000000000000000000010323013240304665016414 0ustar 00000000000000import os import pathlib import re from collections.abc import Container, Iterable, Mapping, MutableMapping, Sized from urllib.parse import unquote import pytest from yarl import URL import aiohttp from aiohttp import hdrs, web from aiohttp.test_utils import make_mocked_request from 
aiohttp.web import HTTPMethodNotAllowed, HTTPNotFound, Response from aiohttp.web_urldispatcher import (PATH_SEP, AbstractResource, ResourceRoute, SystemRoute, View, _default_expect_handler) def make_request(method, path): return make_mocked_request(method, path) def make_handler(): async def handler(request): return Response(request) # pragma: no cover return handler @pytest.fixture def app(loop): app = web.Application() app._set_loop(loop) return app @pytest.fixture def router(app): return app.router @pytest.fixture def fill_routes(router): def go(): route1 = router.add_route('GET', '/plain', make_handler()) route2 = router.add_route('GET', '/variable/{name}', make_handler()) resource = router.add_static('/static', os.path.dirname(aiohttp.__file__)) return [route1, route2] + list(resource) return go def test_register_uncommon_http_methods(router): uncommon_http_methods = { 'PROPFIND', 'PROPPATCH', 'COPY', 'LOCK', 'UNLOCK' 'MOVE', 'SUBSCRIBE', 'UNSUBSCRIBE', 'NOTIFY' } for method in uncommon_http_methods: router.add_route(method, '/handler/to/path', make_handler()) async def test_add_route_root(router): handler = make_handler() router.add_route('GET', '/', handler) req = make_request('GET', '/') info = await router.resolve(req) assert info is not None assert 0 == len(info) assert handler is info.handler assert info.route.name is None async def test_add_route_simple(router): handler = make_handler() router.add_route('GET', '/handler/to/path', handler) req = make_request('GET', '/handler/to/path') info = await router.resolve(req) assert info is not None assert 0 == len(info) assert handler is info.handler assert info.route.name is None async def test_add_with_matchdict(router): handler = make_handler() router.add_route('GET', '/handler/{to}', handler) req = make_request('GET', '/handler/tail') info = await router.resolve(req) assert info is not None assert {'to': 'tail'} == info assert handler is info.handler assert info.route.name is None async def 
test_add_with_matchdict_with_colon(router): handler = make_handler() router.add_route('GET', '/handler/{to}', handler) req = make_request('GET', '/handler/1:2:3') info = await router.resolve(req) assert info is not None assert {'to': '1:2:3'} == info assert handler is info.handler assert info.route.name is None async def test_add_route_with_add_get_shortcut(router): handler = make_handler() router.add_get('/handler/to/path', handler) req = make_request('GET', '/handler/to/path') info = await router.resolve(req) assert info is not None assert 0 == len(info) assert handler is info.handler assert info.route.name is None async def test_add_route_with_add_post_shortcut(router): handler = make_handler() router.add_post('/handler/to/path', handler) req = make_request('POST', '/handler/to/path') info = await router.resolve(req) assert info is not None assert 0 == len(info) assert handler is info.handler assert info.route.name is None async def test_add_route_with_add_put_shortcut(router): handler = make_handler() router.add_put('/handler/to/path', handler) req = make_request('PUT', '/handler/to/path') info = await router.resolve(req) assert info is not None assert 0 == len(info) assert handler is info.handler assert info.route.name is None async def test_add_route_with_add_patch_shortcut(router): handler = make_handler() router.add_patch('/handler/to/path', handler) req = make_request('PATCH', '/handler/to/path') info = await router.resolve(req) assert info is not None assert 0 == len(info) assert handler is info.handler assert info.route.name is None async def test_add_route_with_add_delete_shortcut(router): handler = make_handler() router.add_delete('/handler/to/path', handler) req = make_request('DELETE', '/handler/to/path') info = await router.resolve(req) assert info is not None assert 0 == len(info) assert handler is info.handler assert info.route.name is None async def test_add_route_with_add_head_shortcut(router): handler = make_handler() 
router.add_head('/handler/to/path', handler) req = make_request('HEAD', '/handler/to/path') info = await router.resolve(req) assert info is not None assert 0 == len(info) assert handler is info.handler assert info.route.name is None async def test_add_with_name(router): handler = make_handler() router.add_route('GET', '/handler/to/path', handler, name='name') req = make_request('GET', '/handler/to/path') info = await router.resolve(req) assert info is not None assert 'name' == info.route.name async def test_add_with_tailing_slash(router): handler = make_handler() router.add_route('GET', '/handler/to/path/', handler) req = make_request('GET', '/handler/to/path/') info = await router.resolve(req) assert info is not None assert {} == info assert handler is info.handler def test_add_invalid_path(router): handler = make_handler() with pytest.raises(ValueError): router.add_route('GET', '/{/', handler) def test_add_url_invalid1(router): handler = make_handler() with pytest.raises(ValueError): router.add_route('post', '/post/{id', handler) def test_add_url_invalid2(router): handler = make_handler() with pytest.raises(ValueError): router.add_route('post', '/post/{id{}}', handler) def test_add_url_invalid3(router): handler = make_handler() with pytest.raises(ValueError): router.add_route('post', '/post/{id{}', handler) def test_add_url_invalid4(router): handler = make_handler() with pytest.raises(ValueError): router.add_route('post', '/post/{id"}', handler) async def test_add_url_escaping(router): handler = make_handler() router.add_route('GET', '/+$', handler) req = make_request('GET', '/+$') info = await router.resolve(req) assert info is not None assert handler is info.handler async def test_any_method(router): handler = make_handler() route = router.add_route(hdrs.METH_ANY, '/', handler) req = make_request('GET', '/') info1 = await router.resolve(req) assert info1 is not None assert route is info1.route req = make_request('POST', '/') info2 = await router.resolve(req) 
assert info2 is not None assert info1.route is info2.route async def test_match_second_result_in_table(router): handler1 = make_handler() handler2 = make_handler() router.add_route('GET', '/h1', handler1) router.add_route('POST', '/h2', handler2) req = make_request('POST', '/h2') info = await router.resolve(req) assert info is not None assert {} == info assert handler2 is info.handler async def test_raise_method_not_allowed(router): handler1 = make_handler() handler2 = make_handler() router.add_route('GET', '/', handler1) router.add_route('POST', '/', handler2) req = make_request('PUT', '/') match_info = await router.resolve(req) assert isinstance(match_info.route, SystemRoute) assert {} == match_info with pytest.raises(HTTPMethodNotAllowed) as ctx: await match_info.handler(req) exc = ctx.value assert 'PUT' == exc.method assert 405 == exc.status assert {'POST', 'GET'} == exc.allowed_methods async def test_raise_method_not_found(router): handler = make_handler() router.add_route('GET', '/a', handler) req = make_request('GET', '/b') match_info = await router.resolve(req) assert isinstance(match_info.route, SystemRoute) assert {} == match_info with pytest.raises(HTTPNotFound) as ctx: await match_info.handler(req) exc = ctx.value assert 404 == exc.status def test_double_add_url_with_the_same_name(router): handler1 = make_handler() handler2 = make_handler() router.add_route('GET', '/get', handler1, name='name') regexp = ("Duplicate 'name', already handled by") with pytest.raises(ValueError) as ctx: router.add_route('GET', '/get_other', handler2, name='name') assert re.match(regexp, str(ctx.value)) def test_route_plain(router): handler = make_handler() route = router.add_route('GET', '/get', handler, name='name') route2 = next(iter(router['name'])) url = route2.url_for() assert '/get' == str(url) assert route is route2 def test_route_unknown_route_name(router): with pytest.raises(KeyError): router['unknown'] def test_route_dynamic(router): handler = make_handler() route 
= router.add_route('GET', '/get/{name}', handler, name='name') route2 = next(iter(router['name'])) url = route2.url_for(name='John') assert '/get/John' == str(url) assert route is route2 def test_add_static(router): resource = router.add_static('/st', os.path.dirname(aiohttp.__file__), name='static') assert router['static'] is resource url = resource.url_for(filename='/dir/a.txt') assert '/st/dir/a.txt' == str(url) assert len(resource) == 2 def test_add_static_append_version(router): resource = router.add_static('/st', os.path.dirname(__file__), name='static') url = resource.url_for(filename='/data.unknown_mime_type', append_version=True) expect_url = '/st/data.unknown_mime_type?' \ 'v=aUsn8CHEhhszc81d28QmlcBW0KQpfS2F4trgQKhOYd8%3D' assert expect_url == str(url) def test_add_static_append_version_set_from_constructor(router): resource = router.add_static('/st', os.path.dirname(__file__), append_version=True, name='static') url = resource.url_for(filename='/data.unknown_mime_type') expect_url = '/st/data.unknown_mime_type?' \ 'v=aUsn8CHEhhszc81d28QmlcBW0KQpfS2F4trgQKhOYd8%3D' assert expect_url == str(url) def test_add_static_append_version_override_constructor(router): resource = router.add_static('/st', os.path.dirname(__file__), append_version=True, name='static') url = resource.url_for(filename='/data.unknown_mime_type', append_version=False) expect_url = '/st/data.unknown_mime_type' assert expect_url == str(url) def test_add_static_append_version_filename_without_slash(router): resource = router.add_static('/st', os.path.dirname(__file__), name='static') url = resource.url_for(filename='data.unknown_mime_type', append_version=True) expect_url = '/st/data.unknown_mime_type?' 
\ 'v=aUsn8CHEhhszc81d28QmlcBW0KQpfS2F4trgQKhOYd8%3D' assert expect_url == str(url) def test_add_static_append_version_non_exists_file(router): resource = router.add_static('/st', os.path.dirname(__file__), name='static') url = resource.url_for(filename='/non_exists_file', append_version=True) assert '/st/non_exists_file' == str(url) def test_add_static_append_version_non_exists_file_without_slash(router): resource = router.add_static('/st', os.path.dirname(__file__), name='static') url = resource.url_for(filename='non_exists_file', append_version=True) assert '/st/non_exists_file' == str(url) def test_add_static_append_version_follow_symlink(router, tmpdir): """ Tests the access to a symlink, in static folder with apeend_version """ tmp_dir_path = str(tmpdir) symlink_path = os.path.join(tmp_dir_path, 'append_version_symlink') symlink_target_path = os.path.dirname(__file__) os.symlink(symlink_target_path, symlink_path, True) # Register global static route: resource = router.add_static('/st', tmp_dir_path, follow_symlinks=True, append_version=True) url = resource.url_for( filename='/append_version_symlink/data.unknown_mime_type') expect_url = '/st/append_version_symlink/data.unknown_mime_type?' 
\ 'v=aUsn8CHEhhszc81d28QmlcBW0KQpfS2F4trgQKhOYd8%3D' assert expect_url == str(url) def test_add_static_append_version_not_follow_symlink(router, tmpdir): """ Tests the access to a symlink, in static folder with apeend_version """ tmp_dir_path = str(tmpdir) symlink_path = os.path.join(tmp_dir_path, 'append_version_symlink') symlink_target_path = os.path.dirname(__file__) os.symlink(symlink_target_path, symlink_path, True) # Register global static route: resource = router.add_static('/st', tmp_dir_path, follow_symlinks=False, append_version=True) filename = '/append_version_symlink/data.unknown_mime_type' url = resource.url_for(filename=filename) assert '/st/append_version_symlink/data.unknown_mime_type' == str(url) def test_plain_not_match(router): handler = make_handler() router.add_route('GET', '/get/path', handler, name='name') route = router['name'] assert route._match('/another/path') is None def test_dynamic_not_match(router): handler = make_handler() router.add_route('GET', '/get/{name}', handler, name='name') route = router['name'] assert route._match('/another/path') is None async def test_static_not_match(router): router.add_static('/pre', os.path.dirname(aiohttp.__file__), name='name') resource = router['name'] ret = await resource.resolve( make_mocked_request('GET', '/another/path')) assert (None, set()) == ret def test_dynamic_with_trailing_slash(router): handler = make_handler() router.add_route('GET', '/get/{name}/', handler, name='name') route = router['name'] assert {'name': 'John'} == route._match('/get/John/') def test_len(router): handler = make_handler() router.add_route('GET', '/get1', handler, name='name1') router.add_route('GET', '/get2', handler, name='name2') assert 2 == len(router) def test_iter(router): handler = make_handler() router.add_route('GET', '/get1', handler, name='name1') router.add_route('GET', '/get2', handler, name='name2') assert {'name1', 'name2'} == set(iter(router)) def test_contains(router): handler = make_handler() 
router.add_route('GET', '/get1', handler, name='name1') router.add_route('GET', '/get2', handler, name='name2') assert 'name1' in router assert 'name3' not in router def test_static_repr(router): router.add_static('/get', os.path.dirname(aiohttp.__file__), name='name') assert re.match(r"+++)': nothing to repeat") assert ctx.value.__cause__ is None def test_route_dynamic_with_regex_spec(router): handler = make_handler() route = router.add_route('GET', '/get/{num:^\d+}', handler, name='name') url = route.url_for(num='123') assert '/get/123' == str(url) def test_route_dynamic_with_regex_spec_and_trailing_slash(router): handler = make_handler() route = router.add_route('GET', '/get/{num:^\d+}/', handler, name='name') url = route.url_for(num='123') assert '/get/123/' == str(url) def test_route_dynamic_with_regex(router): handler = make_handler() route = router.add_route('GET', r'/{one}/{two:.+}', handler) url = route.url_for(one='1', two='2') assert '/1/2' == str(url) def test_route_dynamic_quoting(router): handler = make_handler() route = router.add_route('GET', r'/{arg}', handler) url = route.url_for(arg='1 2/текÑÑ‚') assert '/1%202/%D1%82%D0%B5%D0%BA%D1%81%D1%82' == str(url) async def test_regular_match_info(router): handler = make_handler() router.add_route('GET', '/get/{name}', handler) req = make_request('GET', '/get/john') match_info = await router.resolve(req) assert {'name': 'john'} == match_info assert re.match(">", repr(match_info)) async def test_match_info_with_plus(router): handler = make_handler() router.add_route('GET', '/get/{version}', handler) req = make_request('GET', '/get/1.0+test') match_info = await router.resolve(req) assert {'version': '1.0+test'} == match_info async def test_not_found_repr(router): req = make_request('POST', '/path/to') match_info = await router.resolve(req) assert "" == repr(match_info) async def test_not_allowed_repr(router): handler = make_handler() router.add_route('GET', '/path/to', handler) handler2 = make_handler() 
router.add_route('POST', '/path/to', handler2) req = make_request('PUT', '/path/to') match_info = await router.resolve(req) assert "" == repr(match_info) def test_default_expect_handler(router): route = router.add_route('GET', '/', make_handler()) assert route._expect_handler is _default_expect_handler def test_custom_expect_handler_plain(router): async def handler(request): pass route = router.add_route( 'GET', '/', make_handler(), expect_handler=handler) assert route._expect_handler is handler assert isinstance(route, ResourceRoute) def test_custom_expect_handler_dynamic(router): async def handler(request): pass route = router.add_route( 'GET', '/get/{name}', make_handler(), expect_handler=handler) assert route._expect_handler is handler assert isinstance(route, ResourceRoute) def test_expect_handler_non_coroutine(router): def handler(request): pass with pytest.raises(AssertionError): router.add_route('GET', '/', make_handler(), expect_handler=handler) async def test_dynamic_match_non_ascii(router): handler = make_handler() router.add_route('GET', '/{var}', handler) req = make_request( 'GET', '/%D1%80%D1%83%D1%81%20%D1%82%D0%B5%D0%BA%D1%81%D1%82') match_info = await router.resolve(req) assert {'var': 'Ñ€ÑƒÑ Ñ‚ÐµÐºÑÑ‚'} == match_info async def test_dynamic_match_with_static_part(router): handler = make_handler() router.add_route('GET', '/{name}.html', handler) req = make_request('GET', '/file.html') match_info = await router.resolve(req) assert {'name': 'file'} == match_info async def test_dynamic_match_two_part2(router): handler = make_handler() router.add_route('GET', '/{name}.{ext}', handler) req = make_request('GET', '/file.html') match_info = await router.resolve(req) assert {'name': 'file', 'ext': 'html'} == match_info async def test_dynamic_match_unquoted_path(router): handler = make_handler() router.add_route('GET', '/{path}/{subpath}', handler) resource_id = 'my%2Fpath%7Cwith%21some%25strange%24characters' req = make_request('GET', 
'/path/{0}'.format(resource_id)) match_info = await router.resolve(req) assert match_info == { 'path': 'path', 'subpath': unquote(resource_id) } def test_add_route_not_started_with_slash(router): with pytest.raises(ValueError): handler = make_handler() router.add_route('GET', 'invalid_path', handler) def test_add_route_invalid_method(router): sample_bad_methods = { 'BAD METHOD', 'B@D_METHOD', '[BAD_METHOD]', '{BAD_METHOD}', '(BAD_METHOD)', 'B?D_METHOD', } for bad_method in sample_bad_methods: with pytest.raises(ValueError): handler = make_handler() router.add_route(bad_method, '/path', handler) def test_routes_view_len(router, fill_routes): fill_routes() assert 4 == len(router.routes()) def test_routes_view_iter(router, fill_routes): routes = fill_routes() assert list(routes) == list(router.routes()) def test_routes_view_contains(router, fill_routes): routes = fill_routes() for route in routes: assert route in router.routes() def test_routes_abc(router): assert isinstance(router.routes(), Sized) assert isinstance(router.routes(), Iterable) assert isinstance(router.routes(), Container) def test_named_resources_abc(router): assert isinstance(router.named_resources(), Mapping) assert not isinstance(router.named_resources(), MutableMapping) def test_named_resources(router): route1 = router.add_route('GET', '/plain', make_handler(), name='route1') route2 = router.add_route('GET', '/variable/{name}', make_handler(), name='route2') route3 = router.add_static('/static', os.path.dirname(aiohttp.__file__), name='route3') names = {route1.name, route2.name, route3.name} assert 3 == len(router.named_resources()) for name in names: assert name in router.named_resources() assert isinstance(router.named_resources()[name], AbstractResource) def test_resource_iter(router): async def handler(request): pass resource = router.add_resource('/path') r1 = resource.add_route('GET', handler) r2 = resource.add_route('POST', handler) assert 2 == len(resource) assert [r1, r2] == list(resource) 
def test_deprecate_bare_generators(router): resource = router.add_resource('/path') def gen(request): yield with pytest.warns(DeprecationWarning): resource.add_route('GET', gen) def test_view_route(router): resource = router.add_resource('/path') route = resource.add_route('GET', View) assert View is route.handler def test_resource_route_match(router): async def handler(request): pass resource = router.add_resource('/path') route = resource.add_route('GET', handler) assert {} == route.resource._match('/path') def test_error_on_double_route_adding(router): async def handler(request): pass resource = router.add_resource('/path') resource.add_route('GET', handler) with pytest.raises(RuntimeError): resource.add_route('GET', handler) def test_error_on_adding_route_after_wildcard(router): async def handler(request): pass resource = router.add_resource('/path') resource.add_route('*', handler) with pytest.raises(RuntimeError): resource.add_route('GET', handler) async def test_http_exception_is_none_when_resolved(router): handler = make_handler() router.add_route('GET', '/', handler) req = make_request('GET', '/') info = await router.resolve(req) assert info.http_exception is None async def test_http_exception_is_not_none_when_not_resolved(router): handler = make_handler() router.add_route('GET', '/', handler) req = make_request('GET', '/abc') info = await router.resolve(req) assert info.http_exception.status == 404 async def test_match_info_get_info_plain(router): handler = make_handler() router.add_route('GET', '/', handler) req = make_request('GET', '/') info = await router.resolve(req) assert info.get_info() == {'path': '/'} async def test_match_info_get_info_dynamic(router): handler = make_handler() router.add_route('GET', '/{a}', handler) req = make_request('GET', '/value') info = await router.resolve(req) assert info.get_info() == { 'pattern': re.compile(PATH_SEP+'(?P[^{}/]+)'), 'formatter': '/{a}'} async def test_match_info_get_info_dynamic2(router): handler = 
make_handler() router.add_route('GET', '/{a}/{b}', handler) req = make_request('GET', '/path/to') info = await router.resolve(req) assert info.get_info() == { 'pattern': re.compile(PATH_SEP + '(?P[^{}/]+)' + PATH_SEP + '(?P[^{}/]+)'), 'formatter': '/{a}/{b}'} def test_static_resource_get_info(router): directory = pathlib.Path(aiohttp.__file__).parent resource = router.add_static('/st', directory) assert resource.get_info() == {'directory': directory, 'prefix': '/st'} async def test_system_route_get_info(router): handler = make_handler() router.add_route('GET', '/', handler) req = make_request('GET', '/abc') info = await router.resolve(req) assert info.get_info()['http_exception'].status == 404 def test_resources_view_len(router): router.add_resource('/plain') router.add_resource('/variable/{name}') assert 2 == len(router.resources()) def test_resources_view_iter(router): resource1 = router.add_resource('/plain') resource2 = router.add_resource('/variable/{name}') resources = [resource1, resource2] assert list(resources) == list(router.resources()) def test_resources_view_contains(router): resource1 = router.add_resource('/plain') resource2 = router.add_resource('/variable/{name}') resources = [resource1, resource2] for resource in resources: assert resource in router.resources() def test_resources_abc(router): assert isinstance(router.resources(), Sized) assert isinstance(router.resources(), Iterable) assert isinstance(router.resources(), Container) def test_static_route_user_home(router): here = pathlib.Path(aiohttp.__file__).parent home = pathlib.Path(os.path.expanduser('~')) if not str(here).startswith(str(home)): # pragma: no cover pytest.skip("aiohttp folder is not placed in user's HOME") static_dir = '~/' + str(here.relative_to(home)) route = router.add_static('/st', static_dir) assert here == route.get_info()['directory'] def test_static_route_points_to_file(router): here = pathlib.Path(aiohttp.__file__).parent / '__init__.py' with pytest.raises(ValueError): 
router.add_static('/st', here) async def test_404_for_static_resource(router): resource = router.add_static('/st', os.path.dirname(aiohttp.__file__)) ret = await resource.resolve( make_mocked_request('GET', '/unknown/path')) assert (None, set()) == ret async def test_405_for_resource_adapter(router): resource = router.add_static('/st', os.path.dirname(aiohttp.__file__)) ret = await resource.resolve( make_mocked_request('POST', '/st/abc.py')) assert (None, {'HEAD', 'GET'}) == ret async def test_check_allowed_method_for_found_resource(router): handler = make_handler() resource = router.add_resource('/') resource.add_route('GET', handler) ret = await resource.resolve(make_mocked_request('GET', '/')) assert ret[0] is not None assert {'GET'} == ret[1] def test_url_for_in_static_resource(router): resource = router.add_static('/static', os.path.dirname(aiohttp.__file__)) assert URL('/static/file.txt') == resource.url_for(filename='file.txt') def test_url_for_in_static_resource_pathlib(router): resource = router.add_static('/static', os.path.dirname(aiohttp.__file__)) assert URL('/static/file.txt') == resource.url_for( filename=pathlib.Path('file.txt')) def test_url_for_in_resource_route(router): route = router.add_route('GET', '/get/{name}', make_handler(), name='name') assert URL('/get/John') == route.url_for(name='John') def test_subapp_get_info(app, loop): subapp = web.Application() resource = subapp.add_subapp('/pre', subapp) assert resource.get_info() == {'prefix': '/pre', 'app': subapp} def test_subapp_url_for(app, loop): subapp = web.Application() resource = app.add_subapp('/pre', subapp) with pytest.raises(RuntimeError): resource.url_for() def test_subapp_repr(app, loop): subapp = web.Application() resource = app.add_subapp('/pre', subapp) assert repr(resource).startswith( ' ; rel="blocked-by"' == resp.headers['Link'] aiohttp-3.0.1/tests/test_web_functional.py0000666000000000000000000013716413240304665017106 0ustar 00000000000000import asyncio import io import 
json import pathlib import zlib from unittest import mock import pytest from multidict import MultiDict from yarl import URL import aiohttp from aiohttp import (FormData, HttpVersion10, HttpVersion11, TraceConfig, multipart, web) try: import ssl except ImportError: ssl = False @pytest.fixture def here(): return pathlib.Path(__file__).parent @pytest.fixture def fname(here): return here / 'sample.key' async def test_simple_get(aiohttp_client): async def handler(request): body = await request.read() assert b'' == body return web.Response(body=b'OK') app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert 200 == resp.status txt = await resp.text() assert 'OK' == txt async def test_simple_get_with_text(aiohttp_client): async def handler(request): body = await request.read() assert b'' == body return web.Response(text='OK', headers={'content-type': 'text/plain'}) app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert 200 == resp.status txt = await resp.text() assert 'OK' == txt async def test_handler_returns_not_response(aiohttp_server, aiohttp_client): logger = mock.Mock() async def handler(request): return 'abc' app = web.Application() app.router.add_get('/', handler) server = await aiohttp_server(app, logger=logger) client = await aiohttp_client(server) resp = await client.get('/') assert 500 == resp.status assert logger.exception.called async def test_head_returns_empty_body(aiohttp_client): async def handler(request): return web.Response(body=b'test') app = web.Application() app.router.add_head('/', handler) client = await aiohttp_client(app, version=HttpVersion11) resp = await client.head('/') assert 200 == resp.status txt = await resp.text() assert '' == txt async def test_response_before_complete(aiohttp_client): async def handler(request): return web.Response(body=b'OK') app = web.Application() app.router.add_post('/', 
handler) client = await aiohttp_client(app) data = b'0' * 1024 * 1024 resp = await client.post('/', data=data) assert 200 == resp.status text = await resp.text() assert 'OK' == text async def test_post_form(aiohttp_client): async def handler(request): data = await request.post() assert {'a': '1', 'b': '2', 'c': ''} == data return web.Response(body=b'OK') app = web.Application() app.router.add_post('/', handler) client = await aiohttp_client(app) resp = await client.post('/', data={'a': 1, 'b': 2, 'c': ''}) assert 200 == resp.status txt = await resp.text() assert 'OK' == txt async def test_post_text(aiohttp_client): async def handler(request): data = await request.text() assert 'руÑÑкий' == data data2 = await request.text() assert data == data2 return web.Response(text=data) app = web.Application() app.router.add_post('/', handler) client = await aiohttp_client(app) resp = await client.post('/', data='руÑÑкий') assert 200 == resp.status txt = await resp.text() assert 'руÑÑкий' == txt async def test_post_json(aiohttp_client): dct = {'key': 'текÑÑ‚'} async def handler(request): data = await request.json() assert dct == data data2 = await request.json(loads=json.loads) assert data == data2 resp = web.Response() resp.content_type = 'application/json' resp.body = json.dumps(data).encode('utf8') return resp app = web.Application() app.router.add_post('/', handler) client = await aiohttp_client(app) headers = {'Content-Type': 'application/json'} resp = await client.post('/', data=json.dumps(dct), headers=headers) assert 200 == resp.status data = await resp.json() assert dct == data async def test_multipart(aiohttp_client): with multipart.MultipartWriter() as writer: writer.append('test') writer.append_json({'passed': True}) async def handler(request): reader = await request.multipart() assert isinstance(reader, multipart.MultipartReader) part = await reader.next() assert isinstance(part, multipart.BodyPartReader) thing = await part.text() assert thing == 'test' part = 
await reader.next() assert isinstance(part, multipart.BodyPartReader) assert part.headers['Content-Type'] == 'application/json' thing = await part.json() assert thing == {'passed': True} resp = web.Response() resp.content_type = 'application/json' resp.body = b'' return resp app = web.Application() app.router.add_post('/', handler) client = await aiohttp_client(app) resp = await client.post('/', data=writer, headers=writer.headers) assert 200 == resp.status await resp.release() async def test_multipart_content_transfer_encoding(aiohttp_client): """For issue #1168""" with multipart.MultipartWriter() as writer: writer.append(b'\x00' * 10, headers={'Content-Transfer-Encoding': 'binary'}) async def handler(request): reader = await request.multipart() assert isinstance(reader, multipart.MultipartReader) part = await reader.next() assert isinstance(part, multipart.BodyPartReader) assert part.headers['Content-Transfer-Encoding'] == 'binary' thing = await part.read() assert thing == b'\x00' * 10 resp = web.Response() resp.content_type = 'application/json' resp.body = b'' return resp app = web.Application() app.router.add_post('/', handler) client = await aiohttp_client(app) resp = await client.post('/', data=writer, headers=writer.headers) assert 200 == resp.status await resp.release() async def test_render_redirect(aiohttp_client): async def handler(request): raise web.HTTPMovedPermanently(location='/path') app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) resp = await client.get('/', allow_redirects=False) assert 301 == resp.status txt = await resp.text() assert '301: Moved Permanently' == txt assert '/path' == resp.headers['location'] async def test_post_single_file(aiohttp_client): here = pathlib.Path(__file__).parent def check_file(fs): fullname = here / fs.filename with fullname.open() as f: test_data = f.read().encode() data = fs.file.read() assert test_data == data async def handler(request): data = await request.post() 
assert ['sample.crt'] == list(data.keys()) for fs in data.values(): check_file(fs) fs.file.close() resp = web.Response(body=b'OK') return resp app = web.Application() app.router.add_post('/', handler) client = await aiohttp_client(app) fname = here / 'sample.crt' resp = await client.post('/', data=[fname.open()]) assert 200 == resp.status async def test_files_upload_with_same_key(aiohttp_client): async def handler(request): data = await request.post() files = data.getall('file') file_names = set() for _file in files: assert not _file.file.closed if _file.filename == 'test1.jpeg': assert _file.file.read() == b'binary data 1' if _file.filename == 'test2.jpeg': assert _file.file.read() == b'binary data 2' file_names.add(_file.filename) assert len(files) == 2 assert file_names == {'test1.jpeg', 'test2.jpeg'} resp = web.Response(body=b'OK') return resp app = web.Application() app.router.add_post('/', handler) client = await aiohttp_client(app) data = FormData() data.add_field('file', b'binary data 1', content_type='image/jpeg', filename='test1.jpeg') data.add_field('file', b'binary data 2', content_type='image/jpeg', filename='test2.jpeg') resp = await client.post('/', data=data) assert 200 == resp.status async def test_post_files(aiohttp_client): here = pathlib.Path(__file__).parent def check_file(fs): fullname = here / fs.filename with fullname.open() as f: test_data = f.read().encode() data = fs.file.read() assert test_data == data async def handler(request): data = await request.post() assert ['sample.crt', 'sample.key'] == list(data.keys()) for fs in data.values(): check_file(fs) fs.file.close() resp = web.Response(body=b'OK') return resp app = web.Application() app.router.add_post('/', handler) client = await aiohttp_client(app) with (here / 'sample.crt').open() as f1: with (here / 'sample.key').open() as f2: resp = await client.post('/', data=[f1, f2]) assert 200 == resp.status async def test_release_post_data(aiohttp_client): async def handler(request): await 
request.release() chunk = await request.content.readany() assert chunk == b'' return web.Response() app = web.Application() app.router.add_post('/', handler) client = await aiohttp_client(app) resp = await client.post('/', data='post text') assert 200 == resp.status async def test_POST_DATA_with_content_transfer_encoding(aiohttp_client): async def handler(request): data = await request.post() assert b'123' == data['name'] return web.Response() app = web.Application() app.router.add_post('/', handler) client = await aiohttp_client(app) form = FormData() form.add_field('name', b'123', content_transfer_encoding='base64') resp = await client.post('/', data=form) assert 200 == resp.status async def test_post_form_with_duplicate_keys(aiohttp_client): async def handler(request): data = await request.post() lst = list(data.items()) assert [('a', '1'), ('a', '2')] == lst return web.Response() app = web.Application() app.router.add_post('/', handler) client = await aiohttp_client(app) resp = await client.post('/', data=MultiDict([('a', 1), ('a', 2)])) assert 200 == resp.status def test_repr_for_application(): app = web.Application() assert "".format(id(app)) == repr(app) async def test_expect_default_handler_unknown(aiohttp_client): """Test default Expect handler for unknown Expect value. A server that does not understand or is unable to comply with any of the expectation values in the Expect field of a request MUST respond with appropriate error status. The server MUST respond with a 417 (Expectation Failed) status if any of the expectations cannot be met or, if there are other problems with the request, some other 4xx status. 
http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.20 """ async def handler(request): await request.post() pytest.xfail('Handler should not proceed to this point in case of ' 'unknown Expect header') app = web.Application() app.router.add_post('/', handler) client = await aiohttp_client(app) resp = await client.post('/', headers={'Expect': 'SPAM'}) assert 417 == resp.status async def test_100_continue(aiohttp_client): async def handler(request): data = await request.post() assert b'123' == data['name'] return web.Response() form = FormData() form.add_field('name', b'123', content_transfer_encoding='base64') app = web.Application() app.router.add_post('/', handler) client = await aiohttp_client(app) resp = await client.post('/', data=form, expect100=True) assert 200 == resp.status async def test_100_continue_custom(aiohttp_client): expect_received = False async def handler(request): data = await request.post() assert b'123' == data['name'] return web.Response() async def expect_handler(request): nonlocal expect_received expect_received = True if request.version == HttpVersion11: request.transport.write(b"HTTP/1.1 100 Continue\r\n\r\n") form = FormData() form.add_field('name', b'123', content_transfer_encoding='base64') app = web.Application() app.router.add_post('/', handler, expect_handler=expect_handler) client = await aiohttp_client(app) resp = await client.post('/', data=form, expect100=True) assert 200 == resp.status assert expect_received async def test_100_continue_custom_response(aiohttp_client): async def handler(request): data = await request.post() assert b'123', data['name'] return web.Response() async def expect_handler(request): if request.version == HttpVersion11: if auth_err: raise web.HTTPForbidden() request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n") form = FormData() form.add_field('name', b'123', content_transfer_encoding='base64') app = web.Application() app.router.add_post('/', handler, expect_handler=expect_handler) client = await 
aiohttp_client(app) auth_err = False resp = await client.post('/', data=form, expect100=True) assert 200 == resp.status auth_err = True resp = await client.post('/', data=form, expect100=True) assert 403 == resp.status async def test_100_continue_for_not_found(aiohttp_client): app = web.Application() client = await aiohttp_client(app) resp = await client.post('/not_found', data='data', expect100=True) assert 404 == resp.status async def test_100_continue_for_not_allowed(aiohttp_client): async def handler(request): return web.Response() app = web.Application() app.router.add_post('/', handler) client = await aiohttp_client(app) resp = await client.get('/', expect100=True) assert 405 == resp.status async def test_http11_keep_alive_default(aiohttp_client): async def handler(request): return web.Response() app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app, version=HttpVersion11) resp = await client.get('/') assert 200 == resp.status assert resp.version == HttpVersion11 assert 'Connection' not in resp.headers @pytest.mark.xfail async def test_http10_keep_alive_default(aiohttp_client): async def handler(request): return web.Response() app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app, version=HttpVersion10) resp = await client.get('/') assert 200 == resp.status assert resp.version == HttpVersion10 assert resp.headers['Connection'] == 'keep-alive' async def test_http10_keep_alive_with_headers_close(aiohttp_client): async def handler(request): await request.read() return web.Response(body=b'OK') app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app, version=HttpVersion10) headers = {'Connection': 'close'} resp = await client.get('/', headers=headers) assert 200 == resp.status assert resp.version == HttpVersion10 assert 'Connection' not in resp.headers async def test_http10_keep_alive_with_headers(aiohttp_client): async def handler(request): await 
request.read() return web.Response(body=b'OK') app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app, version=HttpVersion10) headers = {'Connection': 'keep-alive'} resp = await client.get('/', headers=headers) assert 200 == resp.status assert resp.version == HttpVersion10 assert resp.headers['Connection'] == 'keep-alive' async def test_upload_file(aiohttp_client): here = pathlib.Path(__file__).parent fname = here / 'aiohttp.png' with fname.open('rb') as f: data = f.read() async def handler(request): form = await request.post() raw_data = form['file'].file.read() assert data == raw_data return web.Response() app = web.Application() app.router.add_post('/', handler) client = await aiohttp_client(app) resp = await client.post('/', data={'file': data}) assert 200 == resp.status async def test_upload_file_object(aiohttp_client): here = pathlib.Path(__file__).parent fname = here / 'aiohttp.png' with fname.open('rb') as f: data = f.read() async def handler(request): form = await request.post() raw_data = form['file'].file.read() assert data == raw_data return web.Response() app = web.Application() app.router.add_post('/', handler) client = await aiohttp_client(app) with fname.open('rb') as f: resp = await client.post('/', data={'file': f}) assert 200 == resp.status async def test_empty_content_for_query_without_body(aiohttp_client): async def handler(request): assert not request.body_exists assert not request.can_read_body with pytest.warns(DeprecationWarning): assert not request.has_body return web.Response() app = web.Application() app.router.add_post('/', handler) client = await aiohttp_client(app) resp = await client.post('/') assert 200 == resp.status async def test_empty_content_for_query_with_body(aiohttp_client): async def handler(request): assert request.body_exists assert request.can_read_body with pytest.warns(DeprecationWarning): assert request.has_body body = await request.read() return web.Response(body=body) app = 
web.Application() app.router.add_post('/', handler) client = await aiohttp_client(app) resp = await client.post('/', data=b'data') assert 200 == resp.status async def test_get_with_empty_arg(aiohttp_client): async def handler(request): assert 'arg' in request.query assert '' == request.query['arg'] return web.Response() app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) resp = await client.get('/?arg') assert 200 == resp.status async def test_large_header(aiohttp_client): async def handler(request): return web.Response() app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) headers = {'Long-Header': 'ab' * 8129} resp = await client.get('/', headers=headers) assert 400 == resp.status async def test_large_header_allowed(aiohttp_client, aiohttp_server): async def handler(request): return web.Response() app = web.Application() app.router.add_post('/', handler) server = await aiohttp_server(app, max_field_size=81920) client = await aiohttp_client(server) headers = {'Long-Header': 'ab' * 8129} resp = await client.post('/', headers=headers) assert 200 == resp.status async def test_get_with_empty_arg_with_equal(aiohttp_client): async def handler(request): assert 'arg' in request.query assert '' == request.query['arg'] return web.Response() app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) resp = await client.get('/?arg=') assert 200 == resp.status async def test_response_with_streamer(aiohttp_client, fname): with fname.open('rb') as f: data = f.read() data_size = len(data) @aiohttp.streamer def stream(writer, f_name): with f_name.open('rb') as f: data = f.read(100) while data: yield from writer.write(data) data = f.read(100) async def handler(request): headers = {'Content-Length': str(data_size)} return web.Response(body=stream(fname), headers=headers) app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) resp = 
await client.get('/') assert 200 == resp.status resp_data = await resp.read() assert resp_data == data assert resp.headers.get('Content-Length') == str(len(resp_data)) async def test_response_with_streamer_no_params(aiohttp_client, fname): with fname.open('rb') as f: data = f.read() data_size = len(data) @aiohttp.streamer def stream(writer): with fname.open('rb') as f: data = f.read(100) while data: yield from writer.write(data) data = f.read(100) async def handler(request): headers = {'Content-Length': str(data_size)} return web.Response(body=stream, headers=headers) app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert 200 == resp.status resp_data = await resp.read() assert resp_data == data assert resp.headers.get('Content-Length') == str(len(resp_data)) async def test_response_with_file(aiohttp_client, fname): with fname.open('rb') as f: data = f.read() async def handler(request): return web.Response(body=fname.open('rb')) app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert 200 == resp.status resp_data = await resp.read() assert resp_data == data assert resp.headers.get('Content-Type') in ( 'application/octet-stream', 'application/pgp-keys') assert resp.headers.get('Content-Length') == str(len(resp_data)) assert (resp.headers.get('Content-Disposition') == 'attachment; filename="sample.key"; filename*=utf-8\'\'sample.key') async def test_response_with_file_ctype(aiohttp_client, fname): with fname.open('rb') as f: data = f.read() async def handler(request): return web.Response( body=fname.open('rb'), headers={'content-type': 'text/binary'}) app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert 200 == resp.status resp_data = await resp.read() assert resp_data == data assert resp.headers.get('Content-Type') == 'text/binary' assert 
resp.headers.get('Content-Length') == str(len(resp_data)) assert (resp.headers.get('Content-Disposition') == 'attachment; filename="sample.key"; filename*=utf-8\'\'sample.key') async def test_response_with_payload_disp(aiohttp_client, fname): with fname.open('rb') as f: data = f.read() async def handler(request): pl = aiohttp.get_payload(fname.open('rb')) pl.set_content_disposition('inline', filename='test.txt') return web.Response( body=pl, headers={'content-type': 'text/binary'}) app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert 200 == resp.status resp_data = await resp.read() assert resp_data == data assert resp.headers.get('Content-Type') == 'text/binary' assert resp.headers.get('Content-Length') == str(len(resp_data)) assert (resp.headers.get('Content-Disposition') == 'inline; filename="test.txt"; filename*=utf-8\'\'test.txt') async def test_response_with_payload_stringio(aiohttp_client, fname): async def handler(request): return web.Response(body=io.StringIO('test')) app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert 200 == resp.status resp_data = await resp.read() assert resp_data == b'test' async def test_response_with_precompressed_body_gzip(aiohttp_client): async def handler(request): headers = {'Content-Encoding': 'gzip'} zcomp = zlib.compressobj(wbits=16 + zlib.MAX_WBITS) data = zcomp.compress(b'mydata') + zcomp.flush() return web.Response(body=data, headers=headers) app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert 200 == resp.status data = await resp.read() assert b'mydata' == data assert resp.headers.get('Content-Encoding') == 'gzip' async def test_response_with_precompressed_body_deflate(aiohttp_client): async def handler(request): headers = {'Content-Encoding': 'deflate'} zcomp = zlib.compressobj(wbits=-zlib.MAX_WBITS) 
data = zcomp.compress(b'mydata') + zcomp.flush() return web.Response(body=data, headers=headers) app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert 200 == resp.status data = await resp.read() assert b'mydata' == data assert resp.headers.get('Content-Encoding') == 'deflate' async def test_bad_request_payload(aiohttp_client): async def handler(request): assert request.method == 'POST' with pytest.raises(aiohttp.web.RequestPayloadError): await request.content.read() return web.Response() app = web.Application() app.router.add_post('/', handler) client = await aiohttp_client(app) resp = await client.post( '/', data=b'test', headers={'content-encoding': 'gzip'}) assert 200 == resp.status async def test_stream_response_multiple_chunks(aiohttp_client): async def handler(request): resp = web.StreamResponse() resp.enable_chunked_encoding() await resp.prepare(request) await resp.write(b'x') await resp.write(b'y') await resp.write(b'z') return resp app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert 200 == resp.status data = await resp.read() assert b'xyz' == data async def test_start_without_routes(aiohttp_client): app = web.Application() client = await aiohttp_client(app) resp = await client.get('/') assert 404 == resp.status async def test_requests_count(aiohttp_client): async def handler(request): return web.Response() app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) assert client.server.handler.requests_count == 0 resp = await client.get('/') assert 200 == resp.status assert client.server.handler.requests_count == 1 resp = await client.get('/') assert 200 == resp.status assert client.server.handler.requests_count == 2 resp = await client.get('/') assert 200 == resp.status assert client.server.handler.requests_count == 3 async def test_redirect_url(aiohttp_client): async def 
redirector(request): raise web.HTTPFound(location=URL('/redirected')) async def redirected(request): return web.Response() app = web.Application() app.router.add_get('/redirector', redirector) app.router.add_get('/redirected', redirected) client = await aiohttp_client(app) resp = await client.get('/redirector') assert resp.status == 200 async def test_simple_subapp(aiohttp_client): async def handler(request): return web.Response(text="OK") app = web.Application() subapp = web.Application() subapp.router.add_get('/to', handler) app.add_subapp('/path', subapp) client = await aiohttp_client(app) resp = await client.get('/path/to') assert resp.status == 200 txt = await resp.text() assert 'OK' == txt async def test_subapp_reverse_url(aiohttp_client): async def handler(request): raise web.HTTPMovedPermanently( location=subapp.router['name'].url_for()) async def handler2(request): return web.Response(text="OK") app = web.Application() subapp = web.Application() subapp.router.add_get('/to', handler) subapp.router.add_get('/final', handler2, name='name') app.add_subapp('/path', subapp) client = await aiohttp_client(app) resp = await client.get('/path/to') assert resp.status == 200 txt = await resp.text() assert 'OK' == txt assert resp.url.path == '/path/final' async def test_subapp_reverse_variable_url(aiohttp_client): async def handler(request): raise web.HTTPMovedPermanently( location=subapp.router['name'].url_for(part='final')) async def handler2(request): return web.Response(text="OK") app = web.Application() subapp = web.Application() subapp.router.add_get('/to', handler) subapp.router.add_get('/{part}', handler2, name='name') app.add_subapp('/path', subapp) client = await aiohttp_client(app) resp = await client.get('/path/to') assert resp.status == 200 txt = await resp.text() assert 'OK' == txt assert resp.url.path == '/path/final' async def test_subapp_reverse_static_url(aiohttp_client): fname = 'aiohttp.png' async def handler(request): raise 
web.HTTPMovedPermanently( location=subapp.router['name'].url_for(filename=fname)) app = web.Application() subapp = web.Application() subapp.router.add_get('/to', handler) here = pathlib.Path(__file__).parent subapp.router.add_static('/static', here, name='name') app.add_subapp('/path', subapp) client = await aiohttp_client(app) resp = await client.get('/path/to') assert resp.url.path == '/path/static/' + fname assert resp.status == 200 body = await resp.read() with (here / fname).open('rb') as f: assert body == f.read() async def test_subapp_app(aiohttp_client): async def handler(request): assert request.app is subapp return web.Response(text='OK') app = web.Application() subapp = web.Application() subapp.router.add_get('/to', handler) app.add_subapp('/path/', subapp) client = await aiohttp_client(app) resp = await client.get('/path/to') assert resp.status == 200 txt = await resp.text() assert 'OK' == txt async def test_subapp_not_found(aiohttp_client): async def handler(request): return web.Response(text='OK') app = web.Application() subapp = web.Application() subapp.router.add_get('/to', handler) app.add_subapp('/path/', subapp) client = await aiohttp_client(app) resp = await client.get('/path/other') assert resp.status == 404 async def test_subapp_not_found2(aiohttp_client): async def handler(request): return web.Response(text='OK') app = web.Application() subapp = web.Application() subapp.router.add_get('/to', handler) app.add_subapp('/path/', subapp) client = await aiohttp_client(app) resp = await client.get('/invalid/other') assert resp.status == 404 async def test_subapp_not_allowed(aiohttp_client): async def handler(request): return web.Response(text='OK') app = web.Application() subapp = web.Application() subapp.router.add_get('/to', handler) app.add_subapp('/path/', subapp) client = await aiohttp_client(app) resp = await client.post('/path/to') assert resp.status == 405 assert resp.headers['Allow'] == 'GET,HEAD' async def 
test_subapp_cannot_add_app_in_handler(aiohttp_client): async def handler(request): request.match_info.add_app(app) return web.Response(text='OK') app = web.Application() subapp = web.Application() subapp.router.add_get('/to', handler) app.add_subapp('/path/', subapp) client = await aiohttp_client(app) resp = await client.get('/path/to') assert resp.status == 500 async def test_subapp_middlewares(aiohttp_client): order = [] async def handler(request): return web.Response(text='OK') async def middleware_factory(app, handler): async def middleware(request): order.append((1, app)) resp = await handler(request) assert 200 == resp.status order.append((2, app)) return resp return middleware app = web.Application(middlewares=[middleware_factory]) subapp1 = web.Application(middlewares=[middleware_factory]) subapp2 = web.Application(middlewares=[middleware_factory]) subapp2.router.add_get('/to', handler) with pytest.warns(DeprecationWarning): subapp1.add_subapp('/b/', subapp2) app.add_subapp('/a/', subapp1) client = await aiohttp_client(app) resp = await client.get('/a/b/to') assert resp.status == 200 assert [(1, app), (1, subapp1), (1, subapp2), (2, subapp2), (2, subapp1), (2, app)] == order async def test_subapp_on_response_prepare(aiohttp_client): order = [] async def handler(request): return web.Response(text='OK') def make_signal(app): async def on_response(request, response): order.append(app) return on_response app = web.Application() app.on_response_prepare.append(make_signal(app)) subapp1 = web.Application() subapp1.on_response_prepare.append(make_signal(subapp1)) subapp2 = web.Application() subapp2.on_response_prepare.append(make_signal(subapp2)) subapp2.router.add_get('/to', handler) subapp1.add_subapp('/b/', subapp2) app.add_subapp('/a/', subapp1) client = await aiohttp_client(app) resp = await client.get('/a/b/to') assert resp.status == 200 assert [app, subapp1, subapp2] == order async def test_subapp_on_startup(aiohttp_server): order = [] async def 
on_signal(app): order.append(app) app = web.Application() app.on_startup.append(on_signal) subapp1 = web.Application() subapp1.on_startup.append(on_signal) subapp2 = web.Application() subapp2.on_startup.append(on_signal) subapp1.add_subapp('/b/', subapp2) app.add_subapp('/a/', subapp1) await aiohttp_server(app) assert [app, subapp1, subapp2] == order async def test_subapp_on_shutdown(aiohttp_server): order = [] async def on_signal(app): order.append(app) app = web.Application() app.on_shutdown.append(on_signal) subapp1 = web.Application() subapp1.on_shutdown.append(on_signal) subapp2 = web.Application() subapp2.on_shutdown.append(on_signal) subapp1.add_subapp('/b/', subapp2) app.add_subapp('/a/', subapp1) server = await aiohttp_server(app) await server.close() assert [app, subapp1, subapp2] == order async def test_subapp_on_cleanup(aiohttp_server): order = [] async def on_signal(app): order.append(app) app = web.Application() app.on_cleanup.append(on_signal) subapp1 = web.Application() subapp1.on_cleanup.append(on_signal) subapp2 = web.Application() subapp2.on_cleanup.append(on_signal) subapp1.add_subapp('/b/', subapp2) app.add_subapp('/a/', subapp1) server = await aiohttp_server(app) await server.close() assert [app, subapp1, subapp2] == order @pytest.mark.parametrize('route,expected,middlewares', [ ('/sub/', ['A: root', 'C: sub', 'D: sub'], 'AC'), ('/', ['A: root', 'B: root'], 'AC'), ('/sub/', ['A: root', 'D: sub'], 'A'), ('/', ['A: root', 'B: root'], 'A'), ('/sub/', ['C: sub', 'D: sub'], 'C'), ('/', ['B: root'], 'C'), ('/sub/', ['D: sub'], ''), ('/', ['B: root'], ''), ]) async def test_subapp_middleware_context(aiohttp_client, route, expected, middlewares): values = [] def show_app_context(appname): @web.middleware async def middleware(request, handler): values.append('{}: {}'.format( appname, request.app['my_value'])) return await handler(request) return middleware def make_handler(appname): async def handler(request): values.append('{}: {}'.format( appname, 
request.app['my_value'])) return web.Response(text='Ok') return handler app = web.Application() app['my_value'] = 'root' if 'A' in middlewares: app.middlewares.append(show_app_context('A')) app.router.add_get('/', make_handler('B')) subapp = web.Application() subapp['my_value'] = 'sub' if 'C' in middlewares: subapp.middlewares.append(show_app_context('C')) subapp.router.add_get('/', make_handler('D')) app.add_subapp('/sub/', subapp) client = await aiohttp_client(app) resp = await client.get(route) assert 200 == resp.status assert 'Ok' == await resp.text() assert expected == values async def test_custom_date_header(aiohttp_client): async def handler(request): return web.Response(headers={'Date': 'Sun, 30 Oct 2016 03:13:52 GMT'}) app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert 200 == resp.status assert resp.headers['Date'] == 'Sun, 30 Oct 2016 03:13:52 GMT' async def test_response_prepared_with_clone(aiohttp_client): async def handler(request): cloned = request.clone() resp = web.StreamResponse() await resp.prepare(cloned) return resp app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert 200 == resp.status async def test_app_max_client_size(aiohttp_client): async def handler(request): await request.post() return web.Response(body=b'ok') max_size = 1024**2 app = web.Application() app.router.add_post('/', handler) client = await aiohttp_client(app) data = {"long_string": max_size * 'x' + 'xxx'} with pytest.warns(ResourceWarning): resp = await client.post('/', data=data) assert 413 == resp.status resp_text = await resp.text() assert 'Request Entity Too Large' in resp_text async def test_app_max_client_size_adjusted(aiohttp_client): async def handler(request): await request.post() return web.Response(body=b'ok') default_max_size = 1024**2 custom_max_size = default_max_size * 2 app = 
web.Application(client_max_size=custom_max_size) app.router.add_post('/', handler) client = await aiohttp_client(app) data = {'long_string': default_max_size * 'x' + 'xxx'} with pytest.warns(ResourceWarning): resp = await client.post('/', data=data) assert 200 == resp.status resp_text = await resp.text() assert 'ok' == resp_text too_large_data = {'log_string': custom_max_size * 'x' + "xxx"} with pytest.warns(ResourceWarning): resp = await client.post('/', data=too_large_data) assert 413 == resp.status resp_text = await resp.text() assert 'Request Entity Too Large' in resp_text async def test_app_max_client_size_none(aiohttp_client): async def handler(request): await request.post() return web.Response(body=b'ok') default_max_size = 1024**2 custom_max_size = None app = web.Application(client_max_size=custom_max_size) app.router.add_post('/', handler) client = await aiohttp_client(app) data = {'long_string': default_max_size * 'x' + 'xxx'} with pytest.warns(ResourceWarning): resp = await client.post('/', data=data) assert 200 == resp.status resp_text = await resp.text() assert 'ok' == resp_text too_large_data = {'log_string': default_max_size * 2 * 'x'} with pytest.warns(ResourceWarning): resp = await client.post('/', data=too_large_data) assert 200 == resp.status resp_text = await resp.text() assert resp_text == 'ok' async def test_post_max_client_size(aiohttp_client): async def handler(request): try: await request.post() except ValueError: return web.Response() raise web.HTTPBadRequest() app = web.Application(client_max_size=10) app.router.add_post('/', handler) client = await aiohttp_client(app) data = {"long_string": 1024 * 'x', 'file': io.BytesIO(b'test')} resp = await client.post('/', data=data) assert 200 == resp.status async def test_post_max_client_size_for_file(aiohttp_client): async def handler(request): try: await request.post() except ValueError: return web.Response() raise web.HTTPBadRequest() app = web.Application(client_max_size=2) 
app.router.add_post('/', handler) client = await aiohttp_client(app) data = {'file': io.BytesIO(b'test')} resp = await client.post('/', data=data) assert 200 == resp.status async def test_response_with_bodypart(aiohttp_client): async def handler(request): reader = await request.multipart() part = await reader.next() return web.Response(body=part) app = web.Application(client_max_size=2) app.router.add_post('/', handler) client = await aiohttp_client(app) data = {'file': io.BytesIO(b'test')} resp = await client.post('/', data=data) assert 200 == resp.status body = await resp.read() assert body == b'test' disp = multipart.parse_content_disposition( resp.headers['content-disposition']) assert disp == ('attachment', {'name': 'file', 'filename': 'file', 'filename*': 'file'}) async def test_request_clone(aiohttp_client): async def handler(request): r2 = request.clone(method='POST') assert r2.method == 'POST' assert r2.match_info is request.match_info return web.Response() app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert 200 == resp.status async def test_await(aiohttp_server): async def handler(request): resp = web.StreamResponse(headers={'content-length': str(4)}) await resp.prepare(request) with pytest.warns(DeprecationWarning): await resp.drain() await asyncio.sleep(0.01) await resp.write(b'test') await asyncio.sleep(0.01) await resp.write_eof() return resp app = web.Application() app.router.add_route('GET', '/', handler) server = await aiohttp_server(app) async with aiohttp.ClientSession() as session: resp = await session.get(server.make_url('/')) assert resp.status == 200 assert resp.connection is not None await resp.read() await resp.release() assert resp.connection is None async def test_response_context_manager(aiohttp_server): async def handler(request): return web.Response() app = web.Application() app.router.add_route('GET', '/', handler) server = await aiohttp_server(app) resp = 
await aiohttp.ClientSession().get(server.make_url('/')) async with resp: assert resp.status == 200 assert resp.connection is None assert resp.connection is None async def test_response_context_manager_error(aiohttp_server): async def handler(request): return web.Response(text='some text') app = web.Application() app.router.add_route('GET', '/', handler) server = await aiohttp_server(app) session = aiohttp.ClientSession() cm = session.get(server.make_url('/')) resp = await cm with pytest.raises(RuntimeError): async with resp: assert resp.status == 200 resp.content.set_exception(RuntimeError()) await resp.read() assert resp.closed assert len(session._connector._conns) == 1 async def aiohttp_client_api_context_manager(aiohttp_server): async def handler(request): return web.Response() app = web.Application() app.router.add_route('GET', '/', handler) server = await aiohttp_server(app) async with aiohttp.ClientSession() as session: async with session.get(server.make_url('/')) as resp: assert resp.status == 200 assert resp.connection is None assert resp.connection is None async def test_context_manager_close_on_release(aiohttp_server, mocker): async def handler(request): resp = web.StreamResponse() await resp.prepare(request) with pytest.warns(DeprecationWarning): await resp.drain() await asyncio.sleep(10) return resp app = web.Application() app.router.add_route('GET', '/', handler) server = await aiohttp_server(app) async with aiohttp.ClientSession() as session: resp = await session.get(server.make_url('/')) proto = resp.connection._protocol mocker.spy(proto, 'close') async with resp: assert resp.status == 200 assert resp.connection is not None assert resp.connection is None assert proto.close.called async def test_iter_any(aiohttp_server): data = b'0123456789' * 1024 async def handler(request): buf = [] async for raw in request.content.iter_any(): buf.append(raw) assert b''.join(buf) == data return web.Response() app = web.Application() app.router.add_route('POST', '/', 
handler) server = await aiohttp_server(app) async with aiohttp.ClientSession() as session: async with session.post(server.make_url('/'), data=data) as resp: assert resp.status == 200 async def test_request_tracing(aiohttp_client): on_request_start = mock.Mock(side_effect=asyncio.coroutine(mock.Mock())) on_request_end = mock.Mock(side_effect=asyncio.coroutine(mock.Mock())) on_request_redirect = mock.Mock(side_effect=asyncio.coroutine(mock.Mock())) on_connection_create_start = mock.Mock( side_effect=asyncio.coroutine(mock.Mock())) on_connection_create_end = mock.Mock( side_effect=asyncio.coroutine(mock.Mock())) async def redirector(request): raise web.HTTPFound(location=URL('/redirected')) async def redirected(request): return web.Response() trace_config = TraceConfig() trace_config.on_request_start.append(on_request_start) trace_config.on_request_end.append(on_request_end) trace_config.on_request_redirect.append(on_request_redirect) trace_config.on_connection_create_start.append( on_connection_create_start) trace_config.on_connection_create_end.append( on_connection_create_end) app = web.Application() app.router.add_get('/redirector', redirector) app.router.add_get('/redirected', redirected) client = await aiohttp_client(app, trace_configs=[trace_config]) await client.get('/redirector', data="foo") assert on_request_start.called assert on_request_end.called assert on_request_redirect.called assert on_connection_create_start.called assert on_connection_create_end.called async def test_return_http_exception_deprecated(aiohttp_client): async def handler(request): return web.HTTPForbidden() app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) with pytest.warns(DeprecationWarning): await client.get('/') async def test_request_path(aiohttp_client): async def handler(request): assert request.path_qs == '/path%20to?a=1' assert request.path == '/path to' assert request.raw_path == '/path%20to?a=1' return 
web.Response(body=b'OK') app = web.Application() app.router.add_get('/path to', handler) client = await aiohttp_client(app) resp = await client.get('/path to', params={'a': '1'}) assert 200 == resp.status txt = await resp.text() assert 'OK' == txt aiohttp-3.0.1/tests/test_web_middleware.py0000666000000000000000000002743413240304665017057 0ustar 00000000000000import re import pytest from aiohttp import web async def test_middleware_modifies_response(loop, aiohttp_client): async def handler(request): return web.Response(body=b'OK') @web.middleware async def middleware(request, handler): resp = await handler(request) assert 200 == resp.status resp.set_status(201) resp.text = resp.text + '[MIDDLEWARE]' return resp app = web.Application() app.middlewares.append(middleware) app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert 201 == resp.status txt = await resp.text() assert 'OK[MIDDLEWARE]' == txt async def test_middleware_handles_exception(loop, aiohttp_client): async def handler(request): raise RuntimeError('Error text') @web.middleware async def middleware(request, handler): with pytest.raises(RuntimeError) as ctx: await handler(request) return web.Response(status=501, text=str(ctx.value) + '[MIDDLEWARE]') app = web.Application() app.middlewares.append(middleware) app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert 501 == resp.status txt = await resp.text() assert 'Error text[MIDDLEWARE]' == txt async def test_middleware_chain(loop, aiohttp_client): async def handler(request): return web.Response(text='OK') def make_middleware(num): @web.middleware async def middleware(request, handler): resp = await handler(request) resp.text = resp.text + '[{}]'.format(num) return resp return middleware app = web.Application() app.middlewares.append(make_middleware(1)) app.middlewares.append(make_middleware(2)) app.router.add_route('GET', '/', handler) client = 
await aiohttp_client(app) resp = await client.get('/') assert 200 == resp.status txt = await resp.text() assert 'OK[2][1]' == txt @pytest.fixture def cli(loop, aiohttp_client): async def handler(request): return web.Response(text="OK") def wrapper(extra_middlewares): app = web.Application() app.router.add_route( 'GET', '/resource1', handler) app.router.add_route( 'GET', '/resource2/', handler) app.router.add_route( 'GET', '/resource1/a/b', handler) app.router.add_route( 'GET', '/resource2/a/b/', handler) app.middlewares.extend(extra_middlewares) return aiohttp_client(app, server_kwargs={'skip_url_asserts': True}) return wrapper class TestNormalizePathMiddleware: @pytest.mark.parametrize("path, status", [ ('/resource1', 200), ('/resource1/', 404), ('/resource2', 200), ('/resource2/', 200), ('/resource1?p1=1&p2=2', 200), ('/resource1/?p1=1&p2=2', 404), ('/resource2?p1=1&p2=2', 200), ('/resource2/?p1=1&p2=2', 200) ]) async def test_add_trailing_when_necessary( self, path, status, cli): extra_middlewares = [ web.normalize_path_middleware(merge_slashes=False)] client = await cli(extra_middlewares) resp = await client.get(path) assert resp.status == status @pytest.mark.parametrize("path, status", [ ('/resource1', 200), ('/resource1/', 404), ('/resource2', 404), ('/resource2/', 200), ('/resource1?p1=1&p2=2', 200), ('/resource1/?p1=1&p2=2', 404), ('/resource2?p1=1&p2=2', 404), ('/resource2/?p1=1&p2=2', 200) ]) async def test_no_trailing_slash_when_disabled( self, path, status, cli): extra_middlewares = [ web.normalize_path_middleware( append_slash=False, merge_slashes=False)] client = await cli(extra_middlewares) resp = await client.get(path) assert resp.status == status @pytest.mark.parametrize("path, status", [ ('/resource1/a/b', 200), ('//resource1//a//b', 200), ('//resource1//a//b/', 404), ('///resource1//a//b', 200), ('/////resource1/a///b', 200), ('/////resource1/a//b/', 404), ('/resource1/a/b?p=1', 200), ('//resource1//a//b?p=1', 200), ('//resource1//a//b/?p=1', 
404), ('///resource1//a//b?p=1', 200), ('/////resource1/a///b?p=1', 200), ('/////resource1/a//b/?p=1', 404), ]) async def test_merge_slash(self, path, status, cli): extra_middlewares = [ web.normalize_path_middleware(append_slash=False)] client = await cli(extra_middlewares) resp = await client.get(path) assert resp.status == status @pytest.mark.parametrize("path, status", [ ('/resource1/a/b', 200), ('/resource1/a/b/', 404), ('//resource2//a//b', 200), ('//resource2//a//b/', 200), ('///resource1//a//b', 200), ('///resource1//a//b/', 404), ('/////resource1/a///b', 200), ('/////resource1/a///b/', 404), ('/resource2/a/b', 200), ('//resource2//a//b', 200), ('//resource2//a//b/', 200), ('///resource2//a//b', 200), ('///resource2//a//b/', 200), ('/////resource2/a///b', 200), ('/////resource2/a///b/', 200), ('/resource1/a/b?p=1', 200), ('/resource1/a/b/?p=1', 404), ('//resource2//a//b?p=1', 200), ('//resource2//a//b/?p=1', 200), ('///resource1//a//b?p=1', 200), ('///resource1//a//b/?p=1', 404), ('/////resource1/a///b?p=1', 200), ('/////resource1/a///b/?p=1', 404), ('/resource2/a/b?p=1', 200), ('//resource2//a//b?p=1', 200), ('//resource2//a//b/?p=1', 200), ('///resource2//a//b?p=1', 200), ('///resource2//a//b/?p=1', 200), ('/////resource2/a///b?p=1', 200), ('/////resource2/a///b/?p=1', 200) ]) async def test_append_and_merge_slash(self, path, status, cli): extra_middlewares = [ web.normalize_path_middleware()] client = await cli(extra_middlewares) resp = await client.get(path) assert resp.status == status async def test_old_style_middleware(loop, aiohttp_client): async def handler(request): return web.Response(body=b'OK') async def middleware_factory(app, handler): async def middleware(request): resp = await handler(request) assert 200 == resp.status resp.set_status(201) resp.text = resp.text + '[old style middleware]' return resp return middleware with pytest.warns(DeprecationWarning) as warning_checker: app = web.Application() app.middlewares.append(middleware_factory) 
app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert 201 == resp.status txt = await resp.text() assert 'OK[old style middleware]' == txt assert len(warning_checker) == 1 msg = str(warning_checker.list[0].message) assert re.match('^old-style middleware ' '".' 'middleware_factory at 0x[0-9a-fA-F]+>" ' 'deprecated, see #2252$', msg) async def test_mixed_middleware(loop, aiohttp_client): async def handler(request): return web.Response(body=b'OK') async def m_old1(app, handler): async def middleware(request): resp = await handler(request) resp.text += '[old style 1]' return resp return middleware @web.middleware async def m_new1(request, handler): resp = await handler(request) resp.text += '[new style 1]' return resp async def m_old2(app, handler): async def middleware(request): resp = await handler(request) resp.text += '[old style 2]' return resp return middleware @web.middleware async def m_new2(request, handler): resp = await handler(request) resp.text += '[new style 2]' return resp middlewares = m_old1, m_new1, m_old2, m_new2 with pytest.warns(DeprecationWarning) as w: app = web.Application(middlewares=middlewares) app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert 200 == resp.status txt = await resp.text() assert 'OK[new style 2][old style 2][new style 1][old style 1]' == txt assert len(w) == 2 tmpl = ('^old-style middleware ' '".' 
'{} at 0x[0-9a-fA-F]+>" ' 'deprecated, see #2252$') p1 = tmpl.format('m_old1') p2 = tmpl.format('m_old2') assert re.match(p2, str(w.list[0].message)) assert re.match(p1, str(w.list[1].message)) async def test_old_style_middleware_class(loop, aiohttp_client): async def handler(request): return web.Response(body=b'OK') class Middleware: async def __call__(self, app, handler): async def middleware(request): resp = await handler(request) assert 200 == resp.status resp.set_status(201) resp.text = resp.text + '[old style middleware]' return resp return middleware with pytest.warns(DeprecationWarning) as warning_checker: app = web.Application() app.middlewares.append(Middleware()) app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert 201 == resp.status txt = await resp.text() assert 'OK[old style middleware]' == txt assert len(warning_checker) == 1 msg = str(warning_checker.list[0].message) assert re.match('^old-style middleware ' '".Middleware object ' 'at 0x[0-9a-fA-F]+>" deprecated, see #2252$', msg) async def test_new_style_middleware_class(loop, aiohttp_client): async def handler(request): return web.Response(body=b'OK') @web.middleware class Middleware: async def __call__(self, request, handler): resp = await handler(request) assert 200 == resp.status resp.set_status(201) resp.text = resp.text + '[new style middleware]' return resp with pytest.warns(None) as warning_checker: app = web.Application() app.middlewares.append(Middleware()) app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert 201 == resp.status txt = await resp.text() assert 'OK[new style middleware]' == txt assert len(warning_checker) == 0 async def test_new_style_middleware_method(loop, aiohttp_client): async def handler(request): return web.Response(body=b'OK') class Middleware: @web.middleware async def call(self, request, handler): resp = await handler(request) assert 200 == 
resp.status resp.set_status(201) resp.text = resp.text + '[new style middleware]' return resp with pytest.warns(None) as warning_checker: app = web.Application() app.middlewares.append(Middleware().call) app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert 201 == resp.status txt = await resp.text() assert 'OK[new style middleware]' == txt assert len(warning_checker) == 0 aiohttp-3.0.1/tests/test_web_protocol.py0000666000000000000000000004756113240304665016606 0ustar 00000000000000"""Tests for aiohttp/server.py""" import asyncio import socket from functools import partial from html import escape from unittest import mock import pytest from aiohttp import helpers, http, streams, web @pytest.fixture def make_srv(loop, manager): srv = None def maker(*, cls=web.RequestHandler, **kwargs): nonlocal srv m = kwargs.pop('manager', manager) srv = cls(m, loop=loop, access_log=None, **kwargs) return srv yield maker if srv is not None: if srv.transport is not None: srv.connection_lost(None) @pytest.fixture def manager(request_handler, loop): return web.Server(request_handler, loop=loop) @pytest.fixture def srv(make_srv, transport): srv = make_srv() srv.connection_made(transport) transport.close.side_effect = partial(srv.connection_lost, None) srv._drain_helper = mock.Mock() srv._drain_helper.side_effect = helpers.noop return srv @pytest.fixture def buf(): return bytearray() @pytest.fixture def request_handler(): async def handler(request): return web.Response() m = mock.Mock() m.side_effect = handler return m @pytest.fixture def handle_with_error(): def wrapper(exc=ValueError): async def handle(request): raise exc h = mock.Mock() h.side_effect = handle return h return wrapper @pytest.fixture def writer(srv): return http.StreamWriter(srv, srv.transport, srv._loop) @pytest.fixture def transport(buf): transport = mock.Mock() def write(chunk): buf.extend(chunk) transport.write.side_effect = write 
transport.is_closing.return_value = False return transport @pytest.fixture def ceil(mocker): def ceil(val): return val mocker.patch('aiohttp.helpers.ceil').side_effect = ceil async def test_shutdown(srv, loop, transport): assert transport is srv.transport srv._keepalive = True task_handler = srv._task_handler assert srv._waiter is not None assert srv._task_handler is not None t0 = loop.time() await srv.shutdown() t1 = loop.time() assert t1 - t0 < 0.05, t1-t0 assert transport.close.called assert srv.transport is None assert not srv._task_handler await asyncio.sleep(0.1, loop=loop) assert task_handler.done() async def test_double_shutdown(srv, transport): await srv.shutdown() assert transport.close.called assert srv.transport is None transport.reset_mock() await srv.shutdown() assert not transport.close.called assert srv.transport is None async def test_shutdown_wait_error_handler(loop, srv, transport): async def _error_handle(): pass srv._error_handler = loop.create_task(_error_handle()) await srv.shutdown() assert srv._error_handler.done() async def test_close_after_response(srv, loop, transport): srv.data_received( b'GET / HTTP/1.0\r\n' b'Host: example.com\r\n' b'Content-Length: 0\r\n\r\n') h = srv._task_handler await asyncio.sleep(0.1, loop=loop) assert srv._waiter is None assert srv._task_handler is None assert transport.close.called assert srv.transport is None assert h.done() def test_connection_made(make_srv): srv = make_srv() srv.connection_made(mock.Mock()) assert not srv._force_close def test_connection_made_with_tcp_keepaplive(make_srv, transport): srv = make_srv() sock = mock.Mock() transport.get_extra_info.return_value = sock srv.connection_made(transport) sock.setsockopt.assert_called_with(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) def test_connection_made_without_tcp_keepaplive(make_srv): srv = make_srv(tcp_keepalive=False) sock = mock.Mock() transport = mock.Mock() transport.get_extra_info.return_value = sock srv.connection_made(transport) assert not 
sock.setsockopt.called def test_eof_received(make_srv): srv = make_srv() srv.connection_made(mock.Mock()) srv.eof_received() # assert srv.reader._eof async def test_connection_lost(srv, loop): srv.data_received( b'GET / HTTP/1.1\r\n' b'Host: example.com\r\n' b'Content-Length: 0\r\n\r\n') srv._keepalive = True handle = srv._task_handler await asyncio.sleep(0, loop=loop) # wait for .start() starting srv.connection_lost(None) assert srv._force_close await handle assert not srv._task_handler def test_srv_keep_alive(srv): assert not srv._keepalive srv.keep_alive(True) assert srv._keepalive srv.keep_alive(False) assert not srv._keepalive def test_srv_keep_alive_disable(srv): handle = srv._keepalive_handle = mock.Mock() srv.keep_alive(False) assert not srv._keepalive assert srv._keepalive_handle is None handle.cancel.assert_called_with() async def test_simple(srv, loop, buf): srv.data_received( b'GET / HTTP/1.1\r\n\r\n') await asyncio.sleep(0, loop=loop) assert buf.startswith(b'HTTP/1.1 200 OK\r\n') async def test_bad_method(srv, loop, buf): srv.data_received( b'!@#$ / HTTP/1.0\r\n' b'Host: example.com\r\n\r\n') await asyncio.sleep(0, loop=loop) assert buf.startswith(b'HTTP/1.0 400 Bad Request\r\n') async def test_data_received_error(srv, loop, buf): transport = srv.transport srv._request_parser = mock.Mock() srv._request_parser.feed_data.side_effect = TypeError srv.data_received( b'!@#$ / HTTP/1.0\r\n' b'Host: example.com\r\n\r\n') await asyncio.sleep(0, loop=loop) assert buf.startswith(b'HTTP/1.0 500 Internal Server Error\r\n') assert transport.close.called assert srv._error_handler is None async def test_line_too_long(srv, loop, buf): srv.data_received(b''.join([b'a' for _ in range(10000)]) + b'\r\n\r\n') await asyncio.sleep(0, loop=loop) assert buf.startswith(b'HTTP/1.0 400 Bad Request\r\n') async def test_invalid_content_length(srv, loop, buf): srv.data_received( b'GET / HTTP/1.0\r\n' b'Host: example.com\r\n' b'Content-Length: sdgg\r\n\r\n') await asyncio.sleep(0, 
loop=loop) assert buf.startswith(b'HTTP/1.0 400 Bad Request\r\n') async def test_handle_error__utf( make_srv, buf, transport, loop, request_handler ): request_handler.side_effect = RuntimeError('что-то пошло не так') srv = make_srv(debug=True) srv.connection_made(transport) srv.keep_alive(True) srv.logger = mock.Mock() srv.data_received( b'GET / HTTP/1.0\r\n' b'Host: example.com\r\n' b'Content-Length: 0\r\n\r\n') await asyncio.sleep(0, loop=loop) assert b'HTTP/1.0 500 Internal Server Error' in buf assert b'Content-Type: text/html; charset=utf-8' in buf pattern = escape("RuntimeError: что-то пошло не так") assert pattern.encode('utf-8') in buf assert not srv._keepalive srv.logger.exception.assert_called_with( "Error handling request", exc_info=mock.ANY) async def test_unhandled_runtime_error( make_srv, loop, transport, request_handler ): async def handle(request): resp = web.Response() resp.write_eof = mock.Mock() resp.write_eof.side_effect = RuntimeError return resp srv = make_srv(lingering_time=0) srv.debug = True srv.connection_made(transport) srv.logger.exception = mock.Mock() request_handler.side_effect = handle srv.data_received( b'GET / HTTP/1.0\r\n' b'Host: example.com\r\n' b'Content-Length: 0\r\n\r\n') await srv._task_handler assert request_handler.called srv.logger.exception.assert_called_with( "Unhandled runtime exception", exc_info=mock.ANY) async def test_handle_uncompleted( make_srv, loop, transport, handle_with_error, request_handler): closed = False def close(): nonlocal closed closed = True transport.close.side_effect = close srv = make_srv(lingering_time=0) srv.connection_made(transport) srv.logger.exception = mock.Mock() request_handler.side_effect = handle_with_error() srv.data_received( b'GET / HTTP/1.0\r\n' b'Host: example.com\r\n' b'Content-Length: 50000\r\n\r\n') await srv._task_handler assert request_handler.called assert closed srv.logger.exception.assert_called_with( "Error handling request", exc_info=mock.ANY) async def 
test_handle_uncompleted_pipe( make_srv, loop, transport, request_handler, handle_with_error): closed = False normal_completed = False def close(): nonlocal closed closed = True transport.close.side_effect = close srv = make_srv(lingering_time=0) srv.connection_made(transport) srv.logger.exception = mock.Mock() async def handle(request): nonlocal normal_completed normal_completed = True await asyncio.sleep(0.05, loop=loop) return web.Response() # normal request_handler.side_effect = handle srv.data_received( b'GET / HTTP/1.1\r\n' b'Host: example.com\r\n' b'Content-Length: 0\r\n\r\n') await asyncio.sleep(0, loop=loop) # with exception request_handler.side_effect = handle_with_error() srv.data_received( b'GET / HTTP/1.1\r\n' b'Host: example.com\r\n' b'Content-Length: 50000\r\n\r\n') assert srv._task_handler await asyncio.sleep(0, loop=loop) await srv._task_handler assert normal_completed assert request_handler.called assert closed srv.logger.exception.assert_called_with( "Error handling request", exc_info=mock.ANY) async def test_lingering(srv, loop, transport): assert not transport.close.called async def handle(message, request, writer): pass srv.handle_request = handle srv.data_received( b'GET / HTTP/1.0\r\n' b'Host: example.com\r\n' b'Content-Length: 3\r\n\r\n') await asyncio.sleep(0.05, loop=loop) assert not transport.close.called srv.data_received(b'123') await asyncio.sleep(0, loop=loop) transport.close.assert_called_with() async def test_lingering_disabled(make_srv, loop, transport, request_handler): async def handle_request(request): await asyncio.sleep(0, loop=loop) srv = make_srv(lingering_time=0) srv.connection_made(transport) request_handler.side_effect = handle_request await asyncio.sleep(0, loop=loop) assert not transport.close.called srv.data_received( b'GET / HTTP/1.0\r\n' b'Host: example.com\r\n' b'Content-Length: 50\r\n\r\n') await asyncio.sleep(0, loop=loop) assert not transport.close.called await asyncio.sleep(0, loop=loop) 
transport.close.assert_called_with() async def test_lingering_timeout( make_srv, loop, transport, ceil, request_handler ): async def handle_request(request): await asyncio.sleep(0, loop=loop) srv = make_srv(lingering_time=1e-30) srv.connection_made(transport) request_handler.side_effect = handle_request await asyncio.sleep(0, loop=loop) assert not transport.close.called srv.data_received( b'GET / HTTP/1.0\r\n' b'Host: example.com\r\n' b'Content-Length: 50\r\n\r\n') await asyncio.sleep(0, loop=loop) assert not transport.close.called await asyncio.sleep(0, loop=loop) transport.close.assert_called_with() def test_handle_cancel(make_srv, loop, transport): log = mock.Mock() srv = make_srv(logger=log, debug=True) srv.connection_made(transport) async def handle_request(message, payload, writer): await asyncio.sleep(10, loop=loop) srv.handle_request = handle_request async def cancel(): srv._task_handler.cancel() srv.data_received( b'GET / HTTP/1.0\r\n' b'Content-Length: 10\r\n' b'Host: example.com\r\n\r\n') loop.run_until_complete( asyncio.gather(srv._task_handler, cancel(), loop=loop)) assert log.debug.called def test_handle_cancelled(make_srv, loop, transport): log = mock.Mock() srv = make_srv(logger=log, debug=True) srv.connection_made(transport) srv.handle_request = mock.Mock() # start request_handler task loop.run_until_complete(asyncio.sleep(0, loop=loop)) srv.data_received( b'GET / HTTP/1.0\r\n' b'Host: example.com\r\n\r\n') r_handler = srv._task_handler assert loop.run_until_complete(r_handler) is None async def test_handle_400(srv, loop, buf, transport): srv.data_received(b'GET / HT/asd\r\n\r\n') await asyncio.sleep(0, loop=loop) assert b'400 Bad Request' in buf def test_handle_500(srv, loop, buf, transport, request_handler): request_handler.side_effect = ValueError srv.data_received( b'GET / HTTP/1.0\r\n' b'Host: example.com\r\n\r\n') loop.run_until_complete(srv._task_handler) assert b'500 Internal Server Error' in buf async def test_keep_alive(make_srv, loop, 
transport, ceil): srv = make_srv(keepalive_timeout=0.05) srv.KEEPALIVE_RESCHEDULE_DELAY = 0.1 srv.connection_made(transport) srv.keep_alive(True) srv.handle_request = mock.Mock() srv.handle_request.return_value = loop.create_future() srv.handle_request.return_value.set_result(1) srv.data_received( b'GET / HTTP/1.1\r\n' b'Host: example.com\r\n' b'Content-Length: 0\r\n\r\n') await asyncio.sleep(0, loop=loop) waiter = srv._waiter assert waiter assert srv._keepalive_handle is not None assert not transport.close.called await asyncio.sleep(0.2, loop=loop) assert transport.close.called assert waiter.cancelled def test_srv_process_request_without_timeout(make_srv, loop, transport): srv = make_srv() srv.connection_made(transport) srv.data_received( b'GET / HTTP/1.0\r\n' b'Host: example.com\r\n\r\n') loop.run_until_complete(srv._task_handler) assert transport.close.called def test_keep_alive_timeout_default(srv): assert 75 == srv.keepalive_timeout def test_keep_alive_timeout_nondefault(make_srv): srv = make_srv(keepalive_timeout=10) assert 10 == srv.keepalive_timeout async def test_supports_connect_method(srv, loop, transport, request_handler): srv.data_received( b'CONNECT aiohttp.readthedocs.org:80 HTTP/1.0\r\n' b'Content-Length: 0\r\n\r\n') await asyncio.sleep(0.1, loop=loop) assert request_handler.called assert isinstance( request_handler.call_args[0][0].content, streams.StreamReader) async def test_content_length_0(srv, loop, request_handler): srv.data_received( b'GET / HTTP/1.1\r\n' b'Host: example.org\r\n' b'Content-Length: 0\r\n\r\n') await asyncio.sleep(0, loop=loop) assert request_handler.called assert request_handler.call_args[0][0].content == streams.EMPTY_PAYLOAD def test_rudimentary_transport(srv, loop): transport = mock.Mock() srv.connection_made(transport) srv.pause_reading() assert srv._reading_paused assert transport.pause_reading.called srv.resume_reading() assert not srv._reading_paused assert transport.resume_reading.called 
transport.resume_reading.side_effect = NotImplementedError() transport.pause_reading.side_effect = NotImplementedError() srv._reading_paused = False srv.pause_reading() assert srv._reading_paused srv.resume_reading() assert not srv._reading_paused async def test_close(srv, loop, transport): transport.close.side_effect = partial(srv.connection_lost, None) srv.connection_made(transport) srv.handle_request = mock.Mock() srv.handle_request.side_effect = helpers.noop assert transport is srv.transport srv._keepalive = True srv.data_received( b'GET / HTTP/1.1\r\n' b'Host: example.com\r\n' b'Content-Length: 0\r\n\r\n' b'GET / HTTP/1.1\r\n' b'Host: example.com\r\n' b'Content-Length: 0\r\n\r\n') await asyncio.sleep(0, loop=loop) assert srv._task_handler assert srv._waiter srv.close() await asyncio.sleep(0, loop=loop) assert srv._task_handler is None assert srv.transport is None assert transport.close.called async def test_pipeline_multiple_messages( srv, loop, transport, request_handler ): transport.close.side_effect = partial(srv.connection_lost, None) processed = 0 async def handle(request): nonlocal processed processed += 1 return web.Response() request_handler.side_effect = handle assert transport is srv.transport srv._keepalive = True srv.data_received( b'GET / HTTP/1.1\r\n' b'Host: example.com\r\n' b'Content-Length: 0\r\n\r\n' b'GET / HTTP/1.1\r\n' b'Host: example.com\r\n' b'Content-Length: 0\r\n\r\n') assert srv._task_handler is not None assert len(srv._messages) == 2 assert srv._waiter is not None await asyncio.sleep(0, loop=loop) assert srv._task_handler is not None assert srv._waiter is not None assert processed == 2 async def test_pipeline_response_order( srv, loop, buf, transport, request_handler ): transport.close.side_effect = partial(srv.connection_lost, None) srv._keepalive = True processed = [] async def handle1(request): nonlocal processed await asyncio.sleep(0.01, loop=loop) resp = web.StreamResponse() await resp.prepare(request) await resp.write(b'test1') 
await resp.write_eof() processed.append(1) return resp request_handler.side_effect = handle1 srv.data_received( b'GET / HTTP/1.1\r\n' b'Host: example.com\r\n' b'Content-Length: 0\r\n\r\n') await asyncio.sleep(0, loop=loop) # second async def handle2(request): nonlocal processed resp = web.StreamResponse() await resp.prepare(request) await resp.write(b'test2') await resp.write_eof() processed.append(2) return resp request_handler.side_effect = handle2 srv.data_received( b'GET / HTTP/1.1\r\n' b'Host: example.com\r\n' b'Content-Length: 0\r\n\r\n') await asyncio.sleep(0, loop=loop) assert srv._task_handler is not None await asyncio.sleep(0.1, loop=loop) assert processed == [1, 2] def test_data_received_close(srv): srv.close() srv.data_received( b'GET / HTTP/1.1\r\n' b'Host: example.com\r\n' b'Content-Length: 0\r\n\r\n') assert not srv._messages def test_data_received_force_close(srv): srv.force_close() srv.data_received( b'GET / HTTP/1.1\r\n' b'Host: example.com\r\n' b'Content-Length: 0\r\n\r\n') assert not srv._messages async def test__process_keepalive(loop, srv): # wait till the waiter is waiting await asyncio.sleep(0) assert srv._waiter is not None srv._keepalive_time = 1 srv._keepalive = True srv._keepalive_timeout = 1 expired_time = srv._keepalive_time + srv._keepalive_timeout + 1 with mock.patch.object(loop, "time", return_value=expired_time): srv._process_keepalive() assert srv._force_close async def test__process_keepalive_schedule_next(loop, srv): # wait till the waiter is waiting await asyncio.sleep(0) srv._keepalive = True srv._keepalive_time = 1 srv._keepalive_timeout = 1 expire_time = srv._keepalive_time + srv._keepalive_timeout with mock.patch.object(loop, "time", return_value=expire_time): with mock.patch.object(loop, "call_later") as call_later_patched: srv._process_keepalive() call_later_patched.assert_called_with( 1, srv._process_keepalive ) def test__process_keepalive_force_close(loop, srv): srv._force_close = True with mock.patch.object(loop, 
"call_at") as call_at_patched: srv._process_keepalive() assert not call_at_patched.called aiohttp-3.0.1/tests/test_web_request.py0000666000000000000000000004257113240304665016431 0ustar 00000000000000import socket from collections import MutableMapping from unittest import mock import pytest from multidict import CIMultiDict, MultiDict from yarl import URL from aiohttp import HttpVersion from aiohttp.streams import StreamReader from aiohttp.test_utils import make_mocked_request from aiohttp.web import HTTPRequestEntityTooLarge @pytest.fixture def protocol(): return mock.Mock(_reading_paused=False) def test_ctor(): req = make_mocked_request('GET', '/path/to?a=1&b=2') assert 'GET' == req.method assert HttpVersion(1, 1) == req.version assert req.host == socket.getfqdn() assert '/path/to?a=1&b=2' == req.path_qs assert '/path/to' == req.path assert 'a=1&b=2' == req.query_string assert CIMultiDict() == req.headers assert () == req.raw_headers assert req.message == req._message get = req.query assert MultiDict([('a', '1'), ('b', '2')]) == get # second call should return the same object assert get is req.query assert req.keep_alive # just make sure that all lines of make_mocked_request covered headers = CIMultiDict(FOO='bar') payload = mock.Mock() protocol = mock.Mock() app = mock.Mock() req = make_mocked_request('GET', '/path/to?a=1&b=2', headers=headers, protocol=protocol, payload=payload, app=app) assert req.app is app assert req.content is payload assert req.protocol is protocol assert req.transport is protocol.transport assert req.headers == headers assert req.raw_headers == ((b'FOO', b'bar'),) assert req.task is req._task def test_doubleslashes(): # NB: //foo/bar is an absolute URL with foo netloc and /bar path req = make_mocked_request('GET', '/bar//foo/') assert '/bar//foo/' == req.path def test_content_type_not_specified(): req = make_mocked_request('Get', '/') assert 'application/octet-stream' == req.content_type def test_content_type_from_spec(): req = 
make_mocked_request('Get', '/', CIMultiDict([('CONTENT-TYPE', 'application/json')])) assert 'application/json' == req.content_type def test_content_type_from_spec_with_charset(): req = make_mocked_request( 'Get', '/', CIMultiDict([('CONTENT-TYPE', 'text/html; charset=UTF-8')])) assert 'text/html' == req.content_type assert 'UTF-8' == req.charset def test_calc_content_type_on_getting_charset(): req = make_mocked_request( 'Get', '/', CIMultiDict([('CONTENT-TYPE', 'text/html; charset=UTF-8')])) assert 'UTF-8' == req.charset assert 'text/html' == req.content_type def test_urlencoded_querystring(): req = make_mocked_request( 'GET', '/yandsearch?text=%D1%82%D0%B5%D0%BA%D1%81%D1%82') assert {'text': 'текÑÑ‚'} == req.query def test_non_ascii_path(): req = make_mocked_request('GET', '/путь') assert '/путь' == req.path def test_non_ascii_raw_path(): req = make_mocked_request('GET', '/путь') assert '/путь' == req.raw_path def test_content_length(): req = make_mocked_request('Get', '/', CIMultiDict([('CONTENT-LENGTH', '123')])) assert 123 == req.content_length def test_non_keepalive_on_http10(): req = make_mocked_request('GET', '/', version=HttpVersion(1, 0)) assert not req.keep_alive def test_non_keepalive_on_closing(): req = make_mocked_request('GET', '/', closing=True) assert not req.keep_alive async def test_call_POST_on_GET_request(): req = make_mocked_request('GET', '/') ret = await req.post() assert CIMultiDict() == ret async def test_call_POST_on_weird_content_type(): req = make_mocked_request( 'POST', '/', headers=CIMultiDict({'CONTENT-TYPE': 'something/weird'})) ret = await req.post() assert CIMultiDict() == ret async def test_call_POST_twice(): req = make_mocked_request('GET', '/') ret1 = await req.post() ret2 = await req.post() assert ret1 is ret2 def test_no_request_cookies(): req = make_mocked_request('GET', '/') assert req.cookies == {} cookies = req.cookies assert cookies is req.cookies def test_request_cookie(): headers = CIMultiDict(COOKIE='cookie1=value1; 
cookie2=value2') req = make_mocked_request('GET', '/', headers=headers) assert req.cookies == {'cookie1': 'value1', 'cookie2': 'value2'} def test_request_cookie__set_item(): headers = CIMultiDict(COOKIE='name=value') req = make_mocked_request('GET', '/', headers=headers) assert req.cookies == {'name': 'value'} with pytest.raises(TypeError): req.cookies['my'] = 'value' def test_match_info(): req = make_mocked_request('GET', '/') assert req._match_info is req.match_info def test_request_is_mutable_mapping(): req = make_mocked_request('GET', '/') assert isinstance(req, MutableMapping) req['key'] = 'value' assert 'value' == req['key'] def test_request_delitem(): req = make_mocked_request('GET', '/') req['key'] = 'value' assert 'value' == req['key'] del req['key'] assert 'key' not in req def test_request_len(): req = make_mocked_request('GET', '/') assert len(req) == 0 req['key'] = 'value' assert len(req) == 1 def test_request_iter(): req = make_mocked_request('GET', '/') req['key'] = 'value' req['key2'] = 'value2' assert set(req) == {'key', 'key2'} def test___repr__(): req = make_mocked_request('GET', '/path/to') assert "" == repr(req) def test___repr___non_ascii_path(): req = make_mocked_request('GET', '/path/\U0001f415\U0001f308') assert "" == repr(req) def test_http_scheme(): req = make_mocked_request('GET', '/', headers={'Host': 'example.com'}) assert "http" == req.scheme assert req.secure is False def test_https_scheme_by_ssl_transport(): req = make_mocked_request('GET', '/', headers={'Host': 'example.com'}, sslcontext=True) assert "https" == req.scheme assert req.secure is True def test_single_forwarded_header(): header = 'by=identifier;for=identifier;host=identifier;proto=identifier' req = make_mocked_request('GET', '/', headers=CIMultiDict({'Forwarded': header})) assert req.forwarded[0]['by'] == 'identifier' assert req.forwarded[0]['for'] == 'identifier' assert req.forwarded[0]['host'] == 'identifier' assert req.forwarded[0]['proto'] == 'identifier' def 
test_single_forwarded_header_camelcase(): header = 'bY=identifier;fOr=identifier;HOst=identifier;pRoTO=identifier' req = make_mocked_request('GET', '/', headers=CIMultiDict({'Forwarded': header})) assert req.forwarded[0]['by'] == 'identifier' assert req.forwarded[0]['for'] == 'identifier' assert req.forwarded[0]['host'] == 'identifier' assert req.forwarded[0]['proto'] == 'identifier' def test_single_forwarded_header_single_param(): header = 'BY=identifier' req = make_mocked_request('GET', '/', headers=CIMultiDict({'Forwarded': header})) assert req.forwarded[0]['by'] == 'identifier' def test_single_forwarded_header_multiple_param(): header = 'By=identifier1,BY=identifier2, By=identifier3 , BY=identifier4' req = make_mocked_request('GET', '/', headers=CIMultiDict({'Forwarded': header})) assert len(req.forwarded) == 4 assert req.forwarded[0]['by'] == 'identifier1' assert req.forwarded[1]['by'] == 'identifier2' assert req.forwarded[2]['by'] == 'identifier3' assert req.forwarded[3]['by'] == 'identifier4' def test_single_forwarded_header_quoted_escaped(): header = 'BY=identifier;pROTO="\lala lan\d\~ 123\!&"' req = make_mocked_request('GET', '/', headers=CIMultiDict({'Forwarded': header})) assert req.forwarded[0]['by'] == 'identifier' assert req.forwarded[0]['proto'] == 'lala land~ 123!&' def test_single_forwarded_header_custom_param(): header = r'BY=identifier;PROTO=https;SOME="other, \"value\""' req = make_mocked_request('GET', '/', headers=CIMultiDict({'Forwarded': header})) assert len(req.forwarded) == 1 assert req.forwarded[0]['by'] == 'identifier' assert req.forwarded[0]['proto'] == 'https' assert req.forwarded[0]['some'] == 'other, "value"' def test_single_forwarded_header_empty_params(): # This is allowed by the grammar given in RFC 7239 header = ';For=identifier;;PROTO=https;;;' req = make_mocked_request('GET', '/', headers=CIMultiDict({'Forwarded': header})) assert req.forwarded[0]['for'] == 'identifier' assert req.forwarded[0]['proto'] == 'https' def 
test_single_forwarded_header_bad_separator(): header = 'BY=identifier PROTO=https' req = make_mocked_request('GET', '/', headers=CIMultiDict({'Forwarded': header})) assert 'proto' not in req.forwarded[0] def test_single_forwarded_header_injection1(): # We might receive a header like this if we're sitting behind a reverse # proxy that blindly appends a forwarded-element without checking # the syntax of existing field-values. We should be able to recover # the appended element anyway. header = 'for=_injected;by=", for=_real' req = make_mocked_request('GET', '/', headers=CIMultiDict({'Forwarded': header})) assert len(req.forwarded) == 2 assert 'by' not in req.forwarded[0] assert req.forwarded[1]['for'] == '_real' def test_single_forwarded_header_injection2(): header = 'very bad syntax, for=_real' req = make_mocked_request('GET', '/', headers=CIMultiDict({'Forwarded': header})) assert len(req.forwarded) == 2 assert 'for' not in req.forwarded[0] assert req.forwarded[1]['for'] == '_real' def test_single_forwarded_header_long_quoted_string(): header = 'for="' + '\\\\' * 5000 + '"' req = make_mocked_request('GET', '/', headers=CIMultiDict({'Forwarded': header})) assert req.forwarded[0]['for'] == '\\' * 5000 def test_multiple_forwarded_headers(): headers = CIMultiDict() headers.add('Forwarded', 'By=identifier1;for=identifier2, BY=identifier3') headers.add('Forwarded', 'By=identifier4;fOr=identifier5') req = make_mocked_request('GET', '/', headers=headers) assert len(req.forwarded) == 3 assert req.forwarded[0]['by'] == 'identifier1' assert req.forwarded[0]['for'] == 'identifier2' assert req.forwarded[1]['by'] == 'identifier3' assert req.forwarded[2]['by'] == 'identifier4' assert req.forwarded[2]['for'] == 'identifier5' def test_multiple_forwarded_headers_bad_syntax(): headers = CIMultiDict() headers.add('Forwarded', 'for=_1;by=_2') headers.add('Forwarded', 'invalid value') headers.add('Forwarded', '') headers.add('Forwarded', 'for=_3;by=_4') req = make_mocked_request('GET', 
'/', headers=headers) assert len(req.forwarded) == 4 assert req.forwarded[0]['for'] == '_1' assert 'for' not in req.forwarded[1] assert 'for' not in req.forwarded[2] assert req.forwarded[3]['by'] == '_4' def test_multiple_forwarded_headers_injection(): headers = CIMultiDict() # This could be sent by an attacker, hoping to "shadow" the second header. headers.add('Forwarded', 'for=_injected;by="') # This is added by our trusted reverse proxy. headers.add('Forwarded', 'for=_real;by=_actual_proxy') req = make_mocked_request('GET', '/', headers=headers) assert len(req.forwarded) == 2 assert 'by' not in req.forwarded[0] assert req.forwarded[1]['for'] == '_real' assert req.forwarded[1]['by'] == '_actual_proxy' def test_host_by_host_header(): req = make_mocked_request('GET', '/', headers=CIMultiDict({'Host': 'example.com'})) assert req.host == 'example.com' def test_raw_headers(): req = make_mocked_request('GET', '/', headers=CIMultiDict({'X-HEADER': 'aaa'})) assert req.raw_headers == ((b'X-HEADER', b'aaa'),) def test_rel_url(): req = make_mocked_request('GET', '/path') assert URL('/path') == req.rel_url def test_url_url(): req = make_mocked_request('GET', '/path', headers={'HOST': 'example.com'}) assert URL('http://example.com/path') == req.url def test_clone(): req = make_mocked_request('GET', '/path') req2 = req.clone() assert req2.method == 'GET' assert req2.rel_url == URL('/path') def test_clone_client_max_size(): req = make_mocked_request('GET', '/path', client_max_size=1024) req2 = req.clone() assert req._client_max_size == req2._client_max_size assert req2._client_max_size == 1024 def test_clone_method(): req = make_mocked_request('GET', '/path') req2 = req.clone(method='POST') assert req2.method == 'POST' assert req2.rel_url == URL('/path') def test_clone_rel_url(): req = make_mocked_request('GET', '/path') req2 = req.clone(rel_url=URL('/path2')) assert req2.rel_url == URL('/path2') def test_clone_rel_url_str(): req = make_mocked_request('GET', '/path') req2 = 
req.clone(rel_url='/path2') assert req2.rel_url == URL('/path2') def test_clone_headers(): req = make_mocked_request('GET', '/path', headers={'A': 'B'}) req2 = req.clone(headers=CIMultiDict({'B': 'C'})) assert req2.headers == CIMultiDict({'B': 'C'}) assert req2.raw_headers == ((b'B', b'C'),) def test_clone_headers_dict(): req = make_mocked_request('GET', '/path', headers={'A': 'B'}) req2 = req.clone(headers={'B': 'C'}) assert req2.headers == CIMultiDict({'B': 'C'}) assert req2.raw_headers == ((b'B', b'C'),) async def test_cannot_clone_after_read(loop, protocol): payload = StreamReader(protocol, loop=loop) payload.feed_data(b'data') payload.feed_eof() req = make_mocked_request('GET', '/path', payload=payload) await req.read() with pytest.raises(RuntimeError): req.clone() async def test_make_too_big_request(loop, protocol): payload = StreamReader(protocol, loop=loop) large_file = 1024 ** 2 * b'x' too_large_file = large_file + b'x' payload.feed_data(too_large_file) payload.feed_eof() req = make_mocked_request('POST', '/', payload=payload) with pytest.raises(HTTPRequestEntityTooLarge) as err: await req.read() assert err.value.status_code == 413 async def test_make_too_big_request_adjust_limit(loop, protocol): payload = StreamReader(protocol, loop=loop) large_file = 1024 ** 2 * b'x' too_large_file = large_file + b'x' payload.feed_data(too_large_file) payload.feed_eof() max_size = 1024**2 + 2 req = make_mocked_request('POST', '/', payload=payload, client_max_size=max_size) txt = await req.read() assert len(txt) == 1024**2 + 1 async def test_multipart_formdata(loop, protocol): payload = StreamReader(protocol, loop=loop) payload.feed_data(b"""-----------------------------326931944431359\r Content-Disposition: form-data; name="a"\r \r b\r -----------------------------326931944431359\r Content-Disposition: form-data; name="c"\r \r d\r -----------------------------326931944431359--\r\n""") content_type = "multipart/form-data; boundary="\ 
"---------------------------326931944431359" payload.feed_eof() req = make_mocked_request('POST', '/', headers={'CONTENT-TYPE': content_type}, payload=payload) result = await req.post() assert dict(result) == {'a': 'b', 'c': 'd'} async def test_make_too_big_request_limit_None(loop, protocol): payload = StreamReader(protocol, loop=loop) large_file = 1024 ** 2 * b'x' too_large_file = large_file + b'x' payload.feed_data(too_large_file) payload.feed_eof() max_size = None req = make_mocked_request('POST', '/', payload=payload, client_max_size=max_size) txt = await req.read() assert len(txt) == 1024**2 + 1 def test_remote_peername_tcp(): transp = mock.Mock() transp.get_extra_info.return_value = ('10.10.10.10', 1234) req = make_mocked_request('GET', '/', transport=transp) assert req.remote == '10.10.10.10' def test_remote_peername_unix(): transp = mock.Mock() transp.get_extra_info.return_value = '/path/to/sock' req = make_mocked_request('GET', '/', transport=transp) assert req.remote == '/path/to/sock' def test_save_state_on_clone(): req = make_mocked_request('GET', '/') req['key'] = 'val' req2 = req.clone() req2['key'] = 'val2' assert req['key'] == 'val' assert req2['key'] == 'val2' def test_clone_scheme(): req = make_mocked_request('GET', '/') req2 = req.clone(scheme='https') assert req2.scheme == 'https' def test_clone_host(): req = make_mocked_request('GET', '/') req2 = req.clone(host='example.com') assert req2.host == 'example.com' def test_clone_remote(): req = make_mocked_request('GET', '/') req2 = req.clone(remote='11.11.11.11') assert req2.remote == '11.11.11.11' def test_request_custom_attr(): req = make_mocked_request('GET', '/') with pytest.warns(DeprecationWarning): req.custom = None def test_remote_with_closed_transport(): req = make_mocked_request('GET', '/') req._protocol = None assert req.remote is None aiohttp-3.0.1/tests/test_web_request_handler.py0000666000000000000000000000277613240304665020131 0ustar 00000000000000from unittest import mock from 
aiohttp import web from aiohttp.test_utils import make_mocked_coro async def serve(request): return web.Response() def test_repr(loop): manager = web.Server(serve, loop=loop) handler = manager() assert '' == repr(handler) handler.transport = object() assert '' == repr(handler) def test_connections(loop): manager = web.Server(serve, loop=loop) assert manager.connections == [] handler = object() transport = object() manager.connection_made(handler, transport) assert manager.connections == [handler] manager.connection_lost(handler, None) assert manager.connections == [] async def test_shutdown_no_timeout(loop): manager = web.Server(serve, loop=loop) handler = mock.Mock() handler.shutdown = make_mocked_coro(mock.Mock()) transport = mock.Mock() manager.connection_made(handler, transport) await manager.shutdown() manager.connection_lost(handler, None) assert manager.connections == [] handler.shutdown.assert_called_with(None) async def test_shutdown_timeout(loop): manager = web.Server(serve, loop=loop) handler = mock.Mock() handler.shutdown = make_mocked_coro(mock.Mock()) transport = mock.Mock() manager.connection_made(handler, transport) await manager.shutdown(timeout=0.1) manager.connection_lost(handler, None) assert manager.connections == [] handler.shutdown.assert_called_with(0.1) aiohttp-3.0.1/tests/test_web_response.py0000666000000000000000000007547613240304665016611 0ustar 00000000000000import collections import datetime import json import re from unittest import mock import pytest from multidict import CIMultiDict from aiohttp import HttpVersion, HttpVersion10, HttpVersion11, hdrs, signals from aiohttp.payload import BytesPayload from aiohttp.test_utils import make_mocked_coro, make_mocked_request from aiohttp.web import ContentCoding, Response, StreamResponse, json_response def make_request(method, path, headers=CIMultiDict(), version=HttpVersion11, on_response_prepare=None, **kwargs): app = kwargs.pop('app', None) or mock.Mock() app._debug = False if 
on_response_prepare is None: on_response_prepare = signals.Signal(app) app.on_response_prepare = on_response_prepare app.on_response_prepare.freeze() protocol = kwargs.pop('protocol', None) or mock.Mock() return make_mocked_request(method, path, headers, version=version, protocol=protocol, app=app, **kwargs) @pytest.fixture def buf(): return bytearray() @pytest.fixture def writer(buf): writer = mock.Mock() def acquire(cb): cb(writer.transport) def buffer_data(chunk): buf.extend(chunk) def write(chunk): buf.extend(chunk) def write_headers(status_line, headers): headers = status_line + ''.join( [k + ': ' + v + '\r\n' for k, v in headers.items()]) headers = headers.encode('utf-8') + b'\r\n' buf.extend(headers) async def write_eof(chunk=b''): buf.extend(chunk) writer.acquire.side_effect = acquire writer.transport.write.side_effect = write writer.write.side_effect = write writer.write_eof.side_effect = write_eof writer.write_headers.side_effect = write_headers writer.buffer_data.side_effect = buffer_data writer.drain.return_value = () return writer def test_stream_response_ctor(): resp = StreamResponse() assert 200 == resp.status assert resp.keep_alive is None assert resp.task is None req = mock.Mock() resp._req = req assert resp.task is req.task def test_stream_response_hashable(): # should not raise exception hash(StreamResponse()) def test_stream_response_is_mutable_mapping(): resp = StreamResponse() assert isinstance(resp, collections.MutableMapping) resp['key'] = 'value' assert 'value' == resp['key'] def test_stream_response_delitem(): resp = StreamResponse() resp['key'] = 'value' del resp['key'] assert 'key' not in resp def test_stream_response_len(): resp = StreamResponse() assert len(resp) == 0 resp['key'] = 'value' assert len(resp) == 1 def test_request_iter(): resp = StreamResponse() resp['key'] = 'value' resp['key2'] = 'value2' assert set(resp) == {'key', 'key2'} def test_content_length(): resp = StreamResponse() assert resp.content_length is None def 
test_content_length_setter(): resp = StreamResponse() resp.content_length = 234 assert 234 == resp.content_length def test_content_length_setter_with_enable_chunked_encoding(): resp = StreamResponse() resp.enable_chunked_encoding() with pytest.raises(RuntimeError): resp.content_length = 234 def test_drop_content_length_header_on_setting_len_to_None(): resp = StreamResponse() resp.content_length = 1 assert "1" == resp.headers['Content-Length'] resp.content_length = None assert 'Content-Length' not in resp.headers def test_set_content_length_to_None_on_non_set(): resp = StreamResponse() resp.content_length = None assert 'Content-Length' not in resp.headers resp.content_length = None assert 'Content-Length' not in resp.headers def test_setting_content_type(): resp = StreamResponse() resp.content_type = 'text/html' assert 'text/html' == resp.headers['content-type'] def test_setting_charset(): resp = StreamResponse() resp.content_type = 'text/html' resp.charset = 'koi8-r' assert 'text/html; charset=koi8-r' == resp.headers['content-type'] def test_default_charset(): resp = StreamResponse() assert resp.charset is None def test_reset_charset(): resp = StreamResponse() resp.content_type = 'text/html' resp.charset = None assert resp.charset is None def test_reset_charset_after_setting(): resp = StreamResponse() resp.content_type = 'text/html' resp.charset = 'koi8-r' resp.charset = None assert resp.charset is None def test_charset_without_content_type(): resp = StreamResponse() with pytest.raises(RuntimeError): resp.charset = 'koi8-r' def test_last_modified_initial(): resp = StreamResponse() assert resp.last_modified is None def test_last_modified_string(): resp = StreamResponse() dt = datetime.datetime(1990, 1, 2, 3, 4, 5, 0, datetime.timezone.utc) resp.last_modified = 'Mon, 2 Jan 1990 03:04:05 GMT' assert resp.last_modified == dt def test_last_modified_timestamp(): resp = StreamResponse() dt = datetime.datetime(1970, 1, 1, 0, 0, 0, 0, datetime.timezone.utc) 
resp.last_modified = 0 assert resp.last_modified == dt resp.last_modified = 0.0 assert resp.last_modified == dt def test_last_modified_datetime(): resp = StreamResponse() dt = datetime.datetime(2001, 2, 3, 4, 5, 6, 0, datetime.timezone.utc) resp.last_modified = dt assert resp.last_modified == dt def test_last_modified_reset(): resp = StreamResponse() resp.last_modified = 0 resp.last_modified = None assert resp.last_modified is None async def test_start(): req = make_request('GET', '/', payload_writer=mock.Mock()) resp = StreamResponse() assert resp.keep_alive is None msg = await resp.prepare(req) assert msg.write_headers.called msg2 = await resp.prepare(req) assert msg is msg2 assert resp.keep_alive req2 = make_request('GET', '/') # with pytest.raises(RuntimeError): msg3 = await resp.prepare(req2) assert msg is msg3 async def test_chunked_encoding(): req = make_request('GET', '/') resp = StreamResponse() assert not resp.chunked resp.enable_chunked_encoding() assert resp.chunked msg = await resp.prepare(req) assert msg.chunked def test_enable_chunked_encoding_with_content_length(): resp = StreamResponse() resp.content_length = 234 with pytest.raises(RuntimeError): resp.enable_chunked_encoding() async def test_chunk_size(): req = make_request('GET', '/', payload_writer=mock.Mock()) resp = StreamResponse() assert not resp.chunked with pytest.warns(DeprecationWarning): resp.enable_chunked_encoding(chunk_size=8192) assert resp.chunked msg = await resp.prepare(req) assert msg.chunked assert msg.enable_chunking.called assert msg.filter is not None async def test_chunked_encoding_forbidden_for_http_10(): req = make_request('GET', '/', version=HttpVersion10) resp = StreamResponse() resp.enable_chunked_encoding() with pytest.raises(RuntimeError) as ctx: await resp.prepare(req) assert re.match("Using chunked encoding is forbidden for HTTP/1.0", str(ctx.value)) async def test_compression_no_accept(): req = make_request('GET', '/', payload_writer=mock.Mock()) resp = 
StreamResponse() assert not resp.chunked assert not resp.compression resp.enable_compression() assert resp.compression msg = await resp.prepare(req) assert not msg.enable_compression.called async def test_force_compression_no_accept_backwards_compat(): req = make_request('GET', '/', payload_writer=mock.Mock()) resp = StreamResponse() assert not resp.chunked assert not resp.compression resp.enable_compression(force=True) assert resp.compression msg = await resp.prepare(req) assert msg.enable_compression.called assert msg.filter is not None async def test_force_compression_false_backwards_compat(): req = make_request('GET', '/', payload_writer=mock.Mock()) resp = StreamResponse() assert not resp.compression resp.enable_compression(force=False) assert resp.compression msg = await resp.prepare(req) assert not msg.enable_compression.called async def test_compression_default_coding(): req = make_request( 'GET', '/', headers=CIMultiDict({hdrs.ACCEPT_ENCODING: 'gzip, deflate'})) resp = StreamResponse() assert not resp.chunked assert not resp.compression resp.enable_compression() assert resp.compression msg = await resp.prepare(req) msg.enable_compression.assert_called_with('deflate') assert 'deflate' == resp.headers.get(hdrs.CONTENT_ENCODING) assert msg.filter is not None async def test_force_compression_deflate(): req = make_request( 'GET', '/', headers=CIMultiDict({hdrs.ACCEPT_ENCODING: 'gzip, deflate'})) resp = StreamResponse() resp.enable_compression(ContentCoding.deflate) assert resp.compression msg = await resp.prepare(req) msg.enable_compression.assert_called_with('deflate') assert 'deflate' == resp.headers.get(hdrs.CONTENT_ENCODING) async def test_force_compression_no_accept_deflate(): req = make_request('GET', '/') resp = StreamResponse() resp.enable_compression(ContentCoding.deflate) assert resp.compression msg = await resp.prepare(req) msg.enable_compression.assert_called_with('deflate') assert 'deflate' == resp.headers.get(hdrs.CONTENT_ENCODING) async def 
test_force_compression_gzip(): req = make_request( 'GET', '/', headers=CIMultiDict({hdrs.ACCEPT_ENCODING: 'gzip, deflate'})) resp = StreamResponse() resp.enable_compression(ContentCoding.gzip) assert resp.compression msg = await resp.prepare(req) msg.enable_compression.assert_called_with('gzip') assert 'gzip' == resp.headers.get(hdrs.CONTENT_ENCODING) async def test_force_compression_no_accept_gzip(): req = make_request('GET', '/') resp = StreamResponse() resp.enable_compression(ContentCoding.gzip) assert resp.compression msg = await resp.prepare(req) msg.enable_compression.assert_called_with('gzip') assert 'gzip' == resp.headers.get(hdrs.CONTENT_ENCODING) async def test_change_content_length_if_compression_enabled(): req = make_request('GET', '/') resp = Response(body=b'answer') resp.enable_compression(ContentCoding.gzip) await resp.prepare(req) assert resp.content_length is not None and \ resp.content_length != len(b'answer') async def test_set_content_length_if_compression_enabled(): writer = mock.Mock() def write_headers(status_line, headers): assert hdrs.CONTENT_LENGTH in headers assert headers[hdrs.CONTENT_LENGTH] == '26' assert hdrs.TRANSFER_ENCODING not in headers writer.write_headers.side_effect = write_headers req = make_request('GET', '/', payload_writer=writer) resp = Response(body=b'answer') resp.enable_compression(ContentCoding.gzip) await resp.prepare(req) assert resp.content_length == 26 del resp.headers[hdrs.CONTENT_LENGTH] assert resp.content_length == 26 async def test_remove_content_length_if_compression_enabled_http11(): writer = mock.Mock() def write_headers(status_line, headers): assert hdrs.CONTENT_LENGTH not in headers assert headers.get(hdrs.TRANSFER_ENCODING, '') == 'chunked' writer.write_headers.side_effect = write_headers req = make_request('GET', '/', payload_writer=writer) resp = StreamResponse() resp.content_length = 123 resp.enable_compression(ContentCoding.gzip) await resp.prepare(req) assert resp.content_length is None async def 
test_remove_content_length_if_compression_enabled_http10(): writer = mock.Mock() def write_headers(status_line, headers): assert hdrs.CONTENT_LENGTH not in headers assert hdrs.TRANSFER_ENCODING not in headers writer.write_headers.side_effect = write_headers req = make_request('GET', '/', version=HttpVersion10, payload_writer=writer) resp = StreamResponse() resp.content_length = 123 resp.enable_compression(ContentCoding.gzip) await resp.prepare(req) assert resp.content_length is None async def test_force_compression_identity(): writer = mock.Mock() def write_headers(status_line, headers): assert hdrs.CONTENT_LENGTH in headers assert hdrs.TRANSFER_ENCODING not in headers writer.write_headers.side_effect = write_headers req = make_request('GET', '/', payload_writer=writer) resp = StreamResponse() resp.content_length = 123 resp.enable_compression(ContentCoding.identity) await resp.prepare(req) assert resp.content_length == 123 async def test_force_compression_identity_response(): writer = mock.Mock() def write_headers(status_line, headers): assert headers[hdrs.CONTENT_LENGTH] == "6" assert hdrs.TRANSFER_ENCODING not in headers writer.write_headers.side_effect = write_headers req = make_request('GET', '/', payload_writer=writer) resp = Response(body=b'answer') resp.enable_compression(ContentCoding.identity) await resp.prepare(req) assert resp.content_length == 6 async def test_remove_content_length_if_compression_enabled_on_payload_http11(): # noqa writer = mock.Mock() def write_headers(status_line, headers): assert hdrs.CONTENT_LENGTH not in headers assert headers.get(hdrs.TRANSFER_ENCODING, '') == 'chunked' writer.write_headers.side_effect = write_headers req = make_request('GET', '/', payload_writer=writer) payload = BytesPayload(b'answer', headers={"X-Test-Header": "test"}) resp = Response(body=payload) assert resp.content_length == 6 resp.body = payload resp.enable_compression(ContentCoding.gzip) await resp.prepare(req) assert resp.content_length is None async def 
test_remove_content_length_if_compression_enabled_on_payload_http10(): # noqa writer = mock.Mock() def write_headers(status_line, headers): assert hdrs.CONTENT_LENGTH not in headers assert hdrs.TRANSFER_ENCODING not in headers writer.write_headers.side_effect = write_headers req = make_request('GET', '/', version=HttpVersion10, payload_writer=writer) resp = Response(body=BytesPayload(b'answer')) resp.enable_compression(ContentCoding.gzip) await resp.prepare(req) assert resp.content_length is None async def test_content_length_on_chunked(): req = make_request('GET', '/') resp = Response(body=b'answer') assert resp.content_length == 6 resp.enable_chunked_encoding() assert resp.content_length is None await resp.prepare(req) async def test_write_non_byteish(): resp = StreamResponse() await resp.prepare(make_request('GET', '/')) with pytest.raises(AssertionError): await resp.write(123) async def test_write_before_start(): resp = StreamResponse() with pytest.raises(RuntimeError): await resp.write(b'data') async def test_cannot_write_after_eof(): resp = StreamResponse() req = make_request('GET', '/') await resp.prepare(req) await resp.write(b'data') await resp.write_eof() req.writer.write.reset_mock() with pytest.raises(RuntimeError): await resp.write(b'next data') assert not req.writer.write.called async def test___repr___after_eof(): resp = StreamResponse() await resp.prepare(make_request('GET', '/')) assert resp.prepared await resp.write(b'data') await resp.write_eof() assert not resp.prepared resp_repr = repr(resp) assert resp_repr == '' async def test_cannot_write_eof_before_headers(): resp = StreamResponse() with pytest.raises(AssertionError): await resp.write_eof() async def test_cannot_write_eof_twice(): resp = StreamResponse() writer = mock.Mock() resp_impl = await resp.prepare(make_request('GET', '/')) resp_impl.write = make_mocked_coro(None) resp_impl.write_eof = make_mocked_coro(None) await resp.write(b'data') assert resp_impl.write.called await 
resp.write_eof() resp_impl.write.reset_mock() await resp.write_eof() assert not writer.write.called def test_force_close(): resp = StreamResponse() assert resp.keep_alive is None resp.force_close() assert resp.keep_alive is False async def test_response_output_length(): resp = StreamResponse() await resp.prepare(make_request('GET', '/')) with pytest.warns(DeprecationWarning): assert resp.output_length def test_response_cookies(): resp = StreamResponse() assert resp.cookies == {} assert str(resp.cookies) == '' resp.set_cookie('name', 'value') assert str(resp.cookies) == 'Set-Cookie: name=value; Path=/' resp.set_cookie('name', 'other_value') assert str(resp.cookies) == 'Set-Cookie: name=other_value; Path=/' resp.cookies['name'] = 'another_other_value' resp.cookies['name']['max-age'] = 10 assert (str(resp.cookies) == 'Set-Cookie: name=another_other_value; Max-Age=10; Path=/') resp.del_cookie('name') expected = ('Set-Cookie: name=("")?; ' 'expires=Thu, 01 Jan 1970 00:00:00 GMT; Max-Age=0; Path=/') assert re.match(expected, str(resp.cookies)) resp.set_cookie('name', 'value', domain='local.host') expected = 'Set-Cookie: name=value; Domain=local.host; Path=/' assert str(resp.cookies) == expected def test_response_cookie_path(): resp = StreamResponse() assert resp.cookies == {} resp.set_cookie('name', 'value', path='/some/path') assert str(resp.cookies) == 'Set-Cookie: name=value; Path=/some/path' resp.set_cookie('name', 'value', expires='123') assert (str(resp.cookies) == 'Set-Cookie: name=value; expires=123; Path=/') resp.set_cookie('name', 'value', domain='example.com', path='/home', expires='123', max_age='10', secure=True, httponly=True, version='2.0') assert (str(resp.cookies).lower() == 'set-cookie: name=value; ' 'domain=example.com; ' 'expires=123; ' 'httponly; ' 'max-age=10; ' 'path=/home; ' 'secure; ' 'version=2.0') def test_response_cookie__issue_del_cookie(): resp = StreamResponse() assert resp.cookies == {} assert str(resp.cookies) == '' 
resp.del_cookie('name') expected = ('Set-Cookie: name=("")?; ' 'expires=Thu, 01 Jan 1970 00:00:00 GMT; Max-Age=0; Path=/') assert re.match(expected, str(resp.cookies)) def test_cookie_set_after_del(): resp = StreamResponse() resp.del_cookie('name') resp.set_cookie('name', 'val') # check for Max-Age dropped expected = 'Set-Cookie: name=val; Path=/' assert str(resp.cookies) == expected def test_set_status_with_reason(): resp = StreamResponse() resp.set_status(200, "Everithing is fine!") assert 200 == resp.status assert "Everithing is fine!" == resp.reason async def test_start_force_close(): req = make_request('GET', '/') resp = StreamResponse() resp.force_close() assert not resp.keep_alive await resp.prepare(req) assert not resp.keep_alive async def test___repr__(): req = make_request('GET', '/path/to') resp = StreamResponse(reason=301) await resp.prepare(req) assert "" == repr(resp) def test___repr___not_prepared(): resp = StreamResponse(reason=301) assert "" == repr(resp) async def test_keep_alive_http10_default(): req = make_request('GET', '/', version=HttpVersion10) resp = StreamResponse() await resp.prepare(req) assert not resp.keep_alive async def test_keep_alive_http10_switched_on(): headers = CIMultiDict(Connection='keep-alive') req = make_request('GET', '/', version=HttpVersion10, headers=headers) req._message = req._message._replace(should_close=False) resp = StreamResponse() await resp.prepare(req) assert resp.keep_alive async def test_keep_alive_http09(): headers = CIMultiDict(Connection='keep-alive') req = make_request('GET', '/', version=HttpVersion(0, 9), headers=headers) resp = StreamResponse() await resp.prepare(req) assert not resp.keep_alive async def test_prepare_twice(): req = make_request('GET', '/') resp = StreamResponse() impl1 = await resp.prepare(req) impl2 = await resp.prepare(req) assert impl1 is impl2 async def test_prepare_calls_signal(): app = mock.Mock() sig = make_mocked_coro() on_response_prepare = signals.Signal(app) 
on_response_prepare.append(sig) req = make_request('GET', '/', app=app, on_response_prepare=on_response_prepare) resp = StreamResponse() await resp.prepare(req) sig.assert_called_with(req, resp) # Response class def test_response_ctor(): resp = Response() assert 200 == resp.status assert 'OK' == resp.reason assert resp.body is None assert resp.content_length == 0 assert 'CONTENT-LENGTH' not in resp.headers def test_ctor_with_headers_and_status(): resp = Response(body=b'body', status=201, headers={'Age': '12', 'DATE': 'date'}) assert 201 == resp.status assert b'body' == resp.body assert resp.headers['AGE'] == '12' resp._start(mock.Mock(version=HttpVersion11)) assert 4 == resp.content_length assert resp.headers['CONTENT-LENGTH'] == '4' def test_ctor_content_type(): resp = Response(content_type='application/json') assert 200 == resp.status assert 'OK' == resp.reason assert 0 == resp.content_length assert (CIMultiDict([('CONTENT-TYPE', 'application/json')]) == resp.headers) def test_ctor_text_body_combined(): with pytest.raises(ValueError): Response(body=b'123', text='test text') def test_ctor_text(): resp = Response(text='test text') assert 200 == resp.status assert 'OK' == resp.reason assert 9 == resp.content_length assert (CIMultiDict( [('CONTENT-TYPE', 'text/plain; charset=utf-8')]) == resp.headers) assert resp.body == b'test text' assert resp.text == 'test text' resp.headers['DATE'] = 'date' resp._start(mock.Mock(version=HttpVersion11)) assert resp.headers['CONTENT-LENGTH'] == '9' def test_ctor_charset(): resp = Response(text='текÑÑ‚', charset='koi8-r') assert 'текÑÑ‚'.encode('koi8-r') == resp.body assert 'koi8-r' == resp.charset def test_ctor_charset_default_utf8(): resp = Response(text='test test', charset=None) assert 'utf-8' == resp.charset def test_ctor_charset_in_content_type(): with pytest.raises(ValueError): Response(text='test test', content_type='text/plain; charset=utf-8') def test_ctor_charset_without_text(): resp = Response(content_type='text/plain', 
charset='koi8-r') assert 'koi8-r' == resp.charset def test_ctor_content_type_with_extra(): resp = Response(text='test test', content_type='text/plain; version=0.0.4') assert resp.content_type == 'text/plain' assert resp.headers['content-type'] == \ 'text/plain; version=0.0.4; charset=utf-8' def test_ctor_both_content_type_param_and_header_with_text(): with pytest.raises(ValueError): Response(headers={'Content-Type': 'application/json'}, content_type='text/html', text='text') def test_ctor_both_charset_param_and_header_with_text(): with pytest.raises(ValueError): Response(headers={'Content-Type': 'application/json'}, charset='koi8-r', text='text') def test_ctor_both_content_type_param_and_header(): with pytest.raises(ValueError): Response(headers={'Content-Type': 'application/json'}, content_type='text/html') def test_ctor_both_charset_param_and_header(): with pytest.raises(ValueError): Response(headers={'Content-Type': 'application/json'}, charset='koi8-r') def test_assign_nonbyteish_body(): resp = Response(body=b'data') with pytest.raises(ValueError): resp.body = 123 assert b'data' == resp.body assert 4 == resp.content_length resp.headers['DATE'] = 'date' resp._start(mock.Mock(version=HttpVersion11)) assert resp.headers['CONTENT-LENGTH'] == '4' assert 4 == resp.content_length def test_assign_nonstr_text(): resp = Response(text='test') with pytest.raises(AssertionError): resp.text = b'123' assert b'test' == resp.body assert 4 == resp.content_length def test_response_set_content_length(): resp = Response() with pytest.raises(RuntimeError): resp.content_length = 1 async def test_send_headers_for_empty_body(buf, writer): req = make_request('GET', '/', payload_writer=writer) resp = Response() await resp.prepare(req) await resp.write_eof() txt = buf.decode('utf8') assert re.match('HTTP/1.1 200 OK\r\n' 'Content-Length: 0\r\n' 'Content-Type: application/octet-stream\r\n' 'Date: .+\r\n' 'Server: .+\r\n\r\n', txt) async def test_render_with_body(buf, writer): req = 
make_request('GET', '/', payload_writer=writer) resp = Response(body=b'data') await resp.prepare(req) await resp.write_eof() txt = buf.decode('utf8') assert re.match('HTTP/1.1 200 OK\r\n' 'Content-Length: 4\r\n' 'Content-Type: application/octet-stream\r\n' 'Date: .+\r\n' 'Server: .+\r\n\r\n' 'data', txt) async def test_send_set_cookie_header(buf, writer): resp = Response() resp.cookies['name'] = 'value' req = make_request('GET', '/', payload_writer=writer) await resp.prepare(req) await resp.write_eof() txt = buf.decode('utf8') assert re.match('HTTP/1.1 200 OK\r\n' 'Content-Length: 0\r\n' 'Set-Cookie: name=value\r\n' 'Content-Type: application/octet-stream\r\n' 'Date: .+\r\n' 'Server: .+\r\n\r\n', txt) async def test_consecutive_write_eof(): payload_writer = mock.Mock() payload_writer.write_eof = make_mocked_coro() req = make_request('GET', '/', payload_writer=payload_writer) data = b'data' resp = Response(body=data) await resp.prepare(req) await resp.write_eof() await resp.write_eof() payload_writer.write_eof.assert_called_once_with(data) def test_set_text_with_content_type(): resp = Response() resp.content_type = "text/html" resp.text = "text" assert "text" == resp.text assert b"text" == resp.body assert "text/html" == resp.content_type def test_set_text_with_charset(): resp = Response() resp.content_type = 'text/plain' resp.charset = "KOI8-R" resp.text = "текÑÑ‚" assert "текÑÑ‚" == resp.text assert "текÑÑ‚".encode('koi8-r') == resp.body assert "koi8-r" == resp.charset def test_default_content_type_in_stream_response(): resp = StreamResponse() assert resp.content_type == 'application/octet-stream' def test_default_content_type_in_response(): resp = Response() assert resp.content_type == 'application/octet-stream' def test_content_type_with_set_text(): resp = Response(text='text') assert resp.content_type == 'text/plain' def test_content_type_with_set_body(): resp = Response(body=b'body') assert resp.content_type == 'application/octet-stream' def 
test_started_when_not_started(): resp = StreamResponse() assert not resp.prepared async def test_started_when_started(): resp = StreamResponse() await resp.prepare(make_request('GET', '/')) assert resp.prepared async def test_drain_before_start(): resp = StreamResponse() with pytest.raises(AssertionError): await resp.drain() async def test_changing_status_after_prepare_raises(): resp = StreamResponse() await resp.prepare(make_request('GET', '/')) with pytest.raises(AssertionError): resp.set_status(400) def test_nonstr_text_in_ctor(): with pytest.raises(TypeError): Response(text=b'data') def test_text_in_ctor_with_content_type(): resp = Response(text='data', content_type='text/html') assert 'data' == resp.text assert 'text/html' == resp.content_type def test_text_in_ctor_with_content_type_header(): resp = Response(text='текÑÑ‚', headers={'Content-Type': 'text/html; charset=koi8-r'}) assert 'текÑÑ‚'.encode('koi8-r') == resp.body assert 'text/html' == resp.content_type assert 'koi8-r' == resp.charset def test_text_in_ctor_with_content_type_header_multidict(): headers = CIMultiDict({'Content-Type': 'text/html; charset=koi8-r'}) resp = Response(text='текÑÑ‚', headers=headers) assert 'текÑÑ‚'.encode('koi8-r') == resp.body assert 'text/html' == resp.content_type assert 'koi8-r' == resp.charset def test_body_in_ctor_with_content_type_header_multidict(): headers = CIMultiDict({'Content-Type': 'text/html; charset=koi8-r'}) resp = Response(body='текÑÑ‚'.encode('koi8-r'), headers=headers) assert 'текÑÑ‚'.encode('koi8-r') == resp.body assert 'text/html' == resp.content_type assert 'koi8-r' == resp.charset def test_text_with_empty_payload(): resp = Response(status=200) assert resp.body is None assert resp.text is None def test_response_with_content_length_header_without_body(): resp = Response(headers={'Content-Length': 123}) assert resp.content_length == 123 class TestJSONResponse: def test_content_type_is_application_json_by_default(self): resp = json_response('') assert 
'application/json' == resp.content_type def test_passing_text_only(self): resp = json_response(text=json.dumps('jaysawn')) assert resp.text == json.dumps('jaysawn') def test_data_and_text_raises_value_error(self): with pytest.raises(ValueError) as excinfo: json_response(data='foo', text='bar') expected_message = ( 'only one of data, text, or body should be specified' ) assert expected_message == excinfo.value.args[0] def test_data_and_body_raises_value_error(self): with pytest.raises(ValueError) as excinfo: json_response(data='foo', body=b'bar') expected_message = ( 'only one of data, text, or body should be specified' ) assert expected_message == excinfo.value.args[0] def test_text_is_json_encoded(self): resp = json_response({'foo': 42}) assert json.dumps({'foo': 42}) == resp.text def test_content_type_is_overrideable(self): resp = json_response({'foo': 42}, content_type='application/vnd.json+api') assert 'application/vnd.json+api' == resp.content_type aiohttp-3.0.1/tests/test_web_runner.py0000666000000000000000000000456213240304665016250 0ustar 00000000000000import asyncio import platform import signal import pytest from aiohttp import web @pytest.fixture def app(): return web.Application() @pytest.fixture def make_runner(loop, app): asyncio.set_event_loop(loop) runners = [] def go(**kwargs): runner = web.AppRunner(app, **kwargs) runners.append(runner) return runner yield go for runner in runners: loop.run_until_complete(runner.cleanup()) async def test_site_for_nonfrozen_app(make_runner): runner = make_runner() with pytest.raises(RuntimeError): web.TCPSite(runner) assert len(runner.sites) == 0 @pytest.mark.skipif(platform.system() == "Windows", reason="the test is not valid for Windows") async def test_runner_setup_handle_signals(make_runner): runner = make_runner(handle_signals=True) await runner.setup() assert signal.getsignal(signal.SIGTERM) is not signal.SIG_DFL await runner.cleanup() assert signal.getsignal(signal.SIGTERM) is signal.SIG_DFL 
@pytest.mark.skipif(platform.system() == "Windows", reason="the test is not valid for Windows") async def test_runner_setup_without_signal_handling(make_runner): runner = make_runner(handle_signals=False) await runner.setup() assert signal.getsignal(signal.SIGTERM) is signal.SIG_DFL await runner.cleanup() assert signal.getsignal(signal.SIGTERM) is signal.SIG_DFL async def test_site_double_added(make_runner): runner = make_runner() await runner.setup() site = web.TCPSite(runner) await site.start() with pytest.raises(RuntimeError): await site.start() assert len(runner.sites) == 1 async def test_site_stop_not_started(make_runner): runner = make_runner() await runner.setup() site = web.TCPSite(runner) with pytest.raises(RuntimeError): await site.stop() assert len(runner.sites) == 0 async def test_custom_log_format(make_runner): runner = make_runner(access_log_format='abc') await runner.setup() assert runner.server._kwargs['access_log_format'] == 'abc' async def test_unreg_site(make_runner): runner = make_runner() await runner.setup() site = web.TCPSite(runner) with pytest.raises(RuntimeError): runner._unreg_site(site) async def test_app_property(make_runner, app): runner = make_runner() assert runner.app is app aiohttp-3.0.1/tests/test_web_sendfile.py0000666000000000000000000001333413240304665016525 0ustar 00000000000000from unittest import mock from aiohttp import hdrs from aiohttp.test_utils import make_mocked_coro, make_mocked_request from aiohttp.web_fileresponse import FileResponse, SendfileStreamWriter def test_static_handle_eof(loop): fake_loop = mock.Mock() with mock.patch('aiohttp.web_fileresponse.os') as m_os: out_fd = 30 in_fd = 31 fut = loop.create_future() m_os.sendfile.return_value = 0 writer = SendfileStreamWriter(mock.Mock(), mock.Mock(), fake_loop) writer._sendfile_cb(fut, out_fd, in_fd, 0, 100, fake_loop, False) m_os.sendfile.assert_called_with(out_fd, in_fd, 0, 100) assert fut.done() assert fut.result() is None assert not fake_loop.add_writer.called 
assert not fake_loop.remove_writer.called def test_static_handle_again(loop): fake_loop = mock.Mock() with mock.patch('aiohttp.web_fileresponse.os') as m_os: out_fd = 30 in_fd = 31 fut = loop.create_future() m_os.sendfile.side_effect = BlockingIOError() writer = SendfileStreamWriter(mock.Mock(), mock.Mock(), fake_loop) writer._sendfile_cb(fut, out_fd, in_fd, 0, 100, fake_loop, False) m_os.sendfile.assert_called_with(out_fd, in_fd, 0, 100) assert not fut.done() fake_loop.add_writer.assert_called_with(out_fd, writer._sendfile_cb, fut, out_fd, in_fd, 0, 100, fake_loop, True) assert not fake_loop.remove_writer.called def test_static_handle_exception(loop): fake_loop = mock.Mock() with mock.patch('aiohttp.web_fileresponse.os') as m_os: out_fd = 30 in_fd = 31 fut = loop.create_future() exc = OSError() m_os.sendfile.side_effect = exc writer = SendfileStreamWriter(mock.Mock(), mock.Mock(), fake_loop) writer._sendfile_cb(fut, out_fd, in_fd, 0, 100, fake_loop, False) m_os.sendfile.assert_called_with(out_fd, in_fd, 0, 100) assert fut.done() assert exc is fut.exception() assert not fake_loop.add_writer.called assert not fake_loop.remove_writer.called def test__sendfile_cb_return_on_cancelling(loop): fake_loop = mock.Mock() with mock.patch('aiohttp.web_fileresponse.os') as m_os: out_fd = 30 in_fd = 31 fut = loop.create_future() fut.cancel() writer = SendfileStreamWriter(mock.Mock(), mock.Mock(), fake_loop) writer._sendfile_cb(fut, out_fd, in_fd, 0, 100, fake_loop, False) assert fut.done() assert not fake_loop.add_writer.called assert not fake_loop.remove_writer.called assert not m_os.sendfile.called def test_using_gzip_if_header_present_and_file_available(loop): request = make_mocked_request( 'GET', 'http://python.org/logo.png', headers={ hdrs.ACCEPT_ENCODING: 'gzip' } ) gz_filepath = mock.Mock() gz_filepath.open = mock.mock_open() gz_filepath.is_file.return_value = True gz_filepath.stat.return_value = mock.MagicMock() gz_filepath.stat.st_size = 1024 filepath = mock.Mock() 
filepath.name = 'logo.png' filepath.open = mock.mock_open() filepath.with_name.return_value = gz_filepath file_sender = FileResponse(filepath) file_sender._sendfile = make_mocked_coro(None) loop.run_until_complete(file_sender.prepare(request)) assert not filepath.open.called assert gz_filepath.open.called def test_gzip_if_header_not_present_and_file_available(loop): request = make_mocked_request( 'GET', 'http://python.org/logo.png', headers={ } ) gz_filepath = mock.Mock() gz_filepath.open = mock.mock_open() gz_filepath.is_file.return_value = True filepath = mock.Mock() filepath.name = 'logo.png' filepath.open = mock.mock_open() filepath.with_name.return_value = gz_filepath filepath.stat.return_value = mock.MagicMock() filepath.stat.st_size = 1024 file_sender = FileResponse(filepath) file_sender._sendfile = make_mocked_coro(None) loop.run_until_complete(file_sender.prepare(request)) assert filepath.open.called assert not gz_filepath.open.called def test_gzip_if_header_not_present_and_file_not_available(loop): request = make_mocked_request( 'GET', 'http://python.org/logo.png', headers={ } ) gz_filepath = mock.Mock() gz_filepath.open = mock.mock_open() gz_filepath.is_file.return_value = False filepath = mock.Mock() filepath.name = 'logo.png' filepath.open = mock.mock_open() filepath.with_name.return_value = gz_filepath filepath.stat.return_value = mock.MagicMock() filepath.stat.st_size = 1024 file_sender = FileResponse(filepath) file_sender._sendfile = make_mocked_coro(None) loop.run_until_complete(file_sender.prepare(request)) assert filepath.open.called assert not gz_filepath.open.called def test_gzip_if_header_present_and_file_not_available(loop): request = make_mocked_request( 'GET', 'http://python.org/logo.png', headers={ hdrs.ACCEPT_ENCODING: 'gzip' } ) gz_filepath = mock.Mock() gz_filepath.open = mock.mock_open() gz_filepath.is_file.return_value = False filepath = mock.Mock() filepath.name = 'logo.png' filepath.open = mock.mock_open() 
filepath.with_name.return_value = gz_filepath filepath.stat.return_value = mock.MagicMock() filepath.stat.st_size = 1024 file_sender = FileResponse(filepath) file_sender._sendfile = make_mocked_coro(None) loop.run_until_complete(file_sender.prepare(request)) assert filepath.open.called assert not gz_filepath.open.called aiohttp-3.0.1/tests/test_web_sendfile_functional.py0000666000000000000000000003556413240304665020760 0ustar 00000000000000import asyncio import os import pathlib import pytest import aiohttp from aiohttp import web try: import ssl except ImportError: ssl = False @pytest.fixture(params=['sendfile', 'fallback'], ids=['sendfile', 'fallback']) def sender(request): def maker(*args, **kwargs): ret = web.FileResponse(*args, **kwargs) if request.param == 'fallback': ret._sendfile = ret._sendfile_fallback return ret return maker async def test_static_file_ok(aiohttp_client, sender): filepath = pathlib.Path(__file__).parent / 'data.unknown_mime_type' async def handler(request): return sender(filepath) app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert resp.status == 200 txt = await resp.text() assert 'file content' == txt.rstrip() assert 'application/octet-stream' == resp.headers['Content-Type'] assert resp.headers.get('Content-Encoding') is None await resp.release() async def test_static_file_ok_string_path(aiohttp_client, sender): filepath = pathlib.Path(__file__).parent / 'data.unknown_mime_type' async def handler(request): return sender(str(filepath)) app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert resp.status == 200 txt = await resp.text() assert 'file content' == txt.rstrip() assert 'application/octet-stream' == resp.headers['Content-Type'] assert resp.headers.get('Content-Encoding') is None await resp.release() async def test_static_file_not_exists(aiohttp_client): app = web.Application() client = 
await aiohttp_client(app) resp = await client.get('/fake') assert resp.status == 404 await resp.release() async def test_static_file_name_too_long(aiohttp_client): app = web.Application() client = await aiohttp_client(app) resp = await client.get('/x*500') assert resp.status == 404 await resp.release() async def test_static_file_upper_directory(aiohttp_client): app = web.Application() client = await aiohttp_client(app) resp = await client.get('/../../') assert resp.status == 404 await resp.release() async def test_static_file_with_content_type(aiohttp_client, sender): filepath = (pathlib.Path(__file__).parent / 'aiohttp.jpg') async def handler(request): return sender(filepath, chunk_size=16) app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert resp.status == 200 body = await resp.read() with filepath.open('rb') as f: content = f.read() assert content == body assert resp.headers['Content-Type'] == 'image/jpeg' assert resp.headers.get('Content-Encoding') is None resp.close() async def test_static_file_custom_content_type(aiohttp_client, sender): filepath = (pathlib.Path(__file__).parent / 'hello.txt.gz') async def handler(request): resp = sender(filepath, chunk_size=16) resp.content_type = 'application/pdf' return resp app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert resp.status == 200 body = await resp.read() with filepath.open('rb') as f: content = f.read() assert content == body assert resp.headers['Content-Type'] == 'application/pdf' assert resp.headers.get('Content-Encoding') is None resp.close() async def test_static_file_custom_content_type_compress(aiohttp_client, sender): filepath = (pathlib.Path(__file__).parent / 'hello.txt') async def handler(request): resp = sender(filepath, chunk_size=16) resp.content_type = 'application/pdf' return resp app = web.Application() app.router.add_get('/', handler) client = 
await aiohttp_client(app) resp = await client.get('/') assert resp.status == 200 body = await resp.read() assert b'hello aiohttp\n' == body assert resp.headers['Content-Type'] == 'application/pdf' assert resp.headers.get('Content-Encoding') == 'gzip' resp.close() async def test_static_file_with_content_encoding(aiohttp_client, sender): filepath = pathlib.Path(__file__).parent / 'hello.txt.gz' async def handler(request): return sender(filepath) app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert 200 == resp.status body = await resp.read() assert b'hello aiohttp\n' == body ct = resp.headers['CONTENT-TYPE'] assert 'text/plain' == ct encoding = resp.headers['CONTENT-ENCODING'] assert 'gzip' == encoding resp.close() async def test_static_file_if_modified_since(aiohttp_client, sender): filename = 'data.unknown_mime_type' filepath = pathlib.Path(__file__).parent / filename async def handler(request): return sender(filepath) app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert 200 == resp.status lastmod = resp.headers.get('Last-Modified') assert lastmod is not None resp.close() resp = await client.get('/', headers={'If-Modified-Since': lastmod}) body = await resp.read() assert 304 == resp.status assert resp.headers.get('Content-Length') is None assert b'' == body resp.close() async def test_static_file_if_modified_since_past_date(aiohttp_client, sender): filename = 'data.unknown_mime_type' filepath = pathlib.Path(__file__).parent / filename async def handler(request): return sender(filepath) app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) lastmod = 'Mon, 1 Jan 1990 01:01:01 GMT' resp = await client.get('/', headers={'If-Modified-Since': lastmod}) assert 200 == resp.status resp.close() async def test_static_file_if_modified_since_invalid_date(aiohttp_client, sender): filename = 
'data.unknown_mime_type' filepath = pathlib.Path(__file__).parent / filename async def handler(request): return sender(filepath) app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) lastmod = 'not a valid HTTP-date' resp = await client.get('/', headers={'If-Modified-Since': lastmod}) assert 200 == resp.status resp.close() async def test_static_file_if_modified_since_future_date(aiohttp_client, sender): filename = 'data.unknown_mime_type' filepath = pathlib.Path(__file__).parent / filename async def handler(request): return sender(filepath) app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) lastmod = 'Fri, 31 Dec 9999 23:59:59 GMT' resp = await client.get('/', headers={'If-Modified-Since': lastmod}) body = await resp.read() assert 304 == resp.status assert resp.headers.get('Content-Length') is None assert b'' == body resp.close() @pytest.mark.skipif(not ssl, reason="ssl not supported") async def test_static_file_ssl(aiohttp_server, aiohttp_client): dirname = os.path.dirname(__file__) filename = 'data.unknown_mime_type' ssl_ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ssl_ctx.load_cert_chain( os.path.join(dirname, 'sample.crt'), os.path.join(dirname, 'sample.key') ) app = web.Application() app.router.add_static('/static', dirname) server = await aiohttp_server(app, ssl=ssl_ctx) conn = aiohttp.TCPConnector(ssl=False) client = await aiohttp_client(server, connector=conn) resp = await client.get('/static/'+filename) assert 200 == resp.status txt = await resp.text() assert 'file content' == txt.rstrip() ct = resp.headers['CONTENT-TYPE'] assert 'application/octet-stream' == ct assert resp.headers.get('CONTENT-ENCODING') is None async def test_static_file_directory_traversal_attack(loop, aiohttp_client): dirname = os.path.dirname(__file__) relpath = '../README.rst' assert os.path.isfile(os.path.join(dirname, relpath)) app = web.Application() app.router.add_static('/static', dirname) client = await 
aiohttp_client(app) resp = await client.get('/static/'+relpath) assert 404 == resp.status url_relpath2 = '/static/dir/../' + relpath resp = await client.get(url_relpath2) assert 404 == resp.status url_abspath = \ '/static/' + os.path.abspath(os.path.join(dirname, relpath)) resp = await client.get(url_abspath) assert 404 == resp.status def test_static_route_path_existence_check(): directory = os.path.dirname(__file__) web.StaticResource("/", directory) nodirectory = os.path.join(directory, "nonexistent-uPNiOEAg5d") with pytest.raises(ValueError): web.StaticResource("/", nodirectory) async def test_static_file_huge(loop, aiohttp_client, tmpdir): filename = 'huge_data.unknown_mime_type' # fill 100MB file with tmpdir.join(filename).open('w') as f: for i in range(1024*20): f.write(chr(i % 64 + 0x20) * 1024) file_st = os.stat(str(tmpdir.join(filename))) app = web.Application() app.router.add_static('/static', str(tmpdir)) client = await aiohttp_client(app) resp = await client.get('/static/'+filename) assert 200 == resp.status ct = resp.headers['CONTENT-TYPE'] assert 'application/octet-stream' == ct assert resp.headers.get('CONTENT-ENCODING') is None assert int(resp.headers.get('CONTENT-LENGTH')) == file_st.st_size f = tmpdir.join(filename).open('rb') off = 0 cnt = 0 while off < file_st.st_size: chunk = await resp.content.readany() expected = f.read(len(chunk)) assert chunk == expected off += len(chunk) cnt += 1 f.close() async def test_static_file_range(loop, aiohttp_client, sender): filepath = (pathlib.Path(__file__).parent.parent / 'LICENSE.txt') async def handler(request): return sender(filepath, chunk_size=16) app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(lambda loop: app) with filepath.open('rb') as f: content = f.read() # Ensure the whole file requested in parts is correct responses = await asyncio.gather( client.get('/', headers={'Range': 'bytes=0-999'}), client.get('/', headers={'Range': 'bytes=1000-1999'}), 
client.get('/', headers={'Range': 'bytes=2000-'}), loop=loop ) assert len(responses) == 3 assert responses[0].status == 206, \ "failed 'bytes=0-999': %s" % responses[0].reason assert responses[1].status == 206, \ "failed 'bytes=1000-1999': %s" % responses[1].reason assert responses[2].status == 206, \ "failed 'bytes=2000-': %s" % responses[2].reason body = await asyncio.gather( *(resp.read() for resp in responses), loop=loop ) assert len(body[0]) == 1000, \ "failed 'bytes=0-999', received %d bytes" % len(body[0]) assert len(body[1]) == 1000, \ "failed 'bytes=1000-1999', received %d bytes" % len(body[1]) responses[0].close() responses[1].close() responses[2].close() assert content == b"".join(body) async def test_static_file_range_end_bigger_than_size( loop, aiohttp_client, sender ): filepath = (pathlib.Path(__file__).parent / 'aiohttp.png') async def handler(request): return sender(filepath, chunk_size=16) app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(lambda loop: app) with filepath.open('rb') as f: content = f.read() # Ensure the whole file requested in parts is correct response = await client.get( '/', headers={'Range': 'bytes=61000-62000'}) assert response.status == 206, \ "failed 'bytes=61000-62000': %s" % response.reason body = await response.read() assert len(body) == 108, \ "failed 'bytes=0-999', received %d bytes" % len(body[0]) assert content[61000:] == body async def test_static_file_range_beyond_eof(loop, aiohttp_client, sender): filepath = (pathlib.Path(__file__).parent / 'aiohttp.png') async def handler(request): return sender(filepath, chunk_size=16) app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(lambda loop: app) # Ensure the whole file requested in parts is correct response = await client.get( '/', headers={'Range': 'bytes=1000000-1200000'}) assert response.status == 206, \ "failed 'bytes=1000000-1200000': %s" % response.reason assert response.headers['content-length'] 
== '0' async def test_static_file_range_tail(loop, aiohttp_client, sender): filepath = (pathlib.Path(__file__).parent / 'aiohttp.png') async def handler(request): return sender(filepath, chunk_size=16) app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(lambda loop: app) with filepath.open('rb') as f: content = f.read() # Ensure the tail of the file is correct resp = await client.get('/', headers={'Range': 'bytes=-500'}) assert resp.status == 206, resp.reason body4 = await resp.read() resp.close() assert content[-500:] == body4 async def test_static_file_invalid_range(loop, aiohttp_client, sender): filepath = (pathlib.Path(__file__).parent / 'aiohttp.png') async def handler(request): return sender(filepath, chunk_size=16) app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(lambda loop: app) # range must be in bytes resp = await client.get('/', headers={'Range': 'blocks=0-10'}) assert resp.status == 416, 'Range must be in bytes' resp.close() # start > end resp = await client.get('/', headers={'Range': 'bytes=100-0'}) assert resp.status == 416, "Range start can't be greater than end" resp.close() # start > end resp = await client.get('/', headers={'Range': 'bytes=10-9'}) assert resp.status == 416, "Range start can't be greater than end" resp.close() # non-number range resp = await client.get('/', headers={'Range': 'bytes=a-f'}) assert resp.status == 416, 'Range must be integers' resp.close() # double dash range resp = await client.get('/', headers={'Range': 'bytes=0--10'}) assert resp.status == 416, 'double dash in range' resp.close() # no range resp = await client.get('/', headers={'Range': 'bytes=-'}) assert resp.status == 416, 'no range given' resp.close() aiohttp-3.0.1/tests/test_web_server.py0000666000000000000000000000667213240304665016251 0ustar 00000000000000import asyncio from unittest import mock import pytest from aiohttp import client, web async def 
test_simple_server(aiohttp_raw_server, aiohttp_client): async def handler(request): return web.Response(text=str(request.rel_url)) server = await aiohttp_raw_server(handler) cli = await aiohttp_client(server) resp = await cli.get('/path/to') assert resp.status == 200 txt = await resp.text() assert txt == '/path/to' async def test_raw_server_not_http_exception(aiohttp_raw_server, aiohttp_client): exc = RuntimeError("custom runtime error") async def handler(request): raise exc logger = mock.Mock() server = await aiohttp_raw_server(handler, logger=logger) cli = await aiohttp_client(server) resp = await cli.get('/path/to') assert resp.status == 500 txt = await resp.text() assert "

    500 Internal Server Error

    " in txt logger.exception.assert_called_with( "Error handling request", exc_info=exc) async def test_raw_server_handler_timeout(aiohttp_raw_server, aiohttp_client): exc = asyncio.TimeoutError("error") async def handler(request): raise exc logger = mock.Mock() server = await aiohttp_raw_server(handler, logger=logger) cli = await aiohttp_client(server) resp = await cli.get('/path/to') assert resp.status == 504 await resp.text() logger.debug.assert_called_with("Request handler timed out.") async def test_raw_server_do_not_swallow_exceptions(aiohttp_raw_server, aiohttp_client): async def handler(request): raise asyncio.CancelledError() logger = mock.Mock() server = await aiohttp_raw_server(handler, logger=logger) cli = await aiohttp_client(server) with pytest.raises(client.ServerDisconnectedError): await cli.get('/path/to') logger.debug.assert_called_with('Ignored premature client disconnection') async def test_raw_server_cancelled_in_write_eof(aiohttp_raw_server, aiohttp_client): async def handler(request): resp = web.Response(text=str(request.rel_url)) resp.write_eof = mock.Mock(side_effect=asyncio.CancelledError("error")) return resp logger = mock.Mock() server = await aiohttp_raw_server(handler, logger=logger) cli = await aiohttp_client(server) resp = await cli.get('/path/to') with pytest.raises(client.ClientPayloadError): await resp.read() logger.debug.assert_called_with('Ignored premature client disconnection ') async def test_raw_server_not_http_exception_debug(aiohttp_raw_server, aiohttp_client): exc = RuntimeError("custom runtime error") async def handler(request): raise exc logger = mock.Mock() server = await aiohttp_raw_server(handler, logger=logger, debug=True) cli = await aiohttp_client(server) resp = await cli.get('/path/to') assert resp.status == 500 txt = await resp.text() assert "

    Traceback:

    " in txt logger.exception.assert_called_with( "Error handling request", exc_info=exc) def test_create_web_server_with_implicit_loop(loop): asyncio.set_event_loop(loop) async def handler(request): return web.Response() # pragma: no cover srv = web.Server(handler) assert srv._loop is loop aiohttp-3.0.1/tests/test_web_urldispatcher.py0000666000000000000000000003120413240304665017601 0ustar 00000000000000import functools import os import shutil import tempfile from unittest import mock from unittest.mock import MagicMock import pytest from aiohttp import abc, web from aiohttp.web_urldispatcher import SystemRoute @pytest.fixture(scope='function') def tmp_dir_path(request): """ Give a path for a temporary directory The directory is destroyed at the end of the test. """ # Temporary directory. tmp_dir = tempfile.mkdtemp() def teardown(): # Delete the whole directory: shutil.rmtree(tmp_dir) request.addfinalizer(teardown) return tmp_dir @pytest.mark.parametrize( "show_index,status,prefix,data", [pytest.param(False, 403, '/', None, id="index_forbidden"), pytest.param(True, 200, '/', b'\n\nIndex of /.\n' b'\n\n

    Index of /.

    \n
    \n\n', id="index_root"), pytest.param(True, 200, '/static', b'\n\nIndex of /.\n' b'\n\n

    Index of /.

    \n\n\n', id="index_static")]) async def test_access_root_of_static_handler(tmp_dir_path, aiohttp_client, show_index, status, prefix, data): """ Tests the operation of static file server. Try to access the root of static file server, and make sure that correct HTTP statuses are returned depending if we directory index should be shown or not. """ # Put a file inside tmp_dir_path: my_file_path = os.path.join(tmp_dir_path, 'my_file') with open(my_file_path, 'w') as fw: fw.write('hello') my_dir_path = os.path.join(tmp_dir_path, 'my_dir') os.mkdir(my_dir_path) my_file_path = os.path.join(my_dir_path, 'my_file_in_dir') with open(my_file_path, 'w') as fw: fw.write('world') app = web.Application() # Register global static route: app.router.add_static(prefix, tmp_dir_path, show_index=show_index) client = await aiohttp_client(app) # Request the root of the static directory. r = await client.get(prefix) assert r.status == status if data: assert r.headers['Content-Type'] == "text/html; charset=utf-8" read_ = (await r.read()) assert read_ == data async def test_follow_symlink(tmp_dir_path, aiohttp_client): """ Tests the access to a symlink, in static folder """ data = 'hello world' my_dir_path = os.path.join(tmp_dir_path, 'my_dir') os.mkdir(my_dir_path) my_file_path = os.path.join(my_dir_path, 'my_file_in_dir') with open(my_file_path, 'w') as fw: fw.write(data) my_symlink_path = os.path.join(tmp_dir_path, 'my_symlink') os.symlink(my_dir_path, my_symlink_path) app = web.Application() # Register global static route: app.router.add_static('/', tmp_dir_path, follow_symlinks=True) client = await aiohttp_client(app) # Request the root of the static directory. 
r = await client.get('/my_symlink/my_file_in_dir') assert r.status == 200 assert (await r.text()) == data @pytest.mark.parametrize('dir_name,filename,data', [ ('', 'test file.txt', 'test text'), ('test dir name', 'test dir file .txt', 'test text file folder') ]) async def test_access_to_the_file_with_spaces(tmp_dir_path, aiohttp_client, dir_name, filename, data): """ Checks operation of static files with spaces """ my_dir_path = os.path.join(tmp_dir_path, dir_name) if dir_name: os.mkdir(my_dir_path) my_file_path = os.path.join(my_dir_path, filename) with open(my_file_path, 'w') as fw: fw.write(data) app = web.Application() url = os.path.join('/', dir_name, filename) app.router.add_static('/', tmp_dir_path) client = await aiohttp_client(app) r = await client.get(url) assert r.status == 200 assert (await r.text()) == data async def test_access_non_existing_resource(tmp_dir_path, aiohttp_client): """ Tests accessing non-existing resource Try to access a non-exiting resource and make sure that 404 HTTP status returned. """ app = web.Application() # Register global static route: app.router.add_static('/', tmp_dir_path, show_index=True) client = await aiohttp_client(app) # Request the root of the static directory. r = await client.get('/non_existing_resource') assert r.status == 404 @pytest.mark.parametrize('registered_path,request_url', [ ('/a:b', '/a:b'), ('/a@b', '/a@b'), ('/a:b', '/a%3Ab'), ]) async def test_url_escaping(aiohttp_client, registered_path, request_url): """ Tests accessing a resource with """ app = web.Application() async def handler(request): return web.Response() app.router.add_get(registered_path, handler) client = await aiohttp_client(app) r = await client.get(request_url) assert r.status == 200 async def test_handler_metadata_persistence(): """ Tests accessing metadata of a handler after registering it on the app router. 
""" app = web.Application() async def async_handler(request): """Doc""" return web.Response() def sync_handler(request): """Doc""" return web.Response() app.router.add_get('/async', async_handler) with pytest.warns(DeprecationWarning): app.router.add_get('/sync', sync_handler) for resource in app.router.resources(): for route in resource: assert route.handler.__doc__ == 'Doc' async def test_unauthorized_folder_access(tmp_dir_path, aiohttp_client): """ Tests the unauthorized access to a folder of static file server. Try to list a folder content of static file server when server does not have permissions to do so for the folder. """ my_dir_path = os.path.join(tmp_dir_path, 'my_dir') os.mkdir(my_dir_path) app = web.Application() with mock.patch('pathlib.Path.__new__') as path_constructor: path = MagicMock() path.joinpath.return_value = path path.resolve.return_value = path path.iterdir.return_value.__iter__.side_effect = PermissionError() path_constructor.return_value = path # Register global static route: app.router.add_static('/', tmp_dir_path, show_index=True) client = await aiohttp_client(app) # Request the root of the static directory. r = await client.get('/my_dir') assert r.status == 403 async def test_access_symlink_loop(tmp_dir_path, aiohttp_client): """ Tests the access to a looped symlink, which could not be resolved. """ my_dir_path = os.path.join(tmp_dir_path, 'my_symlink') os.symlink(my_dir_path, my_dir_path) app = web.Application() # Register global static route: app.router.add_static('/', tmp_dir_path, show_index=True) client = await aiohttp_client(app) # Request the root of the static directory. r = await client.get('/my_symlink') assert r.status == 404 async def test_access_special_resource(tmp_dir_path, aiohttp_client): """ Tests the access to a resource that is neither a file nor a directory. Checks that if a special resource is accessed (f.e. named pipe or UNIX domain socket) then 404 HTTP status returned. 
""" app = web.Application() with mock.patch('pathlib.Path.__new__') as path_constructor: special = MagicMock() special.is_dir.return_value = False special.is_file.return_value = False path = MagicMock() path.joinpath.side_effect = lambda p: (special if p == 'special' else path) path.resolve.return_value = path special.resolve.return_value = special path_constructor.return_value = path # Register global static route: app.router.add_static('/', tmp_dir_path, show_index=True) client = await aiohttp_client(app) # Request the root of the static directory. r = await client.get('/special') assert r.status == 404 async def test_partialy_applied_handler(aiohttp_client): app = web.Application() async def handler(data, request): return web.Response(body=data) with pytest.warns(DeprecationWarning): app.router.add_route('GET', '/', functools.partial(handler, b'hello')) client = await aiohttp_client(app) r = await client.get('/') data = (await r.read()) assert data == b'hello' def test_system_route(): route = SystemRoute(web.HTTPCreated(reason='test')) with pytest.raises(RuntimeError): route.url_for() assert route.name is None assert route.resource is None assert "" == repr(route) assert 201 == route.status assert 'test' == route.reason async def test_412_is_returned(aiohttp_client): class MyRouter(abc.AbstractRouter): async def resolve(self, request): raise web.HTTPPreconditionFailed() app = web.Application(router=MyRouter()) client = await aiohttp_client(app) resp = await client.get('/') assert resp.status == 412 async def test_allow_head(aiohttp_client): """ Test allow_head on routes. 
""" app = web.Application() async def handler(_): return web.Response() app.router.add_get('/a', handler, name='a') app.router.add_get('/b', handler, allow_head=False, name='b') client = await aiohttp_client(app) r = await client.get('/a') assert r.status == 200 await r.release() r = await client.head('/a') assert r.status == 200 await r.release() r = await client.get('/b') assert r.status == 200 await r.release() r = await client.head('/b') assert r.status == 405 await r.release() @pytest.mark.parametrize("path", [ '/a', '/{a}', ]) def test_reuse_last_added_resource(path): """ Test that adding a route with the same name and path of the last added resource doesn't create a new resource. """ app = web.Application() async def handler(request): return web.Response() app.router.add_get(path, handler, name="a") app.router.add_post(path, handler, name="a") assert len(app.router.resources()) == 1 def test_resource_raw_match(): app = web.Application() async def handler(request): return web.Response() route = app.router.add_get("/a", handler, name="a") assert route.resource.raw_match("/a") route = app.router.add_get("/{b}", handler, name="b") assert route.resource.raw_match("/{b}") resource = app.router.add_static("/static", ".") assert not resource.raw_match("/static") async def test_add_view(aiohttp_client): app = web.Application() class MyView(web.View): async def get(self): return web.Response() async def post(self): return web.Response() app.router.add_view("/a", MyView) client = await aiohttp_client(app) r = await client.get("/a") assert r.status == 200 await r.release() r = await client.post("/a") assert r.status == 200 await r.release() r = await client.put("/a") assert r.status == 405 await r.release() async def test_decorate_view(aiohttp_client): routes = web.RouteTableDef() @routes.view("/a") class MyView(web.View): async def get(self): return web.Response() async def post(self): return web.Response() app = web.Application() app.router.add_routes(routes) client = 
await aiohttp_client(app) r = await client.get("/a") assert r.status == 200 await r.release() r = await client.post("/a") assert r.status == 200 await r.release() r = await client.put("/a") assert r.status == 405 await r.release() async def test_web_view(aiohttp_client): app = web.Application() class MyView(web.View): async def get(self): return web.Response() async def post(self): return web.Response() app.router.add_routes([ web.view("/a", MyView) ]) client = await aiohttp_client(app) r = await client.get("/a") assert r.status == 200 await r.release() r = await client.post("/a") assert r.status == 200 await r.release() r = await client.put("/a") assert r.status == 405 await r.release() aiohttp-3.0.1/tests/test_web_websocket.py0000666000000000000000000003171213240304665016722 0ustar 00000000000000import asyncio from unittest import mock import pytest from multidict import CIMultiDict from aiohttp import WSMessage, WSMsgType, signals from aiohttp.log import ws_logger from aiohttp.test_utils import make_mocked_coro, make_mocked_request from aiohttp.web import HTTPBadRequest, HTTPMethodNotAllowed, WebSocketResponse from aiohttp.web_ws import WS_CLOSED_MESSAGE, WebSocketReady @pytest.fixture def app(loop): ret = mock.Mock() ret.loop = loop ret._debug = False ret.on_response_prepare = signals.Signal(ret) ret.on_response_prepare.freeze() return ret @pytest.fixture def writer(loop): writer = mock.Mock() writer.drain.return_value = loop.create_future() writer.drain.return_value.set_result(None) writer.write_eof.return_value = loop.create_future() writer.write_eof.return_value.set_result(None) return writer @pytest.fixture def protocol(): ret = mock.Mock() ret.set_parser.return_value = ret return ret @pytest.fixture def make_request(app, protocol, writer): def maker(method, path, headers=None, protocols=False): if headers is None: headers = CIMultiDict( {'HOST': 'server.example.com', 'UPGRADE': 'websocket', 'CONNECTION': 'Upgrade', 'SEC-WEBSOCKET-KEY': 
'dGhlIHNhbXBsZSBub25jZQ==', 'ORIGIN': 'http://example.com', 'SEC-WEBSOCKET-VERSION': '13'}) if protocols: headers['SEC-WEBSOCKET-PROTOCOL'] = 'chat, superchat' return make_mocked_request( method, path, headers, app=app, protocol=protocol, payload_writer=writer, loop=app.loop) return maker async def test_nonstarted_ping(): ws = WebSocketResponse() with pytest.raises(RuntimeError): await ws.ping() async def test_nonstarted_pong(): ws = WebSocketResponse() with pytest.raises(RuntimeError): await ws.pong() async def test_nonstarted_send_str(): ws = WebSocketResponse() with pytest.raises(RuntimeError): await ws.send_str('string') async def test_nonstarted_send_bytes(): ws = WebSocketResponse() with pytest.raises(RuntimeError): await ws.send_bytes(b'bytes') async def test_nonstarted_send_json(): ws = WebSocketResponse() with pytest.raises(RuntimeError): await ws.send_json({'type': 'json'}) async def test_nonstarted_close(): ws = WebSocketResponse() with pytest.raises(RuntimeError): await ws.close() async def test_nonstarted_receive_str(): ws = WebSocketResponse() with pytest.raises(RuntimeError): await ws.receive_str() async def test_nonstarted_receive_bytes(): ws = WebSocketResponse() with pytest.raises(RuntimeError): await ws.receive_bytes() async def test_nonstarted_receive_json(): ws = WebSocketResponse() with pytest.raises(RuntimeError): await ws.receive_json() async def test_receive_str_nonstring(make_request): req = make_request('GET', '/') ws = WebSocketResponse() await ws.prepare(req) async def receive(): return WSMessage(WSMsgType.BINARY, b'data', b'') ws.receive = receive with pytest.raises(TypeError): await ws.receive_str() async def test_receive_bytes_nonsbytes(make_request): req = make_request('GET', '/') ws = WebSocketResponse() await ws.prepare(req) async def receive(): return WSMessage(WSMsgType.TEXT, 'data', b'') ws.receive = receive with pytest.raises(TypeError): await ws.receive_bytes() async def test_send_str_nonstring(make_request): req = 
make_request('GET', '/') ws = WebSocketResponse() await ws.prepare(req) with pytest.raises(TypeError): await ws.send_str(b'bytes') async def test_send_bytes_nonbytes(make_request): req = make_request('GET', '/') ws = WebSocketResponse() await ws.prepare(req) with pytest.raises(TypeError): await ws.send_bytes('string') async def test_send_json_nonjson(make_request): req = make_request('GET', '/') ws = WebSocketResponse() await ws.prepare(req) with pytest.raises(TypeError): await ws.send_json(set()) async def test_write_non_prepared(): ws = WebSocketResponse() with pytest.raises(RuntimeError): await ws.write(b'data') def test_websocket_ready(): websocket_ready = WebSocketReady(True, 'chat') assert websocket_ready.ok is True assert websocket_ready.protocol == 'chat' def test_websocket_not_ready(): websocket_ready = WebSocketReady(False, None) assert websocket_ready.ok is False assert websocket_ready.protocol is None def test_websocket_ready_unknown_protocol(): websocket_ready = WebSocketReady(True, None) assert websocket_ready.ok is True assert websocket_ready.protocol is None def test_bool_websocket_ready(): websocket_ready = WebSocketReady(True, None) assert bool(websocket_ready) is True def test_bool_websocket_not_ready(): websocket_ready = WebSocketReady(False, None) assert bool(websocket_ready) is False def test_can_prepare_ok(make_request): req = make_request('GET', '/', protocols=True) ws = WebSocketResponse(protocols=('chat',)) assert WebSocketReady(True, 'chat') == ws.can_prepare(req) def test_can_prepare_unknown_protocol(make_request): req = make_request('GET', '/') ws = WebSocketResponse() assert WebSocketReady(True, None) == ws.can_prepare(req) def test_can_prepare_invalid_method(make_request): req = make_request('POST', '/') ws = WebSocketResponse() assert WebSocketReady(False, None) == ws.can_prepare(req) def test_can_prepare_without_upgrade(make_request): req = make_request('GET', '/', headers=CIMultiDict({})) ws = WebSocketResponse() assert 
WebSocketReady(False, None) == ws.can_prepare(req) async def test_can_prepare_started(make_request): req = make_request('GET', '/') ws = WebSocketResponse() await ws.prepare(req) with pytest.raises(RuntimeError) as ctx: ws.can_prepare(req) assert 'Already started' in str(ctx.value) def test_closed_after_ctor(): ws = WebSocketResponse() assert not ws.closed assert ws.close_code is None async def test_send_str_closed(make_request, mocker): req = make_request('GET', '/') ws = WebSocketResponse() await ws.prepare(req) ws._reader.feed_data(WS_CLOSED_MESSAGE, 0) await ws.close() mocker.spy(ws_logger, 'warning') await ws.send_str('string') assert ws_logger.warning.called async def test_send_bytes_closed(make_request, mocker): req = make_request('GET', '/') ws = WebSocketResponse() await ws.prepare(req) ws._reader.feed_data(WS_CLOSED_MESSAGE, 0) await ws.close() mocker.spy(ws_logger, 'warning') await ws.send_bytes(b'bytes') assert ws_logger.warning.called async def test_send_json_closed(make_request, mocker): req = make_request('GET', '/') ws = WebSocketResponse() await ws.prepare(req) ws._reader.feed_data(WS_CLOSED_MESSAGE, 0) await ws.close() mocker.spy(ws_logger, 'warning') await ws.send_json({'type': 'json'}) assert ws_logger.warning.called async def test_ping_closed(make_request, mocker): req = make_request('GET', '/') ws = WebSocketResponse() await ws.prepare(req) ws._reader.feed_data(WS_CLOSED_MESSAGE, 0) await ws.close() mocker.spy(ws_logger, 'warning') await ws.ping() assert ws_logger.warning.called async def test_pong_closed(make_request, mocker): req = make_request('GET', '/') ws = WebSocketResponse() await ws.prepare(req) ws._reader.feed_data(WS_CLOSED_MESSAGE, 0) await ws.close() mocker.spy(ws_logger, 'warning') await ws.pong() assert ws_logger.warning.called async def test_close_idempotent(make_request, writer): req = make_request('GET', '/') ws = WebSocketResponse() await ws.prepare(req) ws._reader.feed_data(WS_CLOSED_MESSAGE, 0) assert (await 
ws.close(code=1, message='message1')) assert ws.closed assert not (await ws.close(code=2, message='message2')) async def test_prepare_invalid_method(make_request): req = make_request('POST', '/') ws = WebSocketResponse() with pytest.raises(HTTPMethodNotAllowed): await ws.prepare(req) async def test_prepare_without_upgrade(make_request): req = make_request('GET', '/', headers=CIMultiDict({})) ws = WebSocketResponse() with pytest.raises(HTTPBadRequest): await ws.prepare(req) async def test_wait_closed_before_start(): ws = WebSocketResponse() with pytest.raises(RuntimeError): await ws.close() async def test_write_eof_not_started(): ws = WebSocketResponse() with pytest.raises(RuntimeError): await ws.write_eof() async def test_write_eof_idempotent(make_request): req = make_request('GET', '/') ws = WebSocketResponse() await ws.prepare(req) ws._reader.feed_data(WS_CLOSED_MESSAGE, 0) await ws.close() await ws.write_eof() await ws.write_eof() await ws.write_eof() async def test_receive_exc_in_reader(make_request, loop): req = make_request('GET', '/') ws = WebSocketResponse() await ws.prepare(req) ws._reader = mock.Mock() exc = ValueError() res = loop.create_future() res.set_exception(exc) ws._reader.read = make_mocked_coro(res) ws._payload_writer.drain = mock.Mock() ws._payload_writer.drain.return_value = loop.create_future() ws._payload_writer.drain.return_value.set_result(True) msg = await ws.receive() assert msg.type == WSMsgType.ERROR assert msg.data is exc assert ws.exception() is exc async def test_receive_cancelled(make_request, loop): req = make_request('GET', '/') ws = WebSocketResponse() await ws.prepare(req) ws._reader = mock.Mock() res = loop.create_future() res.set_exception(asyncio.CancelledError()) ws._reader.read = make_mocked_coro(res) with pytest.raises(asyncio.CancelledError): await ws.receive() async def test_receive_timeouterror(make_request, loop): req = make_request('GET', '/') ws = WebSocketResponse() await ws.prepare(req) ws._reader = mock.Mock() 
res = loop.create_future() res.set_exception(asyncio.TimeoutError()) ws._reader.read = make_mocked_coro(res) with pytest.raises(asyncio.TimeoutError): await ws.receive() async def test_multiple_receive_on_close_connection(make_request): req = make_request('GET', '/') ws = WebSocketResponse() await ws.prepare(req) ws._reader.feed_data(WS_CLOSED_MESSAGE, 0) await ws.close() await ws.receive() await ws.receive() await ws.receive() await ws.receive() with pytest.raises(RuntimeError): await ws.receive() async def test_concurrent_receive(make_request): req = make_request('GET', '/') ws = WebSocketResponse() await ws.prepare(req) ws._waiting = True with pytest.raises(RuntimeError): await ws.receive() async def test_close_exc(make_request, loop, mocker): req = make_request('GET', '/') ws = WebSocketResponse() await ws.prepare(req) ws._reader = mock.Mock() exc = ValueError() ws._reader.read.return_value = loop.create_future() ws._reader.read.return_value.set_exception(exc) ws._payload_writer.drain = mock.Mock() ws._payload_writer.drain.return_value = loop.create_future() ws._payload_writer.drain.return_value.set_result(True) await ws.close() assert ws.closed assert ws.exception() is exc ws._closed = False ws._reader.read.return_value = loop.create_future() ws._reader.read.return_value.set_exception(asyncio.CancelledError()) with pytest.raises(asyncio.CancelledError): await ws.close() assert ws.close_code == 1006 async def test_close_exc2(make_request): req = make_request('GET', '/') ws = WebSocketResponse() await ws.prepare(req) exc = ValueError() ws._writer = mock.Mock() ws._writer.close.side_effect = exc await ws.close() assert ws.closed assert ws.exception() is exc ws._closed = False ws._writer.close.side_effect = asyncio.CancelledError() with pytest.raises(asyncio.CancelledError): await ws.close() async def test_prepare_twice_idempotent(make_request): req = make_request('GET', '/') ws = WebSocketResponse() impl1 = await ws.prepare(req) impl2 = await ws.prepare(req) 
assert impl1 is impl2 async def test_send_with_per_message_deflate(make_request, mocker): req = make_request('GET', '/') ws = WebSocketResponse() await ws.prepare(req) writer_send = ws._writer.send = make_mocked_coro() await ws.send_str('string', compress=15) writer_send.assert_called_with('string', binary=False, compress=15) await ws.send_bytes(b'bytes', compress=0) writer_send.assert_called_with(b'bytes', binary=True, compress=0) await ws.send_json('[{}]', compress=9) writer_send.assert_called_with('"[{}]"', binary=False, compress=9) aiohttp-3.0.1/tests/test_web_websocket_functional.py0000666000000000000000000004677213240304665021160 0ustar 00000000000000"""HTTP websocket server functional tests""" import asyncio import pytest import aiohttp from aiohttp import web from aiohttp.http import WSMsgType @pytest.fixture def ceil(mocker): def ceil(val): return val mocker.patch('aiohttp.helpers.ceil').side_effect = ceil async def test_websocket_can_prepare(loop, aiohttp_client): async def handler(request): ws = web.WebSocketResponse() if not ws.can_prepare(request): raise web.HTTPUpgradeRequired() return web.Response() app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.get('/') assert resp.status == 426 async def test_websocket_json(loop, aiohttp_client): async def handler(request): ws = web.WebSocketResponse() if not ws.can_prepare(request): return web.HTTPUpgradeRequired() await ws.prepare(request) msg = await ws.receive() msg_json = msg.json() answer = msg_json['test'] await ws.send_str(answer) await ws.close() return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) ws = await client.ws_connect('/') expected_value = 'value' payload = '{"test": "%s"}' % expected_value await ws.send_str(payload) resp = await ws.receive() assert resp.data == expected_value async def test_websocket_json_invalid_message(loop, aiohttp_client): async def 
handler(request): ws = web.WebSocketResponse() await ws.prepare(request) try: await ws.receive_json() except ValueError: await ws.send_str('ValueError was raised') else: raise Exception('No Exception') finally: await ws.close() return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) ws = await client.ws_connect('/') payload = 'NOT A VALID JSON STRING' await ws.send_str(payload) data = await ws.receive_str() assert 'ValueError was raised' in data async def test_websocket_send_json(loop, aiohttp_client): async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) data = await ws.receive_json() await ws.send_json(data) await ws.close() return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) ws = await client.ws_connect('/') expected_value = 'value' await ws.send_json({'test': expected_value}) data = await ws.receive_json() assert data['test'] == expected_value async def test_websocket_receive_json(loop, aiohttp_client): async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) data = await ws.receive_json() answer = data['test'] await ws.send_str(answer) await ws.close() return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) ws = await client.ws_connect('/') expected_value = 'value' payload = '{"test": "%s"}' % expected_value await ws.send_str(payload) resp = await ws.receive() assert resp.data == expected_value async def test_send_recv_text(loop, aiohttp_client): closed = loop.create_future() async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) msg = await ws.receive_str() await ws.send_str(msg+'/answer') await ws.close() closed.set_result(1) return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) ws = await client.ws_connect('/') await ws.send_str('ask') msg = await 
ws.receive() assert msg.type == aiohttp.WSMsgType.TEXT assert 'ask/answer' == msg.data msg = await ws.receive() assert msg.type == aiohttp.WSMsgType.CLOSE assert msg.data == 1000 assert msg.extra == '' assert ws.closed assert ws.close_code == 1000 await closed async def test_send_recv_bytes(loop, aiohttp_client): closed = loop.create_future() async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) msg = await ws.receive_bytes() await ws.send_bytes(msg+b'/answer') await ws.close() closed.set_result(1) return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) ws = await client.ws_connect('/') await ws.send_bytes(b'ask') msg = await ws.receive() assert msg.type == aiohttp.WSMsgType.BINARY assert b'ask/answer' == msg.data msg = await ws.receive() assert msg.type == aiohttp.WSMsgType.CLOSE assert msg.data == 1000 assert msg.extra == '' assert ws.closed assert ws.close_code == 1000 await closed async def test_send_recv_json(loop, aiohttp_client): closed = loop.create_future() async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) data = await ws.receive_json() await ws.send_json({'response': data['request']}) await ws.close() closed.set_result(1) return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) ws = await client.ws_connect('/') await ws.send_str('{"request": "test"}') msg = await ws.receive() data = msg.json() assert msg.type == aiohttp.WSMsgType.TEXT assert data['response'] == 'test' msg = await ws.receive() assert msg.type == aiohttp.WSMsgType.CLOSE assert msg.data == 1000 assert msg.extra == '' await ws.close() await closed async def test_close_timeout(loop, aiohttp_client): aborted = loop.create_future() async def handler(request): ws = web.WebSocketResponse(timeout=0.1) await ws.prepare(request) assert 'request' == (await ws.receive_str()) await ws.send_str('reply') begin = ws._loop.time() assert 
(await ws.close()) elapsed = ws._loop.time() - begin assert elapsed < 0.201, \ 'close() should have returned before ' \ 'at most 2x timeout.' assert ws.close_code == 1006 assert isinstance(ws.exception(), asyncio.TimeoutError) aborted.set_result(1) return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) ws = await client.ws_connect('/') await ws.send_str('request') assert 'reply' == (await ws.receive_str()) # The server closes here. Then the client sends bogus messages with an # internval shorter than server-side close timeout, to make the server # hanging indefinitely. await asyncio.sleep(0.08, loop=loop) msg = await ws._reader.read() assert msg.type == WSMsgType.CLOSE await ws.send_str('hang') # i am not sure what do we test here # under uvloop this code raises RuntimeError try: await asyncio.sleep(0.08, loop=loop) await ws.send_str('hang') await asyncio.sleep(0.08, loop=loop) await ws.send_str('hang') await asyncio.sleep(0.08, loop=loop) await ws.send_str('hang') except RuntimeError: pass await asyncio.sleep(0.08, loop=loop) assert (await aborted) await ws.close() async def test_concurrent_close(loop, aiohttp_client): srv_ws = None async def handler(request): nonlocal srv_ws ws = srv_ws = web.WebSocketResponse( autoclose=False, protocols=('foo', 'bar')) await ws.prepare(request) msg = await ws.receive() assert msg.type == WSMsgType.CLOSING msg = await ws.receive() assert msg.type == WSMsgType.CLOSING await asyncio.sleep(0, loop=loop) msg = await ws.receive() assert msg.type == WSMsgType.CLOSED return ws app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) ws = await client.ws_connect('/', autoclose=False, protocols=('eggs', 'bar')) await srv_ws.close(code=1007) msg = await ws.receive() assert msg.type == WSMsgType.CLOSE await asyncio.sleep(0, loop=loop) msg = await ws.receive() assert msg.type == WSMsgType.CLOSED async def test_auto_pong_with_closing_by_peer(loop, 
aiohttp_client): closed = loop.create_future() async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) await ws.receive() msg = await ws.receive() assert msg.type == WSMsgType.CLOSE assert msg.data == 1000 assert msg.extra == 'exit message' closed.set_result(None) return ws app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) ws = await client.ws_connect('/', autoclose=False, autoping=False) await ws.ping() await ws.send_str('ask') msg = await ws.receive() assert msg.type == WSMsgType.PONG await ws.close(code=1000, message='exit message') await closed async def test_ping(loop, aiohttp_client): closed = loop.create_future() async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) await ws.ping('data') await ws.receive() closed.set_result(None) return ws app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) ws = await client.ws_connect('/', autoping=False) msg = await ws.receive() assert msg.type == WSMsgType.PING assert msg.data == b'data' await ws.pong() await ws.close() await closed async def aiohttp_client_ping(loop, aiohttp_client): closed = loop.create_future() async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) await ws.receive() closed.set_result(None) return ws app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) ws = await client.ws_connect('/', autoping=False) await ws.ping('data') msg = await ws.receive() assert msg.type == WSMsgType.PONG assert msg.data == b'data' await ws.pong() await ws.close() async def test_pong(loop, aiohttp_client): closed = loop.create_future() async def handler(request): ws = web.WebSocketResponse(autoping=False) await ws.prepare(request) msg = await ws.receive() assert msg.type == WSMsgType.PING await ws.pong('data') msg = await ws.receive() assert msg.type == WSMsgType.CLOSE assert msg.data == 1000 assert msg.extra == 'exit 
message' closed.set_result(None) return ws app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) ws = await client.ws_connect('/', autoping=False) await ws.ping('data') msg = await ws.receive() assert msg.type == WSMsgType.PONG assert msg.data == b'data' await ws.close(code=1000, message='exit message') await closed async def test_change_status(loop, aiohttp_client): closed = loop.create_future() async def handler(request): ws = web.WebSocketResponse() ws.set_status(200) assert 200 == ws.status await ws.prepare(request) assert 101 == ws.status await ws.close() closed.set_result(None) return ws app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) ws = await client.ws_connect('/', autoping=False) await ws.close() await closed await ws.close() async def test_handle_protocol(loop, aiohttp_client): closed = loop.create_future() async def handler(request): ws = web.WebSocketResponse(protocols=('foo', 'bar')) await ws.prepare(request) await ws.close() assert 'bar' == ws.ws_protocol closed.set_result(None) return ws app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) ws = await client.ws_connect('/', protocols=('eggs', 'bar')) await ws.close() await closed async def test_server_close_handshake(loop, aiohttp_client): closed = loop.create_future() async def handler(request): ws = web.WebSocketResponse(protocols=('foo', 'bar')) await ws.prepare(request) await ws.close() closed.set_result(None) return ws app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) ws = await client.ws_connect('/', autoclose=False, protocols=('eggs', 'bar')) msg = await ws.receive() assert msg.type == WSMsgType.CLOSE await ws.close() await closed async def aiohttp_client_close_handshake(loop, aiohttp_client, ceil): closed = loop.create_future() async def handler(request): ws = web.WebSocketResponse( autoclose=False, protocols=('foo', 'bar')) await 
ws.prepare(request) msg = await ws.receive() assert msg.type == WSMsgType.CLOSE assert not ws.closed await ws.close() assert ws.closed assert ws.close_code == 1007 msg = await ws.receive() assert msg.type == WSMsgType.CLOSED closed.set_result(None) return ws app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) ws = await client.ws_connect('/', autoclose=False, protocols=('eggs', 'bar')) await ws.close(code=1007) msg = await ws.receive() assert msg.type == WSMsgType.CLOSED await closed async def test_server_close_handshake_server_eats_client_messages( loop, aiohttp_client ): closed = loop.create_future() async def handler(request): ws = web.WebSocketResponse(protocols=('foo', 'bar')) await ws.prepare(request) await ws.close() closed.set_result(None) return ws app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) ws = await client.ws_connect('/', autoclose=False, autoping=False, protocols=('eggs', 'bar')) msg = await ws.receive() assert msg.type == WSMsgType.CLOSE await ws.send_str('text') await ws.send_bytes(b'bytes') await ws.ping() await ws.close() await closed async def test_receive_timeout(loop, aiohttp_client): raised = False async def handler(request): ws = web.WebSocketResponse(receive_timeout=0.1) await ws.prepare(request) try: await ws.receive() except asyncio.TimeoutError: nonlocal raised raised = True await ws.close() return ws app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) ws = await client.ws_connect('/') await ws.receive() await ws.close() assert raised async def test_custom_receive_timeout(loop, aiohttp_client): raised = False async def handler(request): ws = web.WebSocketResponse(receive_timeout=None) await ws.prepare(request) try: await ws.receive(0.1) except asyncio.TimeoutError: nonlocal raised raised = True await ws.close() return ws app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) ws 
= await client.ws_connect('/') await ws.receive() await ws.close() assert raised async def test_heartbeat(loop, aiohttp_client, ceil): async def handler(request): ws = web.WebSocketResponse(heartbeat=0.05) await ws.prepare(request) await ws.receive() await ws.close() return ws app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) ws = await client.ws_connect('/', autoping=False) msg = await ws.receive() assert msg.type == aiohttp.WSMsgType.ping await ws.close() async def test_heartbeat_no_pong(loop, aiohttp_client, ceil): cancelled = False async def handler(request): nonlocal cancelled ws = web.WebSocketResponse(heartbeat=0.05) await ws.prepare(request) try: await ws.receive() except asyncio.CancelledError: cancelled = True return ws app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) ws = await client.ws_connect('/', autoping=False) msg = await ws.receive() assert msg.type == aiohttp.WSMsgType.ping await ws.receive() assert cancelled async def test_server_ws_async_for(loop, aiohttp_server): closed = loop.create_future() async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) async for msg in ws: assert msg.type == aiohttp.WSMsgType.TEXT s = msg.data await ws.send_str(s + '/answer') await ws.close() closed.set_result(1) return ws app = web.Application() app.router.add_route('GET', '/', handler) server = await aiohttp_server(app) async with aiohttp.ClientSession(loop=loop) as sm: async with sm.ws_connect(server.make_url('/')) as resp: items = ['q1', 'q2', 'q3'] for item in items: await resp.send_str(item) msg = await resp.receive() assert msg.type == aiohttp.WSMsgType.TEXT assert item + '/answer' == msg.data await resp.close() await closed async def test_closed_async_for(loop, aiohttp_client): closed = loop.create_future() async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) messages = [] async for msg in ws: messages.append(msg) if 
'stop' == msg.data: await ws.send_str('stopping') await ws.close() assert 1 == len(messages) assert messages[0].type == WSMsgType.TEXT assert messages[0].data == 'stop' closed.set_result(None) return ws app = web.Application() app.router.add_get('/', handler) client = await aiohttp_client(app) ws = await client.ws_connect('/') await ws.send_str('stop') msg = await ws.receive() assert msg.type == WSMsgType.TEXT assert msg.data == 'stopping' await ws.close() await closed async def test_websocket_disable_keepalive(loop, aiohttp_client): async def handler(request): ws = web.WebSocketResponse() if not ws.can_prepare(request): return web.Response(text='OK') assert request.protocol._keepalive await ws.prepare(request) assert not request.protocol._keepalive assert not request.protocol._keepalive_handle await ws.send_str('OK') await ws.close() return ws app = web.Application() app.router.add_route('GET', '/', handler) client = await aiohttp_client(app) resp = await client.get('/') txt = await resp.text() assert txt == 'OK' ws = await client.ws_connect('/') data = await ws.receive_str() assert data == 'OK' aiohttp-3.0.1/tests/test_worker.py0000666000000000000000000001764413240304665015420 0ustar 00000000000000"""Tests for aiohttp/worker.py""" import asyncio import os import pathlib import socket import ssl from unittest import mock import pytest from aiohttp import web from aiohttp.test_utils import make_mocked_coro base_worker = pytest.importorskip('aiohttp.worker') try: import uvloop except ImportError: uvloop = None WRONG_LOG_FORMAT = '%a "%{Referrer}i" %(h)s %(l)s %s' ACCEPTABLE_LOG_FORMAT = '%a "%{Referrer}i" %s' # tokio event loop does not allow to override attributes def skip_if_no_dict(loop): if not hasattr(loop, '__dict__'): pytest.skip("can not override loop attributes") class BaseTestWorker: def __init__(self): self.servers = {} self.exit_code = 0 self._notify_waiter = None self.cfg = mock.Mock() self.cfg.graceful_timeout = 100 self.pid = 'pid' self.wsgi = 
web.Application() class AsyncioWorker(BaseTestWorker, base_worker.GunicornWebWorker): pass PARAMS = [AsyncioWorker] if uvloop is not None: class UvloopWorker(BaseTestWorker, base_worker.GunicornUVLoopWebWorker): pass PARAMS.append(UvloopWorker) @pytest.fixture(params=PARAMS) def worker(request, loop): asyncio.set_event_loop(loop) ret = request.param() ret.notify = mock.Mock() return ret def test_init_process(worker): with mock.patch('aiohttp.worker.asyncio') as m_asyncio: try: worker.init_process() except TypeError: pass assert m_asyncio.get_event_loop.return_value.close.called assert m_asyncio.new_event_loop.called assert m_asyncio.set_event_loop.called def test_run(worker, loop): worker.log = mock.Mock() worker.cfg = mock.Mock() worker.cfg.access_log_format = ACCEPTABLE_LOG_FORMAT worker.loop = loop worker._run = make_mocked_coro(None) with pytest.raises(SystemExit): worker.run() assert worker._run.called worker._runner.server is None assert loop.is_closed() def test_handle_quit(worker, loop): worker.loop = mock.Mock() worker.handle_quit(object(), object()) assert not worker.alive assert worker.exit_code == 0 worker.loop.call_later.asset_called_with( 0.1, worker._notify_waiter_done) def test_handle_abort(worker): with mock.patch('aiohttp.worker.sys') as m_sys: worker.handle_abort(object(), object()) assert not worker.alive assert worker.exit_code == 1 m_sys.exit.assert_called_with(1) def test__wait_next_notify(worker): worker.loop = mock.Mock() worker._notify_waiter_done = mock.Mock() fut = worker._wait_next_notify() assert worker._notify_waiter == fut worker.loop.call_later.assert_called_with(1.0, worker._notify_waiter_done, fut) def test__notify_waiter_done(worker): worker._notify_waiter = None worker._notify_waiter_done() assert worker._notify_waiter is None waiter = worker._notify_waiter = mock.Mock() worker._notify_waiter.done.return_value = False worker._notify_waiter_done() assert worker._notify_waiter is None waiter.set_result.assert_called_with(True) def 
test__notify_waiter_done_explicit_waiter(worker): worker._notify_waiter = None assert worker._notify_waiter is None waiter = worker._notify_waiter = mock.Mock() waiter.done.return_value = False waiter2 = worker._notify_waiter = mock.Mock() worker._notify_waiter_done(waiter) assert worker._notify_waiter is waiter2 waiter.set_result.assert_called_with(True) assert not waiter2.set_result.called def test_init_signals(worker): worker.loop = mock.Mock() worker.init_signals() assert worker.loop.add_signal_handler.called @pytest.mark.parametrize('source,result', [ (ACCEPTABLE_LOG_FORMAT, ACCEPTABLE_LOG_FORMAT), (AsyncioWorker.DEFAULT_GUNICORN_LOG_FORMAT, AsyncioWorker.DEFAULT_AIOHTTP_LOG_FORMAT), ]) def test__get_valid_log_format_ok(worker, source, result): assert result == worker._get_valid_log_format(source) def test__get_valid_log_format_exc(worker): with pytest.raises(ValueError) as exc: worker._get_valid_log_format(WRONG_LOG_FORMAT) assert '%(name)s' in str(exc) async def test__run_ok_parent_changed(worker, loop, aiohttp_unused_port): skip_if_no_dict(loop) worker.ppid = 0 worker.alive = True sock = socket.socket() addr = ('localhost', aiohttp_unused_port()) sock.bind(addr) worker.sockets = [sock] worker.log = mock.Mock() worker.loop = loop worker.cfg.access_log_format = ACCEPTABLE_LOG_FORMAT worker.cfg.max_requests = 0 worker.cfg.is_ssl = False worker._runner = web.AppRunner(worker.wsgi) await worker._runner.setup() await worker._run() worker.notify.assert_called_with() worker.log.info.assert_called_with("Parent changed, shutting down: %s", worker) assert worker._runner.server is None async def test__run_exc(worker, loop, aiohttp_unused_port): skip_if_no_dict(loop) worker.ppid = os.getppid() worker.alive = True sock = socket.socket() addr = ('localhost', aiohttp_unused_port()) sock.bind(addr) worker.sockets = [sock] worker.log = mock.Mock() worker.loop = loop worker.cfg.access_log_format = ACCEPTABLE_LOG_FORMAT worker.cfg.max_requests = 0 worker.cfg.is_ssl = False 
worker._runner = web.AppRunner(worker.wsgi) await worker._runner.setup() def raiser(): waiter = worker._notify_waiter worker.alive = False waiter.set_exception(RuntimeError()) loop.call_later(0.1, raiser) await worker._run() worker.notify.assert_called_with() assert worker._runner.server is None async def test__run_ok_max_requests_exceeded(worker, loop, aiohttp_unused_port): skip_if_no_dict(loop) worker.ppid = os.getppid() worker.alive = True worker.servers = {} sock = socket.socket() addr = ('localhost', aiohttp_unused_port()) sock.bind(addr) worker.sockets = [sock] worker.log = mock.Mock() worker.loop = loop worker.cfg.access_log_format = ACCEPTABLE_LOG_FORMAT worker.cfg.max_requests = 10 worker.cfg.is_ssl = False worker._runner = web.AppRunner(worker.wsgi) await worker._runner.setup() worker._runner.server.requests_count = 30 await worker._run() worker.notify.assert_called_with() worker.log.info.assert_called_with("Max requests, shutting down: %s", worker) assert worker._runner.server is None def test__create_ssl_context_without_certs_and_ciphers(worker): here = pathlib.Path(__file__).parent worker.cfg.ssl_version = ssl.PROTOCOL_SSLv23 worker.cfg.cert_reqs = ssl.CERT_OPTIONAL worker.cfg.certfile = str(here / 'sample.crt') worker.cfg.keyfile = str(here / 'sample.key') worker.cfg.ca_certs = None worker.cfg.ciphers = None crt = worker._create_ssl_context(worker.cfg) assert isinstance(crt, ssl.SSLContext) def test__create_ssl_context_with_ciphers(worker): here = pathlib.Path(__file__).parent worker.cfg.ssl_version = ssl.PROTOCOL_SSLv23 worker.cfg.cert_reqs = ssl.CERT_OPTIONAL worker.cfg.certfile = str(here / 'sample.crt') worker.cfg.keyfile = str(here / 'sample.key') worker.cfg.ca_certs = None worker.cfg.ciphers = 'PSK' ctx = worker._create_ssl_context(worker.cfg) assert isinstance(ctx, ssl.SSLContext) def test__create_ssl_context_with_ca_certs(worker): here = pathlib.Path(__file__).parent worker.cfg.ssl_version = ssl.PROTOCOL_SSLv23 worker.cfg.cert_reqs = 
ssl.CERT_OPTIONAL worker.cfg.certfile = str(here / 'sample.crt') worker.cfg.keyfile = str(here / 'sample.key') worker.cfg.ca_certs = str(here / 'sample.crt') worker.cfg.ciphers = None ctx = worker._create_ssl_context(worker.cfg) assert isinstance(ctx, ssl.SSLContext) aiohttp-3.0.1/tools/0000777000000000000000000000000013240305035012450 5ustar 00000000000000aiohttp-3.0.1/tools/build-wheels.sh0000666000000000000000000000320613240304665015401 0ustar 00000000000000#!/bin/bash if [ -n "$DEBUG" ] then set -x fi set -euo pipefail # ref: https://coderwall.com/p/fkfaqq/safer-bash-scripts-with-set-euxo-pipefail PYTHON_VERSIONS="cp35-cp35m cp36-cp36m" # Avoid creation of __pycache__/*.py[c|o] export PYTHONDONTWRITEBYTECODE=1 package_name="$1" if [ -z "$package_name" ] then &>2 echo "Please pass package name as a first argument of this script ($0)" exit 1 fi arch=`uname -m` echo echo echo "Compile wheels" for PYTHON in ${PYTHON_VERSIONS}; do /opt/python/${PYTHON}/bin/pip install -r /io/requirements/wheel.txt /opt/python/${PYTHON}/bin/pip wheel /io/ -w /io/dist/ done echo echo echo "Bundle external shared libraries into the wheels" for whl in /io/dist/${package_name}-*-linux_${arch}.whl; do echo "Repairing $whl..." auditwheel repair "$whl" -w /io/dist/ done echo echo echo "Cleanup OS specific wheels" rm -fv /io/dist/*-linux_*.whl echo echo echo "Cleanup non-$package_name wheels" find /io/dist -maxdepth 1 -type f ! 
-name "$package_name"'-*-manylinux1_*.whl' -print0 | xargs -0 rm -rf echo echo echo "Install packages and test" echo "dist directory:" ls /io/dist for PYTHON in ${PYTHON_VERSIONS}; do # clear python cache find /io -type d -name __pycache__ -print0 | xargs -0 rm -rf echo echo -n "Test $PYTHON: " /opt/python/${PYTHON}/bin/python -c "import platform; print('Building wheel for {platform} platform.'.format(platform=platform.platform()))" /opt/python/${PYTHON}/bin/pip install -r /io/requirements/ci-wheel.txt /opt/python/${PYTHON}/bin/pip install "$package_name" --no-index -f file:///io/dist /opt/python/${PYTHON}/bin/py.test /io/tests done aiohttp-3.0.1/tools/build.cmd0000777000000000000000000000150613240304665014251 0ustar 00000000000000@echo off :: To build extensions for 64 bit Python 3, we need to configure environment :: variables to use the MSVC 2010 C++ compilers from GRMSDKX_EN_DVD.iso of: :: MS Windows SDK for Windows 7 and .NET Framework 4 :: :: More details at: :: https://github.com/cython/cython/wiki/64BitCythonExtensionsOnWindows IF "%DISTUTILS_USE_SDK%"=="1" ( ECHO Configuring environment to build with MSVC on a 64bit architecture ECHO Using Windows SDK 7.1 "C:\Program Files\Microsoft SDKs\Windows\v7.1\Setup\WindowsSdkVer.exe" -q -version:v7.1 CALL "C:\Program Files\Microsoft SDKs\Windows\v7.1\Bin\SetEnv.cmd" /x64 /release SET MSSdk=1 REM Need the following to allow tox to see the SDK compiler SET TOX_TESTENV_PASSENV=DISTUTILS_USE_SDK MSSdk INCLUDE LIB ) ELSE ( ECHO Using default MSVC build environment ) CALL %* aiohttp-3.0.1/tools/check_changes.py0000666000000000000000000000226513240304665015604 0ustar 00000000000000#!/usr/bin/env python3 import sys from pathlib import Path ALLOWED_SUFFIXES = ['.feature', '.bugfix', '.doc', '.removal', '.misc'] def get_root(script_path): folder = script_path.absolute().parent while not (folder / '.git').exists(): folder = folder.parent if folder == folder.anchor: raise RuntimeError("git repo not found") return folder def 
main(argv): print('Check "CHANGES" folder... ', end='', flush=True) here = Path(argv[0]) root = get_root(here) changes = root / 'CHANGES' failed = False for fname in changes.iterdir(): if fname.name == '.gitignore': continue if fname.suffix not in ALLOWED_SUFFIXES: if not failed: print('') print(fname, 'has illegal suffix', file=sys.stderr) failed = True if failed: print('', file=sys.stderr) print('Allowed suffixes are:', ALLOWED_SUFFIXES, file=sys.stderr) print('', file=sys.stderr) else: print('OK') return int(failed) if __name__ == '__main__': sys.exit(main(sys.argv)) aiohttp-3.0.1/tools/drop_merged_branches.sh0000666000000000000000000000005513240304665017150 0ustar 00000000000000#!/usr/bin/env bash git remote prune origin aiohttp-3.0.1/tools/run_docker.sh0000666000000000000000000000215313240304665015150 0ustar 00000000000000#!/bin/bash set -e package_name="$1" if [ -z "$package_name" ] then &>2 echo "Please pass package name as a first argument of this script ($0)" exit 1 fi manylinux1_image_prefix="quay.io/pypa/manylinux1_" dock_ext_args="" declare -A docker_pull_pids=() # This syntax requires at least bash v4 for arch in x86_64 i686 do docker pull "${manylinux1_image_prefix}${arch}" & docker_pull_pids[$arch]=$! done echo Creating dist folder with privileges of host-machine user mkdir -p dist # This is required to be created with host-machine user privileges for arch in x86_64 i686 do echo echo arch_pull_pid=${docker_pull_pids[$arch]} echo Waiting for docker pull PID $arch_pull_pid to complete downloading container for $arch arch... 
wait $arch_pull_pid # await for docker image for current arch to be pulled from hub [ $arch == "i686" ] && dock_ext_args="linux32" echo Building wheel for $arch arch docker run --rm -v `pwd`:/io "${manylinux1_image_prefix}${arch}" $dock_ext_args /io/tools/build-wheels.sh "$package_name" dock_ext_args="" # Reset docker args, just in case done aiohttp-3.0.1/tox.ini0000666000000000000000000000173313240304665012637 0ustar 00000000000000[tox] envlist = check, {py35,py36}-{debug,release}-{cchardet,cython,pure}, report [testenv] deps = pytest pytest-mock # pytest-cov coverage gunicorn cchardet: cython cchardet: cchardet cython: cython commands = # --cov={envsitepackagesdir}/tests # py.test --cov={envsitepackagesdir}/aiohttp tests {posargs} coverage run -m pytest {posargs:tests} mv .coverage .coverage.{envname} setenv = debug: PYTHONASYNCIODEBUG = 1 pure: AIOHTTP_NO_EXTENSIONS = 1 basepython: py35: python3.5 py36: python3.6 whitelist_externals = coverage mv echo [testenv:check] deps = wheel flake8 pyflakes>=1.0.0 coverage commands = flake8 aiohttp examples tests python setup.py check -rm coverage erase basepython: python3.6 [testenv:report] commands = coverage combine coverage report coverage html echo "open file://{toxinidir}/htmlcov/index.html" basepython: python3.6 aiohttp-3.0.1/vendor/0000777000000000000000000000000013240305035012605 5ustar 00000000000000aiohttp-3.0.1/vendor/http-parser/0000777000000000000000000000000013240305035015056 5ustar 00000000000000aiohttp-3.0.1/vendor/http-parser/.gitignore0000666000000000000000000000037713240304665017065 0ustar 00000000000000/out/ core tags *.o test test_g test_fast bench url_parser parsertrace parsertrace_g *.mk *.Makefile *.so.* *.exe.* *.exe *.a # Visual Studio uglies *.suo *.sln *.vcxproj *.vcxproj.filters *.vcxproj.user *.opensdf *.ncrunchsolution* *.sdf *.vsp *.psess aiohttp-3.0.1/vendor/http-parser/.mailmap0000666000000000000000000000074013240304665016510 0ustar 00000000000000# update AUTHORS with: # git log --all 
--reverse --format='%aN <%aE>' | perl -ne 'BEGIN{print "# Authors ordered by first contribution.\n"} print unless $h{$_}; $h{$_} = 1' > AUTHORS Ryan Dahl Salman Haq Simon Zimmermann Thomas LE ROUX LE ROUX Thomas Thomas LE ROUX Thomas LE ROUX Fedor Indutny aiohttp-3.0.1/vendor/http-parser/.travis.yml0000666000000000000000000000020413240304665017173 0ustar 00000000000000language: c compiler: - clang - gcc script: - "make" notifications: email: false irc: - "irc.freenode.net#node-ci" aiohttp-3.0.1/vendor/http-parser/AUTHORS0000666000000000000000000000470613240304665016145 0ustar 00000000000000# Authors ordered by first contribution. Ryan Dahl Jeremy Hinegardner Sergey Shepelev Joe Damato tomika Phoenix Sol Cliff Frey Ewen Cheslack-Postava Santiago Gala Tim Becker Jeff Terrace Ben Noordhuis Nathan Rajlich Mark Nottingham Aman Gupta Tim Becker Sean Cunningham Peter Griess Salman Haq Cliff Frey Jon Kolb Fouad Mardini Paul Querna Felix Geisendörfer koichik Andre Caron Ivo Raisr James McLaughlin David Gwynne Thomas LE ROUX Randy Rizun Andre Louis Caron Simon Zimmermann Erik Dubbelboer Martell Malone Bertrand Paquet BogDan Vatra Peter Faiman Corey Richardson Tóth Tamás Cam Swords Chris Dickinson Uli Köhler Charlie Somerville Patrik Stutz Fedor Indutny runner Alexis Campailla David Wragg Vinnie Falco Alex Butum Rex Feng Alex Kocharin Mark Koopman Helge Heß Alexis La Goutte George Miroshnykov Maciej Małecki Marc O'Morain Jeff Pinner Timothy J Fontaine Akagi201 Romain Giraud Jay Satiro Arne Steen Kjell Schubert Olivier Mengué aiohttp-3.0.1/vendor/http-parser/bench.c0000666000000000000000000000651013240304665016313 0ustar 00000000000000/* Copyright Fedor Indutny. All rights reserved. 
* * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to * deal in the Software without restriction, including without limitation the * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or * sell copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS * IN THE SOFTWARE. 
*/ #include "http_parser.h" #include #include #include #include static const char data[] = "POST /joyent/http-parser HTTP/1.1\r\n" "Host: github.com\r\n" "DNT: 1\r\n" "Accept-Encoding: gzip, deflate, sdch\r\n" "Accept-Language: ru-RU,ru;q=0.8,en-US;q=0.6,en;q=0.4\r\n" "User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) " "AppleWebKit/537.36 (KHTML, like Gecko) " "Chrome/39.0.2171.65 Safari/537.36\r\n" "Accept: text/html,application/xhtml+xml,application/xml;q=0.9," "image/webp,*/*;q=0.8\r\n" "Referer: https://github.com/joyent/http-parser\r\n" "Connection: keep-alive\r\n" "Transfer-Encoding: chunked\r\n" "Cache-Control: max-age=0\r\n\r\nb\r\nhello world\r\n0\r\n\r\n"; static const size_t data_len = sizeof(data) - 1; static int on_info(http_parser* p) { return 0; } static int on_data(http_parser* p, const char *at, size_t length) { return 0; } static http_parser_settings settings = { .on_message_begin = on_info, .on_headers_complete = on_info, .on_message_complete = on_info, .on_header_field = on_data, .on_header_value = on_data, .on_url = on_data, .on_status = on_data, .on_body = on_data }; int bench(int iter_count, int silent) { struct http_parser parser; int i; int err; struct timeval start; struct timeval end; float rps; if (!silent) { err = gettimeofday(&start, NULL); assert(err == 0); } for (i = 0; i < iter_count; i++) { size_t parsed; http_parser_init(&parser, HTTP_REQUEST); parsed = http_parser_execute(&parser, &settings, data, data_len); assert(parsed == data_len); } if (!silent) { err = gettimeofday(&end, NULL); assert(err == 0); fprintf(stdout, "Benchmark result:\n"); rps = (float) (end.tv_sec - start.tv_sec) + (end.tv_usec - start.tv_usec) * 1e-6f; fprintf(stdout, "Took %f seconds to run\n", rps); rps = (float) iter_count / rps; fprintf(stdout, "%f req/sec\n", rps); fflush(stdout); } return 0; } int main(int argc, char** argv) { if (argc == 2 && strcmp(argv[1], "infinite") == 0) { for (;;) bench(5000000, 1); return 0; } else { return 
bench(5000000, 0); } } aiohttp-3.0.1/vendor/http-parser/contrib/0000777000000000000000000000000013240305035016516 5ustar 00000000000000aiohttp-3.0.1/vendor/http-parser/contrib/parsertrace.c0000666000000000000000000001013413240304665021204 0ustar 00000000000000/* Copyright Joyent, Inc. and other Node contributors. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to * deal in the Software without restriction, including without limitation the * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or * sell copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS * IN THE SOFTWARE. 
*/ /* Dump what the parser finds to stdout as it happen */ #include "http_parser.h" #include #include #include int on_message_begin(http_parser* _) { (void)_; printf("\n***MESSAGE BEGIN***\n\n"); return 0; } int on_headers_complete(http_parser* _) { (void)_; printf("\n***HEADERS COMPLETE***\n\n"); return 0; } int on_message_complete(http_parser* _) { (void)_; printf("\n***MESSAGE COMPLETE***\n\n"); return 0; } int on_url(http_parser* _, const char* at, size_t length) { (void)_; printf("Url: %.*s\n", (int)length, at); return 0; } int on_header_field(http_parser* _, const char* at, size_t length) { (void)_; printf("Header field: %.*s\n", (int)length, at); return 0; } int on_header_value(http_parser* _, const char* at, size_t length) { (void)_; printf("Header value: %.*s\n", (int)length, at); return 0; } int on_body(http_parser* _, const char* at, size_t length) { (void)_; printf("Body: %.*s\n", (int)length, at); return 0; } void usage(const char* name) { fprintf(stderr, "Usage: %s $type $filename\n" " type: -x, where x is one of {r,b,q}\n" " parses file as a Response, reQuest, or Both\n", name); exit(EXIT_FAILURE); } int main(int argc, char* argv[]) { enum http_parser_type file_type; if (argc != 3) { usage(argv[0]); } char* type = argv[1]; if (type[0] != '-') { usage(argv[0]); } switch (type[1]) { /* in the case of "-", type[1] will be NUL */ case 'r': file_type = HTTP_RESPONSE; break; case 'q': file_type = HTTP_REQUEST; break; case 'b': file_type = HTTP_BOTH; break; default: usage(argv[0]); } char* filename = argv[2]; FILE* file = fopen(filename, "r"); if (file == NULL) { perror("fopen"); goto fail; } fseek(file, 0, SEEK_END); long file_length = ftell(file); if (file_length == -1) { perror("ftell"); goto fail; } fseek(file, 0, SEEK_SET); char* data = malloc(file_length); if (fread(data, 1, file_length, file) != (size_t)file_length) { fprintf(stderr, "couldn't read entire file\n"); free(data); goto fail; } http_parser_settings settings; memset(&settings, 0, 
sizeof(settings)); settings.on_message_begin = on_message_begin; settings.on_url = on_url; settings.on_header_field = on_header_field; settings.on_header_value = on_header_value; settings.on_headers_complete = on_headers_complete; settings.on_body = on_body; settings.on_message_complete = on_message_complete; http_parser parser; http_parser_init(&parser, file_type); size_t nparsed = http_parser_execute(&parser, &settings, data, file_length); free(data); if (nparsed != (size_t)file_length) { fprintf(stderr, "Error: %s (%s)\n", http_errno_description(HTTP_PARSER_ERRNO(&parser)), http_errno_name(HTTP_PARSER_ERRNO(&parser))); goto fail; } return EXIT_SUCCESS; fail: fclose(file); return EXIT_FAILURE; } aiohttp-3.0.1/vendor/http-parser/contrib/url_parser.c0000666000000000000000000000217713240304665021057 0ustar 00000000000000#include "http_parser.h" #include #include void dump_url (const char *url, const struct http_parser_url *u) { unsigned int i; printf("\tfield_set: 0x%x, port: %u\n", u->field_set, u->port); for (i = 0; i < UF_MAX; i++) { if ((u->field_set & (1 << i)) == 0) { printf("\tfield_data[%u]: unset\n", i); continue; } printf("\tfield_data[%u]: off: %u, len: %u, part: %.*s\n", i, u->field_data[i].off, u->field_data[i].len, u->field_data[i].len, url + u->field_data[i].off); } } int main(int argc, char ** argv) { struct http_parser_url u; int len, connect, result; if (argc != 3) { printf("Syntax : %s connect|get url\n", argv[0]); return 1; } len = strlen(argv[2]); connect = strcmp("connect", argv[1]) == 0 ? 1 : 0; printf("Parsing %s, connect %d\n", argv[2], connect); http_parser_url_init(&u); result = http_parser_parse_url(argv[2], len, connect, &u); if (result != 0) { printf("Parse error : %d\n", result); return result; } printf("Parse ok, result : \n"); dump_url(argv[2], &u); return 0; } aiohttp-3.0.1/vendor/http-parser/http_parser.c0000666000000000000000000021010413240304665017563 0ustar 00000000000000/* Copyright Joyent, Inc. and other Node contributors. 
* * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to * deal in the Software without restriction, including without limitation the * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or * sell copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS * IN THE SOFTWARE. */ #include "http_parser.h" #include #include #include #include #include #include #ifndef ULLONG_MAX # define ULLONG_MAX ((uint64_t) -1) /* 2^64-1 */ #endif #ifndef MIN # define MIN(a,b) ((a) < (b) ? (a) : (b)) #endif #ifndef ARRAY_SIZE # define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0])) #endif #ifndef BIT_AT # define BIT_AT(a, i) \ (!!((unsigned int) (a)[(unsigned int) (i) >> 3] & \ (1 << ((unsigned int) (i) & 7)))) #endif #ifndef ELEM_AT # define ELEM_AT(a, i, v) ((unsigned int) (i) < ARRAY_SIZE(a) ? 
(a)[(i)] : (v)) #endif #define SET_ERRNO(e) \ do { \ parser->http_errno = (e); \ } while(0) #define CURRENT_STATE() p_state #define UPDATE_STATE(V) p_state = (enum state) (V); #define RETURN(V) \ do { \ parser->state = CURRENT_STATE(); \ return (V); \ } while (0); #define REEXECUTE() \ goto reexecute; \ #ifdef __GNUC__ # define LIKELY(X) __builtin_expect(!!(X), 1) # define UNLIKELY(X) __builtin_expect(!!(X), 0) #else # define LIKELY(X) (X) # define UNLIKELY(X) (X) #endif /* Run the notify callback FOR, returning ER if it fails */ #define CALLBACK_NOTIFY_(FOR, ER) \ do { \ assert(HTTP_PARSER_ERRNO(parser) == HPE_OK); \ \ if (LIKELY(settings->on_##FOR)) { \ parser->state = CURRENT_STATE(); \ if (UNLIKELY(0 != settings->on_##FOR(parser))) { \ SET_ERRNO(HPE_CB_##FOR); \ } \ UPDATE_STATE(parser->state); \ \ /* We either errored above or got paused; get out */ \ if (UNLIKELY(HTTP_PARSER_ERRNO(parser) != HPE_OK)) { \ return (ER); \ } \ } \ } while (0) /* Run the notify callback FOR and consume the current byte */ #define CALLBACK_NOTIFY(FOR) CALLBACK_NOTIFY_(FOR, p - data + 1) /* Run the notify callback FOR and don't consume the current byte */ #define CALLBACK_NOTIFY_NOADVANCE(FOR) CALLBACK_NOTIFY_(FOR, p - data) /* Run data callback FOR with LEN bytes, returning ER if it fails */ #define CALLBACK_DATA_(FOR, LEN, ER) \ do { \ assert(HTTP_PARSER_ERRNO(parser) == HPE_OK); \ \ if (FOR##_mark) { \ if (LIKELY(settings->on_##FOR)) { \ parser->state = CURRENT_STATE(); \ if (UNLIKELY(0 != \ settings->on_##FOR(parser, FOR##_mark, (LEN)))) { \ SET_ERRNO(HPE_CB_##FOR); \ } \ UPDATE_STATE(parser->state); \ \ /* We either errored above or got paused; get out */ \ if (UNLIKELY(HTTP_PARSER_ERRNO(parser) != HPE_OK)) { \ return (ER); \ } \ } \ FOR##_mark = NULL; \ } \ } while (0) /* Run the data callback FOR and consume the current byte */ #define CALLBACK_DATA(FOR) \ CALLBACK_DATA_(FOR, p - FOR##_mark, p - data + 1) /* Run the data callback FOR and don't consume the current byte */ 
#define CALLBACK_DATA_NOADVANCE(FOR) \ CALLBACK_DATA_(FOR, p - FOR##_mark, p - data) /* Set the mark FOR; non-destructive if mark is already set */ #define MARK(FOR) \ do { \ if (!FOR##_mark) { \ FOR##_mark = p; \ } \ } while (0) /* Don't allow the total size of the HTTP headers (including the status * line) to exceed HTTP_MAX_HEADER_SIZE. This check is here to protect * embedders against denial-of-service attacks where the attacker feeds * us a never-ending header that the embedder keeps buffering. * * This check is arguably the responsibility of embedders but we're doing * it on the embedder's behalf because most won't bother and this way we * make the web a little safer. HTTP_MAX_HEADER_SIZE is still far bigger * than any reasonable request or response so this should never affect * day-to-day operation. */ #define COUNT_HEADER_SIZE(V) \ do { \ parser->nread += (V); \ if (UNLIKELY(parser->nread > (HTTP_MAX_HEADER_SIZE))) { \ SET_ERRNO(HPE_HEADER_OVERFLOW); \ goto error; \ } \ } while (0) #define PROXY_CONNECTION "proxy-connection" #define CONNECTION "connection" #define CONTENT_LENGTH "content-length" #define TRANSFER_ENCODING "transfer-encoding" #define UPGRADE "upgrade" #define CHUNKED "chunked" #define KEEP_ALIVE "keep-alive" #define CLOSE "close" static const char *method_strings[] = { #define XX(num, name, string) #string, HTTP_METHOD_MAP(XX) #undef XX }; /* Tokens as defined by rfc 2616. Also lowercases them. * token = 1* * separators = "(" | ")" | "<" | ">" | "@" * | "," | ";" | ":" | "\" | <"> * | "/" | "[" | "]" | "?" | "=" * | "{" | "}" | SP | HT */ static const char tokens[256] = { /* 0 nul 1 soh 2 stx 3 etx 4 eot 5 enq 6 ack 7 bel */ 0, 0, 0, 0, 0, 0, 0, 0, /* 8 bs 9 ht 10 nl 11 vt 12 np 13 cr 14 so 15 si */ 0, 0, 0, 0, 0, 0, 0, 0, /* 16 dle 17 dc1 18 dc2 19 dc3 20 dc4 21 nak 22 syn 23 etb */ 0, 0, 0, 0, 0, 0, 0, 0, /* 24 can 25 em 26 sub 27 esc 28 fs 29 gs 30 rs 31 us */ 0, 0, 0, 0, 0, 0, 0, 0, /* 32 sp 33 ! 
34 " 35 # 36 $ 37 % 38 & 39 ' */ 0, '!', 0, '#', '$', '%', '&', '\'', /* 40 ( 41 ) 42 * 43 + 44 , 45 - 46 . 47 / */ 0, 0, '*', '+', 0, '-', '.', 0, /* 48 0 49 1 50 2 51 3 52 4 53 5 54 6 55 7 */ '0', '1', '2', '3', '4', '5', '6', '7', /* 56 8 57 9 58 : 59 ; 60 < 61 = 62 > 63 ? */ '8', '9', 0, 0, 0, 0, 0, 0, /* 64 @ 65 A 66 B 67 C 68 D 69 E 70 F 71 G */ 0, 'a', 'b', 'c', 'd', 'e', 'f', 'g', /* 72 H 73 I 74 J 75 K 76 L 77 M 78 N 79 O */ 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', /* 80 P 81 Q 82 R 83 S 84 T 85 U 86 V 87 W */ 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', /* 88 X 89 Y 90 Z 91 [ 92 \ 93 ] 94 ^ 95 _ */ 'x', 'y', 'z', 0, 0, 0, '^', '_', /* 96 ` 97 a 98 b 99 c 100 d 101 e 102 f 103 g */ '`', 'a', 'b', 'c', 'd', 'e', 'f', 'g', /* 104 h 105 i 106 j 107 k 108 l 109 m 110 n 111 o */ 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', /* 112 p 113 q 114 r 115 s 116 t 117 u 118 v 119 w */ 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', /* 120 x 121 y 122 z 123 { 124 | 125 } 126 ~ 127 del */ 'x', 'y', 'z', 0, '|', 0, '~', 0 }; static const int8_t unhex[256] = {-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1 ,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1 ,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1 , 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,-1,-1,-1,-1,-1,-1 ,-1,10,11,12,13,14,15,-1,-1,-1,-1,-1,-1,-1,-1,-1 ,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1 ,-1,10,11,12,13,14,15,-1,-1,-1,-1,-1,-1,-1,-1,-1 ,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1 }; #if HTTP_PARSER_STRICT # define T(v) 0 #else # define T(v) v #endif static const uint8_t normal_url_char[32] = { /* 0 nul 1 soh 2 stx 3 etx 4 eot 5 enq 6 ack 7 bel */ 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0, /* 8 bs 9 ht 10 nl 11 vt 12 np 13 cr 14 so 15 si */ 0 | T(2) | 0 | 0 | T(16) | 0 | 0 | 0, /* 16 dle 17 dc1 18 dc2 19 dc3 20 dc4 21 nak 22 syn 23 etb */ 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0, /* 24 can 25 em 26 sub 27 esc 28 fs 29 gs 30 rs 31 us */ 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0, /* 32 sp 33 ! 
34 " 35 # 36 $ 37 % 38 & 39 ' */ 0 | 2 | 4 | 0 | 16 | 32 | 64 | 128, /* 40 ( 41 ) 42 * 43 + 44 , 45 - 46 . 47 / */ 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, /* 48 0 49 1 50 2 51 3 52 4 53 5 54 6 55 7 */ 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, /* 56 8 57 9 58 : 59 ; 60 < 61 = 62 > 63 ? */ 1 | 2 | 4 | 8 | 16 | 32 | 64 | 0, /* 64 @ 65 A 66 B 67 C 68 D 69 E 70 F 71 G */ 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, /* 72 H 73 I 74 J 75 K 76 L 77 M 78 N 79 O */ 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, /* 80 P 81 Q 82 R 83 S 84 T 85 U 86 V 87 W */ 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, /* 88 X 89 Y 90 Z 91 [ 92 \ 93 ] 94 ^ 95 _ */ 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, /* 96 ` 97 a 98 b 99 c 100 d 101 e 102 f 103 g */ 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, /* 104 h 105 i 106 j 107 k 108 l 109 m 110 n 111 o */ 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, /* 112 p 113 q 114 r 115 s 116 t 117 u 118 v 119 w */ 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, /* 120 x 121 y 122 z 123 { 124 | 125 } 126 ~ 127 del */ 1 | 2 | 4 | 8 | 16 | 32 | 64 | 0, }; #undef T enum state { s_dead = 1 /* important that this is > 0 */ , s_start_req_or_res , s_res_or_resp_H , s_start_res , s_res_H , s_res_HT , s_res_HTT , s_res_HTTP , s_res_http_major , s_res_http_dot , s_res_http_minor , s_res_http_end , s_res_first_status_code , s_res_status_code , s_res_status_start , s_res_status , s_res_line_almost_done , s_start_req , s_req_method , s_req_spaces_before_url , s_req_schema , s_req_schema_slash , s_req_schema_slash_slash , s_req_server_start , s_req_server , s_req_server_with_at , s_req_path , s_req_query_string_start , s_req_query_string , s_req_fragment_start , s_req_fragment , s_req_http_start , s_req_http_H , s_req_http_HT , s_req_http_HTT , s_req_http_HTTP , s_req_http_major , s_req_http_dot , s_req_http_minor , s_req_http_end , s_req_line_almost_done , s_header_field_start , s_header_field , s_header_value_discard_ws , s_header_value_discard_ws_almost_done , s_header_value_discard_lws , s_header_value_start , s_header_value , 
s_header_value_lws , s_header_almost_done , s_chunk_size_start , s_chunk_size , s_chunk_parameters , s_chunk_size_almost_done , s_headers_almost_done , s_headers_done /* Important: 's_headers_done' must be the last 'header' state. All * states beyond this must be 'body' states. It is used for overflow * checking. See the PARSING_HEADER() macro. */ , s_chunk_data , s_chunk_data_almost_done , s_chunk_data_done , s_body_identity , s_body_identity_eof , s_message_done }; #define PARSING_HEADER(state) (state <= s_headers_done) enum header_states { h_general = 0 , h_C , h_CO , h_CON , h_matching_connection , h_matching_proxy_connection , h_matching_content_length , h_matching_transfer_encoding , h_matching_upgrade , h_connection , h_content_length , h_transfer_encoding , h_upgrade , h_matching_transfer_encoding_chunked , h_matching_connection_token_start , h_matching_connection_keep_alive , h_matching_connection_close , h_matching_connection_upgrade , h_matching_connection_token , h_transfer_encoding_chunked , h_connection_keep_alive , h_connection_close , h_connection_upgrade }; enum http_host_state { s_http_host_dead = 1 , s_http_userinfo_start , s_http_userinfo , s_http_host_start , s_http_host_v6_start , s_http_host , s_http_host_v6 , s_http_host_v6_end , s_http_host_v6_zone_start , s_http_host_v6_zone , s_http_host_port_start , s_http_host_port }; /* Macros for character classes; depends on strict-mode */ #define CR '\r' #define LF '\n' #define LOWER(c) (unsigned char)(c | 0x20) #define IS_ALPHA(c) (LOWER(c) >= 'a' && LOWER(c) <= 'z') #define IS_NUM(c) ((c) >= '0' && (c) <= '9') #define IS_ALPHANUM(c) (IS_ALPHA(c) || IS_NUM(c)) #define IS_HEX(c) (IS_NUM(c) || (LOWER(c) >= 'a' && LOWER(c) <= 'f')) #define IS_MARK(c) ((c) == '-' || (c) == '_' || (c) == '.' || \ (c) == '!' 
|| (c) == '~' || (c) == '*' || (c) == '\'' || (c) == '(' || \ (c) == ')') #define IS_USERINFO_CHAR(c) (IS_ALPHANUM(c) || IS_MARK(c) || (c) == '%' || \ (c) == ';' || (c) == ':' || (c) == '&' || (c) == '=' || (c) == '+' || \ (c) == '$' || (c) == ',') #define STRICT_TOKEN(c) (tokens[(unsigned char)c]) #if HTTP_PARSER_STRICT #define TOKEN(c) (tokens[(unsigned char)c]) #define IS_URL_CHAR(c) (BIT_AT(normal_url_char, (unsigned char)c)) #define IS_HOST_CHAR(c) (IS_ALPHANUM(c) || (c) == '.' || (c) == '-') #else #define TOKEN(c) ((c == ' ') ? ' ' : tokens[(unsigned char)c]) #define IS_URL_CHAR(c) \ (BIT_AT(normal_url_char, (unsigned char)c) || ((c) & 0x80)) #define IS_HOST_CHAR(c) \ (IS_ALPHANUM(c) || (c) == '.' || (c) == '-' || (c) == '_') #endif /** * Verify that a char is a valid visible (printable) US-ASCII * character or %x80-FF **/ #define IS_HEADER_CHAR(ch) \ (ch == CR || ch == LF || ch == 9 || ((unsigned char)ch > 31 && ch != 127)) #define start_state (parser->type == HTTP_REQUEST ? s_start_req : s_start_res) #if HTTP_PARSER_STRICT # define STRICT_CHECK(cond) \ do { \ if (cond) { \ SET_ERRNO(HPE_STRICT); \ goto error; \ } \ } while (0) # define NEW_MESSAGE() (http_should_keep_alive(parser) ? start_state : s_dead) #else # define STRICT_CHECK(cond) # define NEW_MESSAGE() start_state #endif /* Map errno values to strings for human-readable output */ #define HTTP_STRERROR_GEN(n, s) { "HPE_" #n, s }, static struct { const char *name; const char *description; } http_strerror_tab[] = { HTTP_ERRNO_MAP(HTTP_STRERROR_GEN) }; #undef HTTP_STRERROR_GEN int http_message_needs_eof(const http_parser *parser); /* Our URL parser. * * This is designed to be shared by http_parser_execute() for URL validation, * hence it has a state transition + byte-for-byte interface. In addition, it * is meant to be embedded in http_parser_parse_url(), which does the dirty * work of turning state transitions URL components for its API. 
* * This function should only be invoked with non-space characters. It is * assumed that the caller cares about (and can detect) the transition between * URL and non-URL states by looking for these. */ static enum state parse_url_char(enum state s, const char ch) { if (ch == ' ' || ch == '\r' || ch == '\n') { return s_dead; } #if HTTP_PARSER_STRICT if (ch == '\t' || ch == '\f') { return s_dead; } #endif switch (s) { case s_req_spaces_before_url: /* Proxied requests are followed by scheme of an absolute URI (alpha). * All methods except CONNECT are followed by '/' or '*'. */ if (ch == '/' || ch == '*') { return s_req_path; } if (IS_ALPHA(ch)) { return s_req_schema; } break; case s_req_schema: if (IS_ALPHA(ch)) { return s; } if (ch == ':') { return s_req_schema_slash; } break; case s_req_schema_slash: if (ch == '/') { return s_req_schema_slash_slash; } break; case s_req_schema_slash_slash: if (ch == '/') { return s_req_server_start; } break; case s_req_server_with_at: if (ch == '@') { return s_dead; } /* FALLTHROUGH */ case s_req_server_start: case s_req_server: if (ch == '/') { return s_req_path; } if (ch == '?') { return s_req_query_string_start; } if (ch == '@') { return s_req_server_with_at; } if (IS_USERINFO_CHAR(ch) || ch == '[' || ch == ']') { return s_req_server; } break; case s_req_path: if (IS_URL_CHAR(ch)) { return s; } switch (ch) { case '?': return s_req_query_string_start; case '#': return s_req_fragment_start; } break; case s_req_query_string_start: case s_req_query_string: if (IS_URL_CHAR(ch)) { return s_req_query_string; } switch (ch) { case '?': /* allow extra '?' 
in query string */ return s_req_query_string; case '#': return s_req_fragment_start; } break; case s_req_fragment_start: if (IS_URL_CHAR(ch)) { return s_req_fragment; } switch (ch) { case '?': return s_req_fragment; case '#': return s; } break; case s_req_fragment: if (IS_URL_CHAR(ch)) { return s; } switch (ch) { case '?': case '#': return s; } break; default: break; } /* We should never fall out of the switch above unless there's an error */ return s_dead; } size_t http_parser_execute (http_parser *parser, const http_parser_settings *settings, const char *data, size_t len) { char c, ch; int8_t unhex_val; const char *p = data; const char *header_field_mark = 0; const char *header_value_mark = 0; const char *url_mark = 0; const char *body_mark = 0; const char *status_mark = 0; enum state p_state = (enum state) parser->state; const unsigned int lenient = parser->lenient_http_headers; /* We're in an error state. Don't bother doing anything. */ if (HTTP_PARSER_ERRNO(parser) != HPE_OK) { return 0; } if (len == 0) { switch (CURRENT_STATE()) { case s_body_identity_eof: /* Use of CALLBACK_NOTIFY() here would erroneously return 1 byte read if * we got paused. 
*/ CALLBACK_NOTIFY_NOADVANCE(message_complete); return 0; case s_dead: case s_start_req_or_res: case s_start_res: case s_start_req: return 0; default: SET_ERRNO(HPE_INVALID_EOF_STATE); return 1; } } if (CURRENT_STATE() == s_header_field) header_field_mark = data; if (CURRENT_STATE() == s_header_value) header_value_mark = data; switch (CURRENT_STATE()) { case s_req_path: case s_req_schema: case s_req_schema_slash: case s_req_schema_slash_slash: case s_req_server_start: case s_req_server: case s_req_server_with_at: case s_req_query_string_start: case s_req_query_string: case s_req_fragment_start: case s_req_fragment: url_mark = data; break; case s_res_status: status_mark = data; break; default: break; } for (p=data; p != data + len; p++) { ch = *p; if (PARSING_HEADER(CURRENT_STATE())) COUNT_HEADER_SIZE(1); reexecute: switch (CURRENT_STATE()) { case s_dead: /* this state is used after a 'Connection: close' message * the parser will error out if it reads another message */ if (LIKELY(ch == CR || ch == LF)) break; SET_ERRNO(HPE_CLOSED_CONNECTION); goto error; case s_start_req_or_res: { if (ch == CR || ch == LF) break; parser->flags = 0; parser->content_length = ULLONG_MAX; if (ch == 'H') { UPDATE_STATE(s_res_or_resp_H); CALLBACK_NOTIFY(message_begin); } else { parser->type = HTTP_REQUEST; UPDATE_STATE(s_start_req); REEXECUTE(); } break; } case s_res_or_resp_H: if (ch == 'T') { parser->type = HTTP_RESPONSE; UPDATE_STATE(s_res_HT); } else { if (UNLIKELY(ch != 'E')) { SET_ERRNO(HPE_INVALID_CONSTANT); goto error; } parser->type = HTTP_REQUEST; parser->method = HTTP_HEAD; parser->index = 2; UPDATE_STATE(s_req_method); } break; case s_start_res: { parser->flags = 0; parser->content_length = ULLONG_MAX; switch (ch) { case 'H': UPDATE_STATE(s_res_H); break; case CR: case LF: break; default: SET_ERRNO(HPE_INVALID_CONSTANT); goto error; } CALLBACK_NOTIFY(message_begin); break; } case s_res_H: STRICT_CHECK(ch != 'T'); UPDATE_STATE(s_res_HT); break; case s_res_HT: STRICT_CHECK(ch 
!= 'T'); UPDATE_STATE(s_res_HTT); break; case s_res_HTT: STRICT_CHECK(ch != 'P'); UPDATE_STATE(s_res_HTTP); break; case s_res_HTTP: STRICT_CHECK(ch != '/'); UPDATE_STATE(s_res_http_major); break; case s_res_http_major: if (UNLIKELY(!IS_NUM(ch))) { SET_ERRNO(HPE_INVALID_VERSION); goto error; } parser->http_major = ch - '0'; UPDATE_STATE(s_res_http_dot); break; case s_res_http_dot: { if (UNLIKELY(ch != '.')) { SET_ERRNO(HPE_INVALID_VERSION); goto error; } UPDATE_STATE(s_res_http_minor); break; } case s_res_http_minor: if (UNLIKELY(!IS_NUM(ch))) { SET_ERRNO(HPE_INVALID_VERSION); goto error; } parser->http_minor = ch - '0'; UPDATE_STATE(s_res_http_end); break; case s_res_http_end: { if (UNLIKELY(ch != ' ')) { SET_ERRNO(HPE_INVALID_VERSION); goto error; } UPDATE_STATE(s_res_first_status_code); break; } case s_res_first_status_code: { if (!IS_NUM(ch)) { if (ch == ' ') { break; } SET_ERRNO(HPE_INVALID_STATUS); goto error; } parser->status_code = ch - '0'; UPDATE_STATE(s_res_status_code); break; } case s_res_status_code: { if (!IS_NUM(ch)) { switch (ch) { case ' ': UPDATE_STATE(s_res_status_start); break; case CR: case LF: UPDATE_STATE(s_res_status_start); REEXECUTE(); break; default: SET_ERRNO(HPE_INVALID_STATUS); goto error; } break; } parser->status_code *= 10; parser->status_code += ch - '0'; if (UNLIKELY(parser->status_code > 999)) { SET_ERRNO(HPE_INVALID_STATUS); goto error; } break; } case s_res_status_start: { MARK(status); UPDATE_STATE(s_res_status); parser->index = 0; if (ch == CR || ch == LF) REEXECUTE(); break; } case s_res_status: if (ch == CR) { UPDATE_STATE(s_res_line_almost_done); CALLBACK_DATA(status); break; } if (ch == LF) { UPDATE_STATE(s_header_field_start); CALLBACK_DATA(status); break; } break; case s_res_line_almost_done: STRICT_CHECK(ch != LF); UPDATE_STATE(s_header_field_start); break; case s_start_req: { if (ch == CR || ch == LF) break; parser->flags = 0; parser->content_length = ULLONG_MAX; if (UNLIKELY(!IS_ALPHA(ch))) { 
SET_ERRNO(HPE_INVALID_METHOD); goto error; } parser->method = (enum http_method) 0; parser->index = 1; switch (ch) { case 'A': parser->method = HTTP_ACL; break; case 'B': parser->method = HTTP_BIND; break; case 'C': parser->method = HTTP_CONNECT; /* or COPY, CHECKOUT */ break; case 'D': parser->method = HTTP_DELETE; break; case 'G': parser->method = HTTP_GET; break; case 'H': parser->method = HTTP_HEAD; break; case 'L': parser->method = HTTP_LOCK; /* or LINK */ break; case 'M': parser->method = HTTP_MKCOL; /* or MOVE, MKACTIVITY, MERGE, M-SEARCH, MKCALENDAR */ break; case 'N': parser->method = HTTP_NOTIFY; break; case 'O': parser->method = HTTP_OPTIONS; break; case 'P': parser->method = HTTP_POST; /* or PROPFIND|PROPPATCH|PUT|PATCH|PURGE */ break; case 'R': parser->method = HTTP_REPORT; /* or REBIND */ break; case 'S': parser->method = HTTP_SUBSCRIBE; /* or SEARCH */ break; case 'T': parser->method = HTTP_TRACE; break; case 'U': parser->method = HTTP_UNLOCK; /* or UNSUBSCRIBE, UNBIND, UNLINK */ break; default: SET_ERRNO(HPE_INVALID_METHOD); goto error; } UPDATE_STATE(s_req_method); CALLBACK_NOTIFY(message_begin); break; } case s_req_method: { const char *matcher; if (UNLIKELY(ch == '\0')) { SET_ERRNO(HPE_INVALID_METHOD); goto error; } matcher = method_strings[parser->method]; if (ch == ' ' && matcher[parser->index] == '\0') { UPDATE_STATE(s_req_spaces_before_url); } else if (ch == matcher[parser->index]) { ; /* nada */ } else if ((ch >= 'A' && ch <= 'Z') || ch == '-') { switch (parser->method << 16 | parser->index << 8 | ch) { #define XX(meth, pos, ch, new_meth) \ case (HTTP_##meth << 16 | pos << 8 | ch): \ parser->method = HTTP_##new_meth; break; XX(POST, 1, 'U', PUT) XX(POST, 1, 'A', PATCH) XX(POST, 1, 'R', PROPFIND) XX(PUT, 2, 'R', PURGE) XX(CONNECT, 1, 'H', CHECKOUT) XX(CONNECT, 2, 'P', COPY) XX(MKCOL, 1, 'O', MOVE) XX(MKCOL, 1, 'E', MERGE) XX(MKCOL, 1, '-', MSEARCH) XX(MKCOL, 2, 'A', MKACTIVITY) XX(MKCOL, 3, 'A', MKCALENDAR) XX(SUBSCRIBE, 1, 'E', SEARCH) 
XX(REPORT, 2, 'B', REBIND) XX(PROPFIND, 4, 'P', PROPPATCH) XX(LOCK, 1, 'I', LINK) XX(UNLOCK, 2, 'S', UNSUBSCRIBE) XX(UNLOCK, 2, 'B', UNBIND) XX(UNLOCK, 3, 'I', UNLINK) #undef XX default: SET_ERRNO(HPE_INVALID_METHOD); goto error; } } else { SET_ERRNO(HPE_INVALID_METHOD); goto error; } ++parser->index; break; } case s_req_spaces_before_url: { if (ch == ' ') break; MARK(url); if (parser->method == HTTP_CONNECT) { UPDATE_STATE(s_req_server_start); } UPDATE_STATE(parse_url_char(CURRENT_STATE(), ch)); if (UNLIKELY(CURRENT_STATE() == s_dead)) { SET_ERRNO(HPE_INVALID_URL); goto error; } break; } case s_req_schema: case s_req_schema_slash: case s_req_schema_slash_slash: case s_req_server_start: { switch (ch) { /* No whitespace allowed here */ case ' ': case CR: case LF: SET_ERRNO(HPE_INVALID_URL); goto error; default: UPDATE_STATE(parse_url_char(CURRENT_STATE(), ch)); if (UNLIKELY(CURRENT_STATE() == s_dead)) { SET_ERRNO(HPE_INVALID_URL); goto error; } } break; } case s_req_server: case s_req_server_with_at: case s_req_path: case s_req_query_string_start: case s_req_query_string: case s_req_fragment_start: case s_req_fragment: { switch (ch) { case ' ': UPDATE_STATE(s_req_http_start); CALLBACK_DATA(url); break; case CR: case LF: parser->http_major = 0; parser->http_minor = 9; UPDATE_STATE((ch == CR) ? 
s_req_line_almost_done : s_header_field_start); CALLBACK_DATA(url); break; default: UPDATE_STATE(parse_url_char(CURRENT_STATE(), ch)); if (UNLIKELY(CURRENT_STATE() == s_dead)) { SET_ERRNO(HPE_INVALID_URL); goto error; } } break; } case s_req_http_start: switch (ch) { case 'H': UPDATE_STATE(s_req_http_H); break; case ' ': break; default: SET_ERRNO(HPE_INVALID_CONSTANT); goto error; } break; case s_req_http_H: STRICT_CHECK(ch != 'T'); UPDATE_STATE(s_req_http_HT); break; case s_req_http_HT: STRICT_CHECK(ch != 'T'); UPDATE_STATE(s_req_http_HTT); break; case s_req_http_HTT: STRICT_CHECK(ch != 'P'); UPDATE_STATE(s_req_http_HTTP); break; case s_req_http_HTTP: STRICT_CHECK(ch != '/'); UPDATE_STATE(s_req_http_major); break; case s_req_http_major: if (UNLIKELY(!IS_NUM(ch))) { SET_ERRNO(HPE_INVALID_VERSION); goto error; } parser->http_major = ch - '0'; UPDATE_STATE(s_req_http_dot); break; case s_req_http_dot: { if (UNLIKELY(ch != '.')) { SET_ERRNO(HPE_INVALID_VERSION); goto error; } UPDATE_STATE(s_req_http_minor); break; } case s_req_http_minor: if (UNLIKELY(!IS_NUM(ch))) { SET_ERRNO(HPE_INVALID_VERSION); goto error; } parser->http_minor = ch - '0'; UPDATE_STATE(s_req_http_end); break; case s_req_http_end: { if (ch == CR) { UPDATE_STATE(s_req_line_almost_done); break; } if (ch == LF) { UPDATE_STATE(s_header_field_start); break; } SET_ERRNO(HPE_INVALID_VERSION); goto error; break; } /* end of request line */ case s_req_line_almost_done: { if (UNLIKELY(ch != LF)) { SET_ERRNO(HPE_LF_EXPECTED); goto error; } UPDATE_STATE(s_header_field_start); break; } case s_header_field_start: { if (ch == CR) { UPDATE_STATE(s_headers_almost_done); break; } if (ch == LF) { /* they might be just sending \n instead of \r\n so this would be * the second \n to denote the end of headers*/ UPDATE_STATE(s_headers_almost_done); REEXECUTE(); } c = TOKEN(ch); if (UNLIKELY(!c)) { SET_ERRNO(HPE_INVALID_HEADER_TOKEN); goto error; } MARK(header_field); parser->index = 0; UPDATE_STATE(s_header_field); switch 
(c) { case 'c': parser->header_state = h_C; break; case 'p': parser->header_state = h_matching_proxy_connection; break; case 't': parser->header_state = h_matching_transfer_encoding; break; case 'u': parser->header_state = h_matching_upgrade; break; default: parser->header_state = h_general; break; } break; } case s_header_field: { const char* start = p; for (; p != data + len; p++) { ch = *p; c = TOKEN(ch); if (!c) break; switch (parser->header_state) { case h_general: break; case h_C: parser->index++; parser->header_state = (c == 'o' ? h_CO : h_general); break; case h_CO: parser->index++; parser->header_state = (c == 'n' ? h_CON : h_general); break; case h_CON: parser->index++; switch (c) { case 'n': parser->header_state = h_matching_connection; break; case 't': parser->header_state = h_matching_content_length; break; default: parser->header_state = h_general; break; } break; /* connection */ case h_matching_connection: parser->index++; if (parser->index > sizeof(CONNECTION)-1 || c != CONNECTION[parser->index]) { parser->header_state = h_general; } else if (parser->index == sizeof(CONNECTION)-2) { parser->header_state = h_connection; } break; /* proxy-connection */ case h_matching_proxy_connection: parser->index++; if (parser->index > sizeof(PROXY_CONNECTION)-1 || c != PROXY_CONNECTION[parser->index]) { parser->header_state = h_general; } else if (parser->index == sizeof(PROXY_CONNECTION)-2) { parser->header_state = h_connection; } break; /* content-length */ case h_matching_content_length: parser->index++; if (parser->index > sizeof(CONTENT_LENGTH)-1 || c != CONTENT_LENGTH[parser->index]) { parser->header_state = h_general; } else if (parser->index == sizeof(CONTENT_LENGTH)-2) { parser->header_state = h_content_length; } break; /* transfer-encoding */ case h_matching_transfer_encoding: parser->index++; if (parser->index > sizeof(TRANSFER_ENCODING)-1 || c != TRANSFER_ENCODING[parser->index]) { parser->header_state = h_general; } else if (parser->index == 
sizeof(TRANSFER_ENCODING)-2) { parser->header_state = h_transfer_encoding; } break; /* upgrade */ case h_matching_upgrade: parser->index++; if (parser->index > sizeof(UPGRADE)-1 || c != UPGRADE[parser->index]) { parser->header_state = h_general; } else if (parser->index == sizeof(UPGRADE)-2) { parser->header_state = h_upgrade; } break; case h_connection: case h_content_length: case h_transfer_encoding: case h_upgrade: if (ch != ' ') parser->header_state = h_general; break; default: assert(0 && "Unknown header_state"); break; } } COUNT_HEADER_SIZE(p - start); if (p == data + len) { --p; break; } if (ch == ':') { UPDATE_STATE(s_header_value_discard_ws); CALLBACK_DATA(header_field); break; } SET_ERRNO(HPE_INVALID_HEADER_TOKEN); goto error; } case s_header_value_discard_ws: if (ch == ' ' || ch == '\t') break; if (ch == CR) { UPDATE_STATE(s_header_value_discard_ws_almost_done); break; } if (ch == LF) { UPDATE_STATE(s_header_value_discard_lws); break; } /* FALLTHROUGH */ case s_header_value_start: { MARK(header_value); UPDATE_STATE(s_header_value); parser->index = 0; c = LOWER(ch); switch (parser->header_state) { case h_upgrade: parser->flags |= F_UPGRADE; parser->header_state = h_general; break; case h_transfer_encoding: /* looking for 'Transfer-Encoding: chunked' */ if ('c' == c) { parser->header_state = h_matching_transfer_encoding_chunked; } else { parser->header_state = h_general; } break; case h_content_length: if (UNLIKELY(!IS_NUM(ch))) { SET_ERRNO(HPE_INVALID_CONTENT_LENGTH); goto error; } if (parser->flags & F_CONTENTLENGTH) { SET_ERRNO(HPE_UNEXPECTED_CONTENT_LENGTH); goto error; } parser->flags |= F_CONTENTLENGTH; parser->content_length = ch - '0'; break; case h_connection: /* looking for 'Connection: keep-alive' */ if (c == 'k') { parser->header_state = h_matching_connection_keep_alive; /* looking for 'Connection: close' */ } else if (c == 'c') { parser->header_state = h_matching_connection_close; } else if (c == 'u') { parser->header_state = 
h_matching_connection_upgrade; } else { parser->header_state = h_matching_connection_token; } break; /* Multi-value `Connection` header */ case h_matching_connection_token_start: break; default: parser->header_state = h_general; break; } break; } case s_header_value: { const char* start = p; enum header_states h_state = (enum header_states) parser->header_state; for (; p != data + len; p++) { ch = *p; if (ch == CR) { UPDATE_STATE(s_header_almost_done); parser->header_state = h_state; CALLBACK_DATA(header_value); break; } if (ch == LF) { UPDATE_STATE(s_header_almost_done); COUNT_HEADER_SIZE(p - start); parser->header_state = h_state; CALLBACK_DATA_NOADVANCE(header_value); REEXECUTE(); } if (!lenient && !IS_HEADER_CHAR(ch)) { SET_ERRNO(HPE_INVALID_HEADER_TOKEN); goto error; } c = LOWER(ch); switch (h_state) { case h_general: { const char* p_cr; const char* p_lf; size_t limit = data + len - p; limit = MIN(limit, HTTP_MAX_HEADER_SIZE); p_cr = (const char*) memchr(p, CR, limit); p_lf = (const char*) memchr(p, LF, limit); if (p_cr != NULL) { if (p_lf != NULL && p_cr >= p_lf) p = p_lf; else p = p_cr; } else if (UNLIKELY(p_lf != NULL)) { p = p_lf; } else { p = data + len; } --p; break; } case h_connection: case h_transfer_encoding: assert(0 && "Shouldn't get here."); break; case h_content_length: { uint64_t t; if (ch == ' ') break; if (UNLIKELY(!IS_NUM(ch))) { SET_ERRNO(HPE_INVALID_CONTENT_LENGTH); parser->header_state = h_state; goto error; } t = parser->content_length; t *= 10; t += ch - '0'; /* Overflow? Test against a conservative limit for simplicity. 
*/ if (UNLIKELY((ULLONG_MAX - 10) / 10 < parser->content_length)) { SET_ERRNO(HPE_INVALID_CONTENT_LENGTH); parser->header_state = h_state; goto error; } parser->content_length = t; break; } /* Transfer-Encoding: chunked */ case h_matching_transfer_encoding_chunked: parser->index++; if (parser->index > sizeof(CHUNKED)-1 || c != CHUNKED[parser->index]) { h_state = h_general; } else if (parser->index == sizeof(CHUNKED)-2) { h_state = h_transfer_encoding_chunked; } break; case h_matching_connection_token_start: /* looking for 'Connection: keep-alive' */ if (c == 'k') { h_state = h_matching_connection_keep_alive; /* looking for 'Connection: close' */ } else if (c == 'c') { h_state = h_matching_connection_close; } else if (c == 'u') { h_state = h_matching_connection_upgrade; } else if (STRICT_TOKEN(c)) { h_state = h_matching_connection_token; } else if (c == ' ' || c == '\t') { /* Skip lws */ } else { h_state = h_general; } break; /* looking for 'Connection: keep-alive' */ case h_matching_connection_keep_alive: parser->index++; if (parser->index > sizeof(KEEP_ALIVE)-1 || c != KEEP_ALIVE[parser->index]) { h_state = h_matching_connection_token; } else if (parser->index == sizeof(KEEP_ALIVE)-2) { h_state = h_connection_keep_alive; } break; /* looking for 'Connection: close' */ case h_matching_connection_close: parser->index++; if (parser->index > sizeof(CLOSE)-1 || c != CLOSE[parser->index]) { h_state = h_matching_connection_token; } else if (parser->index == sizeof(CLOSE)-2) { h_state = h_connection_close; } break; /* looking for 'Connection: upgrade' */ case h_matching_connection_upgrade: parser->index++; if (parser->index > sizeof(UPGRADE) - 1 || c != UPGRADE[parser->index]) { h_state = h_matching_connection_token; } else if (parser->index == sizeof(UPGRADE)-2) { h_state = h_connection_upgrade; } break; case h_matching_connection_token: if (ch == ',') { h_state = h_matching_connection_token_start; parser->index = 0; } break; case h_transfer_encoding_chunked: if (ch != ' 
') h_state = h_general; break; case h_connection_keep_alive: case h_connection_close: case h_connection_upgrade: if (ch == ',') { if (h_state == h_connection_keep_alive) { parser->flags |= F_CONNECTION_KEEP_ALIVE; } else if (h_state == h_connection_close) { parser->flags |= F_CONNECTION_CLOSE; } else if (h_state == h_connection_upgrade) { parser->flags |= F_CONNECTION_UPGRADE; } h_state = h_matching_connection_token_start; parser->index = 0; } else if (ch != ' ') { h_state = h_matching_connection_token; } break; default: UPDATE_STATE(s_header_value); h_state = h_general; break; } } parser->header_state = h_state; COUNT_HEADER_SIZE(p - start); if (p == data + len) --p; break; } case s_header_almost_done: { if (UNLIKELY(ch != LF)) { SET_ERRNO(HPE_LF_EXPECTED); goto error; } UPDATE_STATE(s_header_value_lws); break; } case s_header_value_lws: { if (ch == ' ' || ch == '\t') { UPDATE_STATE(s_header_value_start); REEXECUTE(); } /* finished the header */ switch (parser->header_state) { case h_connection_keep_alive: parser->flags |= F_CONNECTION_KEEP_ALIVE; break; case h_connection_close: parser->flags |= F_CONNECTION_CLOSE; break; case h_transfer_encoding_chunked: parser->flags |= F_CHUNKED; break; case h_connection_upgrade: parser->flags |= F_CONNECTION_UPGRADE; break; default: break; } UPDATE_STATE(s_header_field_start); REEXECUTE(); } case s_header_value_discard_ws_almost_done: { STRICT_CHECK(ch != LF); UPDATE_STATE(s_header_value_discard_lws); break; } case s_header_value_discard_lws: { if (ch == ' ' || ch == '\t') { UPDATE_STATE(s_header_value_discard_ws); break; } else { switch (parser->header_state) { case h_connection_keep_alive: parser->flags |= F_CONNECTION_KEEP_ALIVE; break; case h_connection_close: parser->flags |= F_CONNECTION_CLOSE; break; case h_connection_upgrade: parser->flags |= F_CONNECTION_UPGRADE; break; case h_transfer_encoding_chunked: parser->flags |= F_CHUNKED; break; default: break; } /* header value was empty */ MARK(header_value); 
UPDATE_STATE(s_header_field_start); CALLBACK_DATA_NOADVANCE(header_value); REEXECUTE(); } } case s_headers_almost_done: { STRICT_CHECK(ch != LF); if (parser->flags & F_TRAILING) { /* End of a chunked request */ UPDATE_STATE(s_message_done); CALLBACK_NOTIFY_NOADVANCE(chunk_complete); REEXECUTE(); } /* Cannot use chunked encoding and a content-length header together per the HTTP specification. */ if ((parser->flags & F_CHUNKED) && (parser->flags & F_CONTENTLENGTH)) { SET_ERRNO(HPE_UNEXPECTED_CONTENT_LENGTH); goto error; } UPDATE_STATE(s_headers_done); /* Set this here so that on_headers_complete() callbacks can see it */ if ((parser->flags & F_UPGRADE) && (parser->flags & F_CONNECTION_UPGRADE)) { /* For responses, "Upgrade: foo" and "Connection: upgrade" are * mandatory only when it is a 101 Switching Protocols response, * otherwise it is purely informational, to announce support. */ parser->upgrade = (parser->type == HTTP_REQUEST || parser->status_code == 101); } else { parser->upgrade = (parser->method == HTTP_CONNECT); } /* Here we call the headers_complete callback. This is somewhat * different than other callbacks because if the user returns 1, we * will interpret that as saying that this message has no body. This * is needed for the annoying case of recieving a response to a HEAD * request. * * We'd like to use CALLBACK_NOTIFY_NOADVANCE() here but we cannot, so * we have to simulate it by handling a change in errno below. 
*/ if (settings->on_headers_complete) { switch (settings->on_headers_complete(parser)) { case 0: break; case 2: parser->upgrade = 1; /* FALLTHROUGH */ case 1: parser->flags |= F_SKIPBODY; break; default: SET_ERRNO(HPE_CB_headers_complete); RETURN(p - data); /* Error */ } } if (HTTP_PARSER_ERRNO(parser) != HPE_OK) { RETURN(p - data); } REEXECUTE(); } case s_headers_done: { int hasBody; STRICT_CHECK(ch != LF); parser->nread = 0; hasBody = parser->flags & F_CHUNKED || (parser->content_length > 0 && parser->content_length != ULLONG_MAX); if (parser->upgrade && (parser->method == HTTP_CONNECT || (parser->flags & F_SKIPBODY) || !hasBody)) { /* Exit, the rest of the message is in a different protocol. */ UPDATE_STATE(NEW_MESSAGE()); CALLBACK_NOTIFY(message_complete); RETURN((p - data) + 1); } if (parser->flags & F_SKIPBODY) { UPDATE_STATE(NEW_MESSAGE()); CALLBACK_NOTIFY(message_complete); } else if (parser->flags & F_CHUNKED) { /* chunked encoding - ignore Content-Length header */ UPDATE_STATE(s_chunk_size_start); } else { if (parser->content_length == 0) { /* Content-Length header given but zero: Content-Length: 0\r\n */ UPDATE_STATE(NEW_MESSAGE()); CALLBACK_NOTIFY(message_complete); } else if (parser->content_length != ULLONG_MAX) { /* Content-Length header given and non-zero */ UPDATE_STATE(s_body_identity); } else { if (!http_message_needs_eof(parser)) { /* Assume content-length 0 - read the next */ UPDATE_STATE(NEW_MESSAGE()); CALLBACK_NOTIFY(message_complete); } else { /* Read body until EOF */ UPDATE_STATE(s_body_identity_eof); } } } break; } case s_body_identity: { uint64_t to_read = MIN(parser->content_length, (uint64_t) ((data + len) - p)); assert(parser->content_length != 0 && parser->content_length != ULLONG_MAX); /* The difference between advancing content_length and p is because * the latter will automaticaly advance on the next loop iteration. * Further, if content_length ends up at 0, we want to see the last * byte again for our message complete callback. 
*/ MARK(body); parser->content_length -= to_read; p += to_read - 1; if (parser->content_length == 0) { UPDATE_STATE(s_message_done); /* Mimic CALLBACK_DATA_NOADVANCE() but with one extra byte. * * The alternative to doing this is to wait for the next byte to * trigger the data callback, just as in every other case. The * problem with this is that this makes it difficult for the test * harness to distinguish between complete-on-EOF and * complete-on-length. It's not clear that this distinction is * important for applications, but let's keep it for now. */ CALLBACK_DATA_(body, p - body_mark + 1, p - data); REEXECUTE(); } break; } /* read until EOF */ case s_body_identity_eof: MARK(body); p = data + len - 1; break; case s_message_done: UPDATE_STATE(NEW_MESSAGE()); CALLBACK_NOTIFY(message_complete); if (parser->upgrade) { /* Exit, the rest of the message is in a different protocol. */ RETURN((p - data) + 1); } break; case s_chunk_size_start: { assert(parser->nread == 1); assert(parser->flags & F_CHUNKED); unhex_val = unhex[(unsigned char)ch]; if (UNLIKELY(unhex_val == -1)) { SET_ERRNO(HPE_INVALID_CHUNK_SIZE); goto error; } parser->content_length = unhex_val; UPDATE_STATE(s_chunk_size); break; } case s_chunk_size: { uint64_t t; assert(parser->flags & F_CHUNKED); if (ch == CR) { UPDATE_STATE(s_chunk_size_almost_done); break; } unhex_val = unhex[(unsigned char)ch]; if (unhex_val == -1) { if (ch == ';' || ch == ' ') { UPDATE_STATE(s_chunk_parameters); break; } SET_ERRNO(HPE_INVALID_CHUNK_SIZE); goto error; } t = parser->content_length; t *= 16; t += unhex_val; /* Overflow? Test against a conservative limit for simplicity. */ if (UNLIKELY((ULLONG_MAX - 16) / 16 < parser->content_length)) { SET_ERRNO(HPE_INVALID_CONTENT_LENGTH); goto error; } parser->content_length = t; break; } case s_chunk_parameters: { assert(parser->flags & F_CHUNKED); /* just ignore this shit. 
TODO check for overflow */ if (ch == CR) { UPDATE_STATE(s_chunk_size_almost_done); break; } break; } case s_chunk_size_almost_done: { assert(parser->flags & F_CHUNKED); STRICT_CHECK(ch != LF); parser->nread = 0; if (parser->content_length == 0) { parser->flags |= F_TRAILING; UPDATE_STATE(s_header_field_start); } else { UPDATE_STATE(s_chunk_data); } CALLBACK_NOTIFY(chunk_header); break; } case s_chunk_data: { uint64_t to_read = MIN(parser->content_length, (uint64_t) ((data + len) - p)); assert(parser->flags & F_CHUNKED); assert(parser->content_length != 0 && parser->content_length != ULLONG_MAX); /* See the explanation in s_body_identity for why the content * length and data pointers are managed this way. */ MARK(body); parser->content_length -= to_read; p += to_read - 1; if (parser->content_length == 0) { UPDATE_STATE(s_chunk_data_almost_done); } break; } case s_chunk_data_almost_done: assert(parser->flags & F_CHUNKED); assert(parser->content_length == 0); STRICT_CHECK(ch != CR); UPDATE_STATE(s_chunk_data_done); CALLBACK_DATA(body); break; case s_chunk_data_done: assert(parser->flags & F_CHUNKED); STRICT_CHECK(ch != LF); parser->nread = 0; UPDATE_STATE(s_chunk_size_start); CALLBACK_NOTIFY(chunk_complete); break; default: assert(0 && "unhandled state"); SET_ERRNO(HPE_INVALID_INTERNAL_STATE); goto error; } } /* Run callbacks for any marks that we have leftover after we ran our of * bytes. There should be at most one of these set, so it's OK to invoke * them in series (unset marks will not result in callbacks). * * We use the NOADVANCE() variety of callbacks here because 'p' has already * overflowed 'data' and this allows us to correct for the off-by-one that * we'd otherwise have (since CALLBACK_DATA() is meant to be run with a 'p' * value that's in-bounds). */ assert(((header_field_mark ? 1 : 0) + (header_value_mark ? 1 : 0) + (url_mark ? 1 : 0) + (body_mark ? 1 : 0) + (status_mark ? 
1 : 0)) <= 1); CALLBACK_DATA_NOADVANCE(header_field); CALLBACK_DATA_NOADVANCE(header_value); CALLBACK_DATA_NOADVANCE(url); CALLBACK_DATA_NOADVANCE(body); CALLBACK_DATA_NOADVANCE(status); RETURN(len); error: if (HTTP_PARSER_ERRNO(parser) == HPE_OK) { SET_ERRNO(HPE_UNKNOWN); } RETURN(p - data); } /* Does the parser need to see an EOF to find the end of the message? */ int http_message_needs_eof (const http_parser *parser) { if (parser->type == HTTP_REQUEST) { return 0; } /* See RFC 2616 section 4.4 */ if (parser->status_code / 100 == 1 || /* 1xx e.g. Continue */ parser->status_code == 204 || /* No Content */ parser->status_code == 304 || /* Not Modified */ parser->flags & F_SKIPBODY) { /* response to a HEAD request */ return 0; } if ((parser->flags & F_CHUNKED) || parser->content_length != ULLONG_MAX) { return 0; } return 1; } int http_should_keep_alive (const http_parser *parser) { if (parser->http_major > 0 && parser->http_minor > 0) { /* HTTP/1.1 */ if (parser->flags & F_CONNECTION_CLOSE) { return 0; } } else { /* HTTP/1.0 or earlier */ if (!(parser->flags & F_CONNECTION_KEEP_ALIVE)) { return 0; } } return !http_message_needs_eof(parser); } const char * http_method_str (enum http_method m) { return ELEM_AT(method_strings, m, ""); } void http_parser_init (http_parser *parser, enum http_parser_type t) { void *data = parser->data; /* preserve application data */ memset(parser, 0, sizeof(*parser)); parser->data = data; parser->type = t; parser->state = (t == HTTP_REQUEST ? s_start_req : (t == HTTP_RESPONSE ? 
s_start_res : s_start_req_or_res)); parser->http_errno = HPE_OK; } void http_parser_settings_init(http_parser_settings *settings) { memset(settings, 0, sizeof(*settings)); } const char * http_errno_name(enum http_errno err) { assert(((size_t) err) < ARRAY_SIZE(http_strerror_tab)); return http_strerror_tab[err].name; } const char * http_errno_description(enum http_errno err) { assert(((size_t) err) < ARRAY_SIZE(http_strerror_tab)); return http_strerror_tab[err].description; } static enum http_host_state http_parse_host_char(enum http_host_state s, const char ch) { switch(s) { case s_http_userinfo: case s_http_userinfo_start: if (ch == '@') { return s_http_host_start; } if (IS_USERINFO_CHAR(ch)) { return s_http_userinfo; } break; case s_http_host_start: if (ch == '[') { return s_http_host_v6_start; } if (IS_HOST_CHAR(ch)) { return s_http_host; } break; case s_http_host: if (IS_HOST_CHAR(ch)) { return s_http_host; } /* FALLTHROUGH */ case s_http_host_v6_end: if (ch == ':') { return s_http_host_port_start; } break; case s_http_host_v6: if (ch == ']') { return s_http_host_v6_end; } /* FALLTHROUGH */ case s_http_host_v6_start: if (IS_HEX(ch) || ch == ':' || ch == '.') { return s_http_host_v6; } if (s == s_http_host_v6 && ch == '%') { return s_http_host_v6_zone_start; } break; case s_http_host_v6_zone: if (ch == ']') { return s_http_host_v6_end; } /* FALLTHROUGH */ case s_http_host_v6_zone_start: /* RFC 6874 Zone ID consists of 1*( unreserved / pct-encoded) */ if (IS_ALPHANUM(ch) || ch == '%' || ch == '.' 
|| ch == '-' || ch == '_' || ch == '~') { return s_http_host_v6_zone; } break; case s_http_host_port: case s_http_host_port_start: if (IS_NUM(ch)) { return s_http_host_port; } break; default: break; } return s_http_host_dead; } static int http_parse_host(const char * buf, struct http_parser_url *u, int found_at) { enum http_host_state s; const char *p; size_t buflen = u->field_data[UF_HOST].off + u->field_data[UF_HOST].len; assert(u->field_set & (1 << UF_HOST)); u->field_data[UF_HOST].len = 0; s = found_at ? s_http_userinfo_start : s_http_host_start; for (p = buf + u->field_data[UF_HOST].off; p < buf + buflen; p++) { enum http_host_state new_s = http_parse_host_char(s, *p); if (new_s == s_http_host_dead) { return 1; } switch(new_s) { case s_http_host: if (s != s_http_host) { u->field_data[UF_HOST].off = p - buf; } u->field_data[UF_HOST].len++; break; case s_http_host_v6: if (s != s_http_host_v6) { u->field_data[UF_HOST].off = p - buf; } u->field_data[UF_HOST].len++; break; case s_http_host_v6_zone_start: case s_http_host_v6_zone: u->field_data[UF_HOST].len++; break; case s_http_host_port: if (s != s_http_host_port) { u->field_data[UF_PORT].off = p - buf; u->field_data[UF_PORT].len = 0; u->field_set |= (1 << UF_PORT); } u->field_data[UF_PORT].len++; break; case s_http_userinfo: if (s != s_http_userinfo) { u->field_data[UF_USERINFO].off = p - buf ; u->field_data[UF_USERINFO].len = 0; u->field_set |= (1 << UF_USERINFO); } u->field_data[UF_USERINFO].len++; break; default: break; } s = new_s; } /* Make sure we don't end somewhere unexpected */ switch (s) { case s_http_host_start: case s_http_host_v6_start: case s_http_host_v6: case s_http_host_v6_zone_start: case s_http_host_v6_zone: case s_http_host_port_start: case s_http_userinfo: case s_http_userinfo_start: return 1; default: break; } return 0; } void http_parser_url_init(struct http_parser_url *u) { memset(u, 0, sizeof(*u)); } int http_parser_parse_url(const char *buf, size_t buflen, int is_connect, struct 
http_parser_url *u) { enum state s; const char *p; enum http_parser_url_fields uf, old_uf; int found_at = 0; u->port = u->field_set = 0; s = is_connect ? s_req_server_start : s_req_spaces_before_url; old_uf = UF_MAX; for (p = buf; p < buf + buflen; p++) { s = parse_url_char(s, *p); /* Figure out the next field that we're operating on */ switch (s) { case s_dead: return 1; /* Skip delimeters */ case s_req_schema_slash: case s_req_schema_slash_slash: case s_req_server_start: case s_req_query_string_start: case s_req_fragment_start: continue; case s_req_schema: uf = UF_SCHEMA; break; case s_req_server_with_at: found_at = 1; /* FALLTHROUGH */ case s_req_server: uf = UF_HOST; break; case s_req_path: uf = UF_PATH; break; case s_req_query_string: uf = UF_QUERY; break; case s_req_fragment: uf = UF_FRAGMENT; break; default: assert(!"Unexpected state"); return 1; } /* Nothing's changed; soldier on */ if (uf == old_uf) { u->field_data[uf].len++; continue; } u->field_data[uf].off = p - buf; u->field_data[uf].len = 1; u->field_set |= (1 << uf); old_uf = uf; } /* host must be present if there is a schema */ /* parsing http:///toto will fail */ if ((u->field_set & (1 << UF_SCHEMA)) && (u->field_set & (1 << UF_HOST)) == 0) { return 1; } if (u->field_set & (1 << UF_HOST)) { if (http_parse_host(buf, u, found_at) != 0) { return 1; } } /* CONNECT requests can only contain "hostname:port" */ if (is_connect && u->field_set != ((1 << UF_HOST)|(1 << UF_PORT))) { return 1; } if (u->field_set & (1 << UF_PORT)) { /* Don't bother with endp; we've already validated the string */ unsigned long v = strtoul(buf + u->field_data[UF_PORT].off, NULL, 10); /* Ports have a max value of 2^16 */ if (v > 0xffff) { return 1; } u->port = (uint16_t) v; } return 0; } void http_parser_pause(http_parser *parser, int paused) { /* Users should only be pausing/unpausing a parser that is not in an error * state. In non-debug builds, there's not much that we can do about this * other than ignore it. 
*/ if (HTTP_PARSER_ERRNO(parser) == HPE_OK || HTTP_PARSER_ERRNO(parser) == HPE_PAUSED) { SET_ERRNO((paused) ? HPE_PAUSED : HPE_OK); } else { assert(0 && "Attempting to pause parser in error state"); } } int http_body_is_final(const struct http_parser *parser) { return parser->state == s_message_done; } unsigned long http_parser_version(void) { return HTTP_PARSER_VERSION_MAJOR * 0x10000 | HTTP_PARSER_VERSION_MINOR * 0x00100 | HTTP_PARSER_VERSION_PATCH * 0x00001; } aiohttp-3.0.1/vendor/http-parser/http_parser.gyp0000666000000000000000000000544713240304665020154 0ustar 00000000000000# This file is used with the GYP meta build system. # http://code.google.com/p/gyp/ # To build try this: # svn co http://gyp.googlecode.com/svn/trunk gyp # ./gyp/gyp -f make --depth=`pwd` http_parser.gyp # ./out/Debug/test { 'target_defaults': { 'default_configuration': 'Debug', 'configurations': { # TODO: hoist these out and put them somewhere common, because # RuntimeLibrary MUST MATCH across the entire project 'Debug': { 'defines': [ 'DEBUG', '_DEBUG' ], 'cflags': [ '-Wall', '-Wextra', '-O0', '-g', '-ftrapv' ], 'msvs_settings': { 'VCCLCompilerTool': { 'RuntimeLibrary': 1, # static debug }, }, }, 'Release': { 'defines': [ 'NDEBUG' ], 'cflags': [ '-Wall', '-Wextra', '-O3' ], 'msvs_settings': { 'VCCLCompilerTool': { 'RuntimeLibrary': 0, # static release }, }, } }, 'msvs_settings': { 'VCCLCompilerTool': { }, 'VCLibrarianTool': { }, 'VCLinkerTool': { 'GenerateDebugInformation': 'true', }, }, 'conditions': [ ['OS == "win"', { 'defines': [ 'WIN32' ], }] ], }, 'targets': [ { 'target_name': 'http_parser', 'type': 'static_library', 'include_dirs': [ '.' ], 'direct_dependent_settings': { 'defines': [ 'HTTP_PARSER_STRICT=0' ], 'include_dirs': [ '.' ], }, 'defines': [ 'HTTP_PARSER_STRICT=0' ], 'sources': [ './http_parser.c', ], 'conditions': [ ['OS=="win"', { 'msvs_settings': { 'VCCLCompilerTool': { # Compile as C++. http_parser.c is actually C99, but C++ is # close enough in this case. 
'CompileAs': 2, }, }, }] ], }, { 'target_name': 'http_parser_strict', 'type': 'static_library', 'include_dirs': [ '.' ], 'direct_dependent_settings': { 'defines': [ 'HTTP_PARSER_STRICT=1' ], 'include_dirs': [ '.' ], }, 'defines': [ 'HTTP_PARSER_STRICT=1' ], 'sources': [ './http_parser.c', ], 'conditions': [ ['OS=="win"', { 'msvs_settings': { 'VCCLCompilerTool': { # Compile as C++. http_parser.c is actually C99, but C++ is # close enough in this case. 'CompileAs': 2, }, }, }] ], }, { 'target_name': 'test-nonstrict', 'type': 'executable', 'dependencies': [ 'http_parser' ], 'sources': [ 'test.c' ] }, { 'target_name': 'test-strict', 'type': 'executable', 'dependencies': [ 'http_parser_strict' ], 'sources': [ 'test.c' ] } ] } aiohttp-3.0.1/vendor/http-parser/http_parser.h0000666000000000000000000004437213240304665017604 0ustar 00000000000000/* Copyright Joyent, Inc. and other Node contributors. All rights reserved. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to * deal in the Software without restriction, including without limitation the * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or * sell copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS * IN THE SOFTWARE. 
*/ #ifndef http_parser_h #define http_parser_h #ifdef __cplusplus extern "C" { #endif /* Also update SONAME in the Makefile whenever you change these. */ #define HTTP_PARSER_VERSION_MAJOR 2 #define HTTP_PARSER_VERSION_MINOR 7 #define HTTP_PARSER_VERSION_PATCH 1 #include #if defined(_WIN32) && !defined(__MINGW32__) && \ (!defined(_MSC_VER) || _MSC_VER<1600) && !defined(__WINE__) #include typedef __int8 int8_t; typedef unsigned __int8 uint8_t; typedef __int16 int16_t; typedef unsigned __int16 uint16_t; typedef __int32 int32_t; typedef unsigned __int32 uint32_t; typedef __int64 int64_t; typedef unsigned __int64 uint64_t; #else #include #endif /* Compile with -DHTTP_PARSER_STRICT=0 to make less checks, but run * faster */ #ifndef HTTP_PARSER_STRICT # define HTTP_PARSER_STRICT 1 #endif /* Maximium header size allowed. If the macro is not defined * before including this header then the default is used. To * change the maximum header size, define the macro in the build * environment (e.g. -DHTTP_MAX_HEADER_SIZE=). To remove * the effective limit on the size of the header, define the macro * to a very large number (e.g. -DHTTP_MAX_HEADER_SIZE=0x7fffffff) */ #ifndef HTTP_MAX_HEADER_SIZE # define HTTP_MAX_HEADER_SIZE (80*1024) #endif typedef struct http_parser http_parser; typedef struct http_parser_settings http_parser_settings; /* Callbacks should return non-zero to indicate an error. The parser will * then halt execution. * * The one exception is on_headers_complete. In a HTTP_RESPONSE parser * returning '1' from on_headers_complete will tell the parser that it * should not expect a body. This is used when receiving a response to a * HEAD request which may contain 'Content-Length' or 'Transfer-Encoding: * chunked' headers that indicate the presence of a body. * * Returning `2` from on_headers_complete will tell parser that it should not * expect neither a body nor any futher responses on this connection. 
This is * useful for handling responses to a CONNECT request which may not contain * `Upgrade` or `Connection: upgrade` headers. * * http_data_cb does not return data chunks. It will be called arbitrarily * many times for each string. E.G. you might get 10 callbacks for "on_url" * each providing just a few characters more data. */ typedef int (*http_data_cb) (http_parser*, const char *at, size_t length); typedef int (*http_cb) (http_parser*); /* Status Codes */ #define HTTP_STATUS_MAP(XX) \ XX(100, CONTINUE, Continue) \ XX(101, SWITCHING_PROTOCOLS, Switching Protocols) \ XX(102, PROCESSING, Processing) \ XX(200, OK, OK) \ XX(201, CREATED, Created) \ XX(202, ACCEPTED, Accepted) \ XX(203, NON_AUTHORITATIVE_INFORMATION, Non-Authoritative Information) \ XX(204, NO_CONTENT, No Content) \ XX(205, RESET_CONTENT, Reset Content) \ XX(206, PARTIAL_CONTENT, Partial Content) \ XX(207, MULTI_STATUS, Multi-Status) \ XX(208, ALREADY_REPORTED, Already Reported) \ XX(226, IM_USED, IM Used) \ XX(300, MULTIPLE_CHOICES, Multiple Choices) \ XX(301, MOVED_PERMANENTLY, Moved Permanently) \ XX(302, FOUND, Found) \ XX(303, SEE_OTHER, See Other) \ XX(304, NOT_MODIFIED, Not Modified) \ XX(305, USE_PROXY, Use Proxy) \ XX(307, TEMPORARY_REDIRECT, Temporary Redirect) \ XX(308, PERMANENT_REDIRECT, Permanent Redirect) \ XX(400, BAD_REQUEST, Bad Request) \ XX(401, UNAUTHORIZED, Unauthorized) \ XX(402, PAYMENT_REQUIRED, Payment Required) \ XX(403, FORBIDDEN, Forbidden) \ XX(404, NOT_FOUND, Not Found) \ XX(405, METHOD_NOT_ALLOWED, Method Not Allowed) \ XX(406, NOT_ACCEPTABLE, Not Acceptable) \ XX(407, PROXY_AUTHENTICATION_REQUIRED, Proxy Authentication Required) \ XX(408, REQUEST_TIMEOUT, Request Timeout) \ XX(409, CONFLICT, Conflict) \ XX(410, GONE, Gone) \ XX(411, LENGTH_REQUIRED, Length Required) \ XX(412, PRECONDITION_FAILED, Precondition Failed) \ XX(413, PAYLOAD_TOO_LARGE, Payload Too Large) \ XX(414, URI_TOO_LONG, URI Too Long) \ XX(415, UNSUPPORTED_MEDIA_TYPE, Unsupported Media Type) \ 
XX(416, RANGE_NOT_SATISFIABLE, Range Not Satisfiable) \ XX(417, EXPECTATION_FAILED, Expectation Failed) \ XX(421, MISDIRECTED_REQUEST, Misdirected Request) \ XX(422, UNPROCESSABLE_ENTITY, Unprocessable Entity) \ XX(423, LOCKED, Locked) \ XX(424, FAILED_DEPENDENCY, Failed Dependency) \ XX(426, UPGRADE_REQUIRED, Upgrade Required) \ XX(428, PRECONDITION_REQUIRED, Precondition Required) \ XX(429, TOO_MANY_REQUESTS, Too Many Requests) \ XX(431, REQUEST_HEADER_FIELDS_TOO_LARGE, Request Header Fields Too Large) \ XX(451, UNAVAILABLE_FOR_LEGAL_REASONS, Unavailable For Legal Reasons) \ XX(500, INTERNAL_SERVER_ERROR, Internal Server Error) \ XX(501, NOT_IMPLEMENTED, Not Implemented) \ XX(502, BAD_GATEWAY, Bad Gateway) \ XX(503, SERVICE_UNAVAILABLE, Service Unavailable) \ XX(504, GATEWAY_TIMEOUT, Gateway Timeout) \ XX(505, HTTP_VERSION_NOT_SUPPORTED, HTTP Version Not Supported) \ XX(506, VARIANT_ALSO_NEGOTIATES, Variant Also Negotiates) \ XX(507, INSUFFICIENT_STORAGE, Insufficient Storage) \ XX(508, LOOP_DETECTED, Loop Detected) \ XX(510, NOT_EXTENDED, Not Extended) \ XX(511, NETWORK_AUTHENTICATION_REQUIRED, Network Authentication Required) \ enum http_status { #define XX(num, name, string) HTTP_STATUS_##name = num, HTTP_STATUS_MAP(XX) #undef XX }; /* Request Methods */ #define HTTP_METHOD_MAP(XX) \ XX(0, DELETE, DELETE) \ XX(1, GET, GET) \ XX(2, HEAD, HEAD) \ XX(3, POST, POST) \ XX(4, PUT, PUT) \ /* pathological */ \ XX(5, CONNECT, CONNECT) \ XX(6, OPTIONS, OPTIONS) \ XX(7, TRACE, TRACE) \ /* WebDAV */ \ XX(8, COPY, COPY) \ XX(9, LOCK, LOCK) \ XX(10, MKCOL, MKCOL) \ XX(11, MOVE, MOVE) \ XX(12, PROPFIND, PROPFIND) \ XX(13, PROPPATCH, PROPPATCH) \ XX(14, SEARCH, SEARCH) \ XX(15, UNLOCK, UNLOCK) \ XX(16, BIND, BIND) \ XX(17, REBIND, REBIND) \ XX(18, UNBIND, UNBIND) \ XX(19, ACL, ACL) \ /* subversion */ \ XX(20, REPORT, REPORT) \ XX(21, MKACTIVITY, MKACTIVITY) \ XX(22, CHECKOUT, CHECKOUT) \ XX(23, MERGE, MERGE) \ /* upnp */ \ XX(24, MSEARCH, M-SEARCH) \ XX(25, NOTIFY, NOTIFY) \ 
XX(26, SUBSCRIBE, SUBSCRIBE) \ XX(27, UNSUBSCRIBE, UNSUBSCRIBE) \ /* RFC-5789 */ \ XX(28, PATCH, PATCH) \ XX(29, PURGE, PURGE) \ /* CalDAV */ \ XX(30, MKCALENDAR, MKCALENDAR) \ /* RFC-2068, section 19.6.1.2 */ \ XX(31, LINK, LINK) \ XX(32, UNLINK, UNLINK) \ enum http_method { #define XX(num, name, string) HTTP_##name = num, HTTP_METHOD_MAP(XX) #undef XX }; enum http_parser_type { HTTP_REQUEST, HTTP_RESPONSE, HTTP_BOTH }; /* Flag values for http_parser.flags field */ enum flags { F_CHUNKED = 1 << 0 , F_CONNECTION_KEEP_ALIVE = 1 << 1 , F_CONNECTION_CLOSE = 1 << 2 , F_CONNECTION_UPGRADE = 1 << 3 , F_TRAILING = 1 << 4 , F_UPGRADE = 1 << 5 , F_SKIPBODY = 1 << 6 , F_CONTENTLENGTH = 1 << 7 }; /* Map for errno-related constants * * The provided argument should be a macro that takes 2 arguments. */ #define HTTP_ERRNO_MAP(XX) \ /* No error */ \ XX(OK, "success") \ \ /* Callback-related errors */ \ XX(CB_message_begin, "the on_message_begin callback failed") \ XX(CB_url, "the on_url callback failed") \ XX(CB_header_field, "the on_header_field callback failed") \ XX(CB_header_value, "the on_header_value callback failed") \ XX(CB_headers_complete, "the on_headers_complete callback failed") \ XX(CB_body, "the on_body callback failed") \ XX(CB_message_complete, "the on_message_complete callback failed") \ XX(CB_status, "the on_status callback failed") \ XX(CB_chunk_header, "the on_chunk_header callback failed") \ XX(CB_chunk_complete, "the on_chunk_complete callback failed") \ \ /* Parsing-related errors */ \ XX(INVALID_EOF_STATE, "stream ended at an unexpected time") \ XX(HEADER_OVERFLOW, \ "too many header bytes seen; overflow detected") \ XX(CLOSED_CONNECTION, \ "data received after completed connection: close message") \ XX(INVALID_VERSION, "invalid HTTP version") \ XX(INVALID_STATUS, "invalid HTTP status code") \ XX(INVALID_METHOD, "invalid HTTP method") \ XX(INVALID_URL, "invalid URL") \ XX(INVALID_HOST, "invalid host") \ XX(INVALID_PORT, "invalid port") \ XX(INVALID_PATH, 
"invalid path") \ XX(INVALID_QUERY_STRING, "invalid query string") \ XX(INVALID_FRAGMENT, "invalid fragment") \ XX(LF_EXPECTED, "LF character expected") \ XX(INVALID_HEADER_TOKEN, "invalid character in header") \ XX(INVALID_CONTENT_LENGTH, \ "invalid character in content-length header") \ XX(UNEXPECTED_CONTENT_LENGTH, \ "unexpected content-length header") \ XX(INVALID_CHUNK_SIZE, \ "invalid character in chunk size header") \ XX(INVALID_CONSTANT, "invalid constant string") \ XX(INVALID_INTERNAL_STATE, "encountered unexpected internal state")\ XX(STRICT, "strict mode assertion failed") \ XX(PAUSED, "parser is paused") \ XX(UNKNOWN, "an unknown error occurred") /* Define HPE_* values for each errno value above */ #define HTTP_ERRNO_GEN(n, s) HPE_##n, enum http_errno { HTTP_ERRNO_MAP(HTTP_ERRNO_GEN) }; #undef HTTP_ERRNO_GEN /* Get an http_errno value from an http_parser */ #define HTTP_PARSER_ERRNO(p) ((enum http_errno) (p)->http_errno) struct http_parser { /** PRIVATE **/ unsigned int type : 2; /* enum http_parser_type */ unsigned int flags : 8; /* F_* values from 'flags' enum; semi-public */ unsigned int state : 7; /* enum state from http_parser.c */ unsigned int header_state : 7; /* enum header_state from http_parser.c */ unsigned int index : 7; /* index into current matcher */ unsigned int lenient_http_headers : 1; uint32_t nread; /* # bytes read in various scenarios */ uint64_t content_length; /* # bytes in body (0 if no Content-Length header) */ /** READ-ONLY **/ unsigned short http_major; unsigned short http_minor; unsigned int status_code : 16; /* responses only */ unsigned int method : 8; /* requests only */ unsigned int http_errno : 7; /* 1 = Upgrade header was present and the parser has exited because of that. * 0 = No upgrade header present. * Should be checked when http_parser_execute() returns in addition to * error checking. 
*/ unsigned int upgrade : 1; /** PUBLIC **/ void *data; /* A pointer to get hook to the "connection" or "socket" object */ }; struct http_parser_settings { http_cb on_message_begin; http_data_cb on_url; http_data_cb on_status; http_data_cb on_header_field; http_data_cb on_header_value; http_cb on_headers_complete; http_data_cb on_body; http_cb on_message_complete; /* When on_chunk_header is called, the current chunk length is stored * in parser->content_length. */ http_cb on_chunk_header; http_cb on_chunk_complete; }; enum http_parser_url_fields { UF_SCHEMA = 0 , UF_HOST = 1 , UF_PORT = 2 , UF_PATH = 3 , UF_QUERY = 4 , UF_FRAGMENT = 5 , UF_USERINFO = 6 , UF_MAX = 7 }; /* Result structure for http_parser_parse_url(). * * Callers should index into field_data[] with UF_* values iff field_set * has the relevant (1 << UF_*) bit set. As a courtesy to clients (and * because we probably have padding left over), we convert any port to * a uint16_t. */ struct http_parser_url { uint16_t field_set; /* Bitmask of (1 << UF_*) values */ uint16_t port; /* Converted UF_PORT string */ struct { uint16_t off; /* Offset into buffer in which field starts */ uint16_t len; /* Length of run in buffer */ } field_data[UF_MAX]; }; /* Returns the library version. Bits 16-23 contain the major version number, * bits 8-15 the minor version number and bits 0-7 the patch level. * Usage example: * * unsigned long version = http_parser_version(); * unsigned major = (version >> 16) & 255; * unsigned minor = (version >> 8) & 255; * unsigned patch = version & 255; * printf("http_parser v%u.%u.%u\n", major, minor, patch); */ unsigned long http_parser_version(void); void http_parser_init(http_parser *parser, enum http_parser_type type); /* Initialize http_parser_settings members to 0 */ void http_parser_settings_init(http_parser_settings *settings); /* Executes the parser. Returns number of parsed bytes. Sets * `parser->http_errno` on error. 
*/ size_t http_parser_execute(http_parser *parser, const http_parser_settings *settings, const char *data, size_t len); /* If http_should_keep_alive() in the on_headers_complete or * on_message_complete callback returns 0, then this should be * the last message on the connection. * If you are the server, respond with the "Connection: close" header. * If you are the client, close the connection. */ int http_should_keep_alive(const http_parser *parser); /* Returns a string version of the HTTP method. */ const char *http_method_str(enum http_method m); /* Return a string name of the given error */ const char *http_errno_name(enum http_errno err); /* Return a string description of the given error */ const char *http_errno_description(enum http_errno err); /* Initialize all http_parser_url members to 0 */ void http_parser_url_init(struct http_parser_url *u); /* Parse a URL; return nonzero on failure */ int http_parser_parse_url(const char *buf, size_t buflen, int is_connect, struct http_parser_url *u); /* Pause or un-pause the parser; a nonzero value pauses */ void http_parser_pause(http_parser *parser, int paused); /* Checks if this is the final chunk of the body. */ int http_body_is_final(const http_parser *parser); #ifdef __cplusplus } #endif #endif aiohttp-3.0.1/vendor/http-parser/LICENSE-MIT0000666000000000000000000000206513240304665016525 0ustar 00000000000000Copyright Joyent, Inc. and other Node contributors. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. aiohttp-3.0.1/vendor/http-parser/Makefile0000666000000000000000000001224413240304665016531 0ustar 00000000000000# Copyright Joyent, Inc. and other Node contributors. All rights reserved. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. 
PLATFORM ?= $(shell sh -c 'uname -s | tr "[A-Z]" "[a-z]"') HELPER ?= BINEXT ?= SOLIBNAME = libhttp_parser SOMAJOR = 2 SOMINOR = 7 SOREV = 1 ifeq (darwin,$(PLATFORM)) SOEXT ?= dylib SONAME ?= $(SOLIBNAME).$(SOMAJOR).$(SOMINOR).$(SOEXT) LIBNAME ?= $(SOLIBNAME).$(SOMAJOR).$(SOMINOR).$(SOREV).$(SOEXT) else ifeq (wine,$(PLATFORM)) CC = winegcc BINEXT = .exe.so HELPER = wine else SOEXT ?= so SONAME ?= $(SOLIBNAME).$(SOEXT).$(SOMAJOR).$(SOMINOR) LIBNAME ?= $(SOLIBNAME).$(SOEXT).$(SOMAJOR).$(SOMINOR).$(SOREV) endif CC?=gcc AR?=ar CPPFLAGS ?= LDFLAGS ?= CPPFLAGS += -I. CPPFLAGS_DEBUG = $(CPPFLAGS) -DHTTP_PARSER_STRICT=1 CPPFLAGS_DEBUG += $(CPPFLAGS_DEBUG_EXTRA) CPPFLAGS_FAST = $(CPPFLAGS) -DHTTP_PARSER_STRICT=0 CPPFLAGS_FAST += $(CPPFLAGS_FAST_EXTRA) CPPFLAGS_BENCH = $(CPPFLAGS_FAST) CFLAGS += -Wall -Wextra -Werror CFLAGS_DEBUG = $(CFLAGS) -O0 -g $(CFLAGS_DEBUG_EXTRA) CFLAGS_FAST = $(CFLAGS) -O3 $(CFLAGS_FAST_EXTRA) CFLAGS_BENCH = $(CFLAGS_FAST) -Wno-unused-parameter CFLAGS_LIB = $(CFLAGS_FAST) -fPIC LDFLAGS_LIB = $(LDFLAGS) -shared INSTALL ?= install PREFIX ?= /usr/local LIBDIR = $(PREFIX)/lib INCLUDEDIR = $(PREFIX)/include ifeq (darwin,$(PLATFORM)) LDFLAGS_LIB += -Wl,-install_name,$(LIBDIR)/$(SONAME) else # TODO(bnoordhuis) The native SunOS linker expects -h rather than -soname... 
LDFLAGS_LIB += -Wl,-soname=$(SONAME) endif test: test_g test_fast $(HELPER) ./test_g$(BINEXT) $(HELPER) ./test_fast$(BINEXT) test_g: http_parser_g.o test_g.o $(CC) $(CFLAGS_DEBUG) $(LDFLAGS) http_parser_g.o test_g.o -o $@ test_g.o: test.c http_parser.h Makefile $(CC) $(CPPFLAGS_DEBUG) $(CFLAGS_DEBUG) -c test.c -o $@ http_parser_g.o: http_parser.c http_parser.h Makefile $(CC) $(CPPFLAGS_DEBUG) $(CFLAGS_DEBUG) -c http_parser.c -o $@ test_fast: http_parser.o test.o http_parser.h $(CC) $(CFLAGS_FAST) $(LDFLAGS) http_parser.o test.o -o $@ test.o: test.c http_parser.h Makefile $(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) -c test.c -o $@ bench: http_parser.o bench.o $(CC) $(CFLAGS_BENCH) $(LDFLAGS) http_parser.o bench.o -o $@ bench.o: bench.c http_parser.h Makefile $(CC) $(CPPFLAGS_BENCH) $(CFLAGS_BENCH) -c bench.c -o $@ http_parser.o: http_parser.c http_parser.h Makefile $(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) -c http_parser.c test-run-timed: test_fast while(true) do time $(HELPER) ./test_fast$(BINEXT) > /dev/null; done test-valgrind: test_g valgrind ./test_g libhttp_parser.o: http_parser.c http_parser.h Makefile $(CC) $(CPPFLAGS_FAST) $(CFLAGS_LIB) -c http_parser.c -o libhttp_parser.o library: libhttp_parser.o $(CC) $(LDFLAGS_LIB) -o $(LIBNAME) $< package: http_parser.o $(AR) rcs libhttp_parser.a http_parser.o url_parser: http_parser.o contrib/url_parser.c $(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) $^ -o $@ url_parser_g: http_parser_g.o contrib/url_parser.c $(CC) $(CPPFLAGS_DEBUG) $(CFLAGS_DEBUG) $^ -o $@ parsertrace: http_parser.o contrib/parsertrace.c $(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) $^ -o parsertrace$(BINEXT) parsertrace_g: http_parser_g.o contrib/parsertrace.c $(CC) $(CPPFLAGS_DEBUG) $(CFLAGS_DEBUG) $^ -o parsertrace_g$(BINEXT) tags: http_parser.c http_parser.h test.c ctags $^ install: library $(INSTALL) -D http_parser.h $(DESTDIR)$(INCLUDEDIR)/http_parser.h $(INSTALL) -D $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(LIBNAME) ln -s $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(SONAME) ln -s 
$(LIBNAME) $(DESTDIR)$(LIBDIR)/$(SOLIBNAME).$(SOEXT) install-strip: library $(INSTALL) -D http_parser.h $(DESTDIR)$(INCLUDEDIR)/http_parser.h $(INSTALL) -D -s $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(LIBNAME) ln -s $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(SONAME) ln -s $(LIBNAME) $(DESTDIR)$(LIBDIR)/$(SOLIBNAME).$(SOEXT) uninstall: rm $(DESTDIR)$(INCLUDEDIR)/http_parser.h rm $(DESTDIR)$(LIBDIR)/$(SOLIBNAME).$(SOEXT) rm $(DESTDIR)$(LIBDIR)/$(SONAME) rm $(DESTDIR)$(LIBDIR)/$(LIBNAME) clean: rm -f *.o *.a tags test test_fast test_g \ http_parser.tar libhttp_parser.so.* \ url_parser url_parser_g parsertrace parsertrace_g \ *.exe *.exe.so contrib/url_parser.c: http_parser.h contrib/parsertrace.c: http_parser.h .PHONY: clean package test-run test-run-timed test-valgrind install install-strip uninstall aiohttp-3.0.1/vendor/http-parser/README.md0000666000000000000000000002217713240304665016356 0ustar 00000000000000HTTP Parser =========== [![Build Status](https://api.travis-ci.org/nodejs/http-parser.svg?branch=master)](https://travis-ci.org/nodejs/http-parser) This is a parser for HTTP messages written in C. It parses both requests and responses. The parser is designed to be used in performance HTTP applications. It does not make any syscalls nor allocations, it does not buffer data, it can be interrupted at anytime. Depending on your architecture, it only requires about 40 bytes of data per message stream (in a web server that is per connection). Features: * No dependencies * Handles persistent streams (keep-alive). * Decodes chunked encoding. * Upgrade support * Defends against buffer overflow attacks. The parser extracts the following information from HTTP messages: * Header fields and values * Content-Length * Request method * Response status code * Transfer-Encoding * HTTP version * Request URL * Message body Usage ----- One `http_parser` object is used per TCP connection. Initialize the struct using `http_parser_init()` and set the callbacks. 
That might look something like this for a request parser: ```c http_parser_settings settings; settings.on_url = my_url_callback; settings.on_header_field = my_header_field_callback; /* ... */ http_parser *parser = malloc(sizeof(http_parser)); http_parser_init(parser, HTTP_REQUEST); parser->data = my_socket; ``` When data is received on the socket execute the parser and check for errors. ```c size_t len = 80*1024, nparsed; char buf[len]; ssize_t recved; recved = recv(fd, buf, len, 0); if (recved < 0) { /* Handle error. */ } /* Start up / continue the parser. * Note we pass recved==0 to signal that EOF has been received. */ nparsed = http_parser_execute(parser, &settings, buf, recved); if (parser->upgrade) { /* handle new protocol */ } else if (nparsed != recved) { /* Handle error. Usually just close the connection. */ } ``` `http_parser` needs to know where the end of the stream is. For example, sometimes servers send responses without Content-Length and expect the client to consume input (for the body) until EOF. To tell `http_parser` about EOF, give `0` as the fourth parameter to `http_parser_execute()`. Callbacks and errors can still be encountered during an EOF, so one must still be prepared to receive them. Scalar valued message information such as `status_code`, `method`, and the HTTP version are stored in the parser structure. This data is only temporally stored in `http_parser` and gets reset on each new message. If this information is needed later, copy it out of the structure during the `headers_complete` callback. The parser decodes the transfer-encoding for both requests and responses transparently. That is, a chunked encoding is decoded before being sent to the on_body callback. The Special Problem of Upgrade ------------------------------ `http_parser` supports upgrading the connection to a different protocol. 
An increasingly common example of this is the WebSocket protocol which sends a request like GET /demo HTTP/1.1 Upgrade: WebSocket Connection: Upgrade Host: example.com Origin: http://example.com WebSocket-Protocol: sample followed by non-HTTP data. (See [RFC6455](https://tools.ietf.org/html/rfc6455) for more information the WebSocket protocol.) To support this, the parser will treat this as a normal HTTP message without a body, issuing both on_headers_complete and on_message_complete callbacks. However http_parser_execute() will stop parsing at the end of the headers and return. The user is expected to check if `parser->upgrade` has been set to 1 after `http_parser_execute()` returns. Non-HTTP data begins at the buffer supplied offset by the return value of `http_parser_execute()`. Callbacks --------- During the `http_parser_execute()` call, the callbacks set in `http_parser_settings` will be executed. The parser maintains state and never looks behind, so buffering the data is not necessary. If you need to save certain data for later usage, you can do that from the callbacks. There are two types of callbacks: * notification `typedef int (*http_cb) (http_parser*);` Callbacks: on_message_begin, on_headers_complete, on_message_complete. * data `typedef int (*http_data_cb) (http_parser*, const char *at, size_t length);` Callbacks: (requests only) on_url, (common) on_header_field, on_header_value, on_body; Callbacks must return 0 on success. Returning a non-zero value indicates error to the parser, making it exit immediately. For cases where it is necessary to pass local information to/from a callback, the `http_parser` object's `data` field can be used. An example of such a case is when using threads to handle a socket connection, parse a request, and then give a response over that socket. By instantiation of a thread-local struct containing relevant data (e.g. 
accepted socket, allocated memory for callbacks to write into, etc), a parser's callbacks are able to communicate data between the scope of the thread and the scope of the callback in a threadsafe manner. This allows `http_parser` to be used in multi-threaded contexts. Example: ```c typedef struct { socket_t sock; void* buffer; int buf_len; } custom_data_t; int my_url_callback(http_parser* parser, const char *at, size_t length) { /* access to thread local custom_data_t struct. Use this access save parsed data for later use into thread local buffer, or communicate over socket */ parser->data; ... return 0; } ... void http_parser_thread(socket_t sock) { int nparsed = 0; /* allocate memory for user data */ custom_data_t *my_data = malloc(sizeof(custom_data_t)); /* some information for use by callbacks. * achieves thread -> callback information flow */ my_data->sock = sock; /* instantiate a thread-local parser */ http_parser *parser = malloc(sizeof(http_parser)); http_parser_init(parser, HTTP_REQUEST); /* initialise parser */ /* this custom data reference is accessible through the reference to the parser supplied to callback functions */ parser->data = my_data; http_parser_settings settings; /* set up callbacks */ settings.on_url = my_url_callback; /* execute parser */ nparsed = http_parser_execute(parser, &settings, buf, recved); ... /* parsed information copied from callback. can now perform action on data copied into thread-local memory from callbacks. achieves callback -> thread information flow */ my_data->buffer; ... } ``` In case you parse HTTP message in chunks (i.e. `read()` request line from socket, parse, read half headers, parse, etc) your data callbacks may be called more than once. `http_parser` guarantees that data pointer is only valid for the lifetime of callback. You can also `read()` into a heap allocated buffer to avoid copying memory around if this fits your application. Reading headers may be a tricky task if you read/parse headers partially. 
Basically, you need to remember whether last header callback was field or value and apply the following logic: (on_header_field and on_header_value shortened to on_h_*) ------------------------ ------------ -------------------------------------------- | State (prev. callback) | Callback | Description/action | ------------------------ ------------ -------------------------------------------- | nothing (first call) | on_h_field | Allocate new buffer and copy callback data | | | | into it | ------------------------ ------------ -------------------------------------------- | value | on_h_field | New header started. | | | | Copy current name,value buffers to headers | | | | list and allocate new buffer for new name | ------------------------ ------------ -------------------------------------------- | field | on_h_field | Previous name continues. Reallocate name | | | | buffer and append callback data to it | ------------------------ ------------ -------------------------------------------- | field | on_h_value | Value for current header started. Allocate | | | | new buffer and copy callback data to it | ------------------------ ------------ -------------------------------------------- | value | on_h_value | Value continues. Reallocate value buffer | | | | and append callback data to it | ------------------------ ------------ -------------------------------------------- Parsing URLs ------------ A simplistic zero-copy URL parser is provided as `http_parser_parse_url()`. Users of this library may wish to use it to parse URLs constructed from consecutive `on_url` callbacks. 
See examples of reading in headers: * [partial example](http://gist.github.com/155877) in C * [from http-parser tests](http://github.com/joyent/http-parser/blob/37a0ff8/test.c#L403) in C * [from Node library](http://github.com/joyent/node/blob/842eaf4/src/http.js#L284) in Javascript aiohttp-3.0.1/vendor/http-parser/test.c0000666000000000000000000034622013240304665016220 0ustar 00000000000000/* Copyright Joyent, Inc. and other Node contributors. All rights reserved. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to * deal in the Software without restriction, including without limitation the * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or * sell copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS * IN THE SOFTWARE. */ #include "http_parser.h" #include #include #include #include /* rand */ #include #include #if defined(__APPLE__) # undef strlcat # undef strlncpy # undef strlcpy #endif /* defined(__APPLE__) */ #undef TRUE #define TRUE 1 #undef FALSE #define FALSE 0 #define MAX_HEADERS 13 #define MAX_ELEMENT_SIZE 2048 #define MAX_CHUNKS 16 #define MIN(a,b) ((a) < (b) ? 
(a) : (b)) static http_parser *parser; struct message { const char *name; // for debugging purposes const char *raw; enum http_parser_type type; enum http_method method; int status_code; char response_status[MAX_ELEMENT_SIZE]; char request_path[MAX_ELEMENT_SIZE]; char request_url[MAX_ELEMENT_SIZE]; char fragment[MAX_ELEMENT_SIZE]; char query_string[MAX_ELEMENT_SIZE]; char body[MAX_ELEMENT_SIZE]; size_t body_size; const char *host; const char *userinfo; uint16_t port; int num_headers; enum { NONE=0, FIELD, VALUE } last_header_element; char headers [MAX_HEADERS][2][MAX_ELEMENT_SIZE]; int should_keep_alive; int num_chunks; int num_chunks_complete; int chunk_lengths[MAX_CHUNKS]; const char *upgrade; // upgraded body unsigned short http_major; unsigned short http_minor; int message_begin_cb_called; int headers_complete_cb_called; int message_complete_cb_called; int status_cb_called; int message_complete_on_eof; int body_is_final; }; static int currently_parsing_eof; static struct message messages[5]; static int num_messages; static http_parser_settings *current_pause_parser; /* * R E Q U E S T S * */ const struct message requests[] = #define CURL_GET 0 { {.name= "curl get" ,.type= HTTP_REQUEST ,.raw= "GET /test HTTP/1.1\r\n" "User-Agent: curl/7.18.0 (i486-pc-linux-gnu) libcurl/7.18.0 OpenSSL/0.9.8g zlib/1.2.3.3 libidn/1.1\r\n" "Host: 0.0.0.0=5000\r\n" "Accept: */*\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_GET ,.query_string= "" ,.fragment= "" ,.request_path= "/test" ,.request_url= "/test" ,.num_headers= 3 ,.headers= { { "User-Agent", "curl/7.18.0 (i486-pc-linux-gnu) libcurl/7.18.0 OpenSSL/0.9.8g zlib/1.2.3.3 libidn/1.1" } , { "Host", "0.0.0.0=5000" } , { "Accept", "*/*" } } ,.body= "" } #define FIREFOX_GET 1 , {.name= "firefox get" ,.type= HTTP_REQUEST ,.raw= "GET /favicon.ico HTTP/1.1\r\n" "Host: 0.0.0.0=5000\r\n" "User-Agent: Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9) Gecko/2008061015 
Firefox/3.0\r\n" "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\r\n" "Accept-Language: en-us,en;q=0.5\r\n" "Accept-Encoding: gzip,deflate\r\n" "Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7\r\n" "Keep-Alive: 300\r\n" "Connection: keep-alive\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_GET ,.query_string= "" ,.fragment= "" ,.request_path= "/favicon.ico" ,.request_url= "/favicon.ico" ,.num_headers= 8 ,.headers= { { "Host", "0.0.0.0=5000" } , { "User-Agent", "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9) Gecko/2008061015 Firefox/3.0" } , { "Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" } , { "Accept-Language", "en-us,en;q=0.5" } , { "Accept-Encoding", "gzip,deflate" } , { "Accept-Charset", "ISO-8859-1,utf-8;q=0.7,*;q=0.7" } , { "Keep-Alive", "300" } , { "Connection", "keep-alive" } } ,.body= "" } #define DUMBFUCK 2 , {.name= "dumbfuck" ,.type= HTTP_REQUEST ,.raw= "GET /dumbfuck HTTP/1.1\r\n" "aaaaaaaaaaaaa:++++++++++\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_GET ,.query_string= "" ,.fragment= "" ,.request_path= "/dumbfuck" ,.request_url= "/dumbfuck" ,.num_headers= 1 ,.headers= { { "aaaaaaaaaaaaa", "++++++++++" } } ,.body= "" } #define FRAGMENT_IN_URI 3 , {.name= "fragment in url" ,.type= HTTP_REQUEST ,.raw= "GET /forums/1/topics/2375?page=1#posts-17408 HTTP/1.1\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_GET ,.query_string= "page=1" ,.fragment= "posts-17408" ,.request_path= "/forums/1/topics/2375" /* XXX request url does include fragment? 
*/ ,.request_url= "/forums/1/topics/2375?page=1#posts-17408" ,.num_headers= 0 ,.body= "" } #define GET_NO_HEADERS_NO_BODY 4 , {.name= "get no headers no body" ,.type= HTTP_REQUEST ,.raw= "GET /get_no_headers_no_body/world HTTP/1.1\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE /* would need Connection: close */ ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_GET ,.query_string= "" ,.fragment= "" ,.request_path= "/get_no_headers_no_body/world" ,.request_url= "/get_no_headers_no_body/world" ,.num_headers= 0 ,.body= "" } #define GET_ONE_HEADER_NO_BODY 5 , {.name= "get one header no body" ,.type= HTTP_REQUEST ,.raw= "GET /get_one_header_no_body HTTP/1.1\r\n" "Accept: */*\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE /* would need Connection: close */ ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_GET ,.query_string= "" ,.fragment= "" ,.request_path= "/get_one_header_no_body" ,.request_url= "/get_one_header_no_body" ,.num_headers= 1 ,.headers= { { "Accept" , "*/*" } } ,.body= "" } #define GET_FUNKY_CONTENT_LENGTH 6 , {.name= "get funky content length body hello" ,.type= HTTP_REQUEST ,.raw= "GET /get_funky_content_length_body_hello HTTP/1.0\r\n" "conTENT-Length: 5\r\n" "\r\n" "HELLO" ,.should_keep_alive= FALSE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 0 ,.method= HTTP_GET ,.query_string= "" ,.fragment= "" ,.request_path= "/get_funky_content_length_body_hello" ,.request_url= "/get_funky_content_length_body_hello" ,.num_headers= 1 ,.headers= { { "conTENT-Length" , "5" } } ,.body= "HELLO" } #define POST_IDENTITY_BODY_WORLD 7 , {.name= "post identity body world" ,.type= HTTP_REQUEST ,.raw= "POST /post_identity_body_world?q=search#hey HTTP/1.1\r\n" "Accept: */*\r\n" "Transfer-Encoding: identity\r\n" "Content-Length: 5\r\n" "\r\n" "World" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_POST ,.query_string= "q=search" ,.fragment= "hey" ,.request_path= 
"/post_identity_body_world" ,.request_url= "/post_identity_body_world?q=search#hey" ,.num_headers= 3 ,.headers= { { "Accept", "*/*" } , { "Transfer-Encoding", "identity" } , { "Content-Length", "5" } } ,.body= "World" } #define POST_CHUNKED_ALL_YOUR_BASE 8 , {.name= "post - chunked body: all your base are belong to us" ,.type= HTTP_REQUEST ,.raw= "POST /post_chunked_all_your_base HTTP/1.1\r\n" "Transfer-Encoding: chunked\r\n" "\r\n" "1e\r\nall your base are belong to us\r\n" "0\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_POST ,.query_string= "" ,.fragment= "" ,.request_path= "/post_chunked_all_your_base" ,.request_url= "/post_chunked_all_your_base" ,.num_headers= 1 ,.headers= { { "Transfer-Encoding" , "chunked" } } ,.body= "all your base are belong to us" ,.num_chunks_complete= 2 ,.chunk_lengths= { 0x1e } } #define TWO_CHUNKS_MULT_ZERO_END 9 , {.name= "two chunks ; triple zero ending" ,.type= HTTP_REQUEST ,.raw= "POST /two_chunks_mult_zero_end HTTP/1.1\r\n" "Transfer-Encoding: chunked\r\n" "\r\n" "5\r\nhello\r\n" "6\r\n world\r\n" "000\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_POST ,.query_string= "" ,.fragment= "" ,.request_path= "/two_chunks_mult_zero_end" ,.request_url= "/two_chunks_mult_zero_end" ,.num_headers= 1 ,.headers= { { "Transfer-Encoding", "chunked" } } ,.body= "hello world" ,.num_chunks_complete= 3 ,.chunk_lengths= { 5, 6 } } #define CHUNKED_W_TRAILING_HEADERS 10 , {.name= "chunked with trailing headers. blech." 
,.type= HTTP_REQUEST ,.raw= "POST /chunked_w_trailing_headers HTTP/1.1\r\n" "Transfer-Encoding: chunked\r\n" "\r\n" "5\r\nhello\r\n" "6\r\n world\r\n" "0\r\n" "Vary: *\r\n" "Content-Type: text/plain\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_POST ,.query_string= "" ,.fragment= "" ,.request_path= "/chunked_w_trailing_headers" ,.request_url= "/chunked_w_trailing_headers" ,.num_headers= 3 ,.headers= { { "Transfer-Encoding", "chunked" } , { "Vary", "*" } , { "Content-Type", "text/plain" } } ,.body= "hello world" ,.num_chunks_complete= 3 ,.chunk_lengths= { 5, 6 } } #define CHUNKED_W_BULLSHIT_AFTER_LENGTH 11 , {.name= "with bullshit after the length" ,.type= HTTP_REQUEST ,.raw= "POST /chunked_w_bullshit_after_length HTTP/1.1\r\n" "Transfer-Encoding: chunked\r\n" "\r\n" "5; ihatew3;whatthefuck=aretheseparametersfor\r\nhello\r\n" "6; blahblah; blah\r\n world\r\n" "0\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_POST ,.query_string= "" ,.fragment= "" ,.request_path= "/chunked_w_bullshit_after_length" ,.request_url= "/chunked_w_bullshit_after_length" ,.num_headers= 1 ,.headers= { { "Transfer-Encoding", "chunked" } } ,.body= "hello world" ,.num_chunks_complete= 3 ,.chunk_lengths= { 5, 6 } } #define WITH_QUOTES 12 , {.name= "with quotes" ,.type= HTTP_REQUEST ,.raw= "GET /with_\"stupid\"_quotes?foo=\"bar\" HTTP/1.1\r\n\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_GET ,.query_string= "foo=\"bar\"" ,.fragment= "" ,.request_path= "/with_\"stupid\"_quotes" ,.request_url= "/with_\"stupid\"_quotes?foo=\"bar\"" ,.num_headers= 0 ,.headers= { } ,.body= "" } #define APACHEBENCH_GET 13 /* The server receiving this request SHOULD NOT wait for EOF * to know that content-length == 0. * How to represent this in a unit test? 
message_complete_on_eof * Compare with NO_CONTENT_LENGTH_RESPONSE. */ , {.name = "apachebench get" ,.type= HTTP_REQUEST ,.raw= "GET /test HTTP/1.0\r\n" "Host: 0.0.0.0:5000\r\n" "User-Agent: ApacheBench/2.3\r\n" "Accept: */*\r\n\r\n" ,.should_keep_alive= FALSE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 0 ,.method= HTTP_GET ,.query_string= "" ,.fragment= "" ,.request_path= "/test" ,.request_url= "/test" ,.num_headers= 3 ,.headers= { { "Host", "0.0.0.0:5000" } , { "User-Agent", "ApacheBench/2.3" } , { "Accept", "*/*" } } ,.body= "" } #define QUERY_URL_WITH_QUESTION_MARK_GET 14 /* Some clients include '?' characters in query strings. */ , {.name = "query url with question mark" ,.type= HTTP_REQUEST ,.raw= "GET /test.cgi?foo=bar?baz HTTP/1.1\r\n\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_GET ,.query_string= "foo=bar?baz" ,.fragment= "" ,.request_path= "/test.cgi" ,.request_url= "/test.cgi?foo=bar?baz" ,.num_headers= 0 ,.headers= {} ,.body= "" } #define PREFIX_NEWLINE_GET 15 /* Some clients, especially after a POST in a keep-alive connection, * will send an extra CRLF before the next request */ , {.name = "newline prefix get" ,.type= HTTP_REQUEST ,.raw= "\r\nGET /test HTTP/1.1\r\n\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_GET ,.query_string= "" ,.fragment= "" ,.request_path= "/test" ,.request_url= "/test" ,.num_headers= 0 ,.headers= { } ,.body= "" } #define UPGRADE_REQUEST 16 , {.name = "upgrade request" ,.type= HTTP_REQUEST ,.raw= "GET /demo HTTP/1.1\r\n" "Host: example.com\r\n" "Connection: Upgrade\r\n" "Sec-WebSocket-Key2: 12998 5 Y3 1 .P00\r\n" "Sec-WebSocket-Protocol: sample\r\n" "Upgrade: WebSocket\r\n" "Sec-WebSocket-Key1: 4 @1 46546xW%0l 1 5\r\n" "Origin: http://example.com\r\n" "\r\n" "Hot diggity dogg" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= 
HTTP_GET ,.query_string= "" ,.fragment= "" ,.request_path= "/demo" ,.request_url= "/demo" ,.num_headers= 7 ,.upgrade="Hot diggity dogg" ,.headers= { { "Host", "example.com" } , { "Connection", "Upgrade" } , { "Sec-WebSocket-Key2", "12998 5 Y3 1 .P00" } , { "Sec-WebSocket-Protocol", "sample" } , { "Upgrade", "WebSocket" } , { "Sec-WebSocket-Key1", "4 @1 46546xW%0l 1 5" } , { "Origin", "http://example.com" } } ,.body= "" } #define CONNECT_REQUEST 17 , {.name = "connect request" ,.type= HTTP_REQUEST ,.raw= "CONNECT 0-home0.netscape.com:443 HTTP/1.0\r\n" "User-agent: Mozilla/1.1N\r\n" "Proxy-authorization: basic aGVsbG86d29ybGQ=\r\n" "\r\n" "some data\r\n" "and yet even more data" ,.should_keep_alive= FALSE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 0 ,.method= HTTP_CONNECT ,.query_string= "" ,.fragment= "" ,.request_path= "" ,.request_url= "0-home0.netscape.com:443" ,.num_headers= 2 ,.upgrade="some data\r\nand yet even more data" ,.headers= { { "User-agent", "Mozilla/1.1N" } , { "Proxy-authorization", "basic aGVsbG86d29ybGQ=" } } ,.body= "" } #define REPORT_REQ 18 , {.name= "report request" ,.type= HTTP_REQUEST ,.raw= "REPORT /test HTTP/1.1\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_REPORT ,.query_string= "" ,.fragment= "" ,.request_path= "/test" ,.request_url= "/test" ,.num_headers= 0 ,.headers= {} ,.body= "" } #define NO_HTTP_VERSION 19 , {.name= "request with no http version" ,.type= HTTP_REQUEST ,.raw= "GET /\r\n" "\r\n" ,.should_keep_alive= FALSE ,.message_complete_on_eof= FALSE ,.http_major= 0 ,.http_minor= 9 ,.method= HTTP_GET ,.query_string= "" ,.fragment= "" ,.request_path= "/" ,.request_url= "/" ,.num_headers= 0 ,.headers= {} ,.body= "" } #define MSEARCH_REQ 20 , {.name= "m-search request" ,.type= HTTP_REQUEST ,.raw= "M-SEARCH * HTTP/1.1\r\n" "HOST: 239.255.255.250:1900\r\n" "MAN: \"ssdp:discover\"\r\n" "ST: \"ssdp:all\"\r\n" "\r\n" ,.should_keep_alive= TRUE 
,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_MSEARCH ,.query_string= "" ,.fragment= "" ,.request_path= "*" ,.request_url= "*" ,.num_headers= 3 ,.headers= { { "HOST", "239.255.255.250:1900" } , { "MAN", "\"ssdp:discover\"" } , { "ST", "\"ssdp:all\"" } } ,.body= "" } #define LINE_FOLDING_IN_HEADER 21 , {.name= "line folding in header value" ,.type= HTTP_REQUEST ,.raw= "GET / HTTP/1.1\r\n" "Line1: abc\r\n" "\tdef\r\n" " ghi\r\n" "\t\tjkl\r\n" " mno \r\n" "\t \tqrs\r\n" "Line2: \t line2\t\r\n" "Line3:\r\n" " line3\r\n" "Line4: \r\n" " \r\n" "Connection:\r\n" " close\r\n" "\r\n" ,.should_keep_alive= FALSE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_GET ,.query_string= "" ,.fragment= "" ,.request_path= "/" ,.request_url= "/" ,.num_headers= 5 ,.headers= { { "Line1", "abc\tdef ghi\t\tjkl mno \t \tqrs" } , { "Line2", "line2\t" } , { "Line3", "line3" } , { "Line4", "" } , { "Connection", "close" }, } ,.body= "" } #define QUERY_TERMINATED_HOST 22 , {.name= "host terminated by a query string" ,.type= HTTP_REQUEST ,.raw= "GET http://hypnotoad.org?hail=all HTTP/1.1\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_GET ,.query_string= "hail=all" ,.fragment= "" ,.request_path= "" ,.request_url= "http://hypnotoad.org?hail=all" ,.host= "hypnotoad.org" ,.num_headers= 0 ,.headers= { } ,.body= "" } #define QUERY_TERMINATED_HOSTPORT 23 , {.name= "host:port terminated by a query string" ,.type= HTTP_REQUEST ,.raw= "GET http://hypnotoad.org:1234?hail=all HTTP/1.1\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_GET ,.query_string= "hail=all" ,.fragment= "" ,.request_path= "" ,.request_url= "http://hypnotoad.org:1234?hail=all" ,.host= "hypnotoad.org" ,.port= 1234 ,.num_headers= 0 ,.headers= { } ,.body= "" } #define SPACE_TERMINATED_HOSTPORT 24 , {.name= "host:port terminated by a 
space" ,.type= HTTP_REQUEST ,.raw= "GET http://hypnotoad.org:1234 HTTP/1.1\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_GET ,.query_string= "" ,.fragment= "" ,.request_path= "" ,.request_url= "http://hypnotoad.org:1234" ,.host= "hypnotoad.org" ,.port= 1234 ,.num_headers= 0 ,.headers= { } ,.body= "" } #define PATCH_REQ 25 , {.name = "PATCH request" ,.type= HTTP_REQUEST ,.raw= "PATCH /file.txt HTTP/1.1\r\n" "Host: www.example.com\r\n" "Content-Type: application/example\r\n" "If-Match: \"e0023aa4e\"\r\n" "Content-Length: 10\r\n" "\r\n" "cccccccccc" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_PATCH ,.query_string= "" ,.fragment= "" ,.request_path= "/file.txt" ,.request_url= "/file.txt" ,.num_headers= 4 ,.headers= { { "Host", "www.example.com" } , { "Content-Type", "application/example" } , { "If-Match", "\"e0023aa4e\"" } , { "Content-Length", "10" } } ,.body= "cccccccccc" } #define CONNECT_CAPS_REQUEST 26 , {.name = "connect caps request" ,.type= HTTP_REQUEST ,.raw= "CONNECT HOME0.NETSCAPE.COM:443 HTTP/1.0\r\n" "User-agent: Mozilla/1.1N\r\n" "Proxy-authorization: basic aGVsbG86d29ybGQ=\r\n" "\r\n" ,.should_keep_alive= FALSE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 0 ,.method= HTTP_CONNECT ,.query_string= "" ,.fragment= "" ,.request_path= "" ,.request_url= "HOME0.NETSCAPE.COM:443" ,.num_headers= 2 ,.upgrade="" ,.headers= { { "User-agent", "Mozilla/1.1N" } , { "Proxy-authorization", "basic aGVsbG86d29ybGQ=" } } ,.body= "" } #if !HTTP_PARSER_STRICT #define UTF8_PATH_REQ 27 , {.name= "utf-8 path request" ,.type= HTTP_REQUEST ,.raw= "GET /δ¶/δt/pope?q=1#narf HTTP/1.1\r\n" "Host: github.com\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_GET ,.query_string= "q=1" ,.fragment= "narf" ,.request_path= "/δ¶/δt/pope" ,.request_url= "/δ¶/δt/pope?q=1#narf" 
,.num_headers= 1 ,.headers= { {"Host", "github.com" } } ,.body= "" } #define HOSTNAME_UNDERSCORE 28 , {.name = "hostname underscore" ,.type= HTTP_REQUEST ,.raw= "CONNECT home_0.netscape.com:443 HTTP/1.0\r\n" "User-agent: Mozilla/1.1N\r\n" "Proxy-authorization: basic aGVsbG86d29ybGQ=\r\n" "\r\n" ,.should_keep_alive= FALSE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 0 ,.method= HTTP_CONNECT ,.query_string= "" ,.fragment= "" ,.request_path= "" ,.request_url= "home_0.netscape.com:443" ,.num_headers= 2 ,.upgrade="" ,.headers= { { "User-agent", "Mozilla/1.1N" } , { "Proxy-authorization", "basic aGVsbG86d29ybGQ=" } } ,.body= "" } #endif /* !HTTP_PARSER_STRICT */ /* see https://github.com/ry/http-parser/issues/47 */ #define EAT_TRAILING_CRLF_NO_CONNECTION_CLOSE 29 , {.name = "eat CRLF between requests, no \"Connection: close\" header" ,.raw= "POST / HTTP/1.1\r\n" "Host: www.example.com\r\n" "Content-Type: application/x-www-form-urlencoded\r\n" "Content-Length: 4\r\n" "\r\n" "q=42\r\n" /* note the trailing CRLF */ ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_POST ,.query_string= "" ,.fragment= "" ,.request_path= "/" ,.request_url= "/" ,.num_headers= 3 ,.upgrade= 0 ,.headers= { { "Host", "www.example.com" } , { "Content-Type", "application/x-www-form-urlencoded" } , { "Content-Length", "4" } } ,.body= "q=42" } /* see https://github.com/ry/http-parser/issues/47 */ #define EAT_TRAILING_CRLF_WITH_CONNECTION_CLOSE 30 , {.name = "eat CRLF between requests even if \"Connection: close\" is set" ,.raw= "POST / HTTP/1.1\r\n" "Host: www.example.com\r\n" "Content-Type: application/x-www-form-urlencoded\r\n" "Content-Length: 4\r\n" "Connection: close\r\n" "\r\n" "q=42\r\n" /* note the trailing CRLF */ ,.should_keep_alive= FALSE ,.message_complete_on_eof= FALSE /* input buffer isn't empty when on_message_complete is called */ ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_POST ,.query_string= "" ,.fragment= 
"" ,.request_path= "/" ,.request_url= "/" ,.num_headers= 4 ,.upgrade= 0 ,.headers= { { "Host", "www.example.com" } , { "Content-Type", "application/x-www-form-urlencoded" } , { "Content-Length", "4" } , { "Connection", "close" } } ,.body= "q=42" } #define PURGE_REQ 31 , {.name = "PURGE request" ,.type= HTTP_REQUEST ,.raw= "PURGE /file.txt HTTP/1.1\r\n" "Host: www.example.com\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_PURGE ,.query_string= "" ,.fragment= "" ,.request_path= "/file.txt" ,.request_url= "/file.txt" ,.num_headers= 1 ,.headers= { { "Host", "www.example.com" } } ,.body= "" } #define SEARCH_REQ 32 , {.name = "SEARCH request" ,.type= HTTP_REQUEST ,.raw= "SEARCH / HTTP/1.1\r\n" "Host: www.example.com\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_SEARCH ,.query_string= "" ,.fragment= "" ,.request_path= "/" ,.request_url= "/" ,.num_headers= 1 ,.headers= { { "Host", "www.example.com" } } ,.body= "" } #define PROXY_WITH_BASIC_AUTH 33 , {.name= "host:port and basic_auth" ,.type= HTTP_REQUEST ,.raw= "GET http://a%12:b!&*$@hypnotoad.org:1234/toto HTTP/1.1\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_GET ,.fragment= "" ,.request_path= "/toto" ,.request_url= "http://a%12:b!&*$@hypnotoad.org:1234/toto" ,.host= "hypnotoad.org" ,.userinfo= "a%12:b!&*$" ,.port= 1234 ,.num_headers= 0 ,.headers= { } ,.body= "" } #define LINE_FOLDING_IN_HEADER_WITH_LF 34 , {.name= "line folding in header value" ,.type= HTTP_REQUEST ,.raw= "GET / HTTP/1.1\n" "Line1: abc\n" "\tdef\n" " ghi\n" "\t\tjkl\n" " mno \n" "\t \tqrs\n" "Line2: \t line2\t\n" "Line3:\n" " line3\n" "Line4: \n" " \n" "Connection:\n" " close\n" "\n" ,.should_keep_alive= FALSE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_GET ,.query_string= "" ,.fragment= "" ,.request_path= 
"/" ,.request_url= "/" ,.num_headers= 5 ,.headers= { { "Line1", "abc\tdef ghi\t\tjkl mno \t \tqrs" } , { "Line2", "line2\t" } , { "Line3", "line3" } , { "Line4", "" } , { "Connection", "close" }, } ,.body= "" } #define CONNECTION_MULTI 35 , {.name = "multiple connection header values with folding" ,.type= HTTP_REQUEST ,.raw= "GET /demo HTTP/1.1\r\n" "Host: example.com\r\n" "Connection: Something,\r\n" " Upgrade, ,Keep-Alive\r\n" "Sec-WebSocket-Key2: 12998 5 Y3 1 .P00\r\n" "Sec-WebSocket-Protocol: sample\r\n" "Upgrade: WebSocket\r\n" "Sec-WebSocket-Key1: 4 @1 46546xW%0l 1 5\r\n" "Origin: http://example.com\r\n" "\r\n" "Hot diggity dogg" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_GET ,.query_string= "" ,.fragment= "" ,.request_path= "/demo" ,.request_url= "/demo" ,.num_headers= 7 ,.upgrade="Hot diggity dogg" ,.headers= { { "Host", "example.com" } , { "Connection", "Something, Upgrade, ,Keep-Alive" } , { "Sec-WebSocket-Key2", "12998 5 Y3 1 .P00" } , { "Sec-WebSocket-Protocol", "sample" } , { "Upgrade", "WebSocket" } , { "Sec-WebSocket-Key1", "4 @1 46546xW%0l 1 5" } , { "Origin", "http://example.com" } } ,.body= "" } #define CONNECTION_MULTI_LWS 36 , {.name = "multiple connection header values with folding and lws" ,.type= HTTP_REQUEST ,.raw= "GET /demo HTTP/1.1\r\n" "Connection: keep-alive, upgrade\r\n" "Upgrade: WebSocket\r\n" "\r\n" "Hot diggity dogg" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_GET ,.query_string= "" ,.fragment= "" ,.request_path= "/demo" ,.request_url= "/demo" ,.num_headers= 2 ,.upgrade="Hot diggity dogg" ,.headers= { { "Connection", "keep-alive, upgrade" } , { "Upgrade", "WebSocket" } } ,.body= "" } #define CONNECTION_MULTI_LWS_CRLF 37 , {.name = "multiple connection header values with folding and lws" ,.type= HTTP_REQUEST ,.raw= "GET /demo HTTP/1.1\r\n" "Connection: keep-alive, \r\n upgrade\r\n" "Upgrade: WebSocket\r\n" 
"\r\n" "Hot diggity dogg" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_GET ,.query_string= "" ,.fragment= "" ,.request_path= "/demo" ,.request_url= "/demo" ,.num_headers= 2 ,.upgrade="Hot diggity dogg" ,.headers= { { "Connection", "keep-alive, upgrade" } , { "Upgrade", "WebSocket" } } ,.body= "" } #define UPGRADE_POST_REQUEST 38 , {.name = "upgrade post request" ,.type= HTTP_REQUEST ,.raw= "POST /demo HTTP/1.1\r\n" "Host: example.com\r\n" "Connection: Upgrade\r\n" "Upgrade: HTTP/2.0\r\n" "Content-Length: 15\r\n" "\r\n" "sweet post body" "Hot diggity dogg" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_POST ,.request_path= "/demo" ,.request_url= "/demo" ,.num_headers= 4 ,.upgrade="Hot diggity dogg" ,.headers= { { "Host", "example.com" } , { "Connection", "Upgrade" } , { "Upgrade", "HTTP/2.0" } , { "Content-Length", "15" } } ,.body= "sweet post body" } #define CONNECT_WITH_BODY_REQUEST 39 , {.name = "connect with body request" ,.type= HTTP_REQUEST ,.raw= "CONNECT foo.bar.com:443 HTTP/1.0\r\n" "User-agent: Mozilla/1.1N\r\n" "Proxy-authorization: basic aGVsbG86d29ybGQ=\r\n" "Content-Length: 10\r\n" "\r\n" "blarfcicle" ,.should_keep_alive= FALSE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 0 ,.method= HTTP_CONNECT ,.request_url= "foo.bar.com:443" ,.num_headers= 3 ,.upgrade="blarfcicle" ,.headers= { { "User-agent", "Mozilla/1.1N" } , { "Proxy-authorization", "basic aGVsbG86d29ybGQ=" } , { "Content-Length", "10" } } ,.body= "" } /* Examples from the Internet draft for LINK/UNLINK methods: * https://tools.ietf.org/id/draft-snell-link-method-01.html#rfc.section.5 */ #define LINK_REQUEST 40 , {.name = "link request" ,.type= HTTP_REQUEST ,.raw= "LINK /images/my_dog.jpg HTTP/1.1\r\n" "Host: example.com\r\n" "Link: ; rel=\"tag\"\r\n" "Link: ; rel=\"tag\"\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 
,.http_minor= 1 ,.method= HTTP_LINK ,.request_path= "/images/my_dog.jpg" ,.request_url= "/images/my_dog.jpg" ,.query_string= "" ,.fragment= "" ,.num_headers= 3 ,.headers= { { "Host", "example.com" } , { "Link", "; rel=\"tag\"" } , { "Link", "; rel=\"tag\"" } } ,.body= "" } #define UNLINK_REQUEST 41 , {.name = "unlink request" ,.type= HTTP_REQUEST ,.raw= "UNLINK /images/my_dog.jpg HTTP/1.1\r\n" "Host: example.com\r\n" "Link: ; rel=\"tag\"\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.method= HTTP_UNLINK ,.request_path= "/images/my_dog.jpg" ,.request_url= "/images/my_dog.jpg" ,.query_string= "" ,.fragment= "" ,.num_headers= 2 ,.headers= { { "Host", "example.com" } , { "Link", "; rel=\"tag\"" } } ,.body= "" } , {.name= NULL } /* sentinel */ }; /* * R E S P O N S E S * */ const struct message responses[] = #define GOOGLE_301 0 { {.name= "google 301" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.1 301 Moved Permanently\r\n" "Location: http://www.google.com/\r\n" "Content-Type: text/html; charset=UTF-8\r\n" "Date: Sun, 26 Apr 2009 11:11:49 GMT\r\n" "Expires: Tue, 26 May 2009 11:11:49 GMT\r\n" "X-$PrototypeBI-Version: 1.6.0.3\r\n" /* $ char in header field */ "Cache-Control: public, max-age=2592000\r\n" "Server: gws\r\n" "Content-Length: 219 \r\n" "\r\n" "\n" "301 Moved\n" "

    301 Moved

    \n" "The document has moved\n" "here.\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 301 ,.response_status= "Moved Permanently" ,.num_headers= 8 ,.headers= { { "Location", "http://www.google.com/" } , { "Content-Type", "text/html; charset=UTF-8" } , { "Date", "Sun, 26 Apr 2009 11:11:49 GMT" } , { "Expires", "Tue, 26 May 2009 11:11:49 GMT" } , { "X-$PrototypeBI-Version", "1.6.0.3" } , { "Cache-Control", "public, max-age=2592000" } , { "Server", "gws" } , { "Content-Length", "219 " } } ,.body= "\n" "301 Moved\n" "

    301 Moved

    \n" "The document has moved\n" "here.\r\n" "\r\n" } #define NO_CONTENT_LENGTH_RESPONSE 1 /* The client should wait for the server's EOF. That is, when content-length * is not specified, and "Connection: close", the end of body is specified * by the EOF. * Compare with APACHEBENCH_GET */ , {.name= "no content-length response" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.1 200 OK\r\n" "Date: Tue, 04 Aug 2009 07:59:32 GMT\r\n" "Server: Apache\r\n" "X-Powered-By: Servlet/2.5 JSP/2.1\r\n" "Content-Type: text/xml; charset=utf-8\r\n" "Connection: close\r\n" "\r\n" "\n" "\n" " \n" " \n" " SOAP-ENV:Client\n" " Client Error\n" " \n" " \n" "" ,.should_keep_alive= FALSE ,.message_complete_on_eof= TRUE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 200 ,.response_status= "OK" ,.num_headers= 5 ,.headers= { { "Date", "Tue, 04 Aug 2009 07:59:32 GMT" } , { "Server", "Apache" } , { "X-Powered-By", "Servlet/2.5 JSP/2.1" } , { "Content-Type", "text/xml; charset=utf-8" } , { "Connection", "close" } } ,.body= "\n" "\n" " \n" " \n" " SOAP-ENV:Client\n" " Client Error\n" " \n" " \n" "" } #define NO_HEADERS_NO_BODY_404 2 , {.name= "404 no headers no body" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.1 404 Not Found\r\n\r\n" ,.should_keep_alive= FALSE ,.message_complete_on_eof= TRUE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 404 ,.response_status= "Not Found" ,.num_headers= 0 ,.headers= {} ,.body_size= 0 ,.body= "" } #define NO_REASON_PHRASE 3 , {.name= "301 no response phrase" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.1 301\r\n\r\n" ,.should_keep_alive = FALSE ,.message_complete_on_eof= TRUE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 301 ,.response_status= "" ,.num_headers= 0 ,.headers= {} ,.body= "" } #define TRAILING_SPACE_ON_CHUNKED_BODY 4 , {.name="200 trailing space on chunked body" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.1 200 OK\r\n" "Content-Type: text/plain\r\n" "Transfer-Encoding: chunked\r\n" "\r\n" "25 \r\n" "This is the data in the first chunk\r\n" "\r\n" "1C\r\n" "and this is the second 
one\r\n" "\r\n" "0 \r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 200 ,.response_status= "OK" ,.num_headers= 2 ,.headers= { {"Content-Type", "text/plain" } , {"Transfer-Encoding", "chunked" } } ,.body_size = 37+28 ,.body = "This is the data in the first chunk\r\n" "and this is the second one\r\n" ,.num_chunks_complete= 3 ,.chunk_lengths= { 0x25, 0x1c } } #define NO_CARRIAGE_RET 5 , {.name="no carriage ret" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.1 200 OK\n" "Content-Type: text/html; charset=utf-8\n" "Connection: close\n" "\n" "these headers are from http://news.ycombinator.com/" ,.should_keep_alive= FALSE ,.message_complete_on_eof= TRUE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 200 ,.response_status= "OK" ,.num_headers= 2 ,.headers= { {"Content-Type", "text/html; charset=utf-8" } , {"Connection", "close" } } ,.body= "these headers are from http://news.ycombinator.com/" } #define PROXY_CONNECTION 6 , {.name="proxy connection" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.1 200 OK\r\n" "Content-Type: text/html; charset=UTF-8\r\n" "Content-Length: 11\r\n" "Proxy-Connection: close\r\n" "Date: Thu, 31 Dec 2009 20:55:48 +0000\r\n" "\r\n" "hello world" ,.should_keep_alive= FALSE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 200 ,.response_status= "OK" ,.num_headers= 4 ,.headers= { {"Content-Type", "text/html; charset=UTF-8" } , {"Content-Length", "11" } , {"Proxy-Connection", "close" } , {"Date", "Thu, 31 Dec 2009 20:55:48 +0000"} } ,.body= "hello world" } #define UNDERSTORE_HEADER_KEY 7 // shown by // curl -o /dev/null -v "http://ad.doubleclick.net/pfadx/DARTSHELLCONFIGXML;dcmt=text/xml;" , {.name="underscore header key" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.1 200 OK\r\n" "Server: DCLK-AdSvr\r\n" "Content-Type: text/xml\r\n" "Content-Length: 0\r\n" "DCLK_imp: v7;x;114750856;0-0;0;17820020;0/0;21603567/21621457/1;;~okv=;dcmt=text/xml;;~cs=o\r\n\r\n" ,.should_keep_alive= TRUE 
,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 200 ,.response_status= "OK" ,.num_headers= 4 ,.headers= { {"Server", "DCLK-AdSvr" } , {"Content-Type", "text/xml" } , {"Content-Length", "0" } , {"DCLK_imp", "v7;x;114750856;0-0;0;17820020;0/0;21603567/21621457/1;;~okv=;dcmt=text/xml;;~cs=o" } } ,.body= "" } #define BONJOUR_MADAME_FR 8 /* The client should not merge two headers fields when the first one doesn't * have a value. */ , {.name= "bonjourmadame.fr" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.0 301 Moved Permanently\r\n" "Date: Thu, 03 Jun 2010 09:56:32 GMT\r\n" "Server: Apache/2.2.3 (Red Hat)\r\n" "Cache-Control: public\r\n" "Pragma: \r\n" "Location: http://www.bonjourmadame.fr/\r\n" "Vary: Accept-Encoding\r\n" "Content-Length: 0\r\n" "Content-Type: text/html; charset=UTF-8\r\n" "Connection: keep-alive\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 0 ,.status_code= 301 ,.response_status= "Moved Permanently" ,.num_headers= 9 ,.headers= { { "Date", "Thu, 03 Jun 2010 09:56:32 GMT" } , { "Server", "Apache/2.2.3 (Red Hat)" } , { "Cache-Control", "public" } , { "Pragma", "" } , { "Location", "http://www.bonjourmadame.fr/" } , { "Vary", "Accept-Encoding" } , { "Content-Length", "0" } , { "Content-Type", "text/html; charset=UTF-8" } , { "Connection", "keep-alive" } } ,.body= "" } #define RES_FIELD_UNDERSCORE 9 /* Should handle spaces in header fields */ , {.name= "field underscore" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.1 200 OK\r\n" "Date: Tue, 28 Sep 2010 01:14:13 GMT\r\n" "Server: Apache\r\n" "Cache-Control: no-cache, must-revalidate\r\n" "Expires: Mon, 26 Jul 1997 05:00:00 GMT\r\n" ".et-Cookie: PlaxoCS=1274804622353690521; path=/; domain=.plaxo.com\r\n" "Vary: Accept-Encoding\r\n" "_eep-Alive: timeout=45\r\n" /* semantic value ignored */ "_onnection: Keep-Alive\r\n" /* semantic value ignored */ "Transfer-Encoding: chunked\r\n" "Content-Type: text/html\r\n" "Connection: close\r\n" "\r\n" 
"0\r\n\r\n" ,.should_keep_alive= FALSE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 200 ,.response_status= "OK" ,.num_headers= 11 ,.headers= { { "Date", "Tue, 28 Sep 2010 01:14:13 GMT" } , { "Server", "Apache" } , { "Cache-Control", "no-cache, must-revalidate" } , { "Expires", "Mon, 26 Jul 1997 05:00:00 GMT" } , { ".et-Cookie", "PlaxoCS=1274804622353690521; path=/; domain=.plaxo.com" } , { "Vary", "Accept-Encoding" } , { "_eep-Alive", "timeout=45" } , { "_onnection", "Keep-Alive" } , { "Transfer-Encoding", "chunked" } , { "Content-Type", "text/html" } , { "Connection", "close" } } ,.body= "" ,.num_chunks_complete= 1 ,.chunk_lengths= {} } #define NON_ASCII_IN_STATUS_LINE 10 /* Should handle non-ASCII in status line */ , {.name= "non-ASCII in status line" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.1 500 Oriëntatieprobleem\r\n" "Date: Fri, 5 Nov 2010 23:07:12 GMT+2\r\n" "Content-Length: 0\r\n" "Connection: close\r\n" "\r\n" ,.should_keep_alive= FALSE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 500 ,.response_status= "Oriëntatieprobleem" ,.num_headers= 3 ,.headers= { { "Date", "Fri, 5 Nov 2010 23:07:12 GMT+2" } , { "Content-Length", "0" } , { "Connection", "close" } } ,.body= "" } #define HTTP_VERSION_0_9 11 /* Should handle HTTP/0.9 */ , {.name= "http version 0.9" ,.type= HTTP_RESPONSE ,.raw= "HTTP/0.9 200 OK\r\n" "\r\n" ,.should_keep_alive= FALSE ,.message_complete_on_eof= TRUE ,.http_major= 0 ,.http_minor= 9 ,.status_code= 200 ,.response_status= "OK" ,.num_headers= 0 ,.headers= {} ,.body= "" } #define NO_CONTENT_LENGTH_NO_TRANSFER_ENCODING_RESPONSE 12 /* The client should wait for the server's EOF. That is, when neither * content-length nor transfer-encoding is specified, the end of body * is specified by the EOF. 
*/ , {.name= "neither content-length nor transfer-encoding response" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.1 200 OK\r\n" "Content-Type: text/plain\r\n" "\r\n" "hello world" ,.should_keep_alive= FALSE ,.message_complete_on_eof= TRUE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 200 ,.response_status= "OK" ,.num_headers= 1 ,.headers= { { "Content-Type", "text/plain" } } ,.body= "hello world" } #define NO_BODY_HTTP10_KA_200 13 , {.name= "HTTP/1.0 with keep-alive and EOF-terminated 200 status" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.0 200 OK\r\n" "Connection: keep-alive\r\n" "\r\n" ,.should_keep_alive= FALSE ,.message_complete_on_eof= TRUE ,.http_major= 1 ,.http_minor= 0 ,.status_code= 200 ,.response_status= "OK" ,.num_headers= 1 ,.headers= { { "Connection", "keep-alive" } } ,.body_size= 0 ,.body= "" } #define NO_BODY_HTTP10_KA_204 14 , {.name= "HTTP/1.0 with keep-alive and a 204 status" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.0 204 No content\r\n" "Connection: keep-alive\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 0 ,.status_code= 204 ,.response_status= "No content" ,.num_headers= 1 ,.headers= { { "Connection", "keep-alive" } } ,.body_size= 0 ,.body= "" } #define NO_BODY_HTTP11_KA_200 15 , {.name= "HTTP/1.1 with an EOF-terminated 200 status" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.1 200 OK\r\n" "\r\n" ,.should_keep_alive= FALSE ,.message_complete_on_eof= TRUE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 200 ,.response_status= "OK" ,.num_headers= 0 ,.headers={} ,.body_size= 0 ,.body= "" } #define NO_BODY_HTTP11_KA_204 16 , {.name= "HTTP/1.1 with a 204 status" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.1 204 No content\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 204 ,.response_status= "No content" ,.num_headers= 0 ,.headers={} ,.body_size= 0 ,.body= "" } #define NO_BODY_HTTP11_NOKA_204 17 , {.name= "HTTP/1.1 with a 204 status and keep-alive disabled" 
,.type= HTTP_RESPONSE ,.raw= "HTTP/1.1 204 No content\r\n" "Connection: close\r\n" "\r\n" ,.should_keep_alive= FALSE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 204 ,.response_status= "No content" ,.num_headers= 1 ,.headers= { { "Connection", "close" } } ,.body_size= 0 ,.body= "" } #define NO_BODY_HTTP11_KA_CHUNKED_200 18 , {.name= "HTTP/1.1 with chunked endocing and a 200 response" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.1 200 OK\r\n" "Transfer-Encoding: chunked\r\n" "\r\n" "0\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 200 ,.response_status= "OK" ,.num_headers= 1 ,.headers= { { "Transfer-Encoding", "chunked" } } ,.body_size= 0 ,.body= "" ,.num_chunks_complete= 1 } #if !HTTP_PARSER_STRICT #define SPACE_IN_FIELD_RES 19 /* Should handle spaces in header fields */ , {.name= "field space" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.1 200 OK\r\n" "Server: Microsoft-IIS/6.0\r\n" "X-Powered-By: ASP.NET\r\n" "en-US Content-Type: text/xml\r\n" /* this is the problem */ "Content-Type: text/xml\r\n" "Content-Length: 16\r\n" "Date: Fri, 23 Jul 2010 18:45:38 GMT\r\n" "Connection: keep-alive\r\n" "\r\n" "hello" /* fake body */ ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 200 ,.response_status= "OK" ,.num_headers= 7 ,.headers= { { "Server", "Microsoft-IIS/6.0" } , { "X-Powered-By", "ASP.NET" } , { "en-US Content-Type", "text/xml" } , { "Content-Type", "text/xml" } , { "Content-Length", "16" } , { "Date", "Fri, 23 Jul 2010 18:45:38 GMT" } , { "Connection", "keep-alive" } } ,.body= "hello" } #endif /* !HTTP_PARSER_STRICT */ #define AMAZON_COM 20 , {.name= "amazon.com" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.1 301 MovedPermanently\r\n" "Date: Wed, 15 May 2013 17:06:33 GMT\r\n" "Server: Server\r\n" "x-amz-id-1: 0GPHKXSJQ826RK7GZEB2\r\n" "p3p: policyref=\"http://www.amazon.com/w3c/p3p.xml\",CP=\"CAO DSP LAW CUR ADM IVAo IVDo 
CONo OTPo OUR DELi PUBi OTRi BUS PHY ONL UNI PUR FIN COM NAV INT DEM CNT STA HEA PRE LOC GOV OTC \"\r\n" "x-amz-id-2: STN69VZxIFSz9YJLbz1GDbxpbjG6Qjmmq5E3DxRhOUw+Et0p4hr7c/Q8qNcx4oAD\r\n" "Location: http://www.amazon.com/Dan-Brown/e/B000AP9DSU/ref=s9_pop_gw_al1?_encoding=UTF8&refinementId=618073011&pf_rd_m=ATVPDKIKX0DER&pf_rd_s=center-2&pf_rd_r=0SHYY5BZXN3KR20BNFAY&pf_rd_t=101&pf_rd_p=1263340922&pf_rd_i=507846\r\n" "Vary: Accept-Encoding,User-Agent\r\n" "Content-Type: text/html; charset=ISO-8859-1\r\n" "Transfer-Encoding: chunked\r\n" "\r\n" "1\r\n" "\n\r\n" "0\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 301 ,.response_status= "MovedPermanently" ,.num_headers= 9 ,.headers= { { "Date", "Wed, 15 May 2013 17:06:33 GMT" } , { "Server", "Server" } , { "x-amz-id-1", "0GPHKXSJQ826RK7GZEB2" } , { "p3p", "policyref=\"http://www.amazon.com/w3c/p3p.xml\",CP=\"CAO DSP LAW CUR ADM IVAo IVDo CONo OTPo OUR DELi PUBi OTRi BUS PHY ONL UNI PUR FIN COM NAV INT DEM CNT STA HEA PRE LOC GOV OTC \"" } , { "x-amz-id-2", "STN69VZxIFSz9YJLbz1GDbxpbjG6Qjmmq5E3DxRhOUw+Et0p4hr7c/Q8qNcx4oAD" } , { "Location", "http://www.amazon.com/Dan-Brown/e/B000AP9DSU/ref=s9_pop_gw_al1?_encoding=UTF8&refinementId=618073011&pf_rd_m=ATVPDKIKX0DER&pf_rd_s=center-2&pf_rd_r=0SHYY5BZXN3KR20BNFAY&pf_rd_t=101&pf_rd_p=1263340922&pf_rd_i=507846" } , { "Vary", "Accept-Encoding,User-Agent" } , { "Content-Type", "text/html; charset=ISO-8859-1" } , { "Transfer-Encoding", "chunked" } } ,.body= "\n" ,.num_chunks_complete= 2 ,.chunk_lengths= { 1 } } #define EMPTY_REASON_PHRASE_AFTER_SPACE 20 , {.name= "empty reason phrase after space" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.1 200 \r\n" "\r\n" ,.should_keep_alive= FALSE ,.message_complete_on_eof= TRUE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 200 ,.response_status= "" ,.num_headers= 0 ,.headers= {} ,.body= "" } #define CONTENT_LENGTH_X 21 , {.name= "Content-Length-X" ,.type= HTTP_RESPONSE ,.raw= 
"HTTP/1.1 200 OK\r\n" "Content-Length-X: 0\r\n" "Transfer-Encoding: chunked\r\n" "\r\n" "2\r\n" "OK\r\n" "0\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 200 ,.response_status= "OK" ,.num_headers= 2 ,.headers= { { "Content-Length-X", "0" } , { "Transfer-Encoding", "chunked" } } ,.body= "OK" ,.num_chunks_complete= 2 ,.chunk_lengths= { 2 } } #define HTTP_101_RESPONSE_WITH_UPGRADE_HEADER 22 , {.name= "HTTP 101 response with Upgrade header" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.1 101 Switching Protocols\r\n" "Connection: upgrade\r\n" "Upgrade: h2c\r\n" "\r\n" "proto" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 101 ,.response_status= "Switching Protocols" ,.upgrade= "proto" ,.num_headers= 2 ,.headers= { { "Connection", "upgrade" } , { "Upgrade", "h2c" } } } #define HTTP_101_RESPONSE_WITH_UPGRADE_HEADER_AND_CONTENT_LENGTH 23 , {.name= "HTTP 101 response with Upgrade and Content-Length header" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.1 101 Switching Protocols\r\n" "Connection: upgrade\r\n" "Upgrade: h2c\r\n" "Content-Length: 4\r\n" "\r\n" "body" "proto" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 101 ,.response_status= "Switching Protocols" ,.body= "body" ,.upgrade= "proto" ,.num_headers= 3 ,.headers= { { "Connection", "upgrade" } , { "Upgrade", "h2c" } , { "Content-Length", "4" } } } #define HTTP_101_RESPONSE_WITH_UPGRADE_HEADER_AND_TRANSFER_ENCODING 24 , {.name= "HTTP 101 response with Upgrade and Transfer-Encoding header" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.1 101 Switching Protocols\r\n" "Connection: upgrade\r\n" "Upgrade: h2c\r\n" "Transfer-Encoding: chunked\r\n" "\r\n" "2\r\n" "bo\r\n" "2\r\n" "dy\r\n" "0\r\n" "\r\n" "proto" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 101 ,.response_status= "Switching Protocols" 
,.body= "body" ,.upgrade= "proto" ,.num_headers= 3 ,.headers= { { "Connection", "upgrade" } , { "Upgrade", "h2c" } , { "Transfer-Encoding", "chunked" } } ,.num_chunks_complete= 3 ,.chunk_lengths= { 2, 2 } } #define HTTP_200_RESPONSE_WITH_UPGRADE_HEADER 25 , {.name= "HTTP 200 response with Upgrade header" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.1 200 OK\r\n" "Connection: upgrade\r\n" "Upgrade: h2c\r\n" "\r\n" "body" ,.should_keep_alive= FALSE ,.message_complete_on_eof= TRUE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 200 ,.response_status= "OK" ,.body= "body" ,.upgrade= NULL ,.num_headers= 2 ,.headers= { { "Connection", "upgrade" } , { "Upgrade", "h2c" } } } #define HTTP_200_RESPONSE_WITH_UPGRADE_HEADER_AND_CONTENT_LENGTH 26 , {.name= "HTTP 200 response with Upgrade and Content-Length header" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.1 200 OK\r\n" "Connection: upgrade\r\n" "Upgrade: h2c\r\n" "Content-Length: 4\r\n" "\r\n" "body" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 200 ,.response_status= "OK" ,.num_headers= 3 ,.body= "body" ,.upgrade= NULL ,.headers= { { "Connection", "upgrade" } , { "Upgrade", "h2c" } , { "Content-Length", "4" } } } #define HTTP_200_RESPONSE_WITH_UPGRADE_HEADER_AND_TRANSFER_ENCODING 27 , {.name= "HTTP 200 response with Upgrade and Transfer-Encoding header" ,.type= HTTP_RESPONSE ,.raw= "HTTP/1.1 200 OK\r\n" "Connection: upgrade\r\n" "Upgrade: h2c\r\n" "Transfer-Encoding: chunked\r\n" "\r\n" "2\r\n" "bo\r\n" "2\r\n" "dy\r\n" "0\r\n" "\r\n" ,.should_keep_alive= TRUE ,.message_complete_on_eof= FALSE ,.http_major= 1 ,.http_minor= 1 ,.status_code= 200 ,.response_status= "OK" ,.num_headers= 3 ,.body= "body" ,.upgrade= NULL ,.headers= { { "Connection", "upgrade" } , { "Upgrade", "h2c" } , { "Transfer-Encoding", "chunked" } } ,.num_chunks_complete= 3 ,.chunk_lengths= { 2, 2 } } , {.name= NULL } /* sentinel */ }; /* strnlen() is a POSIX.2008 addition. 
Can't rely on it being available so * define it ourselves. */ size_t strnlen(const char *s, size_t maxlen) { const char *p; p = memchr(s, '\0', maxlen); if (p == NULL) return maxlen; return p - s; } size_t strlncat(char *dst, size_t len, const char *src, size_t n) { size_t slen; size_t dlen; size_t rlen; size_t ncpy; slen = strnlen(src, n); dlen = strnlen(dst, len); if (dlen < len) { rlen = len - dlen; ncpy = slen < rlen ? slen : (rlen - 1); memcpy(dst + dlen, src, ncpy); dst[dlen + ncpy] = '\0'; } assert(len > slen + dlen); return slen + dlen; } size_t strlcat(char *dst, const char *src, size_t len) { return strlncat(dst, len, src, (size_t) -1); } size_t strlncpy(char *dst, size_t len, const char *src, size_t n) { size_t slen; size_t ncpy; slen = strnlen(src, n); if (len > 0) { ncpy = slen < len ? slen : (len - 1); memcpy(dst, src, ncpy); dst[ncpy] = '\0'; } assert(len > slen); return slen; } size_t strlcpy(char *dst, const char *src, size_t len) { return strlncpy(dst, len, src, (size_t) -1); } int request_url_cb (http_parser *p, const char *buf, size_t len) { assert(p == parser); strlncat(messages[num_messages].request_url, sizeof(messages[num_messages].request_url), buf, len); return 0; } int header_field_cb (http_parser *p, const char *buf, size_t len) { assert(p == parser); struct message *m = &messages[num_messages]; if (m->last_header_element != FIELD) m->num_headers++; strlncat(m->headers[m->num_headers-1][0], sizeof(m->headers[m->num_headers-1][0]), buf, len); m->last_header_element = FIELD; return 0; } int header_value_cb (http_parser *p, const char *buf, size_t len) { assert(p == parser); struct message *m = &messages[num_messages]; strlncat(m->headers[m->num_headers-1][1], sizeof(m->headers[m->num_headers-1][1]), buf, len); m->last_header_element = VALUE; return 0; } void check_body_is_final (const http_parser *p) { if (messages[num_messages].body_is_final) { fprintf(stderr, "\n\n *** Error http_body_is_final() should return 1 " "on last on_body 
callback call " "but it doesn't! ***\n\n"); assert(0); abort(); } messages[num_messages].body_is_final = http_body_is_final(p); } int body_cb (http_parser *p, const char *buf, size_t len) { assert(p == parser); strlncat(messages[num_messages].body, sizeof(messages[num_messages].body), buf, len); messages[num_messages].body_size += len; check_body_is_final(p); // printf("body_cb: '%s'\n", requests[num_messages].body); return 0; } int count_body_cb (http_parser *p, const char *buf, size_t len) { assert(p == parser); assert(buf); messages[num_messages].body_size += len; check_body_is_final(p); return 0; } int message_begin_cb (http_parser *p) { assert(p == parser); messages[num_messages].message_begin_cb_called = TRUE; return 0; } int headers_complete_cb (http_parser *p) { assert(p == parser); messages[num_messages].method = parser->method; messages[num_messages].status_code = parser->status_code; messages[num_messages].http_major = parser->http_major; messages[num_messages].http_minor = parser->http_minor; messages[num_messages].headers_complete_cb_called = TRUE; messages[num_messages].should_keep_alive = http_should_keep_alive(parser); return 0; } int message_complete_cb (http_parser *p) { assert(p == parser); if (messages[num_messages].should_keep_alive != http_should_keep_alive(parser)) { fprintf(stderr, "\n\n *** Error http_should_keep_alive() should have same " "value in both on_message_complete and on_headers_complete " "but it doesn't! ***\n\n"); assert(0); abort(); } if (messages[num_messages].body_size && http_body_is_final(p) && !messages[num_messages].body_is_final) { fprintf(stderr, "\n\n *** Error http_body_is_final() should return 1 " "on last on_body callback call " "but it doesn't! 
***\n\n"); assert(0); abort(); } messages[num_messages].message_complete_cb_called = TRUE; messages[num_messages].message_complete_on_eof = currently_parsing_eof; num_messages++; return 0; } int response_status_cb (http_parser *p, const char *buf, size_t len) { assert(p == parser); messages[num_messages].status_cb_called = TRUE; strlncat(messages[num_messages].response_status, sizeof(messages[num_messages].response_status), buf, len); return 0; } int chunk_header_cb (http_parser *p) { assert(p == parser); int chunk_idx = messages[num_messages].num_chunks; messages[num_messages].num_chunks++; if (chunk_idx < MAX_CHUNKS) { messages[num_messages].chunk_lengths[chunk_idx] = p->content_length; } return 0; } int chunk_complete_cb (http_parser *p) { assert(p == parser); /* Here we want to verify that each chunk_header_cb is matched by a * chunk_complete_cb, so not only should the total number of calls to * both callbacks be the same, but they also should be interleaved * properly */ assert(messages[num_messages].num_chunks == messages[num_messages].num_chunks_complete + 1); messages[num_messages].num_chunks_complete++; return 0; } /* These dontcall_* callbacks exist so that we can verify that when we're * paused, no additional callbacks are invoked */ int dontcall_message_begin_cb (http_parser *p) { if (p) { } // gcc fprintf(stderr, "\n\n*** on_message_begin() called on paused parser ***\n\n"); abort(); } int dontcall_header_field_cb (http_parser *p, const char *buf, size_t len) { if (p || buf || len) { } // gcc fprintf(stderr, "\n\n*** on_header_field() called on paused parser ***\n\n"); abort(); } int dontcall_header_value_cb (http_parser *p, const char *buf, size_t len) { if (p || buf || len) { } // gcc fprintf(stderr, "\n\n*** on_header_value() called on paused parser ***\n\n"); abort(); } int dontcall_request_url_cb (http_parser *p, const char *buf, size_t len) { if (p || buf || len) { } // gcc fprintf(stderr, "\n\n*** on_request_url() called on paused parser 
***\n\n"); abort(); } int dontcall_body_cb (http_parser *p, const char *buf, size_t len) { if (p || buf || len) { } // gcc fprintf(stderr, "\n\n*** on_body_cb() called on paused parser ***\n\n"); abort(); } int dontcall_headers_complete_cb (http_parser *p) { if (p) { } // gcc fprintf(stderr, "\n\n*** on_headers_complete() called on paused " "parser ***\n\n"); abort(); } int dontcall_message_complete_cb (http_parser *p) { if (p) { } // gcc fprintf(stderr, "\n\n*** on_message_complete() called on paused " "parser ***\n\n"); abort(); } int dontcall_response_status_cb (http_parser *p, const char *buf, size_t len) { if (p || buf || len) { } // gcc fprintf(stderr, "\n\n*** on_status() called on paused parser ***\n\n"); abort(); } int dontcall_chunk_header_cb (http_parser *p) { if (p) { } // gcc fprintf(stderr, "\n\n*** on_chunk_header() called on paused parser ***\n\n"); exit(1); } int dontcall_chunk_complete_cb (http_parser *p) { if (p) { } // gcc fprintf(stderr, "\n\n*** on_chunk_complete() " "called on paused parser ***\n\n"); exit(1); } static http_parser_settings settings_dontcall = {.on_message_begin = dontcall_message_begin_cb ,.on_header_field = dontcall_header_field_cb ,.on_header_value = dontcall_header_value_cb ,.on_url = dontcall_request_url_cb ,.on_status = dontcall_response_status_cb ,.on_body = dontcall_body_cb ,.on_headers_complete = dontcall_headers_complete_cb ,.on_message_complete = dontcall_message_complete_cb ,.on_chunk_header = dontcall_chunk_header_cb ,.on_chunk_complete = dontcall_chunk_complete_cb }; /* These pause_* callbacks always pause the parser and just invoke the regular * callback that tracks content. Before returning, we overwrite the parser * settings to point to the _dontcall variety so that we can verify that * the pause actually did, you know, pause. 
 */

int
pause_message_begin_cb (http_parser *p)
{
  http_parser_pause(p, 1);
  *current_pause_parser = settings_dontcall;
  return message_begin_cb(p);
}

int
pause_header_field_cb (http_parser *p, const char *buf, size_t len)
{
  http_parser_pause(p, 1);
  *current_pause_parser = settings_dontcall;
  return header_field_cb(p, buf, len);
}

int
pause_header_value_cb (http_parser *p, const char *buf, size_t len)
{
  http_parser_pause(p, 1);
  *current_pause_parser = settings_dontcall;
  return header_value_cb(p, buf, len);
}

int
pause_request_url_cb (http_parser *p, const char *buf, size_t len)
{
  http_parser_pause(p, 1);
  *current_pause_parser = settings_dontcall;
  return request_url_cb(p, buf, len);
}

int
pause_body_cb (http_parser *p, const char *buf, size_t len)
{
  http_parser_pause(p, 1);
  *current_pause_parser = settings_dontcall;
  return body_cb(p, buf, len);
}

int
pause_headers_complete_cb (http_parser *p)
{
  http_parser_pause(p, 1);
  *current_pause_parser = settings_dontcall;
  return headers_complete_cb(p);
}

int
pause_message_complete_cb (http_parser *p)
{
  http_parser_pause(p, 1);
  *current_pause_parser = settings_dontcall;
  return message_complete_cb(p);
}

int
pause_response_status_cb (http_parser *p, const char *buf, size_t len)
{
  http_parser_pause(p, 1);
  *current_pause_parser = settings_dontcall;
  return response_status_cb(p, buf, len);
}

int
pause_chunk_header_cb (http_parser *p)
{
  http_parser_pause(p, 1);
  *current_pause_parser = settings_dontcall;
  return chunk_header_cb(p);
}

int
pause_chunk_complete_cb (http_parser *p)
{
  http_parser_pause(p, 1);
  *current_pause_parser = settings_dontcall;
  return chunk_complete_cb(p);
}

/* Returning 1 from on_headers_complete tells the parser to skip the body
 * (CONNECT-style handling). */
int
connect_headers_complete_cb (http_parser *p)
{
  headers_complete_cb(p);
  return 1;
}

int
connect_message_complete_cb (http_parser *p)
{
  messages[num_messages].should_keep_alive = http_should_keep_alive(parser);
  return message_complete_cb(p);
}

/* Callback table whose callbacks pause the parser on every event. */
static http_parser_settings settings_pause =
  {.on_message_begin = pause_message_begin_cb
  ,.on_header_field = pause_header_field_cb
  ,.on_header_value = pause_header_value_cb
  ,.on_url = pause_request_url_cb
  ,.on_status = pause_response_status_cb
  ,.on_body = pause_body_cb
  ,.on_headers_complete = pause_headers_complete_cb
  ,.on_message_complete = pause_message_complete_cb
  ,.on_chunk_header = pause_chunk_header_cb
  ,.on_chunk_complete = pause_chunk_complete_cb
  };

/* Standard callback table used by most tests. */
static http_parser_settings settings =
  {.on_message_begin = message_begin_cb
  ,.on_header_field = header_field_cb
  ,.on_header_value = header_value_cb
  ,.on_url = request_url_cb
  ,.on_status = response_status_cb
  ,.on_body = body_cb
  ,.on_headers_complete = headers_complete_cb
  ,.on_message_complete = message_complete_cb
  ,.on_chunk_header = chunk_header_cb
  ,.on_chunk_complete = chunk_complete_cb
  };

/* Like `settings` but only counts body bytes instead of copying them. */
static http_parser_settings settings_count_body =
  {.on_message_begin = message_begin_cb
  ,.on_header_field = header_field_cb
  ,.on_header_value = header_value_cb
  ,.on_url = request_url_cb
  ,.on_status = response_status_cb
  ,.on_body = count_body_cb
  ,.on_headers_complete = headers_complete_cb
  ,.on_message_complete = message_complete_cb
  ,.on_chunk_header = chunk_header_cb
  ,.on_chunk_complete = chunk_complete_cb
  };

/* CONNECT-style table: skips the body and must never see on_body. */
static http_parser_settings settings_connect =
  {.on_message_begin = message_begin_cb
  ,.on_header_field = header_field_cb
  ,.on_header_value = header_value_cb
  ,.on_url = request_url_cb
  ,.on_status = response_status_cb
  ,.on_body = dontcall_body_cb
  ,.on_headers_complete = connect_headers_complete_cb
  ,.on_message_complete = connect_message_complete_cb
  ,.on_chunk_header = chunk_header_cb
  ,.on_chunk_complete = chunk_complete_cb
  };

/* No callbacks at all; used by tests that only check parser error codes. */
static http_parser_settings settings_null =
  {.on_message_begin = 0
  ,.on_header_field = 0
  ,.on_header_value = 0
  ,.on_url = 0
  ,.on_status = 0
  ,.on_body = 0
  ,.on_headers_complete = 0
  ,.on_message_complete = 0
  ,.on_chunk_header = 0
  ,.on_chunk_complete = 0
  };

/* Allocates the global `parser` and resets the captured-message state.
 * Must be balanced by parser_free(). */
void
parser_init (enum http_parser_type type)
{
  num_messages = 0;

  assert(parser == NULL);
  parser = malloc(sizeof(http_parser));

  http_parser_init(parser,
type); memset(&messages, 0, sizeof messages); } void parser_free () { assert(parser); free(parser); parser = NULL; } size_t parse (const char *buf, size_t len) { size_t nparsed; currently_parsing_eof = (len == 0); nparsed = http_parser_execute(parser, &settings, buf, len); return nparsed; } size_t parse_count_body (const char *buf, size_t len) { size_t nparsed; currently_parsing_eof = (len == 0); nparsed = http_parser_execute(parser, &settings_count_body, buf, len); return nparsed; } size_t parse_pause (const char *buf, size_t len) { size_t nparsed; http_parser_settings s = settings_pause; currently_parsing_eof = (len == 0); current_pause_parser = &s; nparsed = http_parser_execute(parser, current_pause_parser, buf, len); return nparsed; } size_t parse_connect (const char *buf, size_t len) { size_t nparsed; currently_parsing_eof = (len == 0); nparsed = http_parser_execute(parser, &settings_connect, buf, len); return nparsed; } static inline int check_str_eq (const struct message *m, const char *prop, const char *expected, const char *found) { if ((expected == NULL) != (found == NULL)) { printf("\n*** Error: %s in '%s' ***\n\n", prop, m->name); printf("expected %s\n", (expected == NULL) ? "NULL" : expected); printf(" found %s\n", (found == NULL) ? 
"NULL" : found); return 0; } if (expected != NULL && 0 != strcmp(expected, found)) { printf("\n*** Error: %s in '%s' ***\n\n", prop, m->name); printf("expected '%s'\n", expected); printf(" found '%s'\n", found); return 0; } return 1; } static inline int check_num_eq (const struct message *m, const char *prop, int expected, int found) { if (expected != found) { printf("\n*** Error: %s in '%s' ***\n\n", prop, m->name); printf("expected %d\n", expected); printf(" found %d\n", found); return 0; } return 1; } #define MESSAGE_CHECK_STR_EQ(expected, found, prop) \ if (!check_str_eq(expected, #prop, expected->prop, found->prop)) return 0 #define MESSAGE_CHECK_NUM_EQ(expected, found, prop) \ if (!check_num_eq(expected, #prop, expected->prop, found->prop)) return 0 #define MESSAGE_CHECK_URL_EQ(u, expected, found, prop, fn) \ do { \ char ubuf[256]; \ \ if ((u)->field_set & (1 << (fn))) { \ memcpy(ubuf, (found)->request_url + (u)->field_data[(fn)].off, \ (u)->field_data[(fn)].len); \ ubuf[(u)->field_data[(fn)].len] = '\0'; \ } else { \ ubuf[0] = '\0'; \ } \ \ check_str_eq(expected, #prop, expected->prop, ubuf); \ } while(0) int message_eq (int index, int connect, const struct message *expected) { int i; struct message *m = &messages[index]; MESSAGE_CHECK_NUM_EQ(expected, m, http_major); MESSAGE_CHECK_NUM_EQ(expected, m, http_minor); if (expected->type == HTTP_REQUEST) { MESSAGE_CHECK_NUM_EQ(expected, m, method); } else { MESSAGE_CHECK_NUM_EQ(expected, m, status_code); MESSAGE_CHECK_STR_EQ(expected, m, response_status); assert(m->status_cb_called); } if (!connect) { MESSAGE_CHECK_NUM_EQ(expected, m, should_keep_alive); MESSAGE_CHECK_NUM_EQ(expected, m, message_complete_on_eof); } assert(m->message_begin_cb_called); assert(m->headers_complete_cb_called); assert(m->message_complete_cb_called); MESSAGE_CHECK_STR_EQ(expected, m, request_url); /* Check URL components; we can't do this w/ CONNECT since it doesn't * send us a well-formed URL. 
*/ if (*m->request_url && m->method != HTTP_CONNECT) { struct http_parser_url u; if (http_parser_parse_url(m->request_url, strlen(m->request_url), 0, &u)) { fprintf(stderr, "\n\n*** failed to parse URL %s ***\n\n", m->request_url); abort(); } if (expected->host) { MESSAGE_CHECK_URL_EQ(&u, expected, m, host, UF_HOST); } if (expected->userinfo) { MESSAGE_CHECK_URL_EQ(&u, expected, m, userinfo, UF_USERINFO); } m->port = (u.field_set & (1 << UF_PORT)) ? u.port : 0; MESSAGE_CHECK_URL_EQ(&u, expected, m, query_string, UF_QUERY); MESSAGE_CHECK_URL_EQ(&u, expected, m, fragment, UF_FRAGMENT); MESSAGE_CHECK_URL_EQ(&u, expected, m, request_path, UF_PATH); MESSAGE_CHECK_NUM_EQ(expected, m, port); } if (connect) { check_num_eq(m, "body_size", 0, m->body_size); } else if (expected->body_size) { MESSAGE_CHECK_NUM_EQ(expected, m, body_size); } else { MESSAGE_CHECK_STR_EQ(expected, m, body); } if (connect) { check_num_eq(m, "num_chunks_complete", 0, m->num_chunks_complete); } else { assert(m->num_chunks == m->num_chunks_complete); MESSAGE_CHECK_NUM_EQ(expected, m, num_chunks_complete); for (i = 0; i < m->num_chunks && i < MAX_CHUNKS; i++) { MESSAGE_CHECK_NUM_EQ(expected, m, chunk_lengths[i]); } } MESSAGE_CHECK_NUM_EQ(expected, m, num_headers); int r; for (i = 0; i < m->num_headers; i++) { r = check_str_eq(expected, "header field", expected->headers[i][0], m->headers[i][0]); if (!r) return 0; r = check_str_eq(expected, "header value", expected->headers[i][1], m->headers[i][1]); if (!r) return 0; } if (!connect) { MESSAGE_CHECK_STR_EQ(expected, m, upgrade); } return 1; } /* Given a sequence of varargs messages, return the number of them that the * parser should successfully parse, taking into account that upgraded * messages prevent all subsequent messages from being parsed. */ size_t count_parsed_messages(const size_t nmsgs, ...) 
{ size_t i; va_list ap; va_start(ap, nmsgs); for (i = 0; i < nmsgs; i++) { struct message *m = va_arg(ap, struct message *); if (m->upgrade) { va_end(ap); return i + 1; } } va_end(ap); return nmsgs; } /* Given a sequence of bytes and the number of these that we were able to * parse, verify that upgrade bodies are correct. */ void upgrade_message_fix(char *body, const size_t nread, const size_t nmsgs, ...) { va_list ap; size_t i; size_t off = 0; va_start(ap, nmsgs); for (i = 0; i < nmsgs; i++) { struct message *m = va_arg(ap, struct message *); off += strlen(m->raw); if (m->upgrade) { off -= strlen(m->upgrade); /* Check the portion of the response after its specified upgrade */ if (!check_str_eq(m, "upgrade", body + off, body + nread)) { abort(); } /* Fix up the response so that message_eq() will verify the beginning * of the upgrade */ *(body + nread + strlen(m->upgrade)) = '\0'; messages[num_messages -1 ].upgrade = body + nread; va_end(ap); return; } } va_end(ap); printf("\n\n*** Error: expected a message with upgrade ***\n"); abort(); } static void print_error (const char *raw, size_t error_location) { fprintf(stderr, "\n*** %s ***\n\n", http_errno_description(HTTP_PARSER_ERRNO(parser))); int this_line = 0, char_len = 0; size_t i, j, len = strlen(raw), error_location_line = 0; for (i = 0; i < len; i++) { if (i == error_location) this_line = 1; switch (raw[i]) { case '\r': char_len = 2; fprintf(stderr, "\\r"); break; case '\n': fprintf(stderr, "\\n\n"); if (this_line) goto print; error_location_line = 0; continue; default: char_len = 1; fputc(raw[i], stderr); break; } if (!this_line) error_location_line += char_len; } fprintf(stderr, "[eof]\n"); print: for (j = 0; j < error_location_line; j++) { fputc(' ', stderr); } fprintf(stderr, "^\n\nerror location: %u\n", (unsigned int)error_location); } void test_preserve_data (void) { char my_data[] = "application-specific data"; http_parser parser; parser.data = my_data; http_parser_init(&parser, HTTP_REQUEST); if 
(parser.data != my_data) { printf("\n*** parser.data not preserved accross http_parser_init ***\n\n"); abort(); } } struct url_test { const char *name; const char *url; int is_connect; struct http_parser_url u; int rv; }; const struct url_test url_tests[] = { {.name="proxy request" ,.url="http://hostname/" ,.is_connect=0 ,.u= {.field_set=(1 << UF_SCHEMA) | (1 << UF_HOST) | (1 << UF_PATH) ,.port=0 ,.field_data= {{ 0, 4 } /* UF_SCHEMA */ ,{ 7, 8 } /* UF_HOST */ ,{ 0, 0 } /* UF_PORT */ ,{ 15, 1 } /* UF_PATH */ ,{ 0, 0 } /* UF_QUERY */ ,{ 0, 0 } /* UF_FRAGMENT */ ,{ 0, 0 } /* UF_USERINFO */ } } ,.rv=0 } , {.name="proxy request with port" ,.url="http://hostname:444/" ,.is_connect=0 ,.u= {.field_set=(1 << UF_SCHEMA) | (1 << UF_HOST) | (1 << UF_PORT) | (1 << UF_PATH) ,.port=444 ,.field_data= {{ 0, 4 } /* UF_SCHEMA */ ,{ 7, 8 } /* UF_HOST */ ,{ 16, 3 } /* UF_PORT */ ,{ 19, 1 } /* UF_PATH */ ,{ 0, 0 } /* UF_QUERY */ ,{ 0, 0 } /* UF_FRAGMENT */ ,{ 0, 0 } /* UF_USERINFO */ } } ,.rv=0 } , {.name="CONNECT request" ,.url="hostname:443" ,.is_connect=1 ,.u= {.field_set=(1 << UF_HOST) | (1 << UF_PORT) ,.port=443 ,.field_data= {{ 0, 0 } /* UF_SCHEMA */ ,{ 0, 8 } /* UF_HOST */ ,{ 9, 3 } /* UF_PORT */ ,{ 0, 0 } /* UF_PATH */ ,{ 0, 0 } /* UF_QUERY */ ,{ 0, 0 } /* UF_FRAGMENT */ ,{ 0, 0 } /* UF_USERINFO */ } } ,.rv=0 } , {.name="CONNECT request but not connect" ,.url="hostname:443" ,.is_connect=0 ,.rv=1 } , {.name="proxy ipv6 request" ,.url="http://[1:2::3:4]/" ,.is_connect=0 ,.u= {.field_set=(1 << UF_SCHEMA) | (1 << UF_HOST) | (1 << UF_PATH) ,.port=0 ,.field_data= {{ 0, 4 } /* UF_SCHEMA */ ,{ 8, 8 } /* UF_HOST */ ,{ 0, 0 } /* UF_PORT */ ,{ 17, 1 } /* UF_PATH */ ,{ 0, 0 } /* UF_QUERY */ ,{ 0, 0 } /* UF_FRAGMENT */ ,{ 0, 0 } /* UF_USERINFO */ } } ,.rv=0 } , {.name="proxy ipv6 request with port" ,.url="http://[1:2::3:4]:67/" ,.is_connect=0 ,.u= {.field_set=(1 << UF_SCHEMA) | (1 << UF_HOST) | (1 << UF_PORT) | (1 << UF_PATH) ,.port=67 ,.field_data= {{ 0, 4 } /* UF_SCHEMA */ ,{ 8, 8 } /* 
UF_HOST */ ,{ 18, 2 } /* UF_PORT */ ,{ 20, 1 } /* UF_PATH */ ,{ 0, 0 } /* UF_QUERY */ ,{ 0, 0 } /* UF_FRAGMENT */ ,{ 0, 0 } /* UF_USERINFO */ } } ,.rv=0 } , {.name="CONNECT ipv6 address" ,.url="[1:2::3:4]:443" ,.is_connect=1 ,.u= {.field_set=(1 << UF_HOST) | (1 << UF_PORT) ,.port=443 ,.field_data= {{ 0, 0 } /* UF_SCHEMA */ ,{ 1, 8 } /* UF_HOST */ ,{ 11, 3 } /* UF_PORT */ ,{ 0, 0 } /* UF_PATH */ ,{ 0, 0 } /* UF_QUERY */ ,{ 0, 0 } /* UF_FRAGMENT */ ,{ 0, 0 } /* UF_USERINFO */ } } ,.rv=0 } , {.name="ipv4 in ipv6 address" ,.url="http://[2001:0000:0000:0000:0000:0000:1.9.1.1]/" ,.is_connect=0 ,.u= {.field_set=(1 << UF_SCHEMA) | (1 << UF_HOST) | (1 << UF_PATH) ,.port=0 ,.field_data= {{ 0, 4 } /* UF_SCHEMA */ ,{ 8, 37 } /* UF_HOST */ ,{ 0, 0 } /* UF_PORT */ ,{ 46, 1 } /* UF_PATH */ ,{ 0, 0 } /* UF_QUERY */ ,{ 0, 0 } /* UF_FRAGMENT */ ,{ 0, 0 } /* UF_USERINFO */ } } ,.rv=0 } , {.name="extra ? in query string" ,.url="http://a.tbcdn.cn/p/fp/2010c/??fp-header-min.css,fp-base-min.css," "fp-channel-min.css,fp-product-min.css,fp-mall-min.css,fp-category-min.css," "fp-sub-min.css,fp-gdp4p-min.css,fp-css3-min.css,fp-misc-min.css?t=20101022.css" ,.is_connect=0 ,.u= {.field_set=(1<field_set, u->port); for (i = 0; i < UF_MAX; i++) { if ((u->field_set & (1 << i)) == 0) { printf("\tfield_data[%u]: unset\n", i); continue; } printf("\tfield_data[%u]: off: %u len: %u part: \"%.*s\n\"", i, u->field_data[i].off, u->field_data[i].len, u->field_data[i].len, url + u->field_data[i].off); } } void test_parse_url (void) { struct http_parser_url u; const struct url_test *test; unsigned int i; int rv; for (i = 0; i < (sizeof(url_tests) / sizeof(url_tests[0])); i++) { test = &url_tests[i]; memset(&u, 0, sizeof(u)); rv = http_parser_parse_url(test->url, strlen(test->url), test->is_connect, &u); if (test->rv == 0) { if (rv != 0) { printf("\n*** http_parser_parse_url(\"%s\") \"%s\" test failed, " "unexpected rv %d ***\n\n", test->url, test->name, rv); abort(); } if (memcmp(&u, &test->u, sizeof(u)) != 
0) { printf("\n*** http_parser_parse_url(\"%s\") \"%s\" failed ***\n", test->url, test->name); printf("target http_parser_url:\n"); dump_url(test->url, &test->u); printf("result http_parser_url:\n"); dump_url(test->url, &u); abort(); } } else { /* test->rv != 0 */ if (rv == 0) { printf("\n*** http_parser_parse_url(\"%s\") \"%s\" test failed, " "unexpected rv %d ***\n\n", test->url, test->name, rv); abort(); } } } } void test_method_str (void) { assert(0 == strcmp("GET", http_method_str(HTTP_GET))); assert(0 == strcmp("", http_method_str(1337))); } void test_message (const struct message *message) { size_t raw_len = strlen(message->raw); size_t msg1len; for (msg1len = 0; msg1len < raw_len; msg1len++) { parser_init(message->type); size_t read; const char *msg1 = message->raw; const char *msg2 = msg1 + msg1len; size_t msg2len = raw_len - msg1len; if (msg1len) { read = parse(msg1, msg1len); if (message->upgrade && parser->upgrade && num_messages > 0) { messages[num_messages - 1].upgrade = msg1 + read; goto test; } if (read != msg1len) { print_error(msg1, read); abort(); } } read = parse(msg2, msg2len); if (message->upgrade && parser->upgrade) { messages[num_messages - 1].upgrade = msg2 + read; goto test; } if (read != msg2len) { print_error(msg2, read); abort(); } read = parse(NULL, 0); if (read != 0) { print_error(message->raw, read); abort(); } test: if (num_messages != 1) { printf("\n*** num_messages != 1 after testing '%s' ***\n\n", message->name); abort(); } if(!message_eq(0, 0, message)) abort(); parser_free(); } } void test_message_count_body (const struct message *message) { parser_init(message->type); size_t read; size_t l = strlen(message->raw); size_t i, toread; size_t chunk = 4024; for (i = 0; i < l; i+= chunk) { toread = MIN(l-i, chunk); read = parse_count_body(message->raw + i, toread); if (read != toread) { print_error(message->raw, read); abort(); } } read = parse_count_body(NULL, 0); if (read != 0) { print_error(message->raw, read); abort(); } if 
(num_messages != 1) { printf("\n*** num_messages != 1 after testing '%s' ***\n\n", message->name); abort(); } if(!message_eq(0, 0, message)) abort(); parser_free(); } void test_simple_type (const char *buf, enum http_errno err_expected, enum http_parser_type type) { parser_init(type); enum http_errno err; parse(buf, strlen(buf)); err = HTTP_PARSER_ERRNO(parser); parse(NULL, 0); parser_free(); /* In strict mode, allow us to pass with an unexpected HPE_STRICT as * long as the caller isn't expecting success. */ #if HTTP_PARSER_STRICT if (err_expected != err && err_expected != HPE_OK && err != HPE_STRICT) { #else if (err_expected != err) { #endif fprintf(stderr, "\n*** test_simple expected %s, but saw %s ***\n\n%s\n", http_errno_name(err_expected), http_errno_name(err), buf); abort(); } } void test_simple (const char *buf, enum http_errno err_expected) { test_simple_type(buf, err_expected, HTTP_REQUEST); } void test_invalid_header_content (int req, const char* str) { http_parser parser; http_parser_init(&parser, req ? HTTP_REQUEST : HTTP_RESPONSE); size_t parsed; const char *buf; buf = req ? "GET / HTTP/1.1\r\n" : "HTTP/1.1 200 OK\r\n"; parsed = http_parser_execute(&parser, &settings_null, buf, strlen(buf)); assert(parsed == strlen(buf)); buf = str; size_t buflen = strlen(buf); parsed = http_parser_execute(&parser, &settings_null, buf, buflen); if (parsed != buflen) { assert(HTTP_PARSER_ERRNO(&parser) == HPE_INVALID_HEADER_TOKEN); return; } fprintf(stderr, "\n*** Error expected but none in invalid header content test ***\n"); abort(); } void test_invalid_header_field_content_error (int req) { test_invalid_header_content(req, "Foo: F\01ailure"); test_invalid_header_content(req, "Foo: B\02ar"); } void test_invalid_header_field (int req, const char* str) { http_parser parser; http_parser_init(&parser, req ? HTTP_REQUEST : HTTP_RESPONSE); size_t parsed; const char *buf; buf = req ? 
"GET / HTTP/1.1\r\n" : "HTTP/1.1 200 OK\r\n"; parsed = http_parser_execute(&parser, &settings_null, buf, strlen(buf)); assert(parsed == strlen(buf)); buf = str; size_t buflen = strlen(buf); parsed = http_parser_execute(&parser, &settings_null, buf, buflen); if (parsed != buflen) { assert(HTTP_PARSER_ERRNO(&parser) == HPE_INVALID_HEADER_TOKEN); return; } fprintf(stderr, "\n*** Error expected but none in invalid header token test ***\n"); abort(); } void test_invalid_header_field_token_error (int req) { test_invalid_header_field(req, "Fo@: Failure"); test_invalid_header_field(req, "Foo\01\test: Bar"); } void test_double_content_length_error (int req) { http_parser parser; http_parser_init(&parser, req ? HTTP_REQUEST : HTTP_RESPONSE); size_t parsed; const char *buf; buf = req ? "GET / HTTP/1.1\r\n" : "HTTP/1.1 200 OK\r\n"; parsed = http_parser_execute(&parser, &settings_null, buf, strlen(buf)); assert(parsed == strlen(buf)); buf = "Content-Length: 0\r\nContent-Length: 1\r\n\r\n"; size_t buflen = strlen(buf); parsed = http_parser_execute(&parser, &settings_null, buf, buflen); if (parsed != buflen) { assert(HTTP_PARSER_ERRNO(&parser) == HPE_UNEXPECTED_CONTENT_LENGTH); return; } fprintf(stderr, "\n*** Error expected but none in double content-length test ***\n"); abort(); } void test_chunked_content_length_error (int req) { http_parser parser; http_parser_init(&parser, req ? HTTP_REQUEST : HTTP_RESPONSE); size_t parsed; const char *buf; buf = req ? 
"GET / HTTP/1.1\r\n" : "HTTP/1.1 200 OK\r\n"; parsed = http_parser_execute(&parser, &settings_null, buf, strlen(buf)); assert(parsed == strlen(buf)); buf = "Transfer-Encoding: chunked\r\nContent-Length: 1\r\n\r\n"; size_t buflen = strlen(buf); parsed = http_parser_execute(&parser, &settings_null, buf, buflen); if (parsed != buflen) { assert(HTTP_PARSER_ERRNO(&parser) == HPE_UNEXPECTED_CONTENT_LENGTH); return; } fprintf(stderr, "\n*** Error expected but none in chunked content-length test ***\n"); abort(); } void test_header_cr_no_lf_error (int req) { http_parser parser; http_parser_init(&parser, req ? HTTP_REQUEST : HTTP_RESPONSE); size_t parsed; const char *buf; buf = req ? "GET / HTTP/1.1\r\n" : "HTTP/1.1 200 OK\r\n"; parsed = http_parser_execute(&parser, &settings_null, buf, strlen(buf)); assert(parsed == strlen(buf)); buf = "Foo: 1\rBar: 1\r\n\r\n"; size_t buflen = strlen(buf); parsed = http_parser_execute(&parser, &settings_null, buf, buflen); if (parsed != buflen) { assert(HTTP_PARSER_ERRNO(&parser) == HPE_LF_EXPECTED); return; } fprintf(stderr, "\n*** Error expected but none in header whitespace test ***\n"); abort(); } void test_header_overflow_error (int req) { http_parser parser; http_parser_init(&parser, req ? HTTP_REQUEST : HTTP_RESPONSE); size_t parsed; const char *buf; buf = req ? 
"GET / HTTP/1.1\r\n" : "HTTP/1.0 200 OK\r\n"; parsed = http_parser_execute(&parser, &settings_null, buf, strlen(buf)); assert(parsed == strlen(buf)); buf = "header-key: header-value\r\n"; size_t buflen = strlen(buf); int i; for (i = 0; i < 10000; i++) { parsed = http_parser_execute(&parser, &settings_null, buf, buflen); if (parsed != buflen) { //fprintf(stderr, "error found on iter %d\n", i); assert(HTTP_PARSER_ERRNO(&parser) == HPE_HEADER_OVERFLOW); return; } } fprintf(stderr, "\n*** Error expected but none in header overflow test ***\n"); abort(); } void test_header_nread_value () { http_parser parser; http_parser_init(&parser, HTTP_REQUEST); size_t parsed; const char *buf; buf = "GET / HTTP/1.1\r\nheader: value\nhdr: value\r\n"; parsed = http_parser_execute(&parser, &settings_null, buf, strlen(buf)); assert(parsed == strlen(buf)); assert(parser.nread == strlen(buf)); } static void test_content_length_overflow (const char *buf, size_t buflen, int expect_ok) { http_parser parser; http_parser_init(&parser, HTTP_RESPONSE); http_parser_execute(&parser, &settings_null, buf, buflen); if (expect_ok) assert(HTTP_PARSER_ERRNO(&parser) == HPE_OK); else assert(HTTP_PARSER_ERRNO(&parser) == HPE_INVALID_CONTENT_LENGTH); } void test_header_content_length_overflow_error (void) { #define X(size) \ "HTTP/1.1 200 OK\r\n" \ "Content-Length: " #size "\r\n" \ "\r\n" const char a[] = X(1844674407370955160); /* 2^64 / 10 - 1 */ const char b[] = X(18446744073709551615); /* 2^64-1 */ const char c[] = X(18446744073709551616); /* 2^64 */ #undef X test_content_length_overflow(a, sizeof(a) - 1, 1); /* expect ok */ test_content_length_overflow(b, sizeof(b) - 1, 0); /* expect failure */ test_content_length_overflow(c, sizeof(c) - 1, 0); /* expect failure */ } void test_chunk_content_length_overflow_error (void) { #define X(size) \ "HTTP/1.1 200 OK\r\n" \ "Transfer-Encoding: chunked\r\n" \ "\r\n" \ #size "\r\n" \ "..." 
const char a[] = X(FFFFFFFFFFFFFFE); /* 2^64 / 16 - 1 */ const char b[] = X(FFFFFFFFFFFFFFFF); /* 2^64-1 */ const char c[] = X(10000000000000000); /* 2^64 */ #undef X test_content_length_overflow(a, sizeof(a) - 1, 1); /* expect ok */ test_content_length_overflow(b, sizeof(b) - 1, 0); /* expect failure */ test_content_length_overflow(c, sizeof(c) - 1, 0); /* expect failure */ } void test_no_overflow_long_body (int req, size_t length) { http_parser parser; http_parser_init(&parser, req ? HTTP_REQUEST : HTTP_RESPONSE); size_t parsed; size_t i; char buf1[3000]; size_t buf1len = sprintf(buf1, "%s\r\nConnection: Keep-Alive\r\nContent-Length: %lu\r\n\r\n", req ? "POST / HTTP/1.0" : "HTTP/1.0 200 OK", (unsigned long)length); parsed = http_parser_execute(&parser, &settings_null, buf1, buf1len); if (parsed != buf1len) goto err; for (i = 0; i < length; i++) { char foo = 'a'; parsed = http_parser_execute(&parser, &settings_null, &foo, 1); if (parsed != 1) goto err; } parsed = http_parser_execute(&parser, &settings_null, buf1, buf1len); if (parsed != buf1len) goto err; return; err: fprintf(stderr, "\n*** error in test_no_overflow_long_body %s of length %lu ***\n", req ? 
"REQUEST" : "RESPONSE", (unsigned long)length); abort(); } void test_multiple3 (const struct message *r1, const struct message *r2, const struct message *r3) { int message_count = count_parsed_messages(3, r1, r2, r3); char total[ strlen(r1->raw) + strlen(r2->raw) + strlen(r3->raw) + 1 ]; total[0] = '\0'; strcat(total, r1->raw); strcat(total, r2->raw); strcat(total, r3->raw); parser_init(r1->type); size_t read; read = parse(total, strlen(total)); if (parser->upgrade) { upgrade_message_fix(total, read, 3, r1, r2, r3); goto test; } if (read != strlen(total)) { print_error(total, read); abort(); } read = parse(NULL, 0); if (read != 0) { print_error(total, read); abort(); } test: if (message_count != num_messages) { fprintf(stderr, "\n\n*** Parser didn't see 3 messages only %d *** \n", num_messages); abort(); } if (!message_eq(0, 0, r1)) abort(); if (message_count > 1 && !message_eq(1, 0, r2)) abort(); if (message_count > 2 && !message_eq(2, 0, r3)) abort(); parser_free(); } /* SCAN through every possible breaking to make sure the * parser can handle getting the content in any chunks that * might come from the socket */ void test_scan (const struct message *r1, const struct message *r2, const struct message *r3) { char total[80*1024] = "\0"; char buf1[80*1024] = "\0"; char buf2[80*1024] = "\0"; char buf3[80*1024] = "\0"; strcat(total, r1->raw); strcat(total, r2->raw); strcat(total, r3->raw); size_t read; int total_len = strlen(total); int total_ops = 2 * (total_len - 1) * (total_len - 2) / 2; int ops = 0 ; size_t buf1_len, buf2_len, buf3_len; int message_count = count_parsed_messages(3, r1, r2, r3); int i,j,type_both; for (type_both = 0; type_both < 2; type_both ++ ) { for (j = 2; j < total_len; j ++ ) { for (i = 1; i < j; i ++ ) { if (ops % 1000 == 0) { printf("\b\b\b\b%3.0f%%", 100 * (float)ops /(float)total_ops); fflush(stdout); } ops += 1; parser_init(type_both ? 
HTTP_BOTH : r1->type); buf1_len = i; strlncpy(buf1, sizeof(buf1), total, buf1_len); buf1[buf1_len] = 0; buf2_len = j - i; strlncpy(buf2, sizeof(buf1), total+i, buf2_len); buf2[buf2_len] = 0; buf3_len = total_len - j; strlncpy(buf3, sizeof(buf1), total+j, buf3_len); buf3[buf3_len] = 0; read = parse(buf1, buf1_len); if (parser->upgrade) goto test; if (read != buf1_len) { print_error(buf1, read); goto error; } read += parse(buf2, buf2_len); if (parser->upgrade) goto test; if (read != buf1_len + buf2_len) { print_error(buf2, read); goto error; } read += parse(buf3, buf3_len); if (parser->upgrade) goto test; if (read != buf1_len + buf2_len + buf3_len) { print_error(buf3, read); goto error; } parse(NULL, 0); test: if (parser->upgrade) { upgrade_message_fix(total, read, 3, r1, r2, r3); } if (message_count != num_messages) { fprintf(stderr, "\n\nParser didn't see %d messages only %d\n", message_count, num_messages); goto error; } if (!message_eq(0, 0, r1)) { fprintf(stderr, "\n\nError matching messages[0] in test_scan.\n"); goto error; } if (message_count > 1 && !message_eq(1, 0, r2)) { fprintf(stderr, "\n\nError matching messages[1] in test_scan.\n"); goto error; } if (message_count > 2 && !message_eq(2, 0, r3)) { fprintf(stderr, "\n\nError matching messages[2] in test_scan.\n"); goto error; } parser_free(); } } } puts("\b\b\b\b100%"); return; error: fprintf(stderr, "i=%d j=%d\n", i, j); fprintf(stderr, "buf1 (%u) %s\n\n", (unsigned int)buf1_len, buf1); fprintf(stderr, "buf2 (%u) %s\n\n", (unsigned int)buf2_len , buf2); fprintf(stderr, "buf3 (%u) %s\n", (unsigned int)buf3_len, buf3); abort(); } // user required to free the result // string terminated by \0 char * create_large_chunked_message (int body_size_in_kb, const char* headers) { int i; size_t wrote = 0; size_t headers_len = strlen(headers); size_t bufsize = headers_len + (5+1024+2)*body_size_in_kb + 6; char * buf = malloc(bufsize); memcpy(buf, headers, headers_len); wrote += headers_len; for (i = 0; i < 
/* NOTE(review): the lines below are the tail of create_large_chunked_message(),
 * whose signature and the declarations of buf/wrote/bufsize begin before this
 * chunk of the file; the code is left byte-identical.  It emits
 * body_size_in_kb chunks of the form "400\r\n" + 1024 'C' bytes + "\r\n",
 * then the terminating "0\r\n\r\n" chunk. */
       body_size_in_kb; i++) {
    // write 1kb chunk into the body.
    memcpy(buf + wrote, "400\r\n", 5);
    wrote += 5;
    memset(buf + wrote, 'C', 1024);
    wrote += 1024;
    strcpy(buf + wrote, "\r\n");
    wrote += 2;
  }
  memcpy(buf + wrote, "0\r\n\r\n", 6);
  wrote += 6;
  /* sanity-check that the buffer was filled exactly to the computed size */
  assert(wrote == bufsize);

  return buf;
}

/* Verify that we can pause parsing at any of the bytes in the
 * message and still get the result that we're expecting.
 *
 * Feeds msg->raw through parse_pause() (which pauses the parser via a
 * callback) and resumes with http_parser_pause(parser, 0) until the whole
 * buffer is consumed, then checks that exactly one message was produced and
 * that it matches msg. */
void test_message_pause (const struct message *msg)
{
  char *buf = (char*) msg->raw;
  size_t buflen = strlen(msg->raw);
  size_t nread;

  parser_init(msg->type);

  do {
    nread = parse_pause(buf, buflen);

    // We can only set the upgrade buffer once we've gotten our message
    // completion callback.
    if (messages[0].message_complete_cb_called &&
        msg->upgrade &&
        parser->upgrade) {
      /* everything past nread is the upgraded-protocol payload */
      messages[0].upgrade = buf + nread;
      goto test;
    }

    if (nread < buflen) {
      // Not much do to if we failed a strict-mode check
      if (HTTP_PARSER_ERRNO(parser) == HPE_STRICT) {
        parser_free();
        return;
      }

      /* a short read must mean the parser paused, not errored */
      assert (HTTP_PARSER_ERRNO(parser) == HPE_PAUSED);
    }

    buf += nread;
    buflen -= nread;
    http_parser_pause(parser, 0);  /* un-pause and continue */
  } while (buflen > 0);

  /* final zero-length call signals EOF; it must consume nothing */
  nread = parse_pause(NULL, 0);
  assert (nread == 0);

test:
  if (num_messages != 1) {
    printf("\n*** num_messages != 1 after testing '%s' ***\n\n", msg->name);
    abort();
  }

  if(!message_eq(0, 0, msg)) abort();

  parser_free();
}

/* Verify that body and next message won't be parsed in responses to CONNECT.
 * Parses msg->raw with parse_connect() and compares the single resulting
 * message against msg (message_eq() called with connect=1). */
void test_message_connect (const struct message *msg)
{
  char *buf = (char*) msg->raw;
  size_t buflen = strlen(msg->raw);

  parser_init(msg->type);

  parse_connect(buf, buflen);

  if (num_messages != 1) {
    printf("\n*** num_messages != 1 after testing '%s' ***\n\n", msg->name);
    abort();
  }

  if(!message_eq(0, 1, msg)) abort();

  parser_free();
}

/* Test driver: exercises the http_parser API, overflow/header-field error
 * paths, then every predefined response and request fixture (individually,
 * paused, pipelined in triples, and byte-at-a-time via test_scan).
 * Aborts on the first failure; returns 0 when every check passes. */
int main (void)
{
  parser = NULL;
  int i, j, k;
  int request_count;
  int response_count;
  unsigned long version;
  unsigned major;
  unsigned minor;
  unsigned patch;

  version = http_parser_version();
  major = (version >> 16) & 255;
  minor = (version >> 8) & 255;
  patch = version & 255;
  printf("http_parser v%u.%u.%u (0x%06lx)\n", major, minor, patch, version);

  printf("sizeof(http_parser) = %u\n", (unsigned int)sizeof(http_parser));

  /* count the NULL-name-terminated fixture tables */
  for (request_count = 0; requests[request_count].name; request_count++);
  for (response_count = 0; responses[response_count].name; response_count++);

  //// API
  test_preserve_data();
  test_parse_url();
  test_method_str();

  //// NREAD
  test_header_nread_value();

  //// OVERFLOW CONDITIONS
  test_header_overflow_error(HTTP_REQUEST);
  test_no_overflow_long_body(HTTP_REQUEST, 1000);
  test_no_overflow_long_body(HTTP_REQUEST, 100000);

  test_header_overflow_error(HTTP_RESPONSE);
  test_no_overflow_long_body(HTTP_RESPONSE, 1000);
  test_no_overflow_long_body(HTTP_RESPONSE, 100000);

  test_header_content_length_overflow_error();
  test_chunk_content_length_overflow_error();

  //// HEADER FIELD CONDITIONS
  test_double_content_length_error(HTTP_REQUEST);
  test_chunked_content_length_error(HTTP_REQUEST);
  test_header_cr_no_lf_error(HTTP_REQUEST);
  test_invalid_header_field_token_error(HTTP_REQUEST);
  test_invalid_header_field_content_error(HTTP_REQUEST);
  test_double_content_length_error(HTTP_RESPONSE);
  test_chunked_content_length_error(HTTP_RESPONSE);
  test_header_cr_no_lf_error(HTTP_RESPONSE);
  test_invalid_header_field_token_error(HTTP_RESPONSE);
  test_invalid_header_field_content_error(HTTP_RESPONSE);

  //// RESPONSES

  /* malformed / unsupported HTTP version strings must be rejected */
  test_simple_type("HTP/1.1 200 OK\r\n\r\n", HPE_INVALID_VERSION, HTTP_RESPONSE);
  test_simple_type("HTTP/01.1 200 OK\r\n\r\n", HPE_INVALID_VERSION, HTTP_RESPONSE);
  test_simple_type("HTTP/11.1 200 OK\r\n\r\n", HPE_INVALID_VERSION, HTTP_RESPONSE);
  test_simple_type("HTTP/1.01 200 OK\r\n\r\n", HPE_INVALID_VERSION, HTTP_RESPONSE);
  test_simple_type("HTTP/1.1\t200 OK\r\n\r\n", HPE_INVALID_VERSION, HTTP_RESPONSE);

  for (i = 0; i < response_count; i++) {
    test_message(&responses[i]);
  }

  for (i = 0; i < response_count; i++) {
    test_message_pause(&responses[i]);
  }

  for (i = 0; i < response_count; i++) {
    test_message_connect(&responses[i]);
  }

  /* pipeline every keep-alive-capable triple of responses */
  for (i = 0; i < response_count; i++) {
    if (!responses[i].should_keep_alive) continue;
    for (j = 0; j < response_count; j++) {
      if (!responses[j].should_keep_alive) continue;
      for (k = 0; k < response_count; k++) {
        test_multiple3(&responses[i], &responses[j], &responses[k]);
      }
    }
  }

  test_message_count_body(&responses[NO_HEADERS_NO_BODY_404]);
  test_message_count_body(&responses[TRAILING_SPACE_ON_CHUNKED_BODY]);

  // test very large chunked response
  {
    char * msg = create_large_chunked_message(31337,
      "HTTP/1.0 200 OK\r\n"
      "Transfer-Encoding: chunked\r\n"
      "Content-Type: text/plain\r\n"
      "\r\n");
    struct message large_chunked =
      {.name= "large chunked"
      ,.type= HTTP_RESPONSE
      ,.raw= msg
      ,.should_keep_alive= FALSE
      ,.message_complete_on_eof= FALSE
      ,.http_major= 1
      ,.http_minor= 0
      ,.status_code= 200
      ,.response_status= "OK"
      ,.num_headers= 2
      ,.headers=
        { { "Transfer-Encoding", "chunked" }
        , { "Content-Type", "text/plain" }
        }
      ,.body_size= 31337*1024
      ,.num_chunks_complete= 31338  /* 31337 data chunks + terminating 0-chunk */
      };
    for (i = 0; i < MAX_CHUNKS; i++) {
      large_chunked.chunk_lengths[i] = 1024;
    }
    test_message_count_body(&large_chunked);
    free(msg);
  }

  printf("response scan 1/2 ");
  test_scan( &responses[TRAILING_SPACE_ON_CHUNKED_BODY]
           , &responses[NO_BODY_HTTP10_KA_204]
           , &responses[NO_REASON_PHRASE]
           );

  printf("response scan 2/2 ");
  test_scan( &responses[BONJOUR_MADAME_FR]
           , &responses[UNDERSTORE_HEADER_KEY]
           , &responses[NO_CARRIAGE_RET]
           );

  puts("responses okay");

  /// REQUESTS

  /* malformed / unsupported HTTP version strings must be rejected */
  test_simple("GET / HTP/1.1\r\n\r\n", HPE_INVALID_VERSION);
  test_simple("GET / HTTP/01.1\r\n\r\n", HPE_INVALID_VERSION);
  test_simple("GET / HTTP/11.1\r\n\r\n", HPE_INVALID_VERSION);
  test_simple("GET / HTTP/1.01\r\n\r\n", HPE_INVALID_VERSION);

  // Extended characters - see nodejs/test/parallel/test-http-headers-obstext.js
  test_simple("GET / HTTP/1.1\r\n"
              "Test: Düsseldorf\r\n",
              HPE_OK);

  // Well-formed but incomplete
  test_simple("GET / HTTP/1.1\r\n"
              "Content-Type: text/plain\r\n"
              "Content-Length: 6\r\n"
              "\r\n"
              "fooba",
              HPE_OK);

  /* every supported method token must parse cleanly */
  static const char *all_methods[] = {
    "DELETE",
    "GET",
    "HEAD",
    "POST",
    "PUT",
    //"CONNECT", //CONNECT can't be tested like other methods, it's a tunnel
    "OPTIONS",
    "TRACE",
    "COPY",
    "LOCK",
    "MKCOL",
    "MOVE",
    "PROPFIND",
    "PROPPATCH",
    "SEARCH",
    "UNLOCK",
    "BIND",
    "REBIND",
    "UNBIND",
    "ACL",
    "REPORT",
    "MKACTIVITY",
    "CHECKOUT",
    "MERGE",
    "M-SEARCH",
    "NOTIFY",
    "SUBSCRIBE",
    "UNSUBSCRIBE",
    "PATCH",
    "PURGE",
    "MKCALENDAR",
    "LINK",
    "UNLINK",
    0 };
  const char **this_method;
  for (this_method = all_methods; *this_method; this_method++) {
    char buf[200];
    sprintf(buf, "%s / HTTP/1.1\r\n\r\n", *this_method);
    test_simple(buf, HPE_OK);
  }

  /* near-miss method tokens must be rejected */
  static const char *bad_methods[] = {
      "ASDF",
      "C******",
      "COLA",
      "GEM",
      "GETA",
      "M****",
      "MKCOLA",
      "PROPPATCHA",
      "PUN",
      "PX",
      "SA",
      "hello world",
      0 };
  for (this_method = bad_methods; *this_method; this_method++) {
    char buf[200];
    sprintf(buf, "%s / HTTP/1.1\r\n\r\n", *this_method);
    test_simple(buf, HPE_INVALID_METHOD);
  }

  // illegal header field name line folding
  test_simple("GET / HTTP/1.1\r\n"
              "name\r\n"
              " : value\r\n"
              "\r\n",
              HPE_INVALID_HEADER_TOKEN);

  /* legal line folding (tab-continued header value) must be accepted */
  const char *dumbfuck2 =
    "GET / HTTP/1.1\r\n"
    "X-SSL-Bullshit: -----BEGIN CERTIFICATE-----\r\n"
    "\tMIIFbTCCBFWgAwIBAgICH4cwDQYJKoZIhvcNAQEFBQAwcDELMAkGA1UEBhMCVUsx\r\n"
    "\tETAPBgNVBAoTCGVTY2llbmNlMRIwEAYDVQQLEwlBdXRob3JpdHkxCzAJBgNVBAMT\r\n"
    "\tAkNBMS0wKwYJKoZIhvcNAQkBFh5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMu\r\n"
    "\tdWswHhcNMDYwNzI3MTQxMzI4WhcNMDcwNzI3MTQxMzI4WjBbMQswCQYDVQQGEwJV\r\n"
    "\tSzERMA8GA1UEChMIZVNjaWVuY2UxEzARBgNVBAsTCk1hbmNoZXN0ZXIxCzAJBgNV\r\n"
    "\tBAcTmrsogriqMWLAk1DMRcwFQYDVQQDEw5taWNoYWVsIHBhcmQYJKoZIhvcNAQEB\r\n"
    "\tBQADggEPADCCAQoCggEBANPEQBgl1IaKdSS1TbhF3hEXSl72G9J+WC/1R64fAcEF\r\n"
    "\tW51rEyFYiIeZGx/BVzwXbeBoNUK41OK65sxGuflMo5gLflbwJtHBRIEKAfVVp3YR\r\n"
    "\tgW7cMA/s/XKgL1GEC7rQw8lIZT8RApukCGqOVHSi/F1SiFlPDxuDfmdiNzL31+sL\r\n"
    "\t0iwHDdNkGjy5pyBSB8Y79dsSJtCW/iaLB0/n8Sj7HgvvZJ7x0fr+RQjYOUUfrePP\r\n"
    "\tu2MSpFyf+9BbC/aXgaZuiCvSR+8Snv3xApQY+fULK/xY8h8Ua51iXoQ5jrgu2SqR\r\n"
    "\twgA7BUi3G8LFzMBl8FRCDYGUDy7M6QaHXx1ZWIPWNKsCAwEAAaOCAiQwggIgMAwG\r\n"
    "\tA1UdEwEB/wQCMAAwEQYJYIZIAYb4QgHTTPAQDAgWgMA4GA1UdDwEB/wQEAwID6DAs\r\n"
    "\tBglghkgBhvhCAQ0EHxYdVUsgZS1TY2llbmNlIFVzZXIgQ2VydGlmaWNhdGUwHQYD\r\n"
    "\tVR0OBBYEFDTt/sf9PeMaZDHkUIldrDYMNTBZMIGaBgNVHSMEgZIwgY+AFAI4qxGj\r\n"
    "\tloCLDdMVKwiljjDastqooXSkcjBwMQswCQYDVQQGEwJVSzERMA8GA1UEChMIZVNj\r\n"
    "\taWVuY2UxEjAQBgNVBAsTCUF1dGhvcml0eTELMAkGA1UEAxMCQ0ExLTArBgkqhkiG\r\n"
    "\t9w0BCQEWHmNhLW9wZXJhdG9yQGdyaWQtc3VwcG9ydC5hYy51a4IBADApBgNVHRIE\r\n"
    "\tIjAggR5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMudWswGQYDVR0gBBIwEDAO\r\n"
    "\tBgwrBgEEAdkvAQEBAQYwPQYJYIZIAYb4QgEEBDAWLmh0dHA6Ly9jYS5ncmlkLXN1\r\n"
    "\tcHBvcnQuYWMudmT4sopwqlBWsvcHViL2NybC9jYWNybC5jcmwwPQYJYIZIAYb4QgEDBDAWLmh0\r\n"
    "\tdHA6Ly9jYS5ncmlkLXN1cHBvcnQuYWMudWsvcHViL2NybC9jYWNybC5jcmwwPwYD\r\n"
    "\tVR0fBDgwNjA0oDKgMIYuaHR0cDovL2NhLmdyaWQt5hYy51ay9wdWIv\r\n"
    "\tY3JsL2NhY3JsLmNybDANBgkqhkiG9w0BAQUFAAOCAQEAS/U4iiooBENGW/Hwmmd3\r\n"
    "\tXCy6Zrt08YjKCzGNjorT98g8uGsqYjSxv/hmi0qlnlHs+k/3Iobc3LjS5AMYr5L8\r\n"
    "\tUO7OSkgFFlLHQyC9JzPfmLCAugvzEbyv4Olnsr8hbxF1MbKZoQxUZtMVu29wjfXk\r\n"
    "\thTeApBv7eaKCWpSp7MCbvgzm74izKhu3vlDk9w6qVrxePfGgpKPqfHiOoGhFnbTK\r\n"
    "\twTC6o2xq5y0qZ03JonF7OJspEd3I5zKY3E+ov7/ZhW6DqT8UFvsAdjvQbXyhV8Eu\r\n"
    "\tYhixw1aKEPzNjNowuIseVogKOLXxWI5vAi5HgXdS0/ES5gDGsABo4fqovUKlgop3\r\n"
    "\tRA==\r\n"
    "\t-----END CERTIFICATE-----\r\n"
    "\r\n";
  test_simple(dumbfuck2, HPE_OK);

  /* control bytes embedded in a header line must be rejected */
  const char *corrupted_connection =
    "GET / HTTP/1.1\r\n"
    "Host: www.example.com\r\n"
    "Connection\r\033\065\325eep-Alive\r\n"
    "Accept-Encoding: gzip\r\n"
    "\r\n";
  test_simple(corrupted_connection, HPE_INVALID_HEADER_TOKEN);

  const char *corrupted_header_name =
    "GET / HTTP/1.1\r\n"
    "Host: www.example.com\r\n"
    "X-Some-Header\r\033\065\325eep-Alive\r\n"
    "Accept-Encoding: gzip\r\n"
    "\r\n";
  test_simple(corrupted_header_name, HPE_INVALID_HEADER_TOKEN);

#if 0
  // NOTE(Wed Nov 18 11:57:27 CET 2009) this seems okay. we just read body
  // until EOF.
  //
  // no content-length
  // error if there is a body without content length
  const char *bad_get_no_headers_no_body =
    "GET /bad_get_no_headers_no_body/world HTTP/1.1\r\n"
    "Accept: */*\r\n"
    "\r\n"
    "HELLO";
  test_simple(bad_get_no_headers_no_body, 0);
#endif
  /* TODO sending junk and large headers gets rejected */

  /* check to make sure our predefined requests are okay */
  for (i = 0; requests[i].name; i++) {
    test_message(&requests[i]);
  }

  for (i = 0; i < request_count; i++) {
    test_message_pause(&requests[i]);
  }

  /* pipeline every keep-alive-capable triple of requests */
  for (i = 0; i < request_count; i++) {
    if (!requests[i].should_keep_alive) continue;
    for (j = 0; j < request_count; j++) {
      if (!requests[j].should_keep_alive) continue;
      for (k = 0; k < request_count; k++) {
        test_multiple3(&requests[i], &requests[j], &requests[k]);
      }
    }
  }

  printf("request scan 1/4 ");
  test_scan( &requests[GET_NO_HEADERS_NO_BODY]
           , &requests[GET_ONE_HEADER_NO_BODY]
           , &requests[GET_NO_HEADERS_NO_BODY]
           );

  printf("request scan 2/4 ");
  test_scan( &requests[POST_CHUNKED_ALL_YOUR_BASE]
           , &requests[POST_IDENTITY_BODY_WORLD]
           , &requests[GET_FUNKY_CONTENT_LENGTH]
           );

  printf("request scan 3/4 ");
  test_scan( &requests[TWO_CHUNKS_MULT_ZERO_END]
           , &requests[CHUNKED_W_TRAILING_HEADERS]
           , &requests[CHUNKED_W_BULLSHIT_AFTER_LENGTH]
           );

  printf("request scan 4/4 ");
  test_scan( &requests[QUERY_URL_WITH_QUESTION_MARK_GET]
           , &requests[PREFIX_NEWLINE_GET ]
           , &requests[CONNECT_REQUEST]
           );

  puts("requests okay");

  return 0;
}