pax_global_header00006660000000000000000000000064146070321150014511gustar00rootroot0000000000000052 comment=23636985857affd9b35bfc895f4bafdf2dc0801c gql-3.6.0b2/000077500000000000000000000000001460703211500125265ustar00rootroot00000000000000gql-3.6.0b2/.github/000077500000000000000000000000001460703211500140665ustar00rootroot00000000000000gql-3.6.0b2/.github/ISSUE_TEMPLATE/000077500000000000000000000000001460703211500162515ustar00rootroot00000000000000gql-3.6.0b2/.github/ISSUE_TEMPLATE/bug_report.md000066400000000000000000000022111460703211500207370ustar00rootroot00000000000000--- name: Bug report about: Create a report to help us improve title: '' labels: '' assignees: '' --- **Common problems** - If you receive a TransportQueryError, it means the error is coming from the backend (See [Error Handling](https://gql.readthedocs.io/en/latest/advanced/error_handling.html)) and has probably nothing to do with gql - If you use IPython (Jupyter, Spyder), then [you need to use the async version](https://gql.readthedocs.io/en/latest/async/async_usage.html#ipython) - Before sending a bug report, please consider [activating debug logs](https://gql.readthedocs.io/en/latest/advanced/logging.html) to see the messages exchanged between the client and the backend **Describe the bug** A clear and concise description of what the bug is. Please provide a full stack trace if you have one. If you can, please provide the backend URL, the GraphQL schema, the code you used. **To Reproduce** Steps to reproduce the behavior: **Expected behavior** A clear and concise description of what you expected to happen. 
**System info (please complete the following information):** - OS: - Python version: - gql version: - graphql-core version: gql-3.6.0b2/.github/ISSUE_TEMPLATE/feature_request.md000066400000000000000000000001541460703211500217760ustar00rootroot00000000000000--- name: Feature request about: Suggest an idea for this project title: '' labels: '' assignees: '' --- gql-3.6.0b2/.github/workflows/000077500000000000000000000000001460703211500161235ustar00rootroot00000000000000gql-3.6.0b2/.github/workflows/deploy.yml000066400000000000000000000010561460703211500201440ustar00rootroot00000000000000name: 🚀 Deploy to PyPI on: push: tags: - 'v*' jobs: build: runs-on: ubuntu-20.04 steps: - uses: actions/checkout@v3 - name: Set up Python 3.8 uses: actions/setup-python@v4 with: python-version: 3.8 - name: Build wheel and source tarball run: | pip install wheel python setup.py sdist bdist_wheel - name: Publish a Python distribution to PyPI uses: pypa/gh-action-pypi-publish@v1.1.0 with: user: __token__ password: ${{ secrets.pypi_password }} gql-3.6.0b2/.github/workflows/lint.yml000066400000000000000000000007331460703211500176170ustar00rootroot00000000000000name: Lint on: [push, pull_request] jobs: build: runs-on: ubuntu-20.04 steps: - uses: actions/checkout@v3 - name: Set up Python 3.8 uses: actions/setup-python@v4 with: python-version: 3.8 - name: Install dependencies run: | python -m pip install --upgrade pip wheel pip install tox - name: Run lint and static type checks run: tox env: TOXENV: flake8,black,import-order,mypy,manifest gql-3.6.0b2/.github/workflows/tests.yml000066400000000000000000000044061460703211500200140ustar00rootroot00000000000000name: Tests on: [push, pull_request] jobs: build: runs-on: ${{ matrix.os }} strategy: max-parallel: 4 matrix: python-version: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "pypy3.8"] os: [ubuntu-20.04, windows-latest] exclude: - os: windows-latest python-version: "3.7" - os: windows-latest python-version: "3.9" - os: windows-latest 
python-version: "3.10" - os: windows-latest python-version: "3.11" - os: windows-latest python-version: "3.12" - os: windows-latest python-version: "pypy3.8" steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | python -m pip install --upgrade pip wheel pip install tox tox-gh-actions - name: Test with tox run: tox env: TOXENV: ${{ matrix.toxenv }} single_extra: runs-on: ubuntu-20.04 strategy: fail-fast: false matrix: dependency: ["aiohttp", "requests", "httpx", "websockets"] steps: - uses: actions/checkout@v3 - name: Set up Python 3.8 uses: actions/setup-python@v4 with: python-version: 3.8 - name: Install dependencies with only ${{ matrix.dependency }} extra dependency run: | python -m pip install --upgrade pip wheel pip install .[${{ matrix.dependency }},test_no_transport] - name: Test with --${{ matrix.dependency }}-only run: pytest tests --${{ matrix.dependency }}-only coverage: runs-on: ubuntu-20.04 steps: - uses: actions/checkout@v3 - name: Set up Python 3.8 uses: actions/setup-python@v4 with: python-version: 3.8 - name: Install test dependencies run: | python -m pip install --upgrade pip wheel pip install -e.[test] - name: Test with coverage run: pytest --cov=gql --cov-report=xml --cov-report=term-missing tests - name: Upload coverage to Codecov uses: codecov/codecov-action@v1 gql-3.6.0b2/.gitignore000066400000000000000000000021371460703211500145210ustar00rootroot00000000000000# Created by https://www.gitignore.io ### Python ### # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] # C extensions *.so # Distribution / packaging .Python env/ .env/ venv/ .venv/ build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ *.egg-info/ .installed.cfg *.egg # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to 
inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover # Translations *.mo *.pot # Django stuff: *.log # Type checking /.mypy_cache .pyre /type_info.json # Sphinx documentation docs/_build/ # PyBuilder target/ /tests/django.sqlite /graphene/index.json /graphene/meta.json /meta.json /index.json /docs/playground/graphene-js/pypyjs-release-nojit/ /docs/static/playground/lib /docs/static/playground # PyCharm .idea # Databases *.sqlite3 .DS_Store ### VisualStudioCode ### .vscode/* # VIM *.swp gql-3.6.0b2/.readthedocs.yaml000066400000000000000000000011031460703211500157500ustar00rootroot00000000000000# .readthedocs.yaml # Read the Docs configuration file # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details # Required version: 2 # Set the version of Python and other tools you might need build: os: ubuntu-20.04 tools: python: "3.9" # Build documentation in the docs/ directory with Sphinx sphinx: configuration: docs/conf.py # Optionally build your docs in additional formats such as PDF formats: - pdf python: install: - requirements: docs/requirements.txt - method: pip path: . extra_requirements: - all gql-3.6.0b2/CODEOWNERS000066400000000000000000000000701460703211500141160ustar00rootroot00000000000000/ @syrusakbary @ekampf @cito @leszekhanusz @KingDarBoja gql-3.6.0b2/CONTRIBUTING.md000066400000000000000000000077451460703211500147740ustar00rootroot00000000000000# Contributing Thanks for helping to make gql awesome! 
We welcome all kinds of contributions: - Bug fixes - Documentation improvements - New features - Refactoring & tidying ## Getting started If you have a specific contribution in mind, be sure to check the [issues](https://github.com/graphql-python/gql/issues) and [pull requests](https://github.com/graphql-python/gql/pulls) in progress - someone could already be working on something similar and you can help out. ## Project setup ### Development with virtualenv (recommended) After cloning this repo, create a virtualenv: ```console virtualenv gql-dev ``` Activate the virtualenv and install dependencies by running: ```console python -m pip install -e.[dev] ``` If you are using Linux or MacOS, you can make use of Makefile command `make dev-setup`, which is a shortcut for the above python command. ### Development on Conda You must create a new env (e.g. `gql-dev`) with the following command: ```sh conda create -n gql-dev python=3.8 ``` Then activate the environment with `conda activate gql-dev`. Proceed to install all dependencies by running: ```console pip install -e.[dev] ``` And you ready to start development! ## Coding guidelines Several tools are used to ensure a coherent coding style. You need to make sure that your code satisfy those requirements or the automated tests will fail. 
- [black code formatter](https://github.com/psf/black) - [flake8 style enforcement](https://flake8.pycqa.org/en/latest/index.html) - [mypy static type checker](http://mypy-lang.org/) - [isort to sort imports alphabetically](https://isort.readthedocs.io/en/stable/) On Linux or MacOS, you can fix and check your code style by running the Makefile command `make check` (this is also checked by running the automated tests with tox but it is much faster with make) In addition to the above checks, it is asked that: - [type hints are used](https://docs.python.org/3/library/typing.html) - tests are added to ensure complete code coverage ## Running tests After developing, the full test suite can be evaluated by running: ```sh pytest tests --cov=gql --cov-report=term-missing -vv ``` Please note that some tests which require external online resources are not done in the automated tests. You can run those tests by running: ```sh pytest tests --cov=gql --cov-report=term-missing --run-online -vv ``` If you are using Linux or MacOS, you can make use of Makefile commands `make tests` and `make all_tests`, which are shortcuts for the above python commands. You can also test on several python environments by using tox. ### Running tox on virtualenv Install tox: ```console pip install tox ``` Run `tox` on your virtualenv (do not forget to activate it!) and that's it! ### Running tox on Conda In order to run `tox` command on conda, install [tox-conda](https://github.com/tox-dev/tox-conda): ```sh conda install -c conda-forge tox-conda ``` This install tox underneath so no need to install it before. Then add the line `requires = tox-conda` in the `tox.ini` file under `[tox]`. Run `tox` and you will see all the environments being created and all passing tests. :rocket: ## How to create a good Pull Request 1. Make a fork of the master branch on github 2. Clone your forked repo on your computer 3. Create a feature branch `git checkout -b feature_my_awesome_feature` 4. Modify the code 5. 
Verify that the [Coding guidelines](#coding-guidelines) are respected 6. Verify that the [automated tests](#running-tests) are passing 7. Make a commit and push it to your fork 8. From github, create the pull request. Automated tests from GitHub actions and codecov will then automatically run the tests and check the code coverage 9. If other modifications are needed, you are free to create more commits and push them on your branch. They'll get added to the PR automatically. Once the Pull Request is accepted and merged, you can safely delete the branch (and the forked repo if no more development is needed). gql-3.6.0b2/LICENSE000066400000000000000000000020711460703211500135330ustar00rootroot00000000000000The MIT License (MIT) Copyright (c) 2016 GraphQL Python Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
gql-3.6.0b2/MANIFEST.in000066400000000000000000000007201460703211500142630ustar00rootroot00000000000000include MANIFEST.in include CODEOWNERS include LICENSE include README.md include CONTRIBUTING.md include .readthedocs.yaml include dev_requirements.txt include Makefile include tox.ini include gql/py.typed recursive-include tests *.py *.graphql *.cnf *.yaml *.pem recursive-include docs *.txt *.rst conf.py Makefile make.bat *.jpg *.png *.gif recursive-include docs/code_examples *.py prune docs/_build prune gql-checker global-exclude *.py[co] __pycache__ gql-3.6.0b2/Makefile000066400000000000000000000015531460703211500141720ustar00rootroot00000000000000.PHONY: clean tests docs SRC_PYTHON := gql tests docs/code_examples dev-setup: python -m pip install -e ".[test]" tests: pytest tests --cov=gql --cov-report=term-missing -vv all_tests: pytest tests --cov=gql --cov-report=term-missing --run-online -vv tests_aiohttp: pytest tests --aiohttp-only tests_requests: pytest tests --requests-only tests_httpx: pytest tests --httpx-only tests_websockets: pytest tests --websockets-only check: isort --recursive $(SRC_PYTHON) black $(SRC_PYTHON) flake8 $(SRC_PYTHON) mypy $(SRC_PYTHON) check-manifest docs: rm -rf ./docs/_build cd docs; make html clean: find . -name "*.pyc" -delete find . -name "__pycache__" | xargs -I {} rm -rf {} rm -rf ./htmlcov rm -rf ./.mypy_cache rm -rf ./.pytest_cache rm -rf ./.tox rm -rf ./gql.egg-info rm -rf ./dist rm -rf ./build rm -rf ./docs/_build rm -f ./.coverage gql-3.6.0b2/README.md000066400000000000000000000103661460703211500140130ustar00rootroot00000000000000# GQL This is a GraphQL client for Python 3.7+. Plays nicely with `graphene`, `graphql-core`, `graphql-js` and any other GraphQL implementation compatible with the spec. GQL architecture is inspired by `React-Relay` and `Apollo-Client`. 
[![GitHub-Actions][gh-image]][gh-url] [![pyversion][pyversion-image]][pyversion-url] [![pypi][pypi-image]][pypi-url] [![Anaconda-Server Badge][conda-image]][conda-url] [![codecov][codecov-image]][codecov-url] [gh-image]: https://github.com/graphql-python/gql/workflows/Tests/badge.svg [gh-url]: https://github.com/graphql-python/gql/actions?query=workflow%3ATests [pyversion-image]: https://img.shields.io/pypi/pyversions/gql [pyversion-url]: https://pypi.org/project/gql/ [pypi-image]: https://img.shields.io/pypi/v/gql.svg?style=flat [pypi-url]: https://pypi.org/project/gql/ [conda-image]: https://img.shields.io/conda/vn/conda-forge/gql.svg [conda-url]: https://anaconda.org/conda-forge/gql [codecov-image]: https://codecov.io/gh/graphql-python/gql/branch/master/graph/badge.svg [codecov-url]: https://codecov.io/gh/graphql-python/gql ## Documentation The complete documentation for GQL can be found at [gql.readthedocs.io](https://gql.readthedocs.io). ## Features * Execute GraphQL queries using [different protocols](https://gql.readthedocs.io/en/latest/transports/index.html): * http * websockets: * apollo or graphql-ws protocol * Phoenix channels * AWS AppSync realtime protocol (experimental) * Possibility to [validate the queries locally](https://gql.readthedocs.io/en/latest/usage/validation.html) using a GraphQL schema provided locally or fetched from the backend using an instrospection query * Supports GraphQL queries, mutations and [subscriptions](https://gql.readthedocs.io/en/latest/usage/subscriptions.html) * Supports [sync or async usage](https://gql.readthedocs.io/en/latest/async/index.html), [allowing concurrent requests](https://gql.readthedocs.io/en/latest/advanced/async_advanced_usage.html#async-advanced-usage) * Supports [File uploads](https://gql.readthedocs.io/en/latest/usage/file_upload.html) * Supports [Custom scalars / Enums](https://gql.readthedocs.io/en/latest/usage/custom_scalars_and_enums.html) * [gql-cli 
script](https://gql.readthedocs.io/en/latest/gql-cli/intro.html) to execute GraphQL queries or download schemas from the command line * [DSL module](https://gql.readthedocs.io/en/latest/advanced/dsl_module.html) to compose GraphQL queries dynamically ## Installation You can install GQL with all the optional dependencies using pip: ```bash # Quotes may be required on certain shells such as zsh. pip install "gql[all]" ``` > **NOTE**: See also [the documentation](https://gql.readthedocs.io/en/latest/intro.html#less-dependencies) to install GQL with less extra dependencies depending on the transports you would like to use or for alternative installation methods. ## Usage ### Basic usage ```python from gql import gql, Client from gql.transport.aiohttp import AIOHTTPTransport # Select your transport with a defined url endpoint transport = AIOHTTPTransport(url="https://countries.trevorblades.com/") # Create a GraphQL client using the defined transport client = Client(transport=transport, fetch_schema_from_transport=True) # Provide a GraphQL query query = gql( """ query getContinents { continents { code name } } """ ) # Execute the query on the transport result = client.execute(query) print(result) ``` Executing the above code should output the following result: ``` $ python basic_example.py {'continents': [{'code': 'AF', 'name': 'Africa'}, {'code': 'AN', 'name': 'Antarctica'}, {'code': 'AS', 'name': 'Asia'}, {'code': 'EU', 'name': 'Europe'}, {'code': 'NA', 'name': 'North America'}, {'code': 'OC', 'name': 'Oceania'}, {'code': 'SA', 'name': 'South America'}]} ``` > **WARNING**: Please note that this basic example won't work if you have an asyncio event loop running. In some > python environments (as with Jupyter which uses IPython) an asyncio event loop is created for you. In that case you > should use instead the [async usage example](https://gql.readthedocs.io/en/latest/async/async_usage.html#async-usage). 
## Contributing See [CONTRIBUTING.md](CONTRIBUTING.md) ## License [MIT License](https://github.com/graphql-python/gql/blob/master/LICENSE) gql-3.6.0b2/docs/000077500000000000000000000000001460703211500134565ustar00rootroot00000000000000gql-3.6.0b2/docs/Makefile000066400000000000000000000011721460703211500151170ustar00rootroot00000000000000# Minimal makefile for Sphinx documentation # # You can set these variables from the command line, and also # from the environment for the first two. SPHINXOPTS ?= SPHINXBUILD ?= sphinx-build SOURCEDIR = . BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) gql-3.6.0b2/docs/advanced/000077500000000000000000000000001460703211500152235ustar00rootroot00000000000000gql-3.6.0b2/docs/advanced/async_advanced_usage.rst000066400000000000000000000035471460703211500221140ustar00rootroot00000000000000.. _async_advanced_usage: Async advanced usage ==================== It is possible to send multiple GraphQL queries (query, mutation or subscription) in parallel, on the same websocket connection, using asyncio tasks. In order to retry in case of connection failure, we can use the great `backoff`_ module. .. 
code-block:: python # First define all your queries using a session argument: async def execute_query1(session): result = await session.execute(query1) print(result) async def execute_query2(session): result = await session.execute(query2) print(result) async def execute_subscription1(session): async for result in session.subscribe(subscription1): print(result) async def execute_subscription2(session): async for result in session.subscribe(subscription2): print(result) # Then create a couroutine which will connect to your API and run all your queries as tasks. # We use a `backoff` decorator to reconnect using exponential backoff in case of connection failure. @backoff.on_exception(backoff.expo, Exception, max_time=300) async def graphql_connection(): transport = WebsocketsTransport(url="wss://YOUR_URL") client = Client(transport=transport, fetch_schema_from_transport=True) async with client as session: task1 = asyncio.create_task(execute_query1(session)) task2 = asyncio.create_task(execute_query2(session)) task3 = asyncio.create_task(execute_subscription1(session)) task4 = asyncio.create_task(execute_subscription2(session)) await asyncio.gather(task1, task2, task3, task4) asyncio.run(graphql_connection()) Subscriptions tasks can be stopped at any time by running .. code-block:: python task.cancel() .. _backoff: https://github.com/litl/backoff gql-3.6.0b2/docs/advanced/async_permanent_session.rst000066400000000000000000000102501460703211500227040ustar00rootroot00000000000000.. _async_permanent_session: Async permanent session ======================= Sometimes you want to have a single permanent reconnecting async session to a GraphQL backend, and that can be `difficult to manage`_ manually with the :code:`async with client as session` syntax. It is now possible to have a single reconnecting session using the :meth:`connect_async ` method of Client with a :code:`reconnecting=True` argument. .. 
code-block:: python # Create a session from the client which will reconnect automatically. # This session can be kept in a class for example to provide a way # to execute GraphQL queries from many different places session = await client.connect_async(reconnecting=True) # You can run execute or subscribe method on this session result = await session.execute(query) # When you want the connection to close (for cleanup), # you call close_async await client.close_async() When you use :code:`reconnecting=True`, gql will watch the exceptions generated during the execute and subscribe calls and, if it detects a TransportClosed exception (indicating that the link to the underlying transport is broken), it will try to reconnect to the backend again. Retries ------- Connection retries ^^^^^^^^^^^^^^^^^^ With :code:`reconnecting=True`, gql will use the `backoff`_ module to repeatedly try to connect with exponential backoff and jitter with a maximum delay of 60 seconds by default. You can change the default reconnecting profile by providing your own backoff decorator to the :code:`retry_connect` argument. .. code-block:: python # Here wait maximum 5 minutes between connection retries retry_connect = backoff.on_exception( backoff.expo, # wait generator (here: exponential backoff) Exception, # which exceptions should cause a retry (here: everything) max_value=300, # max wait time in seconds ) session = await client.connect_async( reconnecting=True, retry_connect=retry_connect, ) Execution retries ^^^^^^^^^^^^^^^^^ With :code:`reconnecting=True`, by default we will also retry up to 5 times when an exception happens during an execute call (to manage a possible loss in the connection to the transport). There is no retry in case of a :code:`TransportQueryError` exception as it indicates that the connection to the backend is working correctly. You can change the default execute retry profile by providing your own backoff decorator to the :code:`retry_execute` argument. .. 
code-block:: python # Here Only 3 tries for execute calls retry_execute = backoff.on_exception( backoff.expo, Exception, max_tries=3, ) session = await client.connect_async( reconnecting=True, retry_execute=retry_execute, ) If you don't want any retry on the execute calls, you can disable the retries with :code:`retry_execute=False` .. note:: If you want to retry even with :code:`TransportQueryError` exceptions, then you need to make your own backoff decorator on your own method: .. code-block:: python @backoff.on_exception(backoff.expo, Exception, max_tries=3) async def execute_with_retry(session, query): return await session.execute(query) Subscription retries ^^^^^^^^^^^^^^^^^^^^ There is no :code:`retry_subscribe` as it is not feasible with async generators. If you want retries for your subscriptions, then you can do it yourself with backoff decorators on your methods. .. code-block:: python @backoff.on_exception(backoff.expo, Exception, max_tries=3, giveup=lambda e: isinstance(e, TransportQueryError)) async def execute_subscription1(session): async for result in session.subscribe(subscription1): print(result) FastAPI example --------------- .. literalinclude:: ../code_examples/fastapi_async.py Console example --------------- .. literalinclude:: ../code_examples/console_async.py .. _difficult to manage: https://github.com/graphql-python/gql/issues/179 .. _backoff: https://github.com/litl/backoff gql-3.6.0b2/docs/advanced/dsl_module.rst000066400000000000000000000253701460703211500201130ustar00rootroot00000000000000Compose queries dynamically =========================== Instead of providing the GraphQL queries as a Python String, it is also possible to create GraphQL queries dynamically. Using the :mod:`DSL module `, we can create a query using a Domain Specific Language which is created from the schema. The following code: .. 
code-block:: python ds = DSLSchema(StarWarsSchema) query = dsl_gql( DSLQuery( ds.Query.hero.select( ds.Character.id, ds.Character.name, ds.Character.friends.select(ds.Character.name), ) ) ) will generate a query equivalent to: .. code-block:: python query = gql(""" query { hero { id name friends { name } } } """) How to use ---------- First generate the root using the :class:`DSLSchema `:: ds = DSLSchema(client.schema) Then use auto-generated attributes of the :code:`ds` instance to get a root type (Query, Mutation or Subscription). This will generate a :class:`DSLType ` instance:: ds.Query From this root type, you use auto-generated attributes to get a field. This will generate a :class:`DSLField ` instance:: ds.Query.hero hero is a GraphQL object type and needs children fields. By default, there is no children fields selected. To select the fields that you want in your query, you use the :meth:`select ` method. To generate the children fields, we use the same method as above to auto-generate the fields from the :code:`ds` instance (ie :code:`ds.Character.name` is the field `name` of the type `Character`):: ds.Query.hero.select(ds.Character.name) The select method return the same instance, so it is possible to chain the calls:: ds.Query.hero.select(ds.Character.name).select(ds.Character.id) Or do it sequencially:: hero_query = ds.Query.hero hero_query.select(ds.Character.name) hero_query.select(ds.Character.id) As you can select children fields of any object type, you can construct your complete query tree:: ds.Query.hero.select( ds.Character.id, ds.Character.name, ds.Character.friends.select(ds.Character.name), ) Once your root query fields are defined, you can put them in an operation using :class:`DSLQuery `, :class:`DSLMutation ` or :class:`DSLSubscription `:: DSLQuery( ds.Query.hero.select( ds.Character.id, ds.Character.name, ds.Character.friends.select(ds.Character.name), ) ) Once your operations are defined, use the :func:`dsl_gql ` function to convert your 
operations into a document which will be able to get executed in the client or a session:: query = dsl_gql( DSLQuery( ds.Query.hero.select( ds.Character.id, ds.Character.name, ds.Character.friends.select(ds.Character.name), ) ) ) result = client.execute(query) Arguments ^^^^^^^^^ It is possible to add arguments to any field simply by calling it with the required arguments:: ds.Query.human(id="1000").select(ds.Human.name) It can also be done using the :meth:`args ` method:: ds.Query.human.args(id="1000").select(ds.Human.name) .. note:: If your argument name is a Python keyword (for, in, from, ...), you will receive a SyntaxError (See `issue #308`_). To fix this, you can provide the arguments by unpacking a dictionary. For example, instead of using :code:`from=5`, you can use :code:`**{"from":5}` Aliases ^^^^^^^ You can set an alias of a field using the :meth:`alias ` method:: ds.Query.human.args(id=1000).alias("luke").select(ds.Character.name) It is also possible to set the alias directly using keyword arguments of an operation:: DSLQuery( luke=ds.Query.human.args(id=1000).select(ds.Character.name) ) Or using keyword arguments in the :meth:`select ` method:: ds.Query.hero.select( my_name=ds.Character.name ) Mutations ^^^^^^^^^ For the mutations, you need to start from root fields starting from :code:`ds.Mutation` then you need to create the GraphQL operation using the class :class:`DSLMutation `. 
Example:: query = dsl_gql( DSLMutation( ds.Mutation.createReview.args( episode=6, review={"stars": 5, "commentary": "This is a great movie!"} ).select(ds.Review.stars, ds.Review.commentary) ) ) Variable arguments ^^^^^^^^^^^^^^^^^^ To provide variables instead of argument values directly for an operation, you have to: * Instantiate a :class:`DSLVariableDefinitions `:: var = DSLVariableDefinitions() * From this instance you can generate :class:`DSLVariable ` instances and provide them as the value of the arguments:: ds.Mutation.createReview.args(review=var.review, episode=var.episode) * Once the operation has been defined, you have to save the variable definitions used in it:: operation.variable_definitions = var The following code: .. code-block:: python var = DSLVariableDefinitions() op = DSLMutation( ds.Mutation.createReview.args(review=var.review, episode=var.episode).select( ds.Review.stars, ds.Review.commentary ) ) op.variable_definitions = var query = dsl_gql(op) will generate a query equivalent to:: mutation ($review: ReviewInput, $episode: Episode) { createReview(review: $review, episode: $episode) { stars commentary } } Variable arguments with a default value """"""""""""""""""""""""""""""""""""""" If you want to provide a **default value** for your variable, you can use the :code:`default` method on a variable. The following code: .. 
code-block:: python var = DSLVariableDefinitions() op = DSLMutation( ds.Mutation.createReview.args( review=var.review.default({"stars": 5, "commentary": "Wow!"}), episode=var.episode, ).select(ds.Review.stars, ds.Review.commentary) ) op.variable_definitions = var query = dsl_gql(op) will generate a query equivalent to:: mutation ($review: ReviewInput = {stars: 5, commentary: "Wow!"}, $episode: Episode) { createReview(review: $review, episode: $episode) { stars commentary } } Subscriptions ^^^^^^^^^^^^^ For the subscriptions, you need to start from root fields starting from :code:`ds.Subscription` then you need to create the GraphQL operation using the class :class:`DSLSubscription `. Example:: query = dsl_gql( DSLSubscription( ds.Subscription.reviewAdded(episode=6).select(ds.Review.stars, ds.Review.commentary) ) ) Multiple fields in an operation ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ It is possible to create an operation with multiple fields:: DSLQuery( ds.Query.hero.select(ds.Character.name), hero_of_episode_5=ds.Query.hero(episode=5).select(ds.Character.name), ) Operation name ^^^^^^^^^^^^^^ You can set the operation name of an operation using a keyword argument to :func:`dsl_gql `:: query = dsl_gql( GetHeroName=DSLQuery(ds.Query.hero.select(ds.Character.name)) ) will generate the request:: query GetHeroName { hero { name } } Multiple operations in a document ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ It is possible to create an Document with multiple operations:: query = dsl_gql( operation_name_1=DSLQuery( ... ), operation_name_2=DSLQuery( ... ), operation_name_3=DSLMutation( ... 
), ) Fragments ^^^^^^^^^ To define a `Fragment`_, you have to: * Instantiate a :class:`DSLFragment ` with a name:: name_and_appearances = DSLFragment("NameAndAppearances") * Provide the GraphQL type of the fragment with the :meth:`on ` method:: name_and_appearances.on(ds.Character) * Add children fields using the :meth:`select ` method:: name_and_appearances.select(ds.Character.name, ds.Character.appearsIn) Once your fragment is defined, to use it you should: * select it as a field somewhere in your query:: query_with_fragment = DSLQuery(ds.Query.hero.select(name_and_appearances)) * add it as an argument of :func:`dsl_gql ` with your query:: query = dsl_gql(name_and_appearances, query_with_fragment) The above example will generate the following request:: fragment NameAndAppearances on Character { name appearsIn } { hero { ...NameAndAppearances } } Inline Fragments ^^^^^^^^^^^^^^^^ To define an `Inline Fragment`_, you have to: * Instantiate a :class:`DSLInlineFragment `:: human_fragment = DSLInlineFragment() * Provide the GraphQL type of the fragment with the :meth:`on ` method:: human_fragment.on(ds.Human) * Add children fields using the :meth:`select ` method:: human_fragment.select(ds.Human.homePlanet) Once your inline fragment is defined, to use it you should: * select it as a field somewhere in your query:: query_with_inline_fragment = ds.Query.hero.args(episode=6).select( ds.Character.name, human_fragment ) The above example will generate the following request:: hero(episode: JEDI) { name ... 
on Human { homePlanet } } Note: because the :meth:`on ` and :meth:`select ` methods return :code:`self`, this can be written in a concise manner:: query_with_inline_fragment = ds.Query.hero.args(episode=6).select( ds.Character.name, DSLInlineFragment().on(ds.Human).select(ds.Human.homePlanet) ) Meta-fields ^^^^^^^^^^^ To define meta-fields (:code:`__typename`, :code:`__schema` and :code:`__type`), you can use the :class:`DSLMetaField ` class:: query = ds.Query.hero.select( ds.Character.name, DSLMetaField("__typename") ) Executable examples ------------------- Async example ^^^^^^^^^^^^^ .. literalinclude:: ../code_examples/aiohttp_async_dsl.py Sync example ^^^^^^^^^^^^^ .. literalinclude:: ../code_examples/requests_sync_dsl.py .. _Fragment: https://graphql.org/learn/queries/#fragments .. _Inline Fragment: https://graphql.org/learn/queries/#inline-fragments .. _issue #308: https://github.com/graphql-python/gql/issues/308 gql-3.6.0b2/docs/advanced/error_handling.rst000066400000000000000000000061071460703211500207560ustar00rootroot00000000000000Error Handing ============= Local errors ------------ If gql detects locally that something does not correspond to the GraphQL specification, then gql may raise a **GraphQLError** from graphql-core. This may happen for example: - if your query is not valid - if your query does not correspond to your schema - if the result received from the backend does not correspond to the schema if :code:`parse_results` is set to True Transport errors ---------------- If an error happens with the transport, then gql may raise a :class:`TransportError ` Here are the possible Transport Errors: - :class:`TransportProtocolError `: Should never happen if the backend is a correctly configured GraphQL server. It means that the answer received from the server does not correspond to the transport protocol. - :class:`TransportServerError `: There was an error communicating with the server. 
If this error is received, then the connection with the server will be closed. This may happen if the server returned a 404 http header for example. The http error code is available in the exception :code:`code` attribute. - :class:`TransportQueryError `: There was a specific error returned from the server for your query. The message you receive in this error has been created by the backend, not gql! In that case, the connection to the server is still available and you are free to try to send other queries using the same connection. The message of the exception contains the first error returned by the backend. All the errors messages are available in the exception :code:`errors` attribute. If the error message begins with :code:`Error while fetching schema:`, it means that gql was not able to get the schema from the backend. If you don't need the schema, you can try to create the client with :code:`fetch_schema_from_transport=False` - :class:`TransportClosed `: This exception is generated when the client is trying to use the transport while the transport was previously closed. - :class:`TransportAlreadyConnected `: Exception generated when the client is trying to connect to the transport while the transport is already connected. HTTP ^^^^ For HTTP transports, we should get a json response which contain :code:`data` or :code:`errors` fields. If that is not the case, then the returned error depends whether the http return code is below 400 or not. - json response: - with data or errors keys: - no errors key -> no exception - errors key -> raise **TransportQueryError** - no data or errors keys: - http code < 400: raise **TransportProtocolError** - http code >= 400: raise **TransportServerError** - not a json response: - http code < 400: raise **TransportProtocolError** - http code >= 400: raise **TransportServerError** gql-3.6.0b2/docs/advanced/index.rst000066400000000000000000000002371460703211500170660ustar00rootroot00000000000000Advanced ======== .. 
toctree:: :maxdepth: 2 async_advanced_usage async_permanent_session logging error_handling local_schema dsl_module gql-3.6.0b2/docs/advanced/local_schema.rst000066400000000000000000000011441460703211500203670ustar00rootroot00000000000000Execution on a local schema =========================== It is also possible to execute queries against a local schema (so without a transport), even if it is not really useful except maybe for testing. .. code-block:: python from gql import gql, Client from .someSchema import SampleSchema client = Client(schema=SampleSchema) query = gql(''' { hello } ''') result = client.execute(query) See `tests/starwars/test_query.py`_ for an example .. _tests/starwars/test_query.py: https://github.com/graphql-python/gql/blob/master/tests/starwars/test_query.py gql-3.6.0b2/docs/advanced/logging.rst000066400000000000000000000026451460703211500174120ustar00rootroot00000000000000Logging ======= GQL uses the python `logging`_ module. In order to debug a problem, you can enable logging to see the messages exchanged between the client and the server. To do that, set the loglevel at **INFO** at the beginning of your code: .. code-block:: python import logging logging.basicConfig(level=logging.INFO) For even more logs, you can set the loglevel at **DEBUG**: .. code-block:: python import logging logging.basicConfig(level=logging.DEBUG) Disabling logs -------------- By default, the logs for the transports are quite verbose. On the **INFO** level, all the messages between the frontend and the backend are logged which can be difficult to read especially when it fetches the schema from the transport. It is possible to disable the logs only for a specific gql transport by setting a higher log level for this transport (**WARNING** for example) so that the other logs of your program are not affected. For this, you should import the logger from the transport file and set the level on this logger. For the RequestsHTTPTransport: .. 
code-block:: python from gql.transport.requests import log as requests_logger requests_logger.setLevel(logging.WARNING) For the WebsocketsTransport: .. code-block:: python from gql.transport.websockets import log as websockets_logger websockets_logger.setLevel(logging.WARNING) .. _logging: https://docs.python.org/3/howto/logging.html gql-3.6.0b2/docs/async/000077500000000000000000000000001460703211500145735ustar00rootroot00000000000000gql-3.6.0b2/docs/async/async_intro.rst000066400000000000000000000022141460703211500176540ustar00rootroot00000000000000On previous versions of GQL, the code was `sync` only , it means that when you ran `execute` on the Client, you could do nothing else in the current Thread and had to wait for an answer or a timeout from the backend to continue. The only http library was `requests`, allowing only sync usage. From the version 3 of GQL, we support `sync` and `async` :ref:`transports ` using `asyncio`_. With the :ref:`async transports `, there is now the possibility to execute GraphQL requests asynchronously, :ref:`allowing to execute multiple requests in parallel if needed `. If you don't care or need async functionality, it is still possible, with :ref:`async transports `, to run the `execute` or `subscribe` methods directly from the Client (as described in the :ref:`Basic Usage ` example) and GQL will execute the request in a synchronous manner by running an asyncio event loop itself. This won't work though if you already have an asyncio event loop running. In that case you should use :ref:`Async Usage ` .. _asyncio: https://docs.python.org/3/library/asyncio.html gql-3.6.0b2/docs/async/async_usage.rst000066400000000000000000000023431460703211500176300ustar00rootroot00000000000000.. _async_usage: Async Usage =========== If you use an :ref:`async transport `, you can use GQL asynchronously using `asyncio`_. 
* put your code in an asyncio coroutine (method starting with :code:`async def`) * use :code:`async with client as session:` to connect to the backend and provide a session instance * use the :code:`await` keyword to execute requests: :code:`await session.execute(...)` * then run your coroutine in an asyncio event loop by running :code:`asyncio.run` Example: .. literalinclude:: ../code_examples/aiohttp_async.py IPython ------- .. warning:: On some Python environments, like :emphasis:`Jupyter` or :emphasis:`Spyder`, which are using :emphasis:`IPython`, an asyncio event loop is already created for you by the environment. In this case, running the above code might generate the following error:: RuntimeError: asyncio.run() cannot be called from a running event loop If that happens, depending on the environment, you should replace :code:`asyncio.run(main())` by either: .. code-block:: python await main() OR: .. code-block:: python loop = asyncio.get_running_loop() loop.create_task(main()) .. _asyncio: https://docs.python.org/3/library/asyncio.html gql-3.6.0b2/docs/async/index.rst000066400000000000000000000001641460703211500164350ustar00rootroot00000000000000Async vs Sync ============= .. include:: async_intro.rst .. 
toctree:: :hidden: :maxdepth: 1 async_usage gql-3.6.0b2/docs/code_examples/000077500000000000000000000000001460703211500162665ustar00rootroot00000000000000gql-3.6.0b2/docs/code_examples/aiohttp_async.py000066400000000000000000000014201460703211500215020ustar00rootroot00000000000000import asyncio from gql import Client, gql from gql.transport.aiohttp import AIOHTTPTransport async def main(): transport = AIOHTTPTransport(url="https://countries.trevorblades.com/graphql") # Using `async with` on the client will start a connection on the transport # and provide a `session` variable to execute queries on this connection async with Client( transport=transport, fetch_schema_from_transport=True, ) as session: # Execute single query query = gql( """ query getContinents { continents { code name } } """ ) result = await session.execute(query) print(result) asyncio.run(main()) gql-3.6.0b2/docs/code_examples/aiohttp_async_dsl.py000066400000000000000000000033671460703211500223600ustar00rootroot00000000000000import asyncio from gql import Client from gql.dsl import DSLQuery, DSLSchema, dsl_gql from gql.transport.aiohttp import AIOHTTPTransport async def main(): transport = AIOHTTPTransport(url="https://countries.trevorblades.com/graphql") client = Client(transport=transport, fetch_schema_from_transport=True) # Using `async with` on the client will start a connection on the transport # and provide a `session` variable to execute queries on this connection. 
# Because we requested to fetch the schema from the transport, # GQL will fetch the schema just after the establishment of the first session async with client as session: # Instantiate the root of the DSL Schema as ds ds = DSLSchema(client.schema) # Create the query using dynamically generated attributes from ds query = dsl_gql( DSLQuery( ds.Query.continents(filter={"code": {"eq": "EU"}}).select( ds.Continent.code, ds.Continent.name ) ) ) result = await session.execute(query) print(result) # This can also be written as: # I want to query the continents query_continents = ds.Query.continents # I want to get only the continents with code equal to "EU" query_continents(filter={"code": {"eq": "EU"}}) # I want this query to return the code and name fields query_continents.select(ds.Continent.code) query_continents.select(ds.Continent.name) # I generate a document from my query to be able to execute it query = dsl_gql(DSLQuery(query_continents)) # Execute the query result = await session.execute(query) print(result) asyncio.run(main()) gql-3.6.0b2/docs/code_examples/aiohttp_sync.py000066400000000000000000000010501460703211500213400ustar00rootroot00000000000000from gql import Client, gql from gql.transport.aiohttp import AIOHTTPTransport # Select your transport with a defined url endpoint transport = AIOHTTPTransport(url="https://countries.trevorblades.com/") # Create a GraphQL client using the defined transport client = Client(transport=transport, fetch_schema_from_transport=True) # Provide a GraphQL query query = gql( """ query getContinents { continents { code name } } """ ) # Execute the query on the transport result = client.execute(query) print(result) gql-3.6.0b2/docs/code_examples/appsync/000077500000000000000000000000001460703211500177435ustar00rootroot00000000000000gql-3.6.0b2/docs/code_examples/appsync/mutation_api_key.py000066400000000000000000000024641460703211500236640ustar00rootroot00000000000000import asyncio import os import sys from urllib.parse import 
urlparse from gql import Client, gql from gql.transport.aiohttp import AIOHTTPTransport from gql.transport.appsync_auth import AppSyncApiKeyAuthentication # Uncomment the following lines to enable debug output # import logging # logging.basicConfig(level=logging.DEBUG) async def main(): # Should look like: # https://XXXXXXXXXXXXXXXXXXXXXXXXXX.appsync-api.REGION.amazonaws.com/graphql url = os.environ.get("AWS_GRAPHQL_API_ENDPOINT") api_key = os.environ.get("AWS_GRAPHQL_API_KEY") if url is None or api_key is None: print("Missing environment variables") sys.exit() # Extract host from url host = str(urlparse(url).netloc) auth = AppSyncApiKeyAuthentication(host=host, api_key=api_key) transport = AIOHTTPTransport(url=url, auth=auth) async with Client( transport=transport, fetch_schema_from_transport=False, ) as session: query = gql( """ mutation createMessage($message: String!) { createMessage(input: {message: $message}) { id message createdAt } }""" ) variable_values = {"message": "Hello world!"} result = await session.execute(query, variable_values=variable_values) print(result) asyncio.run(main()) gql-3.6.0b2/docs/code_examples/appsync/mutation_iam.py000066400000000000000000000023261460703211500230060ustar00rootroot00000000000000import asyncio import os import sys from urllib.parse import urlparse from gql import Client, gql from gql.transport.aiohttp import AIOHTTPTransport from gql.transport.appsync_auth import AppSyncIAMAuthentication # Uncomment the following lines to enable debug output # import logging # logging.basicConfig(level=logging.DEBUG) async def main(): # Should look like: # https://XXXXXXXXXXXXXXXXXXXXXXXXXX.appsync-api.REGION.amazonaws.com/graphql url = os.environ.get("AWS_GRAPHQL_API_ENDPOINT") if url is None: print("Missing environment variables") sys.exit() # Extract host from url host = str(urlparse(url).netloc) auth = AppSyncIAMAuthentication(host=host) transport = AIOHTTPTransport(url=url, auth=auth) async with Client( transport=transport, 
fetch_schema_from_transport=False, ) as session: query = gql( """ mutation createMessage($message: String!) { createMessage(input: {message: $message}) { id message createdAt } }""" ) variable_values = {"message": "Hello world!"} result = await session.execute(query, variable_values=variable_values) print(result) asyncio.run(main()) gql-3.6.0b2/docs/code_examples/appsync/subscription_api_key.py000066400000000000000000000023461460703211500245470ustar00rootroot00000000000000import asyncio import os import sys from urllib.parse import urlparse from gql import Client, gql from gql.transport.appsync_auth import AppSyncApiKeyAuthentication from gql.transport.appsync_websockets import AppSyncWebsocketsTransport # Uncomment the following lines to enable debug output # import logging # logging.basicConfig(level=logging.DEBUG) async def main(): # Should look like: # https://XXXXXXXXXXXXXXXXXXXXXXXXXX.appsync-api.REGION.amazonaws.com/graphql url = os.environ.get("AWS_GRAPHQL_API_ENDPOINT") api_key = os.environ.get("AWS_GRAPHQL_API_KEY") if url is None or api_key is None: print("Missing environment variables") sys.exit() # Extract host from url host = str(urlparse(url).netloc) print(f"Host: {host}") auth = AppSyncApiKeyAuthentication(host=host, api_key=api_key) transport = AppSyncWebsocketsTransport(url=url, auth=auth) async with Client(transport=transport) as session: subscription = gql( """ subscription onCreateMessage { onCreateMessage { message } } """ ) print("Waiting for messages...") async for result in session.subscribe(subscription): print(result) asyncio.run(main()) gql-3.6.0b2/docs/code_examples/appsync/subscription_iam.py000066400000000000000000000016551460703211500236760ustar00rootroot00000000000000import asyncio import os import sys from gql import Client, gql from gql.transport.appsync_websockets import AppSyncWebsocketsTransport # Uncomment the following lines to enable debug output # import logging # logging.basicConfig(level=logging.DEBUG) async def main(): # 
Should look like: # https://XXXXXXXXXXXXXXXXXXXXXXXXXX.appsync-api.REGION.amazonaws.com/graphql url = os.environ.get("AWS_GRAPHQL_API_ENDPOINT") if url is None: print("Missing environment variables") sys.exit() # Using implicit auth (IAM) transport = AppSyncWebsocketsTransport(url=url) async with Client(transport=transport) as session: subscription = gql( """ subscription onCreateMessage { onCreateMessage { message } } """ ) print("Waiting for messages...") async for result in session.subscribe(subscription): print(result) asyncio.run(main()) gql-3.6.0b2/docs/code_examples/console_async.py000066400000000000000000000034651460703211500215070ustar00rootroot00000000000000import asyncio import logging from aioconsole import ainput from gql import Client, gql from gql.transport.aiohttp import AIOHTTPTransport logging.basicConfig(level=logging.INFO) GET_CONTINENT_NAME = """ query getContinentName ($code: ID!) { continent (code: $code) { name } } """ class GraphQLContinentClient: def __init__(self): self._client = Client( transport=AIOHTTPTransport(url="https://countries.trevorblades.com/") ) self._session = None self.get_continent_name_query = gql(GET_CONTINENT_NAME) async def connect(self): self._session = await self._client.connect_async(reconnecting=True) async def close(self): await self._client.close_async() async def get_continent_name(self, code): params = {"code": code} answer = await self._session.execute( self.get_continent_name_query, variable_values=params ) return answer.get("continent").get("name") async def main(): continent_client = GraphQLContinentClient() continent_codes = ["AF", "AN", "AS", "EU", "NA", "OC", "SA"] await continent_client.connect() while True: answer = await ainput("\nPlease enter a continent code or 'exit':") answer = answer.strip() if answer == "exit": break elif answer in continent_codes: try: continent_name = await continent_client.get_continent_name(answer) print(f"The continent name is {continent_name}\n") except Exception as exc: 
print(f"Received exception {exc} while trying to get continent name") else: print(f"Please enter a valid continent code from {continent_codes}") await continent_client.close() asyncio.run(main()) gql-3.6.0b2/docs/code_examples/fastapi_async.py000066400000000000000000000037601460703211500214720ustar00rootroot00000000000000# First install fastapi and uvicorn: # # pip install fastapi uvicorn # # then run: # # uvicorn fastapi_async:app --reload import logging from fastapi import FastAPI, HTTPException from fastapi.responses import HTMLResponse from gql import Client, gql from gql.transport.aiohttp import AIOHTTPTransport logging.basicConfig(level=logging.DEBUG) log = logging.getLogger(__name__) transport = AIOHTTPTransport(url="https://countries.trevorblades.com/graphql") client = Client(transport=transport) query = gql( """ query getContinentInfo($code: ID!) { continent(code:$code) { name code countries { name capital } } } """ ) app = FastAPI() @app.on_event("startup") async def startup_event(): print("Connecting to GraphQL backend") await client.connect_async(reconnecting=True) print("End of startup") @app.on_event("shutdown") async def shutdown_event(): print("Shutting down GraphQL permanent connection...") await client.close_async() print("Shutting down GraphQL permanent connection... 
done") continent_codes = [ "AF", "AN", "AS", "EU", "NA", "OC", "SA", ] @app.get("/", response_class=HTMLResponse) def get_root(): continent_links = ", ".join( [f'{code}' for code in continent_codes] ) return f""" Continents Continents: {continent_links} """ @app.get("/continent/{continent_code}") async def get_continent(continent_code): if continent_code not in continent_codes: raise HTTPException(status_code=404, detail="Continent not found") try: result = await client.session.execute( query, variable_values={"code": continent_code} ) except Exception as e: log.debug(f"get_continent Error: {e}") raise HTTPException(status_code=503, detail="GraphQL backend unavailable") return result gql-3.6.0b2/docs/code_examples/httpx_async.py000066400000000000000000000014241460703211500212050ustar00rootroot00000000000000import asyncio from gql import Client, gql from gql.transport.httpx import HTTPXAsyncTransport async def main(): transport = HTTPXAsyncTransport(url="https://countries.trevorblades.com/graphql") # Using `async with` on the client will start a connection on the transport # and provide a `session` variable to execute queries on this connection async with Client( transport=transport, fetch_schema_from_transport=True, ) as session: # Execute single query query = gql( """ query getContinents { continents { code name } } """ ) result = await session.execute(query) print(result) asyncio.run(main()) gql-3.6.0b2/docs/code_examples/httpx_async_trio.py000066400000000000000000000014141460703211500222410ustar00rootroot00000000000000import trio from gql import Client, gql from gql.transport.httpx import HTTPXAsyncTransport async def main(): transport = HTTPXAsyncTransport(url="https://countries.trevorblades.com/graphql") # Using `async with` on the client will start a connection on the transport # and provide a `session` variable to execute queries on this connection async with Client( transport=transport, fetch_schema_from_transport=True, ) as session: # Execute single query 
query = gql( """ query getContinents { continents { code name } } """ ) result = await session.execute(query) print(result) trio.run(main) gql-3.6.0b2/docs/code_examples/httpx_sync.py000066400000000000000000000005711460703211500210460ustar00rootroot00000000000000from gql import Client, gql from gql.transport.httpx import HTTPXTransport transport = HTTPXTransport(url="https://countries.trevorblades.com/") client = Client(transport=transport, fetch_schema_from_transport=True) query = gql( """ query getContinents { continents { code name } } """ ) result = client.execute(query) print(result) gql-3.6.0b2/docs/code_examples/phoenix_channel_async.py000066400000000000000000000013361460703211500232020ustar00rootroot00000000000000import asyncio from gql import Client, gql from gql.transport.phoenix_channel_websockets import PhoenixChannelWebsocketsTransport async def main(): transport = PhoenixChannelWebsocketsTransport( channel_name="YOUR_CHANNEL", url="wss://YOUR_URL/graphql" ) # Using `async with` on the client will start a connection on the transport # and provide a `session` variable to execute queries on this connection async with Client(transport=transport) as session: # Execute single query query = gql( """ query yourQuery { ... 
} """ ) result = await session.execute(query) print(result) asyncio.run(main()) gql-3.6.0b2/docs/code_examples/reconnecting_mutation_http.py000066400000000000000000000020741460703211500243000ustar00rootroot00000000000000import asyncio import logging import backoff from gql import Client, gql from gql.transport.aiohttp import AIOHTTPTransport logging.basicConfig(level=logging.INFO) async def main(): # Note: this example used the test backend from # https://github.com/slothmanxyz/typegraphql-ws-apollo transport = AIOHTTPTransport(url="ws://localhost:5000/graphql") client = Client(transport=transport) retry_connect = backoff.on_exception( backoff.expo, Exception, max_value=10, jitter=None, ) session = await client.connect_async(reconnecting=True, retry_connect=retry_connect) num = 0 while True: num += 1 # Execute single query query = gql("mutation ($message: String!) {sendMessage(message: $message)}") params = {"message": f"test {num}"} try: result = await session.execute(query, variable_values=params) print(result) except Exception as e: print(f"Received exception {e}") await asyncio.sleep(1) asyncio.run(main()) gql-3.6.0b2/docs/code_examples/reconnecting_mutation_ws.py000066400000000000000000000021051460703211500237450ustar00rootroot00000000000000import asyncio import logging import backoff from gql import Client, gql from gql.transport.websockets import WebsocketsTransport logging.basicConfig(level=logging.INFO) async def main(): # Note: this example used the test backend from # https://github.com/slothmanxyz/typegraphql-ws-apollo transport = WebsocketsTransport(url="ws://localhost:5000/graphql") client = Client(transport=transport) retry_connect = backoff.on_exception( backoff.expo, Exception, max_value=10, jitter=None, ) session = await client.connect_async(reconnecting=True, retry_connect=retry_connect) num = 0 while True: num += 1 # Execute single query query = gql("mutation ($message: String!) 
{sendMessage(message: $message)}") params = {"message": f"test {num}"} try: result = await session.execute(query, variable_values=params) print(result) except Exception as e: print(f"Received exception {e}") await asyncio.sleep(1) asyncio.run(main()) gql-3.6.0b2/docs/code_examples/reconnecting_subscription.py000066400000000000000000000014031460703211500241200ustar00rootroot00000000000000import asyncio import logging from gql import Client, gql from gql.transport.websockets import WebsocketsTransport logging.basicConfig(level=logging.INFO) async def main(): # Note: this example used the test backend from # https://github.com/slothmanxyz/typegraphql-ws-apollo transport = WebsocketsTransport(url="ws://localhost:5000/graphql") client = Client(transport=transport) session = await client.connect_async(reconnecting=True) query = gql("subscription {receiveMessage {message}}") while True: try: async for result in session.subscribe(query): print(result) except Exception as e: print(f"Received exception {e}") await asyncio.sleep(1) asyncio.run(main()) gql-3.6.0b2/docs/code_examples/requests_sync.py000066400000000000000000000006611460703211500215520ustar00rootroot00000000000000from gql import Client, gql from gql.transport.requests import RequestsHTTPTransport transport = RequestsHTTPTransport( url="https://countries.trevorblades.com/", verify=True, retries=3, ) client = Client(transport=transport, fetch_schema_from_transport=True) query = gql( """ query getContinents { continents { code name } } """ ) result = client.execute(query) print(result) gql-3.6.0b2/docs/code_examples/requests_sync_dsl.py000066400000000000000000000020561460703211500224140ustar00rootroot00000000000000from gql import Client from gql.dsl import DSLQuery, DSLSchema, dsl_gql from gql.transport.requests import RequestsHTTPTransport transport = RequestsHTTPTransport( url="https://countries.trevorblades.com/", verify=True, retries=3, ) client = Client(transport=transport, fetch_schema_from_transport=True) # 
Using `with` on the sync client will start a connection on the transport # and provide a `session` variable to execute queries on this connection. # Because we requested to fetch the schema from the transport, # GQL will fetch the schema just after the establishment of the first session with client as session: # We should have received the schema now that the session is established assert client.schema is not None # Instantiate the root of the DSL Schema as ds ds = DSLSchema(client.schema) # Create the query using dynamically generated attributes from ds query = dsl_gql( DSLQuery(ds.Query.continents.select(ds.Continent.code, ds.Continent.name)) ) result = session.execute(query) print(result) gql-3.6.0b2/docs/code_examples/websockets_async.py000066400000000000000000000021741460703211500222120ustar00rootroot00000000000000import asyncio import logging from gql import Client, gql from gql.transport.websockets import WebsocketsTransport logging.basicConfig(level=logging.INFO) async def main(): transport = WebsocketsTransport(url="wss://countries.trevorblades.com/graphql") # Using `async with` on the client will start a connection on the transport # and provide a `session` variable to execute queries on this connection async with Client( transport=transport, fetch_schema_from_transport=True, ) as session: # Execute single query query = gql( """ query getContinents { continents { code name } } """ ) result = await session.execute(query) print(result) # Request subscription subscription = gql( """ subscription { somethingChanged { id } } """ ) async for result in session.subscribe(subscription): print(result) asyncio.run(main()) gql-3.6.0b2/docs/conf.py000066400000000000000000000051011460703211500147520ustar00rootroot00000000000000# Configuration file for the Sphinx documentation builder. # # This file only contains a selection of the most common options. 
For a full # list see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # import os import sys sys.path.insert(0, os.path.abspath('./..')) # -- Project information ----------------------------------------------------- project = 'gql 3' copyright = '2020, graphql-python.org' author = 'graphql-python.org' # The full version, including alpha/beta/rc tags from gql import __version__ release = __version__ # -- General configuration --------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinxarg.ext', 'sphinx.ext.autodoc', 'sphinx_rtd_theme' ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = 'sphinx_rtd_theme' # Output file base name for HTML help builder. 
htmlhelp_basename = 'gql-3-doc' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". # html_static_path = ['_static'] # -- AutoDoc configuration ------------------------------------------------- # autoclass_content = "both" autodoc_default_options = { 'members': True, 'inherited-members': True, 'special-members': '__init__', 'undoc-members': True, 'show-inheritance': True } autosummary_generate = True gql-3.6.0b2/docs/gql-cli/000077500000000000000000000000001460703211500150065ustar00rootroot00000000000000gql-3.6.0b2/docs/gql-cli/intro.rst000066400000000000000000000040111460703211500166670ustar00rootroot00000000000000.. _gql_cli: gql-cli ======= GQL provides a python 3.7+ script, called `gql-cli` which allows you to execute GraphQL queries directly from the terminal. This script supports http(s) or websockets protocols. Usage ----- .. argparse:: :module: gql.cli :func: get_parser :prog: gql-cli Examples -------- Simple query using https ^^^^^^^^^^^^^^^^^^^^^^^^^ .. code-block:: shell $ echo 'query { continent(code:"AF") { name } }' | gql-cli https://countries.trevorblades.com {"continent": {"name": "Africa"}} Simple query using websockets ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. code-block:: shell $ echo 'query { continent(code:"AF") { name } }' | gql-cli wss://countries.trevorblades.com/graphql {"continent": {"name": "Africa"}} Query with variable ^^^^^^^^^^^^^^^^^^^ .. code-block:: shell $ echo 'query getContinent($code:ID!) { continent(code:$code) { name } }' | gql-cli https://countries.trevorblades.com --variables code:AF {"continent": {"name": "Africa"}} Interactive usage ^^^^^^^^^^^^^^^^^ Insert your query in the terminal, then press Ctrl-D to execute it. .. 
code-block:: shell $ gql-cli wss://countries.trevorblades.com/graphql --variables code:AF Execute query saved in a file ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Put the query in a file: .. code-block:: shell $ echo 'query { continent(code:"AF") { name } }' > query.gql Then execute query from the file: .. code-block:: shell $ cat query.gql | gql-cli wss://countries.trevorblades.com/graphql {"continent": {"name": "Africa"}} Print the GraphQL schema in a file ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. code-block:: shell $ gql-cli https://countries.trevorblades.com/graphql --print-schema > schema.graphql .. note:: By default, deprecated input fields are not requested from the backend. You can add :code:`--schema-download input_value_deprecation:true` to request them. .. note:: You can add :code:`--schema-download descriptions:false` to request a compact schema without comments. gql-3.6.0b2/docs/index.rst000066400000000000000000000004741460703211500153240ustar00rootroot00000000000000Welcome to GQL 3 documentation! =============================== Contents -------- .. toctree:: :maxdepth: 2 intro usage/index async/index transports/index advanced/index gql-cli/intro modules/gql Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` gql-3.6.0b2/docs/intro.rst000066400000000000000000000106251460703211500153470ustar00rootroot00000000000000Introduction ============ `GQL 3`_ is a `GraphQL`_ Client for Python 3.7+ which plays nicely with other graphql implementations compatible with the spec. Under the hood, it uses `GraphQL-core`_ which is a Python port of `GraphQL.js`_, the JavaScript reference implementation for GraphQL. Installation ------------ You can install GQL 3 and all the extra dependencies using pip_:: pip install "gql[all]" To have the latest pre-releases versions of gql, you can use:: pip install --pre "gql[all]" After installation, you can start using GQL by importing from the top-level :mod:`gql` package. 
Less dependencies ^^^^^^^^^^^^^^^^^ GQL supports multiple :ref:`transports ` to communicate with the backend. Each transport can necessitate specific dependencies. If you only need one transport you might want to install only the dependency needed for your transport, instead of using the "`all`" extra dependency as described above, which installs everything. If for example you only need the :ref:`AIOHTTPTransport `, which needs the :code:`aiohttp` dependency, then you can install GQL with:: pip install gql[aiohttp] The corresponding between extra dependencies required and the GQL classes is: +---------------------+----------------------------------------------------------------+ | Extra dependencies | Classes | +=====================+================================================================+ | aiohttp | :ref:`AIOHTTPTransport ` | +---------------------+----------------------------------------------------------------+ | websockets | :ref:`WebsocketsTransport ` | | | | | | :ref:`PhoenixChannelWebsocketsTransport ` | | | | | | :ref:`AppSyncWebsocketsTransport ` | +---------------------+----------------------------------------------------------------+ | requests | :ref:`RequestsHTTPTransport ` | +---------------------+----------------------------------------------------------------+ | httpx | :ref:`HTTPTXTransport ` | | | | | | :ref:`HTTPXAsyncTransport ` | +---------------------+----------------------------------------------------------------+ | botocore | :ref:`AppSyncIAMAuthentication ` | +---------------------+----------------------------------------------------------------+ .. note:: It is also possible to install multiple extra dependencies if needed using commas: :code:`gql[aiohttp,websockets]` Installation with conda ^^^^^^^^^^^^^^^^^^^^^^^ It is also possible to install gql using `conda`_. 
To install gql with all extra dependencies:: conda install gql-with-all To install gql with less dependencies, you might want to instead install a combinaison of the following packages: :code:`gql-with-aiohttp`, :code:`gql-with-websockets`, :code:`gql-with-requests`, :code:`gql-with-botocore` If you want to have the latest pre-releases version of gql and graphql-core, you can install them with conda using:: conda install -c conda-forge -c conda-forge/label/graphql_core_alpha -c conda-forge/label/gql_beta gql-with-all Reporting Issues and Contributing --------------------------------- Please visit the `GitHub repository for gql`_ if you're interested in the current development or want to report issues or send pull requests. We welcome all kinds of contributions if the coding guidelines are respected. Please check the `Contributing`_ file to learn how to make a good pull request. .. _GraphQL: https://graphql.org/ .. _GraphQL-core: https://github.com/graphql-python/graphql-core .. _GraphQL.js: https://github.com/graphql/graphql-js .. _GQL 3: https://github.com/graphql-python/gql .. _pip: https://pip.pypa.io/ .. _GitHub repository for gql: https://github.com/graphql-python/gql .. _Contributing: https://github.com/graphql-python/gql/blob/master/CONTRIBUTING.md .. _conda: https://docs.conda.io gql-3.6.0b2/docs/make.bat000066400000000000000000000013741460703211500150700ustar00rootroot00000000000000@ECHO OFF pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set SOURCEDIR=source set BUILDDIR=build if "%1" == "" goto help %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. 
echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% goto end :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% :end popd gql-3.6.0b2/docs/modules/000077500000000000000000000000001460703211500151265ustar00rootroot00000000000000gql-3.6.0b2/docs/modules/client.rst000066400000000000000000000001211460703211500171300ustar00rootroot00000000000000gql.client ========== .. currentmodule:: gql.client .. automodule:: gql.client gql-3.6.0b2/docs/modules/dsl.rst000066400000000000000000000001411460703211500164360ustar00rootroot00000000000000gql.dsl ======= .. currentmodule:: gql.dsl .. automodule:: gql.dsl :member-order: bysource gql-3.6.0b2/docs/modules/gql.rst000066400000000000000000000007511460703211500164460ustar00rootroot00000000000000Reference ========= .. currentmodule:: gql .. _top-level-functions: Top-Level Functions ------------------- .. automodule:: gql .. _sub-packages: Sub-Packages ------------ .. toctree:: :maxdepth: 1 client transport transport_aiohttp transport_appsync_auth transport_appsync_websockets transport_exceptions transport_phoenix_channel_websockets transport_requests transport_httpx transport_websockets transport_websockets_base dsl utilities gql-3.6.0b2/docs/modules/transport.rst000066400000000000000000000003551460703211500177170ustar00rootroot00000000000000gql.transport ============= .. currentmodule:: gql.transport .. autoclass:: gql.transport.transport.Transport .. autoclass:: gql.transport.async_transport.AsyncTransport .. autoclass:: gql.transport.local_schema.LocalSchemaTransport gql-3.6.0b2/docs/modules/transport_aiohttp.rst000066400000000000000000000002311460703211500214400ustar00rootroot00000000000000gql.transport.aiohttp ===================== .. currentmodule:: gql.transport.aiohttp .. 
automodule:: gql.transport.aiohttp :member-order: bysource gql-3.6.0b2/docs/modules/transport_appsync_auth.rst000066400000000000000000000002551460703211500224740ustar00rootroot00000000000000gql.transport.appsync_auth ========================== .. currentmodule:: gql.transport.appsync_auth .. automodule:: gql.transport.appsync_auth :member-order: bysource gql-3.6.0b2/docs/modules/transport_appsync_websockets.rst000066400000000000000000000003051460703211500237000ustar00rootroot00000000000000gql.transport.appsync_websockets ================================ .. currentmodule:: gql.transport.appsync_websockets .. automodule:: gql.transport.appsync_websockets :member-order: bysource gql-3.6.0b2/docs/modules/transport_exceptions.rst000066400000000000000000000002451460703211500221560ustar00rootroot00000000000000gql.transport.exceptions ======================== .. currentmodule:: gql.transport.exceptions .. automodule:: gql.transport.exceptions :member-order: bysource gql-3.6.0b2/docs/modules/transport_httpx.rst000066400000000000000000000002211460703211500211360ustar00rootroot00000000000000gql.transport.httpx =================== .. currentmodule:: gql.transport.httpx .. automodule:: gql.transport.httpx :member-order: bysource gql-3.6.0b2/docs/modules/transport_phoenix_channel_websockets.rst000066400000000000000000000003451460703211500253710ustar00rootroot00000000000000gql.transport.phoenix_channel_websockets ======================================== .. currentmodule:: gql.transport.phoenix_channel_websockets .. automodule:: gql.transport.phoenix_channel_websockets :member-order: bysource gql-3.6.0b2/docs/modules/transport_requests.rst000066400000000000000000000002351460703211500216470ustar00rootroot00000000000000gql.transport.requests ====================== .. currentmodule:: gql.transport.requests .. 
automodule:: gql.transport.requests :member-order: bysource gql-3.6.0b2/docs/modules/transport_websockets.rst000066400000000000000000000002451460703211500221460ustar00rootroot00000000000000gql.transport.websockets ======================== .. currentmodule:: gql.transport.websockets .. automodule:: gql.transport.websockets :member-order: bysource gql-3.6.0b2/docs/modules/transport_websockets_base.rst000066400000000000000000000002711460703211500231370ustar00rootroot00000000000000gql.transport.websockets_base ============================= .. currentmodule:: gql.transport.websockets_base .. automodule:: gql.transport.websockets_base :member-order: bysource gql-3.6.0b2/docs/modules/utilities.rst000066400000000000000000000001351460703211500176720ustar00rootroot00000000000000gql.utilities ============= .. currentmodule:: gql.utilities .. automodule:: gql.utilities gql-3.6.0b2/docs/requirements.txt000066400000000000000000000001251460703211500167400ustar00rootroot00000000000000sphinx>=5.3.0,<6 sphinx_rtd_theme>=0.4,<1 sphinx-argparse==0.2.5 multidict<5.0,>=4.5 gql-3.6.0b2/docs/transports/000077500000000000000000000000001460703211500156755ustar00rootroot00000000000000gql-3.6.0b2/docs/transports/aiohttp.rst000066400000000000000000000027021460703211500201000ustar00rootroot00000000000000.. _aiohttp_transport: AIOHTTPTransport ================ This transport uses the `aiohttp`_ library and allows you to send GraphQL queries using the HTTP protocol. Reference: :class:`gql.transport.aiohttp.AIOHTTPTransport` .. note:: GraphQL subscriptions are not supported on the HTTP transport. For subscriptions you should use the :ref:`websockets transport `. .. literalinclude:: ../code_examples/aiohttp_async.py Authentication -------------- There are multiple ways to authenticate depending on the server configuration. 1. Using HTTP Headers .. code-block:: python transport = AIOHTTPTransport( url='https://SERVER_URL:SERVER_PORT/graphql', headers={'Authorization': 'token'} ) 2. 
Using HTTP Cookies You can manually set the cookies which will be sent with each connection: .. code-block:: python transport = AIOHTTPTransport(url=url, cookies={"cookie1": "val1"}) Or you can use a cookie jar to save cookies set from the backend and reuse them later. In some cases, the server will set some connection cookies after a successful login mutation and you can save these cookies in a cookie jar to reuse them in a following connection (See `issue 197`_): .. code-block:: python jar = aiohttp.CookieJar() transport = AIOHTTPTransport(url=url, client_session_args={'cookie_jar': jar}) .. _aiohttp: https://docs.aiohttp.org .. _issue 197: https://github.com/graphql-python/gql/issues/197 gql-3.6.0b2/docs/transports/appsync.rst000066400000000000000000000135151460703211500201110ustar00rootroot00000000000000.. _appsync_transport: AppSyncWebsocketsTransport ========================== AWS AppSync allows you to execute GraphQL subscriptions on its realtime GraphQL endpoint. See `Building a real-time websocket client`_ for an explanation. GQL provides the :code:`AppSyncWebsocketsTransport` transport which implements this for you to allow you to execute subscriptions. .. note:: It is only possible to execute subscriptions with this transport. For queries or mutations, See :ref:`AppSync GraphQL Queries and mutations ` How to use it: * choose one :ref:`authentication method ` (API key, IAM, Cognito user pools or OIDC) * instantiate a :code:`AppSyncWebsocketsTransport` with your GraphQL endpoint as url and your auth method .. note:: It is also possible to instantiate the transport without an auth argument. In that case, gql will use by default the :class:`IAM auth ` which will try to authenticate with environment variables or from your aws credentials file. .. note:: All the examples in this documentation are based on the sample app created by following `this AWS blog post`_ Full example with API key authentication from environment variables: .. 
literalinclude:: ../code_examples/appsync/subscription_api_key.py Reference: :class:`gql.transport.appsync_websockets.AppSyncWebsocketsTransport` .. _Building a real-time websocket client: https://docs.aws.amazon.com/appsync/latest/devguide/real-time-websocket-client.html .. _this AWS blog post: https://aws.amazon.com/fr/blogs/mobile/appsync-realtime/ .. _appsync_authentication_methods: Authentication methods ---------------------- .. _appsync_api_key_auth: API key ^^^^^^^ Use the :code:`AppSyncApiKeyAuthentication` class to provide your API key: .. code-block:: python auth = AppSyncApiKeyAuthentication( host="XXXXXXXXXXXXXXXXXXXXXXXXXX.appsync-api.REGION.amazonaws.com", api_key="YOUR_API_KEY", ) transport = AppSyncWebsocketsTransport( url="https://XXXXXXXXXXXXXXXXXXXXXXXXXX.appsync-api.REGION.amazonaws.com/graphql", auth=auth, ) Reference: :class:`gql.transport.appsync_auth.AppSyncApiKeyAuthentication` .. _appsync_iam_auth: IAM ^^^ For the IAM authentication, you can simply create your transport without an auth argument. The region name will be autodetected from the url or from your AWS configuration (:code:`.aws/config`) or the environment variable: - AWS_DEFAULT_REGION The credentials will be detected from your AWS configuration file (:code:`.aws/credentials`) or from the environment variables: - AWS_ACCESS_KEY_ID - AWS_SECRET_ACCESS_KEY - AWS_SESSION_TOKEN (optional) .. code-block:: python transport = AppSyncWebsocketsTransport( url="https://XXXXXXXXXXXXXXXXXXXXXXXXXX.appsync-api.REGION.amazonaws.com/graphql", ) OR You can also provide the credentials manually by creating the :code:`AppSyncIAMAuthentication` class yourself: .. 
code-block:: python from botocore.credentials import Credentials credentials = Credentials( access_key = os.environ.get("AWS_ACCESS_KEY_ID"), secret_key= os.environ.get("AWS_SECRET_ACCESS_KEY"), token=os.environ.get("AWS_SESSION_TOKEN", None), # Optional ) auth = AppSyncIAMAuthentication( host="XXXXXXXXXXXXXXXXXXXXXXXXXX.appsync-api.REGION.amazonaws.com", credentials=credentials, region_name="your region" ) transport = AppSyncWebsocketsTransport( url="https://XXXXXXXXXXXXXXXXXXXXXXXXXX.appsync-api.REGION.amazonaws.com/graphql", auth=auth, ) Reference: :class:`gql.transport.appsync_auth.AppSyncIAMAuthentication` .. _appsync_jwt_auth: Json Web Tokens (jwt) ^^^^^^^^^^^^^^^^^^^^^ AWS provides json web tokens (jwt) for the authentication methods: - Amazon Cognito user pools - OpenID Connect (OIDC) For these authentication methods, you can use the :code:`AppSyncJWTAuthentication` class: .. code-block:: python auth = AppSyncJWTAuthentication( host="XXXXXXXXXXXXXXXXXXXXXXXXXX.appsync-api.REGION.amazonaws.com", jwt="YOUR_JWT_STRING", ) transport = AppSyncWebsocketsTransport( url="https://XXXXXXXXXXXXXXXXXXXXXXXXXX.appsync-api.REGION.amazonaws.com/graphql", auth=auth, ) Reference: :class:`gql.transport.appsync_auth.AppSyncJWTAuthentication` .. _appsync_http: AppSync GraphQL Queries and mutations ------------------------------------- Queries and mutations are not allowed on the realtime websockets endpoint. But you can use the :ref:`AIOHTTPTransport ` to create a normal http session and reuse the authentication classes to create the headers for you. Full example with API key authentication from environment variables: .. literalinclude:: ../code_examples/appsync/mutation_api_key.py From the command line --------------------- Using :ref:`gql-cli `, it is possible to execute GraphQL queries and subscriptions from the command line on an AppSync endpoint. 
- For queries and mutations, use the :code:`--transport appsync_http` argument:: # Put the request in a file $ echo 'mutation createMessage($message: String!) { createMessage(input: {message: $message}) { id message createdAt } }' > mutation.graphql # Execute the request using gql-cli with --transport appsync_http $ cat mutation.graphql | gql-cli $AWS_GRAPHQL_API_ENDPOINT --transport appsync_http -V message:"Hello world!" - For subscriptions, use the :code:`--transport appsync_websockets` argument:: echo "subscription{onCreateMessage{message}}" | gql-cli $AWS_GRAPHQL_API_ENDPOINT --transport appsync_websockets - You can also get the full GraphQL schema from the backend from introspection:: $ gql-cli $AWS_GRAPHQL_API_ENDPOINT --transport appsync_http --print-schema > schema.graphql gql-3.6.0b2/docs/transports/async_transports.rst000066400000000000000000000004551460703211500220470ustar00rootroot00000000000000.. _async_transports: Async Transports ================ Async transports are transports which are using an underlying async library. They allow us to :ref:`run GraphQL queries asynchronously ` .. toctree:: :maxdepth: 1 aiohttp httpx_async websockets phoenix appsync gql-3.6.0b2/docs/transports/httpx.rst000066400000000000000000000005141460703211500175760ustar00rootroot00000000000000.. _httpx_transport: HTTPXTransport ============== The HTTPXTransport is a sync transport using the `httpx`_ library and allows you to send GraphQL queries using the HTTP protocol. Reference: :class:`gql.transport.httpx.HTTPXTransport` .. literalinclude:: ../code_examples/httpx_sync.py .. _httpx: https://www.python-httpx.org gql-3.6.0b2/docs/transports/httpx_async.rst000066400000000000000000000017451460703211500210020ustar00rootroot00000000000000.. _httpx_async_transport: HTTPXAsyncTransport =================== This transport uses the `httpx`_ library and allows you to send GraphQL queries using the HTTP protocol. Reference: :class:`gql.transport.httpx.HTTPXAsyncTransport` .. 
note:: GraphQL subscriptions are not supported on the HTTP transport. For subscriptions you should use the :ref:`websockets transport `. .. literalinclude:: ../code_examples/httpx_async.py Authentication -------------- There are multiple ways to authenticate depending on the server configuration. 1. Using HTTP Headers .. code-block:: python transport = HTTPXAsyncTransport( url='https://SERVER_URL:SERVER_PORT/graphql', headers={'Authorization': 'token'} ) 2. Using HTTP Cookies You can manually set the cookies which will be sent with each connection: .. code-block:: python transport = HTTPXAsyncTransport(url=url, cookies={"cookie1": "val1"}) .. _httpx: https://www.python-httpx.org gql-3.6.0b2/docs/transports/index.rst000066400000000000000000000004251460703211500175370ustar00rootroot00000000000000.. _transports: Transports ========== GQL Transports are used to define how the connection is made with the backend. We have different transports for different underlying protocols (http, websockets, ...) .. toctree:: :maxdepth: 2 async_transports sync_transports gql-3.6.0b2/docs/transports/phoenix.rst000066400000000000000000000011301460703211500200740ustar00rootroot00000000000000.. _phoenix_transport: PhoenixChannelWebsocketsTransport ================================= The PhoenixChannelWebsocketsTransport is an async transport which allows you to execute queries and subscriptions against an `Absinthe`_ backend using the `Phoenix`_ framework `channels`_. Reference: :class:`gql.transport.phoenix_channel_websockets.PhoenixChannelWebsocketsTransport` .. literalinclude:: ../code_examples/phoenix_channel_async.py .. _Absinthe: http://absinthe-graphql.org .. _Phoenix: https://www.phoenixframework.org .. _channels: https://hexdocs.pm/phoenix/Phoenix.Channel.html#content gql-3.6.0b2/docs/transports/requests.rst000066400000000000000000000005721460703211500203060ustar00rootroot00000000000000.. 
_requests_transport: RequestsHTTPTransport ===================== The RequestsHTTPTransport is a sync transport using the `requests`_ library and allows you to send GraphQL queries using the HTTP protocol. Reference: :class:`gql.transport.requests.RequestsHTTPTransport` .. literalinclude:: ../code_examples/requests_sync.py .. _requests: https://requests.readthedocs.io gql-3.6.0b2/docs/transports/sync_transports.rst000066400000000000000000000003331460703211500217010ustar00rootroot00000000000000.. _sync_transports: Sync Transports ================ Sync transports are transports which are using an underlying sync library. They cannot be used asynchronously. .. toctree:: :maxdepth: 1 requests httpx gql-3.6.0b2/docs/transports/websockets.rst000066400000000000000000000125511460703211500206040ustar00rootroot00000000000000.. _websockets_transport: WebsocketsTransport =================== The websockets transport supports both: - the `Apollo websockets transport protocol`_. - the `GraphQL-ws websockets transport protocol`_ It will propose both subprotocols to the backend and detect the supported protocol from the response http headers returned by the backend. .. note:: For some backends (graphql-ws before `version 5.6.1`_ without backwards compatibility), it may be necessary to specify only one subprotocol to the backend. It can be done by using :code:`subprotocols=[WebsocketsTransport.GRAPHQLWS_SUBPROTOCOL]` or :code:`subprotocols=[WebsocketsTransport.APOLLO_SUBPROTOCOL]` in the transport arguments. This transport allows to do multiple queries, mutations and subscriptions on the same websocket connection. Reference: :class:`gql.transport.websockets.WebsocketsTransport` .. literalinclude:: ../code_examples/websockets_async.py Websockets SSL -------------- If you need to connect to an ssl encrypted endpoint: * use :code:`wss` instead of :code:`ws` in the url of the transport .. 
code-block:: python transport = WebsocketsTransport( url='wss://SERVER_URL:SERVER_PORT/graphql', headers={'Authorization': 'token'} ) If you have a self-signed ssl certificate, you need to provide an ssl_context with the server public certificate: .. code-block:: python import pathlib import ssl ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) localhost_pem = pathlib.Path(__file__).with_name("YOUR_SERVER_PUBLIC_CERTIFICATE.pem") ssl_context.load_verify_locations(localhost_pem) transport = WebsocketsTransport( url='wss://SERVER_URL:SERVER_PORT/graphql', ssl=ssl_context ) If you have also need to have a client ssl certificate, add: .. code-block:: python ssl_context.load_cert_chain(certfile='YOUR_CLIENT_CERTIFICATE.pem', keyfile='YOUR_CLIENT_CERTIFICATE_KEY.key') Websockets authentication ------------------------- There are two ways to send authentication tokens with websockets depending on the server configuration. 1. Using HTTP Headers .. code-block:: python transport = WebsocketsTransport( url='wss://SERVER_URL:SERVER_PORT/graphql', headers={'Authorization': 'token'} ) 2. With a payload in the connection_init websocket message .. code-block:: python transport = WebsocketsTransport( url='wss://SERVER_URL:SERVER_PORT/graphql', init_payload={'Authorization': 'token'} ) .. _websockets_transport_keepalives: Keep-Alives ----------- Apollo protocol ^^^^^^^^^^^^^^^ With the Apollo protocol, the backend can optionally send unidirectional keep-alive ("ka") messages (only from the server to the client). It is possible to configure the transport to close if we don't receive a "ka" message within a specified time using the :code:`keep_alive_timeout` parameter. 
Here is an example with 60 seconds:: transport = WebsocketsTransport( url='wss://SERVER_URL:SERVER_PORT/graphql', keep_alive_timeout=60, ) One disadvantage of the Apollo protocol is that because the keep-alives are only sent from the server to the client, it can be difficult to detect the loss of a connection quickly from the server side. GraphQL-ws protocol ^^^^^^^^^^^^^^^^^^^ With the GraphQL-ws protocol, it is possible to send bidirectional ping/pong messages. Pings can be sent either from the client or the server and the other party should answer with a pong. As with the Apollo protocol, it is possible to configure the transport to close if we don't receive any message from the backend within the specified time using the :code:`keep_alive_timeout` parameter. But there is also the possibility for the client to send pings at a regular interval and verify that the backend sends a pong within a specified delay. This can be done using the :code:`ping_interval` and :code:`pong_timeout` parameters. Here is an example with a ping sent every 60 seconds, expecting a pong within 10 seconds:: transport = WebsocketsTransport( url='wss://SERVER_URL:SERVER_PORT/graphql', ping_interval=60, pong_timeout=10, ) Underlying websockets protocol ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ In addition to the keep-alives described above for the apollo and graphql-ws protocols, there are also `ping frames`_ sent by the underlying websocket connection itself for both of them. These pings are enabled by default (every 20 seconds) and could be modified or disabled by passing extra arguments to the :code:`connect` call of the websockets client using the :code:`connect_args` argument of the transport. .. code-block:: python # Disabling websocket protocol level pings transport = WebsocketsTransport( url='wss://SERVER_URL:SERVER_PORT/graphql', connect_args={"ping_interval": None}, ) See the `websockets keepalive documentation`_ for details. .. 
_version 5.6.1: https://github.com/enisdenjo/graphql-ws/releases/tag/v5.6.1 .. _Apollo websockets transport protocol: https://github.com/apollographql/subscriptions-transport-ws/blob/master/PROTOCOL.md .. _GraphQL-ws websockets transport protocol: https://github.com/enisdenjo/graphql-ws/blob/master/PROTOCOL.md .. _ping frames: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.5.2 .. _websockets keepalive documentation: https://websockets.readthedocs.io/en/stable/topics/timeouts.html#keepalive-in-websockets gql-3.6.0b2/docs/usage/000077500000000000000000000000001460703211500145625ustar00rootroot00000000000000gql-3.6.0b2/docs/usage/basic_usage.rst000066400000000000000000000014061460703211500175620ustar00rootroot00000000000000.. _basic_usage: Basic usage ----------- In order to execute a GraphQL request against a GraphQL API: * create your gql :ref:`transport ` in order to choose the destination url and the protocol used to communicate with it * create a gql :class:`Client ` with the selected transport * parse a query using :func:`gql ` * execute the query on the client to get the result .. literalinclude:: ../code_examples/aiohttp_sync.py .. warning:: Please note that this basic example won't work if you have an asyncio event loop running. In some python environments (as with Jupyter which uses IPython) an asyncio event loop is created for you. In that case you should use instead the :ref:`Async Usage example`. gql-3.6.0b2/docs/usage/custom_scalars_and_enums.rst000066400000000000000000000235151460703211500223750ustar00rootroot00000000000000Custom scalars and enums ======================== .. _custom_scalars: Custom scalars -------------- Scalar types represent primitive values at the leaves of a query. GraphQL provides a number of built-in scalars (Int, Float, String, Boolean and ID), but a GraphQL backend can add additional custom scalars to its schema to better express values in their data model. 
For example, a schema can define the Datetime scalar to represent an ISO-8601 encoded date. The schema will then only contain:: scalar Datetime When custom scalars are sent to the backend (as inputs) or from the backend (as outputs), their values need to be serialized to be composed of only built-in scalars, then at the destination the serialized values will be parsed again to be able to represent the scalar in its local internal representation. Because this serialization/unserialization is dependent on the language used at both sides, it is not described in the schema and needs to be defined independently at both sides (client, backend). A custom scalar value can have two different representations during its transport: - as a serialized value (usually as json): * in the results sent by the backend * in the variables sent by the client alongside the query - as "literal" inside the query itself sent by the client To define a custom scalar, you need 3 methods: - a :code:`serialize` method used: * by the backend to serialize a custom scalar output in the result * by the client to serialize a custom scalar input in the variables - a :code:`parse_value` method used: * by the backend to unserialize custom scalars inputs in the variables sent by the client * by the client to unserialize custom scalars outputs from the results - a :code:`parse_literal` method used: * by the backend to unserialize custom scalars inputs inside the query itself To define a custom scalar object, graphql-core provides the :code:`GraphQLScalarType` class which contains the implementation of the above methods. Example for Datetime: .. 
code-block:: python from datetime import datetime from typing import Any, Dict, Optional from graphql import GraphQLScalarType, ValueNode from graphql.utilities import value_from_ast_untyped def serialize_datetime(value: Any) -> str: return value.isoformat() def parse_datetime_value(value: Any) -> datetime: return datetime.fromisoformat(value) def parse_datetime_literal( value_node: ValueNode, variables: Optional[Dict[str, Any]] = None ) -> datetime: ast_value = value_from_ast_untyped(value_node, variables) return parse_datetime_value(ast_value) DatetimeScalar = GraphQLScalarType( name="Datetime", serialize=serialize_datetime, parse_value=parse_datetime_value, parse_literal=parse_datetime_literal, ) If you get your schema from a "schema.graphql" file or from introspection, then the generated schema in the gql Client will contain default :code:`GraphQLScalarType` instances where the serialize and parse_value methods simply return the serialized value without modification. In that case, if you want gql to parse custom scalars to a more useful Python representation, or to serialize custom scalars variables from a Python representation, then you can use the :func:`update_schema_scalars ` or :func:`update_schema_scalar ` methods to modify the definition of a scalar in your schema so that gql could do the parsing/serialization. .. code-block:: python from gql.utilities import update_schema_scalar with open('path/to/schema.graphql') as f: schema_str = f.read() client = Client(schema=schema_str, ...) update_schema_scalar(client.schema, "Datetime", DatetimeScalar) # or update_schema_scalars(client.schema, [DatetimeScalar]) .. _enums: Enums ----- GraphQL Enum types are a special kind of scalar that is restricted to a particular set of allowed values. For example, the schema may have a Color enum and contain:: enum Color { RED GREEN BLUE } Graphql-core provides the :code:`GraphQLEnumType` class to define an enum in the schema (See `graphql-core schema building docs`_). 
This class defines how the enum is serialized and parsed. If you get your schema from a "schema.graphql" file or from introspection, then the generated schema in the gql Client will contain default :code:`GraphQLEnumType` instances which should serialize/parse enums to/from its String representation (the :code:`RED` enum will be serialized to :code:`'RED'`). You may want to parse enums to convert them to Python Enum types. In that case, you can use the :func:`update_schema_enum ` to modify the default :code:`GraphQLEnumType` to use your defined Enum. Example: .. code-block:: python from enum import Enum from gql.utilities import update_schema_enum class Color(Enum): RED = 0 GREEN = 1 BLUE = 2 with open('path/to/schema.graphql') as f: schema_str = f.read() client = Client(schema=schema_str, ...) update_schema_enum(client.schema, 'Color', Color) Serializing Inputs ------------------ To provide custom scalars and/or enums in inputs with gql, you can: - serialize the inputs manually - let gql serialize the inputs using the custom scalars and enums defined in the schema Manually ^^^^^^^^ You can serialize inputs yourself: - in the query itself - in variables This has the advantage that you don't need a schema... In the query """""""""""" - custom scalar: .. code-block:: python query = gql( """{ shiftDays(time: "2021-11-12T11:58:13.461161", days: 5) }""" ) - enum: .. code-block:: python query = gql("{opposite(color: RED)}") In a variable """"""""""""" - custom scalar: .. code-block:: python query = gql("query shift5days($time: Datetime) {shiftDays(time: $time, days: 5)}") variable_values = { "time": "2021-11-12T11:58:13.461161", } result = client.execute(query, variable_values=variable_values) - enum: .. 
code-block:: python query = gql( """ query GetOppositeColor($color: Color) { opposite(color:$color) }""" ) variable_values = { "color": 'RED', } result = client.execute(query, variable_values=variable_values) Automatically ^^^^^^^^^^^^^ If you have custom scalar and/or enums defined in your schema (See: :ref:`custom_scalars` and :ref:`enums`), then you can request gql to serialize your variables automatically. - use :code:`Client(..., serialize_variables=True)` to request serializing variables for all queries - use :code:`execute(..., serialize_variables=True)` or :code:`subscribe(..., serialize_variables=True)` if you want gql to serialize the variables only for a single query. Examples: - custom scalars: .. code-block:: python from gql.utilities import update_schema_scalars from .myscalars import DatetimeScalar async with Client(transport=transport, fetch_schema_from_transport=True) as session: # We update the schema we got from introspection with our custom scalar type update_schema_scalars(session.client.schema, [DatetimeScalar]) # In the query, the custom scalar in the input is set to a variable query = gql("query shift5days($time: Datetime) {shiftDays(time: $time, days: 5)}") # the argument for time is a datetime instance variable_values = {"time": datetime.now()} # we execute the query with serialize_variables set to True result = await session.execute( query, variable_values=variable_values, serialize_variables=True ) - enums: .. 
code-block:: python from gql.utilities import update_schema_enum from .myenums import Color async with Client(transport=transport, fetch_schema_from_transport=True) as session: # We update the schema we got from introspection with our custom enum update_schema_enum(session.client.schema, 'Color', Color) # In the query, the enum in the input is set to a variable query = gql( """ query GetOppositeColor($color: Color) { opposite(color:$color) }""" ) # the argument for time is an instance of our Enum type variable_values = { "color": Color.RED, } # we execute the query with serialize_variables set to True result = client.execute( query, variable_values=variable_values, serialize_variables=True ) Parsing output -------------- By default, gql returns the serialized result from the backend without parsing (except json unserialization to Python default types). if you want to convert the result of custom scalars to custom objects, you can request gql to parse the results. - use :code:`Client(..., parse_results=True)` to request parsing for all queries - use :code:`execute(..., parse_result=True)` or :code:`subscribe(..., parse_result=True)` if you want gql to parse only the result of a single query. Same example as above, with result parsing enabled: .. code-block:: python from gql.utilities import update_schema_scalars async with Client(transport=transport, fetch_schema_from_transport=True) as session: update_schema_scalars(session.client.schema, [DatetimeScalar]) query = gql("query shift5days($time: Datetime) {shiftDays(time: $time, days: 5)}") variable_values = {"time": datetime.now()} result = await session.execute( query, variable_values=variable_values, serialize_variables=True, parse_result=True, ) # now result["time"] type is a datetime instead of string .. 
_graphql-core schema building docs: https://graphql-core-3.readthedocs.io/en/latest/usage/schema.html gql-3.6.0b2/docs/usage/extensions.rst000066400000000000000000000021451460703211500175150ustar00rootroot00000000000000.. _extensions: Extensions ---------- When you execute (or subscribe) GraphQL requests, the server will send responses which may have 3 fields: - data: the serialized response from the backend - errors: a list of potential errors - extensions: an optional field for additional data If there are errors in the response, then the :code:`execute` or :code:`subscribe` methods will raise a :code:`TransportQueryError`. If no errors are present, then only the data from the response is returned by default. .. code-block:: python result = client.execute(query) # result is here the content of the data field If you need to receive the extensions data too, then you can run the :code:`execute` or :code:`subscribe` methods with :code:`get_execution_result=True`. In that case, the full execution result is returned and you can have access to the extensions field .. code-block:: python result = client.execute(query, get_execution_result=True) # result is here an ExecutionResult instance # result.data is the content of the data field # result.extensions is the content of the extensions field gql-3.6.0b2/docs/usage/file_upload.rst000066400000000000000000000134121460703211500176000ustar00rootroot00000000000000File uploads ============ GQL supports file uploads with the :ref:`aiohttp transport `, the :ref:`requests transport `, the :ref:`httpx transport `, and the :ref:`httpx async transport `, using the `GraphQL multipart request spec`_. .. _GraphQL multipart request spec: https://github.com/jaydenseric/graphql-multipart-request-spec Single File ----------- In order to upload a single file, you need to: * set the file as a variable value in the mutation * provide the opened file to the `variable_values` argument of `execute` * set the `upload_files` argument to True .. 
code-block:: python transport = AIOHTTPTransport(url='YOUR_URL') # Or transport = RequestsHTTPTransport(url='YOUR_URL') # Or transport = HTTPXTransport(url='YOUR_URL') # Or transport = HTTPXAsyncTransport(url='YOUR_URL') client = Client(transport=transport) query = gql(''' mutation($file: Upload!) { singleUpload(file: $file) { id } } ''') with open("YOUR_FILE_PATH", "rb") as f: params = {"file": f} result = client.execute( query, variable_values=params, upload_files=True ) Setting the content-type ^^^^^^^^^^^^^^^^^^^^^^^^ If you need to set a specific Content-Type attribute to a file, you can set the :code:`content_type` attribute of the file like this: .. code-block:: python with open("YOUR_FILE_PATH", "rb") as f: # Setting the content-type to a pdf file for example f.content_type = "application/pdf" params = {"file": f} result = client.execute( query, variable_values=params, upload_files=True ) File list --------- It is also possible to upload multiple files using a list. .. code-block:: python transport = AIOHTTPTransport(url='YOUR_URL') # Or transport = RequestsHTTPTransport(url='YOUR_URL') # Or transport = HTTPXTransport(url='YOUR_URL') # Or transport = HTTPXAsyncTransport(url='YOUR_URL') client = Client(transport=transport) query = gql(''' mutation($files: [Upload!]!) { multipleUpload(files: $files) { id } } ''') f1 = open("YOUR_FILE_PATH_1", "rb") f2 = open("YOUR_FILE_PATH_2", "rb") params = {"files": [f1, f2]} result = client.execute( query, variable_values=params, upload_files=True ) f1.close() f2.close() Streaming --------- If you use the above methods to send files, then the entire contents of the files must be loaded in memory before the files are sent. If the files are not too big and you have enough RAM, it is not a problem. On another hand if you want to avoid using too much memory, then it is better to read the files and send them in small chunks so that the entire file contents don't have to be in memory at once. 
We provide methods to do that for two different uses cases: * Sending local files * Streaming downloaded files from an external URL to the GraphQL API .. note:: Streaming is only supported with the :ref:`aiohttp transport ` Streaming local files ^^^^^^^^^^^^^^^^^^^^^ aiohttp allows to upload files using an asynchronous generator. See `Streaming uploads on aiohttp docs`_. In order to stream local files, instead of providing opened files to the `variable_values` argument of `execute`, you need to provide an async generator which will provide parts of the files. You can use `aiofiles`_ to read the files in chunks and create this asynchronous generator. .. _Streaming uploads on aiohttp docs: https://docs.aiohttp.org/en/stable/client_quickstart.html#streaming-uploads .. _aiofiles: https://github.com/Tinche/aiofiles Example: .. code-block:: python transport = AIOHTTPTransport(url='YOUR_URL') client = Client(transport=transport) query = gql(''' mutation($file: Upload!) { singleUpload(file: $file) { id } } ''') async def file_sender(file_name): async with aiofiles.open(file_name, 'rb') as f: chunk = await f.read(64*1024) while chunk: yield chunk chunk = await f.read(64*1024) params = {"file": file_sender(file_name='YOUR_FILE_PATH')} result = client.execute( query, variable_values=params, upload_files=True ) Streaming downloaded files ^^^^^^^^^^^^^^^^^^^^^^^^^^ If the file you want to upload to the GraphQL API is not present locally and needs to be downloaded from elsewhere, then it is possible to chain the download and the upload in order to limit the amout of memory used. Because the `content` attribute of an aiohttp response is a `StreamReader` (it provides an async iterator protocol), you can chain the download and the upload together. In order to do that, you need to: * get the response from an aiohttp request and then get the StreamReader instance from `resp.content` * provide the StreamReader instance to the `variable_values` argument of `execute` Example: .. 
code-block:: python # First request to download your file with aiohttp async with aiohttp.ClientSession() as http_client: async with http_client.get('YOUR_DOWNLOAD_URL') as resp: # We now have a StreamReader instance in resp.content # and we provide it to the variable_values argument of execute transport = AIOHTTPTransport(url='YOUR_GRAPHQL_URL') client = Client(transport=transport) query = gql(''' mutation($file: Upload!) { singleUpload(file: $file) { id } } ''') params = {"file": resp.content} result = client.execute( query, variable_values=params, upload_files=True ) gql-3.6.0b2/docs/usage/headers.rst000066400000000000000000000005301460703211500167250ustar00rootroot00000000000000HTTP Headers ============ If you want to add additional http headers for your connection, you can specify these in your transport: .. code-block:: python transport = AIOHTTPTransport(url='YOUR_URL', headers={'Authorization': 'token'}) After the connection, the latest response headers can be found in :code:`transport.response_headers` gql-3.6.0b2/docs/usage/index.rst000066400000000000000000000002521460703211500164220ustar00rootroot00000000000000Usage ===== .. toctree:: :maxdepth: 2 basic_usage validation subscriptions variables headers file_upload custom_scalars_and_enums extensions gql-3.6.0b2/docs/usage/subscriptions.rst000066400000000000000000000013041460703211500202210ustar00rootroot00000000000000Subscriptions ============= Using the :ref:`websockets transport `, it is possible to execute GraphQL subscriptions: .. code-block:: python from gql import gql, Client from gql.transport.websockets import WebsocketsTransport transport = WebsocketsTransport(url='wss://your_server/graphql') client = Client( transport=transport, fetch_schema_from_transport=True, ) query = gql(''' subscription yourSubscription { ... } ''') for result in client.subscribe(query): print (result) .. 
note:: The websockets transport can also execute queries or mutations, it is not restricted to subscriptions gql-3.6.0b2/docs/usage/validation.rst000066400000000000000000000031031460703211500174430ustar00rootroot00000000000000.. _schema_validation: Schema validation ================= If a GraphQL schema is provided, gql will validate the queries locally before sending them to the backend. If no schema is provided, gql will send the query to the backend without local validation. You can either provide a schema yourself, or you can request gql to get the schema from the backend using `introspection`_. Using a provided schema ----------------------- The schema can be provided as a String (which is usually stored in a .graphql file): .. code-block:: python with open('path/to/schema.graphql') as f: schema_str = f.read() client = Client(schema=schema_str) .. note:: You can download a schema from a server by using :ref:`gql-cli ` :code:`$ gql-cli https://SERVER_URL/graphql --print-schema --schema-download input_value_deprecation:true > schema.graphql` OR can be created using python classes: .. code-block:: python from .someSchema import SampleSchema # SampleSchema is an instance of GraphQLSchema client = Client(schema=SampleSchema) See `tests/starwars/schema.py`_ for an example of such a schema. Using introspection ------------------- In order to get the schema directly from the GraphQL Server API using the transport, you need to set the `fetch_schema_from_transport` argument of Client to True, and the client will fetch the schema directly after the first connection to the backend. .. _introspection: https://graphql.org/learn/introspection .. 
_tests/starwars/schema.py: https://github.com/graphql-python/gql/blob/master/tests/starwars/schema.py gql-3.6.0b2/docs/usage/variables.rst000066400000000000000000000014261460703211500172670ustar00rootroot00000000000000Using variables =============== It is possible to provide variable values with your query by providing a Dict to the variable_values argument of the `execute` or the `subscribe` methods. The variable values will be sent alongside the query in the transport message (there is no local substitution). .. code-block:: python query = gql( """ query getContinentName ($code: ID!) { continent (code: $code) { name } } """ ) params = {"code": "EU"} # Get name of continent with code "EU" result = client.execute(query, variable_values=params) print(result) params = {"code": "AF"} # Get name of continent with code "AF" result = client.execute(query, variable_values=params) print(result) gql-3.6.0b2/gql-checker/000077500000000000000000000000001460703211500147135ustar00rootroot00000000000000gql-3.6.0b2/gql-checker/.gitignore000066400000000000000000000001031460703211500166750ustar00rootroot00000000000000*.pyc *.pyo __pycache__ *.egg-info *~ .coverage .tox/ build/ dist/ gql-3.6.0b2/gql-checker/.travis.yml000066400000000000000000000002251460703211500170230ustar00rootroot00000000000000language: python addons: apt: sources: - deadsnakes packages: - python3.5 install: - pip install tox script: - tox sudo: false gql-3.6.0b2/gql-checker/LICENSE000066400000000000000000000020711460703211500157200ustar00rootroot00000000000000The MIT License (MIT) Copyright (c) 2016 GraphQL Python Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject 
to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. gql-3.6.0b2/gql-checker/MANIFEST.in000066400000000000000000000001351460703211500164500ustar00rootroot00000000000000include LICENSE include README.md recursive-include tests * recursive-exclude tests *.py[co] gql-3.6.0b2/gql-checker/README.rst000066400000000000000000000015511460703211500164040ustar00rootroot00000000000000gql-checker =========== |Build Status| A `flake8 `__ and `Pylama `__ plugin that checks the all the static gql calls given a GraphQL schema. It will not check anything else about the gql calls. Merely that the GraphQL syntax is correct and it validates against the provided schema. Warnings -------- This package adds 3 new flake8 warnings - ``GQL100``: The gql query is doesn't match GraphQL syntax - ``GQL101``: The gql query have valid syntax but doesn't validate against provided schema Configuration ------------- You will want to set the ``gql-introspection-schema`` option to a file with the json introspection of the schema. .. 
|Build Status| image:: https://travis-ci.org/graphql-python/gql-checker.png?branch=master :target: https://travis-ci.org/graphql-python/gql-checker gql-3.6.0b2/gql-checker/gql_checker/000077500000000000000000000000001460703211500171625ustar00rootroot00000000000000gql-3.6.0b2/gql-checker/gql_checker/__about__.py000066400000000000000000000007051460703211500214440ustar00rootroot00000000000000__all__ = [ "__title__", "__summary__", "__uri__", "__version__", "__author__", "__email__", "__license__", "__copyright__", ] __title__ = "gql-checker" __summary__ = ( "Flake8 and pylama plugin that checks gql GraphQL calls." ) __uri__ = "https://github.com/graphql-python/gql-checker" __version__ = "0.1" __author__ = "Syrus Akbary" __email__ = "me@syrusakbary.com" __license__ = "MIT" __copyright__ = "Copyright 2016 %s" % __author__ gql-3.6.0b2/gql-checker/gql_checker/__init__.py000066400000000000000000000066331460703211500213030ustar00rootroot00000000000000import ast import json import pycodestyle from gql_checker.__about__ import ( __author__, __copyright__, __email__, __license__, __summary__, __title__, __uri__, __version__ ) from gql_checker.stdlib_list import STDLIB_NAMES from graphql import Source, validate, parse, build_client_schema __all__ = [ "__title__", "__summary__", "__uri__", "__version__", "__author__", "__email__", "__license__", "__copyright__", ] GQL_SYNTAX_ERROR = 'GQL100' GQL_VALIDATION_ERROR = 'GQL101' class ImportVisitor(ast.NodeVisitor): """ This class visits all the gql calls. 
""" def __init__(self, filename, options): self.filename = filename self.options = options or {} self.calls = [] def visit_Call(self, node): # noqa if node.func.id == 'gql': self.calls.append(node) def node_query(self, node): """ Return the query for the gql call node """ if isinstance(node, ast.Call): assert node.args arg = node.args[0] if not isinstance(arg, ast.Str): return else: raise TypeError(type(node)) return arg.s class ImportOrderChecker(object): visitor_class = ImportVisitor options = None def __init__(self, filename, tree): self.tree = tree self.filename = filename self.lines = None def load_file(self): if self.filename in ("stdin", "-", None): self.filename = "stdin" self.lines = pycodestyle.stdin_get_value().splitlines(True) else: self.lines = pycodestyle.readlines(self.filename) if not self.tree: self.tree = ast.parse("".join(self.lines)) def get_schema(self): gql_introspection_schema = self.options.get('gql_introspection_schema') if gql_introspection_schema: try: with open(gql_introspection_schema) as data_file: introspection_schema = json.load(data_file) return build_client_schema(introspection_schema) except IOError as e: raise Exception(f"Cannot find the provided introspection schema. 
{e}") schema = self.options.get('schema') assert schema, 'Need to provide schema' def validation_errors(self, ast): return validate(self.get_schema(), ast) def error(self, node, code, message): raise NotImplemented() def check_gql(self): if not self.tree or not self.lines: self.load_file() visitor = self.visitor_class(self.filename, self.options) visitor.visit(self.tree) for node in visitor.calls: # Lines with the noqa flag are ignored entirely if pycodestyle.noqa(self.lines[node.lineno - 1]): continue query = visitor.node_query(node) if not query: continue try: source = Source(query, 'gql query') ast = parse(source) except Exception as e: message = str(e) yield self.error(node, GQL_SYNTAX_ERROR, message) continue validation_errors = self.validation_errors(ast) if validation_errors: for error in validation_errors: message = str(error) yield self.error(node, GQL_VALIDATION_ERROR, message) gql-3.6.0b2/gql-checker/gql_checker/flake8_linter.py000066400000000000000000000027021460703211500222640ustar00rootroot00000000000000from __future__ import absolute_import import gql_checker from gql_checker import ImportOrderChecker class Linter(ImportOrderChecker): name = "gql" version = gql_checker.__version__ def __init__(self, tree, filename): super(Linter, self).__init__(filename, tree) @classmethod def add_options(cls, parser): # List of application import names. They go last. parser.add_option( "--gql-introspection-schema", metavar="FILE", help="Import names to consider as application specific" ) parser.add_option( "--gql-typedef-schema", default='', action="store", type="string", help=("Style to follow. 
Available: " "cryptography, google, smarkets, pep8") ) parser.config_options.append("gql-introspection-schema") parser.config_options.append("gql-typedef-schema") @classmethod def parse_options(cls, options): optdict = {} optdict = dict( gql_introspection_schema=options.gql_introspection_schema, gql_typedef_schema=options.gql_typedef_schema, ) cls.options = optdict def error(self, node, code, message): lineno, col_offset = node.lineno, node.col_offset return lineno, col_offset, f'{code} {message}', Linter def run(self): for error in self.check_gql(): yield error gql-3.6.0b2/gql-checker/gql_checker/pylama_linter.py000066400000000000000000000015331460703211500223760ustar00rootroot00000000000000from __future__ import absolute_import from pylama.lint import Linter as BaseLinter import gql_checker from gql_checker import ImportOrderChecker class Linter(ImportOrderChecker, BaseLinter): name = "gql" version = gql_checker.__version__ def __init__(self): super(Linter, self).__init__(None, None) def allow(self, path): return path.endswith(".py") def error(self, node, code, message): lineno, col_offset = node.lineno, node.col_offset return { "lnum": lineno, "col": col_offset, "text": message, "type": code } def run(self, path, **meta): self.filename = path self.tree = None self.options = dict( {'schema': ''}, **meta) for error in self.check_gql(): yield error gql-3.6.0b2/gql-checker/gql_checker/stdlib_list.py000066400000000000000000000116721460703211500220570ustar00rootroot00000000000000STDLIB_NAMES = set(( "AL", "BaseHTTPServer", "Bastion", "Binary", "Boolean", "CGIHTTPServer", "ColorPicker", "ConfigParser", "Cookie", "DEVICE", "DocXMLRPCServer", "EasyDialogs", "FL", "FrameWork", "GL", "HTMLParser", "MacOS", "Mapping", "MimeWriter", "MiniAEFrame", "Numeric", "Queue", "SUNAUDIODEV", "ScrolledText", "Sequence", "Set", "SimpleHTTPServer", "SimpleXMLRPCServer", "SocketServer", "StringIO", "Text", "Tix", "Tkinter", "UserDict", "UserList", "UserString", "__builtin__", 
"__future__", "__main__", "_dummy_thread", "_thread", "abc", "aepack", "aetools", "aetypes", "aifc", "al", "anydbm", "argparse", "array", "ast", "asynchat", "asyncio", "asyncore", "atexit", "audioop", "autoGIL", "base64", "bdb", "binascii", "binhex", "bisect", "bsddb", "builtins", "bz2", "cPickle", "cProfile", "cStringIO", "calendar", "cd", "cgi", "cgitb", "chunk", "cmath", "cmd", "code", "codecs", "codeop", "collections", "collections.abc", "colorsys", "commands", "compileall", "concurrent.futures", "configparser", "contextlib", "cookielib", "copy", "copy_reg", "copyreg", "crypt", "csv", "ctypes", "curses", "curses.ascii", "curses.panel", "curses.textpad", "curses.wrapper", "datetime", "dbhash", "dbm", "decimal", "difflib", "dircache", "dis", "distutils", "dl", "doctest", "dumbdbm", "dummy_thread", "dummy_threading", "email", "ensurepip", "enum", "errno", "faulthandler", "fcntl", "filecmp", "fileinput", "findertools", "fl", "flp", "fm", "fnmatch", "formatter", "fpectl", "fpformat", "fractions", "ftplib", "functools", "future_builtins", "gc", "gdbm", "gensuitemodule", "getopt", "getpass", "gettext", "gl", "glob", "grp", "gzip", "hashlib", "heapq", "hmac", "hotshot", "html", "html.entities", "html.parser", "htmlentitydefs", "htmllib", "http", "http.client", "http.cookiejar", "http.cookies", "http.server", "httplib", "ic", "imageop", "imaplib", "imgfile", "imghdr", "imp", "importlib", "imputil", "inspect", "io", "ipaddress", "itertools", "jpeg", "json", "keyword", "linecache", "locale", "logging", "logging.config", "logging.handlers", "lzma", "macostools", "macpath", "macurl2path", "mailbox", "mailcap", "marshal", "math", "md5", "mhlib", "mimetools", "mimetypes", "mimify", "mmap", "modulefinder", "msilib", "multifile", "multiprocessing", "mutex", "netrc", "new", "nis", "nntplib", "nturl2path", "numbers", "operator", "optparse", "os", "os.path", "ossaudiodev", "parser", "pathlib", "pdb", "pickle", "pickletools", "pipes", "pkgutil", "platform", "plistlib", "popen2", 
"poplib", "posix", "posixfile", "posixpath", "pprint", "profile", "pstats", "pty", "pwd", "py_compile", "pyclbr", "pydoc", "queue", "quopri", "random", "re", "readline", "repr", "reprlib", "resource", "rexec", "rfc822", "rlcompleter", "robotparser", "runpy", "sched", "select", "sets", "sgmllib", "sha", "shelve", "shlex", "shutil", "signal", "site", "smtpd", "smtplib", "sndhdr", "socket", "socketserver", "spwd", "sqlite3", "ssl", "stat", "statistics", "statvfs", "string", "stringprep", "struct", "subprocess", "sunau", "sunaudiodev", "symbol", "symtable", "sys", "sysconfig", "syslog", "tabnanny", "tarfile", "telnetlib", "tempfile", "termios", "test", "test.support", "test.test_support", "textwrap", "thread", "threading", "time", "timeit", "tkinter", "tkinter.scrolledtext", "tkinter.tix", "tkinter.ttk", "token", "tokenize", "trace", "traceback", "tracemalloc", "ttk", "tty", "turtle", "types", "typing", "unicodedata", "unittest", "unittest.mock", "urllib", "urllib.error", "urllib.parse", "urllib.request", "urllib.response", "urllib.robotparser", "urllib2", "urlparse", "user", "uu", "uuid", "venv", "warnings", "wave", "weakref", "webbrowser", "whichdb", "winsound", "wsgiref", "xdrlib", "xml", "xmlrpclib", "zipfile", "zipimport", "zlib", )) gql-3.6.0b2/gql-checker/setup.cfg000066400000000000000000000000261460703211500165320ustar00rootroot00000000000000[wheel] universal = 1 gql-3.6.0b2/gql-checker/setup.py000066400000000000000000000031111460703211500164210ustar00rootroot00000000000000import os from setuptools import setup, find_packages base_dir = os.path.dirname(__file__) about = {} with open(os.path.join(base_dir, "gql_checker", "__about__.py")) as f: exec(f.read(), about) with open(os.path.join(base_dir, "README.rst")) as f: long_description = f.read() setup( name=about["__title__"], version=about["__version__"], description=about["__summary__"], long_description=long_description, license=about["__license__"], url=about["__uri__"], author=about["__author__"], 
author_email=about["__email__"], packages=find_packages(exclude=["tests", "tests.*"]), zip_safe=False, install_requires=[ "pycodestyle" ], tests_require=[ "pytest", "flake8", "pycodestyle", "pylama" ], py_modules=['gql_checker'], entry_points={ 'flake8.extension': [ 'GQL = gql_checker.flake8_linter:Linter', ], 'pylama.linter': [ 'gql_checker = gql_checker.pylama_linter:Linter' ] }, classifiers=[ "Intended Audience :: Developers", "Development Status :: 4 - Beta", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 3", ( "License :: OSI Approved :: " "GNU Lesser General Public License v3 (LGPLv3)" ), "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Software Development :: Quality Assurance", "Operating System :: OS Independent" ] ) gql-3.6.0b2/gql-checker/tests/000077500000000000000000000000001460703211500160555ustar00rootroot00000000000000gql-3.6.0b2/gql-checker/tests/__init__.py000066400000000000000000000000001460703211500201540ustar00rootroot00000000000000gql-3.6.0b2/gql-checker/tests/introspection_schema.json000066400000000000000000000477201460703211500232020ustar00rootroot00000000000000{"__schema": {"queryType": {"name": "Query"}, "mutationType": null, "subscriptionType": null, "types": [{"kind": "OBJECT", "name": "Query", "description": null, "fields": [{"name": "droid", "description": null, "args": [{"name": "id", "description": "id of the droid", "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "SCALAR", "name": "String", "ofType": null}}, "defaultValue": null}], "type": {"kind": "OBJECT", "name": "Droid", "ofType": null}, "isDeprecated": false, "deprecationReason": null}, {"name": "hero", "description": null, "args": [{"name": "episode", "description": "If omitted, returns the hero of the whole saga. 
If provided, returns the hero of that particular episode.", "type": {"kind": "ENUM", "name": "Episode", "ofType": null}, "defaultValue": null}], "type": {"kind": "INTERFACE", "name": "Character", "ofType": null}, "isDeprecated": false, "deprecationReason": null}, {"name": "human", "description": null, "args": [{"name": "id", "description": "id of the human", "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "SCALAR", "name": "String", "ofType": null}}, "defaultValue": null}], "type": {"kind": "OBJECT", "name": "Human", "ofType": null}, "isDeprecated": false, "deprecationReason": null}], "inputFields": null, "interfaces": [], "enumValues": null, "possibleTypes": null}, {"kind": "SCALAR", "name": "String", "description": "The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.", "fields": null, "inputFields": null, "interfaces": null, "enumValues": null, "possibleTypes": null}, {"kind": "OBJECT", "name": "Droid", "description": "A mechanical creature in the Star Wars universe.", "fields": [{"name": "appearsIn", "description": "Which movies they appear in.", "args": [], "type": {"kind": "LIST", "name": null, "ofType": {"kind": "ENUM", "name": "Episode", "ofType": null}}, "isDeprecated": false, "deprecationReason": null}, {"name": "friends", "description": "The friends of the droid, or an empty list if they have none.", "args": [], "type": {"kind": "LIST", "name": null, "ofType": {"kind": "INTERFACE", "name": "Character", "ofType": null}}, "isDeprecated": false, "deprecationReason": null}, {"name": "id", "description": "The id of the droid.", "args": [], "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "SCALAR", "name": "String", "ofType": null}}, "isDeprecated": false, "deprecationReason": null}, {"name": "name", "description": "The name of the droid.", "args": [], "type": {"kind": "SCALAR", "name": "String", "ofType": 
null}, "isDeprecated": false, "deprecationReason": null}, {"name": "primaryFunction", "description": "The primary function of the droid.", "args": [], "type": {"kind": "SCALAR", "name": "String", "ofType": null}, "isDeprecated": false, "deprecationReason": null}], "inputFields": null, "interfaces": [{"kind": "INTERFACE", "name": "Character", "ofType": null}], "enumValues": null, "possibleTypes": null}, {"kind": "INTERFACE", "name": "Character", "description": "A character in the Star Wars Trilogy", "fields": [{"name": "appearsIn", "description": "Which movies they appear in.", "args": [], "type": {"kind": "LIST", "name": null, "ofType": {"kind": "ENUM", "name": "Episode", "ofType": null}}, "isDeprecated": false, "deprecationReason": null}, {"name": "friends", "description": "The friends of the character, or an empty list if they have none.", "args": [], "type": {"kind": "LIST", "name": null, "ofType": {"kind": "INTERFACE", "name": "Character", "ofType": null}}, "isDeprecated": false, "deprecationReason": null}, {"name": "id", "description": "The id of the character.", "args": [], "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "SCALAR", "name": "String", "ofType": null}}, "isDeprecated": false, "deprecationReason": null}, {"name": "name", "description": "The name of the character.", "args": [], "type": {"kind": "SCALAR", "name": "String", "ofType": null}, "isDeprecated": false, "deprecationReason": null}], "inputFields": null, "interfaces": null, "enumValues": null, "possibleTypes": [{"kind": "OBJECT", "name": "Droid", "ofType": null}, {"kind": "OBJECT", "name": "Human", "ofType": null}]}, {"kind": "ENUM", "name": "Episode", "description": "One of the films in the Star Wars Trilogy", "fields": null, "inputFields": null, "interfaces": null, "enumValues": [{"name": "EMPIRE", "description": "Released in 1980.", "isDeprecated": false, "deprecationReason": null}, {"name": "JEDI", "description": "Released in 1983.", "isDeprecated": false, 
"deprecationReason": null}, {"name": "NEWHOPE", "description": "Released in 1977.", "isDeprecated": false, "deprecationReason": null}], "possibleTypes": null}, {"kind": "OBJECT", "name": "Human", "description": "A humanoid creature in the Star Wars universe.", "fields": [{"name": "appearsIn", "description": "Which movies they appear in.", "args": [], "type": {"kind": "LIST", "name": null, "ofType": {"kind": "ENUM", "name": "Episode", "ofType": null}}, "isDeprecated": false, "deprecationReason": null}, {"name": "friends", "description": "The friends of the human, or an empty list if they have none.", "args": [], "type": {"kind": "LIST", "name": null, "ofType": {"kind": "INTERFACE", "name": "Character", "ofType": null}}, "isDeprecated": false, "deprecationReason": null}, {"name": "homePlanet", "description": "The home planet of the human, or null if unknown.", "args": [], "type": {"kind": "SCALAR", "name": "String", "ofType": null}, "isDeprecated": false, "deprecationReason": null}, {"name": "id", "description": "The id of the human.", "args": [], "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "SCALAR", "name": "String", "ofType": null}}, "isDeprecated": false, "deprecationReason": null}, {"name": "name", "description": "The name of the human.", "args": [], "type": {"kind": "SCALAR", "name": "String", "ofType": null}, "isDeprecated": false, "deprecationReason": null}], "inputFields": null, "interfaces": [{"kind": "INTERFACE", "name": "Character", "ofType": null}], "enumValues": null, "possibleTypes": null}, {"kind": "OBJECT", "name": "__Schema", "description": "A GraphQL Schema defines the capabilities of a GraphQL server. 
It exposes all available types and directives on the server, as well as the entry points for query, mutation and subscription operations.", "fields": [{"name": "types", "description": "A list of all types supported by this server.", "args": [], "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "LIST", "name": null, "ofType": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "OBJECT", "name": "__Type", "ofType": null}}}}, "isDeprecated": false, "deprecationReason": null}, {"name": "queryType", "description": "The type that query operations will be rooted at.", "args": [], "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "OBJECT", "name": "__Type", "ofType": null}}, "isDeprecated": false, "deprecationReason": null}, {"name": "mutationType", "description": "If this server supports mutation, the type that mutation operations will be rooted at.", "args": [], "type": {"kind": "OBJECT", "name": "__Type", "ofType": null}, "isDeprecated": false, "deprecationReason": null}, {"name": "subscriptionType", "description": "If this server support subscription, the type that subscription operations will be rooted at.", "args": [], "type": {"kind": "OBJECT", "name": "__Type", "ofType": null}, "isDeprecated": false, "deprecationReason": null}, {"name": "directives", "description": "A list of all directives supported by this server.", "args": [], "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "LIST", "name": null, "ofType": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "OBJECT", "name": "__Directive", "ofType": null}}}}, "isDeprecated": false, "deprecationReason": null}], "inputFields": null, "interfaces": [], "enumValues": null, "possibleTypes": null}, {"kind": "OBJECT", "name": "__Type", "description": "The fundamental unit of any GraphQL Schema is the type. There are many kinds of types in GraphQL as represented by the `__TypeKind` enum.\n\nDepending on the kind of a type, certain fields describe information about that type. 
Scalar types provide no information beyond a name and description, while Enum types provide their values. Object and Interface types provide the fields they describe. Abstract types, Union and Interface, provide the Object types possible at runtime. List and NonNull types compose other types.", "fields": [{"name": "kind", "description": null, "args": [], "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "ENUM", "name": "__TypeKind", "ofType": null}}, "isDeprecated": false, "deprecationReason": null}, {"name": "name", "description": null, "args": [], "type": {"kind": "SCALAR", "name": "String", "ofType": null}, "isDeprecated": false, "deprecationReason": null}, {"name": "description", "description": null, "args": [], "type": {"kind": "SCALAR", "name": "String", "ofType": null}, "isDeprecated": false, "deprecationReason": null}, {"name": "fields", "description": null, "args": [{"name": "includeDeprecated", "description": null, "type": {"kind": "SCALAR", "name": "Boolean", "ofType": null}, "defaultValue": "false"}], "type": {"kind": "LIST", "name": null, "ofType": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "OBJECT", "name": "__Field", "ofType": null}}}, "isDeprecated": false, "deprecationReason": null}, {"name": "interfaces", "description": null, "args": [], "type": {"kind": "LIST", "name": null, "ofType": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "OBJECT", "name": "__Type", "ofType": null}}}, "isDeprecated": false, "deprecationReason": null}, {"name": "possibleTypes", "description": null, "args": [], "type": {"kind": "LIST", "name": null, "ofType": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "OBJECT", "name": "__Type", "ofType": null}}}, "isDeprecated": false, "deprecationReason": null}, {"name": "enumValues", "description": null, "args": [{"name": "includeDeprecated", "description": null, "type": {"kind": "SCALAR", "name": "Boolean", "ofType": null}, "defaultValue": "false"}], "type": {"kind": "LIST", "name": null, 
"ofType": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "OBJECT", "name": "__EnumValue", "ofType": null}}}, "isDeprecated": false, "deprecationReason": null}, {"name": "inputFields", "description": null, "args": [], "type": {"kind": "LIST", "name": null, "ofType": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "OBJECT", "name": "__InputValue", "ofType": null}}}, "isDeprecated": false, "deprecationReason": null}, {"name": "ofType", "description": null, "args": [], "type": {"kind": "OBJECT", "name": "__Type", "ofType": null}, "isDeprecated": false, "deprecationReason": null}], "inputFields": null, "interfaces": [], "enumValues": null, "possibleTypes": null}, {"kind": "ENUM", "name": "__TypeKind", "description": "An enum describing what kind of type a given `__Type` is", "fields": null, "inputFields": null, "interfaces": null, "enumValues": [{"name": "SCALAR", "description": "Indicates this type is a scalar.", "isDeprecated": false, "deprecationReason": null}, {"name": "OBJECT", "description": "Indicates this type is an object. `fields` and `interfaces` are valid fields.", "isDeprecated": false, "deprecationReason": null}, {"name": "INTERFACE", "description": "Indicates this type is an interface. `fields` and `possibleTypes` are valid fields.", "isDeprecated": false, "deprecationReason": null}, {"name": "UNION", "description": "Indicates this type is a union. `possibleTypes` is a valid field.", "isDeprecated": false, "deprecationReason": null}, {"name": "ENUM", "description": "Indicates this type is an enum. `enumValues` is a valid field.", "isDeprecated": false, "deprecationReason": null}, {"name": "INPUT_OBJECT", "description": "Indicates this type is an input object. `inputFields` is a valid field.", "isDeprecated": false, "deprecationReason": null}, {"name": "LIST", "description": "Indicates this type is a list. 
`ofType` is a valid field.", "isDeprecated": false, "deprecationReason": null}, {"name": "NON_NULL", "description": "Indicates this type is a non-null. `ofType` is a valid field.", "isDeprecated": false, "deprecationReason": null}], "possibleTypes": null}, {"kind": "SCALAR", "name": "Boolean", "description": "The `Boolean` scalar type represents `true` or `false`.", "fields": null, "inputFields": null, "interfaces": null, "enumValues": null, "possibleTypes": null}, {"kind": "OBJECT", "name": "__Field", "description": "Object and Interface types are described by a list of Fields, each of which has a name, potentially a list of arguments, and a return type.", "fields": [{"name": "name", "description": null, "args": [], "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "SCALAR", "name": "String", "ofType": null}}, "isDeprecated": false, "deprecationReason": null}, {"name": "description", "description": null, "args": [], "type": {"kind": "SCALAR", "name": "String", "ofType": null}, "isDeprecated": false, "deprecationReason": null}, {"name": "args", "description": null, "args": [], "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "LIST", "name": null, "ofType": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "OBJECT", "name": "__InputValue", "ofType": null}}}}, "isDeprecated": false, "deprecationReason": null}, {"name": "type", "description": null, "args": [], "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "OBJECT", "name": "__Type", "ofType": null}}, "isDeprecated": false, "deprecationReason": null}, {"name": "isDeprecated", "description": null, "args": [], "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "SCALAR", "name": "Boolean", "ofType": null}}, "isDeprecated": false, "deprecationReason": null}, {"name": "deprecationReason", "description": null, "args": [], "type": {"kind": "SCALAR", "name": "String", "ofType": null}, "isDeprecated": false, "deprecationReason": null}], "inputFields": null, "interfaces": 
[], "enumValues": null, "possibleTypes": null}, {"kind": "OBJECT", "name": "__InputValue", "description": "Arguments provided to Fields or Directives and the input fields of an InputObject are represented as Input Values which describe their type and optionally a default value.", "fields": [{"name": "name", "description": null, "args": [], "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "SCALAR", "name": "String", "ofType": null}}, "isDeprecated": false, "deprecationReason": null}, {"name": "description", "description": null, "args": [], "type": {"kind": "SCALAR", "name": "String", "ofType": null}, "isDeprecated": false, "deprecationReason": null}, {"name": "type", "description": null, "args": [], "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "OBJECT", "name": "__Type", "ofType": null}}, "isDeprecated": false, "deprecationReason": null}, {"name": "defaultValue", "description": null, "args": [], "type": {"kind": "SCALAR", "name": "String", "ofType": null}, "isDeprecated": false, "deprecationReason": null}], "inputFields": null, "interfaces": [], "enumValues": null, "possibleTypes": null}, {"kind": "OBJECT", "name": "__EnumValue", "description": "One possible value for a given Enum. Enum values are unique values, not a placeholder for a string or numeric value. 
However an Enum value is returned in a JSON response as a string.", "fields": [{"name": "name", "description": null, "args": [], "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "SCALAR", "name": "String", "ofType": null}}, "isDeprecated": false, "deprecationReason": null}, {"name": "description", "description": null, "args": [], "type": {"kind": "SCALAR", "name": "String", "ofType": null}, "isDeprecated": false, "deprecationReason": null}, {"name": "isDeprecated", "description": null, "args": [], "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "SCALAR", "name": "Boolean", "ofType": null}}, "isDeprecated": false, "deprecationReason": null}, {"name": "deprecationReason", "description": null, "args": [], "type": {"kind": "SCALAR", "name": "String", "ofType": null}, "isDeprecated": false, "deprecationReason": null}], "inputFields": null, "interfaces": [], "enumValues": null, "possibleTypes": null}, {"kind": "OBJECT", "name": "__Directive", "description": "A Directive provides a way to describe alternate runtime execution and type validation behavior in a GraphQL document.\n\nIn some cases, you need to provide options to alter GraphQL's execution behavior in ways field arguments will not suffice, such as conditionally including or skipping a field. 
Directives provide this by describing additional information to the executor.", "fields": [{"name": "name", "description": null, "args": [], "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "SCALAR", "name": "String", "ofType": null}}, "isDeprecated": false, "deprecationReason": null}, {"name": "description", "description": null, "args": [], "type": {"kind": "SCALAR", "name": "String", "ofType": null}, "isDeprecated": false, "deprecationReason": null}, {"name": "locations", "description": null, "args": [], "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "LIST", "name": null, "ofType": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "ENUM", "name": "__DirectiveLocation", "ofType": null}}}}, "isDeprecated": false, "deprecationReason": null}, {"name": "args", "description": null, "args": [], "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "LIST", "name": null, "ofType": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "OBJECT", "name": "__InputValue", "ofType": null}}}}, "isDeprecated": false, "deprecationReason": null}, {"name": "onOperation", "description": null, "args": [], "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "SCALAR", "name": "Boolean", "ofType": null}}, "isDeprecated": true, "deprecationReason": "Use `locations`."}, {"name": "onFragment", "description": null, "args": [], "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "SCALAR", "name": "Boolean", "ofType": null}}, "isDeprecated": true, "deprecationReason": "Use `locations`."}, {"name": "onField", "description": null, "args": [], "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "SCALAR", "name": "Boolean", "ofType": null}}, "isDeprecated": true, "deprecationReason": "Use `locations`."}], "inputFields": null, "interfaces": [], "enumValues": null, "possibleTypes": null}, {"kind": "ENUM", "name": "__DirectiveLocation", "description": "A Directive can be adjacent to many parts of the GraphQL language, a 
__DirectiveLocation describes one such possible adjacencies.", "fields": null, "inputFields": null, "interfaces": null, "enumValues": [{"name": "QUERY", "description": "Location adjacent to a query operation.", "isDeprecated": false, "deprecationReason": null}, {"name": "MUTATION", "description": "Location adjacent to a mutation operation.", "isDeprecated": false, "deprecationReason": null}, {"name": "SUBSCRIPTION", "description": "Location adjacent to a subscription operation.", "isDeprecated": false, "deprecationReason": null}, {"name": "FIELD", "description": "Location adjacent to a field.", "isDeprecated": false, "deprecationReason": null}, {"name": "FRAGMENT_DEFINITION", "description": "Location adjacent to a fragment definition.", "isDeprecated": false, "deprecationReason": null}, {"name": "FRAGMENT_SPREAD", "description": "Location adjacent to a fragment spread.", "isDeprecated": false, "deprecationReason": null}, {"name": "INLINE_FRAGMENT", "description": "Location adjacent to an inline fragment.", "isDeprecated": false, "deprecationReason": null}], "possibleTypes": null}], "directives": [{"name": "include", "description": null, "locations": ["FIELD", "FRAGMENT_SPREAD", "INLINE_FRAGMENT"], "args": [{"name": "if", "description": "Included when true.", "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "SCALAR", "name": "Boolean", "ofType": null}}, "defaultValue": null}]}, {"name": "skip", "description": null, "locations": ["FIELD", "FRAGMENT_SPREAD", "INLINE_FRAGMENT"], "args": [{"name": "if", "description": "Skipped when true.", "type": {"kind": "NON_NULL", "name": null, "ofType": {"kind": "SCALAR", "name": "Boolean", "ofType": null}}, "defaultValue": null}]}]}}gql-3.6.0b2/gql-checker/tests/test_cases/000077500000000000000000000000001460703211500202125ustar00rootroot00000000000000gql-3.6.0b2/gql-checker/tests/test_cases/bad_query.py000066400000000000000000000001361460703211500225370ustar00rootroot00000000000000from gql import gql gql(''' { id } 
''') # GQL101: Cannot query field "id" on type "Query". gql-3.6.0b2/gql-checker/tests/test_cases/noqa.py000066400000000000000000000000651460703211500215230ustar00rootroot00000000000000from gql import gql gql(''' wrong query ''') # noqa gql-3.6.0b2/gql-checker/tests/test_cases/syntax_error.py000066400000000000000000000000671460703211500233260ustar00rootroot00000000000000from gql import gql gql(''' wrong query ''') # GQL100 gql-3.6.0b2/gql-checker/tests/test_cases/validation.py000066400000000000000000000026171460703211500227240ustar00rootroot00000000000000from gql import gql gql(''' query NestedQueryWithFragment { hero { ...NameAndAppearances friends { ...NameAndAppearances friends { ...NameAndAppearances } } } } fragment NameAndAppearances on Character { name appearsIn } ''') gql(''' query HeroSpaceshipQuery { hero { favoriteSpaceship } } ''') # GQL101: Cannot query field "favoriteSpaceship" on type "Character". gql(''' query HeroNoFieldsQuery { hero } ''') # GQL101: Field "hero" of type "Character" must have a sub selection. gql(''' query HeroFieldsOnScalarQuery { hero { name { firstCharacterOfName } } } ''') # GQL101: Field "name" of type "String" must not have a sub selection. gql(''' query DroidFieldOnCharacter { hero { name primaryFunction } } ''') # GQL101: Cannot query field "primaryFunction" on type "Character". However, this field exists on "Droid". Perhaps you meant to use an inline fragment? gql(''' query DroidFieldInFragment { hero { name ...DroidFields } } fragment DroidFields on Droid { primaryFunction } ''') gql(''' query DroidFieldInFragment { hero { name ... 
on Droid { primaryFunction } } } ''') gql-3.6.0b2/gql-checker/tests/test_flake8_linter.py000066400000000000000000000027011460703211500222150ustar00rootroot00000000000000import ast import re import os import pycodestyle import pytest from gql_checker.flake8_linter import Linter from tests.utils import extract_expected_errors def load_test_cases(): base_path = os.path.dirname(__file__) test_case_path = os.path.join(base_path, "test_cases") test_case_files = os.listdir(test_case_path) test_cases = [] for fname in test_case_files: if not fname.endswith(".py"): continue fullpath = os.path.join(test_case_path, fname) data = open(fullpath).read() tree = ast.parse(data, fullpath) codes, messages = extract_expected_errors(data) test_cases.append((tree, fullpath, codes, messages)) return test_cases @pytest.mark.parametrize( "tree, filename, expected_codes, expected_messages", load_test_cases() ) def test_expected_error(tree, filename, expected_codes, expected_messages): argv = [ "--gql-introspection-schema=./tests/introspection_schema.json" ] parser = pycodestyle.get_parser('', '') Linter.add_options(parser) options, args = parser.parse_args(argv) Linter.parse_options(options) checker = Linter(tree, filename) codes = [] messages = [] for lineno, col_offset, msg, instance in checker.run(): code, message = msg.split(" ", 1) codes.append(code) messages.append(message) assert codes == expected_codes assert set(messages) >= set(expected_messages) gql-3.6.0b2/gql-checker/tests/test_pylama_linter.py000066400000000000000000000023151460703211500223270ustar00rootroot00000000000000import ast import os import pytest from gql_checker import pylama_linter from tests.utils import extract_expected_errors def load_test_cases(): base_path = os.path.dirname(__file__) test_case_path = os.path.join(base_path, "test_cases") test_case_files = os.listdir(test_case_path) test_cases = [] for fname in test_case_files: if not fname.endswith(".py"): continue fullpath = os.path.join(test_case_path, 
fname) data = open(fullpath).read() codes, messages = extract_expected_errors(data) test_cases.append((fullpath, codes, messages)) return test_cases @pytest.mark.parametrize( "filename, expected_codes, expected_messages", load_test_cases() ) def test_expected_error(filename, expected_codes, expected_messages): checker = pylama_linter.Linter() assert checker.allow(filename) codes = [] messages = [] options = { "gql_introspection_schema": "./tests/introspection_schema.json" } for error in checker.run(filename, **options): codes.append(error['type']) messages.append(error['text']) assert codes == expected_codes assert set(messages) >= set(expected_messages) gql-3.6.0b2/gql-checker/tests/utils.py000066400000000000000000000007651460703211500175770ustar00rootroot00000000000000import re ERROR_RX = re.compile("# ((GQL[0-9]+ ?)+)(: (.*))?$") def extract_expected_errors(data): lines = data.splitlines() expected_codes = [] expected_messages = [] for line in lines: match = ERROR_RX.search(line) if match: codes = match.group(1).split() message = match.group(4) expected_codes.extend(codes) if message: expected_messages.append(message) return expected_codes, expected_messages gql-3.6.0b2/gql-checker/tox.ini000066400000000000000000000014001460703211500162210ustar00rootroot00000000000000[tox] envlist = py26,py27,pypy,py33,py34,py35,pep8,py3pep8 [testenv] deps = coverage==3.7 pytest flake8 pylama pycodestyle>=2.0 commands = coverage run --source=gql_checker/,tests/ -m pytest --capture=no --strict {posargs} coverage report -m # Temporarily disable coverage on pypy because of performance problems with # coverage.py on pypy. 
[testenv:pypy] commands = py.test --capture=no --strict {posargs} [testenv:pep8] deps = flake8 pep8-naming flake8-import-order commands = flake8 gql_checker/ [testenv:py3pep8] basepython = python3.3 deps = flake8 pep8-naming flake8-import-order commands = flake8 gql_checker/ [flake8] exclude = .tox,*.egg select = E,W,F,N,I application-import-names = gql_checker,tests gql-3.6.0b2/gql/000077500000000000000000000000001460703211500133115ustar00rootroot00000000000000gql-3.6.0b2/gql/__init__.py000066400000000000000000000010211460703211500154140ustar00rootroot00000000000000"""The primary :mod:`gql` package includes everything you need to execute GraphQL requests, with the exception of the transports which are optional: - the :func:`gql ` method to parse a GraphQL query - the :class:`Client ` class as the entrypoint to execute requests and create sessions """ from .__version__ import __version__ from .client import Client from .gql import gql from .graphql_request import GraphQLRequest __all__ = [ "__version__", "gql", "Client", "GraphQLRequest", ] gql-3.6.0b2/gql/__version__.py000066400000000000000000000000301460703211500161350ustar00rootroot00000000000000__version__ = "3.6.0b2" gql-3.6.0b2/gql/cli.py000066400000000000000000000404671460703211500144450ustar00rootroot00000000000000import asyncio import json import logging import signal as signal_module import sys import textwrap from argparse import ArgumentParser, Namespace, RawTextHelpFormatter from typing import Any, Dict, Optional from graphql import GraphQLError, print_schema from yarl import URL from gql import Client, __version__, gql from gql.transport import AsyncTransport from gql.transport.exceptions import TransportQueryError description = """ Send GraphQL queries from the command line using http(s) or websockets. If used interactively, write your query, then use Ctrl-D (EOF) to execute it. 
""" examples = """ EXAMPLES ======== # Simple query using https echo 'query { continent(code:"AF") { name } }' | \ gql-cli https://countries.trevorblades.com # Simple query using websockets echo 'query { continent(code:"AF") { name } }' | \ gql-cli wss://countries.trevorblades.com/graphql # Query with variable echo 'query getContinent($code:ID!) { continent(code:$code) { name } }' | \ gql-cli https://countries.trevorblades.com --variables code:AF # Interactive usage (insert your query in the terminal, then press Ctrl-D to execute it) gql-cli wss://countries.trevorblades.com/graphql --variables code:AF # Execute query saved in a file cat query.gql | gql-cli wss://countries.trevorblades.com/graphql # Print the schema of the backend gql-cli https://countries.trevorblades.com/graphql --print-schema """ def positive_int_or_none(value_str: str) -> Optional[int]: """Convert a string argument value into either an int or None. Raise a ValueError if the argument is negative or a string which is not "none" """ try: value_int = int(value_str) except ValueError: if value_str.lower() == "none": return None else: raise if value_int < 0: raise ValueError return value_int def get_parser(with_examples: bool = False) -> ArgumentParser: """Provides an ArgumentParser for the gql-cli script. This function is also used by sphinx to generate the script documentation. 
:param with_examples: set to False by default so that the examples are not present in the sphinx docs (they are put there with a different layout) """ parser = ArgumentParser( description=description, epilog=examples if with_examples else None, formatter_class=RawTextHelpFormatter, ) parser.add_argument( "server", help="the server url starting with http://, https://, ws:// or wss://" ) parser.add_argument( "-V", "--variables", nargs="*", help="query variables in the form key:json_value", ) parser.add_argument( "-H", "--headers", nargs="*", help="http headers in the form key:value" ) parser.add_argument("--version", action="version", version=f"v{__version__}") group = parser.add_mutually_exclusive_group() group.add_argument( "-d", "--debug", help="print lots of debugging statements (loglevel==DEBUG)", action="store_const", dest="loglevel", const=logging.DEBUG, ) group.add_argument( "-v", "--verbose", help="show low level messages (loglevel==INFO)", action="store_const", dest="loglevel", const=logging.INFO, ) parser.add_argument( "-o", "--operation-name", help="set the operation_name value", dest="operation_name", ) parser.add_argument( "--print-schema", help="get the schema from instrospection and print it", action="store_true", dest="print_schema", ) parser.add_argument( "--schema-download", nargs="*", help=textwrap.dedent( """select the introspection query arguments to download the schema. Only useful if --print-schema is used. 
By default, it will: - request field descriptions - not request deprecated input fields Possible options: - descriptions:false for a compact schema without comments - input_value_deprecation:true to download deprecated input fields - specified_by_url:true - schema_description:true - directive_is_repeatable:true""" ), dest="schema_download", ) parser.add_argument( "--execute-timeout", help="set the execute_timeout argument of the Client (default: 10)", type=positive_int_or_none, default=10, dest="execute_timeout", ) parser.add_argument( "--transport", default="auto", choices=[ "auto", "aiohttp", "phoenix", "websockets", "appsync_http", "appsync_websockets", ], help=( "select the transport. 'auto' by default: " "aiohttp or websockets depending on url scheme" ), dest="transport", ) appsync_description = """ By default, for an AppSync backend, the IAM authentication is chosen. If you want API key or JWT authentication, you can provide one of the following arguments:""" appsync_group = parser.add_argument_group( "AWS AppSync options", description=appsync_description ) appsync_auth_group = appsync_group.add_mutually_exclusive_group() appsync_auth_group.add_argument( "--api-key", help="Provide an API key for authentication", dest="api_key", ) appsync_auth_group.add_argument( "--jwt", help="Provide an JSON Web token for authentication", dest="jwt", ) return parser def get_transport_args(args: Namespace) -> Dict[str, Any]: """Extract extra arguments necessary for the transport from the parsed command line args Will create a headers dict by splitting the colon in the --headers arguments :param args: parsed command line arguments """ transport_args: Dict[str, Any] = {} # Parse the headers argument headers = {} if args.headers is not None: for header in args.headers: try: # Split only the first colon (throw a ValueError if no colon is present) header_key, header_value = header.split(":", 1) headers[header_key] = header_value except ValueError: raise ValueError(f"Invalid 
header: {header}") if args.headers is not None: transport_args["headers"] = headers return transport_args def get_execute_args(args: Namespace) -> Dict[str, Any]: """Extract extra arguments necessary for the execute or subscribe methods from the parsed command line args Extract the operation_name Extract the variable_values from the --variables argument by splitting the first colon, then loads the json value, We try to add double quotes around the value if it does not work first in order to simplify the passing of simple string values (we allow --variables KEY:VALUE instead of KEY:\"VALUE\") :param args: parsed command line arguments """ execute_args: Dict[str, Any] = {} # Parse the operation_name argument if args.operation_name is not None: execute_args["operation_name"] = args.operation_name # Parse the variables argument if args.variables is not None: variables = {} for var in args.variables: try: # Split only the first colon # (throw a ValueError if no colon is present) variable_key, variable_json_value = var.split(":", 1) # Extract the json value, # trying with double quotes if it does not work try: variable_value = json.loads(variable_json_value) except json.JSONDecodeError: try: variable_value = json.loads(f'"{variable_json_value}"') except json.JSONDecodeError: raise ValueError # Save the value in the variables dict variables[variable_key] = variable_value except ValueError: raise ValueError(f"Invalid variable: {var}") execute_args["variable_values"] = variables return execute_args def autodetect_transport(url: URL) -> str: """Detects which transport should be used depending on url.""" if url.scheme in ["ws", "wss"]: transport_name = "websockets" else: assert url.scheme in ["http", "https"] transport_name = "aiohttp" return transport_name def get_transport(args: Namespace) -> Optional[AsyncTransport]: """Instantiate a transport from the parsed command line arguments :param args: parsed command line arguments """ # Get the url scheme from server parameter 
url = URL(args.server) # Validate scheme if url.scheme not in ["http", "https", "ws", "wss"]: raise ValueError("URL protocol should be one of: http, https, ws, wss") # Get extra transport parameters from command line arguments # (headers) transport_args = get_transport_args(args) # Either use the requested transport or autodetect it if args.transport == "auto": transport_name = autodetect_transport(url) else: transport_name = args.transport # Import the correct transport class depending on the transport name if transport_name == "aiohttp": from gql.transport.aiohttp import AIOHTTPTransport return AIOHTTPTransport(url=args.server, **transport_args) elif transport_name == "phoenix": from gql.transport.phoenix_channel_websockets import ( PhoenixChannelWebsocketsTransport, ) return PhoenixChannelWebsocketsTransport(url=args.server, **transport_args) elif transport_name == "websockets": from gql.transport.websockets import WebsocketsTransport transport_args["ssl"] = url.scheme == "wss" return WebsocketsTransport(url=args.server, **transport_args) else: from gql.transport.appsync_auth import AppSyncAuthentication assert transport_name in ["appsync_http", "appsync_websockets"] assert url.host is not None auth: AppSyncAuthentication if args.api_key: from gql.transport.appsync_auth import AppSyncApiKeyAuthentication auth = AppSyncApiKeyAuthentication(host=url.host, api_key=args.api_key) elif args.jwt: from gql.transport.appsync_auth import AppSyncJWTAuthentication auth = AppSyncJWTAuthentication(host=url.host, jwt=args.jwt) else: from gql.transport.appsync_auth import AppSyncIAMAuthentication from botocore.exceptions import NoRegionError try: auth = AppSyncIAMAuthentication(host=url.host) except NoRegionError: # A warning message has been printed in the console return None transport_args["auth"] = auth if transport_name == "appsync_http": from gql.transport.aiohttp import AIOHTTPTransport return AIOHTTPTransport(url=args.server, **transport_args) else: from 
gql.transport.appsync_websockets import AppSyncWebsocketsTransport try: return AppSyncWebsocketsTransport(url=args.server, **transport_args) except Exception: # This is for the NoCredentialsError but we cannot import it here return None def get_introspection_args(args: Namespace) -> Dict: """Get the introspection args depending on the schema_download argument""" # Parse the headers argument introspection_args = {} possible_args = [ "descriptions", "specified_by_url", "directive_is_repeatable", "schema_description", "input_value_deprecation", ] if args.schema_download is not None: for arg in args.schema_download: try: # Split only the first colon (throw a ValueError if no colon is present) arg_key, arg_value = arg.split(":", 1) if arg_key not in possible_args: raise ValueError(f"Invalid schema_download: {args.schema_download}") arg_value = arg_value.lower() if arg_value not in ["true", "false"]: raise ValueError(f"Invalid schema_download: {args.schema_download}") introspection_args[arg_key] = arg_value == "true" except ValueError: raise ValueError(f"Invalid schema_download: {args.schema_download}") return introspection_args async def main(args: Namespace) -> int: """Main entrypoint of the gql-cli script :param args: The parsed command line arguments :return: The script exit code (0 = ok, 1 = error) """ # Set requested log level if args.loglevel is not None: logging.basicConfig(level=args.loglevel) try: # Instantiate transport from command line arguments transport = get_transport(args) if transport is None: return 1 # Get extra execute parameters from command line arguments # (variables, operation_name) execute_args = get_execute_args(args) except ValueError as e: print(f"Error: {e}", file=sys.stderr) return 1 # By default, the exit_code is 0 (everything is ok) exit_code = 0 # Connect to the backend and provide a session async with Client( transport=transport, fetch_schema_from_transport=args.print_schema, introspection_args=get_introspection_args(args), 
execute_timeout=args.execute_timeout, ) as session: if args.print_schema: schema_str = print_schema(session.client.schema) print(schema_str) return exit_code while True: # Read multiple lines from input and trim whitespaces # Will read until EOF character is received (Ctrl-D) query_str = sys.stdin.read().strip() # Exit if query is empty if len(query_str) == 0: break # Parse query, continue on error try: query = gql(query_str) except GraphQLError as e: print(e, file=sys.stderr) exit_code = 1 continue # Execute or Subscribe the query depending on transport try: try: async for result in session.subscribe(query, **execute_args): print(json.dumps(result)) except KeyboardInterrupt: # pragma: no cover pass except NotImplementedError: result = await session.execute(query, **execute_args) print(json.dumps(result)) except (GraphQLError, TransportQueryError) as e: print(e, file=sys.stderr) exit_code = 1 return exit_code def gql_cli() -> None: """Synchronously invoke ``main`` with the parsed command line arguments. 
Formerly ``scripts/gql-cli``, now registered as an ``entry_point`` """ # Get arguments from command line parser = get_parser(with_examples=True) args = parser.parse_args() try: # Create a new asyncio event loop loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) # Create a gql-cli task with the supplied arguments main_task = asyncio.ensure_future(main(args), loop=loop) # Add signal handlers to close gql-cli cleanly on Control-C for signal_name in ["SIGINT", "SIGTERM", "CTRL_C_EVENT", "CTRL_BREAK_EVENT"]: signal = getattr(signal_module, signal_name, None) if signal is None: continue try: loop.add_signal_handler(signal, main_task.cancel) except NotImplementedError: # pragma: no cover # not all signals supported on all platforms pass # Run the asyncio loop to execute the task exit_code = 0 try: exit_code = loop.run_until_complete(main_task) finally: loop.close() # Return with the correct exit code sys.exit(exit_code) except KeyboardInterrupt: # pragma: no cover pass gql-3.6.0b2/gql/client.py000066400000000000000000002020521460703211500151420ustar00rootroot00000000000000import asyncio import logging import sys import time import warnings from concurrent.futures import Future from queue import Queue from threading import Event, Thread from typing import ( Any, AsyncGenerator, Callable, Dict, Generator, List, Optional, Tuple, TypeVar, Union, cast, overload, ) import backoff from anyio import fail_after from graphql import ( DocumentNode, ExecutionResult, GraphQLSchema, IntrospectionQuery, build_ast_schema, get_introspection_query, parse, validate, ) from .graphql_request import GraphQLRequest from .transport.async_transport import AsyncTransport from .transport.exceptions import TransportClosed, TransportQueryError from .transport.local_schema import LocalSchemaTransport from .transport.transport import Transport from .utilities import build_client_schema from .utilities import parse_result as parse_result_fn from .utilities import serialize_variable_values from 
.utils import str_first_element """ Load the appropriate instance of the Literal type Note: we cannot use try: except ImportError because of the following mypy issue: https://github.com/python/mypy/issues/8520 """ if sys.version_info[:2] >= (3, 8): from typing import Literal else: from typing_extensions import Literal # pragma: no cover log = logging.getLogger(__name__) class Client: """The Client class is the main entrypoint to execute GraphQL requests on a GQL transport. It can take sync or async transports as argument and can either execute and subscribe to requests itself with the :func:`execute ` and :func:`subscribe ` methods OR can be used to get a sync or async session depending on the transport type. To connect to an :ref:`async transport ` and get an :class:`async session `, use :code:`async with client as session:` To connect to a :ref:`sync transport ` and get a :class:`sync session `, use :code:`with client as session:` """ def __init__( self, schema: Optional[Union[str, GraphQLSchema]] = None, introspection: Optional[IntrospectionQuery] = None, transport: Optional[Union[Transport, AsyncTransport]] = None, fetch_schema_from_transport: bool = False, introspection_args: Optional[Dict] = None, execute_timeout: Optional[Union[int, float]] = 10, serialize_variables: bool = False, parse_results: bool = False, batch_interval: float = 0, batch_max: int = 10, ): """Initialize the client with the given parameters. :param schema: an optional GraphQL Schema for local validation See :ref:`schema_validation` :param transport: The provided :ref:`transport `. :param fetch_schema_from_transport: Boolean to indicate that if we want to fetch the schema from the transport using an introspection query. :param introspection_args: arguments passed to the get_introspection_query method of graphql-core. :param execute_timeout: The maximum time in seconds for the execution of a request before a TimeoutError is raised. Only used for async transports. 
Passing None results in waiting forever for a response. :param serialize_variables: whether the variable values should be serialized. Used for custom scalars and/or enums. Default: False. :param parse_results: Whether gql will try to parse the serialized output sent by the backend. Can be used to deserialize custom scalars or enums. :param batch_interval: Time to wait in seconds for batching requests together. Batching is disabled (by default) if 0. :param batch_max: Maximum number of requests in a single batch. """ if introspection: assert ( not schema ), "Cannot provide introspection and schema at the same time." schema = build_client_schema(introspection) if isinstance(schema, str): type_def_ast = parse(schema) schema = build_ast_schema(type_def_ast) if transport and fetch_schema_from_transport: assert ( not schema ), "Cannot fetch the schema from transport if is already provided." assert not type(transport).__name__ == "AppSyncWebsocketsTransport", ( "fetch_schema_from_transport=True is not allowed " "for AppSyncWebsocketsTransport " "because only subscriptions are allowed on the realtime endpoint." 
) if schema and not transport: transport = LocalSchemaTransport(schema) # GraphQL schema self.schema: Optional[GraphQLSchema] = schema # Answer of the introspection query self.introspection: Optional[IntrospectionQuery] = introspection # GraphQL transport chosen self.transport: Optional[Union[Transport, AsyncTransport]] = transport # Flag to indicate that we need to fetch the schema from the transport # On async transports, we fetch the schema before executing the first query self.fetch_schema_from_transport: bool = fetch_schema_from_transport self.introspection_args = ( {} if introspection_args is None else introspection_args ) # Enforced timeout of the execute function (only for async transports) self.execute_timeout = execute_timeout self.serialize_variables = serialize_variables self.parse_results = parse_results self.batch_interval = batch_interval self.batch_max = batch_max @property def batching_enabled(self): return self.batch_interval != 0 def validate(self, document: DocumentNode): """:meta private:""" assert ( self.schema ), "Cannot validate the document locally, you need to pass a schema." 
validation_errors = validate(self.schema, document) if validation_errors: raise validation_errors[0] def _build_schema_from_introspection(self, execution_result: ExecutionResult): if execution_result.errors: raise TransportQueryError( ( "Error while fetching schema: " f"{str_first_element(execution_result.errors)}\n" "If you don't need the schema, you can try with: " '"fetch_schema_from_transport=False"' ), errors=execution_result.errors, data=execution_result.data, extensions=execution_result.extensions, ) self.introspection = cast(IntrospectionQuery, execution_result.data) self.schema = build_client_schema(self.introspection) @overload def execute_sync( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = ..., operation_name: Optional[str] = ..., serialize_variables: Optional[bool] = ..., parse_result: Optional[bool] = ..., *, # https://github.com/python/mypy/issues/7333#issuecomment-788255229 get_execution_result: Literal[False] = ..., **kwargs, ) -> Dict[str, Any]: ... # pragma: no cover @overload def execute_sync( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = ..., operation_name: Optional[str] = ..., serialize_variables: Optional[bool] = ..., parse_result: Optional[bool] = ..., *, get_execution_result: Literal[True], **kwargs, ) -> ExecutionResult: ... # pragma: no cover @overload def execute_sync( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = ..., operation_name: Optional[str] = ..., serialize_variables: Optional[bool] = ..., parse_result: Optional[bool] = ..., *, get_execution_result: bool, **kwargs, ) -> Union[Dict[str, Any], ExecutionResult]: ... 
# pragma: no cover def execute_sync( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, get_execution_result: bool = False, **kwargs, ) -> Union[Dict[str, Any], ExecutionResult]: """:meta private:""" with self as session: return session.execute( document, variable_values=variable_values, operation_name=operation_name, serialize_variables=serialize_variables, parse_result=parse_result, get_execution_result=get_execution_result, **kwargs, ) @overload def execute_batch_sync( self, requests: List[GraphQLRequest], *, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, get_execution_result: Literal[False], **kwargs, ) -> List[Dict[str, Any]]: ... # pragma: no cover @overload def execute_batch_sync( self, requests: List[GraphQLRequest], *, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, get_execution_result: Literal[True], **kwargs, ) -> List[ExecutionResult]: ... # pragma: no cover @overload def execute_batch_sync( self, requests: List[GraphQLRequest], *, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, get_execution_result: bool, **kwargs, ) -> Union[List[Dict[str, Any]], List[ExecutionResult]]: ... 
# pragma: no cover def execute_batch_sync( self, requests: List[GraphQLRequest], *, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, get_execution_result: bool = False, **kwargs, ) -> Union[List[Dict[str, Any]], List[ExecutionResult]]: """:meta private:""" with self as session: return session.execute_batch( requests, serialize_variables=serialize_variables, parse_result=parse_result, get_execution_result=get_execution_result, **kwargs, ) @overload async def execute_async( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = ..., operation_name: Optional[str] = ..., serialize_variables: Optional[bool] = ..., parse_result: Optional[bool] = ..., *, # https://github.com/python/mypy/issues/7333#issuecomment-788255229 get_execution_result: Literal[False] = ..., **kwargs, ) -> Dict[str, Any]: ... # pragma: no cover @overload async def execute_async( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = ..., operation_name: Optional[str] = ..., serialize_variables: Optional[bool] = ..., parse_result: Optional[bool] = ..., *, get_execution_result: Literal[True], **kwargs, ) -> ExecutionResult: ... # pragma: no cover @overload async def execute_async( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = ..., operation_name: Optional[str] = ..., serialize_variables: Optional[bool] = ..., parse_result: Optional[bool] = ..., *, get_execution_result: bool, **kwargs, ) -> Union[Dict[str, Any], ExecutionResult]: ... 
# pragma: no cover async def execute_async( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, get_execution_result: bool = False, **kwargs, ) -> Union[Dict[str, Any], ExecutionResult]: """:meta private:""" async with self as session: return await session.execute( document, variable_values=variable_values, operation_name=operation_name, serialize_variables=serialize_variables, parse_result=parse_result, get_execution_result=get_execution_result, **kwargs, ) @overload def execute( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = ..., operation_name: Optional[str] = ..., serialize_variables: Optional[bool] = ..., parse_result: Optional[bool] = ..., *, # https://github.com/python/mypy/issues/7333#issuecomment-788255229 get_execution_result: Literal[False] = ..., **kwargs, ) -> Dict[str, Any]: ... # pragma: no cover @overload def execute( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = ..., operation_name: Optional[str] = ..., serialize_variables: Optional[bool] = ..., parse_result: Optional[bool] = ..., *, get_execution_result: Literal[True], **kwargs, ) -> ExecutionResult: ... # pragma: no cover @overload def execute( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = ..., operation_name: Optional[str] = ..., serialize_variables: Optional[bool] = ..., parse_result: Optional[bool] = ..., *, get_execution_result: bool, **kwargs, ) -> Union[Dict[str, Any], ExecutionResult]: ... 
# pragma: no cover def execute( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, get_execution_result: bool = False, **kwargs, ) -> Union[Dict[str, Any], ExecutionResult]: """Execute the provided document AST against the remote server using the transport provided during init. This function **WILL BLOCK** until the result is received from the server. Either the transport is sync and we execute the query synchronously directly OR the transport is async and we execute the query in the asyncio loop (blocking here until answer). This method will: - connect using the transport to get a session - execute the GraphQL request on the transport session - close the session and close the connection to the server If you have multiple requests to send, it is better to get your own session and execute the requests in your session. The extra arguments passed in the method will be passed to the transport execute method. """ if isinstance(self.transport, AsyncTransport): # Get the current asyncio event loop # Or create a new event loop if there isn't one (in a new Thread) try: with warnings.catch_warnings(): warnings.filterwarnings( "ignore", message="There is no current event loop" ) loop = asyncio.get_event_loop() except RuntimeError: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) assert not loop.is_running(), ( "Cannot run client.execute(query) if an asyncio loop is running." " Use 'await client.execute_async(query)' instead." 
) data = loop.run_until_complete( self.execute_async( document, variable_values=variable_values, operation_name=operation_name, serialize_variables=serialize_variables, parse_result=parse_result, get_execution_result=get_execution_result, **kwargs, ) ) return data else: # Sync transports return self.execute_sync( document, variable_values=variable_values, operation_name=operation_name, serialize_variables=serialize_variables, parse_result=parse_result, get_execution_result=get_execution_result, **kwargs, ) @overload def execute_batch( self, requests: List[GraphQLRequest], *, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, get_execution_result: Literal[False], **kwargs, ) -> List[Dict[str, Any]]: ... # pragma: no cover @overload def execute_batch( self, requests: List[GraphQLRequest], *, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, get_execution_result: Literal[True], **kwargs, ) -> List[ExecutionResult]: ... # pragma: no cover @overload def execute_batch( self, requests: List[GraphQLRequest], *, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, get_execution_result: bool, **kwargs, ) -> Union[List[Dict[str, Any]], List[ExecutionResult]]: ... # pragma: no cover def execute_batch( self, requests: List[GraphQLRequest], *, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, get_execution_result: bool = False, **kwargs, ) -> Union[List[Dict[str, Any]], List[ExecutionResult]]: """Execute multiple GraphQL requests in a batch against the remote server using the transport provided during init. This function **WILL BLOCK** until the result is received from the server. Either the transport is sync and we execute the query synchronously directly OR the transport is async and we execute the query in the asyncio loop (blocking here until answer). 
This method will: - connect using the transport to get a session - execute the GraphQL requests on the transport session - close the session and close the connection to the server If you want to perform multiple executions, it is better to use the context manager to keep a session active. The extra arguments passed in the method will be passed to the transport execute method. """ if isinstance(self.transport, AsyncTransport): raise NotImplementedError("Batching is not implemented for async yet.") else: # Sync transports return self.execute_batch_sync( requests, serialize_variables=serialize_variables, parse_result=parse_result, get_execution_result=get_execution_result, **kwargs, ) @overload def subscribe_async( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = ..., operation_name: Optional[str] = ..., serialize_variables: Optional[bool] = ..., parse_result: Optional[bool] = ..., *, get_execution_result: Literal[False] = ..., **kwargs, ) -> AsyncGenerator[Dict[str, Any], None]: ... # pragma: no cover @overload def subscribe_async( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = ..., operation_name: Optional[str] = ..., serialize_variables: Optional[bool] = ..., parse_result: Optional[bool] = ..., *, get_execution_result: Literal[True], **kwargs, ) -> AsyncGenerator[ExecutionResult, None]: ... # pragma: no cover @overload def subscribe_async( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = ..., operation_name: Optional[str] = ..., serialize_variables: Optional[bool] = ..., parse_result: Optional[bool] = ..., *, get_execution_result: bool, **kwargs, ) -> Union[ AsyncGenerator[Dict[str, Any], None], AsyncGenerator[ExecutionResult, None] ]: ... 
# pragma: no cover async def subscribe_async( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, get_execution_result: bool = False, **kwargs, ) -> Union[ AsyncGenerator[Dict[str, Any], None], AsyncGenerator[ExecutionResult, None] ]: """:meta private:""" async with self as session: generator = session.subscribe( document, variable_values=variable_values, operation_name=operation_name, serialize_variables=serialize_variables, parse_result=parse_result, get_execution_result=get_execution_result, **kwargs, ) async for result in generator: yield result @overload def subscribe( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = ..., operation_name: Optional[str] = ..., serialize_variables: Optional[bool] = ..., parse_result: Optional[bool] = ..., *, get_execution_result: Literal[False] = ..., **kwargs, ) -> Generator[Dict[str, Any], None, None]: ... # pragma: no cover @overload def subscribe( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = ..., operation_name: Optional[str] = ..., serialize_variables: Optional[bool] = ..., parse_result: Optional[bool] = ..., *, get_execution_result: Literal[True], **kwargs, ) -> Generator[ExecutionResult, None, None]: ... # pragma: no cover @overload def subscribe( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = ..., operation_name: Optional[str] = ..., serialize_variables: Optional[bool] = ..., parse_result: Optional[bool] = ..., *, get_execution_result: bool, **kwargs, ) -> Union[ Generator[Dict[str, Any], None, None], Generator[ExecutionResult, None, None] ]: ... 
# pragma: no cover def subscribe( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, *, get_execution_result: bool = False, **kwargs, ) -> Union[ Generator[Dict[str, Any], None, None], Generator[ExecutionResult, None, None] ]: """Execute a GraphQL subscription with a python generator. We need an async transport for this functionality. """ # Get the current asyncio event loop # Or create a new event loop if there isn't one (in a new Thread) try: with warnings.catch_warnings(): warnings.filterwarnings( "ignore", message="There is no current event loop" ) loop = asyncio.get_event_loop() except RuntimeError: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) async_generator: Union[ AsyncGenerator[Dict[str, Any], None], AsyncGenerator[ExecutionResult, None] ] = self.subscribe_async( document, variable_values=variable_values, operation_name=operation_name, serialize_variables=serialize_variables, parse_result=parse_result, get_execution_result=get_execution_result, **kwargs, ) assert not loop.is_running(), ( "Cannot run client.subscribe(query) if an asyncio loop is running." " Use 'await client.subscribe_async(query)' instead." 
) try: while True: # Note: we need to create a task here in order to be able to close # the async generator properly on python 3.8 # See https://bugs.python.org/issue38559 generator_task = asyncio.ensure_future( async_generator.__anext__(), loop=loop ) result: Union[ Dict[str, Any], ExecutionResult ] = loop.run_until_complete( generator_task ) # type: ignore yield result except StopAsyncIteration: pass except (KeyboardInterrupt, Exception, GeneratorExit): # Graceful shutdown asyncio.ensure_future(async_generator.aclose(), loop=loop) generator_task.cancel() loop.run_until_complete(loop.shutdown_asyncgens()) # Then reraise the exception raise async def connect_async(self, reconnecting=False, **kwargs): r"""Connect asynchronously with the underlying async transport to produce a session. That session will be a permanent auto-reconnecting session if :code:`reconnecting=True`. If you call this method, you should call the :meth:`close_async ` method for cleanup. :param reconnecting: if True, create a permanent reconnecting session :param \**kwargs: additional arguments for the :meth:`ReconnectingAsyncClientSession init method `. 
""" assert isinstance( self.transport, AsyncTransport ), "Only a transport of type AsyncTransport can be used asynchronously" if reconnecting: self.session = ReconnectingAsyncClientSession(client=self, **kwargs) await self.session.start_connecting_task() else: await self.transport.connect() self.session = AsyncClientSession(client=self) # Get schema from transport if needed try: if self.fetch_schema_from_transport and not self.schema: await self.session.fetch_schema() except Exception: # we don't know what type of exception is thrown here because it # depends on the underlying transport; we just make sure that the # transport is closed and re-raise the exception await self.transport.close() raise return self.session async def close_async(self): """Close the async transport and stop the optional reconnecting task.""" if isinstance(self.session, ReconnectingAsyncClientSession): await self.session.stop_connecting_task() await self.transport.close() async def __aenter__(self): return await self.connect_async() async def __aexit__(self, exc_type, exc, tb): await self.close_async() def connect_sync(self): r"""Connect synchronously with the underlying sync transport to produce a session. If you call this method, you should call the :meth:`close_sync ` method for cleanup. """ assert not isinstance(self.transport, AsyncTransport), ( "Only a sync transport can be used." " Use 'async with Client(...) as session:' instead" ) if not hasattr(self, "session"): self.session = SyncClientSession(client=self) self.session.connect() # Get schema from transport if needed try: if self.fetch_schema_from_transport and not self.schema: self.session.fetch_schema() except Exception: # we don't know what type of exception is thrown here because it # depends on the underlying transport; we just make sure that the # transport is closed and re-raise the exception self.session.close() raise return self.session def close_sync(self): """Close the sync session and the sync transport. 
If batching is enabled, this will block until the remaining queries in the batching queue have been processed. """ self.session.close() def __enter__(self): return self.connect_sync() def __exit__(self, *args): self.close_sync() class SyncClientSession: """An instance of this class is created when using :code:`with` on the client. It contains the sync method execute to send queries on a sync transport using the same session. """ def __init__(self, client: Client): """:param client: the :class:`client ` used""" self.client = client def _execute( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, **kwargs, ) -> ExecutionResult: """Execute the provided document AST synchronously using the sync transport, returning an ExecutionResult object. :param document: GraphQL query as AST Node object. :param variable_values: Dictionary of input parameters. :param operation_name: Name of the operation that shall be executed. :param serialize_variables: whether the variable values should be serialized. Used for custom scalars and/or enums. By default use the serialize_variables argument of the client. :param parse_result: Whether gql will deserialize the result. By default use the parse_results argument of the client. 
The extra arguments are passed to the transport execute method.""" # Validate document if self.client.schema: self.client.validate(document) # Parse variable values for custom scalars if requested if variable_values is not None: if serialize_variables or ( serialize_variables is None and self.client.serialize_variables ): variable_values = serialize_variable_values( self.client.schema, document, variable_values, operation_name=operation_name, ) if self.client.batching_enabled: request = GraphQLRequest( document, variable_values=variable_values, operation_name=operation_name, ) future_result = self._execute_future(request) result = future_result.result() else: result = self.transport.execute( document, variable_values=variable_values, operation_name=operation_name, **kwargs, ) # Unserialize the result if requested if self.client.schema: if parse_result or (parse_result is None and self.client.parse_results): result.data = parse_result_fn( self.client.schema, document, result.data, operation_name=operation_name, ) return result @overload def execute( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = ..., operation_name: Optional[str] = ..., serialize_variables: Optional[bool] = ..., parse_result: Optional[bool] = ..., *, get_execution_result: Literal[False] = ..., **kwargs, ) -> Dict[str, Any]: ... # pragma: no cover @overload def execute( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = ..., operation_name: Optional[str] = ..., serialize_variables: Optional[bool] = ..., parse_result: Optional[bool] = ..., *, get_execution_result: Literal[True], **kwargs, ) -> ExecutionResult: ... # pragma: no cover @overload def execute( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = ..., operation_name: Optional[str] = ..., serialize_variables: Optional[bool] = ..., parse_result: Optional[bool] = ..., *, get_execution_result: bool, **kwargs, ) -> Union[Dict[str, Any], ExecutionResult]: ... 
# pragma: no cover def execute( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, get_execution_result: bool = False, **kwargs, ) -> Union[Dict[str, Any], ExecutionResult]: """Execute the provided document AST synchronously using the sync transport. Raises a TransportQueryError if an error has been returned in the ExecutionResult. :param document: GraphQL query as AST Node object. :param variable_values: Dictionary of input parameters. :param operation_name: Name of the operation that shall be executed. :param serialize_variables: whether the variable values should be serialized. Used for custom scalars and/or enums. By default use the serialize_variables argument of the client. :param parse_result: Whether gql will deserialize the result. By default use the parse_results argument of the client. :param get_execution_result: return the full ExecutionResult instance instead of only the "data" field. Necessary if you want to get the "extensions" field. 
The extra arguments are passed to the transport execute method.""" # Validate and execute on the transport result = self._execute( document, variable_values=variable_values, operation_name=operation_name, serialize_variables=serialize_variables, parse_result=parse_result, **kwargs, ) # Raise an error if an error is returned in the ExecutionResult object if result.errors: raise TransportQueryError( str_first_element(result.errors), errors=result.errors, data=result.data, extensions=result.extensions, ) assert ( result.data is not None ), "Transport returned an ExecutionResult without data or errors" if get_execution_result: return result return result.data def _execute_batch( self, requests: List[GraphQLRequest], *, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, validate_document: Optional[bool] = True, **kwargs, ) -> List[ExecutionResult]: """Execute multiple GraphQL requests in a batch, using the sync transport, returning a list of ExecutionResult objects. :param requests: List of requests that will be executed. :param serialize_variables: whether the variable values should be serialized. Used for custom scalars and/or enums. By default use the serialize_variables argument of the client. :param parse_result: Whether gql will deserialize the result. By default use the parse_results argument of the client. :param validate_document: Whether we still need to validate the document. 
The extra arguments are passed to the transport execute method.""" # Validate document if self.client.schema: if validate_document: for req in requests: self.client.validate(req.document) # Parse variable values for custom scalars if requested if serialize_variables or ( serialize_variables is None and self.client.serialize_variables ): requests = [ req.serialize_variable_values(self.client.schema) if req.variable_values is not None else req for req in requests ] results = self.transport.execute_batch(requests, **kwargs) # Unserialize the result if requested if self.client.schema: if parse_result or (parse_result is None and self.client.parse_results): for result in results: result.data = parse_result_fn( self.client.schema, req.document, result.data, operation_name=req.operation_name, ) return results @overload def execute_batch( self, requests: List[GraphQLRequest], *, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, get_execution_result: Literal[False], **kwargs, ) -> List[Dict[str, Any]]: ... # pragma: no cover @overload def execute_batch( self, requests: List[GraphQLRequest], *, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, get_execution_result: Literal[True], **kwargs, ) -> List[ExecutionResult]: ... # pragma: no cover @overload def execute_batch( self, requests: List[GraphQLRequest], *, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, get_execution_result: bool, **kwargs, ) -> Union[List[Dict[str, Any]], List[ExecutionResult]]: ... # pragma: no cover def execute_batch( self, requests: List[GraphQLRequest], *, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, get_execution_result: bool = False, **kwargs, ) -> Union[List[Dict[str, Any]], List[ExecutionResult]]: """Execute multiple GraphQL requests in a batch, using the sync transport. This method sends the requests to the server all at once. 
Raises a TransportQueryError if an error has been returned in any ExecutionResult. :param requests: List of requests that will be executed. :param serialize_variables: whether the variable values should be serialized. Used for custom scalars and/or enums. By default use the serialize_variables argument of the client. :param parse_result: Whether gql will deserialize the result. By default use the parse_results argument of the client. :param get_execution_result: return the full ExecutionResult instance instead of only the "data" field. Necessary if you want to get the "extensions" field. The extra arguments are passed to the transport execute method.""" # Validate and execute on the transport results = self._execute_batch( requests, serialize_variables=serialize_variables, parse_result=parse_result, **kwargs, ) for result in results: # Raise an error if an error is returned in the ExecutionResult object if result.errors: raise TransportQueryError( str_first_element(result.errors), errors=result.errors, data=result.data, extensions=result.extensions, ) assert ( result.data is not None ), "Transport returned an ExecutionResult without data or errors" if get_execution_result: return results return cast(List[Dict[str, Any]], [result.data for result in results]) def _batch_loop(self) -> None: """main loop of the thread used to wait for requests to execute them in a batch""" stop_loop = False while not stop_loop: # First wait for a first request in from the batch queue requests_and_futures: List[Tuple[GraphQLRequest, Future]] = [] request_and_future: Tuple[GraphQLRequest, Future] = self.batch_queue.get() if request_and_future is None: break requests_and_futures.append(request_and_future) # Then wait the requested batch interval except if we already # have the maximum number of requests in the queue if self.batch_queue.qsize() < self.client.batch_max - 1: time.sleep(self.client.batch_interval) # Then get the requests which had been made during that wait interval for _ in 
range(self.client.batch_max - 1): if self.batch_queue.empty(): break request_and_future = self.batch_queue.get() if request_and_future is None: stop_loop = True break requests_and_futures.append(request_and_future) requests = [request for request, _ in requests_and_futures] futures = [future for _, future in requests_and_futures] # Manually execute the requests in a batch try: results: List[ExecutionResult] = self._execute_batch( requests, serialize_variables=False, # already done parse_result=False, validate_document=False, ) except Exception as exc: for future in futures: future.set_exception(exc) continue # Fill in the future results for result, future in zip(results, futures): future.set_result(result) # Indicate that the Thread has stopped self._batch_thread_stopped_event.set() def _execute_future( self, request: GraphQLRequest, ) -> Future: """If batching is enabled, this method will put a request in the batching queue instead of executing it directly so that the requests could be put in a batch. """ assert hasattr(self, "batch_queue"), "Batching is not enabled" assert not self._batch_thread_stop_requested, "Batching thread has been stopped" future: Future = Future() self.batch_queue.put((request, future)) return future def connect(self): """Connect the transport and initialize the batch threading loop if batching is enabled.""" if self.client.batching_enabled: self.batch_queue: Queue = Queue() self._batch_thread_stop_requested = False self._batch_thread_stopped_event = Event() self._batch_thread = Thread(target=self._batch_loop, daemon=True) self._batch_thread.start() self.transport.connect() def close(self): """Close the transport and cleanup the batching thread if batching is enabled. Will wait until all the remaining requests in the batch processing queue have been executed. 
""" if hasattr(self, "_batch_thread_stopped_event"): # Send a None in the queue to indicate that the batching Thread must stop # after having processed the remaining requests in the queue self._batch_thread_stop_requested = True self.batch_queue.put(None) # Wait for the Thread to stop self._batch_thread_stopped_event.wait() self.transport.close() def fetch_schema(self) -> None: """Fetch the GraphQL schema explicitly using introspection. Don't use this function and instead set the fetch_schema_from_transport attribute to True""" introspection_query = get_introspection_query(**self.client.introspection_args) execution_result = self.transport.execute(parse(introspection_query)) self.client._build_schema_from_introspection(execution_result) @property def transport(self): return self.client.transport class AsyncClientSession: """An instance of this class is created when using :code:`async with` on a :class:`client `. It contains the async methods (execute, subscribe) to send queries on an async transport using the same session. """ def __init__(self, client: Client): """:param client: the :class:`client ` used""" self.client = client async def _subscribe( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, **kwargs, ) -> AsyncGenerator[ExecutionResult, None]: """Coroutine to subscribe asynchronously to the provided document AST asynchronously using the async transport, returning an async generator producing ExecutionResult objects. * Validate the query with the schema if provided. * Serialize the variable_values if requested. :param document: GraphQL query as AST Node object. :param variable_values: Dictionary of input parameters. :param operation_name: Name of the operation that shall be executed. :param serialize_variables: whether the variable values should be serialized. Used for custom scalars and/or enums. 
By default use the serialize_variables argument of the client. :param parse_result: Whether gql will deserialize the result. By default use the parse_results argument of the client. The extra arguments are passed to the transport subscribe method.""" # Validate document if self.client.schema: self.client.validate(document) # Parse variable values for custom scalars if requested if variable_values is not None: if serialize_variables or ( serialize_variables is None and self.client.serialize_variables ): variable_values = serialize_variable_values( self.client.schema, document, variable_values, operation_name=operation_name, ) # Subscribe to the transport inner_generator: AsyncGenerator[ ExecutionResult, None ] = self.transport.subscribe( document, variable_values=variable_values, operation_name=operation_name, **kwargs, ) # Keep a reference to the inner generator to allow the user to call aclose() # before a break if python version is too old (pypy3 py 3.6.1) self._generator = inner_generator try: async for result in inner_generator: if self.client.schema: if parse_result or ( parse_result is None and self.client.parse_results ): result.data = parse_result_fn( self.client.schema, document, result.data, operation_name=operation_name, ) yield result finally: await inner_generator.aclose() @overload def subscribe( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = ..., operation_name: Optional[str] = ..., serialize_variables: Optional[bool] = ..., parse_result: Optional[bool] = ..., *, get_execution_result: Literal[False] = ..., **kwargs, ) -> AsyncGenerator[Dict[str, Any], None]: ... # pragma: no cover @overload def subscribe( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = ..., operation_name: Optional[str] = ..., serialize_variables: Optional[bool] = ..., parse_result: Optional[bool] = ..., *, get_execution_result: Literal[True], **kwargs, ) -> AsyncGenerator[ExecutionResult, None]: ... 
# pragma: no cover @overload def subscribe( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = ..., operation_name: Optional[str] = ..., serialize_variables: Optional[bool] = ..., parse_result: Optional[bool] = ..., *, get_execution_result: bool, **kwargs, ) -> Union[ AsyncGenerator[Dict[str, Any], None], AsyncGenerator[ExecutionResult, None] ]: ... # pragma: no cover async def subscribe( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, get_execution_result: bool = False, **kwargs, ) -> Union[ AsyncGenerator[Dict[str, Any], None], AsyncGenerator[ExecutionResult, None] ]: """Coroutine to subscribe asynchronously to the provided document AST asynchronously using the async transport. Raises a TransportQueryError if an error has been returned in the ExecutionResult. :param document: GraphQL query as AST Node object. :param variable_values: Dictionary of input parameters. :param operation_name: Name of the operation that shall be executed. :param serialize_variables: whether the variable values should be serialized. Used for custom scalars and/or enums. By default use the serialize_variables argument of the client. :param parse_result: Whether gql will deserialize the result. By default use the parse_results argument of the client. :param get_execution_result: yield the full ExecutionResult instance instead of only the "data" field. Necessary if you want to get the "extensions" field. 
The extra arguments are passed to the transport subscribe method.""" inner_generator: AsyncGenerator[ExecutionResult, None] = self._subscribe( document, variable_values=variable_values, operation_name=operation_name, serialize_variables=serialize_variables, parse_result=parse_result, **kwargs, ) try: # Validate and subscribe on the transport async for result in inner_generator: # Raise an error if an error is returned in the ExecutionResult object if result.errors: raise TransportQueryError( str_first_element(result.errors), errors=result.errors, data=result.data, extensions=result.extensions, ) elif result.data is not None: if get_execution_result: yield result else: yield result.data finally: await inner_generator.aclose() async def _execute( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, **kwargs, ) -> ExecutionResult: """Coroutine to execute the provided document AST asynchronously using the async transport, returning an ExecutionResult object. * Validate the query with the schema if provided. * Serialize the variable_values if requested. :param document: GraphQL query as AST Node object. :param variable_values: Dictionary of input parameters. :param operation_name: Name of the operation that shall be executed. :param serialize_variables: whether the variable values should be serialized. Used for custom scalars and/or enums. By default use the serialize_variables argument of the client. :param parse_result: Whether gql will deserialize the result. By default use the parse_results argument of the client. 
The extra arguments are passed to the transport execute method.""" # Validate document if self.client.schema: self.client.validate(document) # Parse variable values for custom scalars if requested if variable_values is not None: if serialize_variables or ( serialize_variables is None and self.client.serialize_variables ): variable_values = serialize_variable_values( self.client.schema, document, variable_values, operation_name=operation_name, ) # Execute the query with the transport with a timeout with fail_after(self.client.execute_timeout): result = await self.transport.execute( document, variable_values=variable_values, operation_name=operation_name, **kwargs, ) # Unserialize the result if requested if self.client.schema: if parse_result or (parse_result is None and self.client.parse_results): result.data = parse_result_fn( self.client.schema, document, result.data, operation_name=operation_name, ) return result @overload async def execute( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = ..., operation_name: Optional[str] = ..., serialize_variables: Optional[bool] = ..., parse_result: Optional[bool] = ..., *, get_execution_result: Literal[False] = ..., **kwargs, ) -> Dict[str, Any]: ... # pragma: no cover @overload async def execute( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = ..., operation_name: Optional[str] = ..., serialize_variables: Optional[bool] = ..., parse_result: Optional[bool] = ..., *, get_execution_result: Literal[True], **kwargs, ) -> ExecutionResult: ... # pragma: no cover @overload async def execute( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = ..., operation_name: Optional[str] = ..., serialize_variables: Optional[bool] = ..., parse_result: Optional[bool] = ..., *, get_execution_result: bool, **kwargs, ) -> Union[Dict[str, Any], ExecutionResult]: ... 
# pragma: no cover async def execute( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, get_execution_result: bool = False, **kwargs, ) -> Union[Dict[str, Any], ExecutionResult]: """Coroutine to execute the provided document AST asynchronously using the async transport. Raises a TransportQueryError if an error has been returned in the ExecutionResult. :param document: GraphQL query as AST Node object. :param variable_values: Dictionary of input parameters. :param operation_name: Name of the operation that shall be executed. :param serialize_variables: whether the variable values should be serialized. Used for custom scalars and/or enums. By default use the serialize_variables argument of the client. :param parse_result: Whether gql will deserialize the result. By default use the parse_results argument of the client. :param get_execution_result: return the full ExecutionResult instance instead of only the "data" field. Necessary if you want to get the "extensions" field. The extra arguments are passed to the transport execute method.""" # Validate and execute on the transport result = await self._execute( document, variable_values=variable_values, operation_name=operation_name, serialize_variables=serialize_variables, parse_result=parse_result, **kwargs, ) # Raise an error if an error is returned in the ExecutionResult object if result.errors: raise TransportQueryError( str_first_element(result.errors), errors=result.errors, data=result.data, extensions=result.extensions, ) assert ( result.data is not None ), "Transport returned an ExecutionResult without data or errors" if get_execution_result: return result return result.data async def fetch_schema(self) -> None: """Fetch the GraphQL schema explicitly using introspection. 
Don't use this function and instead set the fetch_schema_from_transport attribute to True""" introspection_query = get_introspection_query(**self.client.introspection_args) execution_result = await self.transport.execute(parse(introspection_query)) self.client._build_schema_from_introspection(execution_result) @property def transport(self): return self.client.transport _CallableT = TypeVar("_CallableT", bound=Callable[..., Any]) _Decorator = Callable[[_CallableT], _CallableT] class ReconnectingAsyncClientSession(AsyncClientSession): """An instance of this class is created when using the :meth:`connect_async ` method of the :class:`Client ` class with :code:`reconnecting=True`. It is used to provide a single session which will reconnect automatically if the connection fails. """ def __init__( self, client: Client, retry_connect: Union[bool, _Decorator] = True, retry_execute: Union[bool, _Decorator] = True, ): """ :param client: the :class:`client ` used. :param retry_connect: Either a Boolean to activate/deactivate the retries for the connection to the transport OR a backoff decorator to provide specific retries parameters for the connections. :param retry_execute: Either a Boolean to activate/deactivate the retries for the execute method OR a backoff decorator to provide specific retries parameters for this method. 
""" self.client = client self._connect_task = None self._reconnect_request_event = asyncio.Event() self._connected_event = asyncio.Event() if retry_connect is True: # By default, retry again and again, with maximum 60 seconds # between retries self.retry_connect = backoff.on_exception( backoff.expo, Exception, max_value=60, ) elif retry_connect is False: self.retry_connect = lambda e: e else: assert callable(retry_connect) self.retry_connect = retry_connect if retry_execute is True: # By default, retry 5 times, except if we receive a TransportQueryError self.retry_execute = backoff.on_exception( backoff.expo, Exception, max_tries=5, giveup=lambda e: isinstance(e, TransportQueryError), ) elif retry_execute is False: self.retry_execute = lambda e: e else: assert callable(retry_execute) self.retry_execute = retry_execute # Creating the _execute_with_retries and _connect_with_retries methods # using the provided backoff decorators self._execute_with_retries = self.retry_execute(self._execute_once) self._connect_with_retries = self.retry_connect(self.transport.connect) async def _connection_loop(self): """Coroutine used for the connection task. - try to connect to the transport with retries - send a connected event when the connection has been made - then wait for a reconnect request to try to connect again """ while True: # Connect to the transport with the retry decorator # By default it should keep retrying until it connect await self._connect_with_retries() # Once connected, set the connected event self._connected_event.set() self._connected_event.clear() # Then wait for the reconnect event self._reconnect_request_event.clear() await self._reconnect_request_event.wait() async def start_connecting_task(self): """Start the task responsible to restart the connection of the transport when requested by an event. 
""" if self._connect_task: log.warning("connect task already started!") else: self._connect_task = asyncio.ensure_future(self._connection_loop()) await self._connected_event.wait() async def stop_connecting_task(self): """Stop the connecting task.""" if self._connect_task is not None: self._connect_task.cancel() self._connect_task = None async def _execute_once( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, **kwargs, ) -> ExecutionResult: """Same Coroutine as parent method _execute but requesting a reconnection if we receive a TransportClosed exception. """ try: answer = await super()._execute( document, variable_values=variable_values, operation_name=operation_name, serialize_variables=serialize_variables, parse_result=parse_result, **kwargs, ) except TransportClosed: self._reconnect_request_event.set() raise return answer async def _execute( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, **kwargs, ) -> ExecutionResult: """Same Coroutine as parent, but with optional retries and requesting a reconnection if we receive a TransportClosed exception. """ return await self._execute_with_retries( document, variable_values=variable_values, operation_name=operation_name, serialize_variables=serialize_variables, parse_result=parse_result, **kwargs, ) async def _subscribe( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, serialize_variables: Optional[bool] = None, parse_result: Optional[bool] = None, **kwargs, ) -> AsyncGenerator[ExecutionResult, None]: """Same Async generator as parent method _subscribe but requesting a reconnection if we receive a TransportClosed exception. 
""" inner_generator: AsyncGenerator[ExecutionResult, None] = super()._subscribe( document, variable_values=variable_values, operation_name=operation_name, serialize_variables=serialize_variables, parse_result=parse_result, **kwargs, ) try: async for result in inner_generator: yield result except TransportClosed: self._reconnect_request_event.set() raise finally: await inner_generator.aclose() gql-3.6.0b2/gql/dsl.py000066400000000000000000001077761460703211500144670ustar00rootroot00000000000000""" .. image:: http://www.plantuml.com/plantuml/png/ZLAzJWCn3Dxz51vXw1im50ag8L4XwC1OkLTJ8gMvAd4GwEYxGuC8pTbKtUxy_TZEvsaIYfAt7e1MII9rWfsdbF1cSRzWpvtq4GT0JENduX8GXr_g7brQlf5tw-MBOx_-HlS0LV_Kzp8xr1kZav9PfCsMWvolEA_1VylHoZCExKwKv4Tg2s_VkSkca2kof2JDb0yxZYIk3qMZYUe1B1uUZOROXn96pQMugEMUdRnUUqUf6DBXQyIz2zu5RlgUQAFVNYaeRfBI79_JrUTaeg9JZFQj5MmUc69PDmNGE2iU61fDgfri3x36gxHw3gDHD6xqqQ7P4vjKqz2-602xtkO7uo17SCLhVSv25VjRjUAFcUE73Sspb8ADBl8gTT7j2cFAOPst_Wi0 # noqa :alt: UML diagram """ import logging import re from abc import ABC, abstractmethod from math import isfinite from typing import Any, Dict, Iterable, Mapping, Optional, Tuple, Union, cast from graphql import ( ArgumentNode, BooleanValueNode, DocumentNode, EnumValueNode, FieldNode, FloatValueNode, FragmentDefinitionNode, FragmentSpreadNode, GraphQLArgument, GraphQLEnumType, GraphQLError, GraphQLField, GraphQLID, GraphQLInputObjectType, GraphQLInputType, GraphQLInterfaceType, GraphQLList, GraphQLNamedType, GraphQLNonNull, GraphQLObjectType, GraphQLScalarType, GraphQLSchema, GraphQLString, InlineFragmentNode, IntValueNode, ListTypeNode, ListValueNode, NamedTypeNode, NameNode, NonNullTypeNode, NullValueNode, ObjectFieldNode, ObjectValueNode, OperationDefinitionNode, OperationType, SelectionSetNode, StringValueNode, TypeNode, Undefined, ValueNode, VariableDefinitionNode, VariableNode, get_named_type, introspection_types, is_enum_type, is_input_object_type, is_leaf_type, is_list_type, is_non_null_type, is_wrapping_type, print_ast, ) from 
graphql.pyutils import inspect from .utils import to_camel_case log = logging.getLogger(__name__) _re_integer_string = re.compile("^-?(?:0|[1-9][0-9]*)$") def ast_from_serialized_value_untyped(serialized: Any) -> Optional[ValueNode]: """Given a serialized value, try our best to produce an AST. Anything ressembling an array (instance of Mapping) will be converted to an ObjectFieldNode. Anything ressembling a list (instance of Iterable - except str) will be converted to a ListNode. In some cases, a custom scalar can be serialized differently in the query than in the variables. In that case, this function will not work.""" if serialized is None or serialized is Undefined: return NullValueNode() if isinstance(serialized, Mapping): field_items = ( (key, ast_from_serialized_value_untyped(value)) for key, value in serialized.items() ) field_nodes = tuple( ObjectFieldNode(name=NameNode(value=field_name), value=field_value) for field_name, field_value in field_items if field_value ) return ObjectValueNode(fields=field_nodes) if isinstance(serialized, Iterable) and not isinstance(serialized, str): maybe_nodes = (ast_from_serialized_value_untyped(item) for item in serialized) nodes = tuple(node for node in maybe_nodes if node) return ListValueNode(values=nodes) if isinstance(serialized, bool): return BooleanValueNode(value=serialized) if isinstance(serialized, int): return IntValueNode(value=str(serialized)) if isinstance(serialized, float) and isfinite(serialized): value = str(serialized) if value.endswith(".0"): value = value[:-2] return FloatValueNode(value=value) if isinstance(serialized, str): return StringValueNode(value=serialized) raise TypeError(f"Cannot convert value to AST: {inspect(serialized)}.") def ast_from_value(value: Any, type_: GraphQLInputType) -> Optional[ValueNode]: """ This is a partial copy paste of the ast_from_value function in graphql-core utilities/ast_from_value.py Overwrite the if blocks that use recursion and add a new case to return a 
VariableNode when value is a DSLVariable Produce a GraphQL Value AST given a Python object. Raises a GraphQLError instead of returning None if we receive an Undefined of if we receive a Null value for a Non-Null type. """ if isinstance(value, DSLVariable): return value.set_type(type_).ast_variable_name if is_non_null_type(type_): type_ = cast(GraphQLNonNull, type_) inner_type = type_.of_type ast_value = ast_from_value(value, inner_type) if isinstance(ast_value, NullValueNode): raise GraphQLError( "Received Null value for a Non-Null type " f"{inspect(inner_type)}." ) return ast_value # only explicit None, not Undefined or NaN if value is None: return NullValueNode() # undefined if value is Undefined: raise GraphQLError(f"Received Undefined value for type {inspect(type_)}.") # Convert Python list to GraphQL list. If the GraphQLType is a list, but the value # is not a list, convert the value using the list's item type. if is_list_type(type_): type_ = cast(GraphQLList, type_) item_type = type_.of_type if isinstance(value, Iterable) and not isinstance(value, str): maybe_value_nodes = (ast_from_value(item, item_type) for item in value) value_nodes = tuple(node for node in maybe_value_nodes if node) return ListValueNode(values=value_nodes) return ast_from_value(value, item_type) # Populate the fields of the input object by creating ASTs from each value in the # Python dict according to the fields in the input type. 
if is_input_object_type(type_): if value is None or not isinstance(value, Mapping): return None type_ = cast(GraphQLInputObjectType, type_) field_items = ( (field_name, ast_from_value(value[field_name], field.type)) for field_name, field in type_.fields.items() if field_name in value ) field_nodes = tuple( ObjectFieldNode(name=NameNode(value=field_name), value=field_value) for field_name, field_value in field_items if field_value ) return ObjectValueNode(fields=field_nodes) if is_leaf_type(type_): # Since value is an internally represented value, it must be serialized to an # externally represented value before converting into an AST. serialized = type_.serialize(value) # type: ignore # if the serialized value is a string, then we should use the # type to determine if it is an enum, an ID or a normal string if isinstance(serialized, str): # Enum types use Enum literals. if is_enum_type(type_): return EnumValueNode(value=serialized) # ID types can use Int literals. if type_ is GraphQLID and _re_integer_string.match(serialized): return IntValueNode(value=serialized) return StringValueNode(value=serialized) # Some custom scalars will serialize to dicts or lists # Providing here a default conversion to AST using our best judgment # until graphql-js issue #1817 is solved # https://github.com/graphql/graphql-js/issues/1817 return ast_from_serialized_value_untyped(serialized) # Not reachable. All possible input types have been considered. raise TypeError(f"Unexpected input type: {inspect(type_)}.") def dsl_gql( *operations: "DSLExecutable", **operations_with_name: "DSLExecutable" ) -> DocumentNode: r"""Given arguments instances of :class:`DSLExecutable` containing GraphQL operations or fragments, generate a Document which can be executed later in a gql client or a gql session. 
Similar to the :func:`gql.gql` function but instead of parsing a python string to describe the request, we are using operations which have been generated dynamically using instances of :class:`DSLField`, generated by instances of :class:`DSLType` which themselves originated from a :class:`DSLSchema` class. :param \*operations: the GraphQL operations and fragments :type \*operations: DSLQuery, DSLMutation, DSLSubscription, DSLFragment :param \**operations_with_name: the GraphQL operations with an operation name :type \**operations_with_name: DSLQuery, DSLMutation, DSLSubscription :return: a Document which can be later executed or subscribed by a :class:`Client `, by an :class:`async session ` or by a :class:`sync session ` :raises TypeError: if an argument is not an instance of :class:`DSLExecutable` :raises AttributeError: if a type has not been provided in a :class:`DSLFragment` """ # Concatenate operations without and with name all_operations: Tuple["DSLExecutable", ...] = ( *operations, *(operation for operation in operations_with_name.values()), ) # Set the operation name for name, operation in operations_with_name.items(): operation.name = name # Check the type for operation in all_operations: if not isinstance(operation, DSLExecutable): raise TypeError( "Operations should be instances of DSLExecutable " "(DSLQuery, DSLMutation, DSLSubscription or DSLFragment).\n" f"Received: {type(operation)}." ) return DocumentNode( definitions=[operation.executable_ast for operation in all_operations] ) class DSLSchema: """The DSLSchema is the root of the DSL code. Attributes of the DSLSchema class are generated automatically with the `__getattr__` dunder method in order to generate instances of :class:`DSLType` """ def __init__(self, schema: GraphQLSchema): """Initialize the DSLSchema with the given schema. :param schema: a GraphQL Schema provided locally or fetched using an introspection query. 
Usually `client.schema` :type schema: GraphQLSchema :raises TypeError: if the argument is not an instance of :class:`GraphQLSchema` """ if not isinstance(schema, GraphQLSchema): raise TypeError( f"DSLSchema needs a schema as parameter. Received: {type(schema)}" ) self._schema: GraphQLSchema = schema def __getattr__(self, name: str) -> "DSLType": type_def: Optional[GraphQLNamedType] = self._schema.get_type(name) if type_def is None: raise AttributeError(f"Type '{name}' not found in the schema!") if not isinstance(type_def, (GraphQLObjectType, GraphQLInterfaceType)): raise AttributeError( f'Type "{name} ({type_def!r})" is not valid as an attribute of' " DSLSchema. Only Object types or Interface types are accepted." ) return DSLType(type_def, self) class DSLSelector(ABC): """DSLSelector is an abstract class which defines the :meth:`select ` method to select children fields in the query. Inherited by :class:`DSLRootFieldSelector `, :class:`DSLFieldSelector ` :class:`DSLFragmentSelector ` """ selection_set: SelectionSetNode def __init__( self, *fields: "DSLSelectable", **fields_with_alias: "DSLSelectableWithAlias", ): """:meta private:""" self.selection_set = SelectionSetNode(selections=()) if fields or fields_with_alias: self.select(*fields, **fields_with_alias) @abstractmethod def is_valid_field(self, field: "DSLSelectable") -> bool: raise NotImplementedError( "Any DSLSelector subclass must have a is_valid_field method" ) # pragma: no cover def select( self, *fields: "DSLSelectable", **fields_with_alias: "DSLSelectableWithAlias", ): r"""Select the fields which should be added. 
:param \*fields: fields or fragments :type \*fields: DSLSelectable :param \**fields_with_alias: fields or fragments with alias as key :type \**fields_with_alias: DSLSelectable :raises TypeError: if an argument is not an instance of :class:`DSLSelectable` :raises GraphQLError: if an argument is not a valid field """ # Concatenate fields without and with alias added_fields: Tuple["DSLSelectable", ...] = DSLField.get_aliased_fields( fields, fields_with_alias ) # Check that each field is valid for field in added_fields: if not isinstance(field, DSLSelectable): raise TypeError( "Fields should be instances of DSLSelectable. " f"Received: {type(field)}" ) if not self.is_valid_field(field): raise GraphQLError(f"Invalid field for {self!r}: {field!r}") # Get a list of AST Nodes for each added field added_selections: Tuple[ Union[FieldNode, InlineFragmentNode, FragmentSpreadNode], ... ] = tuple(field.ast_field for field in added_fields) # Update the current selection list with new selections self.selection_set.selections = self.selection_set.selections + added_selections log.debug(f"Added fields: {added_fields} in {self!r}") class DSLExecutable(DSLSelector): """Interface for the root elements which can be executed in the :func:`dsl_gql ` function Inherited by :class:`DSLOperation ` and :class:`DSLFragment ` """ variable_definitions: "DSLVariableDefinitions" name: Optional[str] selection_set: SelectionSetNode @property @abstractmethod def executable_ast(self): """Generates the ast for :func:`dsl_gql `.""" raise NotImplementedError( "Any DSLExecutable subclass must have executable_ast property" ) # pragma: no cover def __init__( self, *fields: "DSLSelectable", **fields_with_alias: "DSLSelectableWithAlias", ): r"""Given arguments of type :class:`DSLSelectable` containing GraphQL requests, generate an operation which can be converted to a Document using the :func:`dsl_gql `. 
The fields arguments should be either be fragments or fields of root GraphQL types (Query, Mutation or Subscription) and correspond to the operation_type of this operation. :param \*fields: root fields or fragments :type \*fields: DSLSelectable :param \**fields_with_alias: root fields or fragments with alias as key :type \**fields_with_alias: DSLSelectable :raises TypeError: if an argument is not an instance of :class:`DSLSelectable` :raises AssertionError: if an argument is not a field which correspond to the operation type """ self.name = None self.variable_definitions = DSLVariableDefinitions() DSLSelector.__init__(self, *fields, **fields_with_alias) class DSLRootFieldSelector(DSLSelector): """Class used to define the :meth:`is_valid_field ` method for root fields for the :meth:`select ` method. Inherited by :class:`DSLOperation ` """ def is_valid_field(self, field: "DSLSelectable") -> bool: """Check that a field is valid for a root field. For operations, the fields arguments should be fields of root GraphQL types (Query, Mutation or Subscription) and correspond to the operation_type of this operation. the :code:`__typename` field can only be added to Query or Mutation. the :code:`__schema` and :code:`__type` field can only be added to Query. """ assert isinstance(self, DSLOperation) operation_name = self.operation_type.name if isinstance(field, DSLMetaField): if field.name in ["__schema", "__type"]: return operation_name == "QUERY" if field.name == "__typename": return operation_name != "SUBSCRIPTION" elif isinstance(field, DSLField): assert field.dsl_type is not None schema = field.dsl_type._dsl_schema._schema root_type = None if operation_name == "QUERY": root_type = schema.query_type elif operation_name == "MUTATION": root_type = schema.mutation_type elif operation_name == "SUBSCRIPTION": root_type = schema.subscription_type if root_type is None: log.error( f"Root type of type {operation_name} not found in the schema!" 
) return False return field.parent_type.name == root_type.name return False class DSLOperation(DSLExecutable, DSLRootFieldSelector): """Interface for GraphQL operations. Inherited by :class:`DSLQuery `, :class:`DSLMutation ` and :class:`DSLSubscription ` """ operation_type: OperationType @property def executable_ast(self) -> OperationDefinitionNode: """Generates the ast for :func:`dsl_gql `.""" return OperationDefinitionNode( operation=OperationType(self.operation_type), selection_set=self.selection_set, variable_definitions=self.variable_definitions.get_ast_definitions(), **({"name": NameNode(value=self.name)} if self.name else {}), directives=(), ) def __repr__(self) -> str: return f"<{self.__class__.__name__}>" class DSLQuery(DSLOperation): operation_type = OperationType.QUERY class DSLMutation(DSLOperation): operation_type = OperationType.MUTATION class DSLSubscription(DSLOperation): operation_type = OperationType.SUBSCRIPTION class DSLVariable: """The DSLVariable represents a single variable defined in a GraphQL operation Instances of this class are generated for you automatically as attributes of the :class:`DSLVariableDefinitions` The type of the variable is set by the :class:`DSLField` instance that receives it in the :meth:`args ` method. 
""" def __init__(self, name: str): """:meta private:""" self.name = name self.ast_variable_type: Optional[TypeNode] = None self.ast_variable_name = VariableNode(name=NameNode(value=self.name)) self.default_value = None self.type: Optional[GraphQLInputType] = None def to_ast_type(self, type_: GraphQLInputType) -> TypeNode: if is_wrapping_type(type_): if isinstance(type_, GraphQLList): return ListTypeNode(type=self.to_ast_type(type_.of_type)) elif isinstance(type_, GraphQLNonNull): return NonNullTypeNode(type=self.to_ast_type(type_.of_type)) assert isinstance( type_, (GraphQLScalarType, GraphQLEnumType, GraphQLInputObjectType) ) return NamedTypeNode(name=NameNode(value=type_.name)) def set_type(self, type_: GraphQLInputType) -> "DSLVariable": self.type = type_ self.ast_variable_type = self.to_ast_type(type_) return self def default(self, default_value: Any) -> "DSLVariable": self.default_value = default_value return self class DSLVariableDefinitions: """The DSLVariableDefinitions represents variable definitions in a GraphQL operation Instances of this class have to be created and set as the `variable_definitions` attribute of a DSLOperation instance Attributes of the DSLVariableDefinitions class are generated automatically with the `__getattr__` dunder method in order to generate instances of :class:`DSLVariable`, that can then be used as values in the :meth:`args ` method. 
""" def __init__(self): """:meta private:""" self.variables: Dict[str, DSLVariable] = {} def __getattr__(self, name: str) -> "DSLVariable": if name not in self.variables: self.variables[name] = DSLVariable(name) return self.variables[name] def get_ast_definitions(self) -> Tuple[VariableDefinitionNode, ...]: """ :meta private: Return a list of VariableDefinitionNodes for each variable with a type """ return tuple( VariableDefinitionNode( type=var.ast_variable_type, variable=var.ast_variable_name, default_value=None if var.default_value is None else ast_from_value(var.default_value, var.type), directives=(), ) for var in self.variables.values() if var.type is not None # only variables used ) class DSLType: """The DSLType represents a GraphQL type for the DSL code. It can be a root type (Query, Mutation or Subscription). Or it can be any other object type (Human in the StarWars schema). Or it can be an interface type (Character in the StarWars schema). Instances of this class are generated for you automatically as attributes of the :class:`DSLSchema` Attributes of the DSLType class are generated automatically with the `__getattr__` dunder method in order to generate instances of :class:`DSLField` """ def __init__( self, graphql_type: Union[GraphQLObjectType, GraphQLInterfaceType], dsl_schema: DSLSchema, ): """Initialize the DSLType with the GraphQL type. .. warning:: Don't instantiate this class yourself. Use attributes of the :class:`DSLSchema` instead. 
:param graphql_type: the GraphQL type definition from the schema :param dsl_schema: reference to the DSLSchema which created this type """ self._type: Union[GraphQLObjectType, GraphQLInterfaceType] = graphql_type self._dsl_schema = dsl_schema log.debug(f"Creating {self!r})") def __getattr__(self, name: str) -> "DSLField": camel_cased_name = to_camel_case(name) if name in self._type.fields: formatted_name = name field = self._type.fields[name] elif camel_cased_name in self._type.fields: formatted_name = camel_cased_name field = self._type.fields[camel_cased_name] else: raise AttributeError( f"Field {name} does not exist in type {self._type.name}." ) return DSLField(formatted_name, self._type, field, self) def __repr__(self) -> str: return f"<{self.__class__.__name__} {self._type!r}>" class DSLSelectable(ABC): """DSLSelectable is an abstract class which indicates that the subclasses can be used as arguments of the :meth:`select ` method. Inherited by :class:`DSLField `, :class:`DSLFragment ` :class:`DSLInlineFragment ` """ ast_field: Union[FieldNode, InlineFragmentNode, FragmentSpreadNode] @staticmethod def get_aliased_fields( fields: Iterable["DSLSelectable"], fields_with_alias: Dict[str, "DSLSelectableWithAlias"], ) -> Tuple["DSLSelectable", ...]: """ :meta private: Concatenate all the fields (with or without alias) in a Tuple. Set the requested alias for the fields with alias. """ return ( *fields, *(field.alias(alias) for alias, field in fields_with_alias.items()), ) def __str__(self) -> str: return print_ast(self.ast_field) class DSLFragmentSelector(DSLSelector): """Class used to define the :meth:`is_valid_field ` method for fragments for the :meth:`select ` method. 
Inherited by :class:`DSLFragment `, :class:`DSLInlineFragment ` """ def is_valid_field(self, field: DSLSelectable) -> bool: """Check that a field is valid.""" assert isinstance(self, (DSLFragment, DSLInlineFragment)) if isinstance(field, (DSLFragment, DSLInlineFragment)): return True assert isinstance(field, DSLField) if isinstance(field, DSLMetaField): return field.name == "__typename" fragment_type = self._type assert fragment_type is not None if field.name in fragment_type.fields.keys(): return fragment_type.fields[field.name].type == field.field.type return False class DSLFieldSelector(DSLSelector): """Class used to define the :meth:`is_valid_field ` method for fields for the :meth:`select ` method. Inherited by :class:`DSLField `, """ def is_valid_field(self, field: DSLSelectable) -> bool: """Check that a field is valid.""" assert isinstance(self, DSLField) if isinstance(field, (DSLFragment, DSLInlineFragment)): return True assert isinstance(field, DSLField) if isinstance(field, DSLMetaField): return field.name == "__typename" parent_type = get_named_type(self.field.type) if not isinstance(parent_type, (GraphQLInterfaceType, GraphQLObjectType)): return False if field.name in parent_type.fields.keys(): return parent_type.fields[field.name].type == field.field.type return False class DSLSelectableWithAlias(DSLSelectable): """DSLSelectableWithAlias is an abstract class which indicates that the subclasses can be selected with an alias. """ ast_field: FieldNode def alias(self, alias: str) -> "DSLSelectableWithAlias": """Set an alias .. note:: You can also pass the alias directly at the :meth:`select ` method. 
:code:`ds.Query.human.select(my_name=ds.Character.name)` is equivalent to: :code:`ds.Query.human.select(ds.Character.name.alias("my_name"))` :param alias: the alias :type alias: str :return: itself """ self.ast_field.alias = NameNode(value=alias) return self class DSLField(DSLSelectableWithAlias, DSLFieldSelector): """The DSLField represents a GraphQL field for the DSL code. Instances of this class are generated for you automatically as attributes of the :class:`DSLType` If this field contains children fields, then you need to select which ones you want in the request using the :meth:`select ` method. """ _type: Union[GraphQLObjectType, GraphQLInterfaceType] ast_field: FieldNode field: GraphQLField def __init__( self, name: str, parent_type: Union[GraphQLObjectType, GraphQLInterfaceType], field: GraphQLField, dsl_type: Optional[DSLType] = None, ): """Initialize the DSLField. .. warning:: Don't instantiate this class yourself. Use attributes of the :class:`DSLType` instead. :param name: the name of the field :param parent_type: the GraphQL type definition from the schema of the parent type of the field :param field: the GraphQL field definition from the schema :param dsl_type: reference of the DSLType instance which created this field """ self.parent_type = parent_type self.field = field self.ast_field = FieldNode( name=NameNode(value=name), arguments=(), directives=(), ) self.dsl_type = dsl_type log.debug(f"Creating {self!r}") DSLSelector.__init__(self) @property def name(self): """:meta private:""" return self.ast_field.name.value def __call__(self, **kwargs) -> "DSLField": return self.args(**kwargs) def args(self, **kwargs) -> "DSLField": r"""Set the arguments of a field The arguments are parsed to be stored in the AST of this field. .. note:: You can also call the field directly with your arguments. 
:code:`ds.Query.human(id=1000)` is equivalent to: :code:`ds.Query.human.args(id=1000)` :param \**kwargs: the arguments (keyword=value) :return: itself :raises KeyError: if any of the provided arguments does not exist for this field. """ assert self.ast_field.arguments is not None self.ast_field.arguments = self.ast_field.arguments + tuple( ArgumentNode( name=NameNode(value=name), value=ast_from_value(value, self._get_argument(name).type), ) for name, value in kwargs.items() ) log.debug(f"Added arguments {kwargs} in field {self!r})") return self def _get_argument(self, name: str) -> GraphQLArgument: """Method used to return the GraphQLArgument definition of an argument from its name. :raises KeyError: if the provided argument does not exist for this field. """ arg = self.field.args.get(name) if arg is None: raise KeyError(f"Argument {name} does not exist in {self.field}.") return arg def select( self, *fields: "DSLSelectable", **fields_with_alias: "DSLSelectableWithAlias" ) -> "DSLField": """Calling :meth:`select ` method with corrected typing hints """ super().select(*fields, **fields_with_alias) self.ast_field.selection_set = self.selection_set return self def __repr__(self) -> str: return f"<{self.__class__.__name__} {self.parent_type.name}" f"::{self.name}>" class DSLMetaField(DSLField): """DSLMetaField represents a GraphQL meta-field for the DSL code. meta-fields are reserved field in the GraphQL type system prefixed with "__" two underscores and used for introspection. """ meta_type = GraphQLObjectType( "meta_field", fields={ "__typename": GraphQLField(GraphQLString), "__schema": GraphQLField( cast(GraphQLObjectType, introspection_types["__Schema"]) ), "__type": GraphQLField( cast(GraphQLObjectType, introspection_types["__Type"]), args={"name": GraphQLArgument(type_=GraphQLNonNull(GraphQLString))}, ), }, ) def __init__(self, name: str): """Initialize the meta-field. 
:param name: the name between __typename, __schema or __type """ try: field = self.meta_type.fields[name] except KeyError: raise GraphQLError(f'Invalid meta-field "{name}"') super().__init__(name, self.meta_type, field) class DSLInlineFragment(DSLSelectable, DSLFragmentSelector): """DSLInlineFragment represents an inline fragment for the DSL code.""" _type: Union[GraphQLObjectType, GraphQLInterfaceType] ast_field: InlineFragmentNode def __init__( self, *fields: "DSLSelectable", **fields_with_alias: "DSLSelectableWithAlias", ): r"""Initialize the DSLInlineFragment. :param \*fields: new children fields :type \*fields: DSLSelectable (DSLField, DSLFragment or DSLInlineFragment) :param \**fields_with_alias: new children fields with alias as key :type \**fields_with_alias: DSLField """ log.debug(f"Creating {self!r}") self.ast_field = InlineFragmentNode(directives=()) DSLSelector.__init__(self, *fields, **fields_with_alias) def select( self, *fields: "DSLSelectable", **fields_with_alias: "DSLSelectableWithAlias" ) -> "DSLInlineFragment": """Calling :meth:`select ` method with corrected typing hints """ super().select(*fields, **fields_with_alias) self.ast_field.selection_set = self.selection_set return self def on(self, type_condition: DSLType) -> "DSLInlineFragment": """Provides the GraphQL type of this inline fragment.""" self._type = type_condition._type self.ast_field.type_condition = NamedTypeNode( name=NameNode(value=self._type.name) ) return self def __repr__(self) -> str: type_info = "" try: type_info += f" on {self._type.name}" except AttributeError: pass return f"<{self.__class__.__name__}{type_info}>" class DSLFragment(DSLSelectable, DSLFragmentSelector, DSLExecutable): """DSLFragment represents a named GraphQL fragment for the DSL code.""" _type: Optional[Union[GraphQLObjectType, GraphQLInterfaceType]] ast_field: FragmentSpreadNode name: str def __init__( self, name: str, ): r"""Initialize the DSLFragment. 
:param name: the name of the fragment :type name: str """ DSLExecutable.__init__(self) self.name = name self._type = None log.debug(f"Creating {self!r}") @property # type: ignore def ast_field(self) -> FragmentSpreadNode: # type: ignore """ast_field property will generate a FragmentSpreadNode with the provided name. Note: We need to ignore the type because of `issue #4125 of mypy `_. """ spread_node = FragmentSpreadNode(directives=()) spread_node.name = NameNode(value=self.name) return spread_node def select( self, *fields: "DSLSelectable", **fields_with_alias: "DSLSelectableWithAlias" ) -> "DSLFragment": """Calling :meth:`select ` method with corrected typing hints """ if self._type is None: raise AttributeError( "Missing type condition. Please use .on(type_condition) method" ) super().select(*fields, **fields_with_alias) return self def on(self, type_condition: DSLType) -> "DSLFragment": """Provides the GraphQL type of this fragment. :param type_condition: the provided type :type type_condition: DSLType """ self._type = type_condition._type return self @property def executable_ast(self) -> FragmentDefinitionNode: """Generates the ast for :func:`dsl_gql `. :raises AttributeError: if a type has not been provided """ assert self.name is not None if self._type is None: raise AttributeError( "Missing type condition. Please use .on(type_condition) method" ) fragment_variable_definitions = self.variable_definitions.get_ast_definitions() if len(fragment_variable_definitions) == 0: """Fragment variable definitions are obsolete and only supported on graphql-core if the Parser is initialized with: allow_legacy_fragment_variables=True. We will not provide variable_definitions instead of providing an empty tuple to be coherent with how it works by default on graphql-core. 
""" variable_definition_kwargs = {} else: variable_definition_kwargs = { "variable_definitions": fragment_variable_definitions } return FragmentDefinitionNode( type_condition=NamedTypeNode(name=NameNode(value=self._type.name)), selection_set=self.selection_set, **variable_definition_kwargs, name=NameNode(value=self.name), directives=(), ) def __repr__(self) -> str: return f"<{self.__class__.__name__} {self.name!s}>" gql-3.6.0b2/gql/gql.py000066400000000000000000000016441460703211500144530ustar00rootroot00000000000000from __future__ import annotations from graphql import DocumentNode, Source, parse def gql(request_string: str | Source) -> DocumentNode: """Given a string containing a GraphQL request, parse it into a Document. :param request_string: the GraphQL request as a String :type request_string: str | Source :return: a Document which can be later executed or subscribed by a :class:`Client `, by an :class:`async session ` or by a :class:`sync session ` :raises GraphQLError: if a syntax error is encountered. """ if isinstance(request_string, Source): source = request_string elif isinstance(request_string, str): source = Source(request_string, "GraphQL request") else: raise TypeError("Request must be passed as a string or Source object.") return parse(source) gql-3.6.0b2/gql/graphql_request.py000066400000000000000000000021301460703211500170650ustar00rootroot00000000000000from dataclasses import dataclass from typing import Any, Dict, Optional from graphql import DocumentNode, GraphQLSchema from .utilities import serialize_variable_values @dataclass(frozen=True) class GraphQLRequest: """GraphQL Request to be executed.""" document: DocumentNode """GraphQL query as AST Node object.""" variable_values: Optional[Dict[str, Any]] = None """Dictionary of input parameters (Default: None).""" operation_name: Optional[str] = None """ Name of the operation that shall be executed. Only required in multi-operation documents (Default: None). 
""" def serialize_variable_values(self, schema: GraphQLSchema) -> "GraphQLRequest": assert self.variable_values return GraphQLRequest( document=self.document, variable_values=serialize_variable_values( schema=schema, document=self.document, variable_values=self.variable_values, operation_name=self.operation_name, ), operation_name=self.operation_name, ) gql-3.6.0b2/gql/py.typed000066400000000000000000000000761460703211500150130ustar00rootroot00000000000000# Marker file for PEP 561. The gql package uses inline types. gql-3.6.0b2/gql/transport/000077500000000000000000000000001460703211500153455ustar00rootroot00000000000000gql-3.6.0b2/gql/transport/__init__.py000066400000000000000000000001701460703211500174540ustar00rootroot00000000000000from .async_transport import AsyncTransport from .transport import Transport __all__ = ["AsyncTransport", "Transport"] gql-3.6.0b2/gql/transport/aiohttp.py000066400000000000000000000326141460703211500173750ustar00rootroot00000000000000import asyncio import functools import io import json import logging from ssl import SSLContext from typing import Any, AsyncGenerator, Callable, Dict, Optional, Tuple, Type, Union import aiohttp from aiohttp.client_exceptions import ClientResponseError from aiohttp.client_reqrep import Fingerprint from aiohttp.helpers import BasicAuth from aiohttp.typedefs import LooseCookies, LooseHeaders from graphql import DocumentNode, ExecutionResult, print_ast from multidict import CIMultiDictProxy from ..utils import extract_files from .appsync_auth import AppSyncAuthentication from .async_transport import AsyncTransport from .exceptions import ( TransportAlreadyConnected, TransportClosed, TransportProtocolError, TransportServerError, ) log = logging.getLogger(__name__) class AIOHTTPTransport(AsyncTransport): """:ref:`Async Transport ` to execute GraphQL queries on remote servers with an HTTP connection. This transport use the aiohttp library with asyncio. """ file_classes: Tuple[Type[Any], ...] 
= ( io.IOBase, aiohttp.StreamReader, AsyncGenerator, ) def __init__( self, url: str, headers: Optional[LooseHeaders] = None, cookies: Optional[LooseCookies] = None, auth: Optional[Union[BasicAuth, "AppSyncAuthentication"]] = None, ssl: Union[SSLContext, bool, Fingerprint] = False, timeout: Optional[int] = None, ssl_close_timeout: Optional[Union[int, float]] = 10, json_serialize: Callable = json.dumps, json_deserialize: Callable = json.loads, client_session_args: Optional[Dict[str, Any]] = None, ) -> None: """Initialize the transport with the given aiohttp parameters. :param url: The GraphQL server URL. Example: 'https://server.com:PORT/path'. :param headers: Dict of HTTP Headers. :param cookies: Dict of HTTP cookies. :param auth: BasicAuth object to enable Basic HTTP auth if needed Or Appsync Authentication class :param ssl: ssl_context of the connection. Use ssl=False to disable encryption :param ssl_close_timeout: Timeout in seconds to wait for the ssl connection to close properly :param json_serialize: Json serializer callable. By default json.dumps() function :param json_deserialize: Json deserializer callable. By default json.loads() function :param client_session_args: Dict of extra args passed to `aiohttp.ClientSession`_ .. 
_aiohttp.ClientSession: https://docs.aiohttp.org/en/stable/client_reference.html#aiohttp.ClientSession """ self.url: str = url self.headers: Optional[LooseHeaders] = headers self.cookies: Optional[LooseCookies] = cookies self.auth: Optional[Union[BasicAuth, "AppSyncAuthentication"]] = auth self.ssl: Union[SSLContext, bool, Fingerprint] = ssl self.timeout: Optional[int] = timeout self.ssl_close_timeout: Optional[Union[int, float]] = ssl_close_timeout self.client_session_args = client_session_args self.session: Optional[aiohttp.ClientSession] = None self.response_headers: Optional[CIMultiDictProxy[str]] self.json_serialize: Callable = json_serialize self.json_deserialize: Callable = json_deserialize async def connect(self) -> None: """Coroutine which will create an aiohttp ClientSession() as self.session. Don't call this coroutine directly on the transport, instead use :code:`async with` on the client and this coroutine will be executed to create the session. Should be cleaned with a call to the close coroutine. """ if self.session is None: client_session_args: Dict[str, Any] = { "cookies": self.cookies, "headers": self.headers, "auth": None if isinstance(self.auth, AppSyncAuthentication) else self.auth, "json_serialize": self.json_serialize, } if self.timeout is not None: client_session_args["timeout"] = aiohttp.ClientTimeout( total=self.timeout ) # Adding custom parameters passed from init if self.client_session_args: client_session_args.update(self.client_session_args) # type: ignore log.debug("Connecting transport") self.session = aiohttp.ClientSession(**client_session_args) else: raise TransportAlreadyConnected("Transport is already connected") @staticmethod def create_aiohttp_closed_event(session) -> asyncio.Event: """Work around aiohttp issue that doesn't properly close transports on exit. See https://github.com/aio-libs/aiohttp/issues/1925#issuecomment-639080209 Returns: An event that will be set once all transports have been properly closed. 
""" ssl_transports = 0 all_is_lost = asyncio.Event() def connection_lost(exc, orig_lost): nonlocal ssl_transports try: orig_lost(exc) finally: ssl_transports -= 1 if ssl_transports == 0: all_is_lost.set() def eof_received(orig_eof_received): try: orig_eof_received() except AttributeError: # pragma: no cover # It may happen that eof_received() is called after # _app_protocol and _transport are set to None. pass for conn in session.connector._conns.values(): for handler, _ in conn: proto = getattr(handler.transport, "_ssl_protocol", None) if proto is None: continue ssl_transports += 1 orig_lost = proto.connection_lost orig_eof_received = proto.eof_received proto.connection_lost = functools.partial( connection_lost, orig_lost=orig_lost ) proto.eof_received = functools.partial( eof_received, orig_eof_received=orig_eof_received ) if ssl_transports == 0: all_is_lost.set() return all_is_lost async def close(self) -> None: """Coroutine which will close the aiohttp session. Don't call this coroutine directly on the transport, instead use :code:`async with` on the client and this coroutine will be executed when you exit the async context manager. """ if self.session is not None: log.debug("Closing transport") if ( self.client_session_args and self.client_session_args.get("connector_owner") is False ): log.debug("connector_owner is False -> not closing connector") else: closed_event = self.create_aiohttp_closed_event(self.session) await self.session.close() try: await asyncio.wait_for(closed_event.wait(), self.ssl_close_timeout) except asyncio.TimeoutError: pass self.session = None async def execute( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, extra_args: Optional[Dict[str, Any]] = None, upload_files: bool = False, ) -> ExecutionResult: """Execute the provided document AST against the configured remote server using the current session. 
This uses the aiohttp library to perform a HTTP POST request asynchronously to the remote server. Don't call this coroutine directly on the transport, instead use :code:`execute` on a client or a session. :param document: the parsed GraphQL request :param variable_values: An optional Dict of variable values :param operation_name: An optional Operation name for the request :param extra_args: additional arguments to send to the aiohttp post method :param upload_files: Set to True if you want to put files in the variable values :returns: an ExecutionResult object. """ query_str = print_ast(document) payload: Dict[str, Any] = { "query": query_str, } if operation_name: payload["operationName"] = operation_name if upload_files: # If the upload_files flag is set, then we need variable_values assert variable_values is not None # If we upload files, we will extract the files present in the # variable_values dict and replace them by null values nulled_variable_values, files = extract_files( variables=variable_values, file_classes=self.file_classes, ) # Save the nulled variable values in the payload payload["variables"] = nulled_variable_values # Prepare aiohttp to send multipart-encoded data data = aiohttp.FormData() # Generate the file map # path is nested in a list because the spec allows multiple pointers # to the same file. But we don't support that. 
# Will generate something like {"0": ["variables.file"]} file_map = {str(i): [path] for i, path in enumerate(files)} # Enumerate the file streams # Will generate something like {'0': <_io.BufferedReader ...>} file_streams = {str(i): files[path] for i, path in enumerate(files)} # Add the payload to the operations field operations_str = self.json_serialize(payload) log.debug("operations %s", operations_str) data.add_field( "operations", operations_str, content_type="application/json" ) # Add the file map field file_map_str = self.json_serialize(file_map) log.debug("file_map %s", file_map_str) data.add_field("map", file_map_str, content_type="application/json") # Add the extracted files as remaining fields for k, f in file_streams.items(): name = getattr(f, "name", k) content_type = getattr(f, "content_type", None) data.add_field(k, f, filename=name, content_type=content_type) post_args: Dict[str, Any] = {"data": data} else: if variable_values: payload["variables"] = variable_values if log.isEnabledFor(logging.INFO): log.info(">>> %s", self.json_serialize(payload)) post_args = {"json": payload} # Pass post_args to aiohttp post method if extra_args: post_args.update(extra_args) # Add headers for AppSync if requested if isinstance(self.auth, AppSyncAuthentication): post_args["headers"] = self.auth.get_headers( self.json_serialize(payload), {"content-type": "application/json"}, ) if self.session is None: raise TransportClosed("Transport is not connected") async with self.session.post(self.url, ssl=self.ssl, **post_args) as resp: # Saving latest response headers in the transport self.response_headers = resp.headers async def raise_response_error(resp: aiohttp.ClientResponse, reason: str): # We raise a TransportServerError if the status code is 400 or higher # We raise a TransportProtocolError in the other cases try: # Raise a ClientResponseError if response status is 400 or higher resp.raise_for_status() except ClientResponseError as e: raise TransportServerError(str(e), 
e.status) from e result_text = await resp.text() raise TransportProtocolError( f"Server did not return a GraphQL result: " f"{reason}: " f"{result_text}" ) try: result = await resp.json(loads=self.json_deserialize, content_type=None) if log.isEnabledFor(logging.INFO): result_text = await resp.text() log.info("<<< %s", result_text) except Exception: await raise_response_error(resp, "Not a JSON answer") if result is None: await raise_response_error(resp, "Not a JSON answer") if "errors" not in result and "data" not in result: await raise_response_error(resp, 'No "data" or "errors" keys in answer') return ExecutionResult( errors=result.get("errors"), data=result.get("data"), extensions=result.get("extensions"), ) def subscribe( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, ) -> AsyncGenerator[ExecutionResult, None]: """Subscribe is not supported on HTTP. :meta private: """ raise NotImplementedError(" The HTTP transport does not support subscriptions") gql-3.6.0b2/gql/transport/appsync_auth.py000066400000000000000000000165331460703211500204250ustar00rootroot00000000000000import json import logging import re from abc import ABC, abstractmethod from base64 import b64encode from typing import Any, Callable, Dict, Optional try: import botocore except ImportError: # pragma: no cover # botocore is only needed for the IAM AppSync authentication method pass log = logging.getLogger("gql.transport.appsync") class AppSyncAuthentication(ABC): """AWS authentication abstract base class All AWS authentication class should have a :meth:`get_headers ` method which defines the headers used in the authentication process.""" def get_auth_url(self, url: str) -> str: """ :return: a url with base64 encoded headers used to establish a websocket connection to the appsync-realtime-api. 
""" headers = self.get_headers() encoded_headers = b64encode( json.dumps(headers, separators=(",", ":")).encode() ).decode() url_base = url.replace("https://", "wss://").replace( "appsync-api", "appsync-realtime-api" ) return f"{url_base}?header={encoded_headers}&payload=e30=" @abstractmethod def get_headers( self, data: Optional[str] = None, headers: Optional[Dict[str, Any]] = None ) -> Dict[str, Any]: raise NotImplementedError() # pragma: no cover class AppSyncApiKeyAuthentication(AppSyncAuthentication): """AWS authentication class using an API key""" def __init__(self, host: str, api_key: str) -> None: """ :param host: the host, something like: XXXXXXXXXXXXXXXXXXXXXXXXXX.appsync-api.REGION.amazonaws.com :param api_key: the API key """ self._host = host.replace("appsync-realtime-api", "appsync-api") self.api_key = api_key def get_headers( self, data: Optional[str] = None, headers: Optional[Dict[str, Any]] = None ) -> Dict[str, Any]: return {"host": self._host, "x-api-key": self.api_key} class AppSyncJWTAuthentication(AppSyncAuthentication): """AWS authentication class using a JWT access token. It can be used either for: - Amazon Cognito user pools - OpenID Connect (OIDC) """ def __init__(self, host: str, jwt: str) -> None: """ :param host: the host, something like: XXXXXXXXXXXXXXXXXXXXXXXXXX.appsync-api.REGION.amazonaws.com :param jwt: the JWT Access Token """ self._host = host.replace("appsync-realtime-api", "appsync-api") self.jwt = jwt def get_headers( self, data: Optional[str] = None, headers: Optional[Dict[str, Any]] = None ) -> Dict[str, Any]: return {"host": self._host, "Authorization": self.jwt} class AppSyncIAMAuthentication(AppSyncAuthentication): """AWS authentication class using IAM. .. note:: There is no need for you to use this class directly, you could instead intantiate the :class:`gql.transport.appsync.AppSyncWebsocketsTransport` without an auth argument. 
During initialization, this class will use botocore to attempt to find your IAM credentials, either from environment variables or from your AWS credentials file. """ def __init__( self, host: str, region_name: Optional[str] = None, signer: Optional["botocore.auth.BaseSigner"] = None, request_creator: Optional[ Callable[[Dict[str, Any]], "botocore.awsrequest.AWSRequest"] ] = None, credentials: Optional["botocore.credentials.Credentials"] = None, session: Optional["botocore.session.Session"] = None, ) -> None: """Initialize itself, saving the found credentials used to sign the headers later. if no credentials are found, then a NoCredentialsError is raised. """ from botocore.auth import SigV4Auth from botocore.awsrequest import create_request_object from botocore.session import get_session self._host = host.replace("appsync-realtime-api", "appsync-api") self._session = session if session else get_session() self._credentials = ( credentials if credentials else self._session.get_credentials() ) self._service_name = "appsync" self._region_name = region_name or self._detect_region_name() self._signer = ( signer if signer else SigV4Auth(self._credentials, self._service_name, self._region_name) ) self._request_creator = ( request_creator if request_creator else create_request_object ) def _detect_region_name(self): """Try to detect the correct region_name. First try to extract the region_name from the host. If that does not work, then try to get the region_name from the aws configuration (~/.aws/config file) or the AWS_DEFAULT_REGION environment variable. If no region_name was found, then raise a NoRegionError exception.""" from botocore.exceptions import NoRegionError # Regular expression from botocore.utils.validate_region m = re.search( r"appsync-api\.((?![0-9]+$)(?!-)[a-zA-Z0-9-]{,63}(? 
Dict[str, Any]: from botocore.exceptions import NoCredentialsError # Default headers for a websocket connection headers = headers or { "accept": "application/json, text/javascript", "content-encoding": "amz-1.0", "content-type": "application/json; charset=UTF-8", } request: "botocore.awsrequest.AWSRequest" = self._request_creator( { "method": "POST", "url": f"https://{self._host}/graphql{'' if data else '/connect'}", "headers": headers, "context": {}, "body": data or "{}", } ) try: self._signer.add_auth(request) except NoCredentialsError: log.warning( "Credentials not found for the IAM auth. " "Do you have default AWS credentials configured?", ) raise headers = dict(request.headers) headers["host"] = self._host if log.isEnabledFor(logging.DEBUG): headers_log = [] headers_log.append("\n\nSigned headers:") for key, value in headers.items(): headers_log.append(f" {key}: {value}") headers_log.append("\n") log.debug("\n".join(headers_log)) return headers gql-3.6.0b2/gql/transport/appsync_websockets.py000066400000000000000000000156651460703211500216420ustar00rootroot00000000000000import json import logging from ssl import SSLContext from typing import Any, Dict, Optional, Tuple, Union, cast from urllib.parse import urlparse from graphql import DocumentNode, ExecutionResult, print_ast from .appsync_auth import AppSyncAuthentication, AppSyncIAMAuthentication from .exceptions import TransportProtocolError, TransportServerError from .websockets import WebsocketsTransport, WebsocketsTransportBase log = logging.getLogger("gql.transport.appsync") try: import botocore except ImportError: # pragma: no cover # botocore is only needed for the IAM AppSync authentication method pass class AppSyncWebsocketsTransport(WebsocketsTransportBase): """:ref:`Async Transport ` used to execute GraphQL subscription on AWS appsync realtime endpoint. This transport uses asyncio and the websockets library in order to send requests on a websocket connection. 
""" auth: Optional[AppSyncAuthentication] def __init__( self, url: str, auth: Optional[AppSyncAuthentication] = None, session: Optional["botocore.session.Session"] = None, ssl: Union[SSLContext, bool] = False, connect_timeout: int = 10, close_timeout: int = 10, ack_timeout: int = 10, keep_alive_timeout: Optional[Union[int, float]] = None, connect_args: Dict[str, Any] = {}, ) -> None: """Initialize the transport with the given parameters. :param url: The GraphQL endpoint URL. Example: https://XXXXXXXXXXXXXXXXXXXXXXXXXX.appsync-api.REGION.amazonaws.com/graphql :param auth: Optional AWS authentication class which will provide the necessary headers to be correctly authenticated. If this argument is not provided, then we will try to authenticate using IAM. :param ssl: ssl_context of the connection. :param connect_timeout: Timeout in seconds for the establishment of the websocket connection. If None is provided this will wait forever. :param close_timeout: Timeout in seconds for the close. If None is provided this will wait forever. :param ack_timeout: Timeout in seconds to wait for the connection_ack message from the server. If None is provided this will wait forever. :param keep_alive_timeout: Optional Timeout in seconds to receive a sign of liveness from the server. 
:param connect_args: Other parameters forwarded to websockets.connect """ if not auth: # Extract host from url host = str(urlparse(url).netloc) # May raise NoRegionError or NoCredentialsError or ImportError auth = AppSyncIAMAuthentication(host=host, session=session) self.auth = auth url = self.auth.get_auth_url(url) super().__init__( url, ssl=ssl, connect_timeout=connect_timeout, close_timeout=close_timeout, ack_timeout=ack_timeout, keep_alive_timeout=keep_alive_timeout, connect_args=connect_args, ) # Using the same 'graphql-ws' protocol as the apollo protocol self.supported_subprotocols = [ WebsocketsTransport.APOLLO_SUBPROTOCOL, ] self.subprotocol = WebsocketsTransport.APOLLO_SUBPROTOCOL def _parse_answer( self, answer: str ) -> Tuple[str, Optional[int], Optional[ExecutionResult]]: """Parse the answer received from the server. Difference between apollo protocol and aws protocol: - aws protocol can return an error without an id - aws protocol will send start_ack messages Returns a list consisting of: - the answer_type: - 'connection_ack', - 'connection_error', - 'start_ack', - 'ka', - 'data', - 'error', - 'complete' - the answer id (Integer) if received or None - an execution Result if the answer_type is 'data' or None """ answer_type: str = "" try: json_answer = json.loads(answer) answer_type = str(json_answer.get("type")) if answer_type == "start_ack": return ("start_ack", None, None) elif answer_type == "error" and "id" not in json_answer: error_payload = json_answer.get("payload") raise TransportServerError(f"Server error: '{error_payload!r}'") else: return WebsocketsTransport._parse_answer_apollo( cast(WebsocketsTransport, self), json_answer ) except ValueError: raise TransportProtocolError( f"Server did not return a GraphQL result: {answer}" ) async def _send_query( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, ) -> int: query_id = self.next_query_id self.next_query_id += 1 data: Dict = 
{"query": print_ast(document)} if variable_values: data["variables"] = variable_values if operation_name: data["operationName"] = operation_name serialized_data = json.dumps(data, separators=(",", ":")) payload = {"data": serialized_data} message: Dict = { "id": str(query_id), "type": "start", "payload": payload, } assert self.auth is not None message["payload"]["extensions"] = { "authorization": self.auth.get_headers(serialized_data) } await self._send( json.dumps( message, separators=(",", ":"), ) ) return query_id subscribe = WebsocketsTransportBase.subscribe """Send a subscription query and receive the results using a python async generator. Only subscriptions are supported, queries and mutations are forbidden. The results are sent as an ExecutionResult object. """ async def execute( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, ) -> ExecutionResult: """This method is not available. Only subscriptions are supported on the AWS realtime endpoint. :raise: AssertionError""" raise AssertionError( "execute method is not allowed for AppSyncWebsocketsTransport " "because only subscriptions are allowed on the realtime endpoint." 
) _initialize = WebsocketsTransport._initialize _stop_listener = WebsocketsTransport._send_stop_message # type: ignore _send_init_message_and_wait_ack = ( WebsocketsTransport._send_init_message_and_wait_ack ) _wait_ack = WebsocketsTransport._wait_ack gql-3.6.0b2/gql/transport/async_transport.py000066400000000000000000000032731460703211500211550ustar00rootroot00000000000000import abc from typing import Any, AsyncGenerator, Dict, Optional from graphql import DocumentNode, ExecutionResult class AsyncTransport(abc.ABC): @abc.abstractmethod async def connect(self): """Coroutine used to create a connection to the specified address""" raise NotImplementedError( "Any AsyncTransport subclass must implement connect method" ) # pragma: no cover @abc.abstractmethod async def close(self): """Coroutine used to Close an established connection""" raise NotImplementedError( "Any AsyncTransport subclass must implement close method" ) # pragma: no cover @abc.abstractmethod async def execute( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, ) -> ExecutionResult: """Execute the provided document AST for either a remote or local GraphQL Schema.""" raise NotImplementedError( "Any AsyncTransport subclass must implement execute method" ) # pragma: no cover @abc.abstractmethod def subscribe( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, ) -> AsyncGenerator[ExecutionResult, None]: """Send a query and receive the results using an async generator The query can be a graphql query, mutation or subscription The results are sent as an ExecutionResult object """ raise NotImplementedError( "Any AsyncTransport subclass must implement subscribe method" ) # pragma: no cover gql-3.6.0b2/gql/transport/exceptions.py000066400000000000000000000033201460703211500200760ustar00rootroot00000000000000from typing import Any, List, Optional class TransportError(Exception): 
"""Base class for all the Transport exceptions""" pass class TransportProtocolError(TransportError): """Transport protocol error. The answer received from the server does not correspond to the transport protocol. """ class TransportServerError(TransportError): """The server returned a global error. This exception will close the transport connection. """ code: Optional[int] def __init__(self, message: str, code: Optional[int] = None): super(TransportServerError, self).__init__(message) self.code = code class TransportQueryError(TransportError): """The server returned an error for a specific query. This exception should not close the transport connection. """ query_id: Optional[int] errors: Optional[List[Any]] data: Optional[Any] extensions: Optional[Any] def __init__( self, msg: str, query_id: Optional[int] = None, errors: Optional[List[Any]] = None, data: Optional[Any] = None, extensions: Optional[Any] = None, ): super().__init__(msg) self.query_id = query_id self.errors = errors self.data = data self.extensions = extensions class TransportClosed(TransportError): """Transport is already closed. This exception is generated when the client is trying to use the transport while the transport was previously closed. """ class TransportAlreadyConnected(TransportError): """Transport is already connected. Exception generated when the client is trying to connect to the transport while the transport is already connected. """ gql-3.6.0b2/gql/transport/httpx.py000066400000000000000000000246611460703211500170770ustar00rootroot00000000000000import io import json import logging from typing import ( Any, AsyncGenerator, Callable, Dict, List, Optional, Tuple, Type, Union, cast, ) import httpx from graphql import DocumentNode, ExecutionResult, print_ast from ..utils import extract_files from . 
import AsyncTransport, Transport from .exceptions import ( TransportAlreadyConnected, TransportClosed, TransportProtocolError, TransportServerError, ) log = logging.getLogger(__name__) class _HTTPXTransport: file_classes: Tuple[Type[Any], ...] = (io.IOBase,) response_headers: Optional[httpx.Headers] = None def __init__( self, url: Union[str, httpx.URL], json_serialize: Callable = json.dumps, json_deserialize: Callable = json.loads, **kwargs, ): """Initialize the transport with the given httpx parameters. :param url: The GraphQL server URL. Example: 'https://server.com:PORT/path'. :param json_serialize: Json serializer callable. By default json.dumps() function. :param json_deserialize: Json deserializer callable. By default json.loads() function. :param kwargs: Extra args passed to the `httpx` client. """ self.url = url self.json_serialize = json_serialize self.json_deserialize = json_deserialize self.kwargs = kwargs def _prepare_request( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, extra_args: Optional[Dict[str, Any]] = None, upload_files: bool = False, ) -> Dict[str, Any]: query_str = print_ast(document) payload: Dict[str, Any] = { "query": query_str, } if operation_name: payload["operationName"] = operation_name if upload_files: # If the upload_files flag is set, then we need variable_values assert variable_values is not None post_args = self._prepare_file_uploads(variable_values, payload) else: if variable_values: payload["variables"] = variable_values post_args = {"json": payload} # Log the payload if log.isEnabledFor(logging.DEBUG): log.debug(">>> %s", self.json_serialize(payload)) # Pass post_args to httpx post method if extra_args: post_args.update(extra_args) return post_args def _prepare_file_uploads(self, variable_values, payload) -> Dict[str, Any]: # If we upload files, we will extract the files present in the # variable_values dict and replace them by null values 
nulled_variable_values, files = extract_files( variables=variable_values, file_classes=self.file_classes, ) # Save the nulled variable values in the payload payload["variables"] = nulled_variable_values # Prepare to send multipart-encoded data data: Dict[str, Any] = {} file_map: Dict[str, List[str]] = {} file_streams: Dict[str, Tuple[str, ...]] = {} for i, (path, f) in enumerate(files.items()): key = str(i) # Generate the file map # path is nested in a list because the spec allows multiple pointers # to the same file. But we don't support that. # Will generate something like {"0": ["variables.file"]} file_map[key] = [path] # Generate the file streams # Will generate something like # {"0": ("variables.file", <_io.BufferedReader ...>)} name = cast(str, getattr(f, "name", key)) content_type = getattr(f, "content_type", None) if content_type is None: file_streams[key] = (name, f) else: file_streams[key] = (name, f, content_type) # Add the payload to the operations field operations_str = self.json_serialize(payload) log.debug("operations %s", operations_str) data["operations"] = operations_str # Add the file map field file_map_str = self.json_serialize(file_map) log.debug("file_map %s", file_map_str) data["map"] = file_map_str return {"data": data, "files": file_streams} def _prepare_result(self, response: httpx.Response) -> ExecutionResult: # Save latest response headers in transport self.response_headers = response.headers if log.isEnabledFor(logging.DEBUG): log.debug("<<< %s", response.text) try: result: Dict[str, Any] = self.json_deserialize(response.content) except Exception: self._raise_response_error(response, "Not a JSON answer") if "errors" not in result and "data" not in result: self._raise_response_error(response, 'No "data" or "errors" keys in answer') return ExecutionResult( errors=result.get("errors"), data=result.get("data"), extensions=result.get("extensions"), ) def _raise_response_error(self, response: httpx.Response, reason: str): # We raise a 
TransportServerError if the status code is 400 or higher # We raise a TransportProtocolError in the other cases try: # Raise a HTTPError if response status is 400 or higher response.raise_for_status() except httpx.HTTPStatusError as e: raise TransportServerError(str(e), e.response.status_code) from e raise TransportProtocolError( f"Server did not return a GraphQL result: " f"{reason}: " f"{response.text}" ) class HTTPXTransport(Transport, _HTTPXTransport): """:ref:`Sync Transport ` used to execute GraphQL queries on remote servers. The transport uses the httpx library to send HTTP POST requests. """ client: Optional[httpx.Client] = None def connect(self): if self.client: raise TransportAlreadyConnected("Transport is already connected") log.debug("Connecting transport") self.client = httpx.Client(**self.kwargs) def execute( # type: ignore self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, extra_args: Optional[Dict[str, Any]] = None, upload_files: bool = False, ) -> ExecutionResult: """Execute GraphQL query. Execute the provided document AST against the configured remote server. This uses the httpx library to perform a HTTP POST request to the remote server. :param document: GraphQL query as AST Node object. :param variable_values: Dictionary of input parameters (Default: None). :param operation_name: Name of the operation that shall be executed. Only required in multi-operation documents (Default: None). :param extra_args: additional arguments to send to the httpx post method :param upload_files: Set to True if you want to put files in the variable values :return: The result of execution. `data` is the result of executing the query, `errors` is null if no errors occurred, and is a non-empty array if an error occurred. 
""" if not self.client: raise TransportClosed("Transport is not connected") post_args = self._prepare_request( document, variable_values, operation_name, extra_args, upload_files, ) response = self.client.post(self.url, **post_args) return self._prepare_result(response) def close(self): """Closing the transport by closing the inner session""" if self.client: self.client.close() self.client = None class HTTPXAsyncTransport(AsyncTransport, _HTTPXTransport): """:ref:`Async Transport ` used to execute GraphQL queries on remote servers. The transport uses the httpx library with anyio. """ client: Optional[httpx.AsyncClient] = None async def connect(self): if self.client: raise TransportAlreadyConnected("Transport is already connected") log.debug("Connecting transport") self.client = httpx.AsyncClient(**self.kwargs) async def execute( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, extra_args: Optional[Dict[str, Any]] = None, upload_files: bool = False, ) -> ExecutionResult: """Execute GraphQL query. Execute the provided document AST against the configured remote server. This uses the httpx library to perform a HTTP POST request asynchronously to the remote server. :param document: GraphQL query as AST Node object. :param variable_values: Dictionary of input parameters (Default: None). :param operation_name: Name of the operation that shall be executed. Only required in multi-operation documents (Default: None). :param extra_args: additional arguments to send to the httpx post method :param upload_files: Set to True if you want to put files in the variable values :return: The result of execution. `data` is the result of executing the query, `errors` is null if no errors occurred, and is a non-empty array if an error occurred. 
""" if not self.client: raise TransportClosed("Transport is not connected") post_args = self._prepare_request( document, variable_values, operation_name, extra_args, upload_files, ) response = await self.client.post(self.url, **post_args) return self._prepare_result(response) async def close(self): """Closing the transport by closing the inner session""" if self.client: await self.client.aclose() self.client = None def subscribe( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, ) -> AsyncGenerator[ExecutionResult, None]: """Subscribe is not supported on HTTP. :meta private: """ raise NotImplementedError("The HTTP transport does not support subscriptions") gql-3.6.0b2/gql/transport/local_schema.py000066400000000000000000000044111460703211500203310ustar00rootroot00000000000000import asyncio from inspect import isawaitable from typing import AsyncGenerator, Awaitable, cast from graphql import DocumentNode, ExecutionResult, GraphQLSchema, execute, subscribe from gql.transport import AsyncTransport class LocalSchemaTransport(AsyncTransport): """A transport for executing GraphQL queries against a local schema.""" def __init__( self, schema: GraphQLSchema, ): """Initialize the transport with the given local schema. 
:param schema: Local schema as GraphQLSchema object """ self.schema = schema async def connect(self): """No connection needed on local transport""" pass async def close(self): """No close needed on local transport""" pass async def execute( self, document: DocumentNode, *args, **kwargs, ) -> ExecutionResult: """Execute the provided document AST for on a local GraphQL Schema.""" result_or_awaitable = execute(self.schema, document, *args, **kwargs) execution_result: ExecutionResult if isawaitable(result_or_awaitable): result_or_awaitable = cast(Awaitable[ExecutionResult], result_or_awaitable) execution_result = await result_or_awaitable else: result_or_awaitable = cast(ExecutionResult, result_or_awaitable) execution_result = result_or_awaitable return execution_result @staticmethod async def _await_if_necessary(obj): """This method is necessary to work with graphql-core versions < and >= 3.3.0a3""" return await obj if asyncio.iscoroutine(obj) else obj async def subscribe( self, document: DocumentNode, *args, **kwargs, ) -> AsyncGenerator[ExecutionResult, None]: """Send a subscription and receive the results using an async generator The results are sent as an ExecutionResult object """ subscribe_result = await self._await_if_necessary( subscribe(self.schema, document, *args, **kwargs) ) if isinstance(subscribe_result, ExecutionResult): yield subscribe_result else: async for result in subscribe_result: yield result gql-3.6.0b2/gql/transport/phoenix_channel_websockets.py000066400000000000000000000355131460703211500233210ustar00rootroot00000000000000import asyncio import json import logging from typing import Any, Dict, Optional, Tuple from graphql import DocumentNode, ExecutionResult, print_ast from websockets.exceptions import ConnectionClosed from .exceptions import ( TransportProtocolError, TransportQueryError, TransportServerError, ) from .websockets_base import WebsocketsTransportBase log = logging.getLogger(__name__) class Subscription: """Records listener_id and 
unsubscribe query_id for a subscription.""" def __init__(self, query_id: int) -> None: self.listener_id: int = query_id self.unsubscribe_id: Optional[int] = None class PhoenixChannelWebsocketsTransport(WebsocketsTransportBase): """The PhoenixChannelWebsocketsTransport is an async transport which allows you to execute queries and subscriptions against an `Absinthe`_ backend using the `Phoenix`_ framework `channels`_. .. _Absinthe: http://absinthe-graphql.org .. _Phoenix: https://www.phoenixframework.org .. _channels: https://hexdocs.pm/phoenix/Phoenix.Channel.html#content """ def __init__( self, channel_name: str = "__absinthe__:control", heartbeat_interval: float = 30, *args, **kwargs, ) -> None: """Initialize the transport with the given parameters. :param channel_name: Channel on the server this transport will join. The default for Absinthe servers is "__absinthe__:control" :param heartbeat_interval: Interval in second between each heartbeat messages sent by the client """ self.channel_name: str = channel_name self.heartbeat_interval: float = heartbeat_interval self.heartbeat_task: Optional[asyncio.Future] = None self.subscriptions: Dict[str, Subscription] = {} super(PhoenixChannelWebsocketsTransport, self).__init__(*args, **kwargs) async def _initialize(self) -> None: """Join the specified channel and wait for the connection ACK. If the answer is not a connection_ack message, we will return an Exception. 
""" query_id = self.next_query_id self.next_query_id += 1 init_message = json.dumps( { "topic": self.channel_name, "event": "phx_join", "payload": {}, "ref": query_id, } ) await self._send(init_message) # Wait for the connection_ack message or raise a TimeoutError init_answer = await asyncio.wait_for(self._receive(), self.ack_timeout) answer_type, answer_id, execution_result = self._parse_answer(init_answer) if answer_type != "reply": raise TransportProtocolError( "Websocket server did not return a connection ack" ) async def heartbeat_coro(): while True: await asyncio.sleep(self.heartbeat_interval) try: query_id = self.next_query_id self.next_query_id += 1 await self._send( json.dumps( { "topic": "phoenix", "event": "heartbeat", "payload": {}, "ref": query_id, } ) ) except ConnectionClosed: # pragma: no cover return self.heartbeat_task = asyncio.ensure_future(heartbeat_coro()) async def _send_stop_message(self, query_id: int) -> None: """Send an 'unsubscribe' message to the Phoenix Channel referencing the listener's query_id, saving the query_id of the message. The server should afterwards return a 'phx_reply' message with the same query_id and subscription_id of the 'unsubscribe' request. 
""" subscription_id = self._find_existing_subscription(query_id) unsubscribe_query_id = self.next_query_id self.next_query_id += 1 # Save the ref so it can be matched in the reply self.subscriptions[subscription_id].unsubscribe_id = unsubscribe_query_id unsubscribe_message = json.dumps( { "topic": self.channel_name, "event": "unsubscribe", "payload": {"subscriptionId": subscription_id}, "ref": unsubscribe_query_id, } ) await self._send(unsubscribe_message) async def _stop_listener(self, query_id: int) -> None: await self._send_stop_message(query_id) async def _send_connection_terminate_message(self) -> None: """Send a phx_leave message to disconnect from the provided channel.""" query_id = self.next_query_id self.next_query_id += 1 connection_terminate_message = json.dumps( { "topic": self.channel_name, "event": "phx_leave", "payload": {}, "ref": query_id, } ) await self._send(connection_terminate_message) async def _connection_terminate(self): await self._send_connection_terminate_message() async def _send_query( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, ) -> int: """Send a query to the provided websocket connection. We use an incremented id to reference the query. Returns the used id for this query. 
""" query_id = self.next_query_id self.next_query_id += 1 query_str = json.dumps( { "topic": self.channel_name, "event": "doc", "payload": { "query": print_ast(document), "variables": variable_values or {}, }, "ref": query_id, } ) await self._send(query_str) return query_id def _parse_answer( self, answer: str ) -> Tuple[str, Optional[int], Optional[ExecutionResult]]: """Parse the answer received from the server Returns a list consisting of: - the answer_type (between: 'data', 'reply', 'complete', 'close') - the answer id (Integer) if received or None - an execution Result if the answer_type is 'data' or None """ event: str = "" answer_id: Optional[int] = None answer_type: str = "" execution_result: Optional[ExecutionResult] = None subscription_id: Optional[str] = None def _get_value(d: Any, key: str, label: str) -> Any: if not isinstance(d, dict): raise ValueError(f"{label} is not a dict") return d.get(key) def _required_value(d: Any, key: str, label: str) -> Any: value = _get_value(d, key, label) if value is None: raise ValueError(f"null {key} in {label}") return value def _required_subscription_id( d: Any, label: str, must_exist: bool = False, must_not_exist=False ) -> str: subscription_id = str(_required_value(d, "subscriptionId", label)) if must_exist and (subscription_id not in self.subscriptions): raise ValueError("unregistered subscriptionId") if must_not_exist and (subscription_id in self.subscriptions): raise ValueError("previously registered subscriptionId") return subscription_id def _validate_data_response(d: Any, label: str) -> dict: """Make sure query, mutation or subscription answer conforms. The GraphQL spec says only three keys are permitted. 
""" if not isinstance(d, dict): raise ValueError(f"{label} is not a dict") keys = set(d.keys()) invalid = keys - {"data", "errors", "extensions"} if len(invalid) > 0: raise ValueError( f"{label} contains invalid items: " + ", ".join(invalid) ) return d try: json_answer = json.loads(answer) event = str(_required_value(json_answer, "event", "answer")) if event == "subscription:data": payload = _required_value(json_answer, "payload", "answer") subscription_id = _required_subscription_id( payload, "payload", must_exist=True ) result = _validate_data_response(payload.get("result"), "result") answer_type = "data" subscription = self.subscriptions[subscription_id] answer_id = subscription.listener_id execution_result = ExecutionResult( data=result.get("data"), errors=result.get("errors"), extensions=result.get("extensions"), ) elif event == "phx_reply": # Will generate a ValueError if 'ref' is not there # or if it is not an integer answer_id = int(_required_value(json_answer, "ref", "answer")) payload = _required_value(json_answer, "payload", "answer") status = _get_value(payload, "status", "payload") if status == "ok": answer_type = "reply" if answer_id in self.listeners: response = _required_value(payload, "response", "payload") if isinstance(response, dict) and "subscriptionId" in response: # Subscription answer subscription_id = _required_subscription_id( response, "response", must_not_exist=True ) self.subscriptions[subscription_id] = Subscription( answer_id ) else: # Query or mutation answer # GraphQL spec says only three keys are permitted response = _validate_data_response(response, "response") answer_type = "data" execution_result = ExecutionResult( data=response.get("data"), errors=response.get("errors"), extensions=response.get("extensions"), ) else: ( registered_subscription_id, listener_id, ) = self._find_subscription(answer_id) if registered_subscription_id is not None: # Unsubscription answer response = _required_value(payload, "response", "payload") 
subscription_id = _required_subscription_id( response, "response" ) if subscription_id != registered_subscription_id: raise ValueError("subscription id does not match") answer_type = "complete" answer_id = listener_id elif status == "error": response = payload.get("response") if isinstance(response, dict): if "errors" in response: raise TransportQueryError( str(response.get("errors")), query_id=answer_id ) elif "reason" in response: raise TransportQueryError( str(response.get("reason")), query_id=answer_id ) raise TransportQueryError("reply error", query_id=answer_id) elif status == "timeout": raise TransportQueryError("reply timeout", query_id=answer_id) else: # missing or unrecognized status, just continue pass elif event == "phx_error": # Sent if the channel has crashed # answer_id will be the "join_ref" for the channel # answer_id = int(json_answer.get("ref")) raise TransportServerError("Server error") elif event == "phx_close": answer_type = "close" else: raise ValueError("unrecognized event") except ValueError as e: log.error(f"Error parsing answer '{answer}': {e!r}") raise TransportProtocolError( f"Server did not return a GraphQL result: {e!s}" ) from e return answer_type, answer_id, execution_result async def _handle_answer( self, answer_type: str, answer_id: Optional[int], execution_result: Optional[ExecutionResult], ) -> None: if answer_type == "close": await self.close() else: await super()._handle_answer(answer_type, answer_id, execution_result) def _remove_listener(self, query_id: int) -> None: """If the listener was a subscription, remove that information.""" try: subscription_id = self._find_existing_subscription(query_id) del self.subscriptions[subscription_id] except Exception: pass super()._remove_listener(query_id) def _find_subscription(self, query_id: int) -> Tuple[Optional[str], int]: """Perform a reverse lookup to find the subscription id matching a listener's query_id. 
""" for subscription_id, subscription in self.subscriptions.items(): if query_id == subscription.listener_id: return subscription_id, query_id if query_id == subscription.unsubscribe_id: return subscription_id, subscription.listener_id return None, query_id def _find_existing_subscription(self, query_id: int) -> str: """Perform a reverse lookup to find the subscription id matching a listener's query_id. """ subscription_id, _listener_id = self._find_subscription(query_id) if subscription_id is None: raise TransportProtocolError( f"No subscription registered for listener {query_id}" ) return subscription_id async def _close_coro(self, e: Exception, clean_close: bool = True) -> None: if self.heartbeat_task is not None: self.heartbeat_task.cancel() await super()._close_coro(e, clean_close) gql-3.6.0b2/gql/transport/requests.py000066400000000000000000000404131460703211500175740ustar00rootroot00000000000000import io import json import logging from typing import Any, Callable, Collection, Dict, List, Optional, Tuple, Type, Union import requests from graphql import DocumentNode, ExecutionResult, print_ast from requests.adapters import HTTPAdapter, Retry from requests.auth import AuthBase from requests.cookies import RequestsCookieJar from requests_toolbelt.multipart.encoder import MultipartEncoder from gql.transport import Transport from ..graphql_request import GraphQLRequest from ..utils import extract_files from .exceptions import ( TransportAlreadyConnected, TransportClosed, TransportProtocolError, TransportServerError, ) log = logging.getLogger(__name__) class RequestsHTTPTransport(Transport): """:ref:`Sync Transport ` used to execute GraphQL queries on remote servers. The transport uses the requests library to send HTTP POST requests. """ file_classes: Tuple[Type[Any], ...] 
= (io.IOBase,) _default_retry_codes = (429, 500, 502, 503, 504) def __init__( self, url: str, headers: Optional[Dict[str, Any]] = None, cookies: Optional[Union[Dict[str, Any], RequestsCookieJar]] = None, auth: Optional[AuthBase] = None, use_json: bool = True, timeout: Optional[int] = None, verify: Union[bool, str] = True, retries: int = 0, method: str = "POST", retry_backoff_factor: float = 0.1, retry_status_forcelist: Collection[int] = _default_retry_codes, json_serialize: Callable = json.dumps, json_deserialize: Callable = json.loads, **kwargs: Any, ): """Initialize the transport with the given request parameters. :param url: The GraphQL server URL. :param headers: Dictionary of HTTP Headers to send with the :class:`Request` (Default: None). :param cookies: Dict or CookieJar object to send with the :class:`Request` (Default: None). :param auth: Auth tuple or callable to enable Basic/Digest/Custom HTTP Auth (Default: None). :param use_json: Send request body as JSON instead of form-urlencoded (Default: True). :param timeout: Specifies a default timeout for requests (Default: None). :param verify: Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use. (Default: True). :param retries: Pre-setup of the requests' Session for performing retries :param method: HTTP method used for requests. (Default: POST). :param retry_backoff_factor: A backoff factor to apply between attempts after the second try. urllib3 will sleep for: {backoff factor} * (2 ** ({number of previous retries})) :param retry_status_forcelist: A set of integer HTTP status codes that we should force a retry on. A retry is initiated if the request method is in allowed_methods and the response status code is in status_forcelist. (Default: [429, 500, 502, 503, 504]) :param json_serialize: Json serializer callable. By default json.dumps() function :param json_deserialize: Json deserializer callable. 
By default json.loads() function :param kwargs: Optional arguments that ``request`` takes. These can be seen at the `requests`_ source code or the official `docs`_ .. _requests: https://github.com/psf/requests/blob/master/requests/api.py .. _docs: https://requests.readthedocs.io/en/master/ """ self.url = url self.headers = headers self.cookies = cookies self.auth = auth self.use_json = use_json self.default_timeout = timeout self.verify = verify self.retries = retries self.method = method self.retry_backoff_factor = retry_backoff_factor self.retry_status_forcelist = retry_status_forcelist self.json_serialize: Callable = json_serialize self.json_deserialize: Callable = json_deserialize self.kwargs = kwargs self.session = None self.response_headers = None def connect(self): if self.session is None: # Creating a session that can later be re-use to configure custom mechanisms self.session = requests.Session() # If we specified some retries, we provide a predefined retry-logic if self.retries > 0: adapter = HTTPAdapter( max_retries=Retry( total=self.retries, backoff_factor=self.retry_backoff_factor, status_forcelist=self.retry_status_forcelist, allowed_methods=None, ) ) for prefix in "http://", "https://": self.session.mount(prefix, adapter) else: raise TransportAlreadyConnected("Transport is already connected") def execute( # type: ignore self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, timeout: Optional[int] = None, extra_args: Optional[Dict[str, Any]] = None, upload_files: bool = False, ) -> ExecutionResult: """Execute GraphQL query. Execute the provided document AST against the configured remote server. This uses the requests library to perform a HTTP POST request to the remote server. :param document: GraphQL query as AST Node object. :param variable_values: Dictionary of input parameters (Default: None). :param operation_name: Name of the operation that shall be executed. 
Only required in multi-operation documents (Default: None). :param timeout: Specifies a default timeout for requests (Default: None). :param extra_args: additional arguments to send to the requests post method :param upload_files: Set to True if you want to put files in the variable values :return: The result of execution. `data` is the result of executing the query, `errors` is null if no errors occurred, and is a non-empty array if an error occurred. """ if not self.session: raise TransportClosed("Transport is not connected") query_str = print_ast(document) payload: Dict[str, Any] = {"query": query_str} if operation_name: payload["operationName"] = operation_name post_args = { "headers": self.headers, "auth": self.auth, "cookies": self.cookies, "timeout": timeout or self.default_timeout, "verify": self.verify, } if upload_files: # If the upload_files flag is set, then we need variable_values assert variable_values is not None # If we upload files, we will extract the files present in the # variable_values dict and replace them by null values nulled_variable_values, files = extract_files( variables=variable_values, file_classes=self.file_classes, ) # Save the nulled variable values in the payload payload["variables"] = nulled_variable_values # Add the payload to the operations field operations_str = self.json_serialize(payload) log.debug("operations %s", operations_str) # Generate the file map # path is nested in a list because the spec allows multiple pointers # to the same file. But we don't support that. 
# Will generate something like {"0": ["variables.file"]} file_map = {str(i): [path] for i, path in enumerate(files)} # Enumerate the file streams # Will generate something like {'0': <_io.BufferedReader ...>} file_streams = {str(i): files[path] for i, path in enumerate(files)} # Add the file map field file_map_str = self.json_serialize(file_map) log.debug("file_map %s", file_map_str) fields = {"operations": operations_str, "map": file_map_str} # Add the extracted files as remaining fields for k, f in file_streams.items(): name = getattr(f, "name", k) content_type = getattr(f, "content_type", None) if content_type is None: fields[k] = (name, f) else: fields[k] = (name, f, content_type) # Prepare requests http to send multipart-encoded data data = MultipartEncoder(fields=fields) post_args["data"] = data if post_args["headers"] is None: post_args["headers"] = {} else: post_args["headers"] = {**post_args["headers"]} post_args["headers"]["Content-Type"] = data.content_type else: if variable_values: payload["variables"] = variable_values data_key = "json" if self.use_json else "data" post_args[data_key] = payload # Log the payload if log.isEnabledFor(logging.INFO): log.info(">>> %s", self.json_serialize(payload)) # Pass kwargs to requests post method post_args.update(self.kwargs) # Pass post_args to requests post method if extra_args: post_args.update(extra_args) # Using the created session to perform requests response = self.session.request( self.method, self.url, **post_args # type: ignore ) self.response_headers = response.headers def raise_response_error(resp: requests.Response, reason: str): # We raise a TransportServerError if the status code is 400 or higher # We raise a TransportProtocolError in the other cases try: # Raise a HTTPError if response status is 400 or higher resp.raise_for_status() except requests.HTTPError as e: raise TransportServerError(str(e), e.response.status_code) from e result_text = resp.text raise TransportProtocolError( f"Server did not 
return a GraphQL result: " f"{reason}: " f"{result_text}" ) try: if self.json_deserialize == json.loads: result = response.json() else: result = self.json_deserialize(response.text) if log.isEnabledFor(logging.INFO): log.info("<<< %s", response.text) except Exception: raise_response_error(response, "Not a JSON answer") if "errors" not in result and "data" not in result: raise_response_error(response, 'No "data" or "errors" keys in answer') return ExecutionResult( errors=result.get("errors"), data=result.get("data"), extensions=result.get("extensions"), ) def execute_batch( # type: ignore self, reqs: List[GraphQLRequest], timeout: Optional[int] = None, extra_args: Optional[Dict[str, Any]] = None, ) -> List[ExecutionResult]: """Execute multiple GraphQL requests in a batch. Execute the provided requests against the configured remote server. This uses the requests library to perform a HTTP POST request to the remote server. :param reqs: GraphQL requests as a list of GraphQLRequest objects. :param timeout: Specifies a default timeout for requests (Default: None). :param extra_args: additional arguments to send to the requests post method :return: A list of results of execution. For every result `data` is the result of executing the query, `errors` is null if no errors occurred, and is a non-empty array if an error occurred. 
""" if not self.session: raise TransportClosed("Transport is not connected") # Using the created session to perform requests response = self.session.request( self.method, self.url, **self._build_batch_post_args(reqs, timeout, extra_args), ) self.response_headers = response.headers answers = self._extract_response(response) self._validate_answer_is_a_list(answers) self._validate_num_of_answers_same_as_requests(reqs, answers) self._validate_every_answer_is_a_dict(answers) self._validate_data_and_errors_keys_in_answers(answers) return [self._answer_to_execution_result(answer) for answer in answers] def _answer_to_execution_result(self, result: Dict[str, Any]) -> ExecutionResult: return ExecutionResult( errors=result.get("errors"), data=result.get("data"), extensions=result.get("extensions"), ) def _validate_answer_is_a_list(self, results: Any) -> None: if not isinstance(results, list): self._raise_invalid_result( str(results), "Answer is not a list", ) def _validate_data_and_errors_keys_in_answers( self, results: List[Dict[str, Any]] ) -> None: for result in results: if "errors" not in result and "data" not in result: self._raise_invalid_result( str(results), 'No "data" or "errors" keys in answer', ) def _validate_every_answer_is_a_dict(self, results: List[Dict[str, Any]]) -> None: for result in results: if not isinstance(result, dict): self._raise_invalid_result(str(results), "Not every answer is dict") def _validate_num_of_answers_same_as_requests( self, reqs: List[GraphQLRequest], results: List[Dict[str, Any]], ) -> None: if len(reqs) != len(results): self._raise_invalid_result( str(results), "Invalid answer length", ) def _raise_invalid_result(self, result_text: str, reason: str) -> None: raise TransportProtocolError( f"Server did not return a valid GraphQL result: " f"{reason}: " f"{result_text}" ) def _extract_response(self, response: requests.Response) -> Any: try: response.raise_for_status() result = response.json() if log.isEnabledFor(logging.INFO): 
log.info("<<< %s", response.text) except requests.HTTPError as e: raise TransportServerError(str(e), e.response.status_code) from e except Exception: self._raise_invalid_result(str(response.text), "Not a JSON answer") return result def _build_batch_post_args( self, reqs: List[GraphQLRequest], timeout: Optional[int] = None, extra_args: Optional[Dict[str, Any]] = None, ) -> Dict[str, Any]: post_args: Dict[str, Any] = { "headers": self.headers, "auth": self.auth, "cookies": self.cookies, "timeout": timeout or self.default_timeout, "verify": self.verify, } data_key = "json" if self.use_json else "data" post_args[data_key] = [self._build_data(req) for req in reqs] # Log the payload if log.isEnabledFor(logging.INFO): log.info(">>> %s", self.json_serialize(post_args[data_key])) # Pass kwargs to requests post method post_args.update(self.kwargs) # Pass post_args to requests post method if extra_args: post_args.update(extra_args) return post_args def _build_data(self, req: GraphQLRequest) -> Dict[str, Any]: query_str = print_ast(req.document) payload: Dict[str, Any] = {"query": query_str} if req.operation_name: payload["operationName"] = req.operation_name if req.variable_values: payload["variables"] = req.variable_values return payload def close(self): """Closing the transport by closing the inner session""" if self.session: self.session.close() self.session = None gql-3.6.0b2/gql/transport/transport.py000066400000000000000000000031231460703211500177520ustar00rootroot00000000000000import abc from typing import List from graphql import DocumentNode, ExecutionResult from ..graphql_request import GraphQLRequest class Transport(abc.ABC): @abc.abstractmethod def execute(self, document: DocumentNode, *args, **kwargs) -> ExecutionResult: """Execute GraphQL query. Execute the provided document AST for either a remote or local GraphQL Schema. :param document: GraphQL query as AST Node or Document object. 
:return: ExecutionResult """ raise NotImplementedError( "Any Transport subclass must implement execute method" ) # pragma: no cover def execute_batch( self, reqs: List[GraphQLRequest], *args, **kwargs, ) -> List[ExecutionResult]: """Execute multiple GraphQL requests in a batch. Execute the provided requests for either a remote or local GraphQL Schema. :param reqs: GraphQL requests as a list of GraphQLRequest objects. :return: a list of ExecutionResult objects """ raise NotImplementedError( "This Transport has not implemented the execute_batch method" ) # pragma: no cover def connect(self): """Establish a session with the transport.""" pass # pragma: no cover def close(self): """Close the transport This method doesn't have to be implemented unless the transport would benefit from it. This is currently used by the RequestsHTTPTransport transport to close the session's connection pool. """ pass # pragma: no cover gql-3.6.0b2/gql/transport/websockets.py000066400000000000000000000453661460703211500201060ustar00rootroot00000000000000import asyncio import json import logging from contextlib import suppress from ssl import SSLContext from typing import Any, Dict, List, Optional, Tuple, Union, cast from graphql import DocumentNode, ExecutionResult, print_ast from websockets.datastructures import HeadersLike from websockets.typing import Subprotocol from .exceptions import ( TransportProtocolError, TransportQueryError, TransportServerError, ) from .websockets_base import WebsocketsTransportBase log = logging.getLogger(__name__) class WebsocketsTransport(WebsocketsTransportBase): """:ref:`Async Transport ` used to execute GraphQL queries on remote servers with websocket connection. This transport uses asyncio and the websockets library in order to send requests on a websocket connection. 
""" # This transport supports two subprotocols and will autodetect the # subprotocol supported on the server APOLLO_SUBPROTOCOL = cast(Subprotocol, "graphql-ws") GRAPHQLWS_SUBPROTOCOL = cast(Subprotocol, "graphql-transport-ws") def __init__( self, url: str, headers: Optional[HeadersLike] = None, ssl: Union[SSLContext, bool] = False, init_payload: Dict[str, Any] = {}, connect_timeout: Optional[Union[int, float]] = 10, close_timeout: Optional[Union[int, float]] = 10, ack_timeout: Optional[Union[int, float]] = 10, keep_alive_timeout: Optional[Union[int, float]] = None, ping_interval: Optional[Union[int, float]] = None, pong_timeout: Optional[Union[int, float]] = None, answer_pings: bool = True, connect_args: Dict[str, Any] = {}, subprotocols: Optional[List[Subprotocol]] = None, ) -> None: """Initialize the transport with the given parameters. :param url: The GraphQL server URL. Example: 'wss://server.com:PORT/graphql'. :param headers: Dict of HTTP Headers. :param ssl: ssl_context of the connection. Use ssl=False to disable encryption :param init_payload: Dict of the payload sent in the connection_init message. :param connect_timeout: Timeout in seconds for the establishment of the websocket connection. If None is provided this will wait forever. :param close_timeout: Timeout in seconds for the close. If None is provided this will wait forever. :param ack_timeout: Timeout in seconds to wait for the connection_ack message from the server. If None is provided this will wait forever. :param keep_alive_timeout: Optional Timeout in seconds to receive a sign of liveness from the server. :param ping_interval: Delay in seconds between pings sent by the client to the backend for the graphql-ws protocol. None (by default) means that we don't send pings. Note: there are also pings sent by the underlying websockets protocol. See the :ref:`keepalive documentation ` for more information about this. 
:param pong_timeout: Delay in seconds to receive a pong from the backend after we sent a ping (only for the graphql-ws protocol). By default equal to half of the ping_interval. :param answer_pings: Whether the client answers the pings from the backend (for the graphql-ws protocol). By default: True :param connect_args: Other parameters forwarded to `websockets.connect `_ :param subprotocols: list of subprotocols sent to the backend in the 'subprotocols' http header. By default: both apollo and graphql-ws subprotocols. """ super().__init__( url, headers, ssl, init_payload, connect_timeout, close_timeout, ack_timeout, keep_alive_timeout, connect_args, ) self.ping_interval: Optional[Union[int, float]] = ping_interval self.pong_timeout: Optional[Union[int, float]] self.answer_pings: bool = answer_pings if ping_interval is not None: if pong_timeout is None: self.pong_timeout = ping_interval / 2 else: self.pong_timeout = pong_timeout self.send_ping_task: Optional[asyncio.Future] = None self.ping_received: asyncio.Event = asyncio.Event() """ping_received is an asyncio Event which will fire each time a ping is received with the graphql-ws protocol""" self.pong_received: asyncio.Event = asyncio.Event() """pong_received is an asyncio Event which will fire each time a pong is received with the graphql-ws protocol""" if subprotocols is None: self.supported_subprotocols = [ self.APOLLO_SUBPROTOCOL, self.GRAPHQLWS_SUBPROTOCOL, ] else: self.supported_subprotocols = subprotocols async def _wait_ack(self) -> None: """Wait for the connection_ack message. 
Keep alive messages are ignored""" while True: init_answer = await self._receive() answer_type, answer_id, execution_result = self._parse_answer(init_answer) if answer_type == "connection_ack": return if answer_type != "ka": raise TransportProtocolError( "Websocket server did not return a connection ack" ) async def _send_init_message_and_wait_ack(self) -> None: """Send init message to the provided websocket and wait for the connection ACK. If the answer is not a connection_ack message, we will return an Exception. """ init_message = json.dumps( {"type": "connection_init", "payload": self.init_payload} ) await self._send(init_message) # Wait for the connection_ack message or raise a TimeoutError await asyncio.wait_for(self._wait_ack(), self.ack_timeout) async def _initialize(self): await self._send_init_message_and_wait_ack() async def send_ping(self, payload: Optional[Any] = None) -> None: """Send a ping message for the graphql-ws protocol""" ping_message = {"type": "ping"} if payload is not None: ping_message["payload"] = payload await self._send(json.dumps(ping_message)) async def send_pong(self, payload: Optional[Any] = None) -> None: """Send a pong message for the graphql-ws protocol""" pong_message = {"type": "pong"} if payload is not None: pong_message["payload"] = payload await self._send(json.dumps(pong_message)) async def _send_stop_message(self, query_id: int) -> None: """Send stop message to the provided websocket connection and query_id. The server should afterwards return a 'complete' message. """ stop_message = json.dumps({"id": str(query_id), "type": "stop"}) await self._send(stop_message) async def _send_complete_message(self, query_id: int) -> None: """Send a complete message for the provided query_id. This is only for the graphql-ws protocol. 
""" complete_message = json.dumps({"id": str(query_id), "type": "complete"}) await self._send(complete_message) async def _stop_listener(self, query_id: int): """Stop the listener corresponding to the query_id depending on the detected backend protocol. For apollo: send a "stop" message (a "complete" message will be sent from the backend) For graphql-ws: send a "complete" message and simulate the reception of a "complete" message from the backend """ log.debug(f"stop listener {query_id}") if self.subprotocol == self.GRAPHQLWS_SUBPROTOCOL: await self._send_complete_message(query_id) await self.listeners[query_id].put(("complete", None)) else: await self._send_stop_message(query_id) async def _send_connection_terminate_message(self) -> None: """Send a connection_terminate message to the provided websocket connection. This message indicates that the connection will disconnect. """ connection_terminate_message = json.dumps({"type": "connection_terminate"}) await self._send(connection_terminate_message) async def _send_query( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, ) -> int: """Send a query to the provided websocket connection. We use an incremented id to reference the query. Returns the used id for this query. 
""" query_id = self.next_query_id self.next_query_id += 1 payload: Dict[str, Any] = {"query": print_ast(document)} if variable_values: payload["variables"] = variable_values if operation_name: payload["operationName"] = operation_name query_type = "start" if self.subprotocol == self.GRAPHQLWS_SUBPROTOCOL: query_type = "subscribe" query_str = json.dumps( {"id": str(query_id), "type": query_type, "payload": payload} ) await self._send(query_str) return query_id async def _connection_terminate(self): if self.subprotocol == self.APOLLO_SUBPROTOCOL: await self._send_connection_terminate_message() def _parse_answer_graphqlws( self, json_answer: Dict[str, Any] ) -> Tuple[str, Optional[int], Optional[ExecutionResult]]: """Parse the answer received from the server if the server supports the graphql-ws protocol. Returns a list consisting of: - the answer_type (between: 'connection_ack', 'ping', 'pong', 'data', 'error', 'complete') - the answer id (Integer) if received or None - an execution Result if the answer_type is 'data' or None Differences with the apollo websockets protocol (superclass): - the "data" message is now called "next" - the "stop" message is now called "complete" - there is no connection_terminate or connection_error messages - instead of a unidirectional keep-alive (ka) message from server to client, there is now the possibility to send bidirectional ping/pong messages - connection_ack has an optional payload - the 'error' answer type returns a list of errors instead of a single error """ answer_type: str = "" answer_id: Optional[int] = None execution_result: Optional[ExecutionResult] = None try: answer_type = str(json_answer.get("type")) if answer_type in ["next", "error", "complete"]: answer_id = int(str(json_answer.get("id"))) if answer_type == "next" or answer_type == "error": payload = json_answer.get("payload") if answer_type == "next": if not isinstance(payload, dict): raise ValueError("payload is not a dict") if "errors" not in payload and "data" 
not in payload: raise ValueError( "payload does not contain 'data' or 'errors' fields" ) execution_result = ExecutionResult( errors=payload.get("errors"), data=payload.get("data"), extensions=payload.get("extensions"), ) # Saving answer_type as 'data' to be understood with superclass answer_type = "data" elif answer_type == "error": if not isinstance(payload, list): raise ValueError("payload is not a list") raise TransportQueryError( str(payload[0]), query_id=answer_id, errors=payload ) elif answer_type in ["ping", "pong", "connection_ack"]: self.payloads[answer_type] = json_answer.get("payload", None) else: raise ValueError if self.check_keep_alive_task is not None: self._next_keep_alive_message.set() except ValueError as e: raise TransportProtocolError( f"Server did not return a GraphQL result: {json_answer}" ) from e return answer_type, answer_id, execution_result def _parse_answer_apollo( self, json_answer: Dict[str, Any] ) -> Tuple[str, Optional[int], Optional[ExecutionResult]]: """Parse the answer received from the server if the server supports the apollo websockets protocol. 
Returns a list consisting of: - the answer_type (between: 'connection_ack', 'ka', 'connection_error', 'data', 'error', 'complete') - the answer id (Integer) if received or None - an execution Result if the answer_type is 'data' or None """ answer_type: str = "" answer_id: Optional[int] = None execution_result: Optional[ExecutionResult] = None try: answer_type = str(json_answer.get("type")) if answer_type in ["data", "error", "complete"]: answer_id = int(str(json_answer.get("id"))) if answer_type == "data" or answer_type == "error": payload = json_answer.get("payload") if not isinstance(payload, dict): raise ValueError("payload is not a dict") if answer_type == "data": if "errors" not in payload and "data" not in payload: raise ValueError( "payload does not contain 'data' or 'errors' fields" ) execution_result = ExecutionResult( errors=payload.get("errors"), data=payload.get("data"), extensions=payload.get("extensions"), ) elif answer_type == "error": raise TransportQueryError( str(payload), query_id=answer_id, errors=[payload] ) elif answer_type == "ka": # Keep-alive message if self.check_keep_alive_task is not None: self._next_keep_alive_message.set() elif answer_type == "connection_ack": pass elif answer_type == "connection_error": error_payload = json_answer.get("payload") raise TransportServerError(f"Server error: '{repr(error_payload)}'") else: raise ValueError except ValueError as e: raise TransportProtocolError( f"Server did not return a GraphQL result: {json_answer}" ) from e return answer_type, answer_id, execution_result def _parse_answer( self, answer: str ) -> Tuple[str, Optional[int], Optional[ExecutionResult]]: """Parse the answer received from the server depending on the detected subprotocol. 
""" try: json_answer = json.loads(answer) except ValueError: raise TransportProtocolError( f"Server did not return a GraphQL result: {answer}" ) if self.subprotocol == self.GRAPHQLWS_SUBPROTOCOL: return self._parse_answer_graphqlws(json_answer) return self._parse_answer_apollo(json_answer) async def _send_ping_coro(self) -> None: """Coroutine to periodically send a ping from the client to the backend. Only used for the graphql-ws protocol. Send a ping every ping_interval seconds. Close the connection if a pong is not received within pong_timeout seconds. """ assert self.ping_interval is not None try: while True: await asyncio.sleep(self.ping_interval) await self.send_ping() await asyncio.wait_for(self.pong_received.wait(), self.pong_timeout) # Reset for the next iteration self.pong_received.clear() except asyncio.TimeoutError: # No pong received in the appriopriate time, close with error # If the timeout happens during a close already in progress, do nothing if self.close_task is None: await self._fail( TransportServerError( f"No pong received after {self.pong_timeout!r} seconds" ), clean_close=False, ) async def _handle_answer( self, answer_type: str, answer_id: Optional[int], execution_result: Optional[ExecutionResult], ) -> None: # Put the answer in the queue await super()._handle_answer(answer_type, answer_id, execution_result) # Answer pong to ping for graphql-ws protocol if answer_type == "ping": self.ping_received.set() if self.answer_pings: await self.send_pong() elif answer_type == "pong": self.pong_received.set() async def _after_connect(self): # Find the backend subprotocol returned in the response headers response_headers = self.websocket.response_headers try: self.subprotocol = response_headers["Sec-WebSocket-Protocol"] except KeyError: # If the server does not send the subprotocol header, using # the apollo subprotocol by default self.subprotocol = self.APOLLO_SUBPROTOCOL log.debug(f"backend subprotocol returned: {self.subprotocol!r}") async def 
_after_initialize(self): # If requested, create a task to send periodic pings to the backend if ( self.subprotocol == self.GRAPHQLWS_SUBPROTOCOL and self.ping_interval is not None ): self.send_ping_task = asyncio.ensure_future(self._send_ping_coro()) async def _close_hook(self): # Properly shut down the send ping task if enabled if self.send_ping_task is not None: self.send_ping_task.cancel() with suppress(asyncio.CancelledError): await self.send_ping_task self.send_ping_task = None gql-3.6.0b2/gql/transport/websockets_base.py000066400000000000000000000576221460703211500210760ustar00rootroot00000000000000import asyncio import logging import warnings from abc import abstractmethod from contextlib import suppress from ssl import SSLContext from typing import Any, AsyncGenerator, Dict, List, Optional, Tuple, Union, cast import websockets from graphql import DocumentNode, ExecutionResult from websockets.client import WebSocketClientProtocol from websockets.datastructures import Headers, HeadersLike from websockets.exceptions import ConnectionClosed from websockets.typing import Data, Subprotocol from .async_transport import AsyncTransport from .exceptions import ( TransportAlreadyConnected, TransportClosed, TransportProtocolError, TransportQueryError, TransportServerError, ) log = logging.getLogger("gql.transport.websockets") ParsedAnswer = Tuple[str, Optional[ExecutionResult]] class ListenerQueue: """Special queue used for each query waiting for server answers If the server is stopped while the listener is still waiting, Then we send an exception to the queue and this exception will be raised to the consumer once all the previous messages have been consumed from the queue """ def __init__(self, query_id: int, send_stop: bool) -> None: self.query_id: int = query_id self.send_stop: bool = send_stop self._queue: asyncio.Queue = asyncio.Queue() self._closed: bool = False async def get(self) -> ParsedAnswer: try: item = self._queue.get_nowait() except asyncio.QueueEmpty: 
item = await self._queue.get() self._queue.task_done() # If we receive an exception when reading the queue, we raise it if isinstance(item, Exception): self._closed = True raise item # Don't need to save new answers or # send the stop message if we already received the complete message answer_type, execution_result = item if answer_type == "complete": self.send_stop = False self._closed = True return item async def put(self, item: ParsedAnswer) -> None: if not self._closed: await self._queue.put(item) async def set_exception(self, exception: Exception) -> None: # Put the exception in the queue await self._queue.put(exception) # Don't need to send stop messages in case of error self.send_stop = False self._closed = True class WebsocketsTransportBase(AsyncTransport): """abstract :ref:`Async Transport ` used to implement different websockets protocols. This transport uses asyncio and the websockets library in order to send requests on a websocket connection. """ def __init__( self, url: str, headers: Optional[HeadersLike] = None, ssl: Union[SSLContext, bool] = False, init_payload: Dict[str, Any] = {}, connect_timeout: Optional[Union[int, float]] = 10, close_timeout: Optional[Union[int, float]] = 10, ack_timeout: Optional[Union[int, float]] = 10, keep_alive_timeout: Optional[Union[int, float]] = None, connect_args: Dict[str, Any] = {}, ) -> None: """Initialize the transport with the given parameters. :param url: The GraphQL server URL. Example: 'wss://server.com:PORT/graphql'. :param headers: Dict of HTTP Headers. :param ssl: ssl_context of the connection. Use ssl=False to disable encryption :param init_payload: Dict of the payload sent in the connection_init message. :param connect_timeout: Timeout in seconds for the establishment of the websocket connection. If None is provided this will wait forever. :param close_timeout: Timeout in seconds for the close. If None is provided this will wait forever. 
:param ack_timeout: Timeout in seconds to wait for the connection_ack message from the server. If None is provided this will wait forever. :param keep_alive_timeout: Optional Timeout in seconds to receive a sign of liveness from the server. :param connect_args: Other parameters forwarded to websockets.connect """ self.url: str = url self.headers: Optional[HeadersLike] = headers self.ssl: Union[SSLContext, bool] = ssl self.init_payload: Dict[str, Any] = init_payload self.connect_timeout: Optional[Union[int, float]] = connect_timeout self.close_timeout: Optional[Union[int, float]] = close_timeout self.ack_timeout: Optional[Union[int, float]] = ack_timeout self.keep_alive_timeout: Optional[Union[int, float]] = keep_alive_timeout self.connect_args = connect_args self.websocket: Optional[WebSocketClientProtocol] = None self.next_query_id: int = 1 self.listeners: Dict[int, ListenerQueue] = {} self.receive_data_task: Optional[asyncio.Future] = None self.check_keep_alive_task: Optional[asyncio.Future] = None self.close_task: Optional[asyncio.Future] = None # We need to set an event loop here if there is none # Or else we will not be able to create an asyncio.Event() try: with warnings.catch_warnings(): warnings.filterwarnings( "ignore", message="There is no current event loop" ) self._loop = asyncio.get_event_loop() except RuntimeError: self._loop = asyncio.new_event_loop() asyncio.set_event_loop(self._loop) self._wait_closed: asyncio.Event = asyncio.Event() self._wait_closed.set() self._no_more_listeners: asyncio.Event = asyncio.Event() self._no_more_listeners.set() if self.keep_alive_timeout is not None: self._next_keep_alive_message: asyncio.Event = asyncio.Event() self._next_keep_alive_message.set() self.payloads: Dict[str, Any] = {} """payloads is a dict which will contain the payloads received for example with the graphql-ws protocol: 'ping', 'pong', 'connection_ack'""" self._connecting: bool = False self.close_exception: Optional[Exception] = None # The list of 
supported subprotocols should be defined in the subclass self.supported_subprotocols: List[Subprotocol] = [] self.response_headers: Optional[Headers] = None async def _initialize(self): """Hook to send the initialization messages after the connection and potentially wait for the backend ack. """ pass # pragma: no cover async def _stop_listener(self, query_id: int): """Hook to stop to listen to a specific query. Will send a stop message in some subclasses. """ pass # pragma: no cover async def _after_connect(self): """Hook to add custom code for subclasses after the connection has been established. """ pass # pragma: no cover async def _after_initialize(self): """Hook to add custom code for subclasses after the initialization has been done. """ pass # pragma: no cover async def _close_hook(self): """Hook to add custom code for subclasses for the connection close""" pass # pragma: no cover async def _connection_terminate(self): """Hook to add custom code for subclasses after the initialization has been done. """ pass # pragma: no cover async def _send(self, message: str) -> None: """Send the provided message to the websocket connection and log the message""" if not self.websocket: raise TransportClosed( "Transport is not connected" ) from self.close_exception try: await self.websocket.send(message) log.info(">>> %s", message) except ConnectionClosed as e: await self._fail(e, clean_close=False) raise e async def _receive(self) -> str: """Wait the next message from the websocket connection and log the answer""" # It is possible that the websocket has been already closed in another task if self.websocket is None: raise TransportClosed("Transport is already closed") # Wait for the next websocket frame. 
Can raise ConnectionClosed data: Data = await self.websocket.recv() # websocket.recv() can return either str or bytes # In our case, we should receive only str here if not isinstance(data, str): raise TransportProtocolError("Binary data received in the websocket") answer: str = data log.info("<<< %s", answer) return answer @abstractmethod async def _send_query( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, ) -> int: raise NotImplementedError # pragma: no cover @abstractmethod def _parse_answer( self, answer: str ) -> Tuple[str, Optional[int], Optional[ExecutionResult]]: raise NotImplementedError # pragma: no cover async def _check_ws_liveness(self) -> None: """Coroutine which will periodically check the liveness of the connection through keep-alive messages """ try: while True: await asyncio.wait_for( self._next_keep_alive_message.wait(), self.keep_alive_timeout ) # Reset for the next iteration self._next_keep_alive_message.clear() except asyncio.TimeoutError: # No keep-alive message in the appriopriate interval, close with error # while trying to notify the server of a proper close (in case # the keep-alive interval of the client or server was not aligned # the connection still remains) # If the timeout happens during a close already in progress, do nothing if self.close_task is None: await self._fail( TransportServerError( "No keep-alive message has been received within " "the expected interval ('keep_alive_timeout' parameter)" ), clean_close=False, ) except asyncio.CancelledError: # The client is probably closing, handle it properly pass async def _receive_data_loop(self) -> None: """Main asyncio task which will listen to the incoming messages and will call the parse_answer and handle_answer methods of the subclass.""" try: while True: # Wait the next answer from the websocket server try: answer = await self._receive() except (ConnectionClosed, TransportProtocolError) as e: await 
self._fail(e, clean_close=False) break except TransportClosed: break # Parse the answer try: answer_type, answer_id, execution_result = self._parse_answer( answer ) except TransportQueryError as e: # Received an exception for a specific query # ==> Add an exception to this query queue # The exception is raised for this specific query, # but the transport is not closed. assert isinstance( e.query_id, int ), "TransportQueryError should have a query_id defined here" try: await self.listeners[e.query_id].set_exception(e) except KeyError: # Do nothing if no one is listening to this query_id pass continue except (TransportServerError, TransportProtocolError) as e: # Received a global exception for this transport # ==> close the transport # The exception will be raised for all current queries. await self._fail(e, clean_close=False) break await self._handle_answer(answer_type, answer_id, execution_result) finally: log.debug("Exiting _receive_data_loop()") async def _handle_answer( self, answer_type: str, answer_id: Optional[int], execution_result: Optional[ExecutionResult], ) -> None: try: # Put the answer in the queue if answer_id is not None: await self.listeners[answer_id].put((answer_type, execution_result)) except KeyError: # Do nothing if no one is listening to this query_id. pass async def subscribe( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, send_stop: Optional[bool] = True, ) -> AsyncGenerator[ExecutionResult, None]: """Send a query and receive the results using a python async generator. The query can be a graphql query, mutation or subscription. The results are sent as an ExecutionResult object. 
""" # Send the query and receive the id query_id: int = await self._send_query( document, variable_values, operation_name ) # Create a queue to receive the answers for this query_id listener = ListenerQueue(query_id, send_stop=(send_stop is True)) self.listeners[query_id] = listener # We will need to wait at close for this query to clean properly self._no_more_listeners.clear() try: # Loop over the received answers while True: # Wait for the answer from the queue of this query_id # This can raise a TransportError or ConnectionClosed exception. answer_type, execution_result = await listener.get() # If the received answer contains data, # Then we will yield the results back as an ExecutionResult object if execution_result is not None: yield execution_result # If we receive a 'complete' answer from the server, # Then we will end this async generator output without errors elif answer_type == "complete": log.debug( f"Complete received for query {query_id} --> exit without error" ) break except (asyncio.CancelledError, GeneratorExit) as e: log.debug(f"Exception in subscribe: {e!r}") if listener.send_stop: await self._stop_listener(query_id) listener.send_stop = False finally: log.debug(f"In subscribe finally for query_id {query_id}") self._remove_listener(query_id) async def execute( self, document: DocumentNode, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, ) -> ExecutionResult: """Execute the provided document AST against the configured remote server using the current session. Send a query but close the async generator as soon as we have the first answer. The result is sent as an ExecutionResult object. 
""" first_result = None generator = self.subscribe( document, variable_values, operation_name, send_stop=False ) async for result in generator: first_result = result # Note: we need to run generator.aclose() here or the finally block in # the subscribe will not be reached in pypy3 (python version 3.6.1) await generator.aclose() break if first_result is None: raise TransportQueryError( "Query completed without any answer received from the server" ) return first_result async def connect(self) -> None: """Coroutine which will: - connect to the websocket address - send the init message - wait for the connection acknowledge from the server - create an asyncio task which will be used to receive and parse the websocket answers Should be cleaned with a call to the close coroutine """ log.debug("connect: starting") if self.websocket is None and not self._connecting: # Set connecting to True to avoid a race condition if user is trying # to connect twice using the same client at the same time self._connecting = True # If the ssl parameter is not provided, # generate the ssl value depending on the url ssl: Optional[Union[SSLContext, bool]] if self.ssl: ssl = self.ssl else: ssl = True if self.url.startswith("wss") else None # Set default arguments used in the websockets.connect call connect_args: Dict[str, Any] = { "ssl": ssl, "extra_headers": self.headers, "subprotocols": self.supported_subprotocols, } # Adding custom parameters passed from init connect_args.update(self.connect_args) # Connection to the specified url # Generate a TimeoutError if taking more than connect_timeout seconds # Set the _connecting flag to False after in all cases try: self.websocket = await asyncio.wait_for( websockets.client.connect(self.url, **connect_args), self.connect_timeout, ) finally: self._connecting = False self.websocket = cast(WebSocketClientProtocol, self.websocket) self.response_headers = self.websocket.response_headers # Run the after_connect hook of the subclass await 
self._after_connect() self.next_query_id = 1 self.close_exception = None self._wait_closed.clear() # Send the init message and wait for the ack from the server # Note: This should generate a TimeoutError # if no ACKs are received within the ack_timeout try: await self._initialize() except ConnectionClosed as e: raise e except (TransportProtocolError, asyncio.TimeoutError) as e: await self._fail(e, clean_close=False) raise e # Run the after_init hook of the subclass await self._after_initialize() # If specified, create a task to check liveness of the connection # through keep-alive messages if self.keep_alive_timeout is not None: self.check_keep_alive_task = asyncio.ensure_future( self._check_ws_liveness() ) # Create a task to listen to the incoming websocket messages self.receive_data_task = asyncio.ensure_future(self._receive_data_loop()) else: raise TransportAlreadyConnected("Transport is already connected") log.debug("connect: done") def _remove_listener(self, query_id) -> None: """After exiting from a subscription, remove the listener and signal an event if this was the last listener for the client. 
""" if query_id in self.listeners: del self.listeners[query_id] remaining = len(self.listeners) log.debug(f"listener {query_id} deleted, {remaining} remaining") if remaining == 0: self._no_more_listeners.set() async def _clean_close(self, e: Exception) -> None: """Coroutine which will: - send stop messages for each active subscription to the server - send the connection terminate message """ # Send 'stop' message for all current queries for query_id, listener in self.listeners.items(): if listener.send_stop: await self._stop_listener(query_id) listener.send_stop = False # Wait that there is no more listeners (we received 'complete' for all queries) try: await asyncio.wait_for(self._no_more_listeners.wait(), self.close_timeout) except asyncio.TimeoutError: # pragma: no cover log.debug("Timer close_timeout fired") # Calling the subclass hook await self._connection_terminate() async def _close_coro(self, e: Exception, clean_close: bool = True) -> None: """Coroutine which will: - do a clean_close if possible: - send stop messages for each active query to the server - send the connection terminate message - close the websocket connection - send the exception to all the remaining listeners """ log.debug("_close_coro: starting") try: # We should always have an active websocket connection here assert self.websocket is not None # Properly shut down liveness checker if enabled if self.check_keep_alive_task is not None: # More info: https://stackoverflow.com/a/43810272/1113207 self.check_keep_alive_task.cancel() with suppress(asyncio.CancelledError): await self.check_keep_alive_task # Calling the subclass close hook await self._close_hook() # Saving exception to raise it later if trying to use the transport # after it has already closed. 
self.close_exception = e if clean_close: log.debug("_close_coro: starting clean_close") try: await self._clean_close(e) except Exception as exc: # pragma: no cover log.warning("Ignoring exception in _clean_close: " + repr(exc)) log.debug("_close_coro: sending exception to listeners") # Send an exception to all remaining listeners for query_id, listener in self.listeners.items(): await listener.set_exception(e) log.debug("_close_coro: close websocket connection") await self.websocket.close() log.debug("_close_coro: websocket connection closed") except Exception as exc: # pragma: no cover log.warning("Exception catched in _close_coro: " + repr(exc)) finally: log.debug("_close_coro: start cleanup") self.websocket = None self.close_task = None self.check_keep_alive_task = None self._wait_closed.set() log.debug("_close_coro: exiting") async def _fail(self, e: Exception, clean_close: bool = True) -> None: log.debug("_fail: starting with exception: " + repr(e)) if self.close_task is None: if self.websocket is None: log.debug("_fail started with self.websocket == None -> already closed") else: self.close_task = asyncio.shield( asyncio.ensure_future(self._close_coro(e, clean_close=clean_close)) ) else: log.debug( "close_task is not None in _fail. 
Previous exception is: " + repr(self.close_exception) + " New exception is: " + repr(e) ) async def close(self) -> None: log.debug("close: starting") await self._fail(TransportClosed("Websocket GraphQL transport closed by user")) await self.wait_closed() log.debug("close: done") async def wait_closed(self) -> None: log.debug("wait_close: starting") await self._wait_closed.wait() log.debug("wait_close: done") gql-3.6.0b2/gql/utilities/000077500000000000000000000000001460703211500153245ustar00rootroot00000000000000gql-3.6.0b2/gql/utilities/__init__.py000066400000000000000000000012231460703211500174330ustar00rootroot00000000000000from .build_client_schema import build_client_schema from .get_introspection_query_ast import get_introspection_query_ast from .node_tree import node_tree from .parse_result import parse_result from .serialize_variable_values import serialize_value, serialize_variable_values from .update_schema_enum import update_schema_enum from .update_schema_scalars import update_schema_scalar, update_schema_scalars __all__ = [ "build_client_schema", "node_tree", "parse_result", "get_introspection_query_ast", "serialize_variable_values", "serialize_value", "update_schema_enum", "update_schema_scalars", "update_schema_scalar", ] gql-3.6.0b2/gql/utilities/build_client_schema.py000066400000000000000000000061201460703211500216520ustar00rootroot00000000000000from graphql import DirectiveLocation, GraphQLSchema, IntrospectionQuery from graphql import build_client_schema as build_client_schema_orig from graphql.pyutils import inspect from graphql.utilities.get_introspection_query import IntrospectionDirective __all__ = ["build_client_schema"] INCLUDE_DIRECTIVE_JSON: IntrospectionDirective = { "name": "include", "description": ( "Directs the executor to include this field or fragment " "only when the `if` argument is true." 
), "locations": [ DirectiveLocation.FIELD, DirectiveLocation.FRAGMENT_SPREAD, DirectiveLocation.INLINE_FRAGMENT, ], "args": [ { "name": "if", "description": "Included when true.", "type": { "kind": "NON_NULL", "name": "None", "ofType": {"kind": "SCALAR", "name": "Boolean", "ofType": "None"}, }, "defaultValue": "None", } ], } SKIP_DIRECTIVE_JSON: IntrospectionDirective = { "name": "skip", "description": ( "Directs the executor to skip this field or fragment " "when the `if` argument is true." ), "locations": [ DirectiveLocation.FIELD, DirectiveLocation.FRAGMENT_SPREAD, DirectiveLocation.INLINE_FRAGMENT, ], "args": [ { "name": "if", "description": "Skipped when true.", "type": { "kind": "NON_NULL", "name": "None", "ofType": {"kind": "SCALAR", "name": "Boolean", "ofType": "None"}, }, "defaultValue": "None", } ], } def build_client_schema(introspection: IntrospectionQuery) -> GraphQLSchema: """This is an alternative to the graphql-core function :code:`build_client_schema` but with default include and skip directives added to the schema to fix `issue #278 `_ .. warning:: This function will be removed once the issue `graphql-js#3419 `_ has been fixed and ported to graphql-core so don't use it outside gql. """ if not isinstance(introspection, dict) or not isinstance( introspection.get("__schema"), dict ): raise TypeError( "Invalid or incomplete introspection result. Ensure that you" " are passing the 'data' attribute of an introspection response" f" and no 'errors' were returned alongside: {inspect(introspection)}." 
) schema_introspection = introspection["__schema"] directives = schema_introspection.get("directives", None) if directives is None: schema_introspection["directives"] = directives = [] if not any(directive["name"] == "skip" for directive in directives): directives.append(SKIP_DIRECTIVE_JSON) if not any(directive["name"] == "include" for directive in directives): directives.append(INCLUDE_DIRECTIVE_JSON) return build_client_schema_orig(introspection, assume_valid=False) gql-3.6.0b2/gql/utilities/get_introspection_query_ast.py000066400000000000000000000074311460703211500235360ustar00rootroot00000000000000from itertools import repeat from graphql import DocumentNode, GraphQLSchema from gql.dsl import DSLFragment, DSLMetaField, DSLQuery, DSLSchema, dsl_gql def get_introspection_query_ast( descriptions: bool = True, specified_by_url: bool = False, directive_is_repeatable: bool = False, schema_description: bool = False, type_recursion_level: int = 7, ) -> DocumentNode: """Get a query for introspection as a document using the DSL module. Equivalent to the get_introspection_query function from graphql-core but using the DSL module and allowing to select the recursion level. Optionally, you can exclude descriptions, include specification URLs, include repeatability of directives, and specify whether to include the schema description as well. 
""" ds = DSLSchema(GraphQLSchema()) fragment_FullType = DSLFragment("FullType").on(ds.__Type) fragment_InputValue = DSLFragment("InputValue").on(ds.__InputValue) fragment_TypeRef = DSLFragment("TypeRef").on(ds.__Type) schema = DSLMetaField("__schema") if descriptions and schema_description: schema.select(ds.__Schema.description) schema.select( ds.__Schema.queryType.select(ds.__Type.name), ds.__Schema.mutationType.select(ds.__Type.name), ds.__Schema.subscriptionType.select(ds.__Type.name), ) schema.select(ds.__Schema.types.select(fragment_FullType)) directives = ds.__Schema.directives.select(ds.__Directive.name) if descriptions: directives.select(ds.__Directive.description) if directive_is_repeatable: directives.select(ds.__Directive.isRepeatable) directives.select( ds.__Directive.locations, ds.__Directive.args.select(fragment_InputValue), ) schema.select(directives) fragment_FullType.select( ds.__Type.kind, ds.__Type.name, ) if descriptions: fragment_FullType.select(ds.__Type.description) if specified_by_url: fragment_FullType.select(ds.__Type.specifiedByURL) fields = ds.__Type.fields(includeDeprecated=True).select(ds.__Field.name) if descriptions: fields.select(ds.__Field.description) fields.select( ds.__Field.args.select(fragment_InputValue), ds.__Field.type.select(fragment_TypeRef), ds.__Field.isDeprecated, ds.__Field.deprecationReason, ) enum_values = ds.__Type.enumValues(includeDeprecated=True).select( ds.__EnumValue.name ) if descriptions: enum_values.select(ds.__EnumValue.description) enum_values.select( ds.__EnumValue.isDeprecated, ds.__EnumValue.deprecationReason, ) fragment_FullType.select( fields, ds.__Type.inputFields.select(fragment_InputValue), ds.__Type.interfaces.select(fragment_TypeRef), enum_values, ds.__Type.possibleTypes.select(fragment_TypeRef), ) fragment_InputValue.select(ds.__InputValue.name) if descriptions: fragment_InputValue.select(ds.__InputValue.description) fragment_InputValue.select( ds.__InputValue.type.select(fragment_TypeRef), 
ds.__InputValue.defaultValue, ) fragment_TypeRef.select( ds.__Type.kind, ds.__Type.name, ) if type_recursion_level >= 1: current_field = ds.__Type.ofType.select(ds.__Type.kind, ds.__Type.name) fragment_TypeRef.select(current_field) for _ in repeat(None, type_recursion_level - 1): new_oftype = ds.__Type.ofType.select(ds.__Type.kind, ds.__Type.name) current_field.select(new_oftype) current_field = new_oftype query = DSLQuery(schema) query.name = "IntrospectionQuery" dsl_query = dsl_gql(query, fragment_FullType, fragment_InputValue, fragment_TypeRef) return dsl_query gql-3.6.0b2/gql/utilities/node_tree.py000066400000000000000000000052141460703211500176440ustar00rootroot00000000000000from typing import Any, Iterable, List, Optional, Sized from graphql import Node def _node_tree_recursive( obj: Any, *, indent: int = 0, ignored_keys: List, ): assert ignored_keys is not None results = [] if hasattr(obj, "__slots__"): results.append(" " * indent + f"{type(obj).__name__}") try: keys = obj.keys except AttributeError: # If the object has no keys attribute, print its repr and return. results.append(" " * (indent + 1) + repr(obj)) else: for key in keys: if key in ignored_keys: continue attr_value = getattr(obj, key, None) results.append(" " * (indent + 1) + f"{key}:") if isinstance(attr_value, Iterable) and not isinstance( attr_value, (str, bytes) ): if isinstance(attr_value, Sized) and len(attr_value) == 0: results.append( " " * (indent + 2) + f"empty {type(attr_value).__name__}" ) else: for item in attr_value: results.append( _node_tree_recursive( item, indent=indent + 2, ignored_keys=ignored_keys, ) ) else: results.append( _node_tree_recursive( attr_value, indent=indent + 2, ignored_keys=ignored_keys, ) ) else: results.append(" " * indent + repr(obj)) return "\n".join(results) def node_tree( obj: Node, *, ignore_loc: bool = True, ignore_block: bool = True, ignored_keys: Optional[List] = None, ): """Method which returns a tree of Node elements as a String. 
Useful to debug deep DocumentNode instances created by gql or dsl_gql. WARNING: the output of this method is not guaranteed and may change without notice. """ assert isinstance(obj, Node) if ignored_keys is None: ignored_keys = [] if ignore_loc: # We are ignoring loc attributes by default ignored_keys.append("loc") if ignore_block: # We are ignoring block attributes by default (in StringValueNode) ignored_keys.append("block") return _node_tree_recursive(obj, ignored_keys=ignored_keys) gql-3.6.0b2/gql/utilities/parse_result.py000066400000000000000000000341771460703211500204220ustar00rootroot00000000000000import logging from typing import Any, Dict, Iterable, List, Mapping, Optional, Tuple, Union, cast from graphql import ( IDLE, REMOVE, DocumentNode, FieldNode, FragmentDefinitionNode, FragmentSpreadNode, GraphQLError, GraphQLInterfaceType, GraphQLList, GraphQLNonNull, GraphQLObjectType, GraphQLSchema, GraphQLType, InlineFragmentNode, NameNode, Node, OperationDefinitionNode, SelectionSetNode, TypeInfo, TypeInfoVisitor, Visitor, is_leaf_type, print_ast, visit, ) from graphql.language.visitor import VisitorActionEnum from graphql.pyutils import inspect log = logging.getLogger(__name__) # Equivalent to QUERY_DOCUMENT_KEYS but only for fields interesting to # visit to parse the results RESULT_DOCUMENT_KEYS: Dict[str, Tuple[str, ...]] = { "document": ("definitions",), "operation_definition": ("selection_set",), "selection_set": ("selections",), "field": ("selection_set",), "inline_fragment": ("selection_set",), "fragment_definition": ("selection_set",), } def _ignore_non_null(type_: GraphQLType): """Removes the GraphQLNonNull wrappings around types.""" if isinstance(type_, GraphQLNonNull): return type_.of_type else: return type_ def _get_fragment(document, fragment_name): """Returns a fragment from the document.""" for definition in document.definitions: if isinstance(definition, FragmentDefinitionNode): if definition.name.value == fragment_name: return definition raise 
GraphQLError(f'Fragment "{fragment_name}" not found in document!') class ParseResultVisitor(Visitor): def __init__( self, schema: GraphQLSchema, document: DocumentNode, node: Node, result: Dict[str, Any], type_info: TypeInfo, visit_fragment: bool = False, inside_list_level: int = 0, operation_name: Optional[str] = None, ): """Recursive Implementation of a Visitor class to parse results correspondind to a schema and a document. Using a TypeInfo class to get the node types during traversal. If we reach a list in the results, then we parse each item of the list recursively, traversing the same nodes of the query again. During traversal, we keep the current position in the result in the result_stack field. Alongside the field type, we calculate the "result type" which is computed from the field type and the current recursive level we are for this field (:code:`inside_list_level` argument). """ self.schema: GraphQLSchema = schema self.document: DocumentNode = document self.node: Node = node self.result: Dict[str, Any] = result self.type_info: TypeInfo = type_info self.visit_fragment: bool = visit_fragment self.inside_list_level = inside_list_level self.operation_name = operation_name self.result_stack: List[Any] = [] super().__init__() @property def current_result(self): try: return self.result_stack[-1] except IndexError: return self.result @staticmethod def leave_document(node: DocumentNode, *_args: Any) -> Dict[str, Any]: results = cast(List[Dict[str, Any]], node.definitions) return {k: v for result in results for k, v in result.items()} def enter_operation_definition( self, node: OperationDefinitionNode, *_args: Any ) -> Union[None, VisitorActionEnum]: if self.operation_name is not None: if not hasattr(node.name, "value"): return REMOVE # pragma: no cover node.name = cast(NameNode, node.name) if node.name.value != self.operation_name: log.debug(f"SKIPPING operation {node.name.value}") return REMOVE return IDLE @staticmethod def leave_operation_definition( node: 
OperationDefinitionNode, *_args: Any ) -> Dict[str, Any]: selections = cast(List[Dict[str, Any]], node.selection_set) return {k: v for s in selections for k, v in s.items()} @staticmethod def leave_selection_set(node: SelectionSetNode, *_args: Any) -> Dict[str, Any]: partial_results = cast(Dict[str, Any], node.selections) return partial_results @staticmethod def in_first_field(path): return path.count("selections") <= 1 def get_current_result_type(self, path): field_type = self.type_info.get_type() list_level = self.inside_list_level result_type = _ignore_non_null(field_type) if self.in_first_field(path): while list_level > 0: assert isinstance(result_type, GraphQLList) result_type = _ignore_non_null(result_type.of_type) list_level -= 1 return result_type def enter_field( self, node: FieldNode, key: str, parent: Node, path: List[Node], ancestors: List[Node], ) -> Union[None, VisitorActionEnum, Dict[str, Any]]: name = node.alias.value if node.alias else node.name.value if log.isEnabledFor(logging.DEBUG): log.debug(f"Enter field {name}") log.debug(f" path={path!r}") log.debug(f" current_result={self.current_result!r}") if self.current_result is None: # Result was null for this field -> remove return REMOVE elif isinstance(self.current_result, Mapping): try: result_value = self.current_result[name] except KeyError: # Key not found in result. # Should never happen in theory with a correct GraphQL backend # Silently ignoring this field log.debug(f" Key {name} not found in result --> REMOVE") return REMOVE log.debug(f" result_value={result_value}") # We get the field_type from type_info field_type = self.type_info.get_type() # We calculate a virtual "result type" depending on our recursion level. result_type = self.get_current_result_type(path) # If the result for this field is a list, then we need # to recursively visit the same node multiple times for each # item in the list. 
if ( not isinstance(result_value, Mapping) and isinstance(result_value, Iterable) and not isinstance(result_value, str) and not is_leaf_type(result_type) ): # Finding out the inner type of the list inner_type = _ignore_non_null(result_type.of_type) if log.isEnabledFor(logging.DEBUG): log.debug(" List detected:") log.debug(f" field_type={inspect(field_type)}") log.debug(f" result_type={inspect(result_type)}") log.debug(f" inner_type={inspect(inner_type)}\n") visits: List[Dict[str, Any]] = [] # Get parent type initial_type = self.type_info.get_parent_type() assert isinstance( initial_type, (GraphQLObjectType, GraphQLInterfaceType) ) # Get parent SelectionSet node selection_set_node = ancestors[-1] assert isinstance(selection_set_node, SelectionSetNode) # Keep only the current node in a new selection set node new_node = SelectionSetNode(selections=[node]) for item in result_value: new_result = {name: item} if log.isEnabledFor(logging.DEBUG): log.debug(f" recursive new_result={new_result}") log.debug(f" recursive ast={print_ast(node)}") log.debug(f" recursive path={path!r}") log.debug(f" recursive initial_type={initial_type!r}\n") if self.in_first_field(path): inside_list_level = self.inside_list_level + 1 else: inside_list_level = 1 inner_visit = parse_result_recursive( self.schema, self.document, new_node, new_result, initial_type=initial_type, inside_list_level=inside_list_level, ) log.debug(f" recursive result={inner_visit}\n") inner_visit = cast(List[Dict[str, Any]], inner_visit) visits.append(inner_visit[0][name]) result_value = {name: visits} log.debug(f" recursive visits final result = {result_value}\n") return result_value # If the result for this field is not a list, then add it # to the result stack so that it becomes the current_value # for the next inner fields self.result_stack.append(result_value) return IDLE raise GraphQLError( f"Invalid result for container of field {name}: {self.current_result!r}" ) def leave_field( self, node: FieldNode, key: str, 
parent: Node, path: List[Node], ancestors: List[Node], ) -> Dict[str, Any]: name = cast(str, node.alias.value if node.alias else node.name.value) log.debug(f"Leave field {name}") if self.current_result is None: return_value = None elif node.selection_set is None: field_type = self.type_info.get_type() result_type = self.get_current_result_type(path) if log.isEnabledFor(logging.DEBUG): log.debug(f" field type of {name} is {inspect(field_type)}") log.debug(f" result type of {name} is {inspect(result_type)}") assert is_leaf_type(result_type) # Finally parsing a single scalar using the parse_value method return_value = result_type.parse_value(self.current_result) else: partial_results = cast(List[Dict[str, Any]], node.selection_set) return_value = {k: v for pr in partial_results for k, v in pr.items()} # Go up a level in the result stack self.result_stack.pop() log.debug(f"Leave field {name}: returning {return_value}") return {name: return_value} # Fragments def enter_fragment_definition( self, node: FragmentDefinitionNode, *_args: Any ) -> Union[None, VisitorActionEnum]: if log.isEnabledFor(logging.DEBUG): log.debug(f"Enter fragment definition {node.name.value}.") log.debug(f"visit_fragment={self.visit_fragment!s}") if self.visit_fragment: return IDLE else: return REMOVE @staticmethod def leave_fragment_definition( node: FragmentDefinitionNode, *_args: Any ) -> Dict[str, Any]: selections = cast(List[Dict[str, Any]], node.selection_set) return {k: v for s in selections for k, v in s.items()} def leave_fragment_spread( self, node: FragmentSpreadNode, *_args: Any ) -> Dict[str, Any]: fragment_name = node.name.value log.debug(f"Start recursive fragment visit {fragment_name}") fragment_node = _get_fragment(self.document, fragment_name) fragment_result = parse_result_recursive( self.schema, self.document, fragment_node, self.current_result, visit_fragment=True, ) log.debug( f"Result of recursive fragment visit {fragment_name}: {fragment_result}" ) return cast(Dict[str, 
Any], fragment_result) @staticmethod def leave_inline_fragment(node: InlineFragmentNode, *_args: Any) -> Dict[str, Any]: selections = cast(List[Dict[str, Any]], node.selection_set) return {k: v for s in selections for k, v in s.items()} def parse_result_recursive( schema: GraphQLSchema, document: DocumentNode, node: Node, result: Optional[Dict[str, Any]], initial_type: Optional[GraphQLType] = None, inside_list_level: int = 0, visit_fragment: bool = False, operation_name: Optional[str] = None, ) -> Any: if result is None: return None type_info = TypeInfo(schema, initial_type=initial_type) visited = visit( node, TypeInfoVisitor( type_info, ParseResultVisitor( schema, document, node, result, type_info=type_info, inside_list_level=inside_list_level, visit_fragment=visit_fragment, operation_name=operation_name, ), ), visitor_keys=RESULT_DOCUMENT_KEYS, ) return visited def parse_result( schema: GraphQLSchema, document: DocumentNode, result: Optional[Dict[str, Any]], operation_name: Optional[str] = None, ) -> Optional[Dict[str, Any]]: """Unserialize a result received from a GraphQL backend. :param schema: the GraphQL schema :param document: the document representing the query sent to the backend :param result: the serialized result received from the backend :param operation_name: the optional operation name :returns: a parsed result with scalars and enums parsed depending on their definition in the schema. Given a schema, a query and a serialized result, provide a new result with parsed values. If the result contains only built-in GraphQL scalars (String, Int, Float, ...) then the parsed result should be unchanged. 
If the result contains custom scalars or enums, then those values will be parsed with the parse_value method of the custom scalar or enum definition in the schema.""" return parse_result_recursive( schema, document, document, result, operation_name=operation_name ) gql-3.6.0b2/gql/utilities/serialize_variable_values.py000066400000000000000000000102451460703211500231130ustar00rootroot00000000000000from typing import Any, Dict, Optional from graphql import ( DocumentNode, GraphQLEnumType, GraphQLError, GraphQLInputObjectType, GraphQLList, GraphQLNonNull, GraphQLScalarType, GraphQLSchema, GraphQLType, GraphQLWrappingType, OperationDefinitionNode, type_from_ast, ) from graphql.pyutils import inspect def _get_document_operation( document: DocumentNode, operation_name: Optional[str] = None ) -> OperationDefinitionNode: """Returns the operation which should be executed in the document. Raises a GraphQLError if a single operation cannot be retrieved. """ operation: Optional[OperationDefinitionNode] = None for definition in document.definitions: if isinstance(definition, OperationDefinitionNode): if operation_name is None: if operation: raise GraphQLError( "Must provide operation name" " if query contains multiple operations." ) operation = definition elif definition.name and definition.name.value == operation_name: operation = definition if not operation: if operation_name is not None: raise GraphQLError(f"Unknown operation named '{operation_name}'.") # The following line should never happen normally as the document is # already verified before calling this function. raise GraphQLError("Must provide an operation.") # pragma: no cover return operation def serialize_value(type_: GraphQLType, value: Any) -> Any: """Given a GraphQL type and a Python value, return the serialized value. This method will serialize the value recursively, entering into lists and dicts. Can be used to serialize Enums and/or Custom Scalars in variable values. 
:param type_: the GraphQL type :param value: the provided value """ if value is None: if isinstance(type_, GraphQLNonNull): # raise GraphQLError(f"Type {type_.of_type.name} Cannot be None.") raise GraphQLError(f"Type {inspect(type_)} Cannot be None.") else: return None if isinstance(type_, GraphQLWrappingType): inner_type = type_.of_type if isinstance(type_, GraphQLNonNull): return serialize_value(inner_type, value) elif isinstance(type_, GraphQLList): return [serialize_value(inner_type, v) for v in value] elif isinstance(type_, (GraphQLScalarType, GraphQLEnumType)): return type_.serialize(value) elif isinstance(type_, GraphQLInputObjectType): return { field_name: serialize_value(field.type, value[field_name]) for field_name, field in type_.fields.items() if field_name in value } raise GraphQLError(f"Impossible to serialize value with type: {inspect(type_)}.") def serialize_variable_values( schema: GraphQLSchema, document: DocumentNode, variable_values: Dict[str, Any], operation_name: Optional[str] = None, ) -> Dict[str, Any]: """Given a GraphQL document and a schema, serialize the Dictionary of variable values. Useful to serialize Enums and/or Custom Scalars in variable values. :param schema: the GraphQL schema :param document: the document representing the query sent to the backend :param variable_values: the dictionnary of variable values which needs to be serialized. :param operation_name: the optional operation_name for the query. 
""" parsed_variable_values: Dict[str, Any] = {} # Find the operation in the document operation = _get_document_operation(document, operation_name=operation_name) # Serialize every variable value defined for the operation for var_def_node in operation.variable_definitions: var_name = var_def_node.variable.name.value var_type = type_from_ast(schema, var_def_node.type) if var_name in variable_values: assert var_type is not None var_value = variable_values[var_name] parsed_variable_values[var_name] = serialize_value(var_type, var_value) return parsed_variable_values gql-3.6.0b2/gql/utilities/update_schema_enum.py000066400000000000000000000045301460703211500215260ustar00rootroot00000000000000from enum import Enum from typing import Any, Dict, Mapping, Type, Union, cast from graphql import GraphQLEnumType, GraphQLSchema def update_schema_enum( schema: GraphQLSchema, name: str, values: Union[Dict[str, Any], Type[Enum]], use_enum_values: bool = False, ): """Update in the schema the GraphQLEnumType corresponding to the given name. Example:: from enum import Enum class Color(Enum): RED = 0 GREEN = 1 BLUE = 2 update_schema_enum(schema, 'Color', Color) :param schema: a GraphQL Schema already containing the GraphQLEnumType type. :param name: the name of the enum in the GraphQL schema :param values: Either a Python Enum or a dict of values. The keys of the provided values should correspond to the keys of the existing enum in the schema. :param use_enum_values: By default, we configure the GraphQLEnumType to serialize to enum instances (ie: .parse_value() returns Color.RED). If use_enum_values is set to True, then .parse_value() returns 0. use_enum_values=True is the defaut behaviour when passing an Enum to a GraphQLEnumType. 
""" # Convert Enum values to Dict if isinstance(values, type): if issubclass(values, Enum): values = cast(Type[Enum], values) if use_enum_values: values = {enum.name: enum.value for enum in values} else: values = {enum.name: enum for enum in values} if not isinstance(values, Mapping): raise TypeError(f"Invalid type for enum values: {type(values)}") # Find enum type in schema schema_enum = schema.get_type(name) if schema_enum is None: raise KeyError(f"Enum {name} not found in schema!") if not isinstance(schema_enum, GraphQLEnumType): raise TypeError( f'The type "{name}" is not a GraphQLEnumType, it is a {type(schema_enum)}' ) # Replace all enum values for enum_name, enum_value in schema_enum.values.items(): try: enum_value.value = values[enum_name] except KeyError: raise KeyError(f'Enum key "{enum_name}" not found in provided values!') # Delete the _value_lookup cached property if "_value_lookup" in schema_enum.__dict__: del schema_enum.__dict__["_value_lookup"] gql-3.6.0b2/gql/utilities/update_schema_scalars.py000066400000000000000000000043501460703211500222120ustar00rootroot00000000000000from typing import Iterable, List from graphql import GraphQLScalarType, GraphQLSchema def update_schema_scalar(schema: GraphQLSchema, name: str, scalar: GraphQLScalarType): """Update the scalar in a schema with the scalar provided. :param schema: the GraphQL schema :param name: the name of the custom scalar type in the schema :param scalar: a provided scalar type This can be used to update the default Custom Scalar implementation when the schema has been provided from a text file or from introspection. 
""" if not isinstance(scalar, GraphQLScalarType): raise TypeError("Scalars should be instances of GraphQLScalarType.") schema_scalar = schema.get_type(name) if schema_scalar is None: raise KeyError(f"Scalar '{name}' not found in schema.") if not isinstance(schema_scalar, GraphQLScalarType): raise TypeError( f'The type "{name}" is not a GraphQLScalarType,' f" it is a {type(schema_scalar)}" ) # Update the conversion methods # Using setattr because mypy has a false positive # https://github.com/python/mypy/issues/2427 setattr(schema_scalar, "serialize", scalar.serialize) setattr(schema_scalar, "parse_value", scalar.parse_value) setattr(schema_scalar, "parse_literal", scalar.parse_literal) def update_schema_scalars(schema: GraphQLSchema, scalars: List[GraphQLScalarType]): """Update the scalars in a schema with the scalars provided. :param schema: the GraphQL schema :param scalars: a list of provided scalar types This can be used to update the default Custom Scalar implementation when the schema has been provided from a text file or from introspection. If the name of the provided scalar is different than the name of the custom scalar, then you should use the :func:`update_schema_scalar ` method instead. """ if not isinstance(scalars, Iterable): raise TypeError("Scalars argument should be a list of scalars.") for scalar in scalars: if not isinstance(scalar, GraphQLScalarType): raise TypeError("Scalars should be instances of GraphQLScalarType.") update_schema_scalar(schema, scalar.name, scalar) gql-3.6.0b2/gql/utils.py000066400000000000000000000034721460703211500150310ustar00rootroot00000000000000"""Utilities to manipulate several python objects.""" from typing import Any, Dict, List, Tuple, Type # From this response in Stackoverflow # http://stackoverflow.com/a/19053800/1072990 def to_camel_case(snake_str): components = snake_str.split("_") # We capitalize the first letter of each component except the first one # with the 'title' method and join them together. 
return components[0] + "".join(x.title() if x else "_" for x in components[1:]) def extract_files( variables: Dict, file_classes: Tuple[Type[Any], ...] ) -> Tuple[Dict, Dict]: files = {} def recurse_extract(path, obj): """ recursively traverse obj, doing a deepcopy, but replacing any file-like objects with nulls and shunting the originals off to the side. """ nonlocal files if isinstance(obj, list): nulled_obj = [] for key, value in enumerate(obj): value = recurse_extract(f"{path}.{key}", value) nulled_obj.append(value) return nulled_obj elif isinstance(obj, dict): nulled_obj = {} for key, value in obj.items(): value = recurse_extract(f"{path}.{key}", value) nulled_obj[key] = value return nulled_obj elif isinstance(obj, file_classes): # extract obj from its parent and put it into files instead. files[path] = obj return None else: # base case: pass through unchanged return obj nulled_variables = recurse_extract("variables", variables) return nulled_variables, files def str_first_element(errors: List) -> str: try: first_error = errors[0] except (KeyError, TypeError): first_error = errors return str(first_error) gql-3.6.0b2/setup.cfg000066400000000000000000000005121460703211500143450ustar00rootroot00000000000000[wheel] universal = 1 [flake8] max-line-length = 88 [isort] known_standard_library = ssl known_first_party = gql multi_line_output = 3 include_trailing_comma = True line_length = 88 not_skip = __init__.py [mypy] ignore_missing_imports = true [tool:pytest] norecursedirs = venv .venv .tox .git .cache .mypy_cache .pytest_cache gql-3.6.0b2/setup.py000066400000000000000000000064611460703211500142470ustar00rootroot00000000000000import os from setuptools import setup, find_packages install_requires = [ "graphql-core>=3.3.0a3,<3.4", "yarl>=1.6,<2.0", "backoff>=1.11.1,<3.0", "anyio>=3.0,<5", ] console_scripts = [ "gql-cli=gql.cli:gql_cli", ] tests_requires = [ "parse==1.15.0", "pytest==7.4.2", "pytest-asyncio==0.21.1", "pytest-console-scripts==1.3.1", 
"pytest-cov==3.0.0", "mock==4.0.2", "vcrpy==4.4.0", "aiofiles", ] dev_requires = [ "black==22.3.0", "check-manifest>=0.42,<1", "flake8==3.8.1", "isort==4.3.21", "mypy==0.910", "sphinx>=5.3.0,<6", "sphinx_rtd_theme>=0.4,<1", "sphinx-argparse==0.2.5", "types-aiofiles", "types-mock", "types-requests", ] + tests_requires install_aiohttp_requires = [ "aiohttp>=3.8.0,<4;python_version<='3.11'", "aiohttp>=3.9.0b0,<4;python_version>'3.11'", ] install_requests_requires = [ "requests>=2.26,<3", "requests_toolbelt>=1.0.0,<2", ] install_httpx_requires = [ "httpx>=0.23.1,<1", ] install_websockets_requires = [ "websockets>=10,<12", ] install_botocore_requires = [ "botocore>=1.21,<2", ] install_all_requires = ( install_aiohttp_requires + install_requests_requires + install_httpx_requires + install_websockets_requires + install_botocore_requires ) # Get version from __version__.py file current_folder = os.path.abspath(os.path.dirname(__file__)) about = {} with open(os.path.join(current_folder, "gql", "__version__.py"), "r") as f: exec(f.read(), about) setup( name="gql", version=about["__version__"], description="GraphQL client for Python", long_description=open("README.md").read(), long_description_content_type="text/markdown", url="https://github.com/graphql-python/gql", author="Syrus Akbary", author_email="me@syrusakbary.com", license="MIT", classifiers=[ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Topic :: Software Development :: Libraries", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: Implementation :: PyPy", ], keywords="api graphql protocol rest relay gql client", packages=find_packages(include=["gql*"]), # PEP-561: 
https://www.python.org/dev/peps/pep-0561/ package_data={"gql": ["py.typed"]}, install_requires=install_requires, tests_require=install_all_requires + tests_requires, extras_require={ "all": install_all_requires, "test": install_all_requires + tests_requires, "test_no_transport": tests_requires, "dev": install_all_requires + dev_requires, "aiohttp": install_aiohttp_requires, "requests": install_requests_requires, "httpx": install_httpx_requires, "websockets": install_websockets_requires, "botocore": install_botocore_requires, }, include_package_data=True, zip_safe=False, platforms="any", entry_points={"console_scripts": console_scripts}, ) gql-3.6.0b2/tests/000077500000000000000000000000001460703211500136705ustar00rootroot00000000000000gql-3.6.0b2/tests/__init__.py000066400000000000000000000000001460703211500157670ustar00rootroot00000000000000gql-3.6.0b2/tests/conftest.py000066400000000000000000000361141460703211500160740ustar00rootroot00000000000000import asyncio import json import logging import os import pathlib import re import ssl import sys import tempfile import types from concurrent.futures import ThreadPoolExecutor from typing import Union import pytest import pytest_asyncio from gql import Client all_transport_dependencies = ["aiohttp", "requests", "httpx", "websockets", "botocore"] def pytest_addoption(parser): parser.addoption( "--run-online", action="store_true", default=False, help="run tests necessitating online resources", ) for transport in all_transport_dependencies: parser.addoption( f"--{transport}-only", action="store_true", default=False, help=f"run tests necessitating only the {transport} dependency", ) def pytest_configure(config): config.addinivalue_line( "markers", "online: mark test as necessitating external online resources" ) for transport in all_transport_dependencies: config.addinivalue_line( "markers", f"{transport}: mark test as necessitating the {transport} dependency", ) def pytest_collection_modifyitems(config, items): # 
--run-online given in cli: do not skip online tests if not config.getoption("--run-online"): skip_online = pytest.mark.skip(reason="need --run-online option to run") for item in items: if "online" in item.keywords: item.add_marker(skip_online) # --aiohttp-only # --requests-only # --httpx-only # --websockets-only for transport in all_transport_dependencies: other_transport_dependencies = [ t for t in all_transport_dependencies if t != transport ] if config.getoption(f"--{transport}-only"): skip_transport = pytest.mark.skip( reason=f"need another dependency than {transport}" ) for item in items: # Check if we have a dependency transport # other than the requested transport if any(t in item.keywords for t in other_transport_dependencies): item.add_marker(skip_transport) async def aiohttp_server_base(with_ssl=False): """Factory to create a TestServer instance, given an app. aiohttp_server(app, **kwargs) """ from aiohttp.test_utils import TestServer as AIOHTTPTestServer servers = [] async def go(app, *, port=None, **kwargs): # type: ignore server = AIOHTTPTestServer(app, port=port) start_server_args = {**kwargs} if with_ssl: testcert, ssl_context = get_localhost_ssl_context() start_server_args["ssl"] = ssl_context await server.start_server(**start_server_args) servers.append(server) return server yield go while servers: await servers.pop().close() @pytest_asyncio.fixture async def aiohttp_server(): async for server in aiohttp_server_base(): yield server @pytest_asyncio.fixture async def ssl_aiohttp_server(): async for server in aiohttp_server_base(with_ssl=True): yield server # Adding debug logs for name in [ "websockets.legacy.server", "gql.transport.aiohttp", "gql.transport.appsync", "gql.transport.phoenix_channel_websockets", "gql.transport.requests", "gql.transport.httpx", "gql.transport.websockets", "gql.dsl", "gql.utilities.parse_result", ]: logger = logging.getLogger(name) logger.setLevel(logging.DEBUG) if len(logger.handlers) < 1: 
logger.addHandler(logging.StreamHandler()) # Unit for timeouts. May be increased on slow machines by setting the # GQL_TESTS_TIMEOUT_FACTOR environment variable. # Copied from websockets source MS = 0.001 * int(os.environ.get("GQL_TESTS_TIMEOUT_FACTOR", 1)) def get_localhost_ssl_context(): # This is a copy of certificate from websockets tests folder # # Generate TLS certificate with: # $ openssl req -x509 -config test_localhost.cnf \ # -days 15340 -newkey rsa:2048 \ # -out test_localhost.crt -keyout test_localhost.key # $ cat test_localhost.key test_localhost.crt > test_localhost.pem # $ rm test_localhost.key test_localhost.crt testcert = bytes(pathlib.Path(__file__).with_name("test_localhost.pem")) ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) ssl_context.load_cert_chain(testcert) return (testcert, ssl_context) class WebSocketServer: """Websocket server on localhost on a free port. This server allows us to test our client by simulating different correct and incorrect server responses. 
""" def __init__(self, with_ssl: bool = False): self.with_ssl = with_ssl async def start(self, handler, extra_serve_args=None): import websockets.server print("Starting server") if extra_serve_args is None: extra_serve_args = {} if self.with_ssl: self.testcert, ssl_context = get_localhost_ssl_context() extra_serve_args["ssl"] = ssl_context # Adding dummy response headers extra_serve_args["extra_headers"] = {"dummy": "test1234"} # Start a server with a random open port self.start_server = websockets.server.serve( handler, "127.0.0.1", 0, **extra_serve_args ) # Wait that the server is started self.server = await self.start_server # Get hostname and port hostname, port = self.server.sockets[0].getsockname()[:2] assert hostname == "127.0.0.1" self.hostname = hostname self.port = port print(f"Server started on port {port}") async def stop(self): print("Stopping server") self.server.close() try: await asyncio.wait_for(self.server.wait_closed(), timeout=5) except asyncio.TimeoutError: # pragma: no cover pass print("Server stopped\n\n\n") class WebSocketServerHelper: @staticmethod async def send_complete(ws, query_id): await ws.send(f'{{"type":"complete","id":"{query_id}","payload":null}}') @staticmethod async def send_keepalive(ws): await ws.send('{"type":"ka"}') @staticmethod async def send_ping(ws, payload=None): if payload is None: await ws.send('{"type":"ping"}') else: await ws.send(json.dumps({"type": "ping", "payload": payload})) @staticmethod async def send_pong(ws, payload=None): if payload is None: await ws.send('{"type":"pong"}') else: await ws.send(json.dumps({"type": "pong", "payload": payload})) @staticmethod async def send_connection_ack(ws, payload=None): # Line return for easy debugging print("") # Wait for init result = await ws.recv() json_result = json.loads(result) assert json_result["type"] == "connection_init" # Send ack if payload is None: await ws.send('{"type":"connection_ack"}') else: await ws.send(json.dumps({"type": "connection_ack", "payload": 
payload})) @staticmethod async def wait_connection_terminate(ws): result = await ws.recv() json_result = json.loads(result) assert json_result["type"] == "connection_terminate" class PhoenixChannelServerHelper: @staticmethod async def send_close(ws): await ws.send('{"event":"phx_close"}') @staticmethod async def send_connection_ack(ws): # Line return for easy debugging print("") # Wait for init result = await ws.recv() json_result = json.loads(result) assert json_result["event"] == "phx_join" # Send ack await ws.send('{"event":"phx_reply", "payload": {"status": "ok"}, "ref": 1}') class TemporaryFile: """Class used to generate temporary files for the tests""" def __init__(self, content: Union[str, bytearray]): mode = "w" if isinstance(content, str) else "wb" # We need to set the newline to '' so that the line returns # are not replaced by '\r\n' on windows newline = "" if isinstance(content, str) else None self.file = tempfile.NamedTemporaryFile( mode=mode, newline=newline, delete=False ) with self.file as f: f.write(content) @property def filename(self): return self.file.name def __enter__(self): return self def __exit__(self, type, value, traceback): os.unlink(self.filename) def get_server_handler(request): """Get the server handler. Either get it from test or use the default server handler if the test provides only an array of answers. 
""" from websockets.exceptions import ConnectionClosed if isinstance(request.param, types.FunctionType): server_handler = request.param else: answers = request.param async def default_server_handler(ws, path): try: await WebSocketServerHelper.send_connection_ack(ws) query_id = 1 for answer in answers: result = await ws.recv() print(f"Server received: {result}", file=sys.stderr) if isinstance(answer, str) and "{query_id}" in answer: answer_format_params = {"query_id": query_id} formatted_answer = answer.format(**answer_format_params) else: formatted_answer = answer await ws.send(formatted_answer) await WebSocketServerHelper.send_complete(ws, query_id) query_id += 1 await WebSocketServerHelper.wait_connection_terminate(ws) await ws.wait_closed() except ConnectionClosed: pass server_handler = default_server_handler return server_handler @pytest_asyncio.fixture async def ws_ssl_server(request): """Websockets server fixture using SSL. It can take as argument either a handler function for the websocket server for complete control OR an array of answers to be sent by the default server handler. """ server_handler = get_server_handler(request) try: test_server = WebSocketServer(with_ssl=True) # Starting the server with the fixture param as the handler function await test_server.start(server_handler) yield test_server except Exception as e: print("Exception received in ws server fixture:", e) finally: await test_server.stop() @pytest_asyncio.fixture async def server(request): """Fixture used to start a dummy server to test the client behaviour. It can take as argument either a handler function for the websocket server for complete control OR an array of answers to be sent by the default server handler. 
""" server_handler = get_server_handler(request) try: test_server = WebSocketServer() # Starting the server with the fixture param as the handler function await test_server.start(server_handler) yield test_server except Exception as e: print("Exception received in server fixture:", e) finally: await test_server.stop() @pytest_asyncio.fixture async def graphqlws_server(request): """Fixture used to start a dummy server with the graphql-ws protocol. Similar to the server fixture above but will return "graphql-transport-ws" as the server subprotocol. It can take as argument either a handler function for the websocket server for complete control OR an array of answers to be sent by the default server handler. """ subprotocol = "graphql-transport-ws" from websockets.server import WebSocketServerProtocol class CustomSubprotocol(WebSocketServerProtocol): def select_subprotocol(self, client_subprotocols, server_subprotocols): print(f"Client subprotocols: {client_subprotocols!r}") print(f"Server subprotocols: {server_subprotocols!r}") return subprotocol def process_subprotocol(self, headers, available_subprotocols): # Overwriting available subprotocols available_subprotocols = [subprotocol] print(f"headers: {headers!r}") # print (f"Available subprotocols: {available_subprotocols!r}") return super().process_subprotocol(headers, available_subprotocols) server_handler = get_server_handler(request) try: test_server = WebSocketServer() # Starting the server with the fixture param as the handler function await test_server.start( server_handler, extra_serve_args={"create_protocol": CustomSubprotocol} ) yield test_server except Exception as e: print("Exception received in server fixture:", e) finally: await test_server.stop() @pytest_asyncio.fixture async def client_and_server(server): """Helper fixture to start a server and a client connected to its port.""" from gql.transport.websockets import WebsocketsTransport # Generate transport to connect to the server fixture path = 
"/graphql" url = f"ws://{server.hostname}:{server.port}{path}" sample_transport = WebsocketsTransport(url=url) async with Client(transport=sample_transport) as session: # Yield both client session and server yield session, server @pytest_asyncio.fixture async def client_and_graphqlws_server(graphqlws_server): """Helper fixture to start a server with the graphql-ws prototocol and a client connected to its port.""" from gql.transport.websockets import WebsocketsTransport # Generate transport to connect to the server fixture path = "/graphql" url = f"ws://{graphqlws_server.hostname}:{graphqlws_server.port}{path}" sample_transport = WebsocketsTransport( url=url, subprotocols=[WebsocketsTransport.GRAPHQLWS_SUBPROTOCOL], ) async with Client(transport=sample_transport) as session: # Yield both client session and server yield session, graphqlws_server @pytest_asyncio.fixture async def run_sync_test(): async def run_sync_test_inner(event_loop, server, test_function): """This function will run the test in a different Thread. This allows us to run sync code while aiohttp server can still run. 
""" executor = ThreadPoolExecutor(max_workers=2) test_task = event_loop.run_in_executor(executor, test_function) await test_task if hasattr(server, "close"): await server.close() return run_sync_test_inner pytest_plugins = [ "tests.fixtures.aws.fake_credentials", "tests.fixtures.aws.fake_request", "tests.fixtures.aws.fake_session", "tests.fixtures.aws.fake_signer", ] def strip_braces_spaces(s): """Allow to ignore differences in graphql-core syntax between versions""" # Strip spaces after starting braces strip_front = s.replace("{ ", "{") # Strip spaces before closing braces only if one space is present strip_back = re.sub(r"([^\s]) }", r"\1}", strip_front) return strip_back gql-3.6.0b2/tests/custom_scalars/000077500000000000000000000000001460703211500167125ustar00rootroot00000000000000gql-3.6.0b2/tests/custom_scalars/__init__.py000066400000000000000000000000001460703211500210110ustar00rootroot00000000000000gql-3.6.0b2/tests/custom_scalars/test_datetime.py000066400000000000000000000147011460703211500221220ustar00rootroot00000000000000from datetime import datetime, timedelta from typing import Any, Dict, Optional import pytest from graphql.error import GraphQLError from graphql.language import ValueNode from graphql.pyutils import inspect from graphql.type import ( GraphQLArgument, GraphQLField, GraphQLInputField, GraphQLInputObjectType, GraphQLInt, GraphQLList, GraphQLObjectType, GraphQLScalarType, GraphQLSchema, ) from graphql.utilities import value_from_ast_untyped from gql import Client, gql def serialize_datetime(value: Any) -> str: if not isinstance(value, datetime): raise GraphQLError("Cannot serialize datetime value: " + inspect(value)) return value.isoformat() def parse_datetime_value(value: Any) -> datetime: if isinstance(value, str): try: # Note: a more solid custom scalar should use dateutil.parser.isoparse # Not using it here in the test to avoid adding another dependency return datetime.fromisoformat(value) except Exception: raise GraphQLError("Cannot 
parse datetime value : " + inspect(value)) else: raise GraphQLError("Cannot parse datetime value: " + inspect(value)) def parse_datetime_literal( value_node: ValueNode, variables: Optional[Dict[str, Any]] = None ) -> datetime: ast_value = value_from_ast_untyped(value_node, variables) if not isinstance(ast_value, str): raise GraphQLError("Cannot parse literal datetime value: " + inspect(ast_value)) return parse_datetime_value(ast_value) DatetimeScalar = GraphQLScalarType( name="Datetime", serialize=serialize_datetime, parse_value=parse_datetime_value, parse_literal=parse_datetime_literal, ) def resolve_shift_days(root, _info, time, days): return time + timedelta(days=days) def resolve_latest(root, _info, times): return max(times) def resolve_seconds(root, _info, interval): print(f"interval={interval!r}") return (interval["end"] - interval["start"]).total_seconds() IntervalInputType = GraphQLInputObjectType( "IntervalInput", fields={ "start": GraphQLInputField( DatetimeScalar, default_value=datetime(2021, 11, 12, 11, 58, 13, 461161), ), "end": GraphQLInputField(DatetimeScalar), }, ) queryType = GraphQLObjectType( name="RootQueryType", fields={ "shiftDays": GraphQLField( DatetimeScalar, args={ "time": GraphQLArgument(DatetimeScalar), "days": GraphQLArgument(GraphQLInt), }, resolve=resolve_shift_days, ), "latest": GraphQLField( DatetimeScalar, args={"times": GraphQLArgument(GraphQLList(DatetimeScalar))}, resolve=resolve_latest, ), "seconds": GraphQLField( GraphQLInt, args={"interval": GraphQLArgument(IntervalInputType)}, resolve=resolve_seconds, ), }, ) schema = GraphQLSchema(query=queryType) @pytest.mark.skipif( not hasattr(datetime, "fromisoformat"), reason="fromisoformat is new in Python 3.7+" ) def test_shift_days(): client = Client(schema=schema, parse_results=True, serialize_variables=True) now = datetime.fromisoformat("2021-11-12T11:58:13.461161") query = gql("query shift5days($time: Datetime) {shiftDays(time: $time, days: 5)}") variable_values = { "time": now, 
} result = client.execute(query, variable_values=variable_values) print(result) assert result["shiftDays"] == datetime.fromisoformat("2021-11-17T11:58:13.461161") @pytest.mark.skipif( not hasattr(datetime, "fromisoformat"), reason="fromisoformat is new in Python 3.7+" ) def test_shift_days_serialized_manually_in_query(): client = Client(schema=schema) query = gql( """{ shiftDays(time: "2021-11-12T11:58:13.461161", days: 5) }""" ) result = client.execute(query, parse_result=True) print(result) assert result["shiftDays"] == datetime.fromisoformat("2021-11-17T11:58:13.461161") @pytest.mark.skipif( not hasattr(datetime, "fromisoformat"), reason="fromisoformat is new in Python 3.7+" ) def test_shift_days_serialized_manually_in_variables(): client = Client(schema=schema, parse_results=True) query = gql("query shift5days($time: Datetime) {shiftDays(time: $time, days: 5)}") variable_values = { "time": "2021-11-12T11:58:13.461161", } result = client.execute(query, variable_values=variable_values) print(result) assert result["shiftDays"] == datetime.fromisoformat("2021-11-17T11:58:13.461161") @pytest.mark.skipif( not hasattr(datetime, "fromisoformat"), reason="fromisoformat is new in Python 3.7+" ) def test_latest(): client = Client(schema=schema, parse_results=True) now = datetime.fromisoformat("2021-11-12T11:58:13.461161") in_five_days = datetime.fromisoformat("2021-11-17T11:58:13.461161") query = gql("query latest($times: [Datetime!]!) 
{latest(times: $times)}") variable_values = { "times": [now, in_five_days], } result = client.execute( query, variable_values=variable_values, serialize_variables=True ) print(result) assert result["latest"] == in_five_days @pytest.mark.skipif( not hasattr(datetime, "fromisoformat"), reason="fromisoformat is new in Python 3.7+" ) def test_seconds(): client = Client(schema=schema) now = datetime.fromisoformat("2021-11-12T11:58:13.461161") in_five_days = datetime.fromisoformat("2021-11-17T11:58:13.461161") query = gql( "query seconds($interval: IntervalInput) {seconds(interval: $interval)}" ) variable_values = {"interval": {"start": now, "end": in_five_days}} result = client.execute( query, variable_values=variable_values, serialize_variables=True ) print(result) assert result["seconds"] == 432000 @pytest.mark.skipif( not hasattr(datetime, "fromisoformat"), reason="fromisoformat is new in Python 3.7+" ) def test_seconds_omit_optional_start_argument(): client = Client(schema=schema) in_five_days = datetime.fromisoformat("2021-11-17T11:58:13.461161") query = gql( "query seconds($interval: IntervalInput) {seconds(interval: $interval)}" ) variable_values = {"interval": {"end": in_five_days}} result = client.execute( query, variable_values=variable_values, serialize_variables=True ) print(result) assert result["seconds"] == 432000 gql-3.6.0b2/tests/custom_scalars/test_enum_colors.py000066400000000000000000000154601460703211500226560ustar00rootroot00000000000000from enum import Enum import pytest from graphql import ( GraphQLArgument, GraphQLEnumType, GraphQLField, GraphQLList, GraphQLNonNull, GraphQLObjectType, GraphQLSchema, ) from gql import Client, gql from gql.utilities import update_schema_enum class Color(Enum): RED = 0 GREEN = 1 BLUE = 2 YELLOW = 3 CYAN = 4 MAGENTA = 5 RED = Color.RED GREEN = Color.GREEN BLUE = Color.BLUE YELLOW = Color.YELLOW CYAN = Color.CYAN MAGENTA = Color.MAGENTA ALL_COLORS = [c for c in Color] ColorType = GraphQLEnumType("Color", {c.name: c 
for c in Color}) def resolve_opposite(_root, _info, color): opposite_colors = { RED: CYAN, GREEN: MAGENTA, BLUE: YELLOW, YELLOW: BLUE, CYAN: RED, MAGENTA: GREEN, } return opposite_colors[color] def resolve_all(_root, _info): return ALL_COLORS list_of_list_of_list = [[[RED, GREEN], [GREEN, BLUE]], [[YELLOW, CYAN], [MAGENTA, RED]]] def resolve_list_of_list_of_list(_root, _info): return list_of_list_of_list def resolve_list_of_list(_root, _info): return list_of_list_of_list[0] def resolve_list(_root, _info): return list_of_list_of_list[0][0] queryType = GraphQLObjectType( name="RootQueryType", fields={ "all": GraphQLField( GraphQLList(ColorType), resolve=resolve_all, ), "opposite": GraphQLField( ColorType, args={"color": GraphQLArgument(ColorType)}, resolve=resolve_opposite, ), "list_of_list_of_list": GraphQLField( GraphQLNonNull( GraphQLList( GraphQLNonNull(GraphQLList(GraphQLNonNull(GraphQLList(ColorType)))) ) ), resolve=resolve_list_of_list_of_list, ), "list_of_list": GraphQLField( GraphQLNonNull(GraphQLList(GraphQLNonNull(GraphQLList(ColorType)))), resolve=resolve_list_of_list, ), "list": GraphQLField( GraphQLNonNull(GraphQLList(ColorType)), resolve=resolve_list, ), }, ) schema = GraphQLSchema(query=queryType) def test_parse_value_enum(): result = ColorType.parse_value("RED") print(result) assert isinstance(result, Color) assert result is RED def test_serialize_enum(): result = ColorType.serialize(RED) print(result) assert result == "RED" def test_get_all_colors(): query = gql("{all}") client = Client(schema=schema, parse_results=True) result = client.execute(query) print(result) all_colors = result["all"] assert all_colors == ALL_COLORS def test_opposite_color_literal(): client = Client(schema=schema, parse_results=True) query = gql("{opposite(color: RED)}") result = client.execute(query) print(result) opposite_color = result["opposite"] assert isinstance(opposite_color, Color) assert opposite_color == CYAN def test_opposite_color_variable_serialized_manually(): 
client = Client(schema=schema, parse_results=True) query = gql( """ query GetOppositeColor($color: Color) { opposite(color:$color) }""" ) variable_values = { "color": "RED", } result = client.execute(query, variable_values=variable_values) print(result) opposite_color = result["opposite"] assert isinstance(opposite_color, Color) assert opposite_color == CYAN def test_opposite_color_variable_serialized_by_gql(): client = Client(schema=schema, parse_results=True) query = gql( """ query GetOppositeColor($color: Color) { opposite(color:$color) }""" ) variable_values = { "color": RED, } result = client.execute( query, variable_values=variable_values, serialize_variables=True ) print(result) opposite_color = result["opposite"] assert isinstance(opposite_color, Color) assert opposite_color == CYAN def test_list(): query = gql("{list}") client = Client(schema=schema, parse_results=True) result = client.execute(query) print(result) big_list = result["list"] assert big_list == list_of_list_of_list[0][0] def test_list_of_list(): query = gql("{list_of_list}") client = Client(schema=schema, parse_results=True) result = client.execute(query) print(result) big_list = result["list_of_list"] assert big_list == list_of_list_of_list[0] def test_list_of_list_of_list(): query = gql("{list_of_list_of_list}") client = Client(schema=schema, parse_results=True) result = client.execute(query) print(result) big_list = result["list_of_list_of_list"] assert big_list == list_of_list_of_list def test_update_schema_enum(): assert schema.get_type("Color").parse_value("RED") == Color.RED # Using values update_schema_enum(schema, "Color", Color, use_enum_values=True) assert schema.get_type("Color").parse_value("RED") == 0 assert schema.get_type("Color").serialize(1) == "GREEN" update_schema_enum(schema, "Color", Color) assert schema.get_type("Color").parse_value("RED") == Color.RED assert schema.get_type("Color").serialize(Color.RED) == "RED" def test_update_schema_enum_errors(): with 
pytest.raises(KeyError) as exc_info: update_schema_enum(schema, "Corlo", Color) assert "Enum Corlo not found in schema!" in str(exc_info) with pytest.raises(TypeError) as exc_info: update_schema_enum(schema, "Color", 6) assert "Invalid type for enum values: " in str(exc_info) with pytest.raises(TypeError) as exc_info: update_schema_enum(schema, "RootQueryType", Color) assert 'The type "RootQueryType" is not a GraphQLEnumType, it is a' in str(exc_info) with pytest.raises(KeyError) as exc_info: update_schema_enum(schema, "Color", {"RED": Color.RED}) assert 'Enum key "GREEN" not found in provided values!' in str(exc_info) def test_parse_results_with_operation_type(): client = Client(schema=schema, parse_results=True) query = gql( """ query GetAll { all } query GetOppositeColor($color: Color) { opposite(color:$color) } query GetOppositeColor2($color: Color) { other_opposite:opposite(color:$color) } query GetOppositeColor3 { opposite(color: YELLOW) } query GetListOfListOfList { list_of_list_of_list } """ ) variable_values = { "color": "RED", } result = client.execute( query, variable_values=variable_values, operation_name="GetOppositeColor" ) print(result) opposite_color = result["opposite"] assert isinstance(opposite_color, Color) assert opposite_color == CYAN gql-3.6.0b2/tests/custom_scalars/test_json.py000066400000000000000000000121631460703211500212770ustar00rootroot00000000000000from typing import Any, Dict, Optional import pytest from graphql import ( GraphQLArgument, GraphQLError, GraphQLField, GraphQLFloat, GraphQLInt, GraphQLNonNull, GraphQLObjectType, GraphQLScalarType, GraphQLSchema, ) from graphql.language import ValueNode from graphql.utilities import value_from_ast_untyped from gql import Client, gql from gql.dsl import DSLSchema from ..conftest import strip_braces_spaces # Marking all tests in this file with the aiohttp marker pytestmark = pytest.mark.aiohttp def serialize_json(value: Any) -> Dict[str, Any]: return value def parse_json_value(value: Any) 
-> Any: return value def parse_json_literal( value_node: ValueNode, variables: Optional[Dict[str, Any]] = None ) -> Any: return value_from_ast_untyped(value_node, variables) JsonScalar = GraphQLScalarType( name="JSON", serialize=serialize_json, parse_value=parse_json_value, parse_literal=parse_json_literal, ) root_value = { "players": [ { "name": "John", "level": 3, "is_connected": True, "score": 123.45, "friends": ["Alex", "Alicia"], }, { "name": "Alex", "level": 4, "is_connected": False, "score": 1337.69, "friends": None, }, ] } def resolve_players(root, _info): return root["players"] queryType = GraphQLObjectType( name="Query", fields={"players": GraphQLField(JsonScalar, resolve=resolve_players)}, ) def resolve_add_player(root, _info, player): print(f"player = {player!r}") root["players"].append(player) return {"players": root["players"]} mutationType = GraphQLObjectType( name="Mutation", fields={ "addPlayer": GraphQLField( JsonScalar, args={"player": GraphQLArgument(GraphQLNonNull(JsonScalar))}, resolve=resolve_add_player, ) }, ) schema = GraphQLSchema(query=queryType, mutation=mutationType) def test_json_value_output(): client = Client(schema=schema, parse_results=True) query = gql("query {players}") result = client.execute(query, root_value=root_value) print(result) assert result["players"] == serialize_json(root_value["players"]) def test_json_value_input_in_ast(): client = Client(schema=schema) query = gql( """ mutation adding_player { addPlayer(player: { name: "Tom", level: 1, is_connected: True, score: 0, friends: [ "John" ] }) }""" ) result = client.execute(query, root_value=root_value) print(result) players = result["addPlayer"]["players"] assert players == serialize_json(root_value["players"]) assert players[-1]["name"] == "Tom" def test_json_value_input_in_ast_with_variables(): print(f"{schema.type_map!r}") client = Client(schema=schema) # Note: we need to manually add the built-in types which # are not present in the schema schema.type_map["Int"] = 
GraphQLInt schema.type_map["Float"] = GraphQLFloat query = gql( """ mutation adding_player( $name: String!, $level: Int!, $is_connected: Boolean, $score: Float!, $friends: [String!]!) { addPlayer(player: { name: $name, level: $level, is_connected: $is_connected, score: $score, friends: $friends, }) }""" ) variable_values = { "name": "Barbara", "level": 1, "is_connected": False, "score": 69, "friends": ["Alex", "John"], } result = client.execute( query, variable_values=variable_values, root_value=root_value ) print(result) players = result["addPlayer"]["players"] assert players == serialize_json(root_value["players"]) assert players[-1]["name"] == "Barbara" def test_json_value_input_in_dsl_argument(): ds = DSLSchema(schema) new_player = { "name": "Tim", "level": 0, "is_connected": False, "score": 5, "friends": ["Lea"], } query = ds.Mutation.addPlayer(player=new_player) print(str(query)) assert ( strip_braces_spaces(str(query)) == """addPlayer( player: {name: "Tim", level: 0, is_connected: false, score: 5, friends: ["Lea"]} )""" ) def test_none_json_value_input_in_dsl_argument(): ds = DSLSchema(schema) with pytest.raises(GraphQLError) as exc_info: ds.Mutation.addPlayer(player=None) assert "Received Null value for a Non-Null type JSON." 
in str(exc_info.value) def test_json_value_input_with_none_list_in_dsl_argument(): ds = DSLSchema(schema) new_player = { "name": "Bob", "level": 9001, "is_connected": True, "score": 666.66, "friends": None, } query = ds.Mutation.addPlayer(player=new_player) print(str(query)) assert ( strip_braces_spaces(str(query)) == """addPlayer( player: {name: "Bob", level: 9001, is_connected: true, score: 666.66, friends: null} )""" ) gql-3.6.0b2/tests/custom_scalars/test_money.py000066400000000000000000000536601460703211500214640ustar00rootroot00000000000000import asyncio from math import isfinite from typing import Any, Dict, NamedTuple, Optional import pytest from graphql import ExecutionResult, graphql_sync from graphql.error import GraphQLError from graphql.language import ValueNode from graphql.pyutils import inspect from graphql.type import ( GraphQLArgument, GraphQLField, GraphQLFloat, GraphQLInt, GraphQLList, GraphQLNonNull, GraphQLObjectType, GraphQLScalarType, GraphQLSchema, ) from graphql.utilities import value_from_ast_untyped from gql import Client, GraphQLRequest, gql from gql.transport.exceptions import TransportQueryError from gql.utilities import serialize_value, update_schema_scalar, update_schema_scalars from ..conftest import MS # Marking all tests in this file with the aiohttp marker pytestmark = pytest.mark.aiohttp class Money(NamedTuple): amount: float currency: str def is_finite(value: Any) -> bool: """Return true if a value is a finite number.""" return (isinstance(value, int) and not isinstance(value, bool)) or ( isinstance(value, float) and isfinite(value) ) def serialize_money(output_value: Any) -> Dict[str, Any]: if not isinstance(output_value, Money): raise GraphQLError("Cannot serialize money value: " + inspect(output_value)) return output_value._asdict() def parse_money_value(input_value: Any) -> Money: """Using Money custom scalar from graphql-core tests except here the input value is supposed to be a dict instead of a Money object.""" """ if 
isinstance(input_value, Money): return input_value """ if isinstance(input_value, dict): amount = input_value.get("amount", None) currency = input_value.get("currency", None) if not is_finite(amount) or not isinstance(currency, str): raise GraphQLError("Cannot parse money value dict: " + inspect(input_value)) return Money(float(amount), currency) else: raise GraphQLError("Cannot parse money value: " + inspect(input_value)) def parse_money_literal( value_node: ValueNode, variables: Optional[Dict[str, Any]] = None ) -> Money: money = value_from_ast_untyped(value_node, variables) if variables is not None and ( # variables are not set when checked with ValuesIOfCorrectTypeRule not money or not is_finite(money.get("amount")) or not isinstance(money.get("currency"), str) ): raise GraphQLError("Cannot parse literal money value: " + inspect(money)) return Money(**money) MoneyScalar = GraphQLScalarType( name="Money", serialize=serialize_money, parse_value=parse_money_value, parse_literal=parse_money_literal, ) root_value = { "balance": Money(42, "DM"), "friends_balance": [Money(12, "EUR"), Money(24, "EUR"), Money(150, "DM")], "countries_balance": { "Belgium": Money(15000, "EUR"), "Luxembourg": Money(99999, "EUR"), }, } def resolve_balance(root, _info): return root["balance"] def resolve_friends_balance(root, _info): return root["friends_balance"] def resolve_countries_balance(root, _info): return root["countries_balance"] def resolve_belgium_balance(countries_balance, _info): return countries_balance["Belgium"] def resolve_luxembourg_balance(countries_balance, _info): return countries_balance["Luxembourg"] def resolve_to_euros(_root, _info, money): amount = money.amount currency = money.currency if not amount or currency == "EUR": return amount if currency == "DM": return amount * 0.5 raise ValueError("Cannot convert to euros: " + inspect(money)) countriesBalance = GraphQLObjectType( name="CountriesBalance", fields={ "Belgium": GraphQLField( GraphQLNonNull(MoneyScalar), 
resolve=resolve_belgium_balance ), "Luxembourg": GraphQLField( GraphQLNonNull(MoneyScalar), resolve=resolve_luxembourg_balance ), }, ) queryType = GraphQLObjectType( name="RootQueryType", fields={ "balance": GraphQLField(MoneyScalar, resolve=resolve_balance), "toEuros": GraphQLField( GraphQLFloat, args={"money": GraphQLArgument(MoneyScalar)}, resolve=resolve_to_euros, ), "friends_balance": GraphQLField( GraphQLList(MoneyScalar), resolve=resolve_friends_balance ), "countries_balance": GraphQLField( GraphQLNonNull(countriesBalance), resolve=resolve_countries_balance, ), }, ) def resolve_spent_money(spent_money, _info, **kwargs): return spent_money async def subscribe_spend_all(_root, _info, money): while money.amount > 0: money = Money(money.amount - 1, money.currency) yield money await asyncio.sleep(1 * MS) subscriptionType = GraphQLObjectType( "Subscription", fields=lambda: { "spend": GraphQLField( MoneyScalar, args={"money": GraphQLArgument(MoneyScalar)}, subscribe=subscribe_spend_all, resolve=resolve_spent_money, ) }, ) schema = GraphQLSchema( query=queryType, subscription=subscriptionType, ) def test_custom_scalar_in_output(): client = Client(schema=schema, parse_results=True) query = gql("{balance}") result = client.execute(query, root_value=root_value) print(result) assert result["balance"] == root_value["balance"] def test_custom_scalar_in_output_embedded_fragments(): client = Client(schema=schema, parse_results=True) query = gql( """ fragment LuxMoneyInternal on CountriesBalance { ... 
on CountriesBalance { Luxembourg } } query { countries_balance { Belgium ...LuxMoney } } fragment LuxMoney on CountriesBalance { ...LuxMoneyInternal } """ ) result = client.execute(query, root_value=root_value) print(result) belgium_money = result["countries_balance"]["Belgium"] assert belgium_money == Money(15000, "EUR") luxembourg_money = result["countries_balance"]["Luxembourg"] assert luxembourg_money == Money(99999, "EUR") def test_custom_scalar_list_in_output(): client = Client(schema=schema, parse_results=True) query = gql("{friends_balance}") result = client.execute(query, root_value=root_value) print(result) assert result["friends_balance"] == root_value["friends_balance"] def test_custom_scalar_in_input_query(): client = Client(schema=schema) query = gql('{toEuros(money: {amount: 10, currency: "DM"})}') result = client.execute(query, root_value=root_value) assert result["toEuros"] == 5 query = gql('{toEuros(money: {amount: 10, currency: "EUR"})}') result = client.execute(query, root_value=root_value) assert result["toEuros"] == 10 def test_custom_scalar_in_input_variable_values(): client = Client(schema=schema) query = gql("query myquery($money: Money) {toEuros(money: $money)}") money_value = {"amount": 10, "currency": "DM"} variable_values = {"money": money_value} result = client.execute( query, variable_values=variable_values, root_value=root_value ) assert result["toEuros"] == 5 def test_custom_scalar_in_input_variable_values_serialized(): client = Client(schema=schema) query = gql("query myquery($money: Money) {toEuros(money: $money)}") money_value = Money(10, "DM") variable_values = {"money": money_value} result = client.execute( query, variable_values=variable_values, root_value=root_value, serialize_variables=True, ) assert result["toEuros"] == 5 def test_custom_scalar_in_input_variable_values_serialized_with_operation_name(): client = Client(schema=schema) query = gql("query myquery($money: Money) {toEuros(money: $money)}") money_value = Money(10, 
"DM") variable_values = {"money": money_value} result = client.execute( query, variable_values=variable_values, root_value=root_value, serialize_variables=True, operation_name="myquery", ) assert result["toEuros"] == 5 def test_serialize_variable_values_exception_multiple_ops_without_operation_name(): client = Client(schema=schema) query = gql( """ query myconversion($money: Money) { toEuros(money: $money) } query mybalance { balance }""" ) money_value = Money(10, "DM") variable_values = {"money": money_value} with pytest.raises(GraphQLError) as exc_info: client.execute( query, variable_values=variable_values, root_value=root_value, serialize_variables=True, ) exception = exc_info.value assert ( str(exception) == "Must provide operation name if query contains multiple operations." ) def test_serialize_variable_values_exception_operation_name_not_found(): client = Client(schema=schema) query = gql( """ query myconversion($money: Money) { toEuros(money: $money) } """ ) money_value = Money(10, "DM") variable_values = {"money": money_value} with pytest.raises(GraphQLError) as exc_info: client.execute( query, variable_values=variable_values, root_value=root_value, serialize_variables=True, operation_name="invalid_operation_name", ) exception = exc_info.value assert str(exception) == "Unknown operation named 'invalid_operation_name'." 
def test_custom_scalar_subscribe_in_input_variable_values_serialized(): client = Client(schema=schema) query = gql("subscription spendAll($money: Money) {spend(money: $money)}") money_value = Money(10, "DM") variable_values = {"money": money_value} expected_result = {"spend": Money(10, "DM")} for result in client.subscribe( query, variable_values=variable_values, root_value=root_value, serialize_variables=True, parse_result=True, ): print(f"result = {result!r}") assert isinstance(result["spend"], Money) expected_result["spend"] = Money(expected_result["spend"].amount - 1, "DM") assert expected_result == result async def make_money_backend(aiohttp_server): from aiohttp import web async def handler(request): req_data = await request.json() def handle_single(data: Dict[str, Any]) -> ExecutionResult: source = data["query"] try: variables = data["variables"] except KeyError: variables = None result = graphql_sync( schema, source, variable_values=variables, root_value=root_value ) return result if isinstance(req_data, list): results = [handle_single(d) for d in req_data] return web.json_response( [ { "data": result.data, "errors": [str(e) for e in result.errors] if result.errors else None, } for result in results ] ) else: result = handle_single(req_data) return web.json_response( { "data": result.data, "errors": [str(e) for e in result.errors] if result.errors else None, } ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) return server async def make_money_transport(aiohttp_server): from gql.transport.aiohttp import AIOHTTPTransport server = await make_money_backend(aiohttp_server) url = server.make_url("/") transport = AIOHTTPTransport(url=url, timeout=10) return transport async def make_sync_money_transport(aiohttp_server): from gql.transport.requests import RequestsHTTPTransport server = await make_money_backend(aiohttp_server) url = server.make_url("/") transport = RequestsHTTPTransport(url=url, timeout=10) 
return (server, transport) @pytest.mark.asyncio async def test_custom_scalar_in_output_with_transport(event_loop, aiohttp_server): transport = await make_money_transport(aiohttp_server) async with Client( transport=transport, ) as session: query = gql("{balance}") result = await session.execute(query) print(result) assert result["balance"] == serialize_money(root_value["balance"]) @pytest.mark.asyncio async def test_custom_scalar_in_input_query_with_transport(event_loop, aiohttp_server): transport = await make_money_transport(aiohttp_server) async with Client( transport=transport, ) as session: query = gql('{toEuros(money: {amount: 10, currency: "DM"})}') result = await session.execute(query) assert result["toEuros"] == 5 query = gql('{toEuros(money: {amount: 10, currency: "EUR"})}') result = await session.execute(query) assert result["toEuros"] == 10 @pytest.mark.asyncio async def test_custom_scalar_in_input_variable_values_with_transport( event_loop, aiohttp_server ): transport = await make_money_transport(aiohttp_server) async with Client( transport=transport, ) as session: query = gql("query myquery($money: Money) {toEuros(money: $money)}") money_value = {"amount": 10, "currency": "DM"} # money_value = Money(10, "DM") variable_values = {"money": money_value} result = await session.execute(query, variable_values=variable_values) print(f"result = {result!r}") assert result["toEuros"] == 5 @pytest.mark.asyncio async def test_custom_scalar_in_input_variable_values_split_with_transport( event_loop, aiohttp_server ): transport = await make_money_transport(aiohttp_server) async with Client( transport=transport, ) as session: query = gql( """ query myquery($amount: Float, $currency: String) { toEuros(money: {amount: $amount, currency: $currency}) }""" ) variable_values = {"amount": 10, "currency": "DM"} result = await session.execute(query, variable_values=variable_values) print(f"result = {result!r}") assert result["toEuros"] == 5 @pytest.mark.asyncio async def 
test_custom_scalar_serialize_variables(event_loop, aiohttp_server): transport = await make_money_transport(aiohttp_server) async with Client( schema=schema, transport=transport, ) as session: query = gql("query myquery($money: Money) {toEuros(money: $money)}") variable_values = {"money": Money(10, "DM")} result = await session.execute( query, variable_values=variable_values, serialize_variables=True ) print(f"result = {result!r}") assert result["toEuros"] == 5 @pytest.mark.asyncio async def test_custom_scalar_serialize_variables_no_schema(event_loop, aiohttp_server): transport = await make_money_transport(aiohttp_server) async with Client( transport=transport, ) as session: query = gql("query myquery($money: Money) {toEuros(money: $money)}") variable_values = {"money": Money(10, "DM")} with pytest.raises(TransportQueryError): await session.execute( query, variable_values=variable_values, serialize_variables=True ) @pytest.mark.asyncio async def test_custom_scalar_serialize_variables_schema_from_introspection( event_loop, aiohttp_server ): transport = await make_money_transport(aiohttp_server) async with Client(transport=transport, fetch_schema_from_transport=True) as session: schema = session.client.schema # Updating the Money Scalar in the schema # We cannot replace it because some other objects keep a reference # to the existing Scalar # cannot do: schema.type_map["Money"] = MoneyScalar money_scalar = schema.type_map["Money"] money_scalar.serialize = MoneyScalar.serialize money_scalar.parse_value = MoneyScalar.parse_value money_scalar.parse_literal = MoneyScalar.parse_literal query = gql("query myquery($money: Money) {toEuros(money: $money)}") variable_values = {"money": Money(10, "DM")} result = await session.execute( query, variable_values=variable_values, serialize_variables=True ) print(f"result = {result!r}") assert result["toEuros"] == 5 @pytest.mark.asyncio async def test_update_schema_scalars(event_loop, aiohttp_server): transport = await 
make_money_transport(aiohttp_server) async with Client(transport=transport, fetch_schema_from_transport=True) as session: # Update the schema MoneyScalar default implementation from # introspection with our provided conversion methods # update_schema_scalars(session.client.schema, [MoneyScalar]) update_schema_scalar(session.client.schema, "Money", MoneyScalar) query = gql("query myquery($money: Money) {toEuros(money: $money)}") variable_values = {"money": Money(10, "DM")} result = await session.execute( query, variable_values=variable_values, serialize_variables=True ) print(f"result = {result!r}") assert result["toEuros"] == 5 def test_update_schema_scalars_invalid_scalar(): with pytest.raises(TypeError) as exc_info: update_schema_scalars(schema, [int]) exception = exc_info.value assert str(exception) == "Scalars should be instances of GraphQLScalarType." with pytest.raises(TypeError) as exc_info: update_schema_scalar(schema, "test", int) exception = exc_info.value assert str(exception) == "Scalars should be instances of GraphQLScalarType." def test_update_schema_scalars_invalid_scalar_argument(): with pytest.raises(TypeError) as exc_info: update_schema_scalars(schema, MoneyScalar) exception = exc_info.value assert str(exception) == "Scalars argument should be a list of scalars." def test_update_schema_scalars_scalar_not_found_in_schema(): NotFoundScalar = GraphQLScalarType( name="abcd", ) with pytest.raises(KeyError) as exc_info: update_schema_scalars(schema, [MoneyScalar, NotFoundScalar]) exception = exc_info.value assert "Scalar 'abcd' not found in schema." 
in str(exception) def test_update_schema_scalars_scalar_type_is_not_a_scalar_in_schema(): with pytest.raises(TypeError) as exc_info: update_schema_scalar(schema, "CountriesBalance", MoneyScalar) exception = exc_info.value assert 'The type "CountriesBalance" is not a GraphQLScalarType, it is a' in str( exception ) @pytest.mark.asyncio @pytest.mark.requests async def test_custom_scalar_serialize_variables_sync_transport( event_loop, aiohttp_server, run_sync_test ): server, transport = await make_sync_money_transport(aiohttp_server) def test_code(): with Client(schema=schema, transport=transport, parse_results=True) as session: query = gql("query myquery($money: Money) {toEuros(money: $money)}") variable_values = {"money": Money(10, "DM")} result = session.execute( query, variable_values=variable_values, serialize_variables=True ) print(f"result = {result!r}") assert result["toEuros"] == 5 await run_sync_test(event_loop, server, test_code) @pytest.mark.asyncio @pytest.mark.requests async def test_custom_scalar_serialize_variables_sync_transport_2( event_loop, aiohttp_server, run_sync_test ): server, transport = await make_sync_money_transport(aiohttp_server) def test_code(): with Client(schema=schema, transport=transport, parse_results=True) as session: query = gql("query myquery($money: Money) {toEuros(money: $money)}") variable_values = {"money": Money(10, "DM")} results = session.execute_batch( [ GraphQLRequest(document=query, variable_values=variable_values), GraphQLRequest(document=query, variable_values=variable_values), ], serialize_variables=True, ) print(f"result = {results!r}") assert results[0]["toEuros"] == 5 assert results[1]["toEuros"] == 5 await run_sync_test(event_loop, server, test_code) def test_serialize_value_with_invalid_type(): with pytest.raises(GraphQLError) as exc_info: serialize_value("Not a valid type", 50) exception = exc_info.value assert ( str(exception) == "Impossible to serialize value with type: 'Not a valid type'." 
) def test_serialize_value_with_non_null_type_null(): non_null_int = GraphQLNonNull(GraphQLInt) with pytest.raises(GraphQLError) as exc_info: serialize_value(non_null_int, None) exception = exc_info.value assert str(exception) == "Type Int! Cannot be None." def test_serialize_value_with_nullable_type(): nullable_int = GraphQLInt assert serialize_value(nullable_int, None) is None @pytest.mark.asyncio async def test_gql_cli_print_schema(event_loop, aiohttp_server, capsys): from gql.cli import get_parser, main server = await make_money_backend(aiohttp_server) url = str(server.make_url("/")) parser = get_parser(with_examples=True) args = parser.parse_args([url, "--print-schema"]) exit_code = await main(args) assert exit_code == 0 # Check that the result has been printed on stdout captured = capsys.readouterr() captured_out = str(captured.out).strip() print(captured_out) assert ( """ type Subscription { spend(money: Money): Money } """.strip() in captured_out ) gql-3.6.0b2/tests/custom_scalars/test_parse_results.py000066400000000000000000000047621460703211500232270ustar00rootroot00000000000000from graphql.type import ( GraphQLArgument, GraphQLField, GraphQLInt, GraphQLList, GraphQLNonNull, GraphQLObjectType, GraphQLSchema, GraphQLString, ) from gql import Client, gql static_result = { "edges": [ { "node": { "from": {"address": "0x45b9ad45995577fe"}, "to": {"address": "0x6394e988297f5ed2"}, } }, {"node": {"from": None, "to": {"address": "0x6394e988297f5ed2"}}}, ] } def resolve_test(root, _info, count): return static_result Account = GraphQLObjectType( name="Account", fields={"address": GraphQLField(GraphQLNonNull(GraphQLString))}, ) queryType = GraphQLObjectType( name="RootQueryType", fields={ "test": GraphQLField( GraphQLObjectType( name="test", fields={ "edges": GraphQLField( GraphQLList( GraphQLObjectType( "example", fields={ "node": GraphQLField( GraphQLObjectType( name="node", fields={ "from": GraphQLField(Account), "to": GraphQLField(Account), }, ) ) }, ) ) ) }, ), 
args={"count": GraphQLArgument(GraphQLInt)}, resolve=resolve_test, ), }, ) schema = GraphQLSchema(query=queryType) def test_parse_results_null_mapping(): """This is a regression test for the issue: https://github.com/graphql-python/gql/issues/325 Most of the parse_results tests are in tests/starwars/test_parse_results.py """ client = Client(schema=schema, parse_results=True) query = gql( """query testQ($count: Int) {test(count: $count){ edges { node { from { address } to { address } } } } }""" ) assert client.execute(query, variable_values={"count": 2}) == { "test": static_result } gql-3.6.0b2/tests/fixtures/000077500000000000000000000000001460703211500155415ustar00rootroot00000000000000gql-3.6.0b2/tests/fixtures/__init__.py000066400000000000000000000000001460703211500176400ustar00rootroot00000000000000gql-3.6.0b2/tests/fixtures/aws/000077500000000000000000000000001460703211500163335ustar00rootroot00000000000000gql-3.6.0b2/tests/fixtures/aws/__init__.py000066400000000000000000000000001460703211500204320ustar00rootroot00000000000000gql-3.6.0b2/tests/fixtures/aws/fake_credentials.py000066400000000000000000000015651460703211500221770ustar00rootroot00000000000000import pytest class FakeCredentials(object): def __init__( self, access_key=None, secret_key=None, method=None, token=None, region=None ): self.region = region if region else "us-east-1a" self.access_key = access_key if access_key else "fake-access-key" self.secret_key = secret_key if secret_key else "fake-secret-key" self.method = method if method else "shared-credentials-file" self.token = token if token else "fake-token" @pytest.fixture def fake_credentials_factory(): def _fake_credentials_factory( access_key=None, secret_key=None, method=None, token=None, region=None ): return FakeCredentials( access_key=access_key, secret_key=secret_key, method=method, token=token, region=region, ) yield _fake_credentials_factory 
gql-3.6.0b2/tests/fixtures/aws/fake_request.py000066400000000000000000000011341460703211500213620ustar00rootroot00000000000000import pytest class FakeRequest(object): headers = None def __init__(self, request_props=None): if not isinstance(request_props, dict): return self.method = request_props.get("method") self.url = request_props.get("url") self.headers = request_props.get("headers") self.context = request_props.get("context") self.body = request_props.get("body") @pytest.fixture def fake_request_factory(): def _fake_request_factory(request_props=None): return FakeRequest(request_props=request_props) yield _fake_request_factory gql-3.6.0b2/tests/fixtures/aws/fake_session.py000066400000000000000000000012461460703211500213610ustar00rootroot00000000000000import pytest class FakeSession(object): def __init__(self, credentials, region_name): self._credentials = credentials self._region_name = region_name def get_default_client_config(self): return def get_credentials(self): return self._credentials def _resolve_region_name(self, region_name, client_config): return region_name if region_name else self._region_name @pytest.fixture def fake_session_factory(fake_credentials_factory): def _fake_session_factory(credentials=fake_credentials_factory()): return FakeSession(credentials=credentials, region_name="fake-region") yield _fake_session_factory gql-3.6.0b2/tests/fixtures/aws/fake_signer.py000066400000000000000000000012461460703211500211650ustar00rootroot00000000000000import pytest @pytest.fixture def fake_signer_factory(fake_request_factory): def _fake_signer_factory(request=None): if not request: request = fake_request_factory() return FakeSigner(request=request) yield _fake_signer_factory class FakeSigner(object): def __init__(self, request=None) -> None: self.request = request def add_auth(self, request) -> None: """ A fake for getting a request object that :return: """ request.headers = {"FakeAuthorization": "a", "FakeTime": "today"} def get_headers(self): 
self.add_auth(self.request) return self.request.headers gql-3.6.0b2/tests/fixtures/graphql/000077500000000000000000000000001460703211500171775ustar00rootroot00000000000000gql-3.6.0b2/tests/fixtures/graphql/sample.graphql000066400000000000000000000002641460703211500220420ustar00rootroot00000000000000type User { id: ID! username: String firstName: String lastName: String fullName: String } type Query { user(id: ID!): User } schema { query: Query } gql-3.6.0b2/tests/fixtures/vcr_cassettes/000077500000000000000000000000001460703211500204115ustar00rootroot00000000000000gql-3.6.0b2/tests/fixtures/vcr_cassettes/client.yaml000066400000000000000000000123341460703211500225560ustar00rootroot00000000000000interactions: - request: body: null headers: Accept: - text/html Accept-Encoding: - gzip, deflate Connection: - keep-alive Host: - swapi.graphene-python.org User-Agent: - python-requests/2.24.0 method: GET uri: http://127.0.0.1:8000/graphql response: body: string: "\n\n\n\n \n \n \n \n \n \n\n\n \n\ \n\n" headers: Content-Length: - '3808' Content-Type: - text/html; charset=utf-8 Date: - Fri, 06 Nov 2020 11:30:20 GMT Server: - WSGIServer/0.1 Python/2.7.18 Set-Cookie: - csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k; expires=Fri, 05-Nov-2021 11:30:20 GMT; Max-Age=31449600; Path=/ Vary: - Cookie X-Frame-Options: - SAMEORIGIN status: code: 200 message: OK version: 1 gql-3.6.0b2/tests/fixtures/vcr_cassettes/queries.yaml000066400000000000000000002317441460703211500227650ustar00rootroot00000000000000interactions: - request: body: '{"query": "query IntrospectionQuery {\n __schema {\n queryType {\n name\n }\n mutationType {\n name\n }\n subscriptionType {\n name\n }\n types {\n ...FullType\n }\n directives {\n name\n description\n locations\n args {\n ...InputValue\n }\n }\n }\n}\n\nfragment FullType on __Type {\n kind\n name\n description\n fields(includeDeprecated: true) {\n name\n description\n args {\n ...InputValue\n }\n type {\n ...TypeRef\n }\n 
isDeprecated\n deprecationReason\n }\n inputFields {\n ...InputValue\n }\n interfaces {\n ...TypeRef\n }\n enumValues(includeDeprecated: true) {\n name\n description\n isDeprecated\n deprecationReason\n }\n possibleTypes {\n ...TypeRef\n }\n}\n\nfragment InputValue on __InputValue {\n name\n description\n type {\n ...TypeRef\n }\n defaultValue\n}\n\nfragment TypeRef on __Type {\n kind\n name\n ofType {\n kind\n name\n ofType {\n kind\n name\n ofType {\n kind\n name\n ofType {\n kind\n name\n ofType {\n kind\n name\n ofType {\n kind\n name\n ofType {\n kind\n name\n }\n }\n }\n }\n }\n }\n }\n}"}' headers: Accept: - '*/*' Accept-Encoding: - gzip, deflate Connection: - keep-alive Content-Length: - '1417' Content-Type: - application/json Cookie: - csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k User-Agent: - python-requests/2.24.0 x-csrftoken: - kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k method: POST uri: http://127.0.0.1:8000/graphql response: body: string: 
'{"data":{"__schema":{"queryType":{"name":"Query"},"mutationType":{"name":"Mutation"},"subscriptionType":null,"types":[{"kind":"OBJECT","name":"Query","description":null,"fields":[{"name":"allFilms","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"episodeId_Gt","description":null,"type":{"kind":"SCALAR","name":"Float","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"FilmConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"allSpecies","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name_Startswith","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"name_Contains","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"SpecieConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"allCharacters","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":n
ull},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"PersonConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"allVehicles","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name_Startswith","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"VehicleConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"allPlanets","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"PlanetConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"allStarships","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String",
"ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name_Startswith","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"name_Contains","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"StarshipConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"allHeroes","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name_Startswith","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"name_Contains","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"HeroConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"film","description":"The ID of the object","args":[{"name":"id","description":null,"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"defaultValue":null}],"type":{"kind":"OBJECT","name":"Film","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"specie","description":"The ID of the 
object","args":[{"name":"id","description":null,"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"defaultValue":null}],"type":{"kind":"OBJECT","name":"Specie","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"character","description":"The ID of the object","args":[{"name":"id","description":null,"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"defaultValue":null}],"type":{"kind":"OBJECT","name":"Person","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"vehicle","description":"The ID of the object","args":[{"name":"id","description":null,"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"defaultValue":null}],"type":{"kind":"OBJECT","name":"Vehicle","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"planet","description":"The ID of the object","args":[{"name":"id","description":null,"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"defaultValue":null}],"type":{"kind":"OBJECT","name":"Planet","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"starship","description":"The ID of the object","args":[{"name":"id","description":null,"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"defaultValue":null}],"type":{"kind":"OBJECT","name":"Starship","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"hero","description":"The ID of the object","args":[{"name":"id","description":null,"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"defaultValue":null}],"type":{"kind":"OBJECT","name":"Hero","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"node","description":"The ID of the 
object","args":[{"name":"id","description":null,"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"defaultValue":null}],"type":{"kind":"INTERFACE","name":"Node","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"viewer","description":null,"args":[],"type":{"kind":"OBJECT","name":"Query","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"_debug","description":null,"args":[],"type":{"kind":"OBJECT","name":"DjangoDebug","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"FilmConnection","description":null,"fields":[{"name":"pageInfo","description":"Pagination data for this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"PageInfo","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"edges","description":"Contains the nodes in this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"LIST","name":null,"ofType":{"kind":"OBJECT","name":"FilmEdge","ofType":null}}},"isDeprecated":false,"deprecationReason":null},{"name":"totalCount","description":null,"args":[],"type":{"kind":"SCALAR","name":"Int","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"PageInfo","description":"The Relay compliant `PageInfo` type, containing data necessary to paginate this connection.","fields":[{"name":"hasNextPage","description":"When paginating forwards, are there more items?","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"Boolean","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"hasPreviousPage","description":"When paginating backwards, are there more 
items?","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"Boolean","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"startCursor","description":"When paginating backwards, the cursor to continue.","args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"endCursor","description":"When paginating forwards, the cursor to continue.","args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"SCALAR","name":"Boolean","description":"The `Boolean` scalar type represents `true` or `false`.","fields":null,"inputFields":null,"interfaces":null,"enumValues":null,"possibleTypes":null},{"kind":"SCALAR","name":"String","description":"The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.","fields":null,"inputFields":null,"interfaces":null,"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"FilmEdge","description":"A Relay edge containing a `Film` and its cursor.","fields":[{"name":"node","description":"The item at the end of the edge","args":[],"type":{"kind":"OBJECT","name":"Film","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"cursor","description":"A cursor for use in pagination","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"Film","description":"A single film.","fields":[{"name":"id","description":"The ID of the 
object.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"title","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"episodeId","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"Int","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"openingCrawl","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"director","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"releaseDate","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"Date","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"characters","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"PersonConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"planets","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":n
ull,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"PlanetConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"starships","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name_Startswith","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"name_Contains","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"StarshipConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"vehicles","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name_Startswith","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"Vehi
cleConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"species","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name_Startswith","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"name_Contains","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"SpecieConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"producers","description":null,"args":[],"type":{"kind":"LIST","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[{"kind":"INTERFACE","name":"Node","ofType":null}],"enumValues":null,"possibleTypes":null},{"kind":"INTERFACE","name":"Node","description":"An object with an ID","fields":[{"name":"id","description":"The ID of the object.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":null,"enumValues":null,"possibleTypes":[{"kind":"OBJECT","name":"Film","ofType":null},{"kind":"OBJECT","name":"Person","ofType":null},{"kind":"OBJECT","name":"Planet","ofType":null},{"kind":"OBJECT","name":"Specie","ofType":null},{"kind":"OBJECT","name":"Hero","ofType":null},{"kind":"OBJECT","name":"Starship","ofType":null},{"kind":"OBJECT","name":"Vehicle","ofType":null}]},{"kind":"SCALAR","name":"ID","description":"The `ID` scalar type represents a 
unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `\"4\"`) or integer (such as `4`) input value will be accepted as an ID.","fields":null,"inputFields":null,"interfaces":null,"enumValues":null,"possibleTypes":null},{"kind":"SCALAR","name":"Int","description":"The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31 - 1) and 2^31 - 1 since represented in JSON as double-precision floating point numbers specifiedby [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point).","fields":null,"inputFields":null,"interfaces":null,"enumValues":null,"possibleTypes":null},{"kind":"SCALAR","name":"Date","description":"The `Date` scalar type represents a Date\nvalue as specified by\n[iso8601](https://en.wikipedia.org/wiki/ISO_8601).","fields":null,"inputFields":null,"interfaces":null,"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"PersonConnection","description":null,"fields":[{"name":"pageInfo","description":"Pagination data for this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"PageInfo","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"edges","description":"Contains the nodes in this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"LIST","name":null,"ofType":{"kind":"OBJECT","name":"PersonEdge","ofType":null}}},"isDeprecated":false,"deprecationReason":null},{"name":"totalCount","description":null,"args":[],"type":{"kind":"SCALAR","name":"Int","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"PersonEdge","description":"A Relay edge containing a `Person` and its cursor.","fields":[{"name":"node","description":"The 
item at the end of the edge","args":[],"type":{"kind":"OBJECT","name":"Person","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"cursor","description":"A cursor for use in pagination","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"Person","description":"An individual person or character within the Star Wars universe.","fields":[{"name":"id","description":"The ID of the object.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"name","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"height","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"mass","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"hairColor","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"skinColor","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"eyeColor","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"birthYear","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name
":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"gender","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"homeworld","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"Planet","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"species","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name_Startswith","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"name_Contains","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"SpecieConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"films","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"episodeId_Gt","description":null,"type":{"kind":"SCALAR","name":"Float","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"FilmConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"na
me":"starships","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name_Startswith","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"name_Contains","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"StarshipConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"vehicles","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name_Startswith","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"VehicleConnection","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[{"kind":"INTERFACE","name":"Node","ofType":null}],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"Planet","description":"A large mass, planet or planetoid in the Star Wars Universe,\nat the time of 0 ABY.","fields":[{"name":"id","description":"The ID of the 
object.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"name","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"rotationPeriod","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"orbitalPeriod","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"diameter","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"gravity","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"surfaceWater","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"population","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"speciesSet","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name_Startsw
ith","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"name_Contains","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"SpecieConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"films","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"episodeId_Gt","description":null,"type":{"kind":"SCALAR","name":"Float","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"FilmConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"heroes","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name_Startswith","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"name_Contains","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"HeroConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"residents","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","of
Type":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"PersonConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"climates","description":null,"args":[],"type":{"kind":"LIST","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"terrains","description":null,"args":[],"type":{"kind":"LIST","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[{"kind":"INTERFACE","name":"Node","ofType":null}],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"SpecieConnection","description":null,"fields":[{"name":"pageInfo","description":"Pagination data for this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"PageInfo","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"edges","description":"Contains the nodes in this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"LIST","name":null,"ofType":{"kind":"OBJECT","name":"SpecieEdge","ofType":null}}},"isDeprecated":false,"deprecationReason":null},{"name":"totalCount","description":null,"args":[],"type":{"kind":"SCALAR","name":"Int","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"SpecieEdge","description":"A Relay edge containing a `Specie` and its 
cursor.","fields":[{"name":"node","description":"The item at the end of the edge","args":[],"type":{"kind":"OBJECT","name":"Specie","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"cursor","description":"A cursor for use in pagination","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"Specie","description":"A type of person or character within the Star Wars Universe.","fields":[{"name":"id","description":"The ID of the object.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"name","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"classification","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"designation","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"averageHeight","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"averageLifespan","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"homeworld","description":"","args":[],"type":{"kind":"OBJECT","name":"Planet","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"language","description":"","args":[],"type":{"kind":"NON_NULL","name":nul
l,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"people","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"PersonConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"films","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"episodeId_Gt","description":null,"type":{"kind":"SCALAR","name":"Float","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"FilmConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"eyeColors","description":null,"args":[],"type":{"kind":"LIST","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"hairColors","description":null,"args":[],"type":{"kind":"LIST","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"skinColors","description":null,"args":[],"type":{"kind":"LIST","name":null,"ofType":{"kind":"
SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[{"kind":"INTERFACE","name":"Node","ofType":null}],"enumValues":null,"possibleTypes":null},{"kind":"SCALAR","name":"Float","description":"The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point). ","fields":null,"inputFields":null,"interfaces":null,"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"HeroConnection","description":null,"fields":[{"name":"pageInfo","description":"Pagination data for this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"PageInfo","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"edges","description":"Contains the nodes in this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"LIST","name":null,"ofType":{"kind":"OBJECT","name":"HeroEdge","ofType":null}}},"isDeprecated":false,"deprecationReason":null},{"name":"totalCount","description":null,"args":[],"type":{"kind":"SCALAR","name":"Int","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"HeroEdge","description":"A Relay edge containing a `Hero` and its cursor.","fields":[{"name":"node","description":"The item at the end of the edge","args":[],"type":{"kind":"OBJECT","name":"Hero","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"cursor","description":"A cursor for use in pagination","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"Hero","description":"A hero created by fans","fields":[{"name":"id","description":"The ID of the 
object.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"name","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"homeworld","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"Planet","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[{"kind":"INTERFACE","name":"Node","ofType":null}],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"StarshipConnection","description":null,"fields":[{"name":"pageInfo","description":"Pagination data for this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"PageInfo","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"edges","description":"Contains the nodes in this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"LIST","name":null,"ofType":{"kind":"OBJECT","name":"StarshipEdge","ofType":null}}},"isDeprecated":false,"deprecationReason":null},{"name":"totalCount","description":null,"args":[],"type":{"kind":"SCALAR","name":"Int","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"StarshipEdge","description":"A Relay edge containing a `Starship` and its cursor.","fields":[{"name":"node","description":"The item at the end of the edge","args":[],"type":{"kind":"OBJECT","name":"Starship","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"cursor","description":"A cursor for use in 
pagination","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"Starship","description":"A single transport craft that has hyperdrive capability.","fields":[{"name":"id","description":"The ID of the object.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"name","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"model","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"manufacturer","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"costInCredits","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"length","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"maxAtmospheringSpeed","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"crew","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"passengers","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR
","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"cargoCapacity","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"consumables","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"hyperdriveRating","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"MGLT","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"starshipClass","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"pilots","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"PersonConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"films","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaul
tValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"episodeId_Gt","description":null,"type":{"kind":"SCALAR","name":"Float","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"FilmConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"manufacturers","description":null,"args":[],"type":{"kind":"LIST","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[{"kind":"INTERFACE","name":"Node","ofType":null}],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"VehicleConnection","description":null,"fields":[{"name":"pageInfo","description":"Pagination data for this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"PageInfo","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"edges","description":"Contains the nodes in this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"LIST","name":null,"ofType":{"kind":"OBJECT","name":"VehicleEdge","ofType":null}}},"isDeprecated":false,"deprecationReason":null},{"name":"totalCount","description":null,"args":[],"type":{"kind":"SCALAR","name":"Int","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"VehicleEdge","description":"A Relay edge containing a `Vehicle` and its cursor.","fields":[{"name":"node","description":"The item at the end of the edge","args":[],"type":{"kind":"OBJECT","name":"Vehicle","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"cursor","description":"A cursor for use in 
pagination","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"Vehicle","description":"A single transport craft that does not have hyperdrive capability","fields":[{"name":"id","description":"The ID of the object.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"name","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"model","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"manufacturer","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"costInCredits","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"length","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"maxAtmospheringSpeed","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"crew","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"passengers","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind"
:"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"cargoCapacity","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"consumables","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"vehicleClass","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"pilots","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"PersonConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"films","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"episodeId_Gt","description":null,"type":{"kind":"SCALAR","name":"Float","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","nam
e":"FilmConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"manufacturers","description":null,"args":[],"type":{"kind":"LIST","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[{"kind":"INTERFACE","name":"Node","ofType":null}],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"PlanetConnection","description":null,"fields":[{"name":"pageInfo","description":"Pagination data for this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"PageInfo","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"edges","description":"Contains the nodes in this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"LIST","name":null,"ofType":{"kind":"OBJECT","name":"PlanetEdge","ofType":null}}},"isDeprecated":false,"deprecationReason":null},{"name":"totalCount","description":null,"args":[],"type":{"kind":"SCALAR","name":"Int","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"PlanetEdge","description":"A Relay edge containing a `Planet` and its cursor.","fields":[{"name":"node","description":"The item at the end of the edge","args":[],"type":{"kind":"OBJECT","name":"Planet","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"cursor","description":"A cursor for use in 
pagination","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"DjangoDebug","description":null,"fields":[{"name":"sql","description":null,"args":[],"type":{"kind":"LIST","name":null,"ofType":{"kind":"OBJECT","name":"DjangoDebugSQL","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"DjangoDebugSQL","description":null,"fields":[{"name":"vendor","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"alias","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"sql","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"duration","description":null,"args":[],"type":{"kind":"SCALAR","name":"Float","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"rawSql","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"params","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"startTime","description":null,"args":[],"type":{"kind":"SCALAR","name":"Float","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"stopTime","description":null,"args":[],"type":{"kind":"SCALAR","name":"Float","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"isSlow","description":null,"args":[],"type":{"kind":"SCALAR","name":"Boolean","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"isSelect","des
cription":null,"args":[],"type":{"kind":"SCALAR","name":"Boolean","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"transId","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"transStatus","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"isoLevel","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"encoding","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"Mutation","description":null,"fields":[{"name":"createHero","description":null,"args":[{"name":"input","description":null,"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"INPUT_OBJECT","name":"CreateHeroInput","ofType":null}},"defaultValue":null}],"type":{"kind":"OBJECT","name":"CreateHeroPayload","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"CreateHeroPayload","description":null,"fields":[{"name":"hero","description":null,"args":[],"type":{"kind":"OBJECT","name":"Hero","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"ok","description":null,"args":[],"type":{"kind":"SCALAR","name":"Boolean","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"clientMutationId","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"INPUT_OBJECT","name":"CreateHeroInput","description":null,"fields":null,"inputFields":[{"name":"name","description":null
,"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"defaultValue":null},{"name":"homeworldId","description":null,"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"defaultValue":null},{"name":"clientMutationId","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"interfaces":null,"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"__Schema","description":"A GraphQL Schema defines the capabilities of a GraphQL server. It exposes all available types and directives on the server, as well as the entry points for query, mutation and subscription operations.","fields":[{"name":"types","description":"A list of all types supported by this server.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"LIST","name":null,"ofType":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"__Type","ofType":null}}}},"isDeprecated":false,"deprecationReason":null},{"name":"queryType","description":"The type that query operations will be rooted at.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"__Type","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"mutationType","description":"If this server supports mutation, the type that mutation operations will be rooted at.","args":[],"type":{"kind":"OBJECT","name":"__Type","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"subscriptionType","description":"If this server support subscription, the type that subscription operations will be rooted at.","args":[],"type":{"kind":"OBJECT","name":"__Type","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"directives","description":"A list of all directives supported by this 
server.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"LIST","name":null,"ofType":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"__Directive","ofType":null}}}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"__Type","description":"The fundamental unit of any GraphQL Schema is the type. There are many kinds of types in GraphQL as represented by the `__TypeKind` enum.\n\nDepending on the kind of a type, certain fields describe information about that type. Scalar types provide no information beyond a name and description, while Enum types provide their values. Object and Interface types provide the fields they describe. Abstract types, Union and Interface, provide the Object types possible at runtime. List and NonNull types compose other types.","fields":[{"name":"kind","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"ENUM","name":"__TypeKind","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"name","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"description","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"fields","description":null,"args":[{"name":"includeDeprecated","description":null,"type":{"kind":"SCALAR","name":"Boolean","ofType":null},"defaultValue":"false"}],"type":{"kind":"LIST","name":null,"ofType":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"__Field","ofType":null}}},"isDeprecated":false,"deprecationReason":null},{"name":"interfaces","description":null,"args":[],"type":{"kind":"LIST","name":null,"ofType":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"__Type","ofType":null}}},"isDeprecated":false,"deprecationReason":null},{"name":"possibleTypes","des
cription":null,"args":[],"type":{"kind":"LIST","name":null,"ofType":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"__Type","ofType":null}}},"isDeprecated":false,"deprecationReason":null},{"name":"enumValues","description":null,"args":[{"name":"includeDeprecated","description":null,"type":{"kind":"SCALAR","name":"Boolean","ofType":null},"defaultValue":"false"}],"type":{"kind":"LIST","name":null,"ofType":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"__EnumValue","ofType":null}}},"isDeprecated":false,"deprecationReason":null},{"name":"inputFields","description":null,"args":[],"type":{"kind":"LIST","name":null,"ofType":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"__InputValue","ofType":null}}},"isDeprecated":false,"deprecationReason":null},{"name":"ofType","description":null,"args":[],"type":{"kind":"OBJECT","name":"__Type","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"ENUM","name":"__TypeKind","description":"An enum describing what kind of type a given `__Type` is","fields":null,"inputFields":null,"interfaces":null,"enumValues":[{"name":"SCALAR","description":"Indicates this type is a scalar.","isDeprecated":false,"deprecationReason":null},{"name":"OBJECT","description":"Indicates this type is an object. `fields` and `interfaces` are valid fields.","isDeprecated":false,"deprecationReason":null},{"name":"INTERFACE","description":"Indicates this type is an interface. `fields` and `possibleTypes` are valid fields.","isDeprecated":false,"deprecationReason":null},{"name":"UNION","description":"Indicates this type is a union. `possibleTypes` is a valid field.","isDeprecated":false,"deprecationReason":null},{"name":"ENUM","description":"Indicates this type is an enum. 
`enumValues` is a valid field.","isDeprecated":false,"deprecationReason":null},{"name":"INPUT_OBJECT","description":"Indicates this type is an input object. `inputFields` is a valid field.","isDeprecated":false,"deprecationReason":null},{"name":"LIST","description":"Indicates this type is a list. `ofType` is a valid field.","isDeprecated":false,"deprecationReason":null},{"name":"NON_NULL","description":"Indicates this type is a non-null. `ofType` is a valid field.","isDeprecated":false,"deprecationReason":null}],"possibleTypes":null},{"kind":"OBJECT","name":"__Field","description":"Object and Interface types are described by a list of Fields, each of which has a name, potentially a list of arguments, and a return type.","fields":[{"name":"name","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"description","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"args","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"LIST","name":null,"ofType":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"__InputValue","ofType":null}}}},"isDeprecated":false,"deprecationReason":null},{"name":"type","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"__Type","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"isDeprecated","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"Boolean","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"deprecationReason","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"__Inpu
tValue","description":"Arguments provided to Fields or Directives and the input fields of an InputObject are represented as Input Values which describe their type and optionally a default value.","fields":[{"name":"name","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"description","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"type","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"__Type","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"defaultValue","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"__EnumValue","description":"One possible value for a given Enum. Enum values are unique values, not a placeholder for a string or numeric value. 
However an Enum value is returned in a JSON response as a string.","fields":[{"name":"name","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"description","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"isDeprecated","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"Boolean","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"deprecationReason","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"__Directive","description":"A Directive provides a way to describe alternate runtime execution and type validation behavior in a GraphQL document.\n\nIn some cases, you need to provide options to alter GraphQL''s execution behavior in ways field arguments will not suffice, such as conditionally including or skipping a field. 
Directives provide this by describing additional information to the executor.","fields":[{"name":"name","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"description","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"locations","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"LIST","name":null,"ofType":{"kind":"NON_NULL","name":null,"ofType":{"kind":"ENUM","name":"__DirectiveLocation","ofType":null}}}},"isDeprecated":false,"deprecationReason":null},{"name":"args","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"LIST","name":null,"ofType":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"__InputValue","ofType":null}}}},"isDeprecated":false,"deprecationReason":null},{"name":"onOperation","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"Boolean","ofType":null}},"isDeprecated":true,"deprecationReason":"Use `locations`."},{"name":"onFragment","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"Boolean","ofType":null}},"isDeprecated":true,"deprecationReason":"Use `locations`."},{"name":"onField","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"Boolean","ofType":null}},"isDeprecated":true,"deprecationReason":"Use `locations`."}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"ENUM","name":"__DirectiveLocation","description":"A Directive can be adjacent to many parts of the GraphQL language, a __DirectiveLocation describes one such possible adjacencies.","fields":null,"inputFields":null,"interfaces":null,"enumValues":[{"name":"QUERY","description":"Location adjacent to a query 
operation.","isDeprecated":false,"deprecationReason":null},{"name":"MUTATION","description":"Location adjacent to a mutation operation.","isDeprecated":false,"deprecationReason":null},{"name":"SUBSCRIPTION","description":"Location adjacent to a subscription operation.","isDeprecated":false,"deprecationReason":null},{"name":"FIELD","description":"Location adjacent to a field.","isDeprecated":false,"deprecationReason":null},{"name":"FRAGMENT_DEFINITION","description":"Location adjacent to a fragment definition.","isDeprecated":false,"deprecationReason":null},{"name":"FRAGMENT_SPREAD","description":"Location adjacent to a fragment spread.","isDeprecated":false,"deprecationReason":null},{"name":"INLINE_FRAGMENT","description":"Location adjacent to an inline fragment.","isDeprecated":false,"deprecationReason":null},{"name":"SCHEMA","description":"Location adjacent to a schema definition.","isDeprecated":false,"deprecationReason":null},{"name":"SCALAR","description":"Location adjacent to a scalar definition.","isDeprecated":false,"deprecationReason":null},{"name":"OBJECT","description":"Location adjacent to an object definition.","isDeprecated":false,"deprecationReason":null},{"name":"FIELD_DEFINITION","description":"Location adjacent to a field definition.","isDeprecated":false,"deprecationReason":null},{"name":"ARGUMENT_DEFINITION","description":"Location adjacent to an argument definition.","isDeprecated":false,"deprecationReason":null},{"name":"INTERFACE","description":"Location adjacent to an interface definition.","isDeprecated":false,"deprecationReason":null},{"name":"UNION","description":"Location adjacent to a union definition.","isDeprecated":false,"deprecationReason":null},{"name":"ENUM","description":"Location adjacent to an enum definition.","isDeprecated":false,"deprecationReason":null},{"name":"ENUM_VALUE","description":"Location adjacent to an enum value 
definition.","isDeprecated":false,"deprecationReason":null},{"name":"INPUT_OBJECT","description":"Location adjacent to an input object definition.","isDeprecated":false,"deprecationReason":null},{"name":"INPUT_FIELD_DEFINITION","description":"Location adjacent to an input object field definition.","isDeprecated":false,"deprecationReason":null}],"possibleTypes":null}],"directives":[{"name":"include","description":"Directs the executor to include this field or fragment only when the `if` argument is true.","locations":["FIELD","FRAGMENT_SPREAD","INLINE_FRAGMENT"],"args":[{"name":"if","description":"Included when true.","type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"Boolean","ofType":null}},"defaultValue":null}]},{"name":"skip","description":"Directs the executor to skip this field or fragment when the `if` argument is true.","locations":["FIELD","FRAGMENT_SPREAD","INLINE_FRAGMENT"],"args":[{"name":"if","description":"Skipped when true.","type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"Boolean","ofType":null}},"defaultValue":null}]}]}}}' headers: Content-Length: - '69553' Content-Type: - application/json Date: - Fri, 06 Nov 2020 11:30:21 GMT Server: - WSGIServer/0.1 Python/2.7.18 Set-Cookie: - csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k; expires=Fri, 05-Nov-2021 11:30:21 GMT; Max-Age=31449600; Path=/ Vary: - Cookie X-Frame-Options: - SAMEORIGIN status: code: 200 message: OK - request: body: '{"query": "{\n myFavoriteFilm: film(id: \"RmlsbToz\") {\n id\n title\n episodeId\n characters(first: 5) {\n edges {\n node {\n name\n }\n }\n }\n }\n}"}' headers: Accept: - '*/*' Accept-Encoding: - gzip, deflate Connection: - keep-alive Content-Length: - '204' Content-Type: - application/json Cookie: - csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k; csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k User-Agent: - python-requests/2.24.0 x-csrftoken: - 
kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k method: POST uri: http://127.0.0.1:8000/graphql response: body: string: '{"data":{"myFavoriteFilm":{"id":"RmlsbToz","title":"Return of the Jedi","episodeId":6,"characters":{"edges":[{"node":{"name":"Luke Skywalker"}},{"node":{"name":"C-3PO"}},{"node":{"name":"R2-D2"}},{"node":{"name":"Darth Vader"}},{"node":{"name":"Leia Organa"}}]}}}}' headers: Content-Length: - '264' Content-Type: - application/json Date: - Fri, 06 Nov 2020 11:30:21 GMT Server: - WSGIServer/0.1 Python/2.7.18 Set-Cookie: - csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k; expires=Fri, 05-Nov-2021 11:30:21 GMT; Max-Age=31449600; Path=/ Vary: - Cookie X-Frame-Options: - SAMEORIGIN status: code: 200 message: OK - request: body: '{"query": "query Planet($id: ID!) {\n planet(id: $id) {\n id\n name\n }\n}", "variables": {"id": "UGxhbmV0OjEw"}}' headers: Accept: - '*/*' Accept-Encoding: - gzip, deflate Connection: - keep-alive Content-Length: - '123' Content-Type: - application/json Cookie: - csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k; csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k User-Agent: - python-requests/2.24.0 x-csrftoken: - kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k method: POST uri: http://127.0.0.1:8000/graphql response: body: string: '{"data":{"planet":{"id":"UGxhbmV0OjEw","name":"Kamino"}}}' headers: Content-Length: - '57' Content-Type: - application/json Date: - Fri, 06 Nov 2020 11:30:21 GMT Server: - WSGIServer/0.1 Python/2.7.18 Set-Cookie: - csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k; expires=Fri, 05-Nov-2021 11:30:21 GMT; Max-Age=31449600; Path=/ Vary: - Cookie X-Frame-Options: - SAMEORIGIN status: code: 200 message: OK - request: body: '{"query": "query Planet1 {\n planet(id: \"UGxhbmV0OjEw\") {\n id\n name\n }\n}\n\nquery Planet2 {\n planet(id: \"UGxhbmV0OjEx\") {\n id\n name\n }\n}", 
"operationName": "Planet2"}' headers: Accept: - '*/*' Accept-Encoding: - gzip, deflate Connection: - keep-alive Content-Length: - '197' Content-Type: - application/json Cookie: - csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k; csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k User-Agent: - python-requests/2.24.0 x-csrftoken: - kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k method: POST uri: http://127.0.0.1:8000/graphql response: body: string: '{"data":{"planet":{"id":"UGxhbmV0OjEx","name":"Geonosis"}}}' headers: Content-Length: - '59' Content-Type: - application/json Date: - Fri, 06 Nov 2020 11:30:21 GMT Server: - WSGIServer/0.1 Python/2.7.18 Set-Cookie: - csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k; expires=Fri, 05-Nov-2021 11:30:21 GMT; Max-Age=31449600; Path=/ Vary: - Cookie X-Frame-Options: - SAMEORIGIN status: code: 200 message: OK - request: body: '{"query": "query Planet($id: ID!) 
{\n planet(id: $id) {\n id\n name\n }\n}"}' headers: Accept: - '*/*' Accept-Encoding: - gzip, deflate Connection: - keep-alive Content-Length: - '86' Content-Type: - application/json Cookie: - csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k; csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k User-Agent: - python-requests/2.26.0 authorization: - xxx-123 method: POST uri: http://127.0.0.1:8000/graphql response: body: string: '{"data":{"planet":{"id":"UGxhbmV0OjEx","name":"Geonosis"}}}' headers: Content-Length: - '59' Content-Type: - application/json Date: - Fri, 06 Nov 2020 11:30:21 GMT Server: - WSGIServer/0.1 Python/2.7.18 Set-Cookie: - csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k; expires=Fri, 05-Nov-2021 11:30:21 GMT; Max-Age=31449600; Path=/ Vary: - Cookie X-Frame-Options: - SAMEORIGIN status: code: 200 message: OK version: 1 gql-3.6.0b2/tests/fixtures/vcr_cassettes/queries_batch.yaml000066400000000000000000002317641460703211500241300ustar00rootroot00000000000000interactions: - request: body: '{"query": "query IntrospectionQuery {\n __schema {\n queryType {\n name\n }\n mutationType {\n name\n }\n subscriptionType {\n name\n }\n types {\n ...FullType\n }\n directives {\n name\n description\n locations\n args {\n ...InputValue\n }\n }\n }\n}\n\nfragment FullType on __Type {\n kind\n name\n description\n fields(includeDeprecated: true) {\n name\n description\n args {\n ...InputValue\n }\n type {\n ...TypeRef\n }\n isDeprecated\n deprecationReason\n }\n inputFields {\n ...InputValue\n }\n interfaces {\n ...TypeRef\n }\n enumValues(includeDeprecated: true) {\n name\n description\n isDeprecated\n deprecationReason\n }\n possibleTypes {\n ...TypeRef\n }\n}\n\nfragment InputValue on __InputValue {\n name\n description\n type {\n ...TypeRef\n }\n defaultValue\n}\n\nfragment TypeRef on __Type {\n kind\n name\n ofType {\n kind\n name\n ofType {\n kind\n name\n ofType {\n kind\n name\n ofType 
{\n kind\n name\n ofType {\n kind\n name\n ofType {\n kind\n name\n ofType {\n kind\n name\n }\n }\n }\n }\n }\n }\n }\n}"}' headers: Accept: - '*/*' Accept-Encoding: - gzip, deflate Connection: - keep-alive Content-Length: - '1417' Content-Type: - application/json Cookie: - csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k User-Agent: - python-requests/2.24.0 x-csrftoken: - kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k method: POST uri: http://127.0.0.1:8000/graphql response: body: string: '{"data":{"__schema":{"queryType":{"name":"Query"},"mutationType":{"name":"Mutation"},"subscriptionType":null,"types":[{"kind":"OBJECT","name":"Query","description":null,"fields":[{"name":"allFilms","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"episodeId_Gt","description":null,"type":{"kind":"SCALAR","name":"Float","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"FilmConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"allSpecies","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name_Startswith","description":null,"type":{"kind":"SCALAR","name":"Strin
g","ofType":null},"defaultValue":null},{"name":"name_Contains","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"SpecieConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"allCharacters","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"PersonConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"allVehicles","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name_Startswith","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"VehicleConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"allPlanets","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","descrip
tion":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"PlanetConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"allStarships","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name_Startswith","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"name_Contains","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"StarshipConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"allHeroes","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name_Startswith","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"name_Contains","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"O
BJECT","name":"HeroConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"film","description":"The ID of the object","args":[{"name":"id","description":null,"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"defaultValue":null}],"type":{"kind":"OBJECT","name":"Film","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"specie","description":"The ID of the object","args":[{"name":"id","description":null,"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"defaultValue":null}],"type":{"kind":"OBJECT","name":"Specie","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"character","description":"The ID of the object","args":[{"name":"id","description":null,"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"defaultValue":null}],"type":{"kind":"OBJECT","name":"Person","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"vehicle","description":"The ID of the object","args":[{"name":"id","description":null,"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"defaultValue":null}],"type":{"kind":"OBJECT","name":"Vehicle","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"planet","description":"The ID of the object","args":[{"name":"id","description":null,"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"defaultValue":null}],"type":{"kind":"OBJECT","name":"Planet","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"starship","description":"The ID of the object","args":[{"name":"id","description":null,"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"defaultValue":null}],"type":{"kind":"OBJECT","name":"Starship","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"hero","description":"The ID of 
the object","args":[{"name":"id","description":null,"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"defaultValue":null}],"type":{"kind":"OBJECT","name":"Hero","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"node","description":"The ID of the object","args":[{"name":"id","description":null,"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"defaultValue":null}],"type":{"kind":"INTERFACE","name":"Node","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"viewer","description":null,"args":[],"type":{"kind":"OBJECT","name":"Query","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"_debug","description":null,"args":[],"type":{"kind":"OBJECT","name":"DjangoDebug","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"FilmConnection","description":null,"fields":[{"name":"pageInfo","description":"Pagination data for this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"PageInfo","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"edges","description":"Contains the nodes in this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"LIST","name":null,"ofType":{"kind":"OBJECT","name":"FilmEdge","ofType":null}}},"isDeprecated":false,"deprecationReason":null},{"name":"totalCount","description":null,"args":[],"type":{"kind":"SCALAR","name":"Int","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"PageInfo","description":"The Relay compliant `PageInfo` type, containing data necessary to paginate this connection.","fields":[{"name":"hasNextPage","description":"When paginating forwards, are there more 
items?","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"Boolean","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"hasPreviousPage","description":"When paginating backwards, are there more items?","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"Boolean","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"startCursor","description":"When paginating backwards, the cursor to continue.","args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"endCursor","description":"When paginating forwards, the cursor to continue.","args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"SCALAR","name":"Boolean","description":"The `Boolean` scalar type represents `true` or `false`.","fields":null,"inputFields":null,"interfaces":null,"enumValues":null,"possibleTypes":null},{"kind":"SCALAR","name":"String","description":"The `String` scalar type represents textual data, represented as UTF-8 character sequences. 
The String type is most often used by GraphQL to represent free-form human-readable text.","fields":null,"inputFields":null,"interfaces":null,"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"FilmEdge","description":"A Relay edge containing a `Film` and its cursor.","fields":[{"name":"node","description":"The item at the end of the edge","args":[],"type":{"kind":"OBJECT","name":"Film","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"cursor","description":"A cursor for use in pagination","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"Film","description":"A single film.","fields":[{"name":"id","description":"The ID of the object.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"title","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"episodeId","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"Int","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"openingCrawl","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"director","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"releaseDate","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"Date","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"characters","des
cription":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"PersonConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"planets","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"PlanetConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"starships","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name_Startswith","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"na
me":"name_Contains","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"StarshipConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"vehicles","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name_Startswith","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"VehicleConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"species","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name_Startswith","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"name_Contains","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"SpecieConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"producers","description":null,"args":[],"type":{"kind":"LIST","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interf
aces":[{"kind":"INTERFACE","name":"Node","ofType":null}],"enumValues":null,"possibleTypes":null},{"kind":"INTERFACE","name":"Node","description":"An object with an ID","fields":[{"name":"id","description":"The ID of the object.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":null,"enumValues":null,"possibleTypes":[{"kind":"OBJECT","name":"Film","ofType":null},{"kind":"OBJECT","name":"Person","ofType":null},{"kind":"OBJECT","name":"Planet","ofType":null},{"kind":"OBJECT","name":"Specie","ofType":null},{"kind":"OBJECT","name":"Hero","ofType":null},{"kind":"OBJECT","name":"Starship","ofType":null},{"kind":"OBJECT","name":"Vehicle","ofType":null}]},{"kind":"SCALAR","name":"ID","description":"The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `\"4\"`) or integer (such as `4`) input value will be accepted as an ID.","fields":null,"inputFields":null,"interfaces":null,"enumValues":null,"possibleTypes":null},{"kind":"SCALAR","name":"Int","description":"The `Int` scalar type represents non-fractional signed whole numeric values. 
Int can represent values between -(2^31 - 1) and 2^31 - 1 since represented in JSON as double-precision floating point numbers specifiedby [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point).","fields":null,"inputFields":null,"interfaces":null,"enumValues":null,"possibleTypes":null},{"kind":"SCALAR","name":"Date","description":"The `Date` scalar type represents a Date\nvalue as specified by\n[iso8601](https://en.wikipedia.org/wiki/ISO_8601).","fields":null,"inputFields":null,"interfaces":null,"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"PersonConnection","description":null,"fields":[{"name":"pageInfo","description":"Pagination data for this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"PageInfo","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"edges","description":"Contains the nodes in this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"LIST","name":null,"ofType":{"kind":"OBJECT","name":"PersonEdge","ofType":null}}},"isDeprecated":false,"deprecationReason":null},{"name":"totalCount","description":null,"args":[],"type":{"kind":"SCALAR","name":"Int","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"PersonEdge","description":"A Relay edge containing a `Person` and its cursor.","fields":[{"name":"node","description":"The item at the end of the edge","args":[],"type":{"kind":"OBJECT","name":"Person","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"cursor","description":"A cursor for use in pagination","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"Person","description":"An individual person or character 
within the Star Wars universe.","fields":[{"name":"id","description":"The ID of the object.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"name","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"height","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"mass","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"hairColor","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"skinColor","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"eyeColor","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"birthYear","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"gender","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"homeworld","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"Planet","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"species","description":null,"args":[{"name":"before","de
scription":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name_Startswith","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"name_Contains","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"SpecieConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"films","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"episodeId_Gt","description":null,"type":{"kind":"SCALAR","name":"Float","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"FilmConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"starships","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name_Startswith","descript
ion":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"name_Contains","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"StarshipConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"vehicles","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name_Startswith","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"VehicleConnection","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[{"kind":"INTERFACE","name":"Node","ofType":null}],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"Planet","description":"A large mass, planet or planetoid in the Star Wars Universe,\nat the time of 0 ABY.","fields":[{"name":"id","description":"The ID of the 
object.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"name","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"rotationPeriod","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"orbitalPeriod","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"diameter","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"gravity","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"surfaceWater","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"population","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"speciesSet","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name_Startsw
ith","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"name_Contains","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"SpecieConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"films","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"episodeId_Gt","description":null,"type":{"kind":"SCALAR","name":"Float","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"FilmConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"heroes","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name_Startswith","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"name_Contains","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"HeroConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"residents","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","of
Type":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"PersonConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"climates","description":null,"args":[],"type":{"kind":"LIST","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"terrains","description":null,"args":[],"type":{"kind":"LIST","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[{"kind":"INTERFACE","name":"Node","ofType":null}],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"SpecieConnection","description":null,"fields":[{"name":"pageInfo","description":"Pagination data for this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"PageInfo","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"edges","description":"Contains the nodes in this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"LIST","name":null,"ofType":{"kind":"OBJECT","name":"SpecieEdge","ofType":null}}},"isDeprecated":false,"deprecationReason":null},{"name":"totalCount","description":null,"args":[],"type":{"kind":"SCALAR","name":"Int","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"SpecieEdge","description":"A Relay edge containing a `Specie` and its 
cursor.","fields":[{"name":"node","description":"The item at the end of the edge","args":[],"type":{"kind":"OBJECT","name":"Specie","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"cursor","description":"A cursor for use in pagination","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"Specie","description":"A type of person or character within the Star Wars Universe.","fields":[{"name":"id","description":"The ID of the object.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"name","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"classification","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"designation","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"averageHeight","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"averageLifespan","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"homeworld","description":"","args":[],"type":{"kind":"OBJECT","name":"Planet","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"language","description":"","args":[],"type":{"kind":"NON_NULL","name":nul
l,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"people","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"PersonConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"films","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"episodeId_Gt","description":null,"type":{"kind":"SCALAR","name":"Float","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"FilmConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"eyeColors","description":null,"args":[],"type":{"kind":"LIST","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"hairColors","description":null,"args":[],"type":{"kind":"LIST","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"skinColors","description":null,"args":[],"type":{"kind":"LIST","name":null,"ofType":{"kind":"
SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[{"kind":"INTERFACE","name":"Node","ofType":null}],"enumValues":null,"possibleTypes":null},{"kind":"SCALAR","name":"Float","description":"The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point). ","fields":null,"inputFields":null,"interfaces":null,"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"HeroConnection","description":null,"fields":[{"name":"pageInfo","description":"Pagination data for this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"PageInfo","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"edges","description":"Contains the nodes in this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"LIST","name":null,"ofType":{"kind":"OBJECT","name":"HeroEdge","ofType":null}}},"isDeprecated":false,"deprecationReason":null},{"name":"totalCount","description":null,"args":[],"type":{"kind":"SCALAR","name":"Int","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"HeroEdge","description":"A Relay edge containing a `Hero` and its cursor.","fields":[{"name":"node","description":"The item at the end of the edge","args":[],"type":{"kind":"OBJECT","name":"Hero","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"cursor","description":"A cursor for use in pagination","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"Hero","description":"A hero created by fans","fields":[{"name":"id","description":"The ID of the 
object.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"name","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"homeworld","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"Planet","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[{"kind":"INTERFACE","name":"Node","ofType":null}],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"StarshipConnection","description":null,"fields":[{"name":"pageInfo","description":"Pagination data for this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"PageInfo","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"edges","description":"Contains the nodes in this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"LIST","name":null,"ofType":{"kind":"OBJECT","name":"StarshipEdge","ofType":null}}},"isDeprecated":false,"deprecationReason":null},{"name":"totalCount","description":null,"args":[],"type":{"kind":"SCALAR","name":"Int","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"StarshipEdge","description":"A Relay edge containing a `Starship` and its cursor.","fields":[{"name":"node","description":"The item at the end of the edge","args":[],"type":{"kind":"OBJECT","name":"Starship","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"cursor","description":"A cursor for use in 
pagination","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"Starship","description":"A single transport craft that has hyperdrive capability.","fields":[{"name":"id","description":"The ID of the object.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"name","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"model","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"manufacturer","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"costInCredits","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"length","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"maxAtmospheringSpeed","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"crew","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"passengers","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR
","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"cargoCapacity","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"consumables","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"hyperdriveRating","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"MGLT","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"starshipClass","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"pilots","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"PersonConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"films","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaul
tValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"episodeId_Gt","description":null,"type":{"kind":"SCALAR","name":"Float","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"FilmConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"manufacturers","description":null,"args":[],"type":{"kind":"LIST","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[{"kind":"INTERFACE","name":"Node","ofType":null}],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"VehicleConnection","description":null,"fields":[{"name":"pageInfo","description":"Pagination data for this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"PageInfo","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"edges","description":"Contains the nodes in this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"LIST","name":null,"ofType":{"kind":"OBJECT","name":"VehicleEdge","ofType":null}}},"isDeprecated":false,"deprecationReason":null},{"name":"totalCount","description":null,"args":[],"type":{"kind":"SCALAR","name":"Int","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"VehicleEdge","description":"A Relay edge containing a `Vehicle` and its cursor.","fields":[{"name":"node","description":"The item at the end of the edge","args":[],"type":{"kind":"OBJECT","name":"Vehicle","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"cursor","description":"A cursor for use in 
pagination","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"Vehicle","description":"A single transport craft that does not have hyperdrive capability","fields":[{"name":"id","description":"The ID of the object.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"ID","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"name","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"model","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"manufacturer","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"costInCredits","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"length","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"maxAtmospheringSpeed","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"crew","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"passengers","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind"
:"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"cargoCapacity","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"consumables","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"vehicleClass","description":"","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"pilots","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"name","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","name":"PersonConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"films","description":null,"args":[{"name":"before","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"after","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null},{"name":"first","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"last","description":null,"type":{"kind":"SCALAR","name":"Int","ofType":null},"defaultValue":null},{"name":"episodeId_Gt","description":null,"type":{"kind":"SCALAR","name":"Float","ofType":null},"defaultValue":null}],"type":{"kind":"OBJECT","nam
e":"FilmConnection","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"manufacturers","description":null,"args":[],"type":{"kind":"LIST","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[{"kind":"INTERFACE","name":"Node","ofType":null}],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"PlanetConnection","description":null,"fields":[{"name":"pageInfo","description":"Pagination data for this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"PageInfo","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"edges","description":"Contains the nodes in this connection.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"LIST","name":null,"ofType":{"kind":"OBJECT","name":"PlanetEdge","ofType":null}}},"isDeprecated":false,"deprecationReason":null},{"name":"totalCount","description":null,"args":[],"type":{"kind":"SCALAR","name":"Int","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"PlanetEdge","description":"A Relay edge containing a `Planet` and its cursor.","fields":[{"name":"node","description":"The item at the end of the edge","args":[],"type":{"kind":"OBJECT","name":"Planet","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"cursor","description":"A cursor for use in 
pagination","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"DjangoDebug","description":null,"fields":[{"name":"sql","description":null,"args":[],"type":{"kind":"LIST","name":null,"ofType":{"kind":"OBJECT","name":"DjangoDebugSQL","ofType":null}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"DjangoDebugSQL","description":null,"fields":[{"name":"vendor","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"alias","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"sql","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"duration","description":null,"args":[],"type":{"kind":"SCALAR","name":"Float","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"rawSql","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"params","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"startTime","description":null,"args":[],"type":{"kind":"SCALAR","name":"Float","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"stopTime","description":null,"args":[],"type":{"kind":"SCALAR","name":"Float","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"isSlow","description":null,"args":[],"type":{"kind":"SCALAR","name":"Boolean","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"isSelect","des
cription":null,"args":[],"type":{"kind":"SCALAR","name":"Boolean","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"transId","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"transStatus","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"isoLevel","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"encoding","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"Mutation","description":null,"fields":[{"name":"createHero","description":null,"args":[{"name":"input","description":null,"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"INPUT_OBJECT","name":"CreateHeroInput","ofType":null}},"defaultValue":null}],"type":{"kind":"OBJECT","name":"CreateHeroPayload","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"CreateHeroPayload","description":null,"fields":[{"name":"hero","description":null,"args":[],"type":{"kind":"OBJECT","name":"Hero","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"ok","description":null,"args":[],"type":{"kind":"SCALAR","name":"Boolean","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"clientMutationId","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"INPUT_OBJECT","name":"CreateHeroInput","description":null,"fields":null,"inputFields":[{"name":"name","description":null
,"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"defaultValue":null},{"name":"homeworldId","description":null,"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"defaultValue":null},{"name":"clientMutationId","description":null,"type":{"kind":"SCALAR","name":"String","ofType":null},"defaultValue":null}],"interfaces":null,"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"__Schema","description":"A GraphQL Schema defines the capabilities of a GraphQL server. It exposes all available types and directives on the server, as well as the entry points for query, mutation and subscription operations.","fields":[{"name":"types","description":"A list of all types supported by this server.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"LIST","name":null,"ofType":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"__Type","ofType":null}}}},"isDeprecated":false,"deprecationReason":null},{"name":"queryType","description":"The type that query operations will be rooted at.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"__Type","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"mutationType","description":"If this server supports mutation, the type that mutation operations will be rooted at.","args":[],"type":{"kind":"OBJECT","name":"__Type","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"subscriptionType","description":"If this server support subscription, the type that subscription operations will be rooted at.","args":[],"type":{"kind":"OBJECT","name":"__Type","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"directives","description":"A list of all directives supported by this 
server.","args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"LIST","name":null,"ofType":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"__Directive","ofType":null}}}},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"__Type","description":"The fundamental unit of any GraphQL Schema is the type. There are many kinds of types in GraphQL as represented by the `__TypeKind` enum.\n\nDepending on the kind of a type, certain fields describe information about that type. Scalar types provide no information beyond a name and description, while Enum types provide their values. Object and Interface types provide the fields they describe. Abstract types, Union and Interface, provide the Object types possible at runtime. List and NonNull types compose other types.","fields":[{"name":"kind","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"ENUM","name":"__TypeKind","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"name","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"description","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"fields","description":null,"args":[{"name":"includeDeprecated","description":null,"type":{"kind":"SCALAR","name":"Boolean","ofType":null},"defaultValue":"false"}],"type":{"kind":"LIST","name":null,"ofType":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"__Field","ofType":null}}},"isDeprecated":false,"deprecationReason":null},{"name":"interfaces","description":null,"args":[],"type":{"kind":"LIST","name":null,"ofType":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"__Type","ofType":null}}},"isDeprecated":false,"deprecationReason":null},{"name":"possibleTypes","des
cription":null,"args":[],"type":{"kind":"LIST","name":null,"ofType":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"__Type","ofType":null}}},"isDeprecated":false,"deprecationReason":null},{"name":"enumValues","description":null,"args":[{"name":"includeDeprecated","description":null,"type":{"kind":"SCALAR","name":"Boolean","ofType":null},"defaultValue":"false"}],"type":{"kind":"LIST","name":null,"ofType":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"__EnumValue","ofType":null}}},"isDeprecated":false,"deprecationReason":null},{"name":"inputFields","description":null,"args":[],"type":{"kind":"LIST","name":null,"ofType":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"__InputValue","ofType":null}}},"isDeprecated":false,"deprecationReason":null},{"name":"ofType","description":null,"args":[],"type":{"kind":"OBJECT","name":"__Type","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"ENUM","name":"__TypeKind","description":"An enum describing what kind of type a given `__Type` is","fields":null,"inputFields":null,"interfaces":null,"enumValues":[{"name":"SCALAR","description":"Indicates this type is a scalar.","isDeprecated":false,"deprecationReason":null},{"name":"OBJECT","description":"Indicates this type is an object. `fields` and `interfaces` are valid fields.","isDeprecated":false,"deprecationReason":null},{"name":"INTERFACE","description":"Indicates this type is an interface. `fields` and `possibleTypes` are valid fields.","isDeprecated":false,"deprecationReason":null},{"name":"UNION","description":"Indicates this type is a union. `possibleTypes` is a valid field.","isDeprecated":false,"deprecationReason":null},{"name":"ENUM","description":"Indicates this type is an enum. 
`enumValues` is a valid field.","isDeprecated":false,"deprecationReason":null},{"name":"INPUT_OBJECT","description":"Indicates this type is an input object. `inputFields` is a valid field.","isDeprecated":false,"deprecationReason":null},{"name":"LIST","description":"Indicates this type is a list. `ofType` is a valid field.","isDeprecated":false,"deprecationReason":null},{"name":"NON_NULL","description":"Indicates this type is a non-null. `ofType` is a valid field.","isDeprecated":false,"deprecationReason":null}],"possibleTypes":null},{"kind":"OBJECT","name":"__Field","description":"Object and Interface types are described by a list of Fields, each of which has a name, potentially a list of arguments, and a return type.","fields":[{"name":"name","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"description","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"args","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"LIST","name":null,"ofType":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"__InputValue","ofType":null}}}},"isDeprecated":false,"deprecationReason":null},{"name":"type","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"__Type","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"isDeprecated","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"Boolean","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"deprecationReason","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"__Inpu
tValue","description":"Arguments provided to Fields or Directives and the input fields of an InputObject are represented as Input Values which describe their type and optionally a default value.","fields":[{"name":"name","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"description","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"type","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"__Type","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"defaultValue","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"__EnumValue","description":"One possible value for a given Enum. Enum values are unique values, not a placeholder for a string or numeric value. 
However an Enum value is returned in a JSON response as a string.","fields":[{"name":"name","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"description","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"isDeprecated","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"Boolean","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"deprecationReason","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"OBJECT","name":"__Directive","description":"A Directive provides a way to describe alternate runtime execution and type validation behavior in a GraphQL document.\n\nIn some cases, you need to provide options to alter GraphQL''s execution behavior in ways field arguments will not suffice, such as conditionally including or skipping a field. 
Directives provide this by describing additional information to the executor.","fields":[{"name":"name","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"String","ofType":null}},"isDeprecated":false,"deprecationReason":null},{"name":"description","description":null,"args":[],"type":{"kind":"SCALAR","name":"String","ofType":null},"isDeprecated":false,"deprecationReason":null},{"name":"locations","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"LIST","name":null,"ofType":{"kind":"NON_NULL","name":null,"ofType":{"kind":"ENUM","name":"__DirectiveLocation","ofType":null}}}},"isDeprecated":false,"deprecationReason":null},{"name":"args","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"LIST","name":null,"ofType":{"kind":"NON_NULL","name":null,"ofType":{"kind":"OBJECT","name":"__InputValue","ofType":null}}}},"isDeprecated":false,"deprecationReason":null},{"name":"onOperation","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"Boolean","ofType":null}},"isDeprecated":true,"deprecationReason":"Use `locations`."},{"name":"onFragment","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"Boolean","ofType":null}},"isDeprecated":true,"deprecationReason":"Use `locations`."},{"name":"onField","description":null,"args":[],"type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"Boolean","ofType":null}},"isDeprecated":true,"deprecationReason":"Use `locations`."}],"inputFields":null,"interfaces":[],"enumValues":null,"possibleTypes":null},{"kind":"ENUM","name":"__DirectiveLocation","description":"A Directive can be adjacent to many parts of the GraphQL language, a __DirectiveLocation describes one such possible adjacencies.","fields":null,"inputFields":null,"interfaces":null,"enumValues":[{"name":"QUERY","description":"Location adjacent to a query 
operation.","isDeprecated":false,"deprecationReason":null},{"name":"MUTATION","description":"Location adjacent to a mutation operation.","isDeprecated":false,"deprecationReason":null},{"name":"SUBSCRIPTION","description":"Location adjacent to a subscription operation.","isDeprecated":false,"deprecationReason":null},{"name":"FIELD","description":"Location adjacent to a field.","isDeprecated":false,"deprecationReason":null},{"name":"FRAGMENT_DEFINITION","description":"Location adjacent to a fragment definition.","isDeprecated":false,"deprecationReason":null},{"name":"FRAGMENT_SPREAD","description":"Location adjacent to a fragment spread.","isDeprecated":false,"deprecationReason":null},{"name":"INLINE_FRAGMENT","description":"Location adjacent to an inline fragment.","isDeprecated":false,"deprecationReason":null},{"name":"SCHEMA","description":"Location adjacent to a schema definition.","isDeprecated":false,"deprecationReason":null},{"name":"SCALAR","description":"Location adjacent to a scalar definition.","isDeprecated":false,"deprecationReason":null},{"name":"OBJECT","description":"Location adjacent to an object definition.","isDeprecated":false,"deprecationReason":null},{"name":"FIELD_DEFINITION","description":"Location adjacent to a field definition.","isDeprecated":false,"deprecationReason":null},{"name":"ARGUMENT_DEFINITION","description":"Location adjacent to an argument definition.","isDeprecated":false,"deprecationReason":null},{"name":"INTERFACE","description":"Location adjacent to an interface definition.","isDeprecated":false,"deprecationReason":null},{"name":"UNION","description":"Location adjacent to a union definition.","isDeprecated":false,"deprecationReason":null},{"name":"ENUM","description":"Location adjacent to an enum definition.","isDeprecated":false,"deprecationReason":null},{"name":"ENUM_VALUE","description":"Location adjacent to an enum value 
definition.","isDeprecated":false,"deprecationReason":null},{"name":"INPUT_OBJECT","description":"Location adjacent to an input object definition.","isDeprecated":false,"deprecationReason":null},{"name":"INPUT_FIELD_DEFINITION","description":"Location adjacent to an input object field definition.","isDeprecated":false,"deprecationReason":null}],"possibleTypes":null}],"directives":[{"name":"include","description":"Directs the executor to include this field or fragment only when the `if` argument is true.","locations":["FIELD","FRAGMENT_SPREAD","INLINE_FRAGMENT"],"args":[{"name":"if","description":"Included when true.","type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"Boolean","ofType":null}},"defaultValue":null}]},{"name":"skip","description":"Directs the executor to skip this field or fragment when the `if` argument is true.","locations":["FIELD","FRAGMENT_SPREAD","INLINE_FRAGMENT"],"args":[{"name":"if","description":"Skipped when true.","type":{"kind":"NON_NULL","name":null,"ofType":{"kind":"SCALAR","name":"Boolean","ofType":null}},"defaultValue":null}]}]}}}' headers: Content-Length: - '69553' Content-Type: - application/json Date: - Fri, 06 Nov 2020 11:30:21 GMT Server: - WSGIServer/0.1 Python/2.7.18 Set-Cookie: - csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k; expires=Fri, 05-Nov-2021 11:30:21 GMT; Max-Age=31449600; Path=/ Vary: - Cookie X-Frame-Options: - SAMEORIGIN status: code: 200 message: OK - request: body: '[{"query": "{\n myFavoriteFilm: film(id: \"RmlsbToz\") {\n id\n title\n episodeId\n characters(first: 5) {\n edges {\n node {\n name\n }\n }\n }\n }\n}"}]' headers: Accept: - '*/*' Accept-Encoding: - gzip, deflate Connection: - keep-alive Content-Length: - '204' Content-Type: - application/json Cookie: - csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k; csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k User-Agent: - python-requests/2.24.0 x-csrftoken: - 
kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k method: POST uri: http://127.0.0.1:8000/graphql response: body: string: '[{"data":{"myFavoriteFilm":{"id":"RmlsbToz","title":"Return of the Jedi","episodeId":6,"characters":{"edges":[{"node":{"name":"Luke Skywalker"}},{"node":{"name":"C-3PO"}},{"node":{"name":"R2-D2"}},{"node":{"name":"Darth Vader"}},{"node":{"name":"Leia Organa"}}]}}}}]' headers: Content-Length: - '264' Content-Type: - application/json Date: - Fri, 06 Nov 2020 11:30:21 GMT Server: - WSGIServer/0.1 Python/2.7.18 Set-Cookie: - csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k; expires=Fri, 05-Nov-2021 11:30:21 GMT; Max-Age=31449600; Path=/ Vary: - Cookie X-Frame-Options: - SAMEORIGIN status: code: 200 message: OK - request: body: '[{"query": "query Planet($id: ID!) {\n planet(id: $id) {\n id\n name\n }\n}", "variables": {"id": "UGxhbmV0OjEw"}}]' headers: Accept: - '*/*' Accept-Encoding: - gzip, deflate Connection: - keep-alive Content-Length: - '123' Content-Type: - application/json Cookie: - csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k; csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k User-Agent: - python-requests/2.24.0 x-csrftoken: - kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k method: POST uri: http://127.0.0.1:8000/graphql response: body: string: '[{"data":{"planet":{"id":"UGxhbmV0OjEw","name":"Kamino"}}}]' headers: Content-Length: - '57' Content-Type: - application/json Date: - Fri, 06 Nov 2020 11:30:21 GMT Server: - WSGIServer/0.1 Python/2.7.18 Set-Cookie: - csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k; expires=Fri, 05-Nov-2021 11:30:21 GMT; Max-Age=31449600; Path=/ Vary: - Cookie X-Frame-Options: - SAMEORIGIN status: code: 200 message: OK - request: body: '[{"query": "query Planet1 {\n planet(id: \"UGxhbmV0OjEw\") {\n id\n name\n }\n}\n\nquery Planet2 {\n planet(id: \"UGxhbmV0OjEx\") {\n id\n name\n 
}\n}", "operationName": "Planet2"}]' headers: Accept: - '*/*' Accept-Encoding: - gzip, deflate Connection: - keep-alive Content-Length: - '197' Content-Type: - application/json Cookie: - csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k; csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k User-Agent: - python-requests/2.24.0 x-csrftoken: - kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k method: POST uri: http://127.0.0.1:8000/graphql response: body: string: '[{"data":{"planet":{"id":"UGxhbmV0OjEx","name":"Geonosis"}}}]' headers: Content-Length: - '59' Content-Type: - application/json Date: - Fri, 06 Nov 2020 11:30:21 GMT Server: - WSGIServer/0.1 Python/2.7.18 Set-Cookie: - csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k; expires=Fri, 05-Nov-2021 11:30:21 GMT; Max-Age=31449600; Path=/ Vary: - Cookie X-Frame-Options: - SAMEORIGIN status: code: 200 message: OK - request: body: '[{"query": "query Planet($id: ID!) 
{\n planet(id: $id) {\n id\n name\n }\n}"}]' headers: Accept: - '*/*' Accept-Encoding: - gzip, deflate Connection: - keep-alive Content-Length: - '86' Content-Type: - application/json Cookie: - csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k; csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k User-Agent: - python-requests/2.26.0 authorization: - xxx-123 method: POST uri: http://127.0.0.1:8000/graphql response: body: string: '[{"data":{"planet":{"id":"UGxhbmV0OjEx","name":"Geonosis"}}}]' headers: Content-Length: - '59' Content-Type: - application/json Date: - Fri, 06 Nov 2020 11:30:21 GMT Server: - WSGIServer/0.1 Python/2.7.18 Set-Cookie: - csrftoken=kAyQyUjNOGXZfkKUtWtvUROaFfDe2GBiV7yIRsqs3r2j9aYchRDXTNo3lHp72h5k; expires=Fri, 05-Nov-2021 11:30:21 GMT; Max-Age=31449600; Path=/ Vary: - Cookie X-Frame-Options: - SAMEORIGIN status: code: 200 message: OK version: 1 gql-3.6.0b2/tests/nested_input/000077500000000000000000000000001460703211500163715ustar00rootroot00000000000000gql-3.6.0b2/tests/nested_input/__init__.py000066400000000000000000000000001460703211500204700ustar00rootroot00000000000000gql-3.6.0b2/tests/nested_input/schema.py000066400000000000000000000015301460703211500202020ustar00rootroot00000000000000import json from graphql import ( GraphQLArgument, GraphQLField, GraphQLInputField, GraphQLInputObjectType, GraphQLInt, GraphQLObjectType, GraphQLSchema, GraphQLString, ) nestedInput = GraphQLInputObjectType( "Nested", description="The input object that has a field pointing to itself", fields={"foo": GraphQLInputField(GraphQLInt, description="foo")}, ) nestedInput.fields["child"] = GraphQLInputField(nestedInput, description="child") queryType = GraphQLObjectType( "Query", fields=lambda: { "echo": GraphQLField( args={"nested": GraphQLArgument(type_=nestedInput)}, resolve=lambda *args, **kwargs: json.dumps(kwargs["nested"]), type_=GraphQLString, ), }, ) NestedInputSchema = GraphQLSchema( query=queryType, 
types=[nestedInput], ) gql-3.6.0b2/tests/nested_input/test_nested_input.py000066400000000000000000000017561460703211500225140ustar00rootroot00000000000000import pytest from gql import Client from gql.dsl import DSLQuery, DSLSchema, dsl_gql from tests.nested_input.schema import NestedInputSchema @pytest.fixture def ds(): return DSLSchema(NestedInputSchema) @pytest.fixture def client(): return Client(schema=NestedInputSchema) def test_nested_input(ds, client): query = dsl_gql(DSLQuery(ds.Query.echo.args(nested={"foo": 1}))) assert client.execute(query) == {"echo": '{"foo": 1}'} def test_nested_input_2(ds, client): query = dsl_gql( DSLQuery(ds.Query.echo.args(nested={"foo": 1, "child": {"foo": 2}})) ) assert client.execute(query) == {"echo": '{"foo": 1, "child": {"foo": 2}}'} def test_nested_input_3(ds, client): query = dsl_gql( DSLQuery( ds.Query.echo.args( nested={"foo": 1, "child": {"foo": 2, "child": {"foo": 3}}} ) ) ) assert client.execute(query) == { "echo": '{"foo": 1, "child": {"foo": 2, "child": {"foo": 3}}}' } gql-3.6.0b2/tests/regressions/000077500000000000000000000000001460703211500162335ustar00rootroot00000000000000gql-3.6.0b2/tests/regressions/issue_447_dsl_missing_directives/000077500000000000000000000000001460703211500245755ustar00rootroot00000000000000gql-3.6.0b2/tests/regressions/issue_447_dsl_missing_directives/test_dsl_directives.py000066400000000000000000000026551460703211500312210ustar00rootroot00000000000000from gql import Client, gql from gql.dsl import DSLFragment, DSLQuery, DSLSchema, dsl_gql, print_ast from gql.utilities import node_tree schema_str = """ type MonsterForm { sprites: MonsterFormSprites! } union SpriteUnion = Sprite | CopyOf type Query { monster: [Monster!]! } type MonsterFormSprites { actions: [SpriteUnion!]! } type CopyOf { action: String! } type Monster { manual(path: String!): MonsterForm } type Sprite { action: String! 
} """ def test_issue_447(): client = Client(schema=schema_str) ds = DSLSchema(client.schema) sprite = DSLFragment("SpriteUnionAsSprite") sprite.on(ds.Sprite) sprite.select( ds.Sprite.action, ) copy_of = DSLFragment("SpriteUnionAsCopyOf") copy_of.on(ds.CopyOf) copy_of.select( ds.CopyOf.action, ) query = ds.Query.monster.select( ds.Monster.manual(path="").select( ds.MonsterForm.sprites.select( ds.MonsterFormSprites.actions.select(sprite, copy_of), ), ), ) q = dsl_gql(sprite, copy_of, DSLQuery(query)) client.validate(q) # Creating a tree from the DocumentNode created by dsl_gql dsl_tree = node_tree(q) # Creating a tree from the DocumentNode created by gql gql_tree = node_tree(gql(print_ast(q))) print("=======") print(dsl_tree) print("+++++++") print(gql_tree) print("=======") assert dsl_tree == gql_tree gql-3.6.0b2/tests/starwars/000077500000000000000000000000001460703211500155365ustar00rootroot00000000000000gql-3.6.0b2/tests/starwars/__init__.py000066400000000000000000000000001460703211500176350ustar00rootroot00000000000000gql-3.6.0b2/tests/starwars/fixtures.py000066400000000000000000000075121460703211500177660ustar00rootroot00000000000000import asyncio from typing import Collection class Character: id: str name: str friends: Collection[str] appearsIn: Collection[str] # noinspection PyPep8Naming class Human(Character): type = "Human" homePlanet: str # noinspection PyShadowingBuiltins def __init__(self, id, name, friends, appearsIn, homePlanet): self.id, self.name = id, name self.friends, self.appearsIn = friends, appearsIn self.homePlanet = homePlanet # noinspection PyPep8Naming class Droid(Character): type = "Droid" primaryFunction: str # noinspection PyShadowingBuiltins def __init__(self, id, name, friends, appearsIn, primaryFunction): self.id, self.name = id, name self.friends, self.appearsIn = friends, appearsIn self.primaryFunction = primaryFunction luke = Human( id="1000", name="Luke Skywalker", friends=["1002", "1003", "2000", "2001"], appearsIn=[4, 5, 6], 
homePlanet="Tatooine", ) vader = Human( id="1001", name="Darth Vader", friends=["1004"], appearsIn=[4, 5, 6], homePlanet="Tatooine", ) han = Human( id="1002", name="Han Solo", friends=["1000", "1003", "2001"], appearsIn=[4, 5, 6], homePlanet=None, ) leia = Human( id="1003", name="Leia Organa", friends=["1000", "1002", "2000", "2001"], appearsIn=[4, 5, 6], homePlanet="Alderaan", ) tarkin = Human( id="1004", name="Wilhuff Tarkin", friends=["1001"], appearsIn=[4], homePlanet=None, ) humanData = { "1000": luke, "1001": vader, "1002": han, "1003": leia, "1004": tarkin, } threepio = Droid( id="2000", name="C-3PO", friends=["1000", "1002", "1003", "2001"], appearsIn=[4, 5, 6], primaryFunction="Protocol", ) artoo = Droid( id="2001", name="R2-D2", friends=["1000", "1002", "1003"], appearsIn=[4, 5, 6], primaryFunction="Astromech", ) droidData = { "2000": threepio, "2001": artoo, } reviews = { 4: [{"stars": 4, "commentary": "Was good.", "episode": 4}], 5: [{"stars": 5, "commentary": "This is a great movie!", "episode": 5}], 6: [{"stars": 3, "commentary": "Was expecting more stuff", "episode": 6}], } def get_character(id): return humanData.get(id) or droidData.get(id) def get_characters(ids): return map(get_character, ids) def get_friends(character): return map(get_character, character.friends) def get_hero(episode): if episode == 5: return luke return artoo async def get_hero_async(episode): await asyncio.sleep(0.001) return get_hero(episode) def get_human(id): return humanData.get(id) def get_droid(id): return droidData.get(id) def create_review(episode, review): reviews[episode].append(review) review["episode"] = episode return review async def make_starwars_backend(aiohttp_server): from aiohttp import web from .schema import StarWarsSchema from graphql import graphql_sync async def handler(request): data = await request.json() source = data["query"] try: variables = data["variables"] except KeyError: variables = None result = graphql_sync(StarWarsSchema, source, 
variable_values=variables) return web.json_response( { "data": result.data, "errors": [str(e) for e in result.errors] if result.errors else None, } ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) return server async def make_starwars_transport(aiohttp_server): from gql.transport.aiohttp import AIOHTTPTransport server = await make_starwars_backend(aiohttp_server) url = server.make_url("/") transport = AIOHTTPTransport(url=url, timeout=10) return transport gql-3.6.0b2/tests/starwars/schema.py000066400000000000000000000174701460703211500173610ustar00rootroot00000000000000import asyncio from graphql import ( GraphQLArgument, GraphQLEnumType, GraphQLEnumValue, GraphQLField, GraphQLInputField, GraphQLInputObjectType, GraphQLInt, GraphQLInterfaceType, GraphQLList, GraphQLNonNull, GraphQLObjectType, GraphQLSchema, GraphQLString, get_introspection_query, graphql_sync, print_schema, ) from .fixtures import ( create_review, get_characters, get_droid, get_friends, get_hero_async, get_human, reviews, ) episode_enum = GraphQLEnumType( "Episode", { "NEWHOPE": GraphQLEnumValue( 4, description="Released in 1977.", ), "EMPIRE": GraphQLEnumValue( 5, description="Released in 1980.", ), "JEDI": GraphQLEnumValue( 6, description="Released in 1983.", ), }, description="One of the films in the Star Wars Trilogy", ) human_type: GraphQLObjectType droid_type: GraphQLObjectType character_interface = GraphQLInterfaceType( "Character", lambda: { "id": GraphQLField( GraphQLNonNull(GraphQLString), description="The id of the character." ), "name": GraphQLField(GraphQLString, description="The name of the character."), "friends": GraphQLField( GraphQLList(character_interface), # type: ignore description="The friends of the character," " or an empty list if they have none.", ), "appearsIn": GraphQLField( GraphQLList(episode_enum), description="Which movies they appear in." 
), }, resolve_type=lambda character, _info, _type: { "Human": human_type.name, "Droid": droid_type.name, }[character.type], description="A character in the Star Wars Trilogy", ) human_type = GraphQLObjectType( "Human", lambda: { "id": GraphQLField( GraphQLNonNull(GraphQLString), description="The id of the human.", ), "name": GraphQLField( GraphQLString, description="The name of the human.", ), "friends": GraphQLField( GraphQLList(character_interface), description="The friends of the human, or an empty list if they have none.", resolve=lambda human, _info: get_friends(human), ), "appearsIn": GraphQLField( GraphQLList(episode_enum), description="Which movies they appear in.", ), "homePlanet": GraphQLField( GraphQLString, description="The home planet of the human, or null if unknown.", ), }, interfaces=[character_interface], description="A humanoid creature in the Star Wars universe.", ) droid_type = GraphQLObjectType( "Droid", lambda: { "id": GraphQLField( GraphQLNonNull(GraphQLString), description="The id of the droid.", ), "name": GraphQLField( GraphQLString, description="The name of the droid.", ), "friends": GraphQLField( GraphQLList(character_interface), description="The friends of the droid, or an empty list if they have none.", resolve=lambda droid, _info: get_friends(droid), ), "appearsIn": GraphQLField( GraphQLList(episode_enum), description="Which movies they appear in.", ), "primaryFunction": GraphQLField( GraphQLString, description="The primary function of the droid.", ), }, interfaces=[character_interface], description="A mechanical creature in the Star Wars universe.", ) review_type = GraphQLObjectType( "Review", lambda: { "episode": GraphQLField(episode_enum, description="The movie"), "stars": GraphQLField( GraphQLNonNull(GraphQLInt), description="The number of stars this review gave, 1-5", ), "commentary": GraphQLField( GraphQLString, description="Comment about the movie" ), }, description="Represents a review for a movie", ) review_input_type = 
GraphQLInputObjectType( "ReviewInput", lambda: { "stars": GraphQLInputField(GraphQLInt, description="0-5 stars"), "commentary": GraphQLInputField( GraphQLString, description="Comment about the movie, optional" ), "deprecated_input_field": GraphQLInputField( GraphQLString, description="deprecated field example", deprecation_reason="deprecated for testing", ), }, description="The input object sent when someone is creating a new review", ) query_type = GraphQLObjectType( "Query", lambda: { "hero": GraphQLField( character_interface, args={ "episode": GraphQLArgument( episode_enum, description="If omitted, returns the hero of the whole saga. If " "provided, returns the hero of that particular episode.", ) }, resolve=lambda _source, _info, episode=None: get_hero_async(episode), ), "human": GraphQLField( human_type, args={ "id": GraphQLArgument( description="id of the human", type_=GraphQLNonNull(GraphQLString), ) }, resolve=lambda _source, _info, id: get_human(id), ), "droid": GraphQLField( droid_type, args={ "id": GraphQLArgument( description="id of the droid", type_=GraphQLNonNull(GraphQLString), ) }, resolve=lambda _source, _info, id: get_droid(id), ), "characters": GraphQLField( GraphQLList(character_interface), args={ "ids": GraphQLArgument( GraphQLList(GraphQLString), description="list of character ids", ) }, resolve=lambda _source, _info, ids=None: get_characters(ids), ), }, ) mutation_type = GraphQLObjectType( "Mutation", lambda: { "createReview": GraphQLField( review_type, args={ "episode": GraphQLArgument( episode_enum, description="Episode to create review", ), "review": GraphQLArgument( description="set alive status", type_=review_input_type, ), }, resolve=lambda _source, _info, episode=None, review=None: create_review( episode, review ), ), }, description="The mutation type, represents all updates we can make to our data", ) async def subscribe_reviews(_root, _info, episode): for review in reviews[episode]: yield review await asyncio.sleep(0.1) async def 
resolve_review(review, _info, **_args): return review subscription_type = GraphQLObjectType( "Subscription", lambda: { "reviewAdded": GraphQLField( review_type, args={ "episode": GraphQLArgument( episode_enum, description="Episode to review", ) }, subscribe=subscribe_reviews, resolve=resolve_review, ) }, ) StarWarsSchema = GraphQLSchema( query=query_type, mutation=mutation_type, subscription=subscription_type, types=[human_type, droid_type, review_type, review_input_type], ) StarWarsIntrospection = graphql_sync(StarWarsSchema, get_introspection_query()).data StarWarsTypeDef = print_schema(StarWarsSchema) gql-3.6.0b2/tests/starwars/test_dsl.py000066400000000000000000000724201460703211500177360ustar00rootroot00000000000000import pytest from graphql import ( FloatValueNode, GraphQLError, GraphQLFloat, GraphQLID, GraphQLInt, GraphQLList, GraphQLNonNull, IntValueNode, ListTypeNode, NamedTypeNode, NameNode, NonNullTypeNode, NullValueNode, Undefined, build_ast_schema, parse, print_ast, ) from graphql.utilities import get_introspection_query from gql import Client, gql from gql.dsl import ( DSLFragment, DSLInlineFragment, DSLMetaField, DSLMutation, DSLQuery, DSLSchema, DSLSubscription, DSLVariable, DSLVariableDefinitions, ast_from_serialized_value_untyped, ast_from_value, dsl_gql, ) from gql.utilities import get_introspection_query_ast, node_tree from ..conftest import strip_braces_spaces from .schema import StarWarsSchema @pytest.fixture def ds(): return DSLSchema(StarWarsSchema) @pytest.fixture def client(): return Client(schema=StarWarsSchema) def test_ast_from_value_with_input_type_and_not_mapping_value(): obj_type = StarWarsSchema.get_type("ReviewInput") assert ast_from_value(8, obj_type) is None def test_ast_from_value_with_list_type_and_non_iterable_value(): assert ast_from_value(5, GraphQLList(GraphQLInt)) == IntValueNode(value="5") def test_ast_from_value_with_none(): assert ast_from_value(None, GraphQLInt) == NullValueNode() def 
test_ast_from_value_with_undefined(): with pytest.raises(GraphQLError) as exc_info: ast_from_value(Undefined, GraphQLInt) assert "Received Undefined value for type Int." in str(exc_info.value) def test_ast_from_value_with_graphqlid(): assert ast_from_value("12345", GraphQLID) == IntValueNode(value="12345") def test_ast_from_value_with_invalid_type(): with pytest.raises(TypeError) as exc_info: ast_from_value(4, None) assert "Unexpected input type: None." in str(exc_info.value) def test_ast_from_value_with_non_null_type_and_none(): typ = GraphQLNonNull(GraphQLInt) with pytest.raises(GraphQLError) as exc_info: ast_from_value(None, typ) assert "Received Null value for a Non-Null type Int." in str(exc_info.value) def test_ast_from_value_float_precision(): # Checking precision of float serialization # See https://github.com/graphql-python/graphql-core/pull/164 assert ast_from_value(123456789.01234567, GraphQLFloat) == FloatValueNode( value="123456789.01234567" ) assert ast_from_value(1.1, GraphQLFloat) == FloatValueNode(value="1.1") assert ast_from_value(123.0, GraphQLFloat) == FloatValueNode(value="123") def test_ast_from_serialized_value_untyped_typeerror(): with pytest.raises(TypeError) as exc_info: ast_from_serialized_value_untyped(GraphQLInt) assert "Cannot convert value to AST: Int." 
in str(exc_info.value) def test_variable_to_ast_type_passing_wrapping_type(): wrapping_type = GraphQLNonNull(GraphQLList(StarWarsSchema.get_type("ReviewInput"))) variable = DSLVariable("review_input") ast = variable.to_ast_type(wrapping_type) assert ast == NonNullTypeNode( type=ListTypeNode(type=NamedTypeNode(name=NameNode(value="ReviewInput"))) ) def test_use_variable_definition_multiple_times(ds): var = DSLVariableDefinitions() # `episode` variable is used in both fields op = DSLMutation( ds.Mutation.createReview.alias("badReview") .args(review=var.badReview, episode=var.episode) .select(ds.Review.stars, ds.Review.commentary), ds.Mutation.createReview.alias("goodReview") .args(review=var.goodReview, episode=var.episode) .select(ds.Review.stars, ds.Review.commentary), ) op.variable_definitions = var query = dsl_gql(op) assert ( print_ast(query) == """mutation \ ($badReview: ReviewInput, $episode: Episode, $goodReview: ReviewInput) { badReview: createReview(review: $badReview, episode: $episode) { stars commentary } goodReview: createReview(review: $goodReview, episode: $episode) { stars commentary } }""" ) assert node_tree(query) == node_tree(gql(print_ast(query))) def test_add_variable_definitions(ds): var = DSLVariableDefinitions() op = DSLMutation( ds.Mutation.createReview.args(review=var.review, episode=var.episode).select( ds.Review.stars, ds.Review.commentary ) ) op.variable_definitions = var query = dsl_gql(op) assert ( print_ast(query) == """mutation ($review: ReviewInput, $episode: Episode) { createReview(review: $review, episode: $episode) { stars commentary } }""" ) assert node_tree(query) == node_tree(gql(print_ast(query))) def test_add_variable_definitions_with_default_value_enum(ds): var = DSLVariableDefinitions() op = DSLMutation( ds.Mutation.createReview.args( review=var.review, episode=var.episode.default(4) ).select(ds.Review.stars, ds.Review.commentary) ) op.variable_definitions = var query = dsl_gql(op) assert ( print_ast(query) == """mutation 
($review: ReviewInput, $episode: Episode = NEWHOPE) { createReview(review: $review, episode: $episode) { stars commentary } }""" ) def test_add_variable_definitions_with_default_value_input_object(ds): var = DSLVariableDefinitions() op = DSLMutation( ds.Mutation.createReview.args( review=var.review.default({"stars": 5, "commentary": "Wow!"}), episode=var.episode, ).select(ds.Review.stars, ds.Review.commentary) ) op.variable_definitions = var query = dsl_gql(op) assert ( strip_braces_spaces(print_ast(query)) == """ mutation ($review: ReviewInput = {stars: 5, commentary: "Wow!"}, $episode: Episode) { createReview(review: $review, episode: $episode) { stars commentary } }""".strip() ) assert node_tree(query) == node_tree(gql(print_ast(query))) def test_add_variable_definitions_in_input_object(ds): var = DSLVariableDefinitions() op = DSLMutation( ds.Mutation.createReview.args( review={"stars": var.stars, "commentary": var.commentary}, episode=var.episode, ).select(ds.Review.stars, ds.Review.commentary) ) op.variable_definitions = var query = dsl_gql(op) assert ( strip_braces_spaces(print_ast(query)) == """mutation ($stars: Int, $commentary: String, $episode: Episode) { createReview( review: {stars: $stars, commentary: $commentary} episode: $episode ) { stars commentary } }""" ) assert node_tree(query) == node_tree(gql(print_ast(query))) def test_invalid_field_on_type_query(ds): with pytest.raises(AttributeError) as exc_info: ds.Query.extras.select(ds.Character.name) assert "Field extras does not exist in type Query." in str(exc_info.value) def test_incompatible_field(ds): with pytest.raises(TypeError) as exc_info: ds.Query.hero.select("not_a_DSL_FIELD") assert ( "Fields should be instances of DSLSelectable. 
Received: " in str(exc_info.value) ) def test_hero_name_query(ds): query = """ hero { name } """.strip() query_dsl = ds.Query.hero.select(ds.Character.name) assert query == str(query_dsl) def test_hero_name_and_friends_query(ds): query = """ hero { id name friends { name } } """.strip() query_dsl = ds.Query.hero.select( ds.Character.id, ds.Character.name, ds.Character.friends.select( ds.Character.name, ), ) assert query == str(query_dsl) # Should also work with a chain of selects query_dsl = ( ds.Query.hero.select(ds.Character.id) .select(ds.Character.name) .select( ds.Character.friends.select( ds.Character.name, ), ) ) assert query == str(query_dsl) def test_hero_id_and_name(ds): query = """ hero { id name } """.strip() query_dsl = ds.Query.hero.select(ds.Character.id) query_dsl = query_dsl.select(ds.Character.name) assert query == str(query_dsl) def test_nested_query(ds): query = """ hero { name friends { name appearsIn friends { name } } } """.strip() query_dsl = ds.Query.hero.select( ds.Character.name, ds.Character.friends.select( ds.Character.name, ds.Character.appears_in, ds.Character.friends.select(ds.Character.name), ), ) assert query == str(query_dsl) def test_fetch_luke_query(ds): query = """ human(id: "1000") { name } """.strip() query_dsl = ds.Query.human(id="1000").select( ds.Human.name, ) assert query == str(query_dsl) def test_fetch_luke_aliased(ds): query = """ luke: human(id: "1000") { name } """.strip() query_dsl = ( ds.Query.human.args(id=1000) .alias("luke") .select( ds.Character.name, ) ) assert query == str(query_dsl) # Should also work with select before alias query_dsl = ( ds.Query.human.args(id=1000) .select( ds.Character.name, ) .alias("luke") ) assert query == str(query_dsl) def test_fetch_name_aliased(ds: DSLSchema): query = """ human(id: "1000") { my_name: name } """.strip() query_dsl = ds.Query.human.args(id=1000).select(ds.Character.name.alias("my_name")) print(str(query_dsl)) assert query == str(query_dsl) def 
test_fetch_name_aliased_as_kwargs(ds: DSLSchema): query = """ human(id: "1000") { my_name: name } """.strip() query_dsl = ds.Query.human.args(id=1000).select( my_name=ds.Character.name, ) assert query == str(query_dsl) def test_hero_name_query_result(ds, client): query = dsl_gql(DSLQuery(ds.Query.hero.select(ds.Character.name))) result = client.execute(query) expected = {"hero": {"name": "R2-D2"}} assert result == expected assert node_tree(query) == node_tree(gql(print_ast(query))) def test_arg_serializer_list(ds, client): query = dsl_gql( DSLQuery( ds.Query.characters.args(ids=[1000, 1001, 1003]).select( ds.Character.name, ) ) ) result = client.execute(query) expected = { "characters": [ {"name": "Luke Skywalker"}, {"name": "Darth Vader"}, {"name": "Leia Organa"}, ] } assert result == expected assert node_tree(query) == node_tree(gql(print_ast(query))) def test_arg_serializer_enum(ds, client): query = dsl_gql(DSLQuery(ds.Query.hero.args(episode=5).select(ds.Character.name))) result = client.execute(query) expected = {"hero": {"name": "Luke Skywalker"}} assert result == expected assert node_tree(query) == node_tree(gql(print_ast(query))) def test_create_review_mutation_result(ds, client): query = dsl_gql( DSLMutation( ds.Mutation.createReview.args( episode=6, review={"stars": 5, "commentary": "This is a great movie!"} ).select(ds.Review.stars, ds.Review.commentary) ) ) result = client.execute(query) expected = {"createReview": {"stars": 5, "commentary": "This is a great movie!"}} assert result == expected assert node_tree(query) == node_tree(gql(print_ast(query))) def test_subscription(ds): query = dsl_gql( DSLSubscription( ds.Subscription.reviewAdded(episode=6).select( ds.Review.stars, ds.Review.commentary ) ) ) assert ( print_ast(query) == """subscription { reviewAdded(episode: JEDI) { stars commentary } }""" ) assert node_tree(query) == node_tree(gql(print_ast(query))) def test_field_does_not_exit_in_type(ds): with pytest.raises( GraphQLError, match="Invalid 
field for : ", ): ds.Query.hero.select(ds.Query.hero) def test_try_to_select_on_scalar_field(ds): with pytest.raises( GraphQLError, match="Invalid field for : ", ): ds.Human.name.select(ds.Query.hero) def test_invalid_arg(ds): with pytest.raises( KeyError, match="Argument invalid_arg does not exist in Field: Character." ): ds.Query.hero.args(invalid_arg=5).select(ds.Character.name) def test_multiple_root_fields(ds, client): query = dsl_gql( DSLQuery( ds.Query.hero.select(ds.Character.name), ds.Query.hero(episode=5) .alias("hero_of_episode_5") .select(ds.Character.name), ) ) result = client.execute(query) expected = { "hero": {"name": "R2-D2"}, "hero_of_episode_5": {"name": "Luke Skywalker"}, } assert result == expected assert node_tree(query) == node_tree(gql(print_ast(query))) def test_root_fields_aliased(ds, client): query = dsl_gql( DSLQuery( ds.Query.hero.select(ds.Character.name), hero_of_episode_5=ds.Query.hero(episode=5).select(ds.Character.name), ) ) result = client.execute(query) expected = { "hero": {"name": "R2-D2"}, "hero_of_episode_5": {"name": "Luke Skywalker"}, } assert result == expected assert node_tree(query) == node_tree(gql(print_ast(query))) def test_operation_name(ds): query = dsl_gql( GetHeroName=DSLQuery( ds.Query.hero.select(ds.Character.name), ) ) assert ( print_ast(query) == """query GetHeroName { hero { name } }""" ) assert node_tree(query) == node_tree(gql(print_ast(query))) def test_multiple_operations(ds): query = dsl_gql( GetHeroName=DSLQuery(ds.Query.hero.select(ds.Character.name)), CreateReviewMutation=DSLMutation( ds.Mutation.createReview.args( episode=6, review={"stars": 5, "commentary": "This is a great movie!"} ).select(ds.Review.stars, ds.Review.commentary) ), ) assert ( strip_braces_spaces(print_ast(query)) == """query GetHeroName { hero { name } } mutation CreateReviewMutation { createReview( episode: JEDI review: {stars: 5, commentary: "This is a great movie!"} ) { stars commentary } }""" ) assert node_tree(query) == 
node_tree(gql(print_ast(query))) def test_inline_fragments(ds): query = """hero(episode: JEDI) { name ... on Droid { primaryFunction } ... on Human { homePlanet } }""" query_dsl = ds.Query.hero.args(episode=6).select( ds.Character.name, DSLInlineFragment().on(ds.Droid).select(ds.Droid.primaryFunction), DSLInlineFragment().on(ds.Human).select(ds.Human.homePlanet), ) assert query == str(query_dsl) def test_inline_fragments_nested(ds): query = """hero(episode: JEDI) { name ... on Human { ... on Human { homePlanet } } }""" query_dsl = ds.Query.hero.args(episode=6).select( ds.Character.name, DSLInlineFragment() .on(ds.Human) .select(DSLInlineFragment().on(ds.Human).select(ds.Human.homePlanet)), ) assert query == str(query_dsl) def test_fragments_repr(ds): assert repr(DSLInlineFragment()) == "" assert repr(DSLInlineFragment().on(ds.Droid)) == "" assert repr(DSLFragment("fragment_1")) == "" assert repr(DSLFragment("fragment_2").on(ds.Droid)) == "" def test_fragments(ds): query = """fragment NameAndAppearances on Character { name appearsIn } { hero { ...NameAndAppearances } }""" name_and_appearances = ( DSLFragment("NameAndAppearances") .on(ds.Character) .select(ds.Character.name, ds.Character.appearsIn) ) query_dsl = DSLQuery(ds.Query.hero.select(name_and_appearances)) document = dsl_gql(name_and_appearances, query_dsl) print(print_ast(document)) assert query == print_ast(document) assert node_tree(document) == node_tree(gql(print_ast(document))) def test_fragment_without_type_condition_error(ds): # We create a fragment without using the .on(type_condition) method name_and_appearances = DSLFragment("NameAndAppearances") # If we try to use this fragment, gql generates an error with pytest.raises( AttributeError, match=r"Missing type condition. Please use .on\(type_condition\) method", ): dsl_gql(name_and_appearances) with pytest.raises( AttributeError, match=r"Missing type condition. 
Please use .on\(type_condition\) method", ): DSLFragment("NameAndAppearances").select( ds.Character.name, ds.Character.appearsIn ) def test_inline_fragment_in_dsl_gql(ds): inline_fragment = DSLInlineFragment() query = DSLQuery() with pytest.raises( GraphQLError, match=r"Invalid field for : ", ): query.select(inline_fragment) def test_fragment_with_name_changed(ds): fragment = DSLFragment("ABC") assert str(fragment) == "...ABC" fragment.name = "DEF" assert str(fragment) == "...DEF" def test_fragment_select_field_not_in_fragment(ds): fragment = DSLFragment("test").on(ds.Character) with pytest.raises( GraphQLError, match="Invalid field for : ", ): fragment.select(ds.Droid.primaryFunction) def test_dsl_nested_query_with_fragment(ds): query = """fragment NameAndAppearances on Character { name appearsIn } query NestedQueryWithFragment { hero { ...NameAndAppearances friends { ...NameAndAppearances friends { ...NameAndAppearances } } } }""" name_and_appearances = ( DSLFragment("NameAndAppearances") .on(ds.Character) .select(ds.Character.name, ds.Character.appearsIn) ) query_dsl = DSLQuery( ds.Query.hero.select( name_and_appearances, ds.Character.friends.select( name_and_appearances, ds.Character.friends.select(name_and_appearances) ), ) ) document = dsl_gql(name_and_appearances, NestedQueryWithFragment=query_dsl) print(print_ast(document)) assert query == print_ast(document) assert node_tree(document) == node_tree(gql(print_ast(document))) # Same thing, but incrementaly name_and_appearances = DSLFragment("NameAndAppearances") name_and_appearances.on(ds.Character) name_and_appearances.select(ds.Character.name) name_and_appearances.select(ds.Character.appearsIn) level_2 = ds.Character.friends level_2.select(name_and_appearances) level_1 = ds.Character.friends level_1.select(name_and_appearances) level_1.select(level_2) hero = ds.Query.hero hero.select(name_and_appearances) hero.select(level_1) query_dsl = DSLQuery(hero) document = dsl_gql(name_and_appearances, 
NestedQueryWithFragment=query_dsl) print(print_ast(document)) assert query == print_ast(document) assert node_tree(document) == node_tree(gql(print_ast(document))) def test_dsl_query_all_fields_should_be_instances_of_DSLField(): with pytest.raises( TypeError, match="Fields should be instances of DSLSelectable. Received: ", ): DSLQuery("I am a string") def test_dsl_query_all_fields_should_correspond_to_the_root_type(ds): with pytest.raises(GraphQLError) as excinfo: DSLQuery(ds.Character.name) assert ("Invalid field for : ") in str( excinfo.value ) def test_dsl_root_type_not_default(): schema_str = """ schema { query: QueryNotDefault } type QueryNotDefault { version: String } """ type_def_ast = parse(schema_str) schema = build_ast_schema(type_def_ast) ds = DSLSchema(schema) query = dsl_gql(DSLQuery(ds.QueryNotDefault.version)) expected_query = """ { version } """ assert print_ast(query) == expected_query.strip() with pytest.raises(GraphQLError) as excinfo: DSLSubscription(ds.QueryNotDefault.version) assert ( "Invalid field for : " ) in str(excinfo.value) assert node_tree(query) == node_tree(gql(print_ast(query))) def test_dsl_gql_all_arguments_should_be_operations_or_fragments(): with pytest.raises( TypeError, match="Operations should be instances of DSLExecutable " ): dsl_gql("I am a string") def test_DSLSchema_requires_a_schema(client): with pytest.raises(TypeError, match="DSLSchema needs a schema as parameter"): DSLSchema(client) def test_invalid_type(ds): with pytest.raises( AttributeError, match="Type 'invalid_type' not found in the schema!" ): ds.invalid_type def test_invalid_type_union(): schema_str = """ type FloatValue { floatValue: Float! } type IntValue { intValue: Int! } union Value = FloatValue | IntValue type Entry { name: String! value: Value } type Query { values: [Entry!]! 
} """ schema = build_ast_schema(parse(schema_str)) ds = DSLSchema(schema) with pytest.raises( AttributeError, match=( "Type \"Value \\(\\)\" is not valid as an " "attribute of DSLSchema. Only Object types or Interface types are accepted." ), ): ds.Value def test_hero_name_query_with_typename(ds): query = """ hero { name __typename } """.strip() query_dsl = ds.Query.hero.select(ds.Character.name, DSLMetaField("__typename")) assert query == str(query_dsl) def test_type_hero_query(ds): query = """{ __type(name: "Hero") { kind name ofType { kind name } } }""" type_hero = DSLMetaField("__type")(name="Hero") type_hero.select( ds.__Type.kind, ds.__Type.name, ds.__Type.ofType.select(ds.__Type.kind, ds.__Type.name), ) query_dsl = DSLQuery(type_hero) assert query == str(print_ast(dsl_gql(query_dsl))).strip() def test_invalid_meta_field_selection(ds): DSLQuery(DSLMetaField("__typename")) DSLQuery(DSLMetaField("__schema")) DSLQuery(DSLMetaField("__type")) metafield = DSLMetaField("__typename") assert metafield.name == "__typename" with pytest.raises(GraphQLError): DSLMetaField("__invalid_meta_field") DSLMutation(DSLMetaField("__typename")) with pytest.raises(GraphQLError): DSLMutation(DSLMetaField("__schema")) with pytest.raises(GraphQLError): DSLMutation(DSLMetaField("__type")) with pytest.raises(GraphQLError): DSLSubscription(DSLMetaField("__typename")) with pytest.raises(GraphQLError): DSLSubscription(DSLMetaField("__schema")) with pytest.raises(GraphQLError): DSLSubscription(DSLMetaField("__type")) fragment = DSLFragment("blah") with pytest.raises(AttributeError): fragment.select(DSLMetaField("__typename")) fragment.on(ds.Character) fragment.select(DSLMetaField("__typename")) with pytest.raises(GraphQLError): fragment.select(DSLMetaField("__schema")) with pytest.raises(GraphQLError): fragment.select(DSLMetaField("__type")) ds.Query.hero.select(DSLMetaField("__typename")) with pytest.raises(GraphQLError): ds.Query.hero.select(DSLMetaField("__schema")) with 
pytest.raises(GraphQLError): ds.Query.hero.select(DSLMetaField("__type")) @pytest.mark.parametrize("option", [True, False]) def test_get_introspection_query_ast(option): introspection_query = get_introspection_query( descriptions=option, specified_by_url=option, directive_is_repeatable=option, schema_description=option, ) dsl_introspection_query = get_introspection_query_ast( descriptions=option, specified_by_url=option, directive_is_repeatable=option, schema_description=option, ) assert print_ast(gql(introspection_query)) == print_ast(dsl_introspection_query) assert node_tree(dsl_introspection_query) == node_tree( gql(print_ast(dsl_introspection_query)) ) def test_typename_aliased(ds): query = """ hero { name typenameField: __typename } """.strip() query_dsl = ds.Query.hero.select( ds.Character.name, typenameField=DSLMetaField("__typename") ) assert query == str(query_dsl) query_dsl = ds.Query.hero.select( ds.Character.name, DSLMetaField("__typename").alias("typenameField") ) assert query == str(query_dsl) def test_node_tree_with_loc(ds): query = """query GetHeroName { hero { name } }""".strip() document = gql(query) node_tree_result = """ DocumentNode loc: Location definitions: OperationDefinitionNode loc: Location name: NameNode loc: Location value: 'GetHeroName' directives: empty tuple variable_definitions: empty tuple selection_set: SelectionSetNode loc: Location selections: FieldNode loc: Location directives: empty tuple alias: None name: NameNode loc: Location value: 'hero' arguments: empty tuple nullability_assertion: None selection_set: SelectionSetNode loc: Location selections: FieldNode loc: Location directives: empty tuple alias: None name: NameNode loc: Location value: 'name' arguments: empty tuple nullability_assertion: None selection_set: None operation: """.strip() node_tree_result_stable = """ DocumentNode loc: Location definitions: OperationDefinitionNode loc: Location name: NameNode loc: Location value: 'GetHeroName' directives: empty tuple 
variable_definitions: empty tuple selection_set: SelectionSetNode loc: Location selections: FieldNode loc: Location directives: empty tuple alias: None name: NameNode loc: Location value: 'hero' arguments: empty tuple selection_set: SelectionSetNode loc: Location selections: FieldNode loc: Location directives: empty tuple alias: None name: NameNode loc: Location value: 'name' arguments: empty tuple selection_set: None operation: """.strip() try: assert node_tree(document, ignore_loc=False) == node_tree_result except AssertionError: # graphql-core version 3.2.3 assert node_tree(document, ignore_loc=False) == node_tree_result_stable def test_legacy_fragment_with_variables(ds): var = DSLVariableDefinitions() hero_fragment = ( DSLFragment("heroFragment") .on(ds.Query) .select( ds.Query.hero.args(episode=var.episode).select(ds.Character.name), ) ) print(hero_fragment) hero_fragment.variable_definitions = var query = dsl_gql(hero_fragment) expected = """ fragment heroFragment($episode: Episode) on Query { hero(episode: $episode) { name } } """.strip() assert print_ast(query) == expected gql-3.6.0b2/tests/starwars/test_introspection.py000066400000000000000000000040021460703211500220430ustar00rootroot00000000000000import pytest from graphql import print_schema from gql import Client from .fixtures import make_starwars_transport # Marking all tests in this file with the aiohttp marker pytestmark = pytest.mark.aiohttp @pytest.mark.asyncio async def test_starwars_introspection_args(event_loop, aiohttp_server): transport = await make_starwars_transport(aiohttp_server) # First fetch the schema from transport using default introspection query # We should receive descriptions in the schema but not deprecated input fields async with Client( transport=transport, fetch_schema_from_transport=True, ) as session: schema_str = print_schema(session.client.schema) print(schema_str) assert '"""The number of stars this review gave, 1-5"""' in schema_str assert "deprecated_input_field" not 
in schema_str # Then fetch the schema from transport using an introspection query # without requesting descriptions # We should NOT receive descriptions in the schema async with Client( transport=transport, fetch_schema_from_transport=True, introspection_args={ "descriptions": False, }, ) as session: schema_str = print_schema(session.client.schema) print(schema_str) assert '"""The number of stars this review gave, 1-5"""' not in schema_str assert "deprecated_input_field" not in schema_str # Then fetch the schema from transport using and introspection query # requiring deprecated input fields # We should receive descriptions in the schema and deprecated input fields async with Client( transport=transport, fetch_schema_from_transport=True, introspection_args={ "input_value_deprecation": True, }, ) as session: schema_str = print_schema(session.client.schema) print(schema_str) assert '"""The number of stars this review gave, 1-5"""' in schema_str assert "deprecated_input_field" in schema_str gql-3.6.0b2/tests/starwars/test_parse_results.py000066400000000000000000000103571460703211500220500ustar00rootroot00000000000000import pytest from graphql import GraphQLError from gql import gql from gql.utilities import parse_result from tests.starwars.schema import StarWarsSchema def test_hero_name_and_friends_query(): query = gql( """ query HeroNameAndFriendsQuery { hero { id friends { name } name } } """ ) result = { "hero": { "id": "2001", "friends": [ {"name": "Luke Skywalker"}, {"name": "Han Solo"}, {"name": "Leia Organa"}, ], "name": "R2-D2", } } parsed_result = parse_result(StarWarsSchema, query, result) assert result == parsed_result def test_hero_name_and_friends_query_with_fragment(): """Testing for issue #445""" query = gql( """ query HeroNameAndFriendsQuery { hero { ...HeroSummary friends { name } } } fragment HeroSummary on Character { id name } """ ) result = { "hero": { "id": "2001", "friends": [ {"name": "Luke Skywalker"}, {"name": "Han Solo"}, {"name": "Leia 
Organa"}, ], "name": "R2-D2", } } parsed_result = parse_result(StarWarsSchema, query, result) assert result == parsed_result def test_key_not_found_in_result(): query = gql( """ { hero { id } } """ ) # Backend returned an invalid result without the hero key # Should be impossible. In that case, we ignore the missing key result = {} parsed_result = parse_result(StarWarsSchema, query, result) assert result == parsed_result def test_invalid_result_raise_error(): query = gql( """ { hero { id } } """ ) result = {"hero": 5} with pytest.raises(GraphQLError) as exc_info: parse_result(StarWarsSchema, query, result) assert "Invalid result for container of field id: 5" in str(exc_info) def test_fragment(): query = gql( """ query UseFragment { luke: human(id: "1000") { ...HumanFragment } leia: human(id: "1003") { ...HumanFragment } } fragment HumanFragment on Human { name homePlanet } """ ) result = { "luke": {"name": "Luke Skywalker", "homePlanet": "Tatooine"}, "leia": {"name": "Leia Organa", "homePlanet": "Alderaan"}, } parsed_result = parse_result(StarWarsSchema, query, result) assert result == parsed_result def test_fragment_not_found(): query = gql( """ query UseFragment { luke: human(id: "1000") { ...HumanFragment } } """ ) result = { "luke": {"name": "Luke Skywalker", "homePlanet": "Tatooine"}, } with pytest.raises(GraphQLError) as exc_info: parse_result(StarWarsSchema, query, result) assert 'Fragment "HumanFragment" not found in document!' in str(exc_info) def test_return_none_if_result_is_none(): query = gql( """ query { hero { id } } """ ) result = None assert parse_result(StarWarsSchema, query, result) is None def test_null_result_is_allowed(): query = gql( """ query { hero { id } } """ ) result = {"hero": None} parsed_result = parse_result(StarWarsSchema, query, result) assert result == parsed_result def test_inline_fragment(): query = gql( """ query UseFragment { luke: human(id: "1000") { ... 
on Human { name homePlanet } } } """ ) result = { "luke": {"name": "Luke Skywalker", "homePlanet": "Tatooine"}, } parsed_result = parse_result(StarWarsSchema, query, result) assert result == parsed_result gql-3.6.0b2/tests/starwars/test_query.py000066400000000000000000000173541460703211500203260ustar00rootroot00000000000000import pytest from graphql import GraphQLError, Source from gql import Client, gql from tests.starwars.schema import StarWarsSchema @pytest.fixture def client(): return Client(schema=StarWarsSchema) def test_hero_name_query(client): query = gql( """ query HeroNameQuery { hero { name } } """ ) expected = {"hero": {"name": "R2-D2"}} result = client.execute(query) assert result == expected def test_hero_name_and_friends_query(client): query = gql( """ query HeroNameAndFriendsQuery { hero { id name friends { name } } } """ ) expected = { "hero": { "id": "2001", "name": "R2-D2", "friends": [ {"name": "Luke Skywalker"}, {"name": "Han Solo"}, {"name": "Leia Organa"}, ], } } result = client.execute(query) assert result == expected def test_nested_query(client): query = gql( """ query NestedQuery { hero { name friends { name appearsIn friends { name } } } } """ ) expected = { "hero": { "name": "R2-D2", "friends": [ { "name": "Luke Skywalker", "appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"], "friends": [ {"name": "Han Solo"}, {"name": "Leia Organa"}, {"name": "C-3PO"}, {"name": "R2-D2"}, ], }, { "name": "Han Solo", "appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"], "friends": [ {"name": "Luke Skywalker"}, {"name": "Leia Organa"}, {"name": "R2-D2"}, ], }, { "name": "Leia Organa", "appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"], "friends": [ {"name": "Luke Skywalker"}, {"name": "Han Solo"}, {"name": "C-3PO"}, {"name": "R2-D2"}, ], }, ], } } result = client.execute(query, parse_result=False) assert result == expected def test_fetch_luke_query(client): query = gql( """ query FetchLukeQuery { human(id: "1000") { name } } """ ) expected = {"human": {"name": "Luke Skywalker"}} 
result = client.execute(query) assert result == expected def test_fetch_some_id_query(client): query = gql( """ query FetchSomeIDQuery($someId: String!) { human(id: $someId) { name } } """ ) params = { "someId": "1000", } expected = {"human": {"name": "Luke Skywalker"}} result = client.execute(query, variable_values=params) assert result == expected def test_fetch_some_id_query2(client): query = gql( """ query FetchSomeIDQuery($someId: String!) { human(id: $someId) { name } } """ ) params = { "someId": "1002", } expected = {"human": {"name": "Han Solo"}} result = client.execute(query, variable_values=params) assert result == expected def test_invalid_id_query(client): query = gql( """ query humanQuery($id: String!) { human(id: $id) { name } } """ ) params = { "id": "not a valid id", } expected = {"human": None} result = client.execute(query, variable_values=params) assert result == expected def test_fetch_luke_aliased(client): query = gql( """ query FetchLukeAliased { luke: human(id: "1000") { name } } """ ) expected = {"luke": {"name": "Luke Skywalker"}} result = client.execute(query) assert result == expected def test_fetch_luke_and_leia_aliased(client): query = gql( """ query FetchLukeAndLeiaAliased { luke: human(id: "1000") { name } leia: human(id: "1003") { name } } """ ) expected = {"luke": {"name": "Luke Skywalker"}, "leia": {"name": "Leia Organa"}} result = client.execute(query) assert result == expected def test_duplicate_fields(client): query = gql( """ query DuplicateFields { luke: human(id: "1000") { name homePlanet } leia: human(id: "1003") { name homePlanet } } """ ) expected = { "luke": {"name": "Luke Skywalker", "homePlanet": "Tatooine"}, "leia": {"name": "Leia Organa", "homePlanet": "Alderaan"}, } result = client.execute(query) assert result == expected def test_use_fragment(client): query = gql( """ query UseFragment { luke: human(id: "1000") { ...HumanFragment } leia: human(id: "1003") { ...HumanFragment } } fragment HumanFragment on Human { name 
homePlanet } """ ) expected = { "luke": {"name": "Luke Skywalker", "homePlanet": "Tatooine"}, "leia": {"name": "Leia Organa", "homePlanet": "Alderaan"}, } result = client.execute(query) assert result == expected def test_check_type_of_r2(client): query = gql( """ query CheckTypeOfR2 { hero { __typename name } } """ ) expected = {"hero": {"__typename": "Droid", "name": "R2-D2"}} result = client.execute(query) assert result == expected def test_check_type_of_luke(client): query = gql( """ query CheckTypeOfLuke { hero(episode: EMPIRE) { __typename name } } """ ) expected = {"hero": {"__typename": "Human", "name": "Luke Skywalker"}} result = client.execute(query) assert result == expected def test_parse_error(client): with pytest.raises(Exception) as exc_info: gql( """ qeury """ ) error = exc_info.value assert isinstance(error, GraphQLError) assert "Syntax Error: Unexpected Name 'qeury'." in str(error) def test_mutation_result(client): query = gql( """ mutation CreateReviewForEpisode($ep: Episode!, $review: ReviewInput!) 
{ createReview(episode: $ep, review: $review) { stars commentary } } """ ) params = { "ep": "JEDI", "review": {"stars": 5, "commentary": "This is a great movie!"}, } expected = {"createReview": {"stars": 5, "commentary": "This is a great movie!"}} result = client.execute(query, variable_values=params) assert result == expected def test_query_from_source(client): source = Source("{ hero { name } }") query = gql(source) expected = {"hero": {"name": "R2-D2"}} result = client.execute(query) assert result == expected def test_already_parsed_query(client): query = gql("{ hero { name } }") with pytest.raises(TypeError, match="must be passed as a string"): gql(query) gql-3.6.0b2/tests/starwars/test_subscription.py000066400000000000000000000055201460703211500216750ustar00rootroot00000000000000import asyncio import pytest from graphql import ExecutionResult, GraphQLError, subscribe from gql import Client, gql from .fixtures import reviews from .schema import StarWarsSchema subscription_str = """ subscription ListenEpisodeReviews($ep: Episode!) 
{ reviewAdded(episode: $ep) { stars, commentary, episode } } """ async def await_if_coroutine(obj): """Function to make tests work for graphql-core versions before and after 3.3.0a3""" if asyncio.iscoroutine(obj): return await obj return obj @pytest.mark.asyncio async def test_subscription_support(): # reset review data for this test reviews[6] = [ {"stars": 3, "commentary": "Was expecting more stuff", "episode": 6}, {"stars": 5, "commentary": "This is a great movie!", "episode": 6}, ] subs = gql(subscription_str) params = {"ep": "JEDI"} expected = [{**review, "episode": "JEDI"} for review in reviews[6]] ai = await await_if_coroutine( subscribe(StarWarsSchema, subs, variable_values=params) ) result = [result.data["reviewAdded"] async for result in ai] assert result == expected @pytest.mark.asyncio async def test_subscription_support_using_client(): # reset review data for this test reviews[6] = [ {"stars": 3, "commentary": "Was expecting more stuff", "episode": 6}, {"stars": 5, "commentary": "This is a great movie!", "episode": 6}, ] subs = gql(subscription_str) params = {"ep": "JEDI"} expected = [{**review, "episode": "JEDI"} for review in reviews[6]] async with Client(schema=StarWarsSchema) as session: results = [ result["reviewAdded"] async for result in await await_if_coroutine( session.subscribe(subs, variable_values=params, parse_result=False) ) ] assert results == expected subscription_invalid_str = """ subscription ListenEpisodeReviews($ep: Episode!) 
{ qsdfqsdfqsdf } """ @pytest.mark.asyncio async def test_subscription_support_using_client_invalid_field(): subs = gql(subscription_invalid_str) params = {"ep": "JEDI"} async with Client(schema=StarWarsSchema) as session: # We subscribe directly from the transport to avoid local validation results = [ result async for result in await await_if_coroutine( session.transport.subscribe(subs, variable_values=params) ) ] assert len(results) == 1 result = results[0] assert isinstance(result, ExecutionResult) assert result.data is None assert isinstance(result.errors, list) assert len(result.errors) == 1 error = result.errors[0] assert isinstance(error, GraphQLError) assert error.message == "The subscription field 'qsdfqsdfqsdf' is not defined." gql-3.6.0b2/tests/starwars/test_validation.py000066400000000000000000000125011460703211500213000ustar00rootroot00000000000000import pytest from gql import Client, gql from .schema import StarWarsIntrospection, StarWarsSchema @pytest.fixture def local_schema(): return Client(schema=StarWarsSchema) @pytest.fixture def typedef_schema(): return Client( schema=""" schema { query: Query } interface Character { appearsIn: [Episode] friends: [Character] id: String! name: String } type Droid implements Character { appearsIn: [Episode] friends: [Character] id: String! name: String primaryFunction: String } enum Episode { EMPIRE JEDI NEWHOPE } type Human implements Character { appearsIn: [Episode] friends: [Character] homePlanet: String id: String! 
name: String } type Query { droid(id: String!): Droid hero(episode: Episode): Character human(id: String!): Human }""" ) @pytest.fixture def introspection_schema(): return Client(introspection=StarWarsIntrospection) @pytest.fixture def introspection_schema_empty_directives(): introspection = StarWarsIntrospection # Simulate an empty dictionary for directives introspection["__schema"]["directives"] = [] return Client(introspection=introspection) @pytest.fixture def introspection_schema_no_directives(): introspection = StarWarsIntrospection # Simulate no directives key del introspection["__schema"]["directives"] return Client(introspection=introspection) @pytest.fixture( params=[ "local_schema", "typedef_schema", "introspection_schema", "introspection_schema_empty_directives", "introspection_schema_no_directives", ] ) def client(request): return request.getfixturevalue(request.param) def validation_errors(client, query): query = gql(query) try: client.validate(query) return False except Exception: return True def test_incompatible_request_gql(client): with pytest.raises(TypeError): gql(123) """ The error generated depends on graphql-core version < 3.1.5: "body must be a string" >= 3.1.5: some variation of "object of type 'int' has no len()" depending on the python environment So we are not going to check the exact error message here anymore. 
""" """ assert ("body must be a string" in str(exc_info.value)) or ( "object of type 'int' has no len()" in str(exc_info.value) ) """ def test_nested_query_with_fragment(client): query = """ query NestedQueryWithFragment { hero { ...NameAndAppearances friends { ...NameAndAppearances friends { ...NameAndAppearances } } } } fragment NameAndAppearances on Character { name appearsIn } """ assert not validation_errors(client, query) def test_non_existent_fields(client): query = """ query HeroSpaceshipQuery { hero { favoriteSpaceship } } """ assert validation_errors(client, query) def test_require_fields_on_object(client): query = """ query HeroNoFieldsQuery { hero } """ assert validation_errors(client, query) def test_disallows_fields_on_scalars(client): query = """ query HeroFieldsOnScalarQuery { hero { name { firstCharacterOfName } } } """ assert validation_errors(client, query) def test_disallows_object_fields_on_interfaces(client): query = """ query DroidFieldOnCharacter { hero { name primaryFunction } } """ assert validation_errors(client, query) def test_allows_object_fields_in_fragments(client): query = """ query DroidFieldInFragment { hero { name ...DroidFields } } fragment DroidFields on Droid { primaryFunction } """ assert not validation_errors(client, query) def test_allows_object_fields_in_inline_fragments(client): query = """ query DroidFieldInFragment { hero { name ... on Droid { primaryFunction } } } """ assert not validation_errors(client, query) def test_include_directive(client): query = """ query fetchHero($with_friends: Boolean!) { hero { name friends @include(if: $with_friends) { name } } } """ assert not validation_errors(client, query) def test_skip_directive(client): query = """ query fetchHero($without_friends: Boolean!) 
{ hero { name friends @skip(if: $without_friends) { name } } } """ assert not validation_errors(client, query) def test_build_client_schema_invalid_introspection(): from gql.utilities import build_client_schema with pytest.raises(TypeError) as exc_info: build_client_schema("blah") assert ( "Invalid or incomplete introspection result. Ensure that you are passing the " "'data' attribute of an introspection response and no 'errors' were returned " "alongside: 'blah'." ) in str(exc_info.value) gql-3.6.0b2/tests/test_aiohttp.py000066400000000000000000001267141460703211500167640ustar00rootroot00000000000000import io import json from typing import Mapping import pytest from gql import Client, gql from gql.cli import get_parser, main from gql.transport.exceptions import ( TransportAlreadyConnected, TransportClosed, TransportProtocolError, TransportQueryError, TransportServerError, ) from .conftest import TemporaryFile, strip_braces_spaces query1_str = """ query getContinents { continents { code name } } """ query1_server_answer_data = ( '{"continents":[' '{"code":"AF","name":"Africa"},{"code":"AN","name":"Antarctica"},' '{"code":"AS","name":"Asia"},{"code":"EU","name":"Europe"},' '{"code":"NA","name":"North America"},{"code":"OC","name":"Oceania"},' '{"code":"SA","name":"South America"}]}' ) query1_server_answer = f'{{"data":{query1_server_answer_data}}}' # Marking all tests in this file with the aiohttp marker pytestmark = pytest.mark.aiohttp @pytest.mark.asyncio async def test_aiohttp_query(event_loop, aiohttp_server): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport async def handler(request): return web.Response( text=query1_server_answer, content_type="application/json", headers={"dummy": "test1234"}, ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport(url=url, timeout=10) async with Client(transport=transport) as session: query = 
gql(query1_str) # Execute query asynchronously result = await session.execute(query) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" # Checking response headers are saved in the transport assert hasattr(transport, "response_headers") assert isinstance(transport.response_headers, Mapping) assert transport.response_headers["dummy"] == "test1234" @pytest.mark.asyncio async def test_aiohttp_ignore_backend_content_type(event_loop, aiohttp_server): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport async def handler(request): return web.Response(text=query1_server_answer, content_type="text/plain") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport(url=url, timeout=10) async with Client(transport=transport) as session: query = gql(query1_str) result = await session.execute(query) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" @pytest.mark.asyncio async def test_aiohttp_cookies(event_loop, aiohttp_server): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport async def handler(request): assert "COOKIE" in request.headers assert "cookie1=val1" == request.headers["COOKIE"] return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport(url=url, cookies={"cookie1": "val1"}) async with Client(transport=transport) as session: query = gql(query1_str) # Execute query asynchronously result = await session.execute(query) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" @pytest.mark.asyncio async def test_aiohttp_error_code_401(event_loop, aiohttp_server): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport async def 
handler(request): # Will generate http error code 401 return web.Response( text='{"error":"Unauthorized","message":"401 Client Error: Unauthorized"}', content_type="application/json", status=401, ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport(url=url) async with Client(transport=transport) as session: query = gql(query1_str) with pytest.raises(TransportServerError) as exc_info: await session.execute(query) assert "401, message='Unauthorized'" in str(exc_info.value) @pytest.mark.asyncio async def test_aiohttp_error_code_429(event_loop, aiohttp_server): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport async def handler(request): # Will generate http error code 429 return web.Response( text=""" Too Many Requests

Too Many Requests

I only allow 50 requests per hour to this Web site per logged in user. Try again soon.

""", content_type="text/html", status=429, headers={"Retry-After": "3600"}, ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport(url=url) async with Client(transport=transport) as session: query = gql(query1_str) with pytest.raises(TransportServerError) as exc_info: await session.execute(query) assert "429, message='Too Many Requests'" in str(exc_info.value) # Checking response headers are saved in the transport assert hasattr(transport, "response_headers") assert isinstance(transport.response_headers, Mapping) assert transport.response_headers["Retry-After"] == "3600" @pytest.mark.asyncio async def test_aiohttp_error_code_500(event_loop, aiohttp_server): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport async def handler(request): # Will generate http error code 500 raise Exception("Server error") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport(url=url) async with Client(transport=transport) as session: query = gql(query1_str) with pytest.raises(TransportServerError) as exc_info: await session.execute(query) assert "500, message='Internal Server Error'" in str(exc_info.value) transport_query_error_responses = [ '{"errors": ["Error 1", "Error 2"]}', '{"errors": {"error_1": "Something"}}', '{"errors": 5}', ] @pytest.mark.asyncio @pytest.mark.parametrize("query_error", transport_query_error_responses) async def test_aiohttp_error_code(event_loop, aiohttp_server, query_error): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport async def handler(request): return web.Response(text=query_error, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport(url=url) async with 
Client(transport=transport) as session: query = gql(query1_str) with pytest.raises(TransportQueryError): await session.execute(query) invalid_protocol_responses = [ { "response": "{}", "expected_exception": ( "Server did not return a GraphQL result: " 'No "data" or "errors" keys in answer: {}' ), }, { "response": "qlsjfqsdlkj", "expected_exception": ( "Server did not return a GraphQL result: Not a JSON answer: qlsjfqsdlkj" ), }, { "response": '{"not_data_or_errors": 35}', "expected_exception": ( "Server did not return a GraphQL result: " 'No "data" or "errors" keys in answer: {"not_data_or_errors": 35}' ), }, { "response": "", "expected_exception": ( "Server did not return a GraphQL result: Not a JSON answer: " ), }, ] @pytest.mark.asyncio @pytest.mark.parametrize("param", invalid_protocol_responses) async def test_aiohttp_invalid_protocol(event_loop, aiohttp_server, param): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport response = param["response"] async def handler(request): return web.Response(text=response, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport(url=url) async with Client(transport=transport) as session: query = gql(query1_str) with pytest.raises(TransportProtocolError) as exc_info: await session.execute(query) assert param["expected_exception"] in str(exc_info.value) @pytest.mark.asyncio async def test_aiohttp_subscribe_not_supported(event_loop, aiohttp_server): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport async def handler(request): return web.Response(text="does not matter", content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport(url=url) async with Client(transport=transport) as session: query = gql(query1_str) with 
pytest.raises(NotImplementedError): async for result in session.subscribe(query): pass @pytest.mark.asyncio async def test_aiohttp_cannot_connect_twice(event_loop, aiohttp_server): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport async def handler(request): return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport(url=url, timeout=10) async with Client(transport=transport) as session: with pytest.raises(TransportAlreadyConnected): await session.transport.connect() @pytest.mark.asyncio async def test_aiohttp_cannot_execute_if_not_connected(event_loop, aiohttp_server): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport async def handler(request): return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport(url=url, timeout=10) query = gql(query1_str) with pytest.raises(TransportClosed): await transport.execute(query) @pytest.mark.asyncio async def test_aiohttp_extra_args(event_loop, aiohttp_server): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport async def handler(request): return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") # passing extra arguments to aiohttp.ClientSession from aiohttp import DummyCookieJar jar = DummyCookieJar() transport = AIOHTTPTransport( url=url, timeout=10, client_session_args={"version": "1.1", "cookie_jar": jar} ) async with Client(transport=transport) as session: query = gql(query1_str) # Passing extra arguments to the post method of aiohttp result = await 
session.execute(query, extra_args={"allow_redirects": False}) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" query2_str = """ query getEurope ($code: ID!) { continent (code: $code) { name } } """ query2_server_answer = '{"data": {"continent": {"name": "Europe"}}}' @pytest.mark.asyncio async def test_aiohttp_query_variable_values(event_loop, aiohttp_server): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport async def handler(request): return web.Response(text=query2_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport(url=url, timeout=10) async with Client(transport=transport) as session: params = {"code": "EU"} query = gql(query2_str) # Execute query asynchronously result = await session.execute( query, variable_values=params, operation_name="getEurope" ) continent = result["continent"] assert continent["name"] == "Europe" @pytest.mark.asyncio async def test_aiohttp_query_variable_values_fix_issue_292(event_loop, aiohttp_server): """Allow to specify variable_values without keyword. 
See https://github.com/graphql-python/gql/issues/292""" from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport async def handler(request): return web.Response(text=query2_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport(url=url, timeout=10) async with Client(transport=transport) as session: params = {"code": "EU"} query = gql(query2_str) # Execute query asynchronously result = await session.execute(query, params, operation_name="getEurope") continent = result["continent"] assert continent["name"] == "Europe" @pytest.mark.asyncio async def test_aiohttp_execute_running_in_thread( event_loop, aiohttp_server, run_sync_test ): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport async def handler(request): return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = AIOHTTPTransport(url=url) client = Client(transport=transport) query = gql(query1_str) client.execute(query) await run_sync_test(event_loop, server, test_code) @pytest.mark.asyncio async def test_aiohttp_subscribe_running_in_thread( event_loop, aiohttp_server, run_sync_test ): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport async def handler(request): return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = AIOHTTPTransport(url=url) client = Client(transport=transport) query = gql(query1_str) # Note: subscriptions are not supported on the aiohttp transport # But we add this test in order to have 100% code coverage # It is to check that 
we will correctly set an event loop # in the subscribe function if there is none (in a Thread for example) # We cannot test this with the websockets transport because # the websockets transport will set an event loop in its init with pytest.raises(NotImplementedError): for result in client.subscribe(query): pass await run_sync_test(event_loop, server, test_code) file_upload_server_answer = '{"data":{"success":true}}' file_upload_mutation_1 = """ mutation($file: Upload!) { uploadFile(input:{other_var:$other_var, file:$file}) { success } } """ file_upload_mutation_1_operations = ( '{"query": "mutation ($file: Upload!) {\\n uploadFile(input: {other_var: ' '$other_var, file: $file}) {\\n success\\n }\\n}", "variables": ' '{"file": null, "other_var": 42}}' ) file_upload_mutation_1_map = '{"0": ["variables.file"]}' file_1_content = """ This is a test file This file will be sent in the GraphQL mutation """ async def single_upload_handler(request): from aiohttp import web reader = await request.multipart() field_0 = await reader.next() assert field_0.name == "operations" field_0_text = await field_0.text() assert strip_braces_spaces(field_0_text) == file_upload_mutation_1_operations field_1 = await reader.next() assert field_1.name == "map" field_1_text = await field_1.text() assert field_1_text == file_upload_mutation_1_map field_2 = await reader.next() assert field_2.name == "0" field_2_text = await field_2.text() assert field_2_text == file_1_content field_3 = await reader.next() assert field_3 is None return web.Response(text=file_upload_server_answer, content_type="application/json") @pytest.mark.asyncio async def test_aiohttp_file_upload(event_loop, aiohttp_server): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport app = web.Application() app.router.add_route("POST", "/", single_upload_handler) server = await aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport(url=url, timeout=10) with TemporaryFile(file_1_content) as 
test_file: async with Client(transport=transport) as session: query = gql(file_upload_mutation_1) file_path = test_file.filename with open(file_path, "rb") as f: params = {"file": f, "other_var": 42} # Execute query asynchronously result = await session.execute( query, variable_values=params, upload_files=True ) success = result["success"] assert success async def single_upload_handler_with_content_type(request): from aiohttp import web reader = await request.multipart() field_0 = await reader.next() assert field_0.name == "operations" field_0_text = await field_0.text() assert strip_braces_spaces(field_0_text) == file_upload_mutation_1_operations field_1 = await reader.next() assert field_1.name == "map" field_1_text = await field_1.text() assert field_1_text == file_upload_mutation_1_map field_2 = await reader.next() assert field_2.name == "0" field_2_text = await field_2.text() assert field_2_text == file_1_content # Verifying the content_type assert field_2.headers["Content-Type"] == "application/pdf" field_3 = await reader.next() assert field_3 is None return web.Response(text=file_upload_server_answer, content_type="application/json") @pytest.mark.asyncio async def test_aiohttp_file_upload_with_content_type(event_loop, aiohttp_server): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport app = web.Application() app.router.add_route("POST", "/", single_upload_handler_with_content_type) server = await aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport(url=url, timeout=10) with TemporaryFile(file_1_content) as test_file: async with Client(transport=transport) as session: query = gql(file_upload_mutation_1) file_path = test_file.filename with open(file_path, "rb") as f: # Setting the content_type f.content_type = "application/pdf" params = {"file": f, "other_var": 42} # Execute query asynchronously result = await session.execute( query, variable_values=params, upload_files=True ) success = result["success"] assert 
success @pytest.mark.asyncio async def test_aiohttp_file_upload_without_session( event_loop, aiohttp_server, run_sync_test ): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport app = web.Application() app.router.add_route("POST", "/", single_upload_handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = AIOHTTPTransport(url=url, timeout=10) with TemporaryFile(file_1_content) as test_file: client = Client(transport=transport) query = gql(file_upload_mutation_1) file_path = test_file.filename with open(file_path, "rb") as f: params = {"file": f, "other_var": 42} result = client.execute( query, variable_values=params, upload_files=True ) success = result["success"] assert success await run_sync_test(event_loop, server, test_code) # This is a sample binary file content containing all possible byte values binary_file_content = bytes(range(0, 256)) async def binary_upload_handler(request): from aiohttp import web reader = await request.multipart() field_0 = await reader.next() assert field_0.name == "operations" field_0_text = await field_0.text() assert strip_braces_spaces(field_0_text) == file_upload_mutation_1_operations field_1 = await reader.next() assert field_1.name == "map" field_1_text = await field_1.text() assert field_1_text == file_upload_mutation_1_map field_2 = await reader.next() assert field_2.name == "0" field_2_binary = await field_2.read() assert field_2_binary == binary_file_content field_3 = await reader.next() assert field_3 is None return web.Response(text=file_upload_server_answer, content_type="application/json") @pytest.mark.asyncio async def test_aiohttp_binary_file_upload(event_loop, aiohttp_server): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport app = web.Application() app.router.add_route("POST", "/", binary_upload_handler) server = await aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport(url=url, timeout=10) with 
TemporaryFile(binary_file_content) as test_file: async with Client(transport=transport) as session: query = gql(file_upload_mutation_1) file_path = test_file.filename with open(file_path, "rb") as f: params = {"file": f, "other_var": 42} # Execute query asynchronously result = await session.execute( query, variable_values=params, upload_files=True ) success = result["success"] assert success @pytest.mark.asyncio async def test_aiohttp_stream_reader_upload(event_loop, aiohttp_server): from aiohttp import web, ClientSession from gql.transport.aiohttp import AIOHTTPTransport async def binary_data_handler(request): return web.Response( body=binary_file_content, content_type="binary/octet-stream" ) app = web.Application() app.router.add_route("POST", "/", binary_upload_handler) app.router.add_route("GET", "/binary_data", binary_data_handler) server = await aiohttp_server(app) url = server.make_url("/") binary_data_url = server.make_url("/binary_data") transport = AIOHTTPTransport(url=url, timeout=10) async with Client(transport=transport) as session: query = gql(file_upload_mutation_1) async with ClientSession() as client: async with client.get(binary_data_url) as resp: params = {"file": resp.content, "other_var": 42} # Execute query asynchronously result = await session.execute( query, variable_values=params, upload_files=True ) success = result["success"] assert success @pytest.mark.asyncio async def test_aiohttp_async_generator_upload(event_loop, aiohttp_server): import aiofiles from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport app = web.Application() app.router.add_route("POST", "/", binary_upload_handler) server = await aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport(url=url, timeout=10) with TemporaryFile(binary_file_content) as test_file: async with Client(transport=transport) as session: query = gql(file_upload_mutation_1) file_path = test_file.filename async def file_sender(file_name): async with 
aiofiles.open(file_name, "rb") as f: chunk = await f.read(64 * 1024) while chunk: yield chunk chunk = await f.read(64 * 1024) params = {"file": file_sender(file_path), "other_var": 42} # Execute query asynchronously result = await session.execute( query, variable_values=params, upload_files=True ) success = result["success"] assert success file_upload_mutation_2 = """ mutation($file1: Upload!, $file2: Upload!) { uploadFile(input:{file1:$file, file2:$file}) { success } } """ file_upload_mutation_2_operations = ( '{"query": "mutation ($file1: Upload!, $file2: Upload!) {\\n ' 'uploadFile(input: {file1: $file, file2: $file}) {\\n success\\n }\\n}", ' '"variables": {"file1": null, "file2": null}}' ) file_upload_mutation_2_map = '{"0": ["variables.file1"], "1": ["variables.file2"]}' file_2_content = """ This is a second test file This file will also be sent in the GraphQL mutation """ @pytest.mark.asyncio async def test_aiohttp_file_upload_two_files(event_loop, aiohttp_server): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport async def handler(request): reader = await request.multipart() field_0 = await reader.next() assert field_0.name == "operations" field_0_text = await field_0.text() assert strip_braces_spaces(field_0_text) == file_upload_mutation_2_operations field_1 = await reader.next() assert field_1.name == "map" field_1_text = await field_1.text() assert field_1_text == file_upload_mutation_2_map field_2 = await reader.next() assert field_2.name == "0" field_2_text = await field_2.text() assert field_2_text == file_1_content field_3 = await reader.next() assert field_3.name == "1" field_3_text = await field_3.text() assert field_3_text == file_2_content field_4 = await reader.next() assert field_4 is None return web.Response( text=file_upload_server_answer, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") transport = 
AIOHTTPTransport(url=url, timeout=10) with TemporaryFile(file_1_content) as test_file_1: with TemporaryFile(file_2_content) as test_file_2: async with Client(transport=transport) as session: query = gql(file_upload_mutation_2) file_path_1 = test_file_1.filename file_path_2 = test_file_2.filename f1 = open(file_path_1, "rb") f2 = open(file_path_2, "rb") params = { "file1": f1, "file2": f2, } result = await session.execute( query, variable_values=params, upload_files=True ) f1.close() f2.close() success = result["success"] assert success file_upload_mutation_3 = """ mutation($files: [Upload!]!) { uploadFiles(input:{files:$files}) { success } } """ file_upload_mutation_3_operations = ( '{"query": "mutation ($files: [Upload!]!) {\\n uploadFiles(' "input: {files: $files})" ' {\\n success\\n }\\n}", "variables": {"files": [null, null]}}' ) file_upload_mutation_3_map = '{"0": ["variables.files.0"], "1": ["variables.files.1"]}' @pytest.mark.asyncio async def test_aiohttp_file_upload_list_of_two_files(event_loop, aiohttp_server): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport async def handler(request): reader = await request.multipart() field_0 = await reader.next() assert field_0.name == "operations" field_0_text = await field_0.text() assert strip_braces_spaces(field_0_text) == file_upload_mutation_3_operations field_1 = await reader.next() assert field_1.name == "map" field_1_text = await field_1.text() assert field_1_text == file_upload_mutation_3_map field_2 = await reader.next() assert field_2.name == "0" field_2_text = await field_2.text() assert field_2_text == file_1_content field_3 = await reader.next() assert field_3.name == "1" field_3_text = await field_3.text() assert field_3_text == file_2_content field_4 = await reader.next() assert field_4 is None return web.Response( text=file_upload_server_answer, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", handler) server = await 
aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport(url=url, timeout=10) with TemporaryFile(file_1_content) as test_file_1: with TemporaryFile(file_2_content) as test_file_2: async with Client(transport=transport) as session: query = gql(file_upload_mutation_3) file_path_1 = test_file_1.filename file_path_2 = test_file_2.filename f1 = open(file_path_1, "rb") f2 = open(file_path_2, "rb") params = {"files": [f1, f2]} # Execute query asynchronously result = await session.execute( query, variable_values=params, upload_files=True ) f1.close() f2.close() success = result["success"] assert success @pytest.mark.asyncio async def test_aiohttp_using_cli(event_loop, aiohttp_server, monkeypatch, capsys): from aiohttp import web async def handler(request): return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) parser = get_parser(with_examples=True) args = parser.parse_args([url, "--verbose"]) # Monkeypatching sys.stdin to simulate getting the query # via the standard input monkeypatch.setattr("sys.stdin", io.StringIO(query1_str)) exit_code = await main(args) assert exit_code == 0 # Check that the result has been printed on stdout captured = capsys.readouterr() captured_out = str(captured.out).strip() expected_answer = json.loads(query1_server_answer_data) print(f"Captured: {captured_out}") received_answer = json.loads(captured_out) assert received_answer == expected_answer @pytest.mark.asyncio @pytest.mark.script_launch_mode("subprocess") async def test_aiohttp_using_cli_ep( event_loop, aiohttp_server, monkeypatch, script_runner, run_sync_test ): from aiohttp import web async def handler(request): return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = 
str(server.make_url("/")) def test_code(): monkeypatch.setattr("sys.stdin", io.StringIO(query1_str)) ret = script_runner.run( "gql-cli", url, "--verbose", stdin=io.StringIO(query1_str) ) assert ret.success # Check that the result has been printed on stdout captured_out = str(ret.stdout).strip() expected_answer = json.loads(query1_server_answer_data) print(f"Captured: {captured_out}") received_answer = json.loads(captured_out) assert received_answer == expected_answer await run_sync_test(event_loop, server, test_code) @pytest.mark.asyncio async def test_aiohttp_using_cli_invalid_param( event_loop, aiohttp_server, monkeypatch, capsys ): from aiohttp import web async def handler(request): return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) parser = get_parser(with_examples=True) args = parser.parse_args([url, "--variables", "invalid_param"]) # Monkeypatching sys.stdin to simulate getting the query # via the standard input monkeypatch.setattr("sys.stdin", io.StringIO(query1_str)) # Check that the exit_code is an error exit_code = await main(args) assert exit_code == 1 # Check that the error has been printed on stdout captured = capsys.readouterr() captured_err = str(captured.err).strip() print(f"Captured: {captured_err}") expected_error = "Error: Invalid variable: invalid_param" assert expected_error in captured_err @pytest.mark.asyncio async def test_aiohttp_using_cli_invalid_query( event_loop, aiohttp_server, monkeypatch, capsys ): from aiohttp import web async def handler(request): return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) parser = get_parser(with_examples=True) args = parser.parse_args([url]) # Send invalid query on standard input 
monkeypatch.setattr("sys.stdin", io.StringIO("BLAHBLAH")) exit_code = await main(args) assert exit_code == 1 # Check that the error has been printed on stdout captured = capsys.readouterr() captured_err = str(captured.err).strip() print(f"Captured: {captured_err}") expected_error = "Syntax Error: Unexpected Name 'BLAHBLAH'" assert expected_error in captured_err query1_server_answer_with_extensions = ( f'{{"data":{query1_server_answer_data}, "extensions":{{"key1": "val1"}}}}' ) @pytest.mark.asyncio async def test_aiohttp_query_with_extensions(event_loop, aiohttp_server): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport async def handler(request): return web.Response( text=query1_server_answer_with_extensions, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport(url=url, timeout=10) async with Client(transport=transport) as session: query = gql(query1_str) execution_result = await session.execute(query, get_execution_result=True) assert execution_result.extensions["key1"] == "val1" @pytest.mark.asyncio @pytest.mark.parametrize("ssl_close_timeout", [0, 10]) async def test_aiohttp_query_https(event_loop, ssl_aiohttp_server, ssl_close_timeout): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport async def handler(request): return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await ssl_aiohttp_server(app) url = server.make_url("/") assert str(url).startswith("https://") transport = AIOHTTPTransport( url=url, timeout=10, ssl_close_timeout=ssl_close_timeout ) async with Client(transport=transport) as session: query = gql(query1_str) # Execute query asynchronously result = await session.execute(query) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" 
@pytest.mark.asyncio async def test_aiohttp_error_fetching_schema(event_loop, aiohttp_server): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport error_answer = """ { "errors": [ { "errorType": "UnauthorizedException", "message": "Permission denied" } ] } """ async def handler(request): return web.Response( text=error_answer, content_type="application/json", ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport(url=url, timeout=10) with pytest.raises(TransportQueryError) as exc_info: async with Client(transport=transport, fetch_schema_from_transport=True): pass expected_error = ( "Error while fetching schema: " "{'errorType': 'UnauthorizedException', 'message': 'Permission denied'}" ) assert expected_error in str(exc_info.value) assert transport.session is None @pytest.mark.asyncio async def test_aiohttp_reconnecting_session(event_loop, aiohttp_server): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport async def handler(request): return web.Response( text=query1_server_answer, content_type="application/json", ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport(url=url, timeout=10) client = Client(transport=transport) session = await client.connect_async(reconnecting=True) query = gql(query1_str) # Execute query asynchronously result = await session.execute(query) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" await client.close_async() @pytest.mark.asyncio @pytest.mark.parametrize("retries", [False, lambda e: e]) async def test_aiohttp_reconnecting_session_retries( event_loop, aiohttp_server, retries ): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport async def handler(request): return web.Response( text=query1_server_answer, 
content_type="application/json", ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport(url=url, timeout=10) client = Client(transport=transport) session = await client.connect_async( reconnecting=True, retry_execute=retries, retry_connect=retries ) assert session._execute_with_retries == session._execute_once assert session._connect_with_retries == session.transport.connect await client.close_async() @pytest.mark.asyncio async def test_aiohttp_reconnecting_session_start_connecting_task_twice( event_loop, aiohttp_server, caplog ): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport async def handler(request): return web.Response( text=query1_server_answer, content_type="application/json", ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport(url=url, timeout=10) client = Client(transport=transport) session = await client.connect_async(reconnecting=True) await session.start_connecting_task() print(f"Captured log: {caplog.text}") expected_warning = "connect task already started!" 
assert expected_warning in caplog.text await client.close_async() @pytest.mark.asyncio async def test_aiohttp_json_serializer(event_loop, aiohttp_server, caplog): from aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport async def handler(request): request_text = await request.text() print("Received on backend: " + request_text) return web.Response( text=query1_server_answer, content_type="application/json", ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") transport = AIOHTTPTransport( url=url, timeout=10, json_serialize=lambda e: json.dumps(e, separators=(",", ":")), ) async with Client(transport=transport) as session: query = gql(query1_str) # Execute query asynchronously result = await session.execute(query) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" # Checking that there is no space after the colon in the log expected_log = '"query":"query getContinents' assert expected_log in caplog.text query_float_str = """ query getPi { pi } """ query_float_server_answer_data = '{"pi": 3.141592653589793238462643383279502884197}' query_float_server_answer = f'{{"data":{query_float_server_answer_data}}}' @pytest.mark.asyncio async def test_aiohttp_json_deserializer(event_loop, aiohttp_server): from aiohttp import web from decimal import Decimal from functools import partial from gql.transport.aiohttp import AIOHTTPTransport async def handler(request): return web.Response( text=query_float_server_answer, content_type="application/json", ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") json_loads = partial(json.loads, parse_float=Decimal) transport = AIOHTTPTransport( url=url, timeout=10, json_deserialize=json_loads, ) async with Client(transport=transport) as session: query = gql(query_float_str) # Execute query asynchronously result = await 
session.execute(query) pi = result["pi"] assert pi == Decimal("3.141592653589793238462643383279502884197") @pytest.mark.asyncio async def test_aiohttp_connector_owner_false(event_loop, aiohttp_server): from aiohttp import web, TCPConnector from gql.transport.aiohttp import AIOHTTPTransport async def handler(request): return web.Response( text=query1_server_answer, content_type="application/json", ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") connector = TCPConnector() transport = AIOHTTPTransport( url=url, timeout=10, client_session_args={ "connector": connector, "connector_owner": False, }, ) for _ in range(2): async with Client(transport=transport) as session: query = gql(query1_str) # Execute query asynchronously result = await session.execute(query) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" await connector.close() gql-3.6.0b2/tests/test_aiohttp_online.py000066400000000000000000000062501460703211500203200ustar00rootroot00000000000000import asyncio import sys from typing import Dict import pytest from gql import Client, gql from gql.transport.exceptions import TransportQueryError @pytest.mark.aiohttp @pytest.mark.online @pytest.mark.asyncio async def test_aiohttp_simple_query(event_loop): from gql.transport.aiohttp import AIOHTTPTransport # Create https url url = "https://countries.trevorblades.com/graphql" # Get transport sample_transport = AIOHTTPTransport(url=url) # Instanciate client async with Client(transport=sample_transport) as session: query = gql( """ query getContinents { continents { code name } } """ ) # Fetch schema await session.fetch_schema() # Execute query result = await session.execute(query) # Verify result assert isinstance(result, Dict) print(result) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" @pytest.mark.aiohttp @pytest.mark.online @pytest.mark.asyncio async def 
test_aiohttp_invalid_query(event_loop): from gql.transport.aiohttp import AIOHTTPTransport sample_transport = AIOHTTPTransport( url="https://countries.trevorblades.com/graphql" ) async with Client(transport=sample_transport) as session: query = gql( """ query getContinents { continents { code bloh } } """ ) with pytest.raises(TransportQueryError): await session.execute(query) @pytest.mark.aiohttp @pytest.mark.online @pytest.mark.skipif(sys.version_info < (3, 8), reason="requires python3.8 or higher") @pytest.mark.asyncio async def test_aiohttp_two_queries_in_parallel_using_two_tasks(event_loop): from gql.transport.aiohttp import AIOHTTPTransport sample_transport = AIOHTTPTransport( url="https://countries.trevorblades.com/graphql", ) # Instanciate client async with Client(transport=sample_transport) as session: query1 = gql( """ query getContinents { continents { code } } """ ) query2 = gql( """ query getContinents { continents { name } } """ ) async def query_task1(): result = await session.execute(query1) assert isinstance(result, Dict) print(result) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" async def query_task2(): result = await session.execute(query2) assert isinstance(result, Dict) print(result) continents = result["continents"] africa = continents[0] assert africa["name"] == "Africa" task1 = asyncio.create_task(query_task1()) task2 = asyncio.create_task(query_task2()) await task1 await task2 gql-3.6.0b2/tests/test_appsync_auth.py000066400000000000000000000146741460703211500200130ustar00rootroot00000000000000import pytest mock_transport_host = "appsyncapp.awsgateway.com.example.org" mock_transport_url = f"https://{mock_transport_host}/graphql" @pytest.mark.botocore def test_appsync_init_with_minimal_args(fake_session_factory): from gql.transport.appsync_auth import AppSyncIAMAuthentication from gql.transport.appsync_websockets import AppSyncWebsocketsTransport sample_transport = AppSyncWebsocketsTransport( 
        url=mock_transport_url, session=fake_session_factory()
    )
    assert isinstance(sample_transport.auth, AppSyncIAMAuthentication)
    assert sample_transport.connect_timeout == 10
    assert sample_transport.close_timeout == 10
    assert sample_transport.ack_timeout == 10
    assert sample_transport.ssl is False
    assert sample_transport.connect_args == {}


@pytest.mark.botocore
def test_appsync_init_with_no_credentials(caplog, fake_session_factory):
    # A session without credentials must make the transport constructor raise
    # NoCredentialsError and log "Credentials not found".
    import botocore.exceptions

    from gql.transport.appsync_websockets import AppSyncWebsocketsTransport

    with pytest.raises(botocore.exceptions.NoCredentialsError):
        sample_transport = AppSyncWebsocketsTransport(
            url=mock_transport_url,
            session=fake_session_factory(credentials=None),
        )

        # Not reached if the constructor raised as expected
        assert sample_transport.auth is None

    expected_error = "Credentials not found"

    print(f"Captured log: {caplog.text}")

    assert expected_error in caplog.text


@pytest.mark.websockets
def test_appsync_init_with_jwt_auth():
    from gql.transport.appsync_auth import AppSyncJWTAuthentication
    from gql.transport.appsync_websockets import AppSyncWebsocketsTransport

    # An explicitly provided auth instance is kept as-is by the transport
    auth = AppSyncJWTAuthentication(host=mock_transport_host, jwt="some-jwt")

    sample_transport = AppSyncWebsocketsTransport(url=mock_transport_url, auth=auth)

    assert sample_transport.auth is auth

    # The JWT is sent in the Authorization header
    assert auth.get_headers() == {
        "host": mock_transport_host,
        "Authorization": "some-jwt",
    }


@pytest.mark.websockets
def test_appsync_init_with_apikey_auth():
    from gql.transport.appsync_auth import AppSyncApiKeyAuthentication
    from gql.transport.appsync_websockets import AppSyncWebsocketsTransport

    auth = AppSyncApiKeyAuthentication(host=mock_transport_host, api_key="some-api-key")

    sample_transport = AppSyncWebsocketsTransport(url=mock_transport_url, auth=auth)

    assert sample_transport.auth is auth

    # The API key is sent in the x-api-key header
    assert auth.get_headers() == {
        "host": mock_transport_host,
        "x-api-key": "some-api-key",
    }


@pytest.mark.botocore
def test_appsync_init_with_iam_auth_without_creds(fake_session_factory):
    import botocore.exceptions

    from gql.transport.appsync_auth import AppSyncIAMAuthentication
    from gql.transport.appsync_websockets import AppSyncWebsocketsTransport

    auth = AppSyncIAMAuthentication(
        host=mock_transport_host,
        session=fake_session_factory(credentials=None),
    )

    # IAM auth without credentials must fail when the transport is built
    with pytest.raises(botocore.exceptions.NoCredentialsError):
        AppSyncWebsocketsTransport(url=mock_transport_url, auth=auth)


@pytest.mark.botocore
def test_appsync_init_with_iam_auth_with_creds(fake_credentials_factory):
    from gql.transport.appsync_auth import AppSyncIAMAuthentication
    from gql.transport.appsync_websockets import AppSyncWebsocketsTransport

    auth = AppSyncIAMAuthentication(
        host=mock_transport_host,
        credentials=fake_credentials_factory(),
        region_name="us-east-1",
    )

    sample_transport = AppSyncWebsocketsTransport(url=mock_transport_url, auth=auth)

    assert sample_transport.auth is auth


@pytest.mark.botocore
def test_appsync_init_with_iam_auth_and_no_region(
    caplog, fake_credentials_factory, fake_session_factory
):
    """
    WARNING: this test will fail if:
    - you have a default region set in ~/.aws/config
    - you have the AWS_DEFAULT_REGION environment variable set
    """
    from gql.transport.appsync_websockets import AppSyncWebsocketsTransport
    from botocore.exceptions import NoRegionError
    import logging

    caplog.set_level(logging.WARNING)

    with pytest.raises(NoRegionError):
        session = fake_session_factory(credentials=fake_credentials_factory())
        # Strip any region configured on the fake session
        session._region_name = None
        session._credentials.region = None
        transport = AppSyncWebsocketsTransport(url=mock_transport_url, session=session)

        # prints the region name in case the test fails
        print(f"Region found: {transport.auth._region_name}")

    print(f"Captured: {caplog.text}")

    expected_error = (
        "Region name not found. "
        "It was not possible to detect your region either from the host "
        "or from your default AWS configuration."
) assert expected_error in caplog.text @pytest.mark.botocore def test_munge_url(fake_signer_factory, fake_request_factory): from gql.transport.appsync_auth import AppSyncIAMAuthentication from gql.transport.appsync_websockets import AppSyncWebsocketsTransport test_url = "https://appsync-api.aws.example.org/some-other-params" auth = AppSyncIAMAuthentication( host=test_url, signer=fake_signer_factory(), request_creator=fake_request_factory, ) sample_transport = AppSyncWebsocketsTransport(url=test_url, auth=auth) header_string = ( "eyJGYWtlQXV0aG9yaXphdGlvbiI6ImEiLCJGYWtlVGltZSI6InRvZGF5" "IiwiaG9zdCI6Imh0dHBzOi8vYXBwc3luYy1hcGkuYXdzLmV4YW1wbGUu" "b3JnL3NvbWUtb3RoZXItcGFyYW1zIn0=" ) expected_url = ( "wss://appsync-realtime-api.aws.example.org/" f"some-other-params?header={header_string}&payload=e30=" ) assert sample_transport.url == expected_url @pytest.mark.botocore def test_munge_url_format( fake_signer_factory, fake_request_factory, fake_credentials_factory, fake_session_factory, ): from gql.transport.appsync_auth import AppSyncIAMAuthentication test_url = "https://appsync-api.aws.example.org/some-other-params" auth = AppSyncIAMAuthentication( host=test_url, signer=fake_signer_factory(), session=fake_session_factory(), request_creator=fake_request_factory, credentials=fake_credentials_factory(), ) header_string = ( "eyJGYWtlQXV0aG9yaXphdGlvbiI6ImEiLCJGYWtlVGltZSI6InRvZGF5" "IiwiaG9zdCI6Imh0dHBzOi8vYXBwc3luYy1hcGkuYXdzLmV4YW1wbGUu" "b3JnL3NvbWUtb3RoZXItcGFyYW1zIn0=" ) expected_url = ( "wss://appsync-realtime-api.aws.example.org/" f"some-other-params?header={header_string}&payload=e30=" ) assert auth.get_auth_url(test_url) == expected_url gql-3.6.0b2/tests/test_appsync_http.py000066400000000000000000000041331460703211500200160ustar00rootroot00000000000000import json import pytest from gql import Client, gql @pytest.mark.asyncio @pytest.mark.aiohttp @pytest.mark.botocore async def test_appsync_iam_mutation( event_loop, aiohttp_server, fake_credentials_factory ): from 
aiohttp import web from gql.transport.aiohttp import AIOHTTPTransport from gql.transport.appsync_auth import AppSyncIAMAuthentication from urllib.parse import urlparse async def handler(request): data = { "createMessage": { "id": "4b436192-aab2-460c-8bdf-4f2605eb63da", "message": "Hello world!", "createdAt": "2021-12-06T14:49:55.087Z", } } payload = { "data": data, "extensions": {"received_headers": dict(request.headers)}, } return web.Response( text=json.dumps(payload, separators=(",", ":")), content_type="application/json", ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) # Extract host from url host = str(urlparse(url).netloc) auth = AppSyncIAMAuthentication( host=host, credentials=fake_credentials_factory(), region_name="us-east-1", ) sample_transport = AIOHTTPTransport(url=url, auth=auth) async with Client(transport=sample_transport) as session: query = gql( """ mutation createMessage($message: String!) { createMessage(input: {message: $message}) { id message createdAt } }""" ) # Execute query asynchronously execution_result = await session.execute(query, get_execution_result=True) result = execution_result.data message = result["createMessage"]["message"] assert message == "Hello world!" 
sent_headers = execution_result.extensions["received_headers"] assert sent_headers["X-Amz-Security-Token"] == "fake-token" assert sent_headers["Authorization"].startswith( "AWS4-HMAC-SHA256 Credential=fake-access-key/" ) gql-3.6.0b2/tests/test_appsync_websockets.py000066400000000000000000000571611460703211500212210ustar00rootroot00000000000000import asyncio import json from base64 import b64decode from typing import List from urllib import parse import pytest from gql import Client, gql from .conftest import MS, WebSocketServerHelper # Marking all tests in this file with the websockets marker pytestmark = pytest.mark.websockets SEND_MESSAGE_DELAY = 20 * MS NB_MESSAGES = 10 DUMMY_API_KEY = "da2-thisisadummyapikey01234567" DUMMY_ACCESS_KEY_ID = "DUMMYACCESSKEYID0123" DUMMY_ACCESS_KEY_ID_NOT_ALLOWED = "DUMMYACCESSKEYID!ALL" DUMMY_ACCESS_KEY_IDS = [DUMMY_ACCESS_KEY_ID, DUMMY_ACCESS_KEY_ID_NOT_ALLOWED] DUMMY_SECRET_ACCESS_KEY = "ThisIsADummySecret0123401234012340123401" DUMMY_SECRET_SESSION_TOKEN = ( "FwoREDACTEDzEREDACTED+YREDACTEDJLREDACTEDz2REDACTEDH5RE" "DACTEDbVREDACTEDqwREDACTEDHJREDACTEDxFREDACTEDtMREDACTED5kREDACTEDSwREDACTED0BRED" "ACTEDuDREDACTEDm4REDACTEDSBREDACTEDaoREDACTEDP2REDACTEDCBREDACTED0wREDACTEDmdREDA" "CTEDyhREDACTEDSKREDACTEDYbREDACTEDfeREDACTED3UREDACTEDaKREDACTEDi1REDACTEDGEREDAC" "TED4VREDACTEDjmREDACTEDYcREDACTEDkQREDACTEDyI=" ) REGION_NAME = "eu-west-3" # List which can used to store received messages by the server logged_messages: List[str] = [] def realtime_appsync_server_factory( keepalive=False, not_json_answer=False, error_without_id=False ): def verify_headers(headers, in_query=False): """Returns an error or None if all is ok""" if "x-api-key" in headers: print("API KEY Authentication detected!") if headers["x-api-key"] == DUMMY_API_KEY: return None elif "Authorization" in headers: if "X-Amz-Security-Token" in headers: with_token = True print("IAM Authentication with token detected!") else: with_token = False print("IAM Authentication 
with token detected!") print("IAM Authentication without token detected!") assert headers["accept"] == "application/json, text/javascript" assert headers["content-encoding"] == "amz-1.0" assert headers["content-type"] == "application/json; charset=UTF-8" assert "X-Amz-Date" in headers authorization_fields = headers["Authorization"].split(" ") assert authorization_fields[0] == "AWS4-HMAC-SHA256" credential_field = authorization_fields[1][:-1].split("=") assert credential_field[0] == "Credential" credential_content = credential_field[1].split("/") assert credential_content[0] in DUMMY_ACCESS_KEY_IDS if in_query: if credential_content[0] == DUMMY_ACCESS_KEY_ID_NOT_ALLOWED: return { "errorType": "UnauthorizedException", "message": "Permission denied", } # assert credential_content[1]== date # assert credential_content[2]== region assert credential_content[3] == "appsync" assert credential_content[4] == "aws4_request" signed_headers_field = authorization_fields[2][:-1].split("=") assert signed_headers_field[0] == "SignedHeaders" signed_headers = signed_headers_field[1].split(";") assert "accept" in signed_headers assert "content-encoding" in signed_headers assert "content-type" in signed_headers assert "host" in signed_headers assert "x-amz-date" in signed_headers if with_token: assert "x-amz-security-token" in signed_headers signature_field = authorization_fields[3].split("=") assert signature_field[0] == "Signature" return None return { "errorType": "com.amazonaws.deepdish.graphql.auth#UnauthorizedException", "message": "You are not authorized to make this call.", "errorCode": 400, } async def realtime_appsync_server_template(ws, path): import websockets logged_messages.clear() try: if not_json_answer: await ws.send("Something not json") return if error_without_id: await ws.send( json.dumps( { "type": "error", "payload": { "errors": [ { "errorType": "Error without id", "message": ( "Sometimes AppSync will send you " "an error without an id" ), } ] }, }, 
separators=(",", ":"), ) ) return print(f"path = {path}") path_base, parameters_str = path.split("?") assert path_base == "/graphql" parameters = parse.parse_qs(parameters_str) header_param = parameters["header"][0] payload_param = parameters["payload"][0] assert payload_param == "e30=" headers = json.loads(b64decode(header_param).decode()) print("\nHeaders received in URL:") for key, value in headers.items(): print(f" {key}: {value}") print("\n") error = verify_headers(headers) if error is not None: await ws.send( json.dumps( {"payload": {"errors": [error]}, "type": "connection_error"}, separators=(",", ":"), ) ) return await WebSocketServerHelper.send_connection_ack( ws, payload='{"connectionTimeoutMs":300000}' ) result = await ws.recv() logged_messages.append(result) json_result = json.loads(result) query_id = json_result["id"] assert json_result["type"] == "start" payload = json_result["payload"] # With appsync, the data field is serialized to string data_str = payload["data"] extensions = payload["extensions"] data = json.loads(data_str) query = data["query"] variables = data.get("variables", None) operation_name = data.get("operationName", None) print(f"Received query: {query}") print(f"Received variables: {variables}") print(f"Received operation_name: {operation_name}") authorization = extensions["authorization"] print("\nHeaders received in the extensions of the query:") for key, value in authorization.items(): print(f" {key}: {value}") print("\n") error = verify_headers(headers, in_query=True) if error is not None: await ws.send( json.dumps( { "id": str(query_id), "type": "error", "payload": {"errors": [error]}, }, separators=(",", ":"), ) ) return await ws.send( json.dumps( {"id": str(query_id), "type": "start_ack"}, separators=(",", ":") ) ) async def send_message_coro(): print(" Server: send message task started") try: for number in range(NB_MESSAGES): payload = { "data": { "onCreateMessage": {"message": f"Hello world {number}!"} } } if operation_name 
or variables: payload["extensions"] = {} if operation_name: payload["extensions"]["operation_name"] = operation_name if variables: payload["extensions"]["variables"] = variables await ws.send( json.dumps( { "id": str(query_id), "type": "data", "payload": payload, }, separators=(",", ":"), ) ) await asyncio.sleep(SEND_MESSAGE_DELAY) finally: print(" Server: send message task ended") print(" Server: starting send message task") send_message_task = asyncio.ensure_future(send_message_coro()) async def keepalive_coro(): while True: await asyncio.sleep(5 * MS) try: await WebSocketServerHelper.send_keepalive(ws) except websockets.exceptions.ConnectionClosed: break if keepalive: print(" Server: starting keepalive task") keepalive_task = asyncio.ensure_future(keepalive_coro()) async def receiving_coro(): print(" Server: receiving task started") try: nonlocal send_message_task while True: try: result = await ws.recv() logged_messages.append(result) except websockets.exceptions.ConnectionClosed: break finally: print(" Server: receiving task ended") if keepalive: keepalive_task.cancel() print(" Server: starting receiving task") receiving_task = asyncio.ensure_future(receiving_coro()) try: print( " Server: waiting for sending message task to complete" ) await send_message_task except asyncio.CancelledError: print(" Server: Now sending message task is cancelled") print(" Server: sending complete message") await WebSocketServerHelper.send_complete(ws, query_id) if keepalive: print(" Server: cancelling keepalive task") keepalive_task.cancel() try: await keepalive_task except asyncio.CancelledError: print(" Server: Now keepalive task is cancelled") print(" Server: waiting for client to close the connection") try: await asyncio.wait_for(receiving_task, 1000 * MS) except asyncio.TimeoutError: pass print(" Server: cancelling receiving task") receiving_task.cancel() try: await receiving_task except asyncio.CancelledError: print(" Server: Now receiving task is cancelled") except 
websockets.exceptions.ConnectionClosedOK: pass except AssertionError as e: print(f"\n Server: Assertion failed: {e!s}\n") except Exception as e: print(f"\n Server: Exception received: {e!s}\n") finally: print(" Server: waiting for websocket connection to close") try: await asyncio.wait_for(ws.wait_closed(), 1000 * MS) except asyncio.TimeoutError: pass try: await asyncio.wait_for(ws.close(), 1000 * MS) except asyncio.TimeoutError: pass print(" Server: connection closed") return realtime_appsync_server_template async def realtime_appsync_server(ws, path): server = realtime_appsync_server_factory() await server(ws, path) async def realtime_appsync_server_keepalive(ws, path): server = realtime_appsync_server_factory(keepalive=True) await server(ws, path) async def realtime_appsync_server_not_json_answer(ws, path): server = realtime_appsync_server_factory(not_json_answer=True) await server(ws, path) async def realtime_appsync_server_error_without_id(ws, path): server = realtime_appsync_server_factory(error_without_id=True) await server(ws, path) on_create_message_subscription_str = """ subscription onCreateMessage { onCreateMessage { message } } """ async def default_transport_test(transport): client = Client(transport=transport) expected_messages = [f"Hello world {number}!" 
        for number in range(NB_MESSAGES)]

    received_messages = []

    async with client as session:

        subscription = gql(on_create_message_subscription_str)

        async for result in session.subscribe(subscription):

            message = result["onCreateMessage"]["message"]
            print(f"Message received: '{message}'")
            received_messages.append(message)

    assert expected_messages == received_messages


@pytest.mark.asyncio
@pytest.mark.parametrize("server", [realtime_appsync_server_keepalive], indirect=True)
async def test_appsync_subscription_api_key(event_loop, server):
    # Subscribe with API key authentication against the mock AppSync server.
    from gql.transport.appsync_auth import AppSyncApiKeyAuthentication
    from gql.transport.appsync_websockets import AppSyncWebsocketsTransport

    path = "/graphql"
    url = f"ws://{server.hostname}:{server.port}{path}"

    auth = AppSyncApiKeyAuthentication(host=server.hostname, api_key=DUMMY_API_KEY)

    transport = AppSyncWebsocketsTransport(
        url=url, auth=auth, keep_alive_timeout=(5 * SEND_MESSAGE_DELAY)
    )

    await default_transport_test(transport)


@pytest.mark.asyncio
@pytest.mark.botocore
@pytest.mark.parametrize("server", [realtime_appsync_server], indirect=True)
async def test_appsync_subscription_iam_with_token(event_loop, server):
    # IAM authentication using temporary credentials (with a session token).
    from gql.transport.appsync_auth import AppSyncIAMAuthentication
    from gql.transport.appsync_websockets import AppSyncWebsocketsTransport
    from botocore.credentials import Credentials

    path = "/graphql"
    url = f"ws://{server.hostname}:{server.port}{path}"

    dummy_credentials = Credentials(
        access_key=DUMMY_ACCESS_KEY_ID,
        secret_key=DUMMY_SECRET_ACCESS_KEY,
        token=DUMMY_SECRET_SESSION_TOKEN,
    )

    auth = AppSyncIAMAuthentication(
        host=server.hostname, credentials=dummy_credentials, region_name=REGION_NAME
    )

    transport = AppSyncWebsocketsTransport(url=url, auth=auth)

    await default_transport_test(transport)


@pytest.mark.asyncio
@pytest.mark.botocore
@pytest.mark.parametrize("server", [realtime_appsync_server], indirect=True)
async def test_appsync_subscription_iam_without_token(event_loop, server):
    # IAM authentication using long-lived credentials (no session token).
    from gql.transport.appsync_auth import AppSyncIAMAuthentication
    from gql.transport.appsync_websockets import AppSyncWebsocketsTransport
    from botocore.credentials import Credentials

    path = "/graphql"
    url = f"ws://{server.hostname}:{server.port}{path}"

    dummy_credentials = Credentials(
        access_key=DUMMY_ACCESS_KEY_ID,
        secret_key=DUMMY_SECRET_ACCESS_KEY,
    )

    auth = AppSyncIAMAuthentication(
        host=server.hostname, credentials=dummy_credentials, region_name=REGION_NAME
    )

    transport = AppSyncWebsocketsTransport(url=url, auth=auth)

    await default_transport_test(transport)


@pytest.mark.asyncio
@pytest.mark.botocore
@pytest.mark.parametrize("server", [realtime_appsync_server], indirect=True)
async def test_appsync_execute_method_not_allowed(event_loop, server):
    # execute() must be rejected: only subscriptions are supported on the
    # realtime endpoint.
    from gql.transport.appsync_auth import AppSyncIAMAuthentication
    from gql.transport.appsync_websockets import AppSyncWebsocketsTransport
    from botocore.credentials import Credentials

    path = "/graphql"
    url = f"ws://{server.hostname}:{server.port}{path}"

    dummy_credentials = Credentials(
        access_key=DUMMY_ACCESS_KEY_ID,
        secret_key=DUMMY_SECRET_ACCESS_KEY,
    )

    auth = AppSyncIAMAuthentication(
        host=server.hostname, credentials=dummy_credentials, region_name=REGION_NAME
    )

    transport = AppSyncWebsocketsTransport(url=url, auth=auth)

    client = Client(transport=transport)

    async with client as session:
        query = gql(
            """
mutation createMessage($message: String!) {
  createMessage(input: {message: $message}) {
    id
    message
    createdAt
  }
}"""
        )

        variable_values = {"message": "Hello world!"}

        with pytest.raises(AssertionError) as exc_info:
            await session.execute(query, variable_values=variable_values)

        assert (
            "execute method is not allowed for AppSyncWebsocketsTransport "
            "because only subscriptions are allowed on the realtime endpoint."
        ) in str(exc_info)


@pytest.mark.asyncio
@pytest.mark.botocore
async def test_appsync_fetch_schema_from_transport_not_allowed(event_loop):
    # fetch_schema_from_transport=True must be rejected at Client creation:
    # the realtime endpoint only accepts subscriptions.
    from gql.transport.appsync_auth import AppSyncIAMAuthentication
    from gql.transport.appsync_websockets import AppSyncWebsocketsTransport
    from botocore.credentials import Credentials

    dummy_credentials = Credentials(
        access_key=DUMMY_ACCESS_KEY_ID,
        secret_key=DUMMY_SECRET_ACCESS_KEY,
    )

    auth = AppSyncIAMAuthentication(
        host="something", credentials=dummy_credentials, region_name=REGION_NAME
    )

    transport = AppSyncWebsocketsTransport(url="https://something", auth=auth)

    with pytest.raises(AssertionError) as exc_info:
        Client(transport=transport, fetch_schema_from_transport=True)

    assert (
        "fetch_schema_from_transport=True is not allowed for AppSyncWebsocketsTransport"
        " because only subscriptions are allowed on the realtime endpoint."
    ) in str(exc_info)


@pytest.mark.asyncio
@pytest.mark.parametrize("server", [realtime_appsync_server], indirect=True)
async def test_appsync_subscription_api_key_unauthorized(event_loop, server):
    # An invalid API key must make the connection fail with a server error.
    from gql.transport.appsync_auth import AppSyncApiKeyAuthentication
    from gql.transport.appsync_websockets import AppSyncWebsocketsTransport
    from gql.transport.exceptions import TransportServerError

    path = "/graphql"
    url = f"ws://{server.hostname}:{server.port}{path}"

    auth = AppSyncApiKeyAuthentication(host=server.hostname, api_key="invalid")

    transport = AppSyncWebsocketsTransport(url=url, auth=auth)

    client = Client(transport=transport)

    with pytest.raises(TransportServerError) as exc_info:
        async with client as _:
            pass

    assert "You are not authorized to make this call." in str(exc_info)


@pytest.mark.asyncio
@pytest.mark.botocore
@pytest.mark.parametrize("server", [realtime_appsync_server], indirect=True)
async def test_appsync_subscription_iam_not_allowed(event_loop, server):
    # Credentials whose access key the mock server rejects: the connection
    # succeeds, but the subscription query fails with "Permission denied".
    from gql.transport.appsync_auth import AppSyncIAMAuthentication
    from gql.transport.appsync_websockets import AppSyncWebsocketsTransport
    from gql.transport.exceptions import TransportQueryError
    from botocore.credentials import Credentials

    path = "/graphql"
    url = f"ws://{server.hostname}:{server.port}{path}"

    dummy_credentials = Credentials(
        access_key=DUMMY_ACCESS_KEY_ID_NOT_ALLOWED,
        secret_key=DUMMY_SECRET_ACCESS_KEY,
        token=DUMMY_SECRET_SESSION_TOKEN,
    )

    auth = AppSyncIAMAuthentication(
        host=server.hostname, credentials=dummy_credentials, region_name=REGION_NAME
    )

    transport = AppSyncWebsocketsTransport(url=url, auth=auth)

    client = Client(transport=transport)

    async with client as session:

        subscription = gql(on_create_message_subscription_str)

        with pytest.raises(TransportQueryError) as exc_info:
            async for result in session.subscribe(subscription):
                pass

        assert "Permission denied" in str(exc_info)


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "server", [realtime_appsync_server_not_json_answer], indirect=True
)
async def test_appsync_subscription_server_sending_a_not_json_answer(
    event_loop, server
):
    # A non-JSON answer from the server must surface as a protocol error.
    from gql.transport.appsync_auth import AppSyncApiKeyAuthentication
    from gql.transport.appsync_websockets import AppSyncWebsocketsTransport
    from gql.transport.exceptions import TransportProtocolError

    path = "/graphql"
    url = f"ws://{server.hostname}:{server.port}{path}"

    auth = AppSyncApiKeyAuthentication(host=server.hostname, api_key=DUMMY_API_KEY)

    transport = AppSyncWebsocketsTransport(url=url, auth=auth)

    client = Client(transport=transport)

    with pytest.raises(TransportProtocolError) as exc_info:
        async with client as _:
            pass

    assert "Server did not return a GraphQL result: Something not json" in str(exc_info)


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "server",
    [realtime_appsync_server_error_without_id], indirect=True
)
async def test_appsync_subscription_server_sending_an_error_without_an_id(
    event_loop, server
):
    # An "error" message with no id must be raised as a TransportServerError.
    from gql.transport.appsync_auth import AppSyncApiKeyAuthentication
    from gql.transport.appsync_websockets import AppSyncWebsocketsTransport
    from gql.transport.exceptions import TransportServerError

    path = "/graphql"
    url = f"ws://{server.hostname}:{server.port}{path}"

    auth = AppSyncApiKeyAuthentication(host=server.hostname, api_key=DUMMY_API_KEY)

    transport = AppSyncWebsocketsTransport(url=url, auth=auth)

    client = Client(transport=transport)

    with pytest.raises(TransportServerError) as exc_info:
        async with client as _:
            pass

    assert "Sometimes AppSync will send you an error without an id" in str(exc_info)


@pytest.mark.asyncio
@pytest.mark.parametrize("server", [realtime_appsync_server_keepalive], indirect=True)
async def test_appsync_subscription_variable_values_and_operation_name(
    event_loop, server
):
    # Subscribe with variable_values and operation_name; the mock server
    # echoes both back in the execution result extensions.
    from gql.transport.appsync_auth import AppSyncApiKeyAuthentication
    from gql.transport.appsync_websockets import AppSyncWebsocketsTransport

    path = "/graphql"
    url = f"ws://{server.hostname}:{server.port}{path}"

    auth = AppSyncApiKeyAuthentication(host=server.hostname, api_key=DUMMY_API_KEY)

    transport = AppSyncWebsocketsTransport(
        url=url, auth=auth, keep_alive_timeout=(5 * SEND_MESSAGE_DELAY)
    )

    client = Client(transport=transport)

    expected_messages = [f"Hello world {number}!"
for number in range(NB_MESSAGES)] received_messages = [] async with client as session: subscription = gql(on_create_message_subscription_str) async for execution_result in session.subscribe( subscription, operation_name="onCreateMessage", variable_values={"key1": "val1"}, get_execution_result=True, ): result = execution_result.data message = result["onCreateMessage"]["message"] print(f"Message received: '{message}'") received_messages.append(message) print(f"extensions received: {execution_result.extensions}") assert execution_result.extensions["operation_name"] == "onCreateMessage" variables = execution_result.extensions["variables"] assert variables["key1"] == "val1" assert expected_messages == received_messages gql-3.6.0b2/tests/test_async_client_validation.py000066400000000000000000000162071460703211500221740ustar00rootroot00000000000000import asyncio import json import graphql import pytest from gql import Client, gql from .conftest import MS, WebSocketServerHelper from .starwars.schema import StarWarsIntrospection, StarWarsSchema, StarWarsTypeDef starwars_expected_one = { "stars": 3, "commentary": "Was expecting more stuff", "episode": "JEDI", } starwars_expected_two = { "stars": 5, "commentary": "This is a great movie!", "episode": "JEDI", } async def server_starwars(ws, path): import websockets await WebSocketServerHelper.send_connection_ack(ws) try: await ws.recv() reviews = [starwars_expected_one, starwars_expected_two] for review in reviews: data = ( '{"type":"data","id":"1","payload":{"data":{"reviewAdded": ' + json.dumps(review) + "}}}" ) await ws.send(data) await asyncio.sleep(2 * MS) await WebSocketServerHelper.send_complete(ws, 1) await WebSocketServerHelper.wait_connection_terminate(ws) except websockets.exceptions.ConnectionClosedOK: pass print("Server is now closed") starwars_subscription_str = """ subscription ListenEpisodeReviews($ep: Episode!) 
{ reviewAdded(episode: $ep) { stars, commentary, episode } } """ starwars_invalid_subscription_str = """ subscription ListenEpisodeReviews($ep: Episode!) { reviewAdded(episode: $ep) { not_valid_field, stars, commentary, episode } } """ @pytest.mark.websockets @pytest.mark.asyncio @pytest.mark.parametrize("server", [server_starwars], indirect=True) @pytest.mark.parametrize("subscription_str", [starwars_subscription_str]) @pytest.mark.parametrize( "client_params", [ {"schema": StarWarsSchema}, {"introspection": StarWarsIntrospection}, {"schema": StarWarsTypeDef}, ], ) async def test_async_client_validation( event_loop, server, subscription_str, client_params ): from gql.transport.websockets import WebsocketsTransport url = f"ws://{server.hostname}:{server.port}/graphql" sample_transport = WebsocketsTransport(url=url) client = Client(transport=sample_transport, **client_params) async with client as session: variable_values = {"ep": "JEDI"} subscription = gql(subscription_str) expected = [] async for result in session.subscribe( subscription, variable_values=variable_values, parse_result=False ): review = result["reviewAdded"] expected.append(review) assert "stars" in review assert "commentary" in review assert "episode" in review assert expected[0] == starwars_expected_one assert expected[1] == starwars_expected_two @pytest.mark.websockets @pytest.mark.asyncio @pytest.mark.parametrize("server", [server_starwars], indirect=True) @pytest.mark.parametrize("subscription_str", [starwars_invalid_subscription_str]) @pytest.mark.parametrize( "client_params", [ {"schema": StarWarsSchema}, {"introspection": StarWarsIntrospection}, {"schema": StarWarsTypeDef}, ], ) async def test_async_client_validation_invalid_query( event_loop, server, subscription_str, client_params ): from gql.transport.websockets import WebsocketsTransport url = f"ws://{server.hostname}:{server.port}/graphql" sample_transport = WebsocketsTransport(url=url) client = Client(transport=sample_transport, 
        **client_params)

    async with client as session:

        variable_values = {"ep": "JEDI"}

        subscription = gql(subscription_str)

        # The invalid field in the subscription must be caught by local
        # validation before anything is sent
        with pytest.raises(graphql.error.GraphQLError):
            async for _result in session.subscribe(
                subscription, variable_values=variable_values
            ):
                pass


@pytest.mark.websockets
@pytest.mark.asyncio
@pytest.mark.parametrize("server", [server_starwars], indirect=True)
@pytest.mark.parametrize("subscription_str", [starwars_invalid_subscription_str])
@pytest.mark.parametrize(
    "client_params",
    [{"schema": StarWarsSchema, "introspection": StarWarsIntrospection}],
)
async def test_async_client_validation_different_schemas_parameters_forbidden(
    event_loop, server, subscription_str, client_params
):
    # Passing both schema and introspection to Client is forbidden.
    from gql.transport.websockets import WebsocketsTransport

    url = f"ws://{server.hostname}:{server.port}/graphql"

    sample_transport = WebsocketsTransport(url=url)

    with pytest.raises(AssertionError):
        async with Client(transport=sample_transport, **client_params):
            pass


# Canned server answers: first the introspection result, then a simple
# hero query result.
hero_server_answers = (
    '{"type":"data","id":"1","payload":{"data":'
    + json.dumps(StarWarsIntrospection)
    + "}}",
    '{"type":"data","id":"2","payload":{"data":{"hero":{"name": "R2-D2"}}}}',
)


@pytest.mark.websockets
@pytest.mark.asyncio
@pytest.mark.parametrize("server", [hero_server_answers], indirect=True)
async def test_async_client_validation_fetch_schema_from_server_valid_query(
    event_loop, client_and_server
):
    session, server = client_and_server
    client = session.client

    # No schema in the client at the beginning
    assert client.introspection is None
    assert client.schema is None

    # Fetch schema from server
    await session.fetch_schema()

    # Check that the async client correctly recreated the schema
    assert client.introspection == StarWarsIntrospection
    assert client.schema is not None

    query = gql(
        """
        query HeroNameQuery {
          hero {
            name
          }
        }
        """
    )

    result = await session.execute(query)

    print("Client received:", result)

    expected = {"hero": {"name": "R2-D2"}}
    assert result == expected


@pytest.mark.websockets
@pytest.mark.asyncio
@pytest.mark.parametrize("server", [hero_server_answers], indirect=True) async def test_async_client_validation_fetch_schema_from_server_invalid_query( event_loop, client_and_server ): session, server = client_and_server # Fetch schema from server await session.fetch_schema() query = gql( """ query HeroNameQuery { hero { name sldkfjqlmsdkjfqlskjfmlqkjsfmkjqsdf } } """ ) with pytest.raises(graphql.error.GraphQLError): await session.execute(query) @pytest.mark.websockets @pytest.mark.asyncio @pytest.mark.parametrize("server", [hero_server_answers], indirect=True) async def test_async_client_validation_fetch_schema_from_server_with_client_argument( event_loop, server ): from gql.transport.websockets import WebsocketsTransport url = f"ws://{server.hostname}:{server.port}/graphql" sample_transport = WebsocketsTransport(url=url) async with Client( transport=sample_transport, fetch_schema_from_transport=True, ) as session: query = gql( """ query HeroNameQuery { hero { name sldkfjqlmsdkjfqlskjfmlqkjsfmkjqsdf } } """ ) with pytest.raises(graphql.error.GraphQLError): await session.execute(query) gql-3.6.0b2/tests/test_cli.py000066400000000000000000000273641460703211500160640ustar00rootroot00000000000000import logging import pytest from gql import __version__ from gql.cli import ( get_execute_args, get_introspection_args, get_parser, get_transport, get_transport_args, main, ) @pytest.fixture def parser(): return get_parser() def test_cli_parser(parser): # Simple call with https server # gql-cli https://your_server.com args = parser.parse_args(["https://your_server.com"]) assert args.server == "https://your_server.com" assert args.headers is None assert args.loglevel is None assert args.operation_name is None assert args.variables is None # Call with variable values parameters # gql-cli https://your_server.com --variables KEY1:value1 KEY2:value2 args = parser.parse_args( ["https://your_server.com", "--variables", "KEY1:value1", "KEY2:value2"] ) assert args.server == 
"https://your_server.com" assert args.variables == ["KEY1:value1", "KEY2:value2"] # Call with headers values parameters # gql-cli https://your_server.com --headers HEADER1:value1 HEADER2:value2 args = parser.parse_args( ["https://your_server.com", "--headers", "HEADER1:value1", "HEADER2:value2"] ) assert args.server == "https://your_server.com" assert args.headers == ["HEADER1:value1", "HEADER2:value2"] # Call with header value with a space in value # gql-cli https://your_server.com --headers Authorization:"Bearer blahblah" args = parser.parse_args( ["https://your_server.com", "--headers", "Authorization:Bearer blahblah"] ) assert args.server == "https://your_server.com" assert args.headers == ["Authorization:Bearer blahblah"] # Check loglevel flags # gql-cli https://your_server.com --debug args = parser.parse_args(["https://your_server.com", "--debug"]) assert args.loglevel == logging.DEBUG # gql-cli https://your_server.com --verbose args = parser.parse_args(["https://your_server.com", "--verbose"]) assert args.loglevel == logging.INFO # Check operation_name # gql-cli https://your_server.com --operation-name my_operation args = parser.parse_args( ["https://your_server.com", "--operation-name", "my_operation"] ) assert args.operation_name == "my_operation" # Check execute_timeout # gql-cli https://your_server.com --execute-timeout 1 args = parser.parse_args(["https://your_server.com", "--execute-timeout", "1"]) assert args.execute_timeout == 1 # gql-cli https://your_server.com --execute-timeout=none args = parser.parse_args(["https://your_server.com", "--execute-timeout", "none"]) assert args.execute_timeout is None # gql-cli https://your_server.com --execute-timeout=-1 with pytest.raises(SystemExit): args = parser.parse_args(["https://your_server.com", "--execute-timeout", "-1"]) # gql-cli https://your_server.com --execute-timeout=invalid with pytest.raises(SystemExit): args = parser.parse_args( ["https://your_server.com", "--execute-timeout", "invalid"] ) def 
test_cli_parse_headers(parser): args = parser.parse_args( [ "https://your_server.com", "--headers", "Token1:1234", "Token2:5678", "TokenWithSpace:abc def", "TokenWithColon:abc:def", ] ) transport_args = get_transport_args(args) expected_headers = { "Token1": "1234", "Token2": "5678", "TokenWithSpace": "abc def", "TokenWithColon": "abc:def", } assert transport_args == {"headers": expected_headers} def test_cli_parse_headers_invalid_header(parser): args = parser.parse_args( ["https://your_server.com", "--headers", "TokenWithoutColon"] ) with pytest.raises(ValueError): get_transport_args(args) def test_cli_parse_operation_name(parser): args = parser.parse_args(["https://your_server.com", "--operation-name", "myop"]) execute_args = get_execute_args(args) assert execute_args == {"operation_name": "myop"} @pytest.mark.parametrize( "param", [ {"args": ["key:abcdef"], "d": {"key": "abcdef"}}, {"args": ['key:"abcdef"'], "d": {"key": "abcdef"}}, {"args": ["key:1234"], "d": {"key": 1234}}, {"args": ["key1:1234", "key2:5678"], "d": {"key1": 1234, "key2": 5678}}, {"args": ["key1:null"], "d": {"key1": None}}, {"args": ["key1:true"], "d": {"key1": True}}, {"args": ["key1:false"], "d": {"key1": False}}, { "args": ["key1:null", "key2:abcd", "key3:5"], "d": {"key1": None, "key2": "abcd", "key3": 5}, }, ], ) def test_cli_parse_variable_value(parser, param): args = parser.parse_args(["https://your_server.com", "--variables", *param["args"]]) execute_args = get_execute_args(args) expected_variable_values = param["d"] assert execute_args == {"variable_values": expected_variable_values} @pytest.mark.parametrize("param", ["nocolon", 'key:"']) def test_cli_parse_variable_value_invalid_param(parser, param): args = parser.parse_args(["https://your_server.com", "--variables", param]) with pytest.raises(ValueError): get_execute_args(args) @pytest.mark.aiohttp @pytest.mark.parametrize( "url", ["http://your_server.com", "https://your_server.com"], ) def test_cli_get_transport_aiohttp(parser, 
url): from gql.transport.aiohttp import AIOHTTPTransport args = parser.parse_args([url]) transport = get_transport(args) assert isinstance(transport, AIOHTTPTransport) @pytest.mark.websockets @pytest.mark.parametrize( "url", ["ws://your_server.com", "wss://your_server.com"], ) def test_cli_get_transport_websockets(parser, url): from gql.transport.websockets import WebsocketsTransport args = parser.parse_args([url]) transport = get_transport(args) assert isinstance(transport, WebsocketsTransport) @pytest.mark.websockets @pytest.mark.parametrize( "url", ["ws://your_server.com", "wss://your_server.com"], ) def test_cli_get_transport_phoenix(parser, url): from gql.transport.phoenix_channel_websockets import ( PhoenixChannelWebsocketsTransport, ) args = parser.parse_args([url, "--transport", "phoenix"]) transport = get_transport(args) assert isinstance(transport, PhoenixChannelWebsocketsTransport) @pytest.mark.websockets @pytest.mark.botocore @pytest.mark.parametrize( "url", [ "wss://XXXXXX.appsync-api.eu-west-3.amazonaws.com/graphql", "wss://noregion.amazonaws.com/graphql", ], ) def test_cli_get_transport_appsync_websockets_iam(parser, url): args = parser.parse_args([url, "--transport", "appsync_websockets"]) transport = get_transport(args) # In the tests, the AWS Appsync credentials are not set # So the transport is None assert transport is None @pytest.mark.asyncio @pytest.mark.websockets @pytest.mark.botocore @pytest.mark.parametrize( "url", ["wss://XXXXXX.appsync-api.eu-west-3.amazonaws.com/graphql"], ) async def test_cli_main_appsync_websockets_iam(parser, url): args = parser.parse_args([url, "--transport", "appsync_websockets"]) exit_code = await main(args) # In the tests, the AWS Appsync credentials are not set # So the transport is None and the main returns # an exit_code of 1 assert exit_code == 1 @pytest.mark.websockets @pytest.mark.parametrize( "url", ["wss://XXXXXX.appsync-api.eu-west-3.amazonaws.com/graphql"], ) def 
test_cli_get_transport_appsync_websockets_api_key(parser, url): from gql.transport.appsync_websockets import AppSyncWebsocketsTransport from gql.transport.appsync_auth import AppSyncApiKeyAuthentication args = parser.parse_args( [url, "--transport", "appsync_websockets", "--api-key", "test-api-key"] ) transport = get_transport(args) assert isinstance(transport, AppSyncWebsocketsTransport) assert isinstance(transport.auth, AppSyncApiKeyAuthentication) assert transport.auth.api_key == "test-api-key" @pytest.mark.websockets @pytest.mark.parametrize( "url", ["wss://XXXXXX.appsync-api.eu-west-3.amazonaws.com/graphql"], ) def test_cli_get_transport_appsync_websockets_jwt(parser, url): from gql.transport.appsync_websockets import AppSyncWebsocketsTransport from gql.transport.appsync_auth import AppSyncJWTAuthentication args = parser.parse_args( [url, "--transport", "appsync_websockets", "--jwt", "test-jwt"] ) transport = get_transport(args) assert isinstance(transport, AppSyncWebsocketsTransport) assert isinstance(transport.auth, AppSyncJWTAuthentication) assert transport.auth.jwt == "test-jwt" @pytest.mark.aiohttp @pytest.mark.botocore @pytest.mark.parametrize( "url", ["https://XXXXXX.appsync-api.eu-west-3.amazonaws.com/graphql"], ) def test_cli_get_transport_appsync_http_iam(parser, url): from gql.transport.aiohttp import AIOHTTPTransport args = parser.parse_args([url, "--transport", "appsync_http"]) transport = get_transport(args) assert isinstance(transport, AIOHTTPTransport) @pytest.mark.aiohttp @pytest.mark.parametrize( "url", ["https://XXXXXX.appsync-api.eu-west-3.amazonaws.com/graphql"], ) def test_cli_get_transport_appsync_http_api_key(parser, url): from gql.transport.aiohttp import AIOHTTPTransport from gql.transport.appsync_auth import AppSyncApiKeyAuthentication args = parser.parse_args( [url, "--transport", "appsync_http", "--api-key", "test-api-key"] ) transport = get_transport(args) assert isinstance(transport, AIOHTTPTransport) assert 
isinstance(transport.auth, AppSyncApiKeyAuthentication) assert transport.auth.api_key == "test-api-key" @pytest.mark.aiohttp @pytest.mark.parametrize( "url", ["https://XXXXXX.appsync-api.eu-west-3.amazonaws.com/graphql"], ) def test_cli_get_transport_appsync_http_jwt(parser, url): from gql.transport.aiohttp import AIOHTTPTransport from gql.transport.appsync_auth import AppSyncJWTAuthentication args = parser.parse_args([url, "--transport", "appsync_http", "--jwt", "test-jwt"]) transport = get_transport(args) assert isinstance(transport, AIOHTTPTransport) assert isinstance(transport.auth, AppSyncJWTAuthentication) assert transport.auth.jwt == "test-jwt" def test_cli_get_transport_no_protocol(parser): args = parser.parse_args(["your_server.com"]) with pytest.raises(ValueError): get_transport(args) def test_cli_ep_version(script_runner): ret = script_runner.run("gql-cli", "--version") assert ret.success assert ret.stdout == f"v{__version__}\n" assert ret.stderr == "" def test_cli_parse_schema_download(parser): args = parser.parse_args( [ "https://your_server.com", "--schema-download", "descriptions:false", "input_value_deprecation:true", "specified_by_url:True", "schema_description:true", "directive_is_repeatable:true", "--print-schema", ] ) introspection_args = get_introspection_args(args) expected_args = { "descriptions": False, "input_value_deprecation": True, "specified_by_url": True, "schema_description": True, "directive_is_repeatable": True, } assert introspection_args == expected_args @pytest.mark.parametrize( "invalid_args", [ ["https://your_server.com", "--schema-download", "ArgWithoutColon"], ["https://your_server.com", "--schema-download", "blahblah:true"], ["https://your_server.com", "--schema-download", "descriptions:invalid_bool"], ], ) def test_cli_parse_schema_download_invalid_arg(parser, invalid_args): args = parser.parse_args(invalid_args) with pytest.raises(ValueError): get_introspection_args(args) 
gql-3.6.0b2/tests/test_client.py000066400000000000000000000174051460703211500165660ustar00rootroot00000000000000import os from contextlib import suppress import mock import pytest from graphql import build_ast_schema, parse from gql import Client, GraphQLRequest, gql from gql.transport import Transport from gql.transport.exceptions import TransportQueryError with suppress(ModuleNotFoundError): from urllib3.exceptions import NewConnectionError @pytest.fixture def http_transport_query(): return gql( """ query getContinents { continents { code name } } """ ) def test_request_transport_not_implemented(http_transport_query): class RandomTransport(Transport): def execute(self): super(RandomTransport, self).execute(http_transport_query) with pytest.raises(NotImplementedError) as exc_info: RandomTransport().execute() assert "Any Transport subclass must implement execute method" == str(exc_info.value) with pytest.raises(NotImplementedError) as exc_info: RandomTransport().execute_batch([]) assert "This Transport has not implemented the execute_batch method" == str( exc_info.value ) @pytest.mark.aiohttp def test_request_async_execute_batch_not_implemented_yet(): from gql.transport.aiohttp import AIOHTTPTransport transport = AIOHTTPTransport(url="http://localhost/") client = Client(transport=transport) with pytest.raises(NotImplementedError) as exc_info: client.execute_batch([GraphQLRequest(document=gql("{dummy}"))]) assert "Batching is not implemented for async yet." == str(exc_info.value) @pytest.mark.requests @mock.patch("urllib3.connection.HTTPConnection._new_conn") def test_retries_on_transport(execute_mock): """Testing retries on the transport level This forces us to override low-level APIs because the retry mechanism on the urllib3 (which uses requests) is pretty low-level itself. 
""" from gql.transport.requests import RequestsHTTPTransport expected_retries = 3 execute_mock.side_effect = NewConnectionError( "Should be HTTPConnection", "Fake connection error" ) transport = RequestsHTTPTransport( url="http://127.0.0.1:8000/graphql", retries=expected_retries, ) client = Client(transport=transport) query = gql( """ { myFavoriteFilm: film(id:"RmlsbToz") { id title episodeId } } """ ) with client as session: # We're using the client as context manager with pytest.raises(Exception): session.execute(query) # This might look strange compared to the previous test, but making 3 retries # means you're actually doing 4 calls. assert execute_mock.call_count == expected_retries + 1 execute_mock.reset_mock() queries = map(lambda d: GraphQLRequest(document=d), [query, query, query]) with client as session: # We're using the client as context manager with pytest.raises(Exception): session.execute_batch(queries) # This might look strange compared to the previous test, but making 3 retries # means you're actually doing 4 calls. assert execute_mock.call_count == expected_retries + 1 def test_no_schema_exception(): with pytest.raises(AssertionError) as exc_info: client = Client() client.validate("") assert "Cannot validate the document locally, you need to pass a schema." in str( exc_info.value ) @pytest.mark.online @pytest.mark.requests def test_execute_result_error(): from gql.transport.requests import RequestsHTTPTransport client = Client( transport=RequestsHTTPTransport(url="https://countries.trevorblades.com/"), ) failing_query = gql( """ query getContinents { continents { code name id } } """ ) with pytest.raises(TransportQueryError) as exc_info: client.execute(failing_query) assert 'Cannot query field "id" on type "Continent".' 
in str(exc_info.value) """ Batching is not supported anymore on countries backend with pytest.raises(TransportQueryError) as exc_info: client.execute_batch([GraphQLRequest(document=failing_query)]) assert 'Cannot query field "id" on type "Continent".' in str(exc_info.value) """ @pytest.mark.online @pytest.mark.requests def test_http_transport_verify_error(http_transport_query): from gql.transport.requests import RequestsHTTPTransport with Client( transport=RequestsHTTPTransport( url="https://countries.trevorblades.com/", verify=False, ) ) as client: with pytest.warns(Warning) as record: client.execute(http_transport_query) assert len(record) == 1 assert "Unverified HTTPS request is being made to host" in str( record[0].message ) """ Batching is not supported anymore on countries backend with pytest.warns(Warning) as record: client.execute_batch([GraphQLRequest(document=http_transport_query)]) assert len(record) == 1 assert "Unverified HTTPS request is being made to host" in str( record[0].message ) """ @pytest.mark.online @pytest.mark.requests def test_http_transport_specify_method_valid(http_transport_query): from gql.transport.requests import RequestsHTTPTransport with Client( transport=RequestsHTTPTransport( url="https://countries.trevorblades.com/", method="POST", ) ) as client: result = client.execute(http_transport_query) assert result is not None """ Batching is not supported anymore on countries backend result = client.execute_batch([GraphQLRequest(document=http_transport_query)]) assert result is not None """ def test_gql(): sample_path = os.path.join( os.path.dirname(os.path.abspath(__file__)), "fixtures", "graphql", "sample.graphql", ) with open(sample_path) as source: document = parse(source.read()) schema = build_ast_schema(document) query = gql( """ query getUser { user(id: "1000") { id username } } """ ) client = Client(schema=schema) result = client.execute(query) assert result["user"] is None @pytest.mark.requests def 
test_sync_transport_close_on_schema_retrieval_failure(): """ Ensure that the transport session is closed if an error occurs when entering the context manager (e.g., because schema retrieval fails) """ from gql.transport.requests import RequestsHTTPTransport transport = RequestsHTTPTransport(url="http://localhost/") client = Client(transport=transport, fetch_schema_from_transport=True) try: with client: pass except Exception: # we don't care what exception is thrown, we just want to check if the # transport is closed afterwards pass assert client.transport.session is None @pytest.mark.aiohttp @pytest.mark.asyncio async def test_async_transport_close_on_schema_retrieval_failure(): """ Ensure that the transport session is closed if an error occurs when entering the context manager (e.g., because schema retrieval fails) """ from gql.transport.aiohttp import AIOHTTPTransport transport = AIOHTTPTransport(url="http://localhost/") client = Client(transport=transport, fetch_schema_from_transport=True) try: async with client: pass except Exception: # we don't care what exception is thrown, we just want to check if the # transport is closed afterwards pass assert client.transport.session is None gql-3.6.0b2/tests/test_graphql_request.py000066400000000000000000000125271460703211500205160ustar00rootroot00000000000000import asyncio from math import isfinite from typing import Any, Dict, NamedTuple, Optional import pytest from graphql.error import GraphQLError from graphql.language import ValueNode from graphql.pyutils import inspect from graphql.type import ( GraphQLArgument, GraphQLField, GraphQLFloat, GraphQLList, GraphQLNonNull, GraphQLObjectType, GraphQLScalarType, GraphQLSchema, ) from graphql.utilities import value_from_ast_untyped from gql import GraphQLRequest, gql from .conftest import MS # Marking all tests in this file with the aiohttp marker pytestmark = pytest.mark.aiohttp class Money(NamedTuple): amount: float currency: str def is_finite(value: Any) -> bool: 
"""Return true if a value is a finite number.""" return (isinstance(value, int) and not isinstance(value, bool)) or ( isinstance(value, float) and isfinite(value) ) def serialize_money(output_value: Any) -> Dict[str, Any]: if not isinstance(output_value, Money): raise GraphQLError("Cannot serialize money value: " + inspect(output_value)) return output_value._asdict() def parse_money_value(input_value: Any) -> Money: """Using Money custom scalar from graphql-core tests except here the input value is supposed to be a dict instead of a Money object.""" """ if isinstance(input_value, Money): return input_value """ if isinstance(input_value, dict): amount = input_value.get("amount", None) currency = input_value.get("currency", None) if not is_finite(amount) or not isinstance(currency, str): raise GraphQLError("Cannot parse money value dict: " + inspect(input_value)) return Money(float(amount), currency) else: raise GraphQLError("Cannot parse money value: " + inspect(input_value)) def parse_money_literal( value_node: ValueNode, variables: Optional[Dict[str, Any]] = None ) -> Money: money = value_from_ast_untyped(value_node, variables) if variables is not None and ( # variables are not set when checked with ValuesIOfCorrectTypeRule not money or not is_finite(money.get("amount")) or not isinstance(money.get("currency"), str) ): raise GraphQLError("Cannot parse literal money value: " + inspect(money)) return Money(**money) MoneyScalar = GraphQLScalarType( name="Money", serialize=serialize_money, parse_value=parse_money_value, parse_literal=parse_money_literal, ) root_value = { "balance": Money(42, "DM"), "friends_balance": [Money(12, "EUR"), Money(24, "EUR"), Money(150, "DM")], "countries_balance": { "Belgium": Money(15000, "EUR"), "Luxembourg": Money(99999, "EUR"), }, } def resolve_balance(root, _info): return root["balance"] def resolve_friends_balance(root, _info): return root["friends_balance"] def resolve_countries_balance(root, _info): return root["countries_balance"] 
def resolve_belgium_balance(countries_balance, _info): return countries_balance["Belgium"] def resolve_luxembourg_balance(countries_balance, _info): return countries_balance["Luxembourg"] def resolve_to_euros(_root, _info, money): amount = money.amount currency = money.currency if not amount or currency == "EUR": return amount if currency == "DM": return amount * 0.5 raise ValueError("Cannot convert to euros: " + inspect(money)) countriesBalance = GraphQLObjectType( name="CountriesBalance", fields={ "Belgium": GraphQLField( GraphQLNonNull(MoneyScalar), resolve=resolve_belgium_balance ), "Luxembourg": GraphQLField( GraphQLNonNull(MoneyScalar), resolve=resolve_luxembourg_balance ), }, ) queryType = GraphQLObjectType( name="RootQueryType", fields={ "balance": GraphQLField(MoneyScalar, resolve=resolve_balance), "toEuros": GraphQLField( GraphQLFloat, args={"money": GraphQLArgument(MoneyScalar)}, resolve=resolve_to_euros, ), "friends_balance": GraphQLField( GraphQLList(MoneyScalar), resolve=resolve_friends_balance ), "countries_balance": GraphQLField( GraphQLNonNull(countriesBalance), resolve=resolve_countries_balance, ), }, ) def resolve_spent_money(spent_money, _info, **kwargs): return spent_money async def subscribe_spend_all(_root, _info, money): while money.amount > 0: money = Money(money.amount - 1, money.currency) yield money await asyncio.sleep(1 * MS) subscriptionType = GraphQLObjectType( "Subscription", fields=lambda: { "spend": GraphQLField( MoneyScalar, args={"money": GraphQLArgument(MoneyScalar)}, subscribe=subscribe_spend_all, resolve=resolve_spent_money, ) }, ) schema = GraphQLSchema( query=queryType, subscription=subscriptionType, ) def test_serialize_variables_using_money_example(): req = GraphQLRequest(document=gql("{balance}")) money_value = Money(10, "DM") req = GraphQLRequest( document=gql("query myquery($money: Money) {toEuros(money: $money)}"), variable_values={"money": money_value}, ) req = req.serialize_variable_values(schema) assert 
req.variable_values == {"money": {"amount": 10, "currency": "DM"}} gql-3.6.0b2/tests/test_graphqlws_exceptions.py000066400000000000000000000165451460703211500215650ustar00rootroot00000000000000import asyncio from typing import List import pytest from gql import Client, gql from gql.transport.exceptions import ( TransportClosed, TransportProtocolError, TransportQueryError, ) from .conftest import WebSocketServerHelper # Marking all tests in this file with the websockets marker pytestmark = pytest.mark.websockets invalid_query_str = """ query getContinents { continents { code bloh } } """ invalid_query1_server_answer = ( '{{"type":"next","id":"{query_id}",' '"payload":{{"errors":[' '{{"message":"Cannot query field \\"bloh\\" on type \\"Continent\\".",' '"locations":[{{"line":4,"column":5}}],' '"extensions":{{"code":"INTERNAL_SERVER_ERROR"}}}}]}}}}' ) invalid_query1_server = [invalid_query1_server_answer] @pytest.mark.asyncio @pytest.mark.parametrize("graphqlws_server", [invalid_query1_server], indirect=True) @pytest.mark.parametrize("query_str", [invalid_query_str]) async def test_graphqlws_invalid_query( event_loop, client_and_graphqlws_server, query_str ): session, server = client_and_graphqlws_server query = gql(query_str) with pytest.raises(TransportQueryError) as exc_info: await session.execute(query) exception = exc_info.value assert isinstance(exception.errors, List) error = exception.errors[0] assert error["extensions"]["code"] == "INTERNAL_SERVER_ERROR" invalid_subscription_str = """ subscription getContinents { continents { code bloh } } """ async def server_invalid_subscription(ws, path): await WebSocketServerHelper.send_connection_ack(ws) await ws.recv() await ws.send(invalid_query1_server_answer.format(query_id=1)) await WebSocketServerHelper.send_complete(ws, 1) await ws.wait_closed() @pytest.mark.asyncio @pytest.mark.parametrize( "graphqlws_server", [server_invalid_subscription], indirect=True ) @pytest.mark.parametrize("query_str", 
[invalid_subscription_str]) async def test_graphqlws_invalid_subscription( event_loop, client_and_graphqlws_server, query_str ): session, server = client_and_graphqlws_server query = gql(query_str) with pytest.raises(TransportQueryError) as exc_info: async for result in session.subscribe(query): pass exception = exc_info.value assert isinstance(exception.errors, List) error = exception.errors[0] assert error["extensions"]["code"] == "INTERNAL_SERVER_ERROR" async def server_no_ack(ws, path): await ws.wait_closed() @pytest.mark.asyncio @pytest.mark.parametrize("graphqlws_server", [server_no_ack], indirect=True) @pytest.mark.parametrize("query_str", [invalid_query_str]) async def test_graphqlws_server_does_not_send_ack( event_loop, graphqlws_server, query_str ): from gql.transport.websockets import WebsocketsTransport url = f"ws://{graphqlws_server.hostname}:{graphqlws_server.port}/graphql" sample_transport = WebsocketsTransport(url=url, ack_timeout=1) with pytest.raises(asyncio.TimeoutError): async with Client(transport=sample_transport): pass invalid_query_server_answer = ( '{"id":"1","type":"error","payload":[{"message":"Cannot query field ' '\\"helo\\" on type \\"Query\\". 
Did you mean \\"hello\\"?",' '"locations":[{"line":2,"column":3}]}]}' ) async def server_invalid_query(ws, path): await WebSocketServerHelper.send_connection_ack(ws) result = await ws.recv() print(f"Server received: {result}") await ws.send(invalid_query_server_answer) await WebSocketServerHelper.wait_connection_terminate(ws) await ws.wait_closed() @pytest.mark.asyncio @pytest.mark.parametrize("graphqlws_server", [server_invalid_query], indirect=True) async def test_graphqlws_sending_invalid_query(event_loop, client_and_graphqlws_server): session, server = client_and_graphqlws_server query = gql("{helo}") with pytest.raises(TransportQueryError) as exc_info: await session.execute(query) exception = exc_info.value assert isinstance(exception.errors, List) error = exception.errors[0] assert ( error["message"] == 'Cannot query field "helo" on type "Query". Did you mean "hello"?' ) not_json_answer = ["BLAHBLAH"] missing_type_answer = ["{}"] missing_id_answer_1 = ['{"type": "next"}'] missing_id_answer_2 = ['{"type": "error"}'] missing_id_answer_3 = ['{"type": "complete"}'] data_without_payload = ['{"type": "next", "id":"1"}'] error_without_payload = ['{"type": "error", "id":"1"}'] error_with_payload_not_a_list = ['{"type": "error", "id":"1", "payload": "NOT A LIST"}'] payload_is_not_a_dict = ['{"type": "next", "id":"1", "payload": "BLAH"}'] empty_payload = ['{"type": "next", "id":"1", "payload": {}}'] sending_bytes = [b"\x01\x02\x03"] @pytest.mark.asyncio @pytest.mark.parametrize( "graphqlws_server", [ not_json_answer, missing_type_answer, missing_id_answer_1, missing_id_answer_2, missing_id_answer_3, data_without_payload, error_without_payload, payload_is_not_a_dict, error_with_payload_not_a_list, empty_payload, sending_bytes, ], indirect=True, ) async def test_graphqlws_transport_protocol_errors( event_loop, client_and_graphqlws_server ): session, server = client_and_graphqlws_server query = gql("query { hello }") with pytest.raises(TransportProtocolError): await 
session.execute(query) async def server_without_ack(ws, path): # Sending something else than an ack await WebSocketServerHelper.send_complete(ws, 1) await ws.wait_closed() @pytest.mark.asyncio @pytest.mark.parametrize("graphqlws_server", [server_without_ack], indirect=True) async def test_graphqlws_server_does_not_ack(event_loop, graphqlws_server): from gql.transport.websockets import WebsocketsTransport url = f"ws://{graphqlws_server.hostname}:{graphqlws_server.port}/graphql" print(f"url = {url}") sample_transport = WebsocketsTransport(url=url) with pytest.raises(TransportProtocolError): async with Client(transport=sample_transport): pass async def server_closing_directly(ws, path): await ws.close() @pytest.mark.asyncio @pytest.mark.parametrize("graphqlws_server", [server_closing_directly], indirect=True) async def test_graphqlws_server_closing_directly(event_loop, graphqlws_server): import websockets from gql.transport.websockets import WebsocketsTransport url = f"ws://{graphqlws_server.hostname}:{graphqlws_server.port}/graphql" print(f"url = {url}") sample_transport = WebsocketsTransport(url=url) with pytest.raises(websockets.exceptions.ConnectionClosed): async with Client(transport=sample_transport): pass async def server_closing_after_ack(ws, path): await WebSocketServerHelper.send_connection_ack(ws) await ws.close() @pytest.mark.asyncio @pytest.mark.parametrize("graphqlws_server", [server_closing_after_ack], indirect=True) async def test_graphqlws_server_closing_after_ack( event_loop, client_and_graphqlws_server ): import websockets session, server = client_and_graphqlws_server query = gql("query { hello }") with pytest.raises(websockets.exceptions.ConnectionClosed): await session.execute(query) await session.transport.wait_closed() with pytest.raises(TransportClosed): await session.execute(query) gql-3.6.0b2/tests/test_graphqlws_subscription.py000066400000000000000000000651711460703211500221270ustar00rootroot00000000000000import asyncio import json import 
sys import warnings from typing import List import pytest from parse import search from gql import Client, gql from gql.transport.exceptions import TransportServerError from .conftest import MS, WebSocketServerHelper # Marking all tests in this file with the websockets marker pytestmark = pytest.mark.websockets countdown_server_answer = ( '{{"type":"next","id":"{query_id}","payload":{{"data":{{"number":{number}}}}}}}' ) COUNTING_DELAY = 20 * MS PING_SENDING_DELAY = 50 * MS PONG_TIMEOUT = 100 * MS # List which can used to store received messages by the server logged_messages: List[str] = [] def server_countdown_factory( keepalive=False, answer_pings=True, simulate_disconnect=False ): async def server_countdown_template(ws, path): import websockets logged_messages.clear() try: await WebSocketServerHelper.send_connection_ack( ws, payload="dummy_connection_ack_payload" ) result = await ws.recv() logged_messages.append(result) json_result = json.loads(result) assert json_result["type"] == "subscribe" payload = json_result["payload"] query = payload["query"] query_id = json_result["id"] count_found = search("count: {:d}", query) count = count_found[0] print(f" Server: Countdown started from: {count}") if simulate_disconnect and count == 8: await ws.close() pong_received: asyncio.Event = asyncio.Event() async def counting_coro(): print(" Server: counting task started") try: for number in range(count, -1, -1): await ws.send( countdown_server_answer.format( query_id=query_id, number=number ) ) await asyncio.sleep(COUNTING_DELAY) finally: print(" Server: counting task ended") print(" Server: starting counting task") counting_task = asyncio.ensure_future(counting_coro()) async def keepalive_coro(): print(" Server: keepalive task started") try: while True: await asyncio.sleep(PING_SENDING_DELAY) try: # Send a ping await WebSocketServerHelper.send_ping( ws, payload="dummy_ping_payload" ) # Wait for a pong try: await asyncio.wait_for( pong_received.wait(), PONG_TIMEOUT ) except 
asyncio.TimeoutError: print( "\n Server: No pong received in time!\n" ) break pong_received.clear() except websockets.exceptions.ConnectionClosed: break finally: print(" Server: keepalive task ended") if keepalive: print(" Server: starting keepalive task") keepalive_task = asyncio.ensure_future(keepalive_coro()) async def receiving_coro(): print(" Server: receiving task started") try: nonlocal counting_task while True: try: result = await ws.recv() logged_messages.append(result) except websockets.exceptions.ConnectionClosed: break json_result = json.loads(result) answer_type = json_result["type"] if answer_type == "complete" and json_result["id"] == str( query_id ): print("Cancelling counting task now") counting_task.cancel() if keepalive: print("Cancelling keep alive task now") keepalive_task.cancel() elif answer_type == "ping": if answer_pings: payload = json_result.get("payload", None) await WebSocketServerHelper.send_pong( ws, payload=payload ) elif answer_type == "pong": pong_received.set() finally: print(" Server: receiving task ended") if keepalive: keepalive_task.cancel() print(" Server: starting receiving task") receiving_task = asyncio.ensure_future(receiving_coro()) try: print(" Server: waiting for counting task to complete") await counting_task except asyncio.CancelledError: print(" Server: Now counting task is cancelled") print(" Server: sending complete message") await WebSocketServerHelper.send_complete(ws, query_id) if keepalive: print(" Server: cancelling keepalive task") keepalive_task.cancel() try: await keepalive_task except asyncio.CancelledError: print(" Server: Now keepalive task is cancelled") print(" Server: waiting for client to close the connection") try: await asyncio.wait_for(receiving_task, 1000 * MS) except asyncio.TimeoutError: pass print(" Server: cancelling receiving task") receiving_task.cancel() try: await receiving_task except asyncio.CancelledError: print(" Server: Now receiving task is cancelled") except 
websockets.exceptions.ConnectionClosedOK: pass except AssertionError as e: print(f"\n Server: Assertion failed: {e!s}\n") finally: print(" Server: waiting for websocket connection to close") await ws.wait_closed() print(" Server: connection closed") return server_countdown_template async def server_countdown(ws, path): server = server_countdown_factory() await server(ws, path) async def server_countdown_keepalive(ws, path): server = server_countdown_factory(keepalive=True) await server(ws, path) async def server_countdown_dont_answer_pings(ws, path): server = server_countdown_factory(answer_pings=False) await server(ws, path) async def server_countdown_disconnect(ws, path): server = server_countdown_factory(simulate_disconnect=True) await server(ws, path) countdown_subscription_str = """ subscription {{ countdown (count: {count}) {{ number }} }} """ @pytest.mark.asyncio @pytest.mark.parametrize("graphqlws_server", [server_countdown], indirect=True) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_graphqlws_subscription( event_loop, client_and_graphqlws_server, subscription_str ): session, server = client_and_graphqlws_server count = 10 subscription = gql(subscription_str.format(count=count)) async for result in session.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 assert count == -1 @pytest.mark.asyncio @pytest.mark.parametrize("graphqlws_server", [server_countdown], indirect=True) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_graphqlws_subscription_break( event_loop, client_and_graphqlws_server, subscription_str ): session, server = client_and_graphqlws_server count = 10 subscription = gql(subscription_str.format(count=count)) async for result in session.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count if count <= 5: # Note: the following line is 
only necessary for pypy3 v3.6.1 if sys.version_info < (3, 7): await session._generator.aclose() break count -= 1 assert count == 5 @pytest.mark.asyncio @pytest.mark.parametrize("graphqlws_server", [server_countdown], indirect=True) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_graphqlws_subscription_task_cancel( event_loop, client_and_graphqlws_server, subscription_str ): session, server = client_and_graphqlws_server count = 10 subscription = gql(subscription_str.format(count=count)) async def task_coro(): nonlocal count async for result in session.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 task = asyncio.ensure_future(task_coro()) async def cancel_task_coro(): nonlocal task await asyncio.sleep(5.5 * COUNTING_DELAY) task.cancel() cancel_task = asyncio.ensure_future(cancel_task_coro()) await asyncio.gather(task, cancel_task) assert count > 0 @pytest.mark.asyncio @pytest.mark.parametrize("graphqlws_server", [server_countdown], indirect=True) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_graphqlws_subscription_close_transport( event_loop, client_and_graphqlws_server, subscription_str ): session, server = client_and_graphqlws_server count = 10 subscription = gql(subscription_str.format(count=count)) async def task_coro(): nonlocal count async for result in session.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 task = asyncio.ensure_future(task_coro()) async def close_transport_task_coro(): nonlocal task await asyncio.sleep(5.5 * COUNTING_DELAY) await session.transport.close() close_transport_task = asyncio.ensure_future(close_transport_task_coro()) await asyncio.gather(task, close_transport_task) assert count > 0 async def server_countdown_close_connection_in_middle(ws, path): await WebSocketServerHelper.send_connection_ack(ws) 
result = await ws.recv() json_result = json.loads(result) assert json_result["type"] == "subscribe" payload = json_result["payload"] query = payload["query"] query_id = json_result["id"] count_found = search("count: {:d}", query) count = count_found[0] stopping_before = count // 2 print(f"Countdown started from: {count}, stopping server before {stopping_before}") for number in range(count, stopping_before, -1): await ws.send(countdown_server_answer.format(query_id=query_id, number=number)) await asyncio.sleep(COUNTING_DELAY) print("Closing server while subscription is still running now") await ws.close() await ws.wait_closed() print("Server is now closed") @pytest.mark.asyncio @pytest.mark.parametrize( "graphqlws_server", [server_countdown_close_connection_in_middle], indirect=True ) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_graphqlws_subscription_server_connection_closed( event_loop, client_and_graphqlws_server, subscription_str ): import websockets session, server = client_and_graphqlws_server count = 10 subscription = gql(subscription_str.format(count=count)) with pytest.raises(websockets.exceptions.ConnectionClosedOK): async for result in session.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 @pytest.mark.asyncio @pytest.mark.parametrize("graphqlws_server", [server_countdown], indirect=True) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_graphqlws_subscription_with_operation_name( event_loop, client_and_graphqlws_server, subscription_str ): session, server = client_and_graphqlws_server count = 10 subscription = gql(subscription_str.format(count=count)) async for result in session.subscribe( subscription, operation_name="CountdownSubscription" ): number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 assert count == -1 # Check that the query contains the 
operationName assert '"operationName": "CountdownSubscription"' in logged_messages[0] @pytest.mark.asyncio @pytest.mark.parametrize( "graphqlws_server", [server_countdown_keepalive], indirect=True ) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_graphqlws_subscription_with_keepalive( event_loop, client_and_graphqlws_server, subscription_str ): session, server = client_and_graphqlws_server count = 10 subscription = gql(subscription_str.format(count=count)) async for result in session.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 assert count == -1 assert "ping" in session.transport.payloads assert session.transport.payloads["ping"] == "dummy_ping_payload" assert ( session.transport.payloads["connection_ack"] == "dummy_connection_ack_payload" ) @pytest.mark.asyncio @pytest.mark.parametrize( "graphqlws_server", [server_countdown_keepalive], indirect=True ) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_graphqlws_subscription_with_keepalive_with_timeout_ok( event_loop, graphqlws_server, subscription_str ): from gql.transport.websockets import WebsocketsTransport path = "/graphql" url = f"ws://{graphqlws_server.hostname}:{graphqlws_server.port}{path}" transport = WebsocketsTransport(url=url, keep_alive_timeout=(5 * COUNTING_DELAY)) client = Client(transport=transport) count = 10 subscription = gql(subscription_str.format(count=count)) async with client as session: async for result in session.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 assert count == -1 @pytest.mark.asyncio @pytest.mark.parametrize( "graphqlws_server", [server_countdown_keepalive], indirect=True ) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_graphqlws_subscription_with_keepalive_with_timeout_nok( event_loop, graphqlws_server, 
subscription_str ): from gql.transport.websockets import WebsocketsTransport path = "/graphql" url = f"ws://{graphqlws_server.hostname}:{graphqlws_server.port}{path}" transport = WebsocketsTransport(url=url, keep_alive_timeout=(COUNTING_DELAY / 2)) client = Client(transport=transport) count = 10 subscription = gql(subscription_str.format(count=count)) async with client as session: with pytest.raises(TransportServerError) as exc_info: async for result in session.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 assert "No keep-alive message has been received" in str(exc_info.value) @pytest.mark.asyncio @pytest.mark.parametrize( "graphqlws_server", [server_countdown_keepalive], indirect=True ) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_graphqlws_subscription_with_ping_interval_ok( event_loop, graphqlws_server, subscription_str ): from gql.transport.websockets import WebsocketsTransport path = "/graphql" url = f"ws://{graphqlws_server.hostname}:{graphqlws_server.port}{path}" transport = WebsocketsTransport( url=url, ping_interval=(5 * COUNTING_DELAY), pong_timeout=(4 * COUNTING_DELAY), ) client = Client(transport=transport) count = 10 subscription = gql(subscription_str.format(count=count)) async with client as session: async for result in session.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 assert count == -1 @pytest.mark.asyncio @pytest.mark.parametrize( "graphqlws_server", [server_countdown_dont_answer_pings], indirect=True ) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_graphqlws_subscription_with_ping_interval_nok( event_loop, graphqlws_server, subscription_str ): from gql.transport.websockets import WebsocketsTransport path = "/graphql" url = f"ws://{graphqlws_server.hostname}:{graphqlws_server.port}{path}" transport = 
WebsocketsTransport(url=url, ping_interval=(5 * COUNTING_DELAY)) client = Client(transport=transport) count = 10 subscription = gql(subscription_str.format(count=count)) async with client as session: with pytest.raises(TransportServerError) as exc_info: async for result in session.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 assert "No pong received" in str(exc_info.value) @pytest.mark.asyncio @pytest.mark.parametrize( "graphqlws_server", [server_countdown_keepalive], indirect=True ) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_graphqlws_subscription_manual_pings_with_payload( event_loop, graphqlws_server, subscription_str ): from gql.transport.websockets import WebsocketsTransport path = "/graphql" url = f"ws://{graphqlws_server.hostname}:{graphqlws_server.port}{path}" transport = WebsocketsTransport(url=url) client = Client(transport=transport) count = 10 subscription = gql(subscription_str.format(count=count)) async with client as session: async for result in session.subscribe(subscription): number = result["number"] print(f"Number received: {number}") payload = {"count_received": count} await transport.send_ping(payload=payload) await asyncio.wait_for(transport.pong_received.wait(), 10000 * MS) transport.pong_received.clear() assert transport.payloads["pong"] == payload assert number == count count -= 1 assert count == -1 @pytest.mark.asyncio @pytest.mark.parametrize( "graphqlws_server", [server_countdown_keepalive], indirect=True ) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_graphqlws_subscription_manual_pong_answers_with_payload( event_loop, graphqlws_server, subscription_str ): from gql.transport.websockets import WebsocketsTransport path = "/graphql" url = f"ws://{graphqlws_server.hostname}:{graphqlws_server.port}{path}" transport = WebsocketsTransport(url=url, answer_pings=False) client = 
Client(transport=transport) count = 10 subscription = gql(subscription_str.format(count=count)) async with client as session: async def answer_ping_coro(): while True: await transport.ping_received.wait() transport.ping_received.clear() await transport.send_pong(payload={"some": "data"}) answer_ping_task = asyncio.ensure_future(answer_ping_coro()) try: async for result in session.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 finally: answer_ping_task.cancel() assert count == -1 @pytest.mark.parametrize( "graphqlws_server", [server_countdown_keepalive], indirect=True ) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) def test_graphqlws_subscription_sync(graphqlws_server, subscription_str): from gql.transport.websockets import WebsocketsTransport url = f"ws://{graphqlws_server.hostname}:{graphqlws_server.port}/graphql" print(f"url = {url}") transport = WebsocketsTransport(url=url) client = Client(transport=transport) count = 10 subscription = gql(subscription_str.format(count=count)) for result in client.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 assert count == -1 @pytest.mark.skipif(sys.platform.startswith("win"), reason="test failing on windows") @pytest.mark.parametrize( "graphqlws_server", [server_countdown_keepalive], indirect=True ) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) def test_graphqlws_subscription_sync_graceful_shutdown( graphqlws_server, subscription_str ): """Note: this test will simulate a control-C happening while a sync subscription is in progress. To do that we will throw a KeyboardInterrupt exception inside the subscription async generator. 
The code should then do a clean close: - send stop messages for each active query - send a connection_terminate message Then the KeyboardInterrupt will be reraise (to warn potential user code) This test does not work on Windows but the behaviour with Windows is correct. """ from gql.transport.websockets import WebsocketsTransport url = f"ws://{graphqlws_server.hostname}:{graphqlws_server.port}/graphql" print(f"url = {url}") transport = WebsocketsTransport(url=url) client = Client(transport=transport) count = 10 subscription = gql(subscription_str.format(count=count)) with pytest.raises(KeyboardInterrupt): for result in client.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count if count == 5: # Simulate a KeyboardInterrupt in the generator with warnings.catch_warnings(): warnings.filterwarnings( "ignore", message="There is no current event loop" ) asyncio.ensure_future( client.session._generator.athrow(KeyboardInterrupt) ) count -= 1 assert count == 4 # Check that the server received a connection_terminate message last # assert logged_messages.pop() == '{"type": "connection_terminate"}' @pytest.mark.asyncio @pytest.mark.parametrize( "graphqlws_server", [server_countdown_keepalive], indirect=True ) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_graphqlws_subscription_running_in_thread( event_loop, graphqlws_server, subscription_str, run_sync_test ): from gql.transport.websockets import WebsocketsTransport def test_code(): path = "/graphql" url = f"ws://{graphqlws_server.hostname}:{graphqlws_server.port}{path}" transport = WebsocketsTransport(url=url) client = Client(transport=transport) count = 10 subscription = gql(subscription_str.format(count=count)) for result in client.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 assert count == -1 await run_sync_test(event_loop, graphqlws_server, 
test_code) @pytest.mark.asyncio @pytest.mark.parametrize( "graphqlws_server", [server_countdown_disconnect], indirect=True ) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) @pytest.mark.parametrize("execute_instead_of_subscribe", [False, True]) async def test_graphqlws_subscription_reconnecting_session( event_loop, graphqlws_server, subscription_str, execute_instead_of_subscribe ): import websockets from gql.transport.websockets import WebsocketsTransport from gql.transport.exceptions import TransportClosed path = "/graphql" url = f"ws://{graphqlws_server.hostname}:{graphqlws_server.port}{path}" transport = WebsocketsTransport(url=url) client = Client(transport=transport) count = 8 subscription_with_disconnect = gql(subscription_str.format(count=count)) count = 10 subscription = gql(subscription_str.format(count=count)) session = await client.connect_async( reconnecting=True, retry_connect=False, retry_execute=False ) # First we make a subscription which will cause a disconnect in the backend # (count=8) try: print("\nSUBSCRIPTION_1_WITH_DISCONNECT\n") async for result in session.subscribe(subscription_with_disconnect): pass except websockets.exceptions.ConnectionClosedOK: pass await asyncio.sleep(50 * MS) # Then with the same session handle, we make a subscription or an execute # which will detect that the transport is closed so that the client could # try to reconnect try: if execute_instead_of_subscribe: print("\nEXECUTION_2\n") await session.execute(subscription) else: print("\nSUBSCRIPTION_2\n") async for result in session.subscribe(subscription): pass except TransportClosed: pass await asyncio.sleep(50 * MS) # And finally with the same session handle, we make a subscription # which works correctly print("\nSUBSCRIPTION_3\n") async for result in session.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 assert count == -1 await client.close_async() 
gql-3.6.0b2/tests/test_http_async_sync.py000066400000000000000000000072621460703211500205200ustar00rootroot00000000000000import pytest from gql import Client, gql @pytest.mark.aiohttp @pytest.mark.online @pytest.mark.asyncio @pytest.mark.parametrize("fetch_schema_from_transport", [True, False]) async def test_async_client_async_transport(event_loop, fetch_schema_from_transport): from gql.transport.aiohttp import AIOHTTPTransport # Create https url url = "https://countries.trevorblades.com/graphql" # Get async transport sample_transport = AIOHTTPTransport(url=url) # Instantiate client async with Client( transport=sample_transport, fetch_schema_from_transport=fetch_schema_from_transport, ) as session: query = gql( """ query getContinents { continents { code name } } """ ) # Execute query result = await session.execute(query) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" if fetch_schema_from_transport: assert session.client.schema is not None @pytest.mark.requests @pytest.mark.online @pytest.mark.asyncio @pytest.mark.parametrize("fetch_schema_from_transport", [True, False]) async def test_async_client_sync_transport(event_loop, fetch_schema_from_transport): from gql.transport.requests import RequestsHTTPTransport url = "http://countries.trevorblades.com/graphql" # Get sync transport sample_transport = RequestsHTTPTransport(url=url, use_json=True) # Impossible to use a sync transport asynchronously with pytest.raises(AssertionError): async with Client( transport=sample_transport, fetch_schema_from_transport=fetch_schema_from_transport, ): pass sample_transport.close() @pytest.mark.aiohttp @pytest.mark.online @pytest.mark.parametrize("fetch_schema_from_transport", [True, False]) def test_sync_client_async_transport(fetch_schema_from_transport): from gql.transport.aiohttp import AIOHTTPTransport # Create https url url = "https://countries.trevorblades.com/graphql" # Get async transport sample_transport = AIOHTTPTransport(url=url) 
# Instanciate client client = Client( transport=sample_transport, fetch_schema_from_transport=fetch_schema_from_transport, ) query = gql( """ query getContinents { continents { code name } } """ ) # Execute query synchronously result = client.execute(query) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" if fetch_schema_from_transport: assert client.schema is not None @pytest.mark.requests @pytest.mark.online @pytest.mark.parametrize("fetch_schema_from_transport", [True, False]) def test_sync_client_sync_transport(fetch_schema_from_transport): from gql.transport.requests import RequestsHTTPTransport # Create https url url = "https://countries.trevorblades.com/graphql" # Get sync transport sample_transport = RequestsHTTPTransport(url=url, use_json=True) # Instanciate client client = Client( transport=sample_transport, fetch_schema_from_transport=fetch_schema_from_transport, ) query = gql( """ query getContinents { continents { code name } } """ ) # Execute query synchronously result = client.execute(query) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" if fetch_schema_from_transport: assert client.schema is not None gql-3.6.0b2/tests/test_httpx.py000066400000000000000000000636641460703211500164670ustar00rootroot00000000000000from typing import Mapping import pytest from gql import Client, gql from gql.transport.exceptions import ( TransportAlreadyConnected, TransportClosed, TransportProtocolError, TransportQueryError, TransportServerError, ) from .conftest import TemporaryFile, strip_braces_spaces # Marking all tests in this file with the httpx marker pytestmark = pytest.mark.httpx query1_str = """ query getContinents { continents { code name } } """ query1_server_answer = ( '{"data":{"continents":[' '{"code":"AF","name":"Africa"},{"code":"AN","name":"Antarctica"},' '{"code":"AS","name":"Asia"},{"code":"EU","name":"Europe"},' '{"code":"NA","name":"North 
America"},{"code":"OC","name":"Oceania"},' '{"code":"SA","name":"South America"}]}}' ) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_query(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.httpx import HTTPXTransport async def handler(request): return web.Response( text=query1_server_answer, content_type="application/json", headers={"dummy": "test1234"}, ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) def test_code(): transport = HTTPXTransport(url=url) with Client(transport=transport) as session: query = gql(query1_str) # Execute query synchronously result = session.execute(query) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" # Checking response headers are saved in the transport assert hasattr(transport, "response_headers") assert isinstance(transport.response_headers, Mapping) assert transport.response_headers["dummy"] == "test1234" await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_cookies(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.httpx import HTTPXTransport async def handler(request): assert "COOKIE" in request.headers assert "cookie1=val1" == request.headers["COOKIE"] return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) def test_code(): transport = HTTPXTransport(url=url, cookies={"cookie1": "val1"}) with Client(transport=transport) as session: query = gql(query1_str) # Execute query synchronously result = session.execute(query) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def 
test_httpx_error_code_401(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.httpx import HTTPXTransport async def handler(request): # Will generate http error code 401 return web.Response( text='{"error":"Unauthorized","message":"401 Client Error: Unauthorized"}', content_type="application/json", status=401, ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) def test_code(): transport = HTTPXTransport(url=url) with Client(transport=transport) as session: query = gql(query1_str) with pytest.raises(TransportServerError) as exc_info: session.execute(query) assert "Client error '401 Unauthorized'" in str(exc_info.value) await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_error_code_429(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.httpx import HTTPXTransport async def handler(request): # Will generate http error code 429 return web.Response( text=""" Too Many Requests

Too Many Requests

I only allow 50 requests per hour to this Web site per logged in user. Try again soon.

""", content_type="text/html", status=429, headers={"Retry-After": "3600"}, ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) def test_code(): transport = HTTPXTransport(url=url) with Client(transport=transport) as session: query = gql(query1_str) with pytest.raises(TransportServerError) as exc_info: session.execute(query) assert "429, message='Too Many Requests'" in str(exc_info.value) # Checking response headers are saved in the transport assert hasattr(transport, "response_headers") assert isinstance(transport.response_headers, Mapping) assert transport.response_headers["Retry-After"] == "3600" @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_error_code_500(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.httpx import HTTPXTransport async def handler(request): # Will generate http error code 500 raise Exception("Server error") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) def test_code(): transport = HTTPXTransport(url=url) with Client(transport=transport) as session: query = gql(query1_str) with pytest.raises(TransportServerError): session.execute(query) await run_sync_test(event_loop, server, test_code) query1_server_error_answer = '{"errors": ["Error 1", "Error 2"]}' @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_error_code(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.httpx import HTTPXTransport async def handler(request): return web.Response( text=query1_server_error_answer, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) def test_code(): transport = HTTPXTransport(url=url) with Client(transport=transport) as session: query = gql(query1_str) with 
pytest.raises(TransportQueryError): session.execute(query) await run_sync_test(event_loop, server, test_code) invalid_protocol_responses = [ "{}", "qlsjfqsdlkj", '{"not_data_or_errors": 35}', ] @pytest.mark.aiohttp @pytest.mark.asyncio @pytest.mark.parametrize("response", invalid_protocol_responses) async def test_httpx_invalid_protocol( event_loop, aiohttp_server, response, run_sync_test ): from aiohttp import web from gql.transport.httpx import HTTPXTransport async def handler(request): return web.Response(text=response, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) def test_code(): transport = HTTPXTransport(url=url) with Client(transport=transport) as session: query = gql(query1_str) with pytest.raises(TransportProtocolError): session.execute(query) await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_cannot_connect_twice(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.httpx import HTTPXTransport async def handler(request): return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) def test_code(): transport = HTTPXTransport(url=url) with Client(transport=transport) as session: with pytest.raises(TransportAlreadyConnected): session.transport.connect() await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_cannot_execute_if_not_connected( event_loop, aiohttp_server, run_sync_test ): from aiohttp import web from gql.transport.httpx import HTTPXTransport async def handler(request): return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = 
await aiohttp_server(app) url = str(server.make_url("/")) def test_code(): transport = HTTPXTransport(url=url) query = gql(query1_str) with pytest.raises(TransportClosed): transport.execute(query) await run_sync_test(event_loop, server, test_code) query1_server_answer_with_extensions = ( '{"data":{"continents":[' '{"code":"AF","name":"Africa"},{"code":"AN","name":"Antarctica"},' '{"code":"AS","name":"Asia"},{"code":"EU","name":"Europe"},' '{"code":"NA","name":"North America"},{"code":"OC","name":"Oceania"},' '{"code":"SA","name":"South America"}]},' '"extensions": {"key1": "val1"}' "}" ) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_query_with_extensions(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.httpx import HTTPXTransport async def handler(request): return web.Response( text=query1_server_answer_with_extensions, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) def test_code(): transport = HTTPXTransport(url=url) with Client(transport=transport) as session: query = gql(query1_str) execution_result = session.execute(query, get_execution_result=True) assert execution_result.extensions["key1"] == "val1" await run_sync_test(event_loop, server, test_code) file_upload_server_answer = '{"data":{"success":true}}' file_upload_mutation_1 = """ mutation($file: Upload!) { uploadFile(input:{other_var:$other_var, file:$file}) { success } } """ file_upload_mutation_1_operations = ( '{"query": "mutation ($file: Upload!) 
{\\n uploadFile(input: {other_var: ' '$other_var, file: $file}) {\\n success\\n }\\n}", "variables": ' '{"file": null, "other_var": 42}}' ) file_upload_mutation_1_map = '{"0": ["variables.file"]}' file_1_content = """ This is a test file This file will be sent in the GraphQL mutation """ @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_file_upload(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.httpx import HTTPXTransport async def single_upload_handler(request): from aiohttp import web reader = await request.multipart() field_0 = await reader.next() assert field_0.name == "operations" field_0_text = await field_0.text() assert strip_braces_spaces(field_0_text) == file_upload_mutation_1_operations field_1 = await reader.next() assert field_1.name == "map" field_1_text = await field_1.text() assert field_1_text == file_upload_mutation_1_map field_2 = await reader.next() assert field_2.name == "0" field_2_text = await field_2.text() assert field_2_text == file_1_content field_3 = await reader.next() assert field_3 is None return web.Response( text=file_upload_server_answer, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", single_upload_handler) server = await aiohttp_server(app) url = str(server.make_url("/")) def test_code(): transport = HTTPXTransport(url=url) with TemporaryFile(file_1_content) as test_file: with Client(transport=transport) as session: query = gql(file_upload_mutation_1) file_path = test_file.filename with open(file_path, "rb") as f: params = {"file": f, "other_var": 42} execution_result = session._execute( query, variable_values=params, upload_files=True ) assert execution_result.data["success"] await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_file_upload_with_content_type( event_loop, aiohttp_server, run_sync_test ): from aiohttp import web from gql.transport.httpx import HTTPXTransport 
async def single_upload_handler(request): from aiohttp import web reader = await request.multipart() field_0 = await reader.next() assert field_0.name == "operations" field_0_text = await field_0.text() assert strip_braces_spaces(field_0_text) == file_upload_mutation_1_operations field_1 = await reader.next() assert field_1.name == "map" field_1_text = await field_1.text() assert field_1_text == file_upload_mutation_1_map field_2 = await reader.next() assert field_2.name == "0" field_2_text = await field_2.text() assert field_2_text == file_1_content # Verifying the content_type assert field_2.headers["Content-Type"] == "application/pdf" field_3 = await reader.next() assert field_3 is None return web.Response( text=file_upload_server_answer, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", single_upload_handler) server = await aiohttp_server(app) url = str(server.make_url("/")) def test_code(): transport = HTTPXTransport(url=url) with TemporaryFile(file_1_content) as test_file: with Client(transport=transport) as session: query = gql(file_upload_mutation_1) file_path = test_file.filename with open(file_path, "rb") as f: # Setting the content_type f.content_type = "application/pdf" params = {"file": f, "other_var": 42} execution_result = session._execute( query, variable_values=params, upload_files=True ) assert execution_result.data["success"] await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_file_upload_additional_headers( event_loop, aiohttp_server, run_sync_test ): from aiohttp import web from gql.transport.httpx import HTTPXTransport async def single_upload_handler(request): from aiohttp import web assert request.headers["X-Auth"] == "foobar" reader = await request.multipart() field_0 = await reader.next() assert field_0.name == "operations" field_0_text = await field_0.text() assert strip_braces_spaces(field_0_text) == file_upload_mutation_1_operations 
field_1 = await reader.next() assert field_1.name == "map" field_1_text = await field_1.text() assert field_1_text == file_upload_mutation_1_map field_2 = await reader.next() assert field_2.name == "0" field_2_text = await field_2.text() assert field_2_text == file_1_content field_3 = await reader.next() assert field_3 is None return web.Response( text=file_upload_server_answer, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", single_upload_handler) server = await aiohttp_server(app) url = str(server.make_url("/")) def test_code(): transport = HTTPXTransport(url=url, headers={"X-Auth": "foobar"}) with TemporaryFile(file_1_content) as test_file: with Client(transport=transport) as session: query = gql(file_upload_mutation_1) file_path = test_file.filename with open(file_path, "rb") as f: params = {"file": f, "other_var": 42} execution_result = session._execute( query, variable_values=params, upload_files=True ) assert execution_result.data["success"] await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_binary_file_upload(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.httpx import HTTPXTransport # This is a sample binary file content containing all possible byte values binary_file_content = bytes(range(0, 256)) async def binary_upload_handler(request): from aiohttp import web reader = await request.multipart() field_0 = await reader.next() assert field_0.name == "operations" field_0_text = await field_0.text() assert strip_braces_spaces(field_0_text) == file_upload_mutation_1_operations field_1 = await reader.next() assert field_1.name == "map" field_1_text = await field_1.text() assert field_1_text == file_upload_mutation_1_map field_2 = await reader.next() assert field_2.name == "0" field_2_binary = await field_2.read() assert field_2_binary == binary_file_content field_3 = await reader.next() assert field_3 is None return 
web.Response( text=file_upload_server_answer, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", binary_upload_handler) server = await aiohttp_server(app) url = str(server.make_url("/")) transport = HTTPXTransport(url=url) def test_code(): with TemporaryFile(binary_file_content) as test_file: with Client(transport=transport) as session: query = gql(file_upload_mutation_1) file_path = test_file.filename with open(file_path, "rb") as f: params = {"file": f, "other_var": 42} execution_result = session._execute( query, variable_values=params, upload_files=True ) assert execution_result.data["success"] await run_sync_test(event_loop, server, test_code) file_upload_mutation_2_operations = ( '{"query": "mutation ($file1: Upload!, $file2: Upload!) {\\n ' 'uploadFile(input: {file1: $file, file2: $file}) {\\n success\\n }\\n}", ' '"variables": {"file1": null, "file2": null}}' ) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_file_upload_two_files(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.httpx import HTTPXTransport file_upload_mutation_2 = """ mutation($file1: Upload!, $file2: Upload!) 
{ uploadFile(input:{file1:$file, file2:$file}) { success } } """ file_upload_mutation_2_map = '{"0": ["variables.file1"], "1": ["variables.file2"]}' file_2_content = """ This is a second test file This file will also be sent in the GraphQL mutation """ async def handler(request): reader = await request.multipart() field_0 = await reader.next() assert field_0.name == "operations" field_0_text = await field_0.text() assert strip_braces_spaces(field_0_text) == file_upload_mutation_2_operations field_1 = await reader.next() assert field_1.name == "map" field_1_text = await field_1.text() assert field_1_text == file_upload_mutation_2_map field_2 = await reader.next() assert field_2.name == "0" field_2_text = await field_2.text() assert field_2_text == file_1_content field_3 = await reader.next() assert field_3.name == "1" field_3_text = await field_3.text() assert field_3_text == file_2_content field_4 = await reader.next() assert field_4 is None return web.Response( text=file_upload_server_answer, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) def test_code(): transport = HTTPXTransport(url=url) with TemporaryFile(file_1_content) as test_file_1: with TemporaryFile(file_2_content) as test_file_2: with Client(transport=transport) as session: query = gql(file_upload_mutation_2) file_path_1 = test_file_1.filename file_path_2 = test_file_2.filename f1 = open(file_path_1, "rb") f2 = open(file_path_2, "rb") params = { "file1": f1, "file2": f2, } execution_result = session._execute( query, variable_values=params, upload_files=True ) assert execution_result.data["success"] f1.close() f2.close() await run_sync_test(event_loop, server, test_code) file_upload_mutation_3_operations = ( '{"query": "mutation ($files: [Upload!]!) 
{\\n uploadFiles' "(input: {files: $files})" ' {\\n success\\n }\\n}", "variables": {"files": [null, null]}}' ) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_file_upload_list_of_two_files( event_loop, aiohttp_server, run_sync_test ): from aiohttp import web from gql.transport.httpx import HTTPXTransport file_upload_mutation_3 = """ mutation($files: [Upload!]!) { uploadFiles(input:{files:$files}) { success } } """ file_upload_mutation_3_map = ( '{"0": ["variables.files.0"], "1": ["variables.files.1"]}' ) file_2_content = """ This is a second test file This file will also be sent in the GraphQL mutation """ async def handler(request): reader = await request.multipart() field_0 = await reader.next() assert field_0.name == "operations" field_0_text = await field_0.text() assert strip_braces_spaces(field_0_text) == file_upload_mutation_3_operations field_1 = await reader.next() assert field_1.name == "map" field_1_text = await field_1.text() assert field_1_text == file_upload_mutation_3_map field_2 = await reader.next() assert field_2.name == "0" field_2_text = await field_2.text() assert field_2_text == file_1_content field_3 = await reader.next() assert field_3.name == "1" field_3_text = await field_3.text() assert field_3_text == file_2_content field_4 = await reader.next() assert field_4 is None return web.Response( text=file_upload_server_answer, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) def test_code(): transport = HTTPXTransport(url=url) with TemporaryFile(file_1_content) as test_file_1: with TemporaryFile(file_2_content) as test_file_2: with Client(transport=transport) as session: query = gql(file_upload_mutation_3) file_path_1 = test_file_1.filename file_path_2 = test_file_2.filename f1 = open(file_path_1, "rb") f2 = open(file_path_2, "rb") params = {"files": [f1, f2]} execution_result = session._execute( query, 
variable_values=params, upload_files=True ) assert execution_result.data["success"] f1.close() f2.close() await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_error_fetching_schema(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.httpx import HTTPXTransport error_answer = """ { "errors": [ { "errorType": "UnauthorizedException", "message": "Permission denied" } ] } """ async def handler(request): return web.Response( text=error_answer, content_type="application/json", ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) def test_code(): transport = HTTPXTransport(url=url) with pytest.raises(TransportQueryError) as exc_info: with Client(transport=transport, fetch_schema_from_transport=True): pass expected_error = ( "Error while fetching schema: " "{'errorType': 'UnauthorizedException', 'message': 'Permission denied'}" ) assert expected_error in str(exc_info.value) assert transport.client is None await run_sync_test(event_loop, server, test_code) gql-3.6.0b2/tests/test_httpx_async.py000066400000000000000000001156061460703211500176560ustar00rootroot00000000000000import io import json from typing import Mapping import pytest from gql import Client, gql from gql.cli import get_parser, main from gql.transport.exceptions import ( TransportAlreadyConnected, TransportClosed, TransportProtocolError, TransportQueryError, TransportServerError, ) from .conftest import TemporaryFile, get_localhost_ssl_context, strip_braces_spaces query1_str = """ query getContinents { continents { code name } } """ query1_server_answer_data = ( '{"continents":[' '{"code":"AF","name":"Africa"},{"code":"AN","name":"Antarctica"},' '{"code":"AS","name":"Asia"},{"code":"EU","name":"Europe"},' '{"code":"NA","name":"North America"},{"code":"OC","name":"Oceania"},' '{"code":"SA","name":"South America"}]}' ) 
query1_server_answer = f'{{"data":{query1_server_answer_data}}}' # Marking all tests in this file with the httpx marker pytestmark = pytest.mark.httpx @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_query(event_loop, aiohttp_server): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport async def handler(request): return web.Response( text=query1_server_answer, content_type="application/json", headers={"dummy": "test1234"}, ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) transport = HTTPXAsyncTransport(url=url, timeout=10) async with Client(transport=transport) as session: query = gql(query1_str) # Execute query asynchronously result = await session.execute(query) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" # Checking response headers are saved in the transport assert hasattr(transport, "response_headers") assert isinstance(transport.response_headers, Mapping) assert transport.response_headers["dummy"] == "test1234" @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_ignore_backend_content_type(event_loop, aiohttp_server): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport async def handler(request): return web.Response(text=query1_server_answer, content_type="text/plain") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) transport = HTTPXAsyncTransport(url=url, timeout=10) async with Client(transport=transport) as session: query = gql(query1_str) result = await session.execute(query) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_cookies(event_loop, aiohttp_server): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport async def handler(request): assert 
"COOKIE" in request.headers assert "cookie1=val1" == request.headers["COOKIE"] return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) transport = HTTPXAsyncTransport(url=url, cookies={"cookie1": "val1"}) async with Client(transport=transport) as session: query = gql(query1_str) # Execute query asynchronously result = await session.execute(query) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_error_code_401(event_loop, aiohttp_server): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport async def handler(request): # Will generate http error code 401 return web.Response( text='{"error":"Unauthorized","message":"401 Client Error: Unauthorized"}', content_type="application/json", status=401, ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) transport = HTTPXAsyncTransport(url=url) async with Client(transport=transport) as session: query = gql(query1_str) with pytest.raises(TransportServerError) as exc_info: await session.execute(query) assert "Client error '401 Unauthorized'" in str(exc_info.value) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_error_code_429(event_loop, aiohttp_server): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport async def handler(request): # Will generate http error code 429 return web.Response( text=""" Too Many Requests

Too Many Requests

I only allow 50 requests per hour to this Web site per logged in user. Try again soon.

""", content_type="text/html", status=429, headers={"Retry-After": "3600"}, ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) transport = HTTPXAsyncTransport(url=url) async with Client(transport=transport) as session: query = gql(query1_str) with pytest.raises(TransportServerError) as exc_info: await session.execute(query) assert "Client error '429 Too Many Requests'" in str(exc_info.value) # Checking response headers are saved in the transport assert hasattr(transport, "response_headers") assert isinstance(transport.response_headers, Mapping) assert transport.response_headers["Retry-After"] == "3600" @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_error_code_500(event_loop, aiohttp_server): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport async def handler(request): # Will generate http error code 500 raise Exception("Server error") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) transport = HTTPXAsyncTransport(url=url) async with Client(transport=transport) as session: query = gql(query1_str) with pytest.raises(TransportServerError) as exc_info: await session.execute(query) assert "Server error '500 Internal Server Error'" in str(exc_info.value) transport_query_error_responses = [ '{"errors": ["Error 1", "Error 2"]}', '{"errors": {"error_1": "Something"}}', '{"errors": 5}', ] @pytest.mark.aiohttp @pytest.mark.asyncio @pytest.mark.parametrize("query_error", transport_query_error_responses) async def test_httpx_error_code(event_loop, aiohttp_server, query_error): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport async def handler(request): return web.Response(text=query_error, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = 
str(server.make_url("/")) transport = HTTPXAsyncTransport(url=url) async with Client(transport=transport) as session: query = gql(query1_str) with pytest.raises(TransportQueryError): await session.execute(query) invalid_protocol_responses = [ { "response": "{}", "expected_exception": ( "Server did not return a GraphQL result: " 'No "data" or "errors" keys in answer: {}' ), }, { "response": "qlsjfqsdlkj", "expected_exception": ( "Server did not return a GraphQL result: Not a JSON answer: qlsjfqsdlkj" ), }, { "response": '{"not_data_or_errors": 35}', "expected_exception": ( "Server did not return a GraphQL result: " 'No "data" or "errors" keys in answer: {"not_data_or_errors": 35}' ), }, { "response": "", "expected_exception": ( "Server did not return a GraphQL result: Not a JSON answer: " ), }, ] @pytest.mark.aiohttp @pytest.mark.asyncio @pytest.mark.parametrize("param", invalid_protocol_responses) async def test_httpx_invalid_protocol(event_loop, aiohttp_server, param): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport response = param["response"] async def handler(request): return web.Response(text=response, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) transport = HTTPXAsyncTransport(url=url) async with Client(transport=transport) as session: query = gql(query1_str) with pytest.raises(TransportProtocolError) as exc_info: await session.execute(query) assert param["expected_exception"] in str(exc_info.value) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_subscribe_not_supported(event_loop, aiohttp_server): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport async def handler(request): return web.Response(text="does not matter", content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = 
str(server.make_url("/")) transport = HTTPXAsyncTransport(url=url) async with Client(transport=transport) as session: query = gql(query1_str) with pytest.raises(NotImplementedError): async for result in session.subscribe(query): pass @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_cannot_connect_twice(event_loop, aiohttp_server): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport async def handler(request): return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) transport = HTTPXAsyncTransport(url=url, timeout=10) async with Client(transport=transport) as session: with pytest.raises(TransportAlreadyConnected): await session.transport.connect() @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_cannot_execute_if_not_connected(event_loop, aiohttp_server): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport async def handler(request): return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) transport = HTTPXAsyncTransport(url=url, timeout=10) query = gql(query1_str) with pytest.raises(TransportClosed): await transport.execute(query) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_extra_args(event_loop, aiohttp_server): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport import httpx async def handler(request): return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) # passing extra arguments to httpx.AsyncClient transport = httpx.AsyncHTTPTransport(retries=2) transport = 
HTTPXAsyncTransport(url=url, max_redirects=2, transport=transport) async with Client(transport=transport) as session: query = gql(query1_str) # Passing extra arguments to the post method of aiohttp result = await session.execute(query, extra_args={"follow_redirects": True}) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" query2_str = """ query getEurope ($code: ID!) { continent (code: $code) { name } } """ query2_server_answer = '{"data": {"continent": {"name": "Europe"}}}' @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_query_variable_values(event_loop, aiohttp_server): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport async def handler(request): return web.Response(text=query2_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) transport = HTTPXAsyncTransport(url=url, timeout=10) async with Client(transport=transport) as session: params = {"code": "EU"} query = gql(query2_str) # Execute query asynchronously result = await session.execute( query, variable_values=params, operation_name="getEurope" ) continent = result["continent"] assert continent["name"] == "Europe" @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_query_variable_values_fix_issue_292(event_loop, aiohttp_server): """Allow to specify variable_values without keyword. 
See https://github.com/graphql-python/gql/issues/292""" from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport async def handler(request): return web.Response(text=query2_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) transport = HTTPXAsyncTransport(url=url, timeout=10) async with Client(transport=transport) as session: params = {"code": "EU"} query = gql(query2_str) # Execute query asynchronously result = await session.execute(query, params, operation_name="getEurope") continent = result["continent"] assert continent["name"] == "Europe" @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_execute_running_in_thread( event_loop, aiohttp_server, run_sync_test ): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport async def handler(request): return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) def test_code(): transport = HTTPXAsyncTransport(url=url) client = Client(transport=transport) query = gql(query1_str) client.execute(query) await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_subscribe_running_in_thread( event_loop, aiohttp_server, run_sync_test ): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport async def handler(request): return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) def test_code(): transport = HTTPXAsyncTransport(url=url) client = Client(transport=transport) query = gql(query1_str) # Note: subscriptions are not supported on the httpx transport # But we add this 
test in order to have 100% code coverage # It is to check that we will correctly set an event loop # in the subscribe function if there is none (in a Thread for example) # We cannot test this with the websockets transport because # the websockets transport will set an event loop in its init with pytest.raises(NotImplementedError): for result in client.subscribe(query): pass await run_sync_test(event_loop, server, test_code) file_upload_server_answer = '{"data":{"success":true}}' file_upload_mutation_1 = """ mutation($file: Upload!) { uploadFile(input:{other_var:$other_var, file:$file}) { success } } """ file_upload_mutation_1_operations = ( '{"query": "mutation ($file: Upload!) {\\n uploadFile(input: {other_var: ' '$other_var, file: $file}) {\\n success\\n }\\n}", "variables": ' '{"file": null, "other_var": 42}}' ) file_upload_mutation_1_map = '{"0": ["variables.file"]}' file_1_content = """ This is a test file This file will be sent in the GraphQL mutation """ async def single_upload_handler(request): from aiohttp import web reader = await request.multipart() field_0 = await reader.next() assert field_0.name == "operations" field_0_text = await field_0.text() assert strip_braces_spaces(field_0_text) == file_upload_mutation_1_operations field_1 = await reader.next() assert field_1.name == "map" field_1_text = await field_1.text() assert field_1_text == file_upload_mutation_1_map field_2 = await reader.next() assert field_2.name == "0" field_2_text = await field_2.text() assert field_2_text == file_1_content field_3 = await reader.next() assert field_3 is None return web.Response(text=file_upload_server_answer, content_type="application/json") @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_file_upload(event_loop, aiohttp_server): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport app = web.Application() app.router.add_route("POST", "/", single_upload_handler) server = await aiohttp_server(app) url = str(server.make_url("/")) 
transport = HTTPXAsyncTransport(url=url, timeout=10) with TemporaryFile(file_1_content) as test_file: async with Client(transport=transport) as session: query = gql(file_upload_mutation_1) file_path = test_file.filename with open(file_path, "rb") as f: params = {"file": f, "other_var": 42} # Execute query asynchronously result = await session.execute( query, variable_values=params, upload_files=True ) success = result["success"] assert success @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_file_upload_without_session( event_loop, aiohttp_server, run_sync_test ): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport app = web.Application() app.router.add_route("POST", "/", single_upload_handler) server = await aiohttp_server(app) url = str(server.make_url("/")) def test_code(): transport = HTTPXAsyncTransport(url=url, timeout=10) with TemporaryFile(file_1_content) as test_file: client = Client(transport=transport) query = gql(file_upload_mutation_1) file_path = test_file.filename with open(file_path, "rb") as f: params = {"file": f, "other_var": 42} result = client.execute( query, variable_values=params, upload_files=True ) success = result["success"] assert success await run_sync_test(event_loop, server, test_code) # This is a sample binary file content containing all possible byte values binary_file_content = bytes(range(0, 256)) async def binary_upload_handler(request): from aiohttp import web reader = await request.multipart() field_0 = await reader.next() assert field_0.name == "operations" field_0_text = await field_0.text() assert strip_braces_spaces(field_0_text) == file_upload_mutation_1_operations field_1 = await reader.next() assert field_1.name == "map" field_1_text = await field_1.text() assert field_1_text == file_upload_mutation_1_map field_2 = await reader.next() assert field_2.name == "0" field_2_binary = await field_2.read() assert field_2_binary == binary_file_content field_3 = await reader.next() assert 
field_3 is None return web.Response(text=file_upload_server_answer, content_type="application/json") @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_binary_file_upload(event_loop, aiohttp_server): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport app = web.Application() app.router.add_route("POST", "/", binary_upload_handler) server = await aiohttp_server(app) url = str(server.make_url("/")) transport = HTTPXAsyncTransport(url=url, timeout=10) with TemporaryFile(binary_file_content) as test_file: async with Client(transport=transport) as session: query = gql(file_upload_mutation_1) file_path = test_file.filename with open(file_path, "rb") as f: params = {"file": f, "other_var": 42} # Execute query asynchronously result = await session.execute( query, variable_values=params, upload_files=True ) success = result["success"] assert success file_upload_mutation_2 = """ mutation($file1: Upload!, $file2: Upload!) { uploadFile(input:{file1:$file, file2:$file}) { success } } """ file_upload_mutation_2_operations = ( '{"query": "mutation ($file1: Upload!, $file2: Upload!) 
{\\n ' 'uploadFile(input: {file1: $file, file2: $file}) {\\n success\\n }\\n}", ' '"variables": {"file1": null, "file2": null}}' ) file_upload_mutation_2_map = '{"0": ["variables.file1"], "1": ["variables.file2"]}' file_2_content = """ This is a second test file This file will also be sent in the GraphQL mutation """ @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_file_upload_two_files(event_loop, aiohttp_server): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport async def handler(request): reader = await request.multipart() field_0 = await reader.next() assert field_0.name == "operations" field_0_text = await field_0.text() assert strip_braces_spaces(field_0_text) == file_upload_mutation_2_operations field_1 = await reader.next() assert field_1.name == "map" field_1_text = await field_1.text() assert field_1_text == file_upload_mutation_2_map field_2 = await reader.next() assert field_2.name == "0" field_2_text = await field_2.text() assert field_2_text == file_1_content field_3 = await reader.next() assert field_3.name == "1" field_3_text = await field_3.text() assert field_3_text == file_2_content field_4 = await reader.next() assert field_4 is None return web.Response( text=file_upload_server_answer, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) transport = HTTPXAsyncTransport(url=url, timeout=10) with TemporaryFile(file_1_content) as test_file_1: with TemporaryFile(file_2_content) as test_file_2: async with Client(transport=transport) as session: query = gql(file_upload_mutation_2) file_path_1 = test_file_1.filename file_path_2 = test_file_2.filename f1 = open(file_path_1, "rb") f2 = open(file_path_2, "rb") params = { "file1": f1, "file2": f2, } result = await session.execute( query, variable_values=params, upload_files=True ) f1.close() f2.close() success = result["success"] assert success 
file_upload_mutation_3 = """ mutation($files: [Upload!]!) { uploadFiles(input:{files:$files}) { success } } """ file_upload_mutation_3_operations = ( '{"query": "mutation ($files: [Upload!]!) {\\n uploadFiles(' "input: {files: $files})" ' {\\n success\\n }\\n}", "variables": {"files": [null, null]}}' ) file_upload_mutation_3_map = '{"0": ["variables.files.0"], "1": ["variables.files.1"]}' @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_file_upload_list_of_two_files(event_loop, aiohttp_server): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport async def handler(request): reader = await request.multipart() field_0 = await reader.next() assert field_0.name == "operations" field_0_text = await field_0.text() assert strip_braces_spaces(field_0_text) == file_upload_mutation_3_operations field_1 = await reader.next() assert field_1.name == "map" field_1_text = await field_1.text() assert field_1_text == file_upload_mutation_3_map field_2 = await reader.next() assert field_2.name == "0" field_2_text = await field_2.text() assert field_2_text == file_1_content field_3 = await reader.next() assert field_3.name == "1" field_3_text = await field_3.text() assert field_3_text == file_2_content field_4 = await reader.next() assert field_4 is None return web.Response( text=file_upload_server_answer, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) transport = HTTPXAsyncTransport(url=url, timeout=10) with TemporaryFile(file_1_content) as test_file_1: with TemporaryFile(file_2_content) as test_file_2: async with Client(transport=transport) as session: query = gql(file_upload_mutation_3) file_path_1 = test_file_1.filename file_path_2 = test_file_2.filename f1 = open(file_path_1, "rb") f2 = open(file_path_2, "rb") params = {"files": [f1, f2]} # Execute query asynchronously result = await session.execute( query, 
variable_values=params, upload_files=True ) f1.close() f2.close() success = result["success"] assert success @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_using_cli(event_loop, aiohttp_server, monkeypatch, capsys): from aiohttp import web async def handler(request): return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) parser = get_parser(with_examples=True) args = parser.parse_args([url, "--verbose"]) # Monkeypatching sys.stdin to simulate getting the query # via the standard input monkeypatch.setattr("sys.stdin", io.StringIO(query1_str)) exit_code = await main(args) assert exit_code == 0 # Check that the result has been printed on stdout captured = capsys.readouterr() captured_out = str(captured.out).strip() expected_answer = json.loads(query1_server_answer_data) print(f"Captured: {captured_out}") received_answer = json.loads(captured_out) assert received_answer == expected_answer @pytest.mark.aiohttp @pytest.mark.asyncio @pytest.mark.script_launch_mode("subprocess") async def test_httpx_using_cli_ep( event_loop, aiohttp_server, monkeypatch, script_runner, run_sync_test ): from aiohttp import web async def handler(request): return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) def test_code(): monkeypatch.setattr("sys.stdin", io.StringIO(query1_str)) ret = script_runner.run( "gql-cli", url, "--verbose", stdin=io.StringIO(query1_str) ) assert ret.success # Check that the result has been printed on stdout captured_out = str(ret.stdout).strip() expected_answer = json.loads(query1_server_answer_data) print(f"Captured: {captured_out}") received_answer = json.loads(captured_out) assert received_answer == expected_answer await 
run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_using_cli_invalid_param( event_loop, aiohttp_server, monkeypatch, capsys ): from aiohttp import web async def handler(request): return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) parser = get_parser(with_examples=True) args = parser.parse_args([url, "--variables", "invalid_param"]) # Monkeypatching sys.stdin to simulate getting the query # via the standard input monkeypatch.setattr("sys.stdin", io.StringIO(query1_str)) # Check that the exit_code is an error exit_code = await main(args) assert exit_code == 1 # Check that the error has been printed on stdout captured = capsys.readouterr() captured_err = str(captured.err).strip() print(f"Captured: {captured_err}") expected_error = "Error: Invalid variable: invalid_param" assert expected_error in captured_err @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_using_cli_invalid_query( event_loop, aiohttp_server, monkeypatch, capsys ): from aiohttp import web async def handler(request): return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) parser = get_parser(with_examples=True) args = parser.parse_args([url]) # Send invalid query on standard input monkeypatch.setattr("sys.stdin", io.StringIO("BLAHBLAH")) exit_code = await main(args) assert exit_code == 1 # Check that the error has been printed on stdout captured = capsys.readouterr() captured_err = str(captured.err).strip() print(f"Captured: {captured_err}") expected_error = "Syntax Error: Unexpected Name 'BLAHBLAH'" assert expected_error in captured_err query1_server_answer_with_extensions = ( f'{{"data":{query1_server_answer_data}, 
"extensions":{{"key1": "val1"}}}}' ) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_query_with_extensions(event_loop, aiohttp_server): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport async def handler(request): return web.Response( text=query1_server_answer_with_extensions, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) transport = HTTPXAsyncTransport(url=url, timeout=10) async with Client(transport=transport) as session: query = gql(query1_str) execution_result = await session.execute(query, get_execution_result=True) assert execution_result.extensions["key1"] == "val1" @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_query_https(event_loop, ssl_aiohttp_server): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport async def handler(request): return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await ssl_aiohttp_server(app) url = str(server.make_url("/")) assert url.startswith("https://") cert, _ = get_localhost_ssl_context() transport = HTTPXAsyncTransport(url=url, timeout=10, verify=cert.decode()) async with Client(transport=transport) as session: query = gql(query1_str) # Execute query asynchronously result = await session.execute(query) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_error_fetching_schema(event_loop, aiohttp_server): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport error_answer = """ { "errors": [ { "errorType": "UnauthorizedException", "message": "Permission denied" } ] } """ async def handler(request): return web.Response( text=error_answer, content_type="application/json", ) app = web.Application() 
app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) transport = HTTPXAsyncTransport(url=url, timeout=10) with pytest.raises(TransportQueryError) as exc_info: async with Client(transport=transport, fetch_schema_from_transport=True): pass expected_error = ( "Error while fetching schema: " "{'errorType': 'UnauthorizedException', 'message': 'Permission denied'}" ) assert expected_error in str(exc_info.value) assert transport.client is None @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_reconnecting_session(event_loop, aiohttp_server): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport async def handler(request): return web.Response( text=query1_server_answer, content_type="application/json", ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) transport = HTTPXAsyncTransport(url=url, timeout=10) client = Client(transport=transport) session = await client.connect_async(reconnecting=True) query = gql(query1_str) # Execute query asynchronously result = await session.execute(query) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" await client.close_async() @pytest.mark.aiohttp @pytest.mark.asyncio @pytest.mark.parametrize("retries", [False, lambda e: e]) async def test_httpx_reconnecting_session_retries(event_loop, aiohttp_server, retries): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport async def handler(request): return web.Response( text=query1_server_answer, content_type="application/json", ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) transport = HTTPXAsyncTransport(url=url, timeout=10) client = Client(transport=transport) session = await client.connect_async( reconnecting=True, retry_execute=retries, retry_connect=retries ) assert 
session._execute_with_retries == session._execute_once assert session._connect_with_retries == session.transport.connect await client.close_async() @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_reconnecting_session_start_connecting_task_twice( event_loop, aiohttp_server, caplog ): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport async def handler(request): return web.Response( text=query1_server_answer, content_type="application/json", ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) transport = HTTPXAsyncTransport(url=url, timeout=10) client = Client(transport=transport) session = await client.connect_async(reconnecting=True) await session.start_connecting_task() print(f"Captured log: {caplog.text}") expected_warning = "connect task already started!" assert expected_warning in caplog.text await client.close_async() @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_json_serializer(event_loop, aiohttp_server, caplog): from aiohttp import web from gql.transport.httpx import HTTPXAsyncTransport async def handler(request): request_text = await request.text() print(f"Received on backend: {request_text}") return web.Response( text=query1_server_answer, content_type="application/json", ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) transport = HTTPXAsyncTransport( url=url, timeout=10, json_serialize=lambda e: json.dumps(e, separators=(",", ":")), ) async with Client(transport=transport) as session: query = gql(query1_str) # Execute query asynchronously result = await session.execute(query) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" # Checking that there is no space after the colon in the log expected_log = '"query":"query getContinents' assert expected_log in caplog.text query_float_str = """ query 
getPi { pi } """ query_float_server_answer_data = '{"pi": 3.141592653589793238462643383279502884197}' query_float_server_answer = f'{{"data":{query_float_server_answer_data}}}' @pytest.mark.aiohttp @pytest.mark.asyncio async def test_httpx_json_deserializer(event_loop, aiohttp_server): from aiohttp import web from decimal import Decimal from functools import partial from gql.transport.httpx import HTTPXAsyncTransport async def handler(request): return web.Response( text=query_float_server_answer, content_type="application/json", ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = str(server.make_url("/")) json_loads = partial(json.loads, parse_float=Decimal) transport = HTTPXAsyncTransport( url=url, timeout=10, json_deserialize=json_loads, ) async with Client(transport=transport) as session: query = gql(query_float_str) # Execute query asynchronously result = await session.execute(query) pi = result["pi"] assert pi == Decimal("3.141592653589793238462643383279502884197") gql-3.6.0b2/tests/test_httpx_online.py000066400000000000000000000062501460703211500200170ustar00rootroot00000000000000import asyncio import sys from typing import Dict import pytest from gql import Client, gql from gql.transport.exceptions import TransportQueryError @pytest.mark.httpx @pytest.mark.online @pytest.mark.asyncio async def test_httpx_simple_query(event_loop): from gql.transport.httpx import HTTPXAsyncTransport # Create https url url = "https://countries.trevorblades.com/graphql" # Get transport sample_transport = HTTPXAsyncTransport(url=url) # Instanciate client async with Client(transport=sample_transport) as session: query = gql( """ query getContinents { continents { code name } } """ ) # Fetch schema await session.fetch_schema() # Execute query result = await session.execute(query) # Verify result assert isinstance(result, Dict) print(result) continents = result["continents"] africa = continents[0] assert africa["code"] == 
"AF" @pytest.mark.httpx @pytest.mark.online @pytest.mark.asyncio async def test_httpx_invalid_query(event_loop): from gql.transport.httpx import HTTPXAsyncTransport sample_transport = HTTPXAsyncTransport( url="https://countries.trevorblades.com/graphql" ) async with Client(transport=sample_transport) as session: query = gql( """ query getContinents { continents { code bloh } } """ ) with pytest.raises(TransportQueryError): await session.execute(query) @pytest.mark.httpx @pytest.mark.online @pytest.mark.skipif(sys.version_info < (3, 8), reason="requires python3.8 or higher") @pytest.mark.asyncio async def test_httpx_two_queries_in_parallel_using_two_tasks(event_loop): from gql.transport.httpx import HTTPXAsyncTransport sample_transport = HTTPXAsyncTransport( url="https://countries.trevorblades.com/graphql", ) # Instanciate client async with Client(transport=sample_transport) as session: query1 = gql( """ query getContinents { continents { code } } """ ) query2 = gql( """ query getContinents { continents { name } } """ ) async def query_task1(): result = await session.execute(query1) assert isinstance(result, Dict) print(result) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" async def query_task2(): result = await session.execute(query2) assert isinstance(result, Dict) print(result) continents = result["continents"] africa = continents[0] assert africa["name"] == "Africa" task1 = asyncio.create_task(query_task1()) task2 = asyncio.create_task(query_task2()) await task1 await task2 gql-3.6.0b2/tests/test_localhost.cnf000066400000000000000000000004101460703211500174020ustar00rootroot00000000000000[ req ] default_md = sha256 encrypt_key = no prompt = no distinguished_name = dn x509_extensions = ext [ dn ] C = "FR" L = "Paris" O = "Aymeric Augustin" CN = "localhost" [ ext ] subjectAltName = @san [ san ] DNS.1 = localhost IP.2 = 127.0.0.1 IP.3 = ::1 
gql-3.6.0b2/tests/test_localhost.pem000066400000000000000000000055341460703211500174310ustar00rootroot00000000000000-----BEGIN PRIVATE KEY----- MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCUgrQVkNbAWRlo zZUj14Ufz7YEp2MXmvmhdlfOGLwjy+xPO98aJRv5/nYF2eWM3llcmLe8FbBSK+QF To4su7ZVnc6qITOHqcSDUw06WarQUMs94bhHUvQp1u8+b2hNiMeGw6+QiBI6OJRO iGpLRbkN6Uj3AKwi8SYVoLyMiztuwbNyGf8fF3DDpHZtBitGtMSBCMsQsfB465pl 2UoyBrWa2lsbLt3VvBZZvHqfEuPjpjjKN5USIXnaf0NizaR6ps3EyfftWy4i7zIQ N5uTExvaPDyPn9nH3q/dkT99mSMSU1AvTTpX8PN7DlqE6wZMbQsBPRGW7GElQ+Ox IKdKOLk5AgMBAAECggEAd3kqzQqnaTiEs4ZoC9yPUUc1pErQ8iWP27Ar9TZ67MVa B2ggFJV0C0sFwbFI9WnPNCn77gj4vzJmD0riH+SnS/tXThDFtscBu7BtvNp0C4Bj 8RWMvXxjxuENuQnBPFbkRWtZ6wk8uK/Zx9AAyyt9M07Qjz1wPfAIdm/IH7zHBFMA gsqjnkLh1r0FvjNEbLiuGqYU/GVxaZYd+xy+JU52IxjHUUL9yD0BPWb+Szar6AM2 gUpmTX6+BcCZwwZ//DzCoWYZ9JbP8akn6edBeZyuMPqYgLzZkPyQ+hRW46VPPw89 yg4LR9nzgQiBHlac0laB4NrWa+d9QRRLitl1O3gVAQKBgQDDkptxXu7w9Lpc+HeE N/pJfpCzUuF7ZC4vatdoDzvfB5Ky6W88Poq+I7bB9m7StXdFAbDyUBxvisjTBMVA OtYqpAk/rhX8MjSAtjoFe2nH+eEiQriuZmtA5CdKEXS4hNbc/HhEPWhk7Zh8OV5v y7l4r6l4UHqaN9QyE0vlFdmcmQKBgQDCZZR/trJ2/g2OquaS+Zd2h/3NXw0NBq4z 4OBEWqNa/R35jdK6WlWJH7+tKOacr+xtswLpPeZHGwMdk64/erbYWBuJWAjpH72J DM9+1H5fFHANWpWTNn94enQxwfzZRvdkxq4IWzGhesptYnHIzoAmaqC3lbn/e3u0 Flng32hFoQKBgQCF3D4K3hib0lYQtnxPgmUMktWF+A+fflViXTWs4uhu4mcVkFNz n7clJ5q6reryzAQjtmGfqRedfRex340HRn46V2aBMK2Znd9zzcZu5CbmGnFvGs3/ iNiWZNNDjike9sV+IkxLIODoW/vH4xhxWrbLFSjg0ezoy5ew4qZK2abF2QKBgQC5 M5efeQpbjTyTUERtf/aKCZOGZmkDoPq0GCjxVjzNQdqd1z0NJ2TYR/QP36idXIlu FZ7PYZaS5aw5MGpQtfOe94n8dm++0et7t0WzunRO1yTNxCA+aSxWNquegAcJZa/q RdKlyWPmSRqzzZdDzWCPuQQ3AyF5wkYfUy/7qjwoIQKBgB2v96BV7+lICviIKzzb 1o3A3VzAX5MGd98uLGjlK4qsBC+s7mk2eQztiNZgbA0W6fhQ5Dz3HcXJ5ppy8Okc jeAktrNRzz15hvi/XkWdO+VMqiHW4l+sWYukjhCyod1oO1KGHq0LYYvv076syxGw vRKLq7IJ4WIp1VtfaBlrIogq -----END PRIVATE KEY----- -----BEGIN CERTIFICATE----- MIIDTTCCAjWgAwIBAgIJAJ6VG2cQlsepMA0GCSqGSIb3DQEBCwUAMEwxCzAJBgNV BAYTAkZSMQ4wDAYDVQQHDAVQYXJpczEZMBcGA1UECgwQQXltZXJpYyBBdWd1c3Rp 
bjESMBAGA1UEAwwJbG9jYWxob3N0MCAXDTE4MDUwNTE2NTc1NloYDzIwNjAwNTA0 MTY1NzU2WjBMMQswCQYDVQQGEwJGUjEOMAwGA1UEBwwFUGFyaXMxGTAXBgNVBAoM EEF5bWVyaWMgQXVndXN0aW4xEjAQBgNVBAMMCWxvY2FsaG9zdDCCASIwDQYJKoZI hvcNAQEBBQADggEPADCCAQoCggEBAJSCtBWQ1sBZGWjNlSPXhR/PtgSnYxea+aF2 V84YvCPL7E873xolG/n+dgXZ5YzeWVyYt7wVsFIr5AVOjiy7tlWdzqohM4epxINT DTpZqtBQyz3huEdS9CnW7z5vaE2Ix4bDr5CIEjo4lE6IaktFuQ3pSPcArCLxJhWg vIyLO27Bs3IZ/x8XcMOkdm0GK0a0xIEIyxCx8HjrmmXZSjIGtZraWxsu3dW8Flm8 ep8S4+OmOMo3lRIhedp/Q2LNpHqmzcTJ9+1bLiLvMhA3m5MTG9o8PI+f2cfer92R P32ZIxJTUC9NOlfw83sOWoTrBkxtCwE9EZbsYSVD47Egp0o4uTkCAwEAAaMwMC4w LAYDVR0RBCUwI4IJbG9jYWxob3N0hwR/AAABhxAAAAAAAAAAAAAAAAAAAAABMA0G CSqGSIb3DQEBCwUAA4IBAQA0imKp/rflfbDCCx78NdsR5rt0jKem2t3YPGT6tbeU +FQz62SEdeD2OHWxpvfPf+6h3iTXJbkakr2R4lP3z7GHUe61lt3So9VHAvgbtPTH aB1gOdThA83o0fzQtnIv67jCvE9gwPQInViZLEcm2iQEZLj6AuSvBKmluTR7vNRj 8/f2R4LsDfCWGrzk2W+deGRvSow7irS88NQ8BW8S8otgMiBx4D2UlOmQwqr6X+/r jYIDuMb6GDKRXtBUGDokfE94hjj9u2mrNRwt8y4tqu8ZNa//yLEQ0Ow2kP3QJPLY 941VZpwRi2v/+JvI7OBYlvbOTFwM8nAk79k+Dgviygd9 -----END CERTIFICATE----- gql-3.6.0b2/tests/test_phoenix_channel_exceptions.py000066400000000000000000000354331460703211500227140ustar00rootroot00000000000000import asyncio import pytest from gql import Client, gql from gql.transport.exceptions import ( TransportProtocolError, TransportQueryError, TransportServerError, ) from .conftest import MS # Marking all tests in this file with the websockets marker pytestmark = pytest.mark.websockets def ensure_list(s): return ( s if s is None or isinstance(s, list) else list(s) if isinstance(s, tuple) else [s] ) query1_str = """ query getContinents { continents { code name } } """ default_query_server_answer = ( '{"event":"phx_reply",' '"payload":' '{"response":' '{"data":{"continents":[' '{"code":"AF","name":"Africa"},{"code":"AN","name":"Antarctica"},' '{"code":"AS","name":"Asia"},{"code":"EU","name":"Europe"},' '{"code":"NA","name":"North America"},{"code":"OC","name":"Oceania"},' '{"code":"SA","name":"South 
America"}]}},' '"status":"ok"},' '"ref":2,' '"topic":"test_topic"}' ) # other protocol exceptions reply_ref_null_answer = ( '{"event":"phx_reply","payload":{}', '"ref":null,' '"topic":"test_topic"}', ) reply_ref_zero_answer = ( '{"event":"phx_reply","payload":{}', '"ref":0,' '"topic":"test_topic"}', ) # "status":"error" responses generic_error_server_answer = ( '{"event":"phx_reply",' '"payload":' '{"status":"error"},' '"ref":2,' '"topic":"test_topic"}' ) error_with_reason_server_answer = ( '{"event":"phx_reply",' '"payload":' '{"response":{"reason":"internal error"},' '"status":"error"},' '"ref":2,' '"topic":"test_topic"}' ) multiple_errors_server_answer = ( '{"event":"phx_reply",' '"payload":' '{"response":{"errors": ["error 1", "error 2"]},' '"status":"error"},' '"ref":2,' '"topic":"test_topic"}' ) timeout_server_answer = ( '{"event":"phx_reply",' '"payload":' '{"status":"timeout"},' '"ref":2,' '"topic":"test_topic"}' ) invalid_payload_data_answer = ( '{"event":"phx_reply",' '"payload":"INVALID",' '"ref":2,' '"topic":"test_topic"}' ) # "status":"ok" exceptions invalid_response_server_answer = ( '{"event":"phx_reply",' '"payload":{"response":"INVALID",' '"status":"ok"}' '"ref":2,' '"topic":"test_topic"}' ) invalid_response_keys_server_answer = ( '{"event":"phx_reply",' '"payload":{"response":' '{"data":{"continents":null},"invalid":null}",' '"status":"ok"}' '"ref":2,' '"topic":"test_topic"}' ) invalid_event_server_answer = '{"event":"unknown"}' def query_server(server_answers=default_query_server_answer): from .conftest import PhoenixChannelServerHelper async def phoenix_server(ws, path): await PhoenixChannelServerHelper.send_connection_ack(ws) await ws.recv() for server_answer in ensure_list(server_answers): await ws.send(server_answer) await PhoenixChannelServerHelper.send_close(ws) await ws.wait_closed() return phoenix_server async def no_connection_ack_phoenix_server(ws, path): from .conftest import PhoenixChannelServerHelper await ws.recv() await 
PhoenixChannelServerHelper.send_close(ws) await ws.wait_closed() @pytest.mark.asyncio @pytest.mark.parametrize( "server", [ query_server(reply_ref_null_answer), query_server(reply_ref_zero_answer), query_server(invalid_payload_data_answer), query_server(invalid_response_server_answer), query_server(invalid_response_keys_server_answer), no_connection_ack_phoenix_server, query_server(invalid_event_server_answer), ], indirect=True, ) @pytest.mark.parametrize("query_str", [query1_str]) async def test_phoenix_channel_query_protocol_error(event_loop, server, query_str): from gql.transport.phoenix_channel_websockets import ( PhoenixChannelWebsocketsTransport, ) path = "/graphql" url = f"ws://{server.hostname}:{server.port}{path}" sample_transport = PhoenixChannelWebsocketsTransport( channel_name="test_channel", url=url ) query = gql(query_str) with pytest.raises(TransportProtocolError): async with Client(transport=sample_transport) as session: await session.execute(query) @pytest.mark.asyncio @pytest.mark.parametrize( "server", [ query_server(generic_error_server_answer), query_server(error_with_reason_server_answer), query_server(multiple_errors_server_answer), query_server(timeout_server_answer), ], indirect=True, ) @pytest.mark.parametrize("query_str", [query1_str]) async def test_phoenix_channel_query_error(event_loop, server, query_str): from gql.transport.phoenix_channel_websockets import ( PhoenixChannelWebsocketsTransport, ) path = "/graphql" url = f"ws://{server.hostname}:{server.port}{path}" sample_transport = PhoenixChannelWebsocketsTransport( channel_name="test_channel", url=url ) query = gql(query_str) with pytest.raises(TransportQueryError): async with Client(transport=sample_transport) as session: await session.execute(query) query2_str = """ subscription getContinents { continents { code name } } """ default_subscription_server_answer = ( '{"event":"phx_reply",' '"payload":' '{"response":{"subscriptionId":"test_subscription"},' '"status":"ok"},' '"ref":2,' 
'"topic":"test_topic"}' ) ref_is_not_an_integer_server_answer = ( '{"event":"phx_reply",' '"payload":' '{"response":{"subscriptionId":"test_subscription"},' '"status":"ok"},' '"ref":"not_an_integer",' '"topic":"test_topic"}' ) missing_ref_server_answer = ( '{"event":"phx_reply",' '"payload":' '{"response":{"subscriptionId":"test_subscription"},' '"status":"ok"},' '"topic":"test_topic"}' ) missing_subscription_id_server_answer = ( '{"event":"phx_reply",' '"payload":' '{"response":{},"status":"ok"},' '"ref":2,' '"topic":"test_topic"}' ) null_subscription_id_server_answer = ( '{"event":"phx_reply",' '"payload":' '{"response":{"subscriptionId":null},"status":"ok"},' '"ref":2,' '"topic":"test_topic"}' ) default_subscription_data_answer = ( '{"event":"subscription:data","payload":' '{"subscriptionId":"test_subscription","result":' '{"data":{"continents":[' '{"code":"AF","name":"Africa"},{"code":"AN","name":"Antarctica"},' '{"code":"AS","name":"Asia"},{"code":"EU","name":"Europe"},' '{"code":"NA","name":"North America"},{"code":"OC","name":"Oceania"},' '{"code":"SA","name":"South America"}]}}},' '"ref":null,' '"topic":"test_subscription"}' ) default_subscription_unsubscribe_answer = ( '{"event":"phx_reply",' '"payload":{"response":{"subscriptionId":"test_subscription"},' '"status":"ok"},' '"ref":3,' '"topic":"test_topic"}' ) missing_subscription_id_data_answer = ( '{"event":"subscription:data","payload":' '{"result":' '{"data":{"continents":[' '{"code":"AF","name":"Africa"},{"code":"AN","name":"Antarctica"},' '{"code":"AS","name":"Asia"},{"code":"EU","name":"Europe"},' '{"code":"NA","name":"North America"},{"code":"OC","name":"Oceania"},' '{"code":"SA","name":"South America"}]}}},' '"ref":null,' '"topic":"test_subscription"}' ) null_subscription_id_data_answer = ( '{"event":"subscription:data","payload":' '{"subscriptionId":null,"result":' '{"data":{"continents":[' '{"code":"AF","name":"Africa"},{"code":"AN","name":"Antarctica"},' 
'{"code":"AS","name":"Asia"},{"code":"EU","name":"Europe"},' '{"code":"NA","name":"North America"},{"code":"OC","name":"Oceania"},' '{"code":"SA","name":"South America"}]}}},' '"ref":null,' '"topic":"test_subscription"}' ) invalid_subscription_id_data_answer = ( '{"event":"subscription:data","payload":' '{"subscriptionId":"INVALID","result":' '{"data":{"continents":[' '{"code":"AF","name":"Africa"},{"code":"AN","name":"Antarctica"},' '{"code":"AS","name":"Asia"},{"code":"EU","name":"Europe"},' '{"code":"NA","name":"North America"},{"code":"OC","name":"Oceania"},' '{"code":"SA","name":"South America"}]}}},' '"ref":null,' '"topic":"test_subscription"}' ) invalid_payload_data_answer = ( '{"event":"subscription:data",' '"payload":"INVALID",' '"ref":null,' '"topic":"test_subscription"}' ) invalid_result_data_answer = ( '{"event":"subscription:data","payload":' '{"subscriptionId":"test_subscription","result":"INVALID"},' '"ref":null,' '"topic":"test_subscription"}' ) invalid_result_keys_data_answer = ( '{"event":"subscription:data",' '"payload":{"subscriptionId":"test_subscription",' '"result":{"data":{"continents":null},"invalid":null}},' '"ref":null,' '"topic":"test_subscription"}' ) invalid_subscription_ref_answer = ( '{"event":"phx_reply",' '"payload":{"response":{"subscriptionId":"test_subscription"},' '"status":"ok"},' '"ref":99,' '"topic":"test_topic"}' ) mismatched_unsubscribe_answer = ( '{"event":"phx_reply",' '"payload":{"response":{"subscriptionId":"no_such_subscription"},' '"status":"ok"},' '"ref":3,' '"topic":"test_topic"}' ) def subscription_server( server_answers=default_subscription_server_answer, data_answers=default_subscription_data_answer, unsubscribe_answers=default_subscription_unsubscribe_answer, ): from .conftest import PhoenixChannelServerHelper import json async def phoenix_server(ws, path): await PhoenixChannelServerHelper.send_connection_ack(ws) await ws.recv() if server_answers is not None: for server_answer in ensure_list(server_answers): 
await ws.send(server_answer) if data_answers is not None: for data_answer in ensure_list(data_answers): await ws.send(data_answer) if unsubscribe_answers is not None: result = await ws.recv() json_result = json.loads(result) assert json_result["event"] == "unsubscribe" for unsubscribe_answer in ensure_list(unsubscribe_answers): await ws.send(unsubscribe_answer) else: await PhoenixChannelServerHelper.send_close(ws) await ws.wait_closed() return phoenix_server @pytest.mark.asyncio @pytest.mark.parametrize( "server", [ subscription_server(invalid_subscription_ref_answer), subscription_server(missing_subscription_id_server_answer), subscription_server(null_subscription_id_server_answer), subscription_server( [default_subscription_server_answer, default_subscription_server_answer] ), subscription_server(data_answers=missing_subscription_id_data_answer), subscription_server(data_answers=null_subscription_id_data_answer), subscription_server(data_answers=invalid_subscription_id_data_answer), subscription_server(data_answers=ref_is_not_an_integer_server_answer), subscription_server(data_answers=missing_ref_server_answer), subscription_server(data_answers=invalid_payload_data_answer), subscription_server(data_answers=invalid_result_data_answer), subscription_server(data_answers=invalid_result_keys_data_answer), ], indirect=True, ) @pytest.mark.parametrize("query_str", [query2_str]) async def test_phoenix_channel_subscription_protocol_error( event_loop, server, query_str ): from gql.transport.phoenix_channel_websockets import ( PhoenixChannelWebsocketsTransport, ) path = "/graphql" url = f"ws://{server.hostname}:{server.port}{path}" sample_transport = PhoenixChannelWebsocketsTransport( channel_name="test_channel", url=url ) query = gql(query_str) with pytest.raises(TransportProtocolError): async with Client(transport=sample_transport) as session: async for _result in session.subscribe(query): await asyncio.sleep(10 * MS) break server_error_server_answer = 
'{"event":"phx_error", "ref":2, "topic":"test_topic"}' @pytest.mark.asyncio @pytest.mark.parametrize( "server", [query_server(server_error_server_answer)], indirect=True, ) @pytest.mark.parametrize("query_str", [query1_str]) async def test_phoenix_channel_server_error(event_loop, server, query_str): from gql.transport.phoenix_channel_websockets import ( PhoenixChannelWebsocketsTransport, ) path = "/graphql" url = f"ws://{server.hostname}:{server.port}{path}" sample_transport = PhoenixChannelWebsocketsTransport( channel_name="test_channel", url=url ) query = gql(query_str) with pytest.raises(TransportServerError): async with Client(transport=sample_transport) as session: await session.execute(query) # These cannot be caught by the client @pytest.mark.asyncio @pytest.mark.parametrize( "server", [ subscription_server(unsubscribe_answers=invalid_subscription_ref_answer), subscription_server(unsubscribe_answers=mismatched_unsubscribe_answer), ], indirect=True, ) @pytest.mark.parametrize("query_str", [query2_str]) async def test_phoenix_channel_unsubscribe_error(event_loop, server, query_str): from gql.transport.phoenix_channel_websockets import ( PhoenixChannelWebsocketsTransport, ) path = "/graphql" url = f"ws://{server.hostname}:{server.port}{path}" # Reduce close_timeout. These tests will wait for an unsubscribe # reply that will never come... 
sample_transport = PhoenixChannelWebsocketsTransport( channel_name="test_channel", url=url, close_timeout=1 ) query = gql(query_str) async with Client(transport=sample_transport) as session: async for _result in session.subscribe(query): break # We can force the error if somehow the generator is still running while # we receive a mismatched unsubscribe answer @pytest.mark.asyncio @pytest.mark.parametrize( "server", [subscription_server(unsubscribe_answers=mismatched_unsubscribe_answer)], indirect=True, ) @pytest.mark.parametrize("query_str", [query2_str]) async def test_phoenix_channel_unsubscribe_error_forcing(event_loop, server, query_str): from gql.transport.phoenix_channel_websockets import ( PhoenixChannelWebsocketsTransport, ) path = "/graphql" url = f"ws://{server.hostname}:{server.port}{path}" sample_transport = PhoenixChannelWebsocketsTransport( channel_name="test_channel", url=url, close_timeout=1 ) query = gql(query_str) with pytest.raises(TransportProtocolError): async with Client(transport=sample_transport) as session: async for _result in session.subscribe(query): await session.transport._send_stop_message(2) await asyncio.sleep(10 * MS) gql-3.6.0b2/tests/test_phoenix_channel_query.py000066400000000000000000000077571460703211500217100ustar00rootroot00000000000000import pytest from gql import Client, gql # Marking all tests in this file with the websockets marker pytestmark = pytest.mark.websockets query1_str = """ query getContinents { continents { code name } } """ default_query_server_answer = ( '{"event":"phx_reply",' '"payload":' '{"response":' '{"data":{"continents":[' '{"code":"AF","name":"Africa"},{"code":"AN","name":"Antarctica"},' '{"code":"AS","name":"Asia"},{"code":"EU","name":"Europe"},' '{"code":"NA","name":"North America"},{"code":"OC","name":"Oceania"},' '{"code":"SA","name":"South America"}]}},' '"status":"ok"},' '"ref":2,' '"topic":"test_topic"}' ) @pytest.fixture def ws_server_helper(request): from .conftest import 
PhoenixChannelServerHelper yield PhoenixChannelServerHelper async def query_server(ws, path): from .conftest import PhoenixChannelServerHelper await PhoenixChannelServerHelper.send_connection_ack(ws) await ws.recv() await ws.send(default_query_server_answer) await PhoenixChannelServerHelper.send_close(ws) await ws.wait_closed() @pytest.mark.asyncio @pytest.mark.parametrize("server", [query_server], indirect=True) @pytest.mark.parametrize("query_str", [query1_str]) async def test_phoenix_channel_query(event_loop, server, query_str): from gql.transport.phoenix_channel_websockets import ( PhoenixChannelWebsocketsTransport, ) path = "/graphql" url = f"ws://{server.hostname}:{server.port}{path}" sample_transport = PhoenixChannelWebsocketsTransport( channel_name="test_channel", url=url ) query = gql(query_str) async with Client(transport=sample_transport) as session: result = await session.execute(query) print("Client received:", result) query2_str = """ subscription getContinents { continents { code name } } """ subscription_server_answer = ( '{"event":"phx_reply",' '"payload":' '{"response":' '{"subscriptionId":"test_subscription"},' '"status":"ok"},' '"ref":2,' '"topic":"test_topic"}' ) subscription_data_server_answer = ( '{"event":"subscription:data","payload":' '{"subscriptionId":"test_subscription","result":' '{"data":{"continents":[' '{"code":"AF","name":"Africa"},{"code":"AN","name":"Antarctica"},' '{"code":"AS","name":"Asia"},{"code":"EU","name":"Europe"},' '{"code":"NA","name":"North America"},{"code":"OC","name":"Oceania"},' '{"code":"SA","name":"South America"}]}}},' '"ref":null,' '"topic":"test_subscription"}' ) unsubscribe_server_answer = ( '{"event":"phx_reply",' '"payload":' '{"response":' '{"subscriptionId":"test_subscription"},' '"status":"ok"},' '"ref":3,' '"topic":"test_topic"}' ) async def subscription_server(ws, path): from .conftest import PhoenixChannelServerHelper await PhoenixChannelServerHelper.send_connection_ack(ws) await ws.recv() await 
ws.send(subscription_server_answer) await ws.send(subscription_data_server_answer) await ws.recv() await ws.send(unsubscribe_server_answer) # Unsubscribe will remove the listener # await PhoenixChannelServerHelper.send_close(ws) await ws.wait_closed() @pytest.mark.asyncio @pytest.mark.parametrize("server", [subscription_server], indirect=True) @pytest.mark.parametrize("query_str", [query2_str]) async def test_phoenix_channel_subscription(event_loop, server, query_str): from gql.transport.phoenix_channel_websockets import ( PhoenixChannelWebsocketsTransport, ) path = "/graphql" url = f"ws://{server.hostname}:{server.port}{path}" sample_transport = PhoenixChannelWebsocketsTransport( channel_name="test_channel", url=url ) first_result = None query = gql(query_str) async with Client(transport=sample_transport) as session: async for result in session.subscribe(query): first_result = result break print("Client received:", first_result) gql-3.6.0b2/tests/test_phoenix_channel_subscription.py000066400000000000000000000306401460703211500232520ustar00rootroot00000000000000import asyncio import json import sys import pytest from parse import search from gql import Client, gql # Marking all tests in this file with the websockets marker pytestmark = pytest.mark.websockets test_channel = "test_channel" test_subscription_id = "test_subscription" # A server should send this after receiving a 'phx_leave' request message. # 'query_id' should be the value of the 'ref' in the 'phx_leave' request. # With only one listener, the transport is closed automatically when # it exits a subscription, so this is not used in current tests. channel_leave_reply_template = ( "{{" '"topic":"{channel_name}",' '"event":"phx_reply",' '"payload":{{' '"response":{{}},' '"status":"ok"' "}}," '"ref":{query_id}' "}}" ) # A server should send this after sending the 'channel_leave_reply' # above, to confirm to the client that the channel was actually closed. 
# With only one listener, the transport is closed automatically when # it exits a subscription, so this is not used in current tests. channel_close_reply_template = ( "{{" '"topic":"{channel_name}",' '"event":"phx_close",' '"payload":{{}},' '"ref":null' "}}" ) # A server sends this when it receives a 'subscribe' request, # after creating a unique subscription id. 'query_id' should be the # value of the 'ref' in the 'subscribe' request. subscription_reply_template = ( "{{" '"topic":"{channel_name}",' '"event":"phx_reply",' '"payload":{{' '"response":{{' '"subscriptionId":"{subscription_id}"' "}}," '"status":"ok"' "}}," '"ref":{query_id}' "}}" ) countdown_data_template = ( "{{" '"topic":"{subscription_id}",' '"event":"subscription:data",' '"payload":{{' '"subscriptionId":"{subscription_id}",' '"result":{{' '"data":{{' '"countdown":{{' '"number":{number}' "}}" "}}" "}}" "}}," '"ref":null' "}}" ) async def server_countdown(ws, path): import websockets from .conftest import MS, PhoenixChannelServerHelper try: await PhoenixChannelServerHelper.send_connection_ack(ws) result = await ws.recv() json_result = json.loads(result) assert json_result["event"] == "doc" channel_name = json_result["topic"] query_id = json_result["ref"] payload = json_result["payload"] query = payload["query"] count_found = search("count: {:d}", query) count = count_found[0] print(f"Countdown started from: {count}") await ws.send( subscription_reply_template.format( subscription_id=test_subscription_id, channel_name=channel_name, query_id=query_id, ) ) async def counting_coro(): for number in range(count, -1, -1): await ws.send( countdown_data_template.format( subscription_id=test_subscription_id, number=number ) ) await asyncio.sleep(2 * MS) counting_task = asyncio.ensure_future(counting_coro()) async def stopping_coro(): nonlocal counting_task while True: result = await ws.recv() json_result = json.loads(result) if json_result["event"] == "unsubscribe": query_id = json_result["ref"] payload = 
json_result["payload"] subscription_id = payload["subscriptionId"] assert subscription_id == test_subscription_id print("Sending unsubscribe reply") await ws.send( subscription_reply_template.format( subscription_id=subscription_id, channel_name=channel_name, query_id=query_id, ) ) counting_task.cancel() stopping_task = asyncio.ensure_future(stopping_coro()) try: await counting_task except asyncio.CancelledError: print("Now counting task is cancelled") # Waiting for a clean stop try: await asyncio.wait_for(stopping_task, 3) except asyncio.CancelledError: print("Now stopping task is cancelled") except asyncio.TimeoutError: print("Now stopping task is in timeout") # await PhoenixChannelServerHelper.send_close(ws) except websockets.exceptions.ConnectionClosedOK: print("Connection closed") finally: await ws.wait_closed() countdown_subscription_str = """ subscription {{ countdown (count: {count}) {{ number }} }} """ @pytest.mark.asyncio @pytest.mark.parametrize("server", [server_countdown], indirect=True) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) @pytest.mark.parametrize("end_count", [0, 5]) async def test_phoenix_channel_subscription( event_loop, server, subscription_str, end_count ): """Parameterized test. :param end_count: Target count at which the test will 'break' to unsubscribe. 
""" import logging from gql.transport.phoenix_channel_websockets import ( PhoenixChannelWebsocketsTransport, ) from gql.transport.phoenix_channel_websockets import log as phoenix_logger from gql.transport.websockets import log as websockets_logger websockets_logger.setLevel(logging.DEBUG) phoenix_logger.setLevel(logging.DEBUG) path = "/graphql" url = f"ws://{server.hostname}:{server.port}{path}" sample_transport = PhoenixChannelWebsocketsTransport( channel_name=test_channel, url=url, close_timeout=5 ) count = 10 subscription = gql(subscription_str.format(count=count)) async with Client(transport=sample_transport) as session: async for result in session.subscribe(subscription): number = result["countdown"]["number"] print(f"Number received: {number}") assert number == count if number == end_count: # Note: we need to run generator.aclose() here or the finally block in # the subscribe will not be reached in pypy3 (python version 3.6.1) # In more recent versions, 'break' will trigger __aexit__. 
if sys.version_info < (3, 7): await session._generator.aclose() print("break") break count -= 1 assert count == end_count @pytest.mark.asyncio @pytest.mark.parametrize("server", [server_countdown], indirect=True) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_phoenix_channel_subscription_no_break( event_loop, server, subscription_str ): import logging from gql.transport.phoenix_channel_websockets import ( PhoenixChannelWebsocketsTransport, ) from gql.transport.phoenix_channel_websockets import log as phoenix_logger from gql.transport.websockets import log as websockets_logger from .conftest import MS websockets_logger.setLevel(logging.DEBUG) phoenix_logger.setLevel(logging.DEBUG) path = "/graphql" url = f"ws://{server.hostname}:{server.port}{path}" async def testing_stopping_without_break(): sample_transport = PhoenixChannelWebsocketsTransport( channel_name=test_channel, url=url, close_timeout=(5000 * MS) ) count = 10 subscription = gql(subscription_str.format(count=count)) async with Client(transport=sample_transport) as session: async for result in session.subscribe(subscription): number = result["countdown"]["number"] print(f"Number received: {number}") # Simulate a slow consumer if number == 10: await asyncio.sleep(50 * MS) if number == 9: # When we consume the number 9 here in the async generator, # all the 10 numbers have already been sent by the backend and # are present in the listener queue # we simulate here an unsubscribe message # In that case, all the 10 numbers should be consumed in the # generator and then the generator should be closed properly await session.transport._send_stop_message(2) assert number == count count -= 1 assert count == -1 try: await asyncio.wait_for(testing_stopping_without_break(), timeout=(5000 * MS)) except asyncio.TimeoutError: assert False, "The async generator did not stop" heartbeat_data_template = ( "{{" '"topic":"{subscription_id}",' '"event":"subscription:data",' '"payload":{{' 
'"subscriptionId":"{subscription_id}",' '"result":{{' '"data":{{' '"heartbeat":{{' '"heartbeat_count":{count}' "}}" "}}" "}}" "}}," '"ref":null' "}}" ) async def phoenix_heartbeat_server(ws, path): import websockets from .conftest import PhoenixChannelServerHelper try: await PhoenixChannelServerHelper.send_connection_ack(ws) result = await ws.recv() json_result = json.loads(result) assert json_result["event"] == "doc" channel_name = json_result["topic"] query_id = json_result["ref"] await ws.send( subscription_reply_template.format( subscription_id=test_subscription_id, channel_name=channel_name, query_id=query_id, ) ) async def heartbeat_coro(): i = 0 while True: heartbeat_result = await ws.recv() json_result = json.loads(heartbeat_result) if json_result["event"] == "heartbeat": await ws.send( heartbeat_data_template.format( subscription_id=test_subscription_id, count=i ) ) i = i + 1 elif json_result["event"] == "unsubscribe": query_id = json_result["ref"] payload = json_result["payload"] subscription_id = payload["subscriptionId"] assert subscription_id == test_subscription_id print("Sending unsubscribe reply") await ws.send( subscription_reply_template.format( subscription_id=subscription_id, channel_name=channel_name, query_id=query_id, ) ) await asyncio.wait_for(heartbeat_coro(), 60) # await PhoenixChannelServerHelper.send_close(ws) except websockets.exceptions.ConnectionClosedOK: print("Connection closed") finally: await ws.wait_closed() heartbeat_subscription_str = """ subscription { heartbeat { heartbeat_count } } """ @pytest.mark.asyncio @pytest.mark.parametrize("server", [phoenix_heartbeat_server], indirect=True) @pytest.mark.parametrize("subscription_str", [heartbeat_subscription_str]) async def test_phoenix_channel_heartbeat(event_loop, server, subscription_str): from gql.transport.phoenix_channel_websockets import ( PhoenixChannelWebsocketsTransport, ) path = "/graphql" url = f"ws://{server.hostname}:{server.port}{path}" sample_transport = 
PhoenixChannelWebsocketsTransport( channel_name=test_channel, url=url, heartbeat_interval=0.1 ) subscription = gql(heartbeat_subscription_str) async with Client(transport=sample_transport) as session: i = 0 async for result in session.subscribe(subscription): heartbeat_count = result["heartbeat"]["heartbeat_count"] print(f"Heartbeat count received: {heartbeat_count}") assert heartbeat_count == i if heartbeat_count == 5: # Note: we need to run generator.aclose() here or the finally block in # the subscribe will not be reached in pypy3 (python version 3.6.1) # In more recent versions, 'break' will trigger __aexit__. if sys.version_info < (3, 7): await session._generator.aclose() break i += 1 gql-3.6.0b2/tests/test_requests.py000066400000000000000000000715671460703211500171740ustar00rootroot00000000000000from typing import Mapping import pytest from gql import Client, gql from gql.transport.exceptions import ( TransportAlreadyConnected, TransportClosed, TransportProtocolError, TransportQueryError, TransportServerError, ) from .conftest import TemporaryFile, strip_braces_spaces # Marking all tests in this file with the requests marker pytestmark = pytest.mark.requests query1_str = """ query getContinents { continents { code name } } """ query1_server_answer = ( '{"data":{"continents":[' '{"code":"AF","name":"Africa"},{"code":"AN","name":"Antarctica"},' '{"code":"AS","name":"Asia"},{"code":"EU","name":"Europe"},' '{"code":"NA","name":"North America"},{"code":"OC","name":"Oceania"},' '{"code":"SA","name":"South America"}]}}' ) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_query(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport async def handler(request): return web.Response( text=query1_server_answer, content_type="application/json", headers={"dummy": "test1234"}, ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = 
server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url) with Client(transport=transport) as session: query = gql(query1_str) # Execute query synchronously result = session.execute(query) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" # Checking response headers are saved in the transport assert hasattr(transport, "response_headers") assert isinstance(transport.response_headers, Mapping) assert transport.response_headers["dummy"] == "test1234" await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_cookies(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport async def handler(request): assert "COOKIE" in request.headers assert "cookie1=val1" == request.headers["COOKIE"] return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url, cookies={"cookie1": "val1"}) with Client(transport=transport) as session: query = gql(query1_str) # Execute query synchronously result = session.execute(query) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_error_code_401(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport async def handler(request): # Will generate http error code 401 return web.Response( text='{"error":"Unauthorized","message":"401 Client Error: Unauthorized"}', content_type="application/json", status=401, ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def 
test_code(): transport = RequestsHTTPTransport(url=url) with Client(transport=transport) as session: query = gql(query1_str) with pytest.raises(TransportServerError) as exc_info: session.execute(query) assert "401 Client Error: Unauthorized" in str(exc_info.value) await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_error_code_429(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport async def handler(request): # Will generate http error code 429 return web.Response( text=""" Too Many Requests

Too Many Requests

I only allow 50 requests per hour to this Web site per logged in user. Try again soon.

""", content_type="text/html", status=429, headers={"Retry-After": "3600"}, ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url) with Client(transport=transport) as session: query = gql(query1_str) with pytest.raises(TransportServerError) as exc_info: session.execute(query) assert "429, message='Too Many Requests'" in str(exc_info.value) # Checking response headers are saved in the transport assert hasattr(transport, "response_headers") assert isinstance(transport.response_headers, Mapping) assert transport.response_headers["Retry-After"] == "3600" @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_error_code_500(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport async def handler(request): # Will generate http error code 500 raise Exception("Server error") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url) with Client(transport=transport) as session: query = gql(query1_str) with pytest.raises(TransportServerError): session.execute(query) await run_sync_test(event_loop, server, test_code) query1_server_error_answer = '{"errors": ["Error 1", "Error 2"]}' @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_error_code(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport async def handler(request): return web.Response( text=query1_server_error_answer, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url) with Client(transport=transport) as session: query = 
gql(query1_str) with pytest.raises(TransportQueryError): session.execute(query) await run_sync_test(event_loop, server, test_code) invalid_protocol_responses = [ "{}", "qlsjfqsdlkj", '{"not_data_or_errors": 35}', ] @pytest.mark.aiohttp @pytest.mark.asyncio @pytest.mark.parametrize("response", invalid_protocol_responses) async def test_requests_invalid_protocol( event_loop, aiohttp_server, response, run_sync_test ): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport async def handler(request): return web.Response(text=response, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url) with Client(transport=transport) as session: query = gql(query1_str) with pytest.raises(TransportProtocolError): session.execute(query) await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_cannot_connect_twice(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport async def handler(request): return web.Response(text=query1_server_answer, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url) with Client(transport=transport) as session: with pytest.raises(TransportAlreadyConnected): session.transport.connect() await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_cannot_execute_if_not_connected( event_loop, aiohttp_server, run_sync_test ): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport async def handler(request): return web.Response(text=query1_server_answer, content_type="application/json") app = 
web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url) query = gql(query1_str) with pytest.raises(TransportClosed): transport.execute(query) await run_sync_test(event_loop, server, test_code) query1_server_answer_with_extensions = ( '{"data":{"continents":[' '{"code":"AF","name":"Africa"},{"code":"AN","name":"Antarctica"},' '{"code":"AS","name":"Asia"},{"code":"EU","name":"Europe"},' '{"code":"NA","name":"North America"},{"code":"OC","name":"Oceania"},' '{"code":"SA","name":"South America"}]},' '"extensions": {"key1": "val1"}' "}" ) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_query_with_extensions( event_loop, aiohttp_server, run_sync_test ): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport async def handler(request): return web.Response( text=query1_server_answer_with_extensions, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url) with Client(transport=transport) as session: query = gql(query1_str) execution_result = session.execute(query, get_execution_result=True) assert execution_result.extensions["key1"] == "val1" await run_sync_test(event_loop, server, test_code) file_upload_server_answer = '{"data":{"success":true}}' file_upload_mutation_1 = """ mutation($file: Upload!) { uploadFile(input:{other_var:$other_var, file:$file}) { success } } """ file_upload_mutation_1_operations = ( '{"query": "mutation ($file: Upload!) 
{\\n uploadFile(input: {other_var: ' '$other_var, file: $file}) {\\n success\\n }\\n}", "variables": ' '{"file": null, "other_var": 42}}' ) file_upload_mutation_1_map = '{"0": ["variables.file"]}' file_1_content = """ This is a test file This file will be sent in the GraphQL mutation """ @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_file_upload(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport async def single_upload_handler(request): from aiohttp import web reader = await request.multipart() field_0 = await reader.next() assert field_0.name == "operations" field_0_text = await field_0.text() assert strip_braces_spaces(field_0_text) == file_upload_mutation_1_operations field_1 = await reader.next() assert field_1.name == "map" field_1_text = await field_1.text() assert field_1_text == file_upload_mutation_1_map field_2 = await reader.next() assert field_2.name == "0" field_2_text = await field_2.text() assert field_2_text == file_1_content field_3 = await reader.next() assert field_3 is None return web.Response( text=file_upload_server_answer, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", single_upload_handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url) with TemporaryFile(file_1_content) as test_file: with Client(transport=transport) as session: query = gql(file_upload_mutation_1) file_path = test_file.filename with open(file_path, "rb") as f: params = {"file": f, "other_var": 42} execution_result = session._execute( query, variable_values=params, upload_files=True ) assert execution_result.data["success"] await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_file_upload_with_content_type( event_loop, aiohttp_server, run_sync_test ): from aiohttp import web from gql.transport.requests 
import RequestsHTTPTransport async def single_upload_handler(request): from aiohttp import web reader = await request.multipart() field_0 = await reader.next() assert field_0.name == "operations" field_0_text = await field_0.text() assert strip_braces_spaces(field_0_text) == file_upload_mutation_1_operations field_1 = await reader.next() assert field_1.name == "map" field_1_text = await field_1.text() assert field_1_text == file_upload_mutation_1_map field_2 = await reader.next() assert field_2.name == "0" field_2_text = await field_2.text() assert field_2_text == file_1_content # Verifying the content_type assert field_2.headers["Content-Type"] == "application/pdf" field_3 = await reader.next() assert field_3 is None return web.Response( text=file_upload_server_answer, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", single_upload_handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url) with TemporaryFile(file_1_content) as test_file: with Client(transport=transport) as session: query = gql(file_upload_mutation_1) file_path = test_file.filename with open(file_path, "rb") as f: # Setting the content_type f.content_type = "application/pdf" params = {"file": f, "other_var": 42} execution_result = session._execute( query, variable_values=params, upload_files=True ) assert execution_result.data["success"] await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_file_upload_additional_headers( event_loop, aiohttp_server, run_sync_test ): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport async def single_upload_handler(request): from aiohttp import web assert request.headers["X-Auth"] == "foobar" reader = await request.multipart() field_0 = await reader.next() assert field_0.name == "operations" field_0_text = await field_0.text() assert 
strip_braces_spaces(field_0_text) == file_upload_mutation_1_operations field_1 = await reader.next() assert field_1.name == "map" field_1_text = await field_1.text() assert field_1_text == file_upload_mutation_1_map field_2 = await reader.next() assert field_2.name == "0" field_2_text = await field_2.text() assert field_2_text == file_1_content field_3 = await reader.next() assert field_3 is None return web.Response( text=file_upload_server_answer, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", single_upload_handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url, headers={"X-Auth": "foobar"}) with TemporaryFile(file_1_content) as test_file: with Client(transport=transport) as session: query = gql(file_upload_mutation_1) file_path = test_file.filename with open(file_path, "rb") as f: params = {"file": f, "other_var": 42} execution_result = session._execute( query, variable_values=params, upload_files=True ) assert execution_result.data["success"] await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_binary_file_upload(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport # This is a sample binary file content containing all possible byte values binary_file_content = bytes(range(0, 256)) async def binary_upload_handler(request): from aiohttp import web reader = await request.multipart() field_0 = await reader.next() assert field_0.name == "operations" field_0_text = await field_0.text() assert strip_braces_spaces(field_0_text) == file_upload_mutation_1_operations field_1 = await reader.next() assert field_1.name == "map" field_1_text = await field_1.text() assert field_1_text == file_upload_mutation_1_map field_2 = await reader.next() assert field_2.name == "0" field_2_binary = await field_2.read() assert 
field_2_binary == binary_file_content field_3 = await reader.next() assert field_3 is None return web.Response( text=file_upload_server_answer, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", binary_upload_handler) server = await aiohttp_server(app) url = server.make_url("/") transport = RequestsHTTPTransport(url=url) def test_code(): with TemporaryFile(binary_file_content) as test_file: with Client(transport=transport) as session: query = gql(file_upload_mutation_1) file_path = test_file.filename with open(file_path, "rb") as f: params = {"file": f, "other_var": 42} execution_result = session._execute( query, variable_values=params, upload_files=True ) assert execution_result.data["success"] await run_sync_test(event_loop, server, test_code) file_upload_mutation_2_operations = ( '{"query": "mutation ($file1: Upload!, $file2: Upload!) {\\n ' 'uploadFile(input: {file1: $file, file2: $file}) {\\n success\\n }\\n}", ' '"variables": {"file1": null, "file2": null}}' ) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_file_upload_two_files( event_loop, aiohttp_server, run_sync_test ): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport file_upload_mutation_2 = """ mutation($file1: Upload!, $file2: Upload!) 
{ uploadFile(input:{file1:$file, file2:$file}) { success } } """ file_upload_mutation_2_map = '{"0": ["variables.file1"], "1": ["variables.file2"]}' file_2_content = """ This is a second test file This file will also be sent in the GraphQL mutation """ async def handler(request): reader = await request.multipart() field_0 = await reader.next() assert field_0.name == "operations" field_0_text = await field_0.text() assert strip_braces_spaces(field_0_text) == file_upload_mutation_2_operations field_1 = await reader.next() assert field_1.name == "map" field_1_text = await field_1.text() assert field_1_text == file_upload_mutation_2_map field_2 = await reader.next() assert field_2.name == "0" field_2_text = await field_2.text() assert field_2_text == file_1_content field_3 = await reader.next() assert field_3.name == "1" field_3_text = await field_3.text() assert field_3_text == file_2_content field_4 = await reader.next() assert field_4 is None return web.Response( text=file_upload_server_answer, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url) with TemporaryFile(file_1_content) as test_file_1: with TemporaryFile(file_2_content) as test_file_2: with Client(transport=transport) as session: query = gql(file_upload_mutation_2) file_path_1 = test_file_1.filename file_path_2 = test_file_2.filename f1 = open(file_path_1, "rb") f2 = open(file_path_2, "rb") params = { "file1": f1, "file2": f2, } execution_result = session._execute( query, variable_values=params, upload_files=True ) assert execution_result.data["success"] f1.close() f2.close() await run_sync_test(event_loop, server, test_code) file_upload_mutation_3_operations = ( '{"query": "mutation ($files: [Upload!]!) 
{\\n uploadFiles' "(input: {files: $files})" ' {\\n success\\n }\\n}", "variables": {"files": [null, null]}}' ) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_file_upload_list_of_two_files( event_loop, aiohttp_server, run_sync_test ): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport file_upload_mutation_3 = """ mutation($files: [Upload!]!) { uploadFiles(input:{files:$files}) { success } } """ file_upload_mutation_3_map = ( '{"0": ["variables.files.0"], "1": ["variables.files.1"]}' ) file_2_content = """ This is a second test file This file will also be sent in the GraphQL mutation """ async def handler(request): reader = await request.multipart() field_0 = await reader.next() assert field_0.name == "operations" field_0_text = await field_0.text() assert strip_braces_spaces(field_0_text) == file_upload_mutation_3_operations field_1 = await reader.next() assert field_1.name == "map" field_1_text = await field_1.text() assert field_1_text == file_upload_mutation_3_map field_2 = await reader.next() assert field_2.name == "0" field_2_text = await field_2.text() assert field_2_text == file_1_content field_3 = await reader.next() assert field_3.name == "1" field_3_text = await field_3.text() assert field_3_text == file_2_content field_4 = await reader.next() assert field_4 is None return web.Response( text=file_upload_server_answer, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url) with TemporaryFile(file_1_content) as test_file_1: with TemporaryFile(file_2_content) as test_file_2: with Client(transport=transport) as session: query = gql(file_upload_mutation_3) file_path_1 = test_file_1.filename file_path_2 = test_file_2.filename f1 = open(file_path_1, "rb") f2 = open(file_path_2, "rb") params = {"files": [f1, f2]} execution_result = session._execute( 
query, variable_values=params, upload_files=True ) assert execution_result.data["success"] f1.close() f2.close() await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_error_fetching_schema( event_loop, aiohttp_server, run_sync_test ): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport error_answer = """ { "errors": [ { "errorType": "UnauthorizedException", "message": "Permission denied" } ] } """ async def handler(request): return web.Response( text=error_answer, content_type="application/json", ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url) with pytest.raises(TransportQueryError) as exc_info: with Client(transport=transport, fetch_schema_from_transport=True): pass expected_error = ( "Error while fetching schema: " "{'errorType': 'UnauthorizedException', 'message': 'Permission denied'}" ) assert expected_error in str(exc_info.value) assert transport.session is None await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_json_serializer( event_loop, aiohttp_server, run_sync_test, caplog ): import json from aiohttp import web from gql.transport.requests import RequestsHTTPTransport async def handler(request): request_text = await request.text() print("Received on backend: " + request_text) return web.Response( text=query1_server_answer, content_type="application/json", ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport( url=url, json_serialize=lambda e: json.dumps(e, separators=(",", ":")), ) with Client(transport=transport) as session: query = gql(query1_str) # Execute query asynchronously result = session.execute(query) continents = 
result["continents"] africa = continents[0] assert africa["code"] == "AF" # Checking that there is no space after the colon in the log expected_log = '"query":"query getContinents' assert expected_log in caplog.text await run_sync_test(event_loop, server, test_code) query_float_str = """ query getPi { pi } """ query_float_server_answer_data = '{"pi": 3.141592653589793238462643383279502884197}' query_float_server_answer = f'{{"data":{query_float_server_answer_data}}}' @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_json_deserializer(event_loop, aiohttp_server, run_sync_test): import json from aiohttp import web from decimal import Decimal from functools import partial from gql.transport.requests import RequestsHTTPTransport async def handler(request): return web.Response( text=query_float_server_answer, content_type="application/json", ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): json_loads = partial(json.loads, parse_float=Decimal) transport = RequestsHTTPTransport( url=url, json_deserialize=json_loads, ) with Client(transport=transport) as session: query = gql(query_float_str) # Execute query asynchronously result = session.execute(query) pi = result["pi"] assert pi == Decimal("3.141592653589793238462643383279502884197") await run_sync_test(event_loop, server, test_code) gql-3.6.0b2/tests/test_requests_batch.py000066400000000000000000000453401460703211500203230ustar00rootroot00000000000000from typing import Mapping import pytest from gql import Client, GraphQLRequest, gql from gql.transport.exceptions import ( TransportClosed, TransportProtocolError, TransportQueryError, TransportServerError, ) # Marking all tests in this file with the requests marker pytestmark = pytest.mark.requests query1_str = """ query getContinents { continents { code name } } """ query1_server_answer_list = ( '[{"data":{"continents":[' 
'{"code":"AF","name":"Africa"},{"code":"AN","name":"Antarctica"},' '{"code":"AS","name":"Asia"},{"code":"EU","name":"Europe"},' '{"code":"NA","name":"North America"},{"code":"OC","name":"Oceania"},' '{"code":"SA","name":"South America"}]}}]' ) query1_server_answer_twice_list = ( "[" '{"data":{"continents":[' '{"code":"AF","name":"Africa"},{"code":"AN","name":"Antarctica"},' '{"code":"AS","name":"Asia"},{"code":"EU","name":"Europe"},' '{"code":"NA","name":"North America"},{"code":"OC","name":"Oceania"},' '{"code":"SA","name":"South America"}]}},' '{"data":{"continents":[' '{"code":"AF","name":"Africa"},{"code":"AN","name":"Antarctica"},' '{"code":"AS","name":"Asia"},{"code":"EU","name":"Europe"},' '{"code":"NA","name":"North America"},{"code":"OC","name":"Oceania"},' '{"code":"SA","name":"South America"}]}}' "]" ) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_query(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport async def handler(request): return web.Response( text=query1_server_answer_list, content_type="application/json", headers={"dummy": "test1234"}, ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url) with Client(transport=transport) as session: query = [GraphQLRequest(document=gql(query1_str))] # Execute query synchronously results = session.execute_batch(query) continents = results[0]["continents"] africa = continents[0] assert africa["code"] == "AF" # Checking response headers are saved in the transport assert hasattr(transport, "response_headers") assert isinstance(transport.response_headers, Mapping) assert transport.response_headers["dummy"] == "test1234" await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_query_auto_batch_enabled( event_loop, aiohttp_server, 
run_sync_test ): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport async def handler(request): return web.Response( text=query1_server_answer_list, content_type="application/json", headers={"dummy": "test1234"}, ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url) with Client( transport=transport, batch_interval=0.01, ) as session: query = gql(query1_str) # Execute query synchronously result = session.execute(query) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" # Checking response headers are saved in the transport assert hasattr(transport, "response_headers") assert isinstance(transport.response_headers, Mapping) assert transport.response_headers["dummy"] == "test1234" await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_query_auto_batch_enabled_two_requests( event_loop, aiohttp_server, run_sync_test ): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport from threading import Thread async def handler(request): return web.Response( text=query1_server_answer_twice_list, content_type="application/json", headers={"dummy": "test1234"}, ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url) threads = [] with Client( transport=transport, batch_interval=0.01, ) as session: def test_thread(): query = gql(query1_str) # Execute query synchronously result = session.execute(query) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" # Checking response headers are saved in the transport assert hasattr(transport, "response_headers") assert isinstance(transport.response_headers, Mapping) assert 
transport.response_headers["dummy"] == "test1234" for _ in range(2): thread = Thread(target=test_thread) thread.start() threads.append(thread) for thread in threads: thread.join() await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_cookies(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport async def handler(request): assert "COOKIE" in request.headers assert "cookie1=val1" == request.headers["COOKIE"] return web.Response( text=query1_server_answer_list, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url, cookies={"cookie1": "val1"}) with Client(transport=transport) as session: query = [GraphQLRequest(document=gql(query1_str))] # Execute query synchronously results = session.execute_batch(query) continents = results[0]["continents"] africa = continents[0] assert africa["code"] == "AF" await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_error_code_401(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport async def handler(request): # Will generate http error code 401 return web.Response( text='{"error":"Unauthorized","message":"401 Client Error: Unauthorized"}', content_type="application/json", status=401, ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url) with Client(transport=transport) as session: query = [GraphQLRequest(document=gql(query1_str))] with pytest.raises(TransportServerError) as exc_info: session.execute_batch(query) assert "401 Client Error: Unauthorized" in 
str(exc_info.value) await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_error_code_401_auto_batch_enabled( event_loop, aiohttp_server, run_sync_test ): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport async def handler(request): # Will generate http error code 401 return web.Response( text='{"error":"Unauthorized","message":"401 Client Error: Unauthorized"}', content_type="application/json", status=401, ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url) with Client( transport=transport, batch_interval=0.01, ) as session: query = gql(query1_str) with pytest.raises(TransportServerError) as exc_info: session.execute(query) assert "401 Client Error: Unauthorized" in str(exc_info.value) await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_error_code_429(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport async def handler(request): # Will generate http error code 429 return web.Response( text=""" Too Many Requests

Too Many Requests

I only allow 50 requests per hour to this Web site per logged in user. Try again soon.

""", content_type="text/html", status=429, headers={"Retry-After": "3600"}, ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url) with Client(transport=transport) as session: query = [GraphQLRequest(document=gql(query1_str))] with pytest.raises(TransportServerError) as exc_info: session.execute_batch(query) assert "429, message='Too Many Requests'" in str(exc_info.value) # Checking response headers are saved in the transport assert hasattr(transport, "response_headers") assert isinstance(transport.response_headers, Mapping) assert transport.response_headers["Retry-After"] == "3600" @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_error_code_500(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport async def handler(request): # Will generate http error code 500 raise Exception("Server error") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url) with Client(transport=transport) as session: query = [GraphQLRequest(document=gql(query1_str))] with pytest.raises(TransportServerError): session.execute_batch(query) await run_sync_test(event_loop, server, test_code) query1_server_error_answer_list = '[{"errors": ["Error 1", "Error 2"]}]' @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_error_code(event_loop, aiohttp_server, run_sync_test): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport async def handler(request): return web.Response( text=query1_server_error_answer_list, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = 
RequestsHTTPTransport(url=url) with Client(transport=transport) as session: query = [GraphQLRequest(document=gql(query1_str))] with pytest.raises(TransportQueryError): session.execute_batch(query) await run_sync_test(event_loop, server, test_code) invalid_protocol_responses = [ "{}", "qlsjfqsdlkj", '{"not_data_or_errors": 35}', "[{}]", "[qlsjfqsdlkj]", '[{"not_data_or_errors": 35}]', "[]", "[1]", ] @pytest.mark.aiohttp @pytest.mark.asyncio @pytest.mark.parametrize("response", invalid_protocol_responses) async def test_requests_invalid_protocol( event_loop, aiohttp_server, response, run_sync_test ): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport async def handler(request): return web.Response(text=response, content_type="application/json") app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url) with Client(transport=transport) as session: query = [GraphQLRequest(document=gql(query1_str))] with pytest.raises(TransportProtocolError): session.execute_batch(query) await run_sync_test(event_loop, server, test_code) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_cannot_execute_if_not_connected( event_loop, aiohttp_server, run_sync_test ): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport async def handler(request): return web.Response( text=query1_server_answer_list, content_type="application/json" ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url) query = [GraphQLRequest(document=gql(query1_str))] with pytest.raises(TransportClosed): transport.execute_batch(query) await run_sync_test(event_loop, server, test_code) query1_server_answer_with_extensions_list = ( '[{"data":{"continents":[' 
'{"code":"AF","name":"Africa"},{"code":"AN","name":"Antarctica"},' '{"code":"AS","name":"Asia"},{"code":"EU","name":"Europe"},' '{"code":"NA","name":"North America"},{"code":"OC","name":"Oceania"},' '{"code":"SA","name":"South America"}]},' '"extensions": {"key1": "val1"}' "}]" ) @pytest.mark.aiohttp @pytest.mark.asyncio async def test_requests_query_with_extensions( event_loop, aiohttp_server, run_sync_test ): from aiohttp import web from gql.transport.requests import RequestsHTTPTransport async def handler(request): return web.Response( text=query1_server_answer_with_extensions_list, content_type="application/json", ) app = web.Application() app.router.add_route("POST", "/", handler) server = await aiohttp_server(app) url = server.make_url("/") def test_code(): transport = RequestsHTTPTransport(url=url) with Client(transport=transport) as session: query = [GraphQLRequest(document=gql(query1_str))] execution_results = session.execute_batch(query, get_execution_result=True) assert execution_results[0].extensions["key1"] == "val1" await run_sync_test(event_loop, server, test_code) ONLINE_URL = "https://countries.trevorblades.com/" skip_reason = "backend does not support batching anymore..." @pytest.mark.online @pytest.mark.requests @pytest.mark.skip(reason=skip_reason) def test_requests_sync_batch_auto(): from threading import Thread from gql.transport.requests import RequestsHTTPTransport client = Client( transport=RequestsHTTPTransport(url=ONLINE_URL), batch_interval=0.01, batch_max=3, ) query = gql( """ query getContinentName($continent_code: ID!) 
{ continent(code: $continent_code) { name } } """ ) def get_continent_name(session, continent_code): variables = { "continent_code": continent_code, } result = session.execute(query, variable_values=variables) name = result["continent"]["name"] print(f"The continent with the code {continent_code} has the name: '{name}'") continent_codes = ["EU", "AF", "NA", "OC", "SA", "AS", "AN"] with client as session: for continent_code in continent_codes: thread = Thread( target=get_continent_name, args=( session, continent_code, ), ) thread.start() thread.join() # Doing it twice to check that everything is closing and reconnecting correctly with client as session: for continent_code in continent_codes: thread = Thread( target=get_continent_name, args=( session, continent_code, ), ) thread.start() thread.join() @pytest.mark.online @pytest.mark.requests @pytest.mark.skip(reason=skip_reason) def test_requests_sync_batch_auto_execute_future(): from gql.transport.requests import RequestsHTTPTransport client = Client( transport=RequestsHTTPTransport(url=ONLINE_URL), batch_interval=0.01, batch_max=3, ) query = gql( """ query getContinentName($continent_code: ID!) { continent(code: $continent_code) { name } } """ ) with client as session: request_eu = GraphQLRequest(query, variable_values={"continent_code": "EU"}) future_result_eu = session._execute_future(request_eu) request_af = GraphQLRequest(query, variable_values={"continent_code": "AF"}) future_result_af = session._execute_future(request_af) result_eu = future_result_eu.result().data result_af = future_result_af.result().data assert result_eu["continent"]["name"] == "Europe" assert result_af["continent"]["name"] == "Africa" @pytest.mark.online @pytest.mark.requests @pytest.mark.skip(reason=skip_reason) def test_requests_sync_batch_manual(): from gql.transport.requests import RequestsHTTPTransport client = Client( transport=RequestsHTTPTransport(url=ONLINE_URL), ) query = gql( """ query getContinentName($continent_code: ID!) 
{ continent(code: $continent_code) { name } } """ ) with client as session: request_eu = GraphQLRequest(query, variable_values={"continent_code": "EU"}) request_af = GraphQLRequest(query, variable_values={"continent_code": "AF"}) result_eu, result_af = session.execute_batch([request_eu, request_af]) assert result_eu["continent"]["name"] == "Europe" assert result_af["continent"]["name"] == "Africa" gql-3.6.0b2/tests/test_transport.py000066400000000000000000000066751460703211500173530ustar00rootroot00000000000000import os import pytest from gql import Client, gql # We serve https://github.com/graphql-python/swapi-graphene locally: URL = "http://127.0.0.1:8000/graphql" # Marking all tests in this file with the requests marker pytestmark = pytest.mark.requests def use_cassette(name): import vcr query_vcr = vcr.VCR( cassette_library_dir=os.path.join( os.path.dirname(__file__), "fixtures", "vcr_cassettes" ), record_mode="new_episodes", match_on=["uri", "method", "body"], ) return query_vcr.use_cassette(name + ".yaml") @pytest.fixture def client(): import requests from gql.transport.requests import RequestsHTTPTransport with use_cassette("client"): response = requests.get( URL, headers={"Host": "swapi.graphene-python.org", "Accept": "text/html"} ) response.raise_for_status() csrf = response.cookies["csrftoken"] return Client( transport=RequestsHTTPTransport( url=URL, cookies={"csrftoken": csrf}, headers={"x-csrftoken": csrf} ), fetch_schema_from_transport=True, ) def test_hero_name_query(client): query = gql( """ { myFavoriteFilm: film(id:"RmlsbToz") { id title episodeId characters(first:5) { edges { node { name } } } } } """ ) expected = { "myFavoriteFilm": { "id": "RmlsbToz", "title": "Return of the Jedi", "episodeId": 6, "characters": { "edges": [ {"node": {"name": "Luke Skywalker"}}, {"node": {"name": "C-3PO"}}, {"node": {"name": "R2-D2"}}, {"node": {"name": "Darth Vader"}}, {"node": {"name": "Leia Organa"}}, ] }, } } with use_cassette("queries"): result = 
client.execute(query) assert result == expected def test_query_with_variable(client): query = gql( """ query Planet($id: ID!) { planet(id: $id) { id name } } """ ) expected = {"planet": {"id": "UGxhbmV0OjEw", "name": "Kamino"}} with use_cassette("queries"): result = client.execute(query, variable_values={"id": "UGxhbmV0OjEw"}) assert result == expected def test_named_query(client): query = gql( """ query Planet1 { planet(id: "UGxhbmV0OjEw") { id name } } query Planet2 { planet(id: "UGxhbmV0OjEx") { id name } } """ ) expected = {"planet": {"id": "UGxhbmV0OjEx", "name": "Geonosis"}} with use_cassette("queries"): result = client.execute(query, operation_name="Planet2") assert result == expected def test_header_query(client): query = gql( """ query Planet($id: ID!) { planet(id: $id) { id name } } """ ) expected = {"planet": {"id": "UGxhbmV0OjEx", "name": "Geonosis"}} with use_cassette("queries"): result = client.execute( query, extra_args={"headers": {"authorization": "xxx-123"}} ) assert result == expected gql-3.6.0b2/tests/test_transport_batch.py000066400000000000000000000073741460703211500205110ustar00rootroot00000000000000import os import pytest from gql import Client, GraphQLRequest, gql # We serve https://github.com/graphql-python/swapi-graphene locally: URL = "http://127.0.0.1:8000/graphql" # Marking all tests in this file with the requests marker pytestmark = pytest.mark.requests def use_cassette(name): import vcr query_vcr = vcr.VCR( cassette_library_dir=os.path.join( os.path.dirname(__file__), "fixtures", "vcr_cassettes" ), record_mode="new_episodes", match_on=["uri", "method", "body"], ) return query_vcr.use_cassette(name + ".yaml") @pytest.fixture def client(): import requests from gql.transport.requests import RequestsHTTPTransport with use_cassette("client"): response = requests.get( URL, headers={"Host": "swapi.graphene-python.org", "Accept": "text/html"} ) response.raise_for_status() csrf = response.cookies["csrftoken"] return Client( 
transport=RequestsHTTPTransport( url=URL, cookies={"csrftoken": csrf}, headers={"x-csrftoken": csrf} ), fetch_schema_from_transport=True, ) def test_hero_name_query(client): query = gql( """ { myFavoriteFilm: film(id:"RmlsbToz") { id title episodeId characters(first:5) { edges { node { name } } } } } """ ) expected = [ { "myFavoriteFilm": { "id": "RmlsbToz", "title": "Return of the Jedi", "episodeId": 6, "characters": { "edges": [ {"node": {"name": "Luke Skywalker"}}, {"node": {"name": "C-3PO"}}, {"node": {"name": "R2-D2"}}, {"node": {"name": "Darth Vader"}}, {"node": {"name": "Leia Organa"}}, ] }, } } ] with use_cassette("queries_batch"): results = client.execute_batch([GraphQLRequest(document=query)]) assert results == expected def test_query_with_variable(client): query = gql( """ query Planet($id: ID!) { planet(id: $id) { id name } } """ ) expected = [{"planet": {"id": "UGxhbmV0OjEw", "name": "Kamino"}}] with use_cassette("queries_batch"): results = client.execute_batch( [GraphQLRequest(document=query, variable_values={"id": "UGxhbmV0OjEw"})] ) assert results == expected def test_named_query(client): query = gql( """ query Planet1 { planet(id: "UGxhbmV0OjEw") { id name } } query Planet2 { planet(id: "UGxhbmV0OjEx") { id name } } """ ) expected = [{"planet": {"id": "UGxhbmV0OjEx", "name": "Geonosis"}}] with use_cassette("queries_batch"): results = client.execute_batch( [GraphQLRequest(document=query, operation_name="Planet2")] ) assert results == expected def test_header_query(client): query = gql( """ query Planet($id: ID!) 
{ planet(id: $id) { id name } } """ ) expected = [{"planet": {"id": "UGxhbmV0OjEx", "name": "Geonosis"}}] with use_cassette("queries_batch"): results = client.execute_batch( [GraphQLRequest(document=query)], extra_args={"headers": {"authorization": "xxx-123"}}, ) assert results == expected gql-3.6.0b2/tests/test_websocket_exceptions.py000066400000000000000000000262121460703211500215330ustar00rootroot00000000000000import asyncio import json import types from typing import List import pytest from gql import Client, gql from gql.transport.exceptions import ( TransportAlreadyConnected, TransportClosed, TransportProtocolError, TransportQueryError, ) from .conftest import MS, WebSocketServerHelper # Marking all tests in this file with the websockets marker pytestmark = pytest.mark.websockets invalid_query_str = """ query getContinents { continents { code bloh } } """ invalid_query1_server_answer = ( '{{"type":"data","id":"{query_id}",' '"payload":{{"errors":[' '{{"message":"Cannot query field \\"bloh\\" on type \\"Continent\\".",' '"locations":[{{"line":4,"column":5}}],' '"extensions":{{"code":"INTERNAL_SERVER_ERROR"}}}}]}}}}' ) invalid_query1_server = [invalid_query1_server_answer] @pytest.mark.asyncio @pytest.mark.parametrize("server", [invalid_query1_server], indirect=True) @pytest.mark.parametrize("query_str", [invalid_query_str]) async def test_websocket_invalid_query(event_loop, client_and_server, query_str): session, server = client_and_server query = gql(query_str) with pytest.raises(TransportQueryError) as exc_info: await session.execute(query) exception = exc_info.value assert isinstance(exception.errors, List) error = exception.errors[0] assert error["extensions"]["code"] == "INTERNAL_SERVER_ERROR" invalid_subscription_str = """ subscription getContinents { continents { code bloh } } """ async def server_invalid_subscription(ws, path): await WebSocketServerHelper.send_connection_ack(ws) await ws.recv() await 
ws.send(invalid_query1_server_answer.format(query_id=1)) await WebSocketServerHelper.send_complete(ws, 1) await ws.wait_closed() @pytest.mark.asyncio @pytest.mark.parametrize("server", [server_invalid_subscription], indirect=True) @pytest.mark.parametrize("query_str", [invalid_subscription_str]) async def test_websocket_invalid_subscription(event_loop, client_and_server, query_str): session, server = client_and_server query = gql(query_str) with pytest.raises(TransportQueryError) as exc_info: async for result in session.subscribe(query): pass exception = exc_info.value assert isinstance(exception.errors, List) error = exception.errors[0] assert error["extensions"]["code"] == "INTERNAL_SERVER_ERROR" connection_error_server_answer = ( '{"type":"connection_error","id":null,' '"payload":{"message":"Unexpected token Q in JSON at position 0"}}' ) async def server_no_ack(ws, path): await ws.wait_closed() @pytest.mark.asyncio @pytest.mark.parametrize("server", [server_no_ack], indirect=True) @pytest.mark.parametrize("query_str", [invalid_query_str]) async def test_websocket_server_does_not_send_ack(event_loop, server, query_str): from gql.transport.websockets import WebsocketsTransport url = f"ws://{server.hostname}:{server.port}/graphql" sample_transport = WebsocketsTransport(url=url, ack_timeout=1) with pytest.raises(asyncio.TimeoutError): async with Client(transport=sample_transport): pass async def server_connection_error(ws, path): await WebSocketServerHelper.send_connection_ack(ws) result = await ws.recv() print(f"Server received: {result}") await ws.send(connection_error_server_answer) await ws.wait_closed() @pytest.mark.asyncio @pytest.mark.parametrize("server", [server_connection_error], indirect=True) @pytest.mark.parametrize("query_str", [invalid_query_str]) async def test_websocket_sending_invalid_data(event_loop, client_and_server, query_str): session, server = client_and_server invalid_data = "QSDF" print(f">>> {invalid_data}") await 
session.transport.websocket.send(invalid_data) await asyncio.sleep(2 * MS) invalid_payload_server_answer = ( '{"type":"error","id":"1","payload":{"message":"Must provide document"}}' ) async def server_invalid_payload(ws, path): await WebSocketServerHelper.send_connection_ack(ws) result = await ws.recv() print(f"Server received: {result}") await ws.send(invalid_payload_server_answer) await WebSocketServerHelper.wait_connection_terminate(ws) await ws.wait_closed() @pytest.mark.asyncio @pytest.mark.parametrize("server", [server_invalid_payload], indirect=True) @pytest.mark.parametrize("query_str", [invalid_query_str]) async def test_websocket_sending_invalid_payload( event_loop, client_and_server, query_str ): session, server = client_and_server # Monkey patching the _send_query method to send an invalid payload async def monkey_patch_send_query( self, document, variable_values=None, operation_name=None, ) -> int: query_id = self.next_query_id self.next_query_id += 1 query_str = json.dumps( {"id": str(query_id), "type": "start", "payload": "BLAHBLAH"} ) await self._send(query_str) return query_id session.transport._send_query = types.MethodType( monkey_patch_send_query, session.transport ) query = gql(query_str) with pytest.raises(TransportQueryError) as exc_info: await session.execute(query) exception = exc_info.value assert isinstance(exception.errors, List) error = exception.errors[0] assert error["message"] == "Must provide document" not_json_answer = ["BLAHBLAH"] missing_type_answer = ["{}"] missing_id_answer_1 = ['{"type": "data"}'] missing_id_answer_2 = ['{"type": "error"}'] missing_id_answer_3 = ['{"type": "complete"}'] data_without_payload = ['{"type": "data", "id":"1"}'] error_without_payload = ['{"type": "error", "id":"1"}'] payload_is_not_a_dict = ['{"type": "data", "id":"1", "payload": "BLAH"}'] empty_payload = ['{"type": "data", "id":"1", "payload": {}}'] sending_bytes = [b"\x01\x02\x03"] @pytest.mark.asyncio @pytest.mark.parametrize( "server", [ 
not_json_answer, missing_type_answer, missing_id_answer_1, missing_id_answer_2, missing_id_answer_3, data_without_payload, error_without_payload, payload_is_not_a_dict, empty_payload, sending_bytes, ], indirect=True, ) async def test_websocket_transport_protocol_errors(event_loop, client_and_server): session, server = client_and_server query = gql("query { hello }") with pytest.raises(TransportProtocolError): await session.execute(query) async def server_without_ack(ws, path): # Sending something else than an ack await WebSocketServerHelper.send_complete(ws, 1) await ws.wait_closed() @pytest.mark.asyncio @pytest.mark.parametrize("server", [server_without_ack], indirect=True) async def test_websocket_server_does_not_ack(event_loop, server): from gql.transport.websockets import WebsocketsTransport url = f"ws://{server.hostname}:{server.port}/graphql" print(f"url = {url}") sample_transport = WebsocketsTransport(url=url) with pytest.raises(TransportProtocolError): async with Client(transport=sample_transport): pass async def server_closing_directly(ws, path): await ws.close() @pytest.mark.asyncio @pytest.mark.parametrize("server", [server_closing_directly], indirect=True) async def test_websocket_server_closing_directly(event_loop, server): import websockets from gql.transport.websockets import WebsocketsTransport url = f"ws://{server.hostname}:{server.port}/graphql" print(f"url = {url}") sample_transport = WebsocketsTransport(url=url) with pytest.raises(websockets.exceptions.ConnectionClosed): async with Client(transport=sample_transport): pass async def server_closing_after_ack(ws, path): await WebSocketServerHelper.send_connection_ack(ws) await ws.close() @pytest.mark.asyncio @pytest.mark.parametrize("server", [server_closing_after_ack], indirect=True) async def test_websocket_server_closing_after_ack(event_loop, client_and_server): import websockets session, server = client_and_server query = gql("query { hello }") with 
pytest.raises(websockets.exceptions.ConnectionClosed): await session.execute(query) await session.transport.wait_closed() with pytest.raises(TransportClosed): await session.execute(query) async def server_sending_invalid_query_errors(ws, path): await WebSocketServerHelper.send_connection_ack(ws) invalid_error = ( '{"type":"error","id":"404","payload":' '{"message":"error for no good reason on non existing query"}}' ) await ws.send(invalid_error) await ws.wait_closed() @pytest.mark.asyncio @pytest.mark.parametrize("server", [server_sending_invalid_query_errors], indirect=True) async def test_websocket_server_sending_invalid_query_errors(event_loop, server): from gql.transport.websockets import WebsocketsTransport url = f"ws://{server.hostname}:{server.port}/graphql" print(f"url = {url}") sample_transport = WebsocketsTransport(url=url) # Invalid server message is ignored async with Client(transport=sample_transport): await asyncio.sleep(2 * MS) @pytest.mark.asyncio @pytest.mark.parametrize("server", [server_sending_invalid_query_errors], indirect=True) async def test_websocket_non_regression_bug_105(event_loop, server): from gql.transport.websockets import WebsocketsTransport # This test will check a fix to a race condition which happens if the user is trying # to connect using the same client twice at the same time # See bug #105 url = f"ws://{server.hostname}:{server.port}/graphql" print(f"url = {url}") sample_transport = WebsocketsTransport(url=url) client = Client(transport=sample_transport) # Create a coroutine which start the connection with the transport but does nothing async def client_connect(client): async with client: await asyncio.sleep(2 * MS) # Create two tasks which will try to connect using the same client (not allowed) connect_task1 = asyncio.ensure_future(client_connect(client)) connect_task2 = asyncio.ensure_future(client_connect(client)) with pytest.raises(TransportAlreadyConnected): await asyncio.gather(connect_task1, connect_task2) 
@pytest.mark.asyncio @pytest.mark.parametrize("server", [invalid_query1_server], indirect=True) async def test_websocket_using_cli_invalid_query( event_loop, server, monkeypatch, capsys ): url = f"ws://{server.hostname}:{server.port}/graphql" print(f"url = {url}") from gql.cli import main, get_parser import io parser = get_parser(with_examples=True) args = parser.parse_args([url]) # Monkeypatching sys.stdin to simulate getting the query # via the standard input monkeypatch.setattr("sys.stdin", io.StringIO(invalid_query_str)) # Flush captured output captured = capsys.readouterr() await main(args) # Check that the error has been printed on stdout captured = capsys.readouterr() captured_err = str(captured.err).strip() print(f"Captured: {captured_err}") expected_error = 'Cannot query field "bloh" on type "Continent"' assert expected_error in captured_err gql-3.6.0b2/tests/test_websocket_online.py000066400000000000000000000147011460703211500206360ustar00rootroot00000000000000import asyncio import logging import sys from typing import Dict import pytest from gql import Client, gql from gql.transport.exceptions import TransportError, TransportQueryError from .conftest import MS # Marking all tests in this file with the websockets marker pytestmark = pytest.mark.websockets logging.basicConfig(level=logging.INFO) skip_reason = ( "backend does not support websockets anymore: " "https://github.com/trevorblades/countries/issues/42" ) @pytest.mark.online @pytest.mark.skip(reason=skip_reason) @pytest.mark.asyncio async def test_websocket_simple_query(): from gql.transport.websockets import WebsocketsTransport # Get Websockets transport sample_transport = WebsocketsTransport( url="wss://countries.trevorblades.com/graphql" ) # Instanciate client async with Client(transport=sample_transport) as session: query = gql( """ query getContinents { continents { code name } } """ ) # Fetch schema await session.fetch_schema() # Execute query result = await session.execute(query) # Verify 
result assert isinstance(result, Dict) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" @pytest.mark.online @pytest.mark.skip(reason=skip_reason) @pytest.mark.asyncio async def test_websocket_invalid_query(): from gql.transport.websockets import WebsocketsTransport # Get Websockets transport sample_transport = WebsocketsTransport( url="wss://countries.trevorblades.com/graphql", ssl=True ) # Instanciate client async with Client(transport=sample_transport) as session: query = gql( """ query getContinents { continents { code bloh } } """ ) # Execute query with pytest.raises(TransportQueryError): await session.execute(query) @pytest.mark.online @pytest.mark.skip(reason=skip_reason) @pytest.mark.asyncio async def test_websocket_sending_invalid_data(): from gql.transport.websockets import WebsocketsTransport # Get Websockets transport sample_transport = WebsocketsTransport( url="wss://countries.trevorblades.com/graphql", ssl=True ) # Instanciate client async with Client(transport=sample_transport) as session: query = gql( """ query getContinents { continents { code } } """ ) # Execute query result = await session.execute(query) print(f"result = {result!r}") invalid_data = "QSDF" print(f">>> {invalid_data}") await sample_transport.websocket.send(invalid_data) await asyncio.sleep(2) @pytest.mark.online @pytest.mark.skip(reason=skip_reason) @pytest.mark.asyncio async def test_websocket_sending_invalid_payload(): from gql.transport.websockets import WebsocketsTransport # Get Websockets transport sample_transport = WebsocketsTransport( url="wss://countries.trevorblades.com/graphql", ssl=True ) # Instanciate client async with Client(transport=sample_transport): invalid_payload = '{"id": "1", "type": "start", "payload": "BLAHBLAH"}' print(f">>> {invalid_payload}") await sample_transport.websocket.send(invalid_payload) await asyncio.sleep(2) @pytest.mark.online @pytest.mark.skipif(sys.version_info < (3, 8), reason="requires python3.8 or 
higher") @pytest.mark.skip(reason=skip_reason) @pytest.mark.asyncio async def test_websocket_sending_invalid_data_while_other_query_is_running(): from gql.transport.websockets import WebsocketsTransport # Get Websockets transport sample_transport = WebsocketsTransport( url="wss://countries.trevorblades.com/graphql", ssl=True ) # Instanciate client async with Client(transport=sample_transport) as session: query = gql( """ query getContinents { continents { code } } """ ) async def query_task1(): await asyncio.sleep(2 * MS) with pytest.raises(TransportError): result = await session.execute(query) assert isinstance(result, Dict) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" async def query_task2(): invalid_data = "QSDF" print(f">>> {invalid_data}") await sample_transport.websocket.send(invalid_data) task1 = asyncio.create_task(query_task1()) task2 = asyncio.create_task(query_task2()) # await task1 # await task2 await asyncio.gather(task1, task2) @pytest.mark.online @pytest.mark.skipif(sys.version_info < (3, 8), reason="requires python3.8 or higher") @pytest.mark.skip(reason=skip_reason) @pytest.mark.asyncio async def test_websocket_two_queries_in_parallel_using_two_tasks(): from gql.transport.websockets import WebsocketsTransport # Get Websockets transport sample_transport = WebsocketsTransport( url="wss://countries.trevorblades.com/graphql", ssl=True ) # Instanciate client async with Client(transport=sample_transport) as session: query1 = gql( """ query getContinents { continents { code } } """ ) query2 = gql( """ query getContinents { continents { name } } """ ) async def query_task1(): result = await session.execute(query1) assert isinstance(result, Dict) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" async def query_task2(): result = await session.execute(query2) assert isinstance(result, Dict) continents = result["continents"] africa = continents[0] assert africa["name"] == "Africa" 
task1 = asyncio.create_task(query_task1()) task2 = asyncio.create_task(query_task2()) await task1 await task2 gql-3.6.0b2/tests/test_websocket_query.py000066400000000000000000000420211460703211500205130ustar00rootroot00000000000000import asyncio import json import ssl import sys from typing import Dict, Mapping import pytest from gql import Client, gql from gql.transport.exceptions import ( TransportAlreadyConnected, TransportClosed, TransportQueryError, TransportServerError, ) from .conftest import MS, WebSocketServerHelper # Marking all tests in this file with the websockets marker pytestmark = pytest.mark.websockets query1_str = """ query getContinents { continents { code name } } """ query1_server_answer_data = ( '{"continents":[' '{"code":"AF","name":"Africa"},{"code":"AN","name":"Antarctica"},' '{"code":"AS","name":"Asia"},{"code":"EU","name":"Europe"},' '{"code":"NA","name":"North America"},{"code":"OC","name":"Oceania"},' '{"code":"SA","name":"South America"}]}' ) query1_server_answer = ( '{{"type":"data","id":"{query_id}","payload":{{"data":{{"continents":[' '{{"code":"AF","name":"Africa"}},{{"code":"AN","name":"Antarctica"}},' '{{"code":"AS","name":"Asia"}},{{"code":"EU","name":"Europe"}},' '{{"code":"NA","name":"North America"}},{{"code":"OC","name":"Oceania"}},' '{{"code":"SA","name":"South America"}}]}}}}}}' ) server1_answers = [ query1_server_answer, ] @pytest.mark.asyncio @pytest.mark.parametrize("server", [server1_answers], indirect=True) async def test_websocket_starting_client_in_context_manager(event_loop, server): import websockets from gql.transport.websockets import WebsocketsTransport url = f"ws://{server.hostname}:{server.port}/graphql" print(f"url = {url}") transport = WebsocketsTransport(url=url) async with Client(transport=transport) as session: assert isinstance( transport.websocket, websockets.client.WebSocketClientProtocol ) query1 = gql(query1_str) result = await session.execute(query1) print("Client received:", result) # Verify 
result assert isinstance(result, Dict) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" # Checking response headers are saved in the transport assert hasattr(transport, "response_headers") assert isinstance(transport.response_headers, Mapping) assert transport.response_headers["dummy"] == "test1234" # Check client is disconnect here assert transport.websocket is None @pytest.mark.asyncio @pytest.mark.parametrize("ws_ssl_server", [server1_answers], indirect=True) async def test_websocket_using_ssl_connection(event_loop, ws_ssl_server): import websockets from gql.transport.websockets import WebsocketsTransport server = ws_ssl_server url = f"wss://{server.hostname}:{server.port}/graphql" print(f"url = {url}") ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) ssl_context.load_verify_locations(ws_ssl_server.testcert) transport = WebsocketsTransport(url=url, ssl=ssl_context) async with Client(transport=transport) as session: assert isinstance( transport.websocket, websockets.client.WebSocketClientProtocol ) query1 = gql(query1_str) result = await session.execute(query1) print("Client received:", result) # Verify result assert isinstance(result, Dict) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" # Check client is disconnect here assert transport.websocket is None @pytest.mark.asyncio @pytest.mark.parametrize("server", [server1_answers], indirect=True) @pytest.mark.parametrize("query_str", [query1_str]) async def test_websocket_simple_query(event_loop, client_and_server, query_str): session, server = client_and_server query = gql(query_str) result = await session.execute(query) print("Client received:", result) server1_two_answers_in_series = [ query1_server_answer, query1_server_answer, ] @pytest.mark.asyncio @pytest.mark.parametrize("server", [server1_two_answers_in_series], indirect=True) @pytest.mark.parametrize("query_str", [query1_str]) async def test_websocket_two_queries_in_series( 
event_loop, client_and_server, query_str ): session, server = client_and_server query = gql(query_str) result1 = await session.execute(query) print("Query1 received:", result1) result2 = await session.execute(query) print("Query2 received:", result2) assert result1 == result2 async def server1_two_queries_in_parallel(ws, path): await WebSocketServerHelper.send_connection_ack(ws) result = await ws.recv() print(f"Server received: {result}", file=sys.stderr) result = await ws.recv() print(f"Server received: {result}", file=sys.stderr) await ws.send(query1_server_answer.format(query_id=1)) await ws.send(query1_server_answer.format(query_id=2)) await WebSocketServerHelper.send_complete(ws, 1) await WebSocketServerHelper.send_complete(ws, 2) await WebSocketServerHelper.wait_connection_terminate(ws) await ws.wait_closed() @pytest.mark.asyncio @pytest.mark.parametrize("server", [server1_two_queries_in_parallel], indirect=True) @pytest.mark.parametrize("query_str", [query1_str]) async def test_websocket_two_queries_in_parallel( event_loop, client_and_server, query_str ): session, server = client_and_server query = gql(query_str) result1 = None result2 = None async def task1_coro(): nonlocal result1 result1 = await session.execute(query) async def task2_coro(): nonlocal result2 result2 = await session.execute(query) task1 = asyncio.ensure_future(task1_coro()) task2 = asyncio.ensure_future(task2_coro()) await asyncio.gather(task1, task2) print("Query1 received:", result1) print("Query2 received:", result2) assert result1 == result2 async def server_closing_while_we_are_doing_something_else(ws, path): await WebSocketServerHelper.send_connection_ack(ws) result = await ws.recv() print(f"Server received: {result}", file=sys.stderr) await ws.send(query1_server_answer.format(query_id=1)) await WebSocketServerHelper.send_complete(ws, 1) await asyncio.sleep(1 * MS) # Closing server after first query await ws.close() @pytest.mark.asyncio @pytest.mark.parametrize( "server", 
[server_closing_while_we_are_doing_something_else], indirect=True ) @pytest.mark.parametrize("query_str", [query1_str]) async def test_websocket_server_closing_after_first_query( event_loop, client_and_server, query_str ): session, server = client_and_server query = gql(query_str) # First query is working await session.execute(query) # Then we do other things await asyncio.sleep(100 * MS) # Now the server is closed but we don't know it yet, we have to send a query # to notice it and to receive the exception with pytest.raises(TransportClosed): await session.execute(query) ignore_invalid_id_answers = [ query1_server_answer, '{"type":"complete","id": "55"}', query1_server_answer, ] @pytest.mark.asyncio @pytest.mark.parametrize("server", [ignore_invalid_id_answers], indirect=True) @pytest.mark.parametrize("query_str", [query1_str]) async def test_websocket_ignore_invalid_id(event_loop, client_and_server, query_str): session, server = client_and_server query = gql(query_str) # First query is working await session.execute(query) # Second query gets no answer -> raises with pytest.raises(TransportQueryError): await session.execute(query) # Third query is working await session.execute(query) async def assert_client_is_working(session): query1 = gql(query1_str) result = await session.execute(query1) print("Client received:", result) # Verify result assert isinstance(result, Dict) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" @pytest.mark.asyncio @pytest.mark.parametrize("server", [server1_answers], indirect=True) async def test_websocket_multiple_connections_in_series(event_loop, server): from gql.transport.websockets import WebsocketsTransport url = f"ws://{server.hostname}:{server.port}/graphql" print(f"url = {url}") transport = WebsocketsTransport(url=url) async with Client(transport=transport) as session: await assert_client_is_working(session) # Check client is disconnect here assert transport.websocket is None async with 
Client(transport=transport) as session: await assert_client_is_working(session) # Check client is disconnect here assert transport.websocket is None @pytest.mark.asyncio @pytest.mark.parametrize("server", [server1_answers], indirect=True) async def test_websocket_multiple_connections_in_parallel(event_loop, server): from gql.transport.websockets import WebsocketsTransport url = f"ws://{server.hostname}:{server.port}/graphql" print(f"url = {url}") async def task_coro(): transport = WebsocketsTransport(url=url) async with Client(transport=transport) as session: await assert_client_is_working(session) task1 = asyncio.ensure_future(task_coro()) task2 = asyncio.ensure_future(task_coro()) await asyncio.gather(task1, task2) @pytest.mark.asyncio @pytest.mark.parametrize("server", [server1_answers], indirect=True) async def test_websocket_trying_to_connect_to_already_connected_transport( event_loop, server ): from gql.transport.websockets import WebsocketsTransport url = f"ws://{server.hostname}:{server.port}/graphql" print(f"url = {url}") transport = WebsocketsTransport(url=url) async with Client(transport=transport) as session: await assert_client_is_working(session) with pytest.raises(TransportAlreadyConnected): async with Client(transport=transport): pass async def server_with_authentication_in_connection_init_payload(ws, path): # Wait the connection_init message init_message_str = await ws.recv() init_message = json.loads(init_message_str) payload = init_message["payload"] if "Authorization" in payload: if payload["Authorization"] == 12345: await ws.send('{"type":"connection_ack"}') result = await ws.recv() print(f"Server received: {result}", file=sys.stderr) await ws.send(query1_server_answer.format(query_id=1)) await WebSocketServerHelper.send_complete(ws, 1) else: await ws.send( '{"type":"connection_error", "payload": "Invalid Authorization token"}' ) else: await ws.send( '{"type":"connection_error", "payload": "No Authorization token"}' ) await ws.close() 
@pytest.mark.asyncio @pytest.mark.parametrize( "server", [server_with_authentication_in_connection_init_payload], indirect=True ) @pytest.mark.parametrize("query_str", [query1_str]) async def test_websocket_connect_success_with_authentication_in_connection_init( event_loop, server, query_str ): from gql.transport.websockets import WebsocketsTransport url = f"ws://{server.hostname}:{server.port}/graphql" print(f"url = {url}") init_payload = {"Authorization": 12345} transport = WebsocketsTransport(url=url, init_payload=init_payload) async with Client(transport=transport) as session: query1 = gql(query_str) result = await session.execute(query1) print("Client received:", result) # Verify result assert isinstance(result, Dict) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" @pytest.mark.asyncio @pytest.mark.parametrize( "server", [server_with_authentication_in_connection_init_payload], indirect=True ) @pytest.mark.parametrize("query_str", [query1_str]) @pytest.mark.parametrize("init_payload", [{}, {"Authorization": "invalid_code"}]) async def test_websocket_connect_failed_with_authentication_in_connection_init( event_loop, server, query_str, init_payload ): from gql.transport.websockets import WebsocketsTransport url = f"ws://{server.hostname}:{server.port}/graphql" print(f"url = {url}") transport = WebsocketsTransport(url=url, init_payload=init_payload) with pytest.raises(TransportServerError): async with Client(transport=transport) as session: query1 = gql(query_str) await session.execute(query1) @pytest.mark.parametrize("server", [server1_answers], indirect=True) def test_websocket_execute_sync(server): from gql.transport.websockets import WebsocketsTransport url = f"ws://{server.hostname}:{server.port}/graphql" print(f"url = {url}") transport = WebsocketsTransport(url=url) client = Client(transport=transport) query1 = gql(query1_str) result = client.execute(query1) print("Client received:", result) # Verify result assert 
isinstance(result, Dict) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" # Execute sync a second time result = client.execute(query1) print("Client received:", result) # Verify result assert isinstance(result, Dict) continents = result["continents"] africa = continents[0] assert africa["code"] == "AF" # Check client is disconnect here assert transport.websocket is None @pytest.mark.asyncio @pytest.mark.parametrize("server", [server1_answers], indirect=True) async def test_websocket_add_extra_parameters_to_connect(event_loop, server): from gql.transport.websockets import WebsocketsTransport url = f"ws://{server.hostname}:{server.port}/graphql" # Increase max payload size to avoid websockets.exceptions.PayloadTooBig exceptions transport = WebsocketsTransport(url=url, connect_args={"max_size": 2**21}) query = gql(query1_str) async with Client(transport=transport) as session: await session.execute(query) async def server_sending_keep_alive_before_connection_ack(ws, path): await WebSocketServerHelper.send_keepalive(ws) await WebSocketServerHelper.send_keepalive(ws) await WebSocketServerHelper.send_keepalive(ws) await WebSocketServerHelper.send_keepalive(ws) await WebSocketServerHelper.send_connection_ack(ws) result = await ws.recv() print(f"Server received: {result}", file=sys.stderr) await ws.send(query1_server_answer.format(query_id=1)) await WebSocketServerHelper.send_complete(ws, 1) await ws.wait_closed() @pytest.mark.asyncio @pytest.mark.parametrize( "server", [server_sending_keep_alive_before_connection_ack], indirect=True ) @pytest.mark.parametrize("query_str", [query1_str]) async def test_websocket_non_regression_bug_108( event_loop, client_and_server, query_str ): # This test will check that we now ignore keepalive message # arriving before the connection_ack # See bug #108 session, server = client_and_server query = gql(query_str) result = await session.execute(query) print("Client received:", result) continents = 
result["continents"] africa = continents[0] assert africa["code"] == "AF" @pytest.mark.asyncio @pytest.mark.parametrize("server", [server1_answers], indirect=True) async def test_websocket_using_cli(event_loop, server, monkeypatch, capsys): url = f"ws://{server.hostname}:{server.port}/graphql" print(f"url = {url}") from gql.cli import main, get_parser import io import json parser = get_parser(with_examples=True) args = parser.parse_args([url]) # Monkeypatching sys.stdin to simulate getting the query # via the standard input monkeypatch.setattr("sys.stdin", io.StringIO(query1_str)) # Flush captured output captured = capsys.readouterr() exit_code = await main(args) assert exit_code == 0 # Check that the result has been printed on stdout captured = capsys.readouterr() captured_out = str(captured.out).strip() expected_answer = json.loads(query1_server_answer_data) print(f"Captured: {captured_out}") received_answer = json.loads(captured_out) assert received_answer == expected_answer query1_server_answer_with_extensions = ( '{{"type":"data","id":"{query_id}","payload":{{"data":{{"continents":[' '{{"code":"AF","name":"Africa"}},{{"code":"AN","name":"Antarctica"}},' '{{"code":"AS","name":"Asia"}},{{"code":"EU","name":"Europe"}},' '{{"code":"NA","name":"North America"}},{{"code":"OC","name":"Oceania"}},' '{{"code":"SA","name":"South America"}}]}},' '"extensions": {{"key1": "val1"}}}}}}' ) server1_answers_with_extensions = [ query1_server_answer_with_extensions, ] @pytest.mark.asyncio @pytest.mark.parametrize("server", [server1_answers_with_extensions], indirect=True) @pytest.mark.parametrize("query_str", [query1_str]) async def test_websocket_simple_query_with_extensions( event_loop, client_and_server, query_str ): session, server = client_and_server query = gql(query_str) execution_result = await session.execute(query, get_execution_result=True) assert execution_result.extensions["key1"] == "val1" 
gql-3.6.0b2/tests/test_websocket_subscription.py000066400000000000000000000443401460703211500221000ustar00rootroot00000000000000import asyncio import json import sys import warnings from typing import List import pytest from graphql import ExecutionResult from parse import search from gql import Client, gql from gql.transport.exceptions import TransportServerError from .conftest import MS, WebSocketServerHelper # Marking all tests in this file with the websockets marker pytestmark = pytest.mark.websockets countdown_server_answer = ( '{{"type":"data","id":"{query_id}","payload":{{"data":{{"number":{number}}}}}}}' ) WITH_KEEPALIVE = False # List which can used to store received messages by the server logged_messages: List[str] = [] async def server_countdown(ws, path): import websockets logged_messages.clear() global WITH_KEEPALIVE try: await WebSocketServerHelper.send_connection_ack(ws) if WITH_KEEPALIVE: await WebSocketServerHelper.send_keepalive(ws) result = await ws.recv() logged_messages.append(result) json_result = json.loads(result) assert json_result["type"] == "start" payload = json_result["payload"] query = payload["query"] query_id = json_result["id"] count_found = search("count: {:d}", query) count = count_found[0] print(f"Countdown started from: {count}") async def counting_coro(): for number in range(count, -1, -1): await ws.send( countdown_server_answer.format(query_id=query_id, number=number) ) await asyncio.sleep(2 * MS) counting_task = asyncio.ensure_future(counting_coro()) async def stopping_coro(): nonlocal counting_task while True: try: result = await ws.recv() logged_messages.append(result) except websockets.exceptions.ConnectionClosed: break json_result = json.loads(result) if json_result["type"] == "stop" and json_result["id"] == str(query_id): print("Cancelling counting task now") counting_task.cancel() async def keepalive_coro(): while True: await asyncio.sleep(5 * MS) try: await WebSocketServerHelper.send_keepalive(ws) except 
websockets.exceptions.ConnectionClosed: break stopping_task = asyncio.ensure_future(stopping_coro()) keepalive_task = asyncio.ensure_future(keepalive_coro()) try: await counting_task except asyncio.CancelledError: print("Now counting task is cancelled") stopping_task.cancel() try: await stopping_task except asyncio.CancelledError: print("Now stopping task is cancelled") if WITH_KEEPALIVE: keepalive_task.cancel() try: await keepalive_task except asyncio.CancelledError: print("Now keepalive task is cancelled") await WebSocketServerHelper.send_complete(ws, query_id) await WebSocketServerHelper.wait_connection_terminate(ws) except websockets.exceptions.ConnectionClosedOK: pass finally: await ws.wait_closed() countdown_subscription_str = """ subscription {{ countdown (count: {count}) {{ number }} }} """ @pytest.mark.asyncio @pytest.mark.parametrize("server", [server_countdown], indirect=True) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_websocket_subscription(event_loop, client_and_server, subscription_str): session, server = client_and_server count = 10 subscription = gql(subscription_str.format(count=count)) async for result in session.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 assert count == -1 @pytest.mark.asyncio @pytest.mark.parametrize("server", [server_countdown], indirect=True) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_websocket_subscription_get_execution_result( event_loop, client_and_server, subscription_str ): session, server = client_and_server count = 10 subscription = gql(subscription_str.format(count=count)) async for result in session.subscribe(subscription, get_execution_result=True): assert isinstance(result, ExecutionResult) number = result.data["number"] print(f"Number received: {number}") assert number == count count -= 1 assert count == -1 @pytest.mark.asyncio 
@pytest.mark.parametrize("server", [server_countdown], indirect=True) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_websocket_subscription_break( event_loop, client_and_server, subscription_str ): session, server = client_and_server count = 10 subscription = gql(subscription_str.format(count=count)) async for result in session.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count if count <= 5: # Note: the following line is only necessary for pypy3 v3.6.1 if sys.version_info < (3, 7): await session._generator.aclose() break count -= 1 assert count == 5 @pytest.mark.asyncio @pytest.mark.parametrize("server", [server_countdown], indirect=True) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_websocket_subscription_task_cancel( event_loop, client_and_server, subscription_str ): session, server = client_and_server count = 10 subscription = gql(subscription_str.format(count=count)) async def task_coro(): nonlocal count async for result in session.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 task = asyncio.ensure_future(task_coro()) async def cancel_task_coro(): nonlocal task await asyncio.sleep(11 * MS) task.cancel() cancel_task = asyncio.ensure_future(cancel_task_coro()) await asyncio.gather(task, cancel_task) assert count > 0 @pytest.mark.asyncio @pytest.mark.parametrize("server", [server_countdown], indirect=True) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_websocket_subscription_close_transport( event_loop, client_and_server, subscription_str ): session, server = client_and_server count = 10 subscription = gql(subscription_str.format(count=count)) async def task_coro(): nonlocal count async for result in session.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert 
number == count count -= 1 task = asyncio.ensure_future(task_coro()) async def close_transport_task_coro(): nonlocal task await asyncio.sleep(11 * MS) await session.transport.close() close_transport_task = asyncio.ensure_future(close_transport_task_coro()) await asyncio.gather(task, close_transport_task) assert count > 0 async def server_countdown_close_connection_in_middle(ws, path): await WebSocketServerHelper.send_connection_ack(ws) result = await ws.recv() json_result = json.loads(result) assert json_result["type"] == "start" payload = json_result["payload"] query = payload["query"] query_id = json_result["id"] count_found = search("count: {:d}", query) count = count_found[0] stopping_before = count // 2 print(f"Countdown started from: {count}, stopping server before {stopping_before}") for number in range(count, stopping_before, -1): await ws.send(countdown_server_answer.format(query_id=query_id, number=number)) await asyncio.sleep(2 * MS) print("Closing server while subscription is still running now") await ws.close() await ws.wait_closed() print("Server is now closed") @pytest.mark.asyncio @pytest.mark.parametrize( "server", [server_countdown_close_connection_in_middle], indirect=True ) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_websocket_subscription_server_connection_closed( event_loop, client_and_server, subscription_str ): import websockets session, server = client_and_server count = 10 subscription = gql(subscription_str.format(count=count)) with pytest.raises(websockets.exceptions.ConnectionClosedOK): async for result in session.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 @pytest.mark.asyncio @pytest.mark.parametrize("server", [server_countdown], indirect=True) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_websocket_subscription_slow_consumer( event_loop, client_and_server, 
subscription_str ): session, server = client_and_server count = 10 subscription = gql(subscription_str.format(count=count)) async for result in session.subscribe(subscription): await asyncio.sleep(10 * MS) number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 assert count == -1 @pytest.mark.asyncio @pytest.mark.parametrize("server", [server_countdown], indirect=True) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_websocket_subscription_with_operation_name( event_loop, client_and_server, subscription_str ): session, server = client_and_server count = 10 subscription = gql(subscription_str.format(count=count)) async for result in session.subscribe( subscription, operation_name="CountdownSubscription" ): number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 assert count == -1 # Check that the query contains the operationName assert '"operationName": "CountdownSubscription"' in logged_messages[0] WITH_KEEPALIVE = True @pytest.mark.asyncio @pytest.mark.parametrize("server", [server_countdown], indirect=True) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_websocket_subscription_with_keepalive( event_loop, client_and_server, subscription_str ): session, server = client_and_server count = 10 subscription = gql(subscription_str.format(count=count)) async for result in session.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 assert count == -1 @pytest.mark.asyncio @pytest.mark.parametrize("server", [server_countdown], indirect=True) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_websocket_subscription_with_keepalive_with_timeout_ok( event_loop, server, subscription_str ): from gql.transport.websockets import WebsocketsTransport path = "/graphql" url = 
f"ws://{server.hostname}:{server.port}{path}" sample_transport = WebsocketsTransport(url=url, keep_alive_timeout=(20 * MS)) client = Client(transport=sample_transport) count = 10 subscription = gql(subscription_str.format(count=count)) async with client as session: async for result in session.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 assert count == -1 @pytest.mark.asyncio @pytest.mark.parametrize("server", [server_countdown], indirect=True) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_websocket_subscription_with_keepalive_with_timeout_nok( event_loop, server, subscription_str ): from gql.transport.websockets import WebsocketsTransport path = "/graphql" url = f"ws://{server.hostname}:{server.port}{path}" sample_transport = WebsocketsTransport(url=url, keep_alive_timeout=(1 * MS)) client = Client(transport=sample_transport) count = 10 subscription = gql(subscription_str.format(count=count)) async with client as session: with pytest.raises(TransportServerError) as exc_info: async for result in session.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 assert "No keep-alive message has been received" in str(exc_info.value) @pytest.mark.parametrize("server", [server_countdown], indirect=True) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) def test_websocket_subscription_sync(server, subscription_str): from gql.transport.websockets import WebsocketsTransport url = f"ws://{server.hostname}:{server.port}/graphql" print(f"url = {url}") sample_transport = WebsocketsTransport(url=url) client = Client(transport=sample_transport) count = 10 subscription = gql(subscription_str.format(count=count)) for result in client.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 assert count == -1 
@pytest.mark.parametrize("server", [server_countdown], indirect=True) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) def test_websocket_subscription_sync_user_exception(server, subscription_str): from gql.transport.websockets import WebsocketsTransport url = f"ws://{server.hostname}:{server.port}/graphql" print(f"url = {url}") sample_transport = WebsocketsTransport(url=url) client = Client(transport=sample_transport) count = 10 subscription = gql(subscription_str.format(count=count)) with pytest.raises(Exception) as exc_info: for result in client.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 if count == 5: raise Exception("This is an user exception") assert count == 5 assert "This is an user exception" in str(exc_info.value) @pytest.mark.parametrize("server", [server_countdown], indirect=True) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) def test_websocket_subscription_sync_break(server, subscription_str): from gql.transport.websockets import WebsocketsTransport url = f"ws://{server.hostname}:{server.port}/graphql" print(f"url = {url}") sample_transport = WebsocketsTransport(url=url) client = Client(transport=sample_transport) count = 10 subscription = gql(subscription_str.format(count=count)) for result in client.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 if count == 5: break assert count == 5 @pytest.mark.skipif(sys.platform.startswith("win"), reason="test failing on windows") @pytest.mark.parametrize("server", [server_countdown], indirect=True) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) def test_websocket_subscription_sync_graceful_shutdown(server, subscription_str): """Note: this test will simulate a control-C happening while a sync subscription is in progress. 
To do that we will throw a KeyboardInterrupt exception inside the subscription async generator. The code should then do a clean close: - send stop messages for each active query - send a connection_terminate message Then the KeyboardInterrupt will be reraise (to warn potential user code) This test does not work on Windows but the behaviour with Windows is correct. """ from gql.transport.websockets import WebsocketsTransport url = f"ws://{server.hostname}:{server.port}/graphql" print(f"url = {url}") sample_transport = WebsocketsTransport(url=url) client = Client(transport=sample_transport) count = 10 subscription = gql(subscription_str.format(count=count)) interrupt_task = None with pytest.raises(KeyboardInterrupt): for result in client.subscribe(subscription): number = result["number"] print(f"Number received: {number}") assert number == count if count == 5: # Simulate a KeyboardInterrupt in the generator with warnings.catch_warnings(): warnings.filterwarnings( "ignore", message="There is no current event loop" ) interrupt_task = asyncio.ensure_future( client.session._generator.athrow(KeyboardInterrupt) ) count -= 1 assert count == 4 # Catch interrupt_task exception to remove warning interrupt_task.exception() # Check that the server received a connection_terminate message last assert logged_messages.pop() == '{"type": "connection_terminate"}' @pytest.mark.asyncio @pytest.mark.parametrize("server", [server_countdown], indirect=True) @pytest.mark.parametrize("subscription_str", [countdown_subscription_str]) async def test_websocket_subscription_running_in_thread( event_loop, server, subscription_str, run_sync_test ): from gql.transport.websockets import WebsocketsTransport def test_code(): path = "/graphql" url = f"ws://{server.hostname}:{server.port}{path}" sample_transport = WebsocketsTransport(url=url) client = Client(transport=sample_transport) count = 10 subscription = gql(subscription_str.format(count=count)) for result in client.subscribe(subscription): 
number = result["number"] print(f"Number received: {number}") assert number == count count -= 1 assert count == -1 await run_sync_test(event_loop, server, test_code) gql-3.6.0b2/tox.ini000066400000000000000000000030201460703211500140340ustar00rootroot00000000000000[tox] envlist = black,flake8,import-order,mypy,manifest, py{37,38,39,310,311,312,py3} [gh-actions] python = 3.7: py37 3.8: py38 3.9: py39 3.10: py310 3.11: py311 3.12: py312 pypy-3: pypy3 [testenv] conda_channels = conda-forge passenv = * setenv = PYTHONPATH = {toxinidir} MULTIDICT_NO_EXTENSIONS = 1 ; Related to https://github.com/aio-libs/multidict YARL_NO_EXTENSIONS = 1 ; Related to https://github.com/aio-libs/yarl GQL_TESTS_TIMEOUT_FACTOR = 10 install_command = python -m pip install --ignore-installed {opts} {packages} whitelist_externals = python deps = -e.[test] ; Prevent installing issues: https://github.com/ContinuumIO/anaconda-issues/issues/542 commands = pip install -U setuptools ; run "tox -- tests -s" to show output for debugging py{37,39,310,311,312,py3}: pytest {posargs:tests} py{38}: pytest {posargs:tests --cov-report=term-missing --cov=gql} [testenv:black] basepython = python3.8 deps = -e.[dev] commands = black --check gql tests [testenv:flake8] basepython = python3.8 deps = -e.[dev] commands = flake8 gql tests [testenv:import-order] basepython = python3.8 deps = -e.[dev] commands = isort --recursive --check-only --diff gql tests [testenv:mypy] basepython = python3.8 deps = -e.[dev] commands = mypy gql tests [testenv:docs] basepython = python3.8 deps = -e.[dev] commands = sphinx-build -b html -nEW docs docs/_build/html [testenv:manifest] basepython = python3.8 deps = -e.[dev] commands = check-manifest -v