pax_global_header00006660000000000000000000000064147137445450014530gustar00rootroot0000000000000052 comment=82903263080b3b7f22c2ad84319584d7a3b1a1f6 python-graphene-3.4.3/000077500000000000000000000000001471374454500146475ustar00rootroot00000000000000python-graphene-3.4.3/.coveragerc000066400000000000000000000000521471374454500167650ustar00rootroot00000000000000[run] omit = graphene/pyutils/*,*/tests/* python-graphene-3.4.3/.editorconfig000066400000000000000000000003001471374454500173150ustar00rootroot00000000000000# http://editorconfig.org root = true [*] charset = utf-8 end_of_line = lf insert_final_newline = true trim_trailing_whitespace = true [*.{py,rst,ini}] indent_style = space indent_size = 4 python-graphene-3.4.3/.github/000077500000000000000000000000001471374454500162075ustar00rootroot00000000000000python-graphene-3.4.3/.github/ISSUE_TEMPLATE/000077500000000000000000000000001471374454500203725ustar00rootroot00000000000000python-graphene-3.4.3/.github/ISSUE_TEMPLATE/bug_report.md000066400000000000000000000014451471374454500230700ustar00rootroot00000000000000--- name: Bug report about: Create a report to help us improve title: '' labels: "\U0001F41B bug" assignees: '' --- **Note: for support questions, please use stackoverflow**. This repository's issues are reserved for feature requests and bug reports. * **What is the current behavior?** * **If the current behavior is a bug, please provide the steps to reproduce and if possible a minimal demo of the problem** via a github repo, https://repl.it or similar. * **What is the expected behavior?** * **What is the motivation / use case for changing the behavior?** * **Please tell us about your environment:** - Version: - Platform: * **Other information** (e.g. detailed explanation, stacktraces, related issues, suggestions how to fix, links for us to have context, eg. 
stackoverflow) python-graphene-3.4.3/.github/ISSUE_TEMPLATE/config.yml000066400000000000000000000000341471374454500223570ustar00rootroot00000000000000blank_issues_enabled: false python-graphene-3.4.3/.github/ISSUE_TEMPLATE/feature_request.md000066400000000000000000000011421471374454500241150ustar00rootroot00000000000000--- name: Feature request about: Suggest an idea for this project title: '' labels: "✨ enhancement" assignees: '' --- **Is your feature request related to a problem? Please describe.** A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] **Describe the solution you'd like** A clear and concise description of what you want to happen. **Describe alternatives you've considered** A clear and concise description of any alternative solutions or features you've considered. **Additional context** Add any other context or screenshots about the feature request here. python-graphene-3.4.3/.github/stale.yml000066400000000000000000000015051471374454500200430ustar00rootroot00000000000000# Number of days of inactivity before an issue becomes stale daysUntilStale: false # Number of days of inactivity before a stale issue is closed daysUntilClose: false # Issues with these labels will never be considered stale exemptLabels: - pinned - security - πŸ› bug - πŸ“– documentation - πŸ™‹ help wanted - ✨ enhancement - good first issue - work in progress # Label to use when marking an issue as stale staleLabel: wontfix # Comment to post when marking an issue as stale. Set to `false` to disable markComment: false # markComment: > # This issue has been automatically marked as stale because it has not had # recent activity. It will be closed if no further activity occurs. Thank you # for your contributions. # Comment to post when closing a stale issue. 
Set to `false` to disable closeComment: false python-graphene-3.4.3/.github/workflows/000077500000000000000000000000001471374454500202445ustar00rootroot00000000000000python-graphene-3.4.3/.github/workflows/build.yaml000066400000000000000000000007451471374454500222350ustar00rootroot00000000000000name: πŸ“¦ Build on: [push, pull_request] jobs: build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Set up Python 3.10 uses: actions/setup-python@v5 with: python-version: "3.10" - name: Install dependencies run: | python -m pip install --upgrade pip pip install build twine - name: Building package run: python3 -m build - name: Check package with Twine run: twine check dist/* python-graphene-3.4.3/.github/workflows/deploy.yml000066400000000000000000000010631471374454500222630ustar00rootroot00000000000000name: πŸš€ Deploy to PyPI on: push: tags: - 'v*' jobs: build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Set up Python 3.10 uses: actions/setup-python@v5 with: python-version: "3.10" - name: Build wheel and source tarball run: | pip install wheel python setup.py sdist bdist_wheel - name: Publish a Python distribution to PyPI uses: pypa/gh-action-pypi-publish@v1.1.0 with: user: __token__ password: ${{ secrets.pypi_password }} python-graphene-3.4.3/.github/workflows/lint.yml000066400000000000000000000007571471374454500217460ustar00rootroot00000000000000name: πŸ’… Lint on: [push, pull_request] jobs: build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Set up Python 3.10 uses: actions/setup-python@v5 with: python-version: "3.10" - name: Install dependencies run: | python -m pip install --upgrade pip pip install tox - name: Run lint run: tox env: TOXENV: pre-commit - name: Run mypy run: tox env: TOXENV: mypy python-graphene-3.4.3/.github/workflows/tests.yml000066400000000000000000000035651471374454500221420ustar00rootroot00000000000000name: πŸ“„ Tests on: push: branches: - master - '*.x' paths-ignore: - 'docs/**' - 
'*.md' - '*.rst' pull_request: branches: - master - '*.x' paths-ignore: - 'docs/**' - '*.md' - '*.rst' jobs: tests: # runs the test suite name: ${{ matrix.name }} runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: include: - {name: '3.13', python: '3.13', os: ubuntu-latest, tox: py313} - {name: '3.12', python: '3.12', os: ubuntu-latest, tox: py312} - {name: '3.11', python: '3.11', os: ubuntu-latest, tox: py311} - {name: '3.10', python: '3.10', os: ubuntu-latest, tox: py310} - {name: '3.9', python: '3.9', os: ubuntu-latest, tox: py39} - {name: '3.8', python: '3.8', os: ubuntu-latest, tox: py38} steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} - name: update pip run: | python -m pip install --upgrade pip pip install --upgrade setuptools wheel - name: get pip cache dir id: pip-cache run: echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT - name: cache pip dependencies uses: actions/cache@v3 with: path: ${{ steps.pip-cache.outputs.dir }} key: pip|${{ runner.os }}|${{ matrix.python }}|${{ hashFiles('setup.py') }} - run: pip install tox - run: tox -e ${{ matrix.tox }} - name: Upload coverage.xml if: ${{ matrix.python == '3.10' }} uses: actions/upload-artifact@v4 with: name: graphene-coverage path: coverage.xml if-no-files-found: error - name: Upload coverage.xml to codecov if: ${{ matrix.python == '3.10' }} uses: codecov/codecov-action@v4 python-graphene-3.4.3/.gitignore000066400000000000000000000021261471374454500166400ustar00rootroot00000000000000# Created by https://www.gitignore.io ### Python ### # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] # C extensions *.so # Distribution / packaging .Python build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ *.egg-info/ .installed.cfg *.egg # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. 
*.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .coverage.* .cache .pytest_cache nosetests.xml coverage.xml *.cover .pytest_cache/ # Translations *.mo *.pot # Django stuff: *.log # Sphinx documentation docs/_build/ # PyBuilder target/ # VirtualEnv .env .venv env/ venv/ # Typing .mypy_cache/ /tests/django.sqlite /graphene/index.json /graphene/meta.json /meta.json /index.json /docs/playground/graphene-js/pypyjs-release-nojit/ /docs/static/playground/lib /docs/static/playground # PyCharm .idea *.iml # Databases *.sqlite3 .vscode .mypy_cache .ruff_cache python-graphene-3.4.3/.pre-commit-config.yaml000066400000000000000000000012751471374454500211350ustar00rootroot00000000000000default_language_version: python: python3.10 repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.3.0 hooks: - id: check-merge-conflict - id: check-json - id: check-yaml - id: debug-statements - id: end-of-file-fixer exclude: ^docs/.*$ - id: pretty-format-json args: - --autofix - id: trailing-whitespace exclude: README.md - repo: https://github.com/asottile/pyupgrade rev: v2.37.3 hooks: - id: pyupgrade - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. 
rev: v0.5.0 hooks: - id: ruff - id: ruff-format args: [ --check ] python-graphene-3.4.3/LICENSE000066400000000000000000000020771471374454500156620ustar00rootroot00000000000000The MIT License (MIT) Copyright (c) 2015-Present Syrus Akbary Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
python-graphene-3.4.3/MANIFEST.in000066400000000000000000000001751471374454500164100ustar00rootroot00000000000000global-exclude tests/* recursive-exclude tests * recursive-exclude tests_py35 * recursive-exclude examples * include LICENSE python-graphene-3.4.3/Makefile000066400000000000000000000013011471374454500163020ustar00rootroot00000000000000.PHONY: help help: @echo "Please use \`make ' where is one of" @grep -E '^\.PHONY: [a-zA-Z_-]+ .*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = "(: |##)"}; {printf "\033[36m%-30s\033[0m %s\n", $$2, $$3}' .PHONY: install-dev ## Install development dependencies install-dev: pip install -e ".[dev]" .PHONY: test ## Run tests test: py.test graphene examples .PHONY: docs ## Generate docs docs: install-dev cd docs && make install && make html .PHONY: docs-live ## Generate docs with live reloading docs-live: install-dev cd docs && make install && make livehtml .PHONY: format format: black graphene examples setup.py .PHONY: lint lint: flake8 graphene examples setup.py python-graphene-3.4.3/README.md000066400000000000000000000117771471374454500161430ustar00rootroot00000000000000# ![Graphene Logo](http://graphene-python.org/favicon.png) [Graphene](http://graphene-python.org) [![PyPI version](https://badge.fury.io/py/graphene.svg)](https://badge.fury.io/py/graphene) [![Coverage Status](https://coveralls.io/repos/graphql-python/graphene/badge.svg?branch=master&service=github)](https://coveralls.io/github/graphql-python/graphene?branch=master) [![](https://dcbadge.vercel.app/api/server/T6Gp6NFYHe?style=flat)](https://discord.gg/T6Gp6NFYHe) [πŸ’¬ Join the community on Discord](https://discord.gg/T6Gp6NFYHe) **We are looking for contributors**! Please check the current issues to see how you can help ❀️ ## Introduction [Graphene](http://graphene-python.org) is an opinionated Python library for building GraphQL schemas/types fast and easily. - **Easy to use:** Graphene helps you use GraphQL in Python without effort. 
- **Relay:** Graphene has builtin support for Relay. - **Data agnostic:** Graphene supports any kind of data source: SQL (Django, SQLAlchemy), Mongo, custom Python objects, etc. We believe that by providing a complete API you could plug Graphene anywhere your data lives and make your data available through GraphQL. ## Integrations Graphene has multiple integrations with different frameworks: | integration | Package | | ----------------- | --------------------------------------------------------------------------------------- | | SQLAlchemy | [graphene-sqlalchemy](https://github.com/graphql-python/graphene-sqlalchemy/) | | Mongo | [graphene-mongo](https://github.com/graphql-python/graphene-mongo/) | | Apollo Federation | [graphene-federation](https://github.com/graphql-python/graphene-federation/) | | Django | [graphene-django](https://github.com/graphql-python/graphene-django/) | Also, Graphene is fully compatible with the GraphQL spec, working seamlessly with all GraphQL clients, such as [Relay](https://github.com/facebook/relay), [Apollo](https://github.com/apollographql/apollo-client) and [gql](https://github.com/graphql-python/gql). 
## Installation To install `graphene`, just run this command in your shell ```bash pip install "graphene>=3.1" ``` ## Examples Here is one example for you to get started: ```python import graphene class Query(graphene.ObjectType): hello = graphene.String(description='A typical hello world') def resolve_hello(self, info): return 'World' schema = graphene.Schema(query=Query) ``` Then Querying `graphene.Schema` is as simple as: ```python query = ''' query SayHello { hello } ''' result = schema.execute(query) ``` If you want to learn even more, you can also check the following [examples](examples/): - **Basic Schema**: [Starwars example](examples/starwars) - **Relay Schema**: [Starwars Relay example](examples/starwars_relay) ## Documentation Documentation and links to additional resources are available at https://docs.graphene-python.org/en/latest/ ## Contributing After cloning this repo, create a [virtualenv](https://virtualenv.pypa.io/en/stable/) and ensure dependencies are installed by running: ```sh virtualenv venv source venv/bin/activate pip install -e ".[test]" ``` Well-written tests and maintaining good test coverage is important to this project. While developing, run new and existing tests with: ```sh pytest graphene/relay/tests/test_node.py # Single file pytest graphene/relay # All tests in directory ``` Add the `-s` flag if you have introduced breakpoints into the code for debugging. Add the `-v` ("verbose") flag to get more detailed test output. For even more detailed output, use `-vv`. Check out the [pytest documentation](https://docs.pytest.org/en/latest/) for more options and test running controls. Regularly ensure your `pre-commit` hooks are up to date and enabled: ```sh pre-commit install ``` You can also run the benchmarks with: ```sh pytest graphene --benchmark-only ``` Graphene supports several versions of Python. 
To make sure that changes do not break compatibility with any of those versions, we use `tox` to create virtualenvs for each Python version and run tests with that version. To run against all Python versions defined in the `tox.ini` config file, just run: ```sh tox ``` If you wish to run against a specific version defined in the `tox.ini` file: ```sh tox -e py39 ``` Tox can only use whatever versions of Python are installed on your system. When you create a pull request, GitHub Actions pipelines will also be running the same tests and report the results, so there is no need for potential contributors to try to install every single version of Python on their own system ahead of time. We appreciate opening issues and pull requests to make graphene even more stable & useful! ### Building Documentation The documentation is generated using the excellent [Sphinx](http://www.sphinx-doc.org/) and a custom theme. An HTML version of the documentation is produced by running: ```sh make docs ``` python-graphene-3.4.3/SECURITY.md000066400000000000000000000011021471374454500164320ustar00rootroot00000000000000# Security Policy ## Supported Versions Support for security issues is currently provided for Graphene 3.0 and above. Support on earlier versions cannot be guaranteed by the maintainers of this library, but community PRs may be accepted in critical cases. The preferred mitigation strategy is via an upgrade to Graphene 3. | Version | Supported | | ------- | ------------------ | | 3.x | :white_check_mark: | | <3.x | :x: | ## Reporting a Vulnerability Please use responsible disclosure by contacting a core maintainer via Discord or E-Mail. python-graphene-3.4.3/UPGRADE-v1.0.md000066400000000000000000000115141471374454500167440ustar00rootroot00000000000000# v1.0 Upgrade Guide Big changes from v0.10.x to 1.0. 
While on the surface a lot of this just looks like shuffling around API, the entire codebase has been rewritten to handle some really great use cases and improved performance. ## Backwards Compatibility and Deprecation Warnings This has been a community project from the start, we need your help making the upgrade as smooth as possible for everybody! We have done our best to provide backwards compatibility with deprecated APIs. ## Deprecations - `with_context` is no longer needed. Resolvers now always take the context argument. Before: ```python def resolve_xxx(root, args, info): # ... ``` With 1.0: ```python def resolve_xxx(root, args, context, info): # ... ``` - `ObjectType` and `Interface` no longer accept the `abstract` option in the `Meta`. Inheriting fields should be now achieved using `AbstractType` inheritance. Before: ```python class MyBaseQuery(graphene.ObjectType): my_field = String() class Meta: abstract = True class Query(MyBaseQuery): pass ``` With 1.0: ```python class MyBaseQuery(graphene.AbstractType): my_field = String() class Query(MyBaseQuery, graphene.ObjectType): pass ``` - The `type_name` option in the Meta in types is now `name` - Type references no longer work with strings, but with functions. Before: ```python class Query(graphene.ObjectType): user = graphene.Field('User') users = graphene.List('User') ``` With 1.0: ```python class Query(graphene.ObjectType): user = graphene.Field(lambda: User) users = graphene.List(lambda: User) ``` ## Schema Schemas in graphene `1.0` are `Immutable`, that means that once you create a `graphene.Schema` any change in their attributes will not have any effect. The `name` argument is removed from the Schema. The arguments `executor` and `middlewares` are also removed from the `Schema` definition. You can still use them, but by calling explicitly in the `execute` method in `graphql`. 
```python # Old way schema = graphene.Schema(name='My Schema') schema.query = Query schema.mutation = Mutation # New way schema = graphene.Schema( query=Query, mutation=Mutation ) ``` ## Interfaces For implementing an Interface in an ObjectType, you have to add it onto `Meta.interfaces`. Like: ```python from graphene import Interface, ObjectType, String class Character(Interface): name = String() class Human(Character): # Old way, Human will still be an Interface pass class Droid(ObjectType): # New way, you have to specify the ObjectType class Meta: interfaces = (Character, ) ``` ## Mutations Mutation fields have changed the way of usage, before if you have the mutation `MyMutation` you only have to reference with `graphene.Field(MyMutation)` now it's simply `MyMutation.Field()` Example: ```python from graphene import ObjectType, Mutation, String class ReverseString(Mutation): class Input: input = String(required=True) reversed = String() def mutate(root, args, context, info): reversed = args.get('input')[::-1] return ReverseString(reversed=reversed) class Query(ObjectType): reverse_string = graphene.Field(ReverseString) # Old way, will not include the mutation arguments by default reverse_string = ReverseString.Field() ``` ## Nodes Apart from implementing as shown in the previous section, to use the node field you have to specify the node Type. Example: ```python from graphene import ObjectType, relay class Query(ObjectType): node = relay.NodeField() # Old way, NodeField no longer exists. Use Node.Field node = relay.Node.Field() # New way ``` Also, if you wanted to create an `ObjectType` that implements `Node`, you have to do it explicitly. ## Django The Django integration with Graphene now has an independent package: `graphene-django`. For installing, you have to replace the old `graphene[django]` with `graphene-django`. - As the package is now independent, you now have to import from `graphene_django`. 
- **DjangoNode no longer exists**, please use `relay.Node` instead: ```python from graphene.relay import Node from graphene_django import DjangoObjectType class Droid(DjangoObjectType): class Meta: interfaces = (Node, ) ``` ## SQLAlchemy The SQLAlchemy integration with Graphene now has an independent package: `graphene-sqlalchemy`. For installing, you have to replace the old `graphene[sqlalchemy]` with `graphene-sqlalchemy`. - As the package is now independent, you have to import now from `graphene_sqlalchemy`. - **SQLAlchemyNode no longer exists**, please use `relay.Node` instead: ```python from graphene.relay import Node from graphene_sqlalchemy import SQLAlchemyObjectType class Droid(SQLAlchemyObjectType): class Meta: interfaces = (Node, ) ``` python-graphene-3.4.3/UPGRADE-v2.0.md000066400000000000000000000177401471374454500167540ustar00rootroot00000000000000# v2.0 Upgrade Guide `ObjectType`, `Interface`, `InputObjectType`, `Scalar` and `Enum` implementations have been quite simplified, without the need to define a explicit Metaclass for each subtype. It also improves the field resolvers, [simplifying the code](#simpler-resolvers) the developer has to write to use them. **Deprecations:** - [`AbstractType`](#abstracttype-deprecated) - [`resolve_only_args`](#resolve_only_args) - [`Mutation.Input`](#mutationinput) **Breaking changes:** - [`Simpler Resolvers`](#simpler-resolvers) - [`Node Connections`](#node-connections) **New Features!** - [`InputObjectType`](#inputobjecttype) - [`Meta as Class arguments`](#meta-as-class-arguments) (_only available for Python 3_) > The type metaclasses are now deleted as they are no longer necessary. If your code was depending > on this strategy for creating custom attrs, see an [example on how to do it in 2.0](https://github.com/graphql-python/graphene/blob/v2.0.0/graphene/tests/issues/test_425.py). ## Deprecations ### AbstractType deprecated AbstractType is deprecated in graphene 2.0, you can now use normal inheritance instead. 
Before: ```python class CommonFields(AbstractType): name = String() class Pet(CommonFields, Interface): pass ``` With 2.0: ```python class CommonFields(object): name = String() class Pet(CommonFields, Interface): pass ``` ### resolve_only_args `resolve_only_args` is now deprecated as the resolver API has been simplified. Before: ```python class User(ObjectType): name = String() @resolve_only_args def resolve_name(root): return root.name ``` With 2.0: ```python class User(ObjectType): name = String() def resolve_name(root, info): return root.name ``` ### Mutation.Input `Mutation.Input` is now deprecated in favor of using `Mutation.Arguments` (`ClientIDMutation` still uses `Input`). Before: ```python class User(Mutation): class Input: name = String() ``` With 2.0: ```python class User(Mutation): class Arguments: name = String() ``` ## Breaking Changes ### Simpler resolvers All the resolvers in graphene have been simplified. Prior to Graphene `2.0`, all resolvers required four arguments: `(root, args, context, info)`. Now, resolver `args` are passed as keyword arguments to the function, and `context` argument dissapeared in favor of `info.context`. Before: ```python my_field = graphene.String(my_arg=graphene.String()) def resolve_my_field(root, args, context, info): my_arg = args.get('my_arg') return ... ``` With 2.0: ```python my_field = graphene.String(my_arg=graphene.String()) def resolve_my_field(root, info, my_arg): return ... ``` **PS.: Take care with receiving args like `my_arg` as above. 
This doesn't work for optional (non-required) arguments as standard `Connection`'s arguments (first, last, after, before).** You may need something like this: ```python def resolve_my_field(root, info, known_field1, known_field2, **args): ## get other args with: args.get('arg_key') ``` And, if you need the context in the resolver, you can use `info.context`: ```python my_field = graphene.String(my_arg=graphene.String()) def resolve_my_field(root, info, my_arg): context = info.context return ... ``` ### Node Connections Node types no longer have a `Connection` by default. In 2.0 and onwards `Connection`s should be defined explicitly. Before: ```python class User(ObjectType): class Meta: interfaces = [relay.Node] name = String() class Query(ObjectType): user_connection = relay.ConnectionField(User) ``` With 2.0: ```python class User(ObjectType): class Meta: interfaces = [relay.Node] name = String() class UserConnection(relay.Connection): class Meta: node = User class Query(ObjectType): user_connection = relay.ConnectionField(UserConnection) ``` ## Node.get_node The method `get_node` in `ObjectTypes` that have `Node` as interface, changes its API. From `def get_node(cls, id, context, info)` to `def get_node(cls, info, id)`. ```python class MyObject(ObjectType): class Meta: interfaces = (Node, ) @classmethod def get_node(cls, id, context, info): return ... ``` To: ```python class MyObject(ObjectType): class Meta: interfaces = (Node, ) @classmethod def get_node(cls, info, id): return ... ``` ## Node.get_node_from_global_id The parameters' order of `get_node_from_global_id` method has changed. You may need to adjust your [Node Root Field](http://docs.graphene-python.org/en/latest/relay/nodes/#node-root-field) and maybe other places that uses this method to obtain an object. Before: ```python class RootQuery(object): ... 
node = Field(relay.Node, id=ID(required=True)) def resolve_node(root, args, context, info): node = relay.Node.get_node_from_global_id(args['id'], context, info) return node ``` Now: ```python class RootQuery(object): ... node = Field(relay.Node, id=ID(required=True)) def resolve_node(root, info, id): node = relay.Node.get_node_from_global_id(info, id) return node ``` ## Mutation.mutate Now only receives (`root`, `info`, `**kwargs`) and is not a @classmethod Before: ```python class SomeMutation(Mutation): ... @classmethod def mutate(cls, instance, args, context, info): ... ``` With 2.0: ```python class SomeMutation(Mutation): ... def mutate(root, info, **args): ... ``` With 2.0 you can also get your declared (as above) `args` this way: ```python class SomeMutation(Mutation): class Arguments: first_name = String(required=True) last_name = String(required=True) ... def mutate(root, info, first_name, last_name): ... ``` ## ClientIDMutation.mutate_and_get_payload Now only receives (`root`, `info`, `**input`) ### Middlewares If you are using Middelwares, you need to some adjustments: Before: ```python class MyGrapheneMiddleware(object): def resolve(self, next_mw, root, args, context, info): ## Middleware code return next_mw(root, args, context, info) ``` With 2.0: ```python class MyGrapheneMiddleware(object): def resolve(self, next_mw, root, info, **args): context = info.context ## Middleware code info.context = context Β  Β  Β  Β return next_mw(root, info, **args) ``` ## New Features ### InputObjectType If you are using `InputObjectType`, you now can access its fields via `getattr` (`my_input.myattr`) when resolving, instead of the classic way `my_input['myattr']`. And also use custom defined properties on your input class. Example. 
Before: ```python class UserInput(InputObjectType): id = ID(required=True) def is_valid_input(input): return input.get('id').startswith('userid_') class Query(ObjectType): user = graphene.Field(User, input=UserInput()) @resolve_only_args def resolve_user(root, input): user_id = input.get('id') if is_valid_input(user_id): return get_user(user_id) ``` With 2.0: ```python class UserInput(InputObjectType): id = ID(required=True) @property def is_valid(root): return root.id.startswith('userid_') class Query(ObjectType): user = graphene.Field(User, input=UserInput()) def resolve_user(root, info, input): if input.is_valid: return get_user(input.id) ``` ### Meta as Class arguments Now you can use the meta options as class arguments (**ONLY PYTHON 3**). Before: ```python class Dog(ObjectType): class Meta: interfaces = [Pet] name = String() ``` With 2.0: ```python class Dog(ObjectType, interfaces=[Pet]): name = String() ``` ### Abstract types Now you can create abstact types super easily, without the need of subclassing the meta. ```python class Base(ObjectType): class Meta: abstract = True id = ID() def resolve_id(root, info): return f"{root.__class__.__name__}_{root.id}" ``` ### UUID Scalar In Graphene 2.0 there is a new dedicated scalar for UUIDs, `UUID`. python-graphene-3.4.3/bin/000077500000000000000000000000001471374454500154175ustar00rootroot00000000000000python-graphene-3.4.3/bin/convert_documentation000077500000000000000000000001101471374454500217460ustar00rootroot00000000000000#!/bin/bash pandoc README.md --from markdown --to rst -s -o README.rst python-graphene-3.4.3/docs/000077500000000000000000000000001471374454500155775ustar00rootroot00000000000000python-graphene-3.4.3/docs/Makefile000066400000000000000000000166301471374454500172450ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # Internal variables. 
PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help help: @echo "Please use \`make ' where is one of" @grep -E '^\.PHONY: [a-zA-Z_-]+ .*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = "(: |##)"}; {printf "\033[36m%-30s\033[0m %s\n", $$2, $$3}' .PHONY: install ## to install all documentation related requirements install: pip install -r requirements.txt .PHONY: clean ## to remove all built documentation clean: rm -rf $(BUILDDIR)/* .PHONY: html ## to make standalone HTML files html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." .PHONY: dirhtml ## to make HTML files named index.html in directories dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." .PHONY: singlehtml ## to make a single large HTML file singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." .PHONY: pickle ## to make pickle files pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." .PHONY: json ## to make JSON files json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." .PHONY: htmlhelp ## to make HTML files and a HTML help project htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." 
.PHONY: qthelp ## to make HTML files and a qthelp project qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Graphene.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Graphene.qhc" .PHONY: applehelp ## to make an Apple Help Book applehelp: $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp @echo @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." @echo "N.B. You won't be able to view it unless you put it in" \ "~/Library/Documentation/Help or install it in your application" \ "bundle." .PHONY: devhelp ## to make HTML files and a Devhelp project devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/Graphene" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Graphene" @echo "# devhelp" .PHONY: epub ## to make an epub epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." .PHONY: epub3 ## to make an epub3 epub3: $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3 @echo @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3." .PHONY: latex ## to make LaTeX files, you can set PAPER=a4 or PAPER=letter latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." .PHONY: latexpdf ## to make LaTeX files and run them through pdflatex latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." 
$(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." .PHONY: latexpdfja ## to make LaTeX files and run them through platex/dvipdfmx latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." .PHONY: text ## to make text files text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." .PHONY: man ## to make manual pages man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." .PHONY: texinfo ## to make Texinfo files texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." .PHONY: info ## to make Texinfo files and run them through makeinfo info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." .PHONY: gettext ## to make PO message catalogs gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." .PHONY: changes ## to make an overview of all changed/added/deprecated items changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." 
.PHONY: linkcheck ## to check all external links for integrity linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." .PHONY: doctest ## to run all doctests embedded in the documentation (if enabled) doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." .PHONY: coverage ## to run coverage check of the documentation (if enabled) coverage: $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage @echo "Testing of coverage in the sources finished, look at the " \ "results in $(BUILDDIR)/coverage/python.txt." .PHONY: xml ## to make Docutils-native XML files xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." .PHONY: pseudoxml ## to make pseudoxml-XML files for display purposes pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." .PHONY: dummy ## to check syntax errors of document sources dummy: $(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy @echo @echo "Build finished. Dummy builder generates no files." 
.PHONY: livehtml ## to build and serve live-reloading documentation livehtml: sphinx-autobuild -b html --watch ../graphene $(ALLSPHINXOPTS) $(BUILDDIR)/html python-graphene-3.4.3/docs/_static/000077500000000000000000000000001471374454500172255ustar00rootroot00000000000000python-graphene-3.4.3/docs/_static/.gitkeep000066400000000000000000000000001471374454500206440ustar00rootroot00000000000000python-graphene-3.4.3/docs/api/000077500000000000000000000000001471374454500163505ustar00rootroot00000000000000python-graphene-3.4.3/docs/api/index.rst000066400000000000000000000032761471374454500202210ustar00rootroot00000000000000API Reference ============= Schema ------ .. autoclass:: graphene.types.schema.Schema :members: .. Uncomment sections / types as API documentation is fleshed out .. in each class Object types ------------ .. autoclass:: graphene.ObjectType .. autoclass:: graphene.InputObjectType .. autoclass:: graphene.Mutation :members: .. _fields-mounted-types: Fields (Mounted Types) ---------------------- .. autoclass:: graphene.Field .. autoclass:: graphene.Argument .. autoclass:: graphene.InputField Fields (Unmounted Types) ------------------------ .. autoclass:: graphene.types.unmountedtype.UnmountedType GraphQL Scalars --------------- .. autoclass:: graphene.Int() .. autoclass:: graphene.Float() .. autoclass:: graphene.String() .. autoclass:: graphene.Boolean() .. autoclass:: graphene.ID() Graphene Scalars ---------------- .. autoclass:: graphene.Date() .. autoclass:: graphene.DateTime() .. autoclass:: graphene.Time() .. autoclass:: graphene.Decimal() .. autoclass:: graphene.UUID() .. autoclass:: graphene.JSONString() .. autoclass:: graphene.Base64() Enum ---- .. autoclass:: graphene.Enum() Structures ---------- .. autoclass:: graphene.List .. autoclass:: graphene.NonNull Type Extension -------------- .. autoclass:: graphene.Interface() .. autoclass:: graphene.Union() Execution Metadata ------------------ .. autoclass:: graphene.ResolveInfo .. 
autoclass:: graphene.Context .. autoclass:: graphql.ExecutionResult .. Relay .. ----- .. .. autoclass:: graphene.Node .. .. autoclass:: graphene.GlobalID .. .. autoclass:: graphene.ClientIDMutation .. .. autoclass:: graphene.Connection .. .. autoclass:: graphene.ConnectionField .. .. autoclass:: graphene.PageInfo python-graphene-3.4.3/docs/conf.py000066400000000000000000000307131471374454500171020ustar00rootroot00000000000000import os import sys import sphinx_graphene_theme on_rtd = os.environ.get("READTHEDOCS", None) == "True" # -*- coding: utf-8 -*- # # Graphene documentation build configuration file, created by # sphinx-quickstart on Sun Sep 11 18:30:51 2016. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # sys.path.insert(0, os.path.abspath("..")) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. # # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ "sphinx.ext.autodoc", "sphinx.ext.intersphinx", "sphinx.ext.todo", "sphinx.ext.coverage", "sphinx.ext.viewcode", "sphinx.ext.napoleon", ] if not on_rtd: extensions += ["sphinx.ext.githubpages"] # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # The suffix(es) of source filenames. 
# You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] source_suffix = ".rst" # The encoding of source files. # # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = "index" # General information about the project. project = "Graphene" copyright = "Graphene 2016" author = "Syrus Akbary" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = "1.0" # The full version, including alpha/beta/rc tags. release = "1.0" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. # language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # # today = '' # # Else, today_fmt is used as the format for a strftime call. # # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] # The reST default role (used for this markup: `text`) to use for all # documents. # # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # # show_authors = False # The name of the Pygments (syntax highlighting) style to use. 
pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. # keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # # html_theme = 'alabaster' # if on_rtd: # html_theme = 'sphinx_rtd_theme' html_theme = "sphinx_graphene_theme" html_theme_path = [sphinx_graphene_theme.get_html_theme_path()] # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. # " v documentation" by default. # # html_title = u'Graphene v1.0' # A shorter title for the navigation bar. Default is the same as html_title. # # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # # html_logo = None # The name of an image file (relative to this directory) to use as a favicon of # the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ["_static"] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. 
# # html_extra_path = [] # If not None, a 'Last updated on:' timestamp is inserted at every page # bottom, using the given strftime format. # The empty string is equivalent to '%b %d, %Y'. # # html_last_updated_fmt = None # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # # html_additional_pages = {} # If false, no module index is generated. # # html_domain_indices = True # If false, no index is generated. # # html_use_index = True # If true, the index is split into individual pages for each letter. # # html_split_index = False # If true, links to the reST sources are added to the pages. # # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh' # # html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # 'ja' uses this config value. # 'zh' user can custom change `jieba` dictionary path. 
# # html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. # # html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. htmlhelp_basename = "Graphenedoc" # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, "Graphene.tex", "Graphene Documentation", "Syrus Akbary", "manual") ] # The name of an image file (relative to this directory) to place at the top of # the title page. # # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # # latex_use_parts = False # If true, show page references after internal links. # # latex_show_pagerefs = False # If true, show URL addresses after external links. # # latex_show_urls = False # Documents to append as an appendix to all manuals. # # latex_appendices = [] # It false, will not define \strong, \code, itleref, \crossref ... but only # \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added # packages. # # latex_keep_old_macro_names = True # If false, no module index is generated. # # latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
man_pages = [(master_doc, "graphene", "Graphene Documentation", [author], 1)] # If true, show URL addresses after external links. # # man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ( master_doc, "Graphene", "Graphene Documentation", author, "Graphene", "One line description of project.", "Miscellaneous", ) ] # Documents to append as an appendix to all manuals. # # texinfo_appendices = [] # If false, no module index is generated. # # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. # # texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. # # texinfo_no_detailmenu = False # -- Options for Epub output ---------------------------------------------- # Bibliographic Dublin Core info. epub_title = project epub_author = author epub_publisher = author epub_copyright = copyright # The basename for the epub file. It defaults to the project name. # epub_basename = project # The HTML theme for the epub output. Since the default themes are not # optimized for small screen space, using the same theme for HTML and epub # output is usually not wise. This defaults to 'epub', a theme designed to save # visual space. # # epub_theme = 'epub' # The language of the text. It defaults to the language option # or 'en' if the language is not set. # # epub_language = '' # The scheme of the identifier. Typical schemes are ISBN or URL. # epub_scheme = '' # The unique identifier of the text. This can be a ISBN number # or the project homepage. # # epub_identifier = '' # A unique identification for the text. # # epub_uid = '' # A tuple containing the cover image and cover page html template filenames. 
# # epub_cover = () # A sequence of (type, uri, title) tuples for the guide element of content.opf. # # epub_guide = () # HTML files that should be inserted before the pages created by sphinx. # The format is a list of tuples containing the path and title. # # epub_pre_files = [] # HTML files that should be inserted after the pages created by sphinx. # The format is a list of tuples containing the path and title. # # epub_post_files = [] # A list of files that should not be packed into the epub file. epub_exclude_files = ["search.html"] # The depth of the table of contents in toc.ncx. # # epub_tocdepth = 3 # Allow duplicate toc entries. # # epub_tocdup = True # Choose between 'default' and 'includehidden'. # # epub_tocscope = 'default' # Fix unsupported image types using the Pillow. # # epub_fix_images = False # Scale large images. # # epub_max_image_width = 0 # How to display URL addresses: 'footnote', 'no', or 'inline'. # # epub_show_urls = 'inline' # If false, no index is generated. # # epub_use_index = True # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { "https://docs.python.org/": None, "python": ("https://docs.python.org/", None), "graphene_django": ( "http://docs.graphene-python.org/projects/django/en/latest/", None, ), "graphene_sqlalchemy": ( "http://docs.graphene-python.org/projects/sqlalchemy/en/latest/", None, ), } python-graphene-3.4.3/docs/execution/000077500000000000000000000000001471374454500176025ustar00rootroot00000000000000python-graphene-3.4.3/docs/execution/dataloader.rst000066400000000000000000000071271471374454500224430ustar00rootroot00000000000000Dataloader ========== DataLoader is a generic utility to be used as part of your application's data fetching layer to provide a simplified and consistent API over various remote data sources such as databases or web services via batching and caching. It is provided by a separate package `aiodataloader `. 
Batching -------- Batching is not an advanced feature, it's DataLoader's primary feature. Create loaders by providing a batch loading function. .. code:: python from aiodataloader import DataLoader class UserLoader(DataLoader): async def batch_load_fn(self, keys): # Here we call a function to return a user for each key in keys return [get_user(id=key) for key in keys] A batch loading async function accepts a list of keys, and returns a list of ``values``. ``DataLoader`` will coalesce all individual loads which occur within a single frame of execution (executed once the wrapping event loop is resolved) and then call your batch function with all requested keys. .. code:: python user_loader = UserLoader() user1 = await user_loader.load(1) user1_best_friend = await user_loader.load(user1.best_friend_id) user2 = await user_loader.load(2) user2_best_friend = await user_loader.load(user2.best_friend_id) A naive application may have issued *four* round-trips to a backend for the required information, but with ``DataLoader`` this application will make at most *two*. Note that loaded values are one-to-one with the keys and must have the same order. This means that if you load all values from a single query, you must make sure that you then order the query result for the results to match the keys: .. code:: python class UserLoader(DataLoader): async def batch_load_fn(self, keys): users = {user.id: user for user in User.objects.filter(id__in=keys)} return [users.get(user_id) for user_id in keys] ``DataLoader`` allows you to decouple unrelated parts of your application without sacrificing the performance of batch data-loading. While the loader presents an API that loads individual values, all concurrent requests will be coalesced and presented to your batch loading function. This allows your application to safely distribute data fetching requirements throughout your application and maintain minimal outgoing data requests. 
Using with Graphene ------------------- DataLoader pairs nicely well with Graphene/GraphQL. GraphQL fields are designed to be stand-alone functions. Without a caching or batching mechanism, it's easy for a naive GraphQL server to issue new database requests each time a field is resolved. Consider the following GraphQL request: .. code:: { me { name bestFriend { name } friends(first: 5) { name bestFriend { name } } } } If ``me``, ``bestFriend`` and ``friends`` each need to send a request to the backend, there could be at most 13 database requests! When using DataLoader, we could define the User type using our previous example with leaner code and at most 4 database requests, and possibly fewer if there are cache hits. .. code:: python class User(graphene.ObjectType): name = graphene.String() best_friend = graphene.Field(lambda: User) friends = graphene.List(lambda: User) async def resolve_best_friend(root, info): return await user_loader.load(root.best_friend_id) async def resolve_friends(root, info): return await user_loader.load_many(root.friend_ids) python-graphene-3.4.3/docs/execution/execute.rst000066400000000000000000000057401471374454500220040ustar00rootroot00000000000000.. _SchemaExecute: Executing a query ================= For executing a query against a schema, you can directly call the ``execute`` method on it. .. code:: python from graphene import Schema schema = Schema(...) result = schema.execute('{ name }') ``result`` represents the result of execution. ``result.data`` is the result of executing the query, ``result.errors`` is ``None`` if no errors occurred, and is a non-empty list if an error occurred. .. _SchemaExecuteContext: Context _______ You can pass context to a query via ``context``. .. 
code:: python from graphene import ObjectType, String, Schema class Query(ObjectType): name = String() def resolve_name(root, info): return info.context.get('name') schema = Schema(Query) result = schema.execute('{ name }', context={'name': 'Syrus'}) assert result.data['name'] == 'Syrus' Variables _________ You can pass variables to a query via ``variables``. .. code:: python from graphene import ObjectType, Field, ID, Schema class Query(ObjectType): user = Field(User, id=ID(required=True)) def resolve_user(root, info, id): return get_user_by_id(id) schema = Schema(Query) result = schema.execute( ''' query getUser($id: ID) { user(id: $id) { id firstName lastName } } ''', variables={'id': 12}, ) Root Value __________ Value used for :ref:`ResolverParamParent` in root queries and mutations can be overridden using ``root`` parameter. .. code:: python from graphene import ObjectType, Field, Schema class Query(ObjectType): me = Field(User) def resolve_user(root, info): return {'id': root.id, 'firstName': root.name} schema = Schema(Query) user_root = User(id=12, name='bob') result = schema.execute( ''' query getUser { user { id firstName lastName } } ''', root=user_root ) assert result.data['user']['id'] == user_root.id Operation Name ______________ If there are multiple operations defined in a query string, ``operation_name`` should be used to indicate which should be executed. .. 
code:: python from graphene import ObjectType, Field, Schema class Query(ObjectType): user = Field(User) def resolve_user(root, info): return get_user_by_id(12) schema = Schema(Query) query_string = ''' query getUserWithFirstName { user { id firstName lastName } } query getUserWithFullName { user { id fullName } } ''' result = schema.execute( query_string, operation_name='getUserWithFullName' ) assert result.data['user']['fullName'] python-graphene-3.4.3/docs/execution/fileuploading.rst000066400000000000000000000007211471374454500231560ustar00rootroot00000000000000File uploading ============== File uploading is not part of the official GraphQL spec yet and is not natively implemented in Graphene. If your server needs to support file uploading then you can use the library: `graphene-file-upload `_ which enhances Graphene to add file uploads and conforms to the unoffical GraphQL `multipart request spec `_. python-graphene-3.4.3/docs/execution/index.rst000066400000000000000000000002311471374454500214370ustar00rootroot00000000000000========= Execution ========= .. toctree:: :maxdepth: 2 execute middleware dataloader fileuploading subscriptions queryvalidation python-graphene-3.4.3/docs/execution/middleware.rst000066400000000000000000000037061471374454500224570ustar00rootroot00000000000000Middleware ========== You can use ``middleware`` to affect the evaluation of fields in your schema. A middleware is any object or function that responds to ``resolve(next_middleware, *args)``. Inside that method, it should either: - Send ``resolve`` to the next middleware to continue the evaluation; or - Return a value to end the evaluation early. Resolve arguments ----------------- Middlewares ``resolve`` is invoked with several arguments: - ``next`` represents the execution chain. Call ``next`` to continue evaluation. - ``root`` is the root value object passed throughout the query. - ``info`` is the resolver info. - ``args`` is the dict of arguments passed to the field. 
Example ------- This middleware only continues evaluation if the ``field_name`` is not ``'user'`` .. code:: python class AuthorizationMiddleware(object): def resolve(self, next, root, info, **args): if info.field_name == 'user': return None return next(root, info, **args) And then execute it with: .. code:: python result = schema.execute('THE QUERY', middleware=[AuthorizationMiddleware()]) If the ``middleware`` argument includes multiple middlewares, these middlewares will be executed bottom-up, i.e. from last to first. Functional example ------------------ Middleware can also be defined as a function. Here we define a middleware that logs the time it takes to resolve each field: .. code:: python from time import time as timer def timing_middleware(next, root, info, **args): start = timer() return_value = next(root, info, **args) duration = round((timer() - start) * 1000, 2) parent_type_name = root._meta.name if root and hasattr(root, '_meta') else '' logger.debug(f"{parent_type_name}.{info.field_name}: {duration} ms") return return_value And then execute it with: .. code:: python result = schema.execute('THE QUERY', middleware=[timing_middleware]) python-graphene-3.4.3/docs/execution/queryvalidation.rst000066400000000000000000000077011471374454500235610ustar00rootroot00000000000000Query Validation ================ GraphQL uses query validators to check if Query AST is valid and can be executed. Every GraphQL server implements standard query validators. For example, there is an validator that tests if queried field exists on queried type, that makes query fail with "Cannot query field on type" error if it doesn't. To help with common use cases, graphene provides a few validation rules out of the box. Depth limit Validator --------------------- The depth limit validator helps to prevent execution of malicious queries. It takes in the following arguments. - ``max_depth`` is the maximum allowed depth for any operation in a GraphQL document. 
- ``ignore`` Stops recursive depth checking based on a field name. Either a string or regexp to match the name, or a function that returns a boolean - ``callback`` Called each time validation runs. Receives an Object which is a map of the depths for each operation. Usage ----- Here is how you would implement depth-limiting on your schema. .. code:: python from graphql import validate, parse from graphene import ObjectType, Schema, String from graphene.validation import depth_limit_validator class MyQuery(ObjectType): name = String(required=True) schema = Schema(query=MyQuery) # queries which have a depth more than 20 # will not be executed. validation_errors = validate( schema=schema.graphql_schema, document_ast=parse('THE QUERY'), rules=( depth_limit_validator( max_depth=20 ), ) ) Disable Introspection --------------------- the disable introspection validation rule ensures that your schema cannot be introspected. This is a useful security measure in production environments. Usage ----- Here is how you would disable introspection for your schema. .. code:: python from graphql import validate, parse from graphene import ObjectType, Schema, String from graphene.validation import DisableIntrospection class MyQuery(ObjectType): name = String(required=True) schema = Schema(query=MyQuery) # introspection queries will not be executed. validation_errors = validate( schema=schema.graphql_schema, document_ast=parse('THE QUERY'), rules=( DisableIntrospection, ) ) Implementing custom validators ------------------------------ All custom query validators should extend the `ValidationRule `_ base class importable from the graphql.validation.rules module. Query validators are visitor classes. They are instantiated at the time of query validation with one required argument (context: ASTValidationContext). In order to perform validation, your validator class should define one or more of enter_* and leave_* methods. 
For possible enter/leave items as well as details on function documentation, please see contents of the visitor module. To make validation fail, you should call validator's report_error method with the instance of GraphQLError describing failure reason. Here is an example query validator that visits field definitions in GraphQL query and fails query validation if any of those fields are blacklisted: .. code:: python from graphql import GraphQLError from graphql.language import FieldNode from graphql.validation import ValidationRule my_blacklist = ( "disallowed_field", ) def is_blacklisted_field(field_name: str): return field_name.lower() in my_blacklist class BlackListRule(ValidationRule): def enter_field(self, node: FieldNode, *_args): field_name = node.name.value if not is_blacklisted_field(field_name): return self.report_error( GraphQLError( f"Cannot query '{field_name}': field is blacklisted.", node, ) ) python-graphene-3.4.3/docs/execution/subscriptions.rst000066400000000000000000000021221471374454500232400ustar00rootroot00000000000000.. _SchemaSubscription: Subscriptions ============= To create a subscription, you can directly call the ``subscribe`` method on the schema. This method is async and must be awaited. .. code:: python import asyncio from datetime import datetime from graphene import ObjectType, String, Schema, Field # Every schema requires a query. class Query(ObjectType): hello = String() def resolve_hello(root, info): return "Hello, world!" class Subscription(ObjectType): time_of_day = String() async def subscribe_time_of_day(root, info): while True: yield datetime.now().isoformat() await asyncio.sleep(1) schema = Schema(query=Query, subscription=Subscription) async def main(schema): subscription = 'subscription { timeOfDay }' result = await schema.subscribe(subscription) async for item in result: print(item.data['timeOfDay']) asyncio.run(main(schema)) The ``result`` is an async iterator which yields items in the same manner as a query. 
python-graphene-3.4.3/docs/index.rst000066400000000000000000000016421471374454500174430ustar00rootroot00000000000000Graphene ======== Contents: .. toctree:: :maxdepth: 2 quickstart types/index execution/index relay/index testing/index api/index .. _Integrations: Integrations ------------ * `Graphene-Django `_ (`source `_) * Flask-Graphql (`source `_) * `Graphene-SQLAlchemy `_ (`source `_) * `Graphene-Mongo `_ (`source `_) * `Starlette `_ (`source `_) * `FastAPI `_ (`source `_) python-graphene-3.4.3/docs/quickstart.rst000066400000000000000000000115211471374454500205230ustar00rootroot00000000000000Getting started =============== Introduction ------------ What is GraphQL? ~~~~~~~~~~~~~~~~ GraphQL is a query language for your API. It provides a standard way to: * *describe data provided by a server* in a statically typed **Schema** * *request data* in a **Query** which exactly describes your data requirements and * *receive data* in a **Response** containing only the data you requested. For an introduction to GraphQL and an overview of its concepts, please refer to `the official GraphQL documentation`_. .. _the official GraphQL documentation: http://graphql.org/learn/ What is Graphene? ~~~~~~~~~~~~~~~~~ Graphene is a library that provides tools to implement a GraphQL API in Python using a *code-first* approach. Compare Graphene's *code-first* approach to building a GraphQL API with *schema-first* approaches like `Apollo Server`_ (JavaScript) or Ariadne_ (Python). Instead of writing GraphQL **Schema Definition Language (SDL)**, we write Python code to describe the data provided by your server. .. _Apollo Server: https://www.apollographql.com/docs/apollo-server/ .. _Ariadne: https://ariadnegraphql.org/ Graphene is fully featured with integrations for the most popular web frameworks and ORMs. Graphene produces schemas that are fully compliant with the GraphQL spec and provides tools and patterns for building a Relay-Compliant API as well. 
An example in Graphene ---------------------- Let’s build a basic GraphQL schema to say "hello" and "goodbye" in Graphene. When we send a **Query** requesting only one **Field**, ``hello``, and specify a value for the ``firstName`` **Argument**... .. code:: { hello(firstName: "friend") } ...we would expect the following Response containing only the data requested (the ``goodbye`` field is not resolved). .. code:: { "data": { "hello": "Hello friend!" } } Requirements ~~~~~~~~~~~~ - Python (3.8, 3.9, 3.10, 3.11, 3.12, pypy) - Graphene (3.0) Project setup ~~~~~~~~~~~~~ .. code:: bash pip install "graphene>=3.0" Creating a basic Schema ~~~~~~~~~~~~~~~~~~~~~~~ In Graphene, we can define a simple schema using the following code: .. code:: python from graphene import ObjectType, String, Schema class Query(ObjectType): # this defines a Field `hello` in our Schema with a single Argument `first_name` # By default, the argument name will automatically be camel-based into firstName in the generated schema hello = String(first_name=String(default_value="stranger")) goodbye = String() # our Resolver method takes the GraphQL context (root, info) as well as # Argument (first_name) for the Field and returns data for the query Response def resolve_hello(root, info, first_name): return f'Hello {first_name}!' def resolve_goodbye(root, info): return 'See ya!' schema = Schema(query=Query) A GraphQL **Schema** describes each **Field** in the data model provided by the server using scalar types like *String*, *Int* and *Enum* and compound types like *List* and *Object*. For more details refer to the Graphene :ref:`TypesReference`. Our schema can also define any number of **Arguments** for our **Fields**. This is a powerful way for a **Query** to describe the exact data requirements for each **Field**. For each **Field** in our **Schema**, we write a **Resolver** method to fetch data requested by a client's **Query** using the current context and **Arguments**. 
For more details, refer to this section on :ref:`Resolvers`. Schema Definition Language (SDL) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In the `GraphQL Schema Definition Language`_, we could describe the fields defined by our example code as shown below. .. _GraphQL Schema Definition Language: https://graphql.org/learn/schema/ .. code:: type Query { hello(firstName: String = "stranger"): String goodbye: String } Further examples in this documentation will use SDL to describe schema created by ObjectTypes and other fields. Querying ~~~~~~~~ Then we can start querying our **Schema** by passing a GraphQL query string to ``execute``: .. code:: python # we can query for our field (with the default argument) query_string = '{ hello }' result = schema.execute(query_string) print(result.data['hello']) # "Hello stranger!" # or passing the argument in the query query_with_argument = '{ hello(firstName: "GraphQL") }' result = schema.execute(query_with_argument) print(result.data['hello']) # "Hello GraphQL!" Next steps ~~~~~~~~~~ Congrats! You got your first Graphene schema working! Normally, we don't need to directly execute a query string against our schema as Graphene provides many useful Integrations with popular web frameworks like Flask and Django. Check out :ref:`Integrations` for more information on how to get started serving your GraphQL API. python-graphene-3.4.3/docs/relay/000077500000000000000000000000001471374454500167135ustar00rootroot00000000000000python-graphene-3.4.3/docs/relay/connection.rst000066400000000000000000000025311471374454500216050ustar00rootroot00000000000000Connection ========== A connection is a vitaminized version of a List that provides ways of slicing and paginating through it. The way you create Connection types in ``graphene`` is using ``relay.Connection`` and ``relay.ConnectionField``. Quick example ------------- If we want to create a custom Connection on a given node, we have to subclass the ``Connection`` class. 
In the following example, ``extra`` will be an extra field in the connection, and ``other`` an extra field in the Connection Edge. .. code:: python class ShipConnection(Connection): extra = String() class Meta: node = Ship class Edge: other = String() The ``ShipConnection`` connection class, will have automatically a ``pageInfo`` field, and a ``edges`` field (which is a list of ``ShipConnection.Edge``). This ``Edge`` will have a ``node`` field linking to the specified node (in ``ShipConnection.Meta``) and the field ``other`` that we defined in the class. Connection Field ---------------- You can create connection fields in any Connection, in case any ObjectType that implements ``Node`` will have a default Connection. .. code:: python class Faction(graphene.ObjectType): name = graphene.String() ships = relay.ConnectionField(ShipConnection) def resolve_ships(root, info): return [] python-graphene-3.4.3/docs/relay/index.rst000066400000000000000000000012541471374454500205560ustar00rootroot00000000000000Relay ===== Graphene has complete support for `Relay`_ and offers some utils to make integration from Python easy. .. toctree:: :maxdepth: 2 nodes connection mutations Useful links ------------ - `Getting started with Relay`_ - `Relay Global Identification Specification`_ - `Relay Cursor Connection Specification`_ .. _Relay: https://relay.dev/docs/guides/graphql-server-specification/ .. _Getting started with Relay: https://relay.dev/docs/getting-started/step-by-step-guide/ .. _Relay Global Identification Specification: https://relay.dev/graphql/objectidentification.htm .. _Relay Cursor Connection Specification: https://relay.dev/graphql/connections.htm python-graphene-3.4.3/docs/relay/mutations.rst000066400000000000000000000032201471374454500214650ustar00rootroot00000000000000Mutations ========= Most APIs don’t just allow you to read data, they also allow you to write. In GraphQL, this is done using mutations. 
Just like queries, Relay puts some additional requirements on mutations, but Graphene nicely manages that for you. All you need to do is make your mutation a subclass of ``relay.ClientIDMutation``. .. code:: python class IntroduceShip(relay.ClientIDMutation): class Input: ship_name = graphene.String(required=True) faction_id = graphene.String(required=True) ship = graphene.Field(Ship) faction = graphene.Field(Faction) @classmethod def mutate_and_get_payload(cls, root, info, **input): ship_name = input.ship_name faction_id = input.faction_id ship = create_ship(ship_name, faction_id) faction = get_faction(faction_id) return IntroduceShip(ship=ship, faction=faction) Accepting Files --------------- Mutations can also accept files, that's how it will work with different integrations: .. code:: python class UploadFile(graphene.ClientIDMutation): class Input: pass # nothing needed for uploading file # your return fields success = graphene.String() @classmethod def mutate_and_get_payload(cls, root, info, **input): # When using it in Django, context will be the request files = info.context.FILES # Or, if used in Flask, context will be the flask global request # files = context.files # do something with files return UploadFile(success=True) python-graphene-3.4.3/docs/relay/nodes.rst000066400000000000000000000063531471374454500205640ustar00rootroot00000000000000Nodes ===== A ``Node`` is an Interface provided by ``graphene.relay`` that contains a single field ``id`` (which is a ``ID!``). Any object that inherits from it has to implement a ``get_node`` method for retrieving a ``Node`` by an *id*. Quick example ------------- Example usage (taken from the `Starwars Relay example`_): .. 
code:: python class Ship(graphene.ObjectType): '''A ship in the Star Wars saga''' class Meta: interfaces = (relay.Node, ) name = graphene.String(description='The name of the ship.') @classmethod def get_node(cls, info, id): return get_ship(id) The ``id`` returned by the ``Ship`` type when you query it will be a scalar which contains enough info for the server to know its type and its id. For example, the instance ``Ship(id=1)`` will return ``U2hpcDox`` as the id when you query it (which is the base64 encoding of ``Ship:1``), and which could be useful later if we want to query a node by its id. Custom Nodes ------------ You can use the predefined ``relay.Node`` or you can subclass it, defining custom ways of how a node id is encoded (using the ``to_global_id`` method in the class) or how we can retrieve a Node given a encoded id (with the ``get_node_from_global_id`` method). Example of a custom node: .. code:: python class CustomNode(Node): class Meta: name = 'Node' @staticmethod def to_global_id(type_, id): return f"{type_}:{id}" @staticmethod def get_node_from_global_id(info, global_id, only_type=None): type_, id = global_id.split(':') if only_type: # We assure that the node type that we want to retrieve # is the same that was indicated in the field type assert type_ == only_type._meta.name, 'Received not compatible node.' if type_ == 'User': return get_user(id) elif type_ == 'Photo': return get_photo(id) The ``get_node_from_global_id`` method will be called when ``CustomNode.Field`` is resolved. Accessing node types -------------------- If we want to retrieve node instances from a ``global_id`` (scalar that identifies an instance by it's type name and id), we can simply do ``Node.get_node_from_global_id(info, global_id)``. In the case we want to restrict the instance retrieval to a specific type, we can do: ``Node.get_node_from_global_id(info, global_id, only_type=Ship)``. This will raise an error if the ``global_id`` doesn't correspond to a Ship type. 
Node Root field --------------- As is required in the `Relay specification`_, the server must implement a root field called ``node`` that returns a ``Node`` Interface. For this reason, ``graphene`` provides the field ``relay.Node.Field``, which links to any type in the Schema which implements ``Node``. Example usage: .. code:: python class Query(graphene.ObjectType): # Should be CustomNode.Field() if we want to use our custom Node node = relay.Node.Field() .. _Relay specification: https://facebook.github.io/relay/docs/graphql-relay-specification.html .. _Starwars Relay example: https://github.com/graphql-python/graphene/blob/master/examples/starwars_relay/schema.py python-graphene-3.4.3/docs/requirements.txt000066400000000000000000000002021471374454500210550ustar00rootroot00000000000000# Required library Sphinx==6.1.3 sphinx-autobuild==2021.3.14 # Docs template http://graphene-python.org/sphinx_graphene_theme.zip python-graphene-3.4.3/docs/testing/000077500000000000000000000000001471374454500172545ustar00rootroot00000000000000python-graphene-3.4.3/docs/testing/index.rst000066400000000000000000000044451471374454500211240ustar00rootroot00000000000000=================== Testing in Graphene =================== Automated testing is an extremely useful bug-killing tool for the modern developer. You can use a collection of tests – a test suite – to solve, or avoid, a number of problems: - When you’re writing new code, you can use tests to validate your code works as expected. - When you’re refactoring or modifying old code, you can use tests to ensure your changes haven’t affected your application’s behavior unexpectedly. Testing a GraphQL application is a complex task, because a GraphQL application is made of several layers of logic – schema definition, schema validation, permissions and field resolution. 
With Graphene test-execution framework and assorted utilities, you can simulate GraphQL requests, execute mutations, inspect your application’s output and generally verify your code is doing what it should be doing. Testing tools ------------- Graphene provides a small set of tools that come in handy when writing tests. Test Client ~~~~~~~~~~~ The test client is a Python class that acts as a dummy GraphQL client, allowing you to test your views and interact with your Graphene-powered application programmatically. Some of the things you can do with the test client are: - Simulate Queries and Mutations and observe the response. - Test that a given query request is rendered by a given Django template, with a template context that contains certain values. Overview and a quick example ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ To use the test client, instantiate ``graphene.test.Client`` and retrieve GraphQL responses: .. code:: python from graphene.test import Client def test_hey(): client = Client(my_schema) executed = client.execute('''{ hey }''') assert executed == { 'data': { 'hey': 'hello!' } } Execute parameters ~~~~~~~~~~~~~~~~~~ You can also add extra keyword arguments to the ``execute`` method, such as ``context``, ``root``, ``variables``, ...: .. code:: python from graphene.test import Client def test_hey(): client = Client(my_schema) executed = client.execute('''{ hey }''', context={'user': 'Peter'}) assert executed == { 'data': { 'hey': 'hello Peter!' } } python-graphene-3.4.3/docs/types/000077500000000000000000000000001471374454500167435ustar00rootroot00000000000000python-graphene-3.4.3/docs/types/enums.rst000066400000000000000000000042751471374454500206340ustar00rootroot00000000000000Enums ===== An ``Enum`` is a special ``GraphQL`` type that represents a set of symbolic names (members) bound to unique, constant values. Definition ---------- You can create an ``Enum`` using classes: .. 
code:: python import graphene class Episode(graphene.Enum): NEWHOPE = 4 EMPIRE = 5 JEDI = 6 But also using instances of Enum: .. code:: python Episode = graphene.Enum('Episode', [('NEWHOPE', 4), ('EMPIRE', 5), ('JEDI', 6)]) Value descriptions ------------------ It's possible to add a description to an enum value, for that the enum value needs to have the ``description`` property on it. .. code:: python class Episode(graphene.Enum): NEWHOPE = 4 EMPIRE = 5 JEDI = 6 @property def description(self): if self == Episode.NEWHOPE: return 'New Hope Episode' return 'Other episode' Usage with Python Enums ----------------------- In case the Enums are already defined it's possible to reuse them using the ``Enum.from_enum`` function. .. code:: python graphene.Enum.from_enum(AlreadyExistingPyEnum) ``Enum.from_enum`` supports a ``description`` and ``deprecation_reason`` lambdas as input so you can add description etc. to your enum without changing the original: .. code:: python graphene.Enum.from_enum( AlreadyExistingPyEnum, description=lambda v: return 'foo' if v == AlreadyExistingPyEnum.Foo else 'bar' ) Notes ----- ``graphene.Enum`` uses |enum.Enum|_ internally (or a backport if that's not available) and can be used in a similar way, with the exception of member getters. In the Python ``Enum`` implementation you can access a member by initing the Enum. .. code:: python from enum import Enum class Color(Enum): RED = 1 GREEN = 2 BLUE = 3 assert Color(1) == Color.RED However, in Graphene ``Enum`` you need to call `.get` to have the same effect: .. code:: python from graphene import Enum class Color(Enum): RED = 1 GREEN = 2 BLUE = 3 assert Color.get(1) == Color.RED .. |enum.Enum| replace:: ``enum.Enum`` .. _enum.Enum: https://docs.python.org/3/library/enum.html python-graphene-3.4.3/docs/types/index.rst000066400000000000000000000003121471374454500206000ustar00rootroot00000000000000.. _TypesReference: =============== Types Reference =============== .. 
toctree:: :maxdepth: 1 schema scalars list-and-nonnull objecttypes enums interfaces unions mutations python-graphene-3.4.3/docs/types/interfaces.rst000066400000000000000000000111161471374454500216200ustar00rootroot00000000000000.. _Interfaces: Interfaces ========== An *Interface* is an abstract type that defines a certain set of fields that a type must include to implement the interface. For example, you can define an Interface ``Character`` that represents any character in the Star Wars trilogy: .. code:: python import graphene class Character(graphene.Interface): id = graphene.ID(required=True) name = graphene.String(required=True) friends = graphene.List(lambda: Character) Any ObjectType that implements ``Character`` will have these exact fields, with these arguments and return types. For example, here are some types that might implement ``Character``: .. code:: python class Human(graphene.ObjectType): class Meta: interfaces = (Character, ) starships = graphene.List(Starship) home_planet = graphene.String() class Droid(graphene.ObjectType): class Meta: interfaces = (Character, ) primary_function = graphene.String() Both of these types have all of the fields from the ``Character`` interface, but also bring in extra fields, ``home_planet``, ``starships`` and ``primary_function``, that are specific to that particular type of character. The full GraphQL schema definition will look like this: .. code:: interface Character { id: ID! name: String! friends: [Character] } type Human implements Character { id: ID! name: String! friends: [Character] starships: [Starship] homePlanet: String } type Droid implements Character { id: ID! name: String! friends: [Character] primaryFunction: String } Interfaces are useful when you want to return an object or set of objects, which might be of several different types. For example, you can define a field ``hero`` that resolves to any ``Character``, depending on the episode, like this: .. 
code:: python class Query(graphene.ObjectType): hero = graphene.Field( Character, required=True, episode=graphene.Int(required=True) ) def resolve_hero(root, info, episode): # Luke is the hero of Episode V if episode == 5: return get_human(name='Luke Skywalker') return get_droid(name='R2-D2') schema = graphene.Schema(query=Query, types=[Human, Droid]) This allows you to directly query for fields that exist on the Character interface as well as selecting specific fields on any type that implements the interface using `inline fragments `_. For example, the following query: .. code:: query HeroForEpisode($episode: Int!) { hero(episode: $episode) { __typename name ... on Droid { primaryFunction } ... on Human { homePlanet } } } Will return the following data with variables ``{ "episode": 4 }``: .. code:: json { "data": { "hero": { "__typename": "Droid", "name": "R2-D2", "primaryFunction": "Astromech" } } } And different data with the variables ``{ "episode": 5 }``: .. code:: json { "data": { "hero": { "__typename": "Human", "name": "Luke Skywalker", "homePlanet": "Tatooine" } } } Resolving data objects to types ------------------------------- As you build out your schema in Graphene it's common for your resolvers to return objects that represent the data backing your GraphQL types rather than instances of the Graphene types (e.g. Django or SQLAlchemy models). This works well with ``ObjectType`` and ``Scalar`` fields, however when you start using Interfaces you might come across this error: .. code:: "Abstract type Character must resolve to an Object type at runtime for field Query.hero ..." This happens because Graphene doesn't have enough information to convert the data object into a Graphene type needed to resolve the ``Interface``. To solve this you can define a ``resolve_type`` class method on the ``Interface`` which maps a data object to a Graphene type: .. 
code:: python class Character(graphene.Interface): id = graphene.ID(required=True) name = graphene.String(required=True) @classmethod def resolve_type(cls, instance, info): if instance.type == 'DROID': return Droid return Human python-graphene-3.4.3/docs/types/list-and-nonnull.rst000066400000000000000000000034441471374454500227000ustar00rootroot00000000000000Lists and Non-Null ================== Object types, scalars, and enums are the only kinds of types you can define in Graphene. But when you use the types in other parts of the schema, or in your query variable declarations, you can apply additional type modifiers that affect validation of those values. NonNull ------- .. code:: python import graphene class Character(graphene.ObjectType): name = graphene.NonNull(graphene.String) Here, we're using a ``String`` type and marking it as Non-Null by wrapping it using the ``NonNull`` class. This means that our server always expects to return a non-null value for this field, and if it ends up getting a null value that will actually trigger a GraphQL execution error, letting the client know that something has gone wrong. The previous ``NonNull`` code snippet is also equivalent to: .. code:: python import graphene class Character(graphene.ObjectType): name = graphene.String(required=True) List ---- .. code:: python import graphene class Character(graphene.ObjectType): appears_in = graphene.List(graphene.String) Lists work in a similar way: We can use a type modifier to mark a type as a ``List``, which indicates that this field will return a list of that type. It works the same for arguments, where the validation step will expect a list for that value. NonNull Lists ------------- By default items in a list will be considered nullable. To define a list without any nullable items the type needs to be marked as ``NonNull``. For example: .. 
code:: python import graphene class Character(graphene.ObjectType): appears_in = graphene.List(graphene.NonNull(graphene.String)) The above results in the type definition: .. code:: type Character { appearsIn: [String!] } python-graphene-3.4.3/docs/types/mutations.rst000066400000000000000000000102631471374454500215220ustar00rootroot00000000000000Mutations ========= A Mutation is a special ObjectType that also defines an Input. Quick example ------------- This example defines a Mutation: .. code:: python import graphene class CreatePerson(graphene.Mutation): class Arguments: name = graphene.String() ok = graphene.Boolean() person = graphene.Field(lambda: Person) def mutate(root, info, name): person = Person(name=name) ok = True return CreatePerson(person=person, ok=ok) **person** and **ok** are the output fields of the Mutation when it is resolved. **Arguments** attributes are the arguments that the Mutation ``CreatePerson`` needs for resolving, in this case **name** will be the only argument for the mutation. **mutate** is the function that will be applied once the mutation is called. This method is just a special resolver that we can change data within. It takes the same arguments as the standard query :ref:`ResolverArguments`. So, we can finish our schema like this: .. code:: python # ... the Mutation Class class Person(graphene.ObjectType): name = graphene.String() age = graphene.Int() class MyMutations(graphene.ObjectType): create_person = CreatePerson.Field() # We must define a query for our schema class Query(graphene.ObjectType): person = graphene.Field(Person) schema = graphene.Schema(query=Query, mutation=MyMutations) Executing the Mutation ---------------------- Then, if we query (``schema.execute(query_str)``) the following: .. code:: mutation myFirstMutation { createPerson(name:"Peter") { person { name } ok } } We should receive: .. 
code:: json { "createPerson": { "person" : { "name": "Peter" }, "ok": true } } InputFields and InputObjectTypes ---------------------------------- InputFields are used in mutations to allow nested input data for mutations. To use an InputField you define an InputObjectType that specifies the structure of your input data: .. code:: python import graphene class PersonInput(graphene.InputObjectType): name = graphene.String(required=True) age = graphene.Int(required=True) class CreatePerson(graphene.Mutation): class Arguments: person_data = PersonInput(required=True) person = graphene.Field(Person) def mutate(root, info, person_data=None): person = Person( name=person_data.name, age=person_data.age ) return CreatePerson(person=person) Note that **name** and **age** are part of **person_data** now. Using the above mutation your new query would look like this: .. code:: mutation myFirstMutation { createPerson(personData: {name:"Peter", age: 24}) { person { name, age } } } InputObjectTypes can also be fields of InputObjectTypes allowing you to have as complex of input data as you need: .. code:: python import graphene class LatLngInput(graphene.InputObjectType): lat = graphene.Float() lng = graphene.Float() #A location has a latlng associated to it class LocationInput(graphene.InputObjectType): name = graphene.String() latlng = graphene.InputField(LatLngInput) Output type example ------------------- To return an existing ObjectType instead of a mutation-specific type, set the **Output** attribute to the desired ObjectType: .. code:: python import graphene class CreatePerson(graphene.Mutation): class Arguments: name = graphene.String() Output = Person def mutate(root, info, name): return Person(name=name) Then, if we query (``schema.execute(query_str)``) with the following: .. code:: mutation myFirstMutation { createPerson(name:"Peter") { name __typename } } We should receive: .. 
code:: json { "createPerson": { "name": "Peter", "__typename": "Person" } } python-graphene-3.4.3/docs/types/objecttypes.rst000066400000000000000000000337161471374454500220420ustar00rootroot00000000000000.. _ObjectType: ObjectType ========== A Graphene *ObjectType* is the building block used to define the relationship between **Fields** in your **Schema** and how their data is retrieved. The basics: - Each ObjectType is a Python class that inherits from ``graphene.ObjectType``. - Each attribute of the ObjectType represents a ``Field``. - Each ``Field`` has a :ref:`resolver method` to fetch data (or :ref:`DefaultResolver`). Quick example ------------- This example model defines a Person, with a first and a last name: .. code:: python from graphene import ObjectType, String class Person(ObjectType): first_name = String() last_name = String() full_name = String() def resolve_full_name(parent, info): return f"{parent.first_name} {parent.last_name}" This *ObjectType* defines the field **first\_name**, **last\_name**, and **full\_name**. Each field is specified as a class attribute, and each attribute maps to a Field. Data is fetched by our ``resolve_full_name`` :ref:`resolver method` for ``full_name`` field and the :ref:`DefaultResolver` for other fields. The above ``Person`` ObjectType has the following schema representation: .. code:: type Person { firstName: String lastName: String fullName: String } .. _Resolvers: Resolvers --------- A **Resolver** is a method that helps us answer **Queries** by fetching data for a **Field** in our **Schema**. Resolvers are lazily executed, so if a field is not included in a query, its resolver will not be executed. Each field on an *ObjectType* in Graphene should have a corresponding resolver method to fetch data. This resolver method should match the field name. For example, in the ``Person`` type above, the ``full_name`` field is resolved by the method ``resolve_full_name``. 
Each resolver method takes the parameters: * :ref:`ResolverParamParent` for the value object use to resolve most fields * :ref:`ResolverParamInfo` for query and schema meta information and per-request context * :ref:`ResolverParamGraphQLArguments` as defined on the **Field**. .. _ResolverArguments: Resolver Parameters ~~~~~~~~~~~~~~~~~~~ .. _ResolverParamParent: Parent Value Object (*parent*) ****************************** This parameter is typically used to derive the values for most fields on an *ObjectType*. The first parameter of a resolver method (*parent*) is the value object returned from the resolver of the parent field. If there is no parent field, such as a root Query field, then the value for *parent* is set to the ``root_value`` configured while executing the query (default ``None``). See :ref:`SchemaExecute` for more details on executing queries. Resolver example ^^^^^^^^^^^^^^^^ If we have a schema with Person type and one field on the root query. .. code:: python from graphene import ObjectType, String, Field def get_human(name): first_name, last_name = name.split() return Person(first_name, last_name) class Person(ObjectType): full_name = String() def resolve_full_name(parent, info): return f"{parent.first_name} {parent.last_name}" class Query(ObjectType): me = Field(Person) def resolve_me(parent, info): # returns an object that represents a Person return get_human(name="Luke Skywalker") When we execute a query against that schema. .. code:: python schema = Schema(query=Query) query_string = "{ me { fullName } }" result = schema.execute(query_string) assert result.data["me"] == {"fullName": "Luke Skywalker"} Then we go through the following steps to resolve this query: * ``parent`` is set with the root_value from query execution (None). * ``Query.resolve_me`` called with ``parent`` None which returns a value object ``Person("Luke", "Skywalker")``. 
* This value object is then used as ``parent`` while calling ``Person.resolve_full_name`` to resolve the scalar String value "Luke Skywalker". * The scalar value is serialized and sent back in the query response. Each resolver returns the next :ref:`ResolverParamParent` to be used in executing the following resolver in the chain. If the Field is a Scalar type, that value will be serialized and sent in the **Response**. Otherwise, while resolving Compound types like *ObjectType*, the value be passed forward as the next :ref:`ResolverParamParent`. Naming convention ^^^^^^^^^^^^^^^^^ This :ref:`ResolverParamParent` is sometimes named ``obj``, ``parent``, or ``source`` in other GraphQL documentation. It can also be named after the value object being resolved (ex. ``root`` for a root Query or Mutation, and ``person`` for a Person value object). Sometimes this argument will be named ``self`` in Graphene code, but this can be misleading due to :ref:`ResolverImplicitStaticMethod` while executing queries in Graphene. .. _ResolverParamInfo: GraphQL Execution Info (*info*) ******************************* The second parameter provides two things: * reference to meta information about the execution of the current GraphQL Query (fields, schema, parsed query, etc.) * access to per-request ``context`` which can be used to store user authentication, data loader instances or anything else useful for resolving the query. Only context will be required for most applications. See :ref:`SchemaExecuteContext` for more information about setting context. .. _ResolverParamGraphQLArguments: GraphQL Arguments (*\*\*kwargs*) ******************************** Any arguments that a field defines gets passed to the resolver function as keyword arguments. For example: .. 
code:: python from graphene import ObjectType, Field, String class Query(ObjectType): human_by_name = Field(Human, name=String(required=True)) def resolve_human_by_name(parent, info, name): return get_human(name=name) You can then execute the following query: .. code:: query { humanByName(name: "Luke Skywalker") { firstName lastName } } *Note:* There are several arguments to a field that are "reserved" by Graphene (see :ref:`fields-mounted-types`). You can still define an argument that clashes with one of these fields by using the ``args`` parameter like so: .. code:: python from graphene import ObjectType, Field, String class Query(ObjectType): answer = String(args={'description': String()}) def resolve_answer(parent, info, description): return description Convenience Features of Graphene Resolvers ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. _ResolverImplicitStaticMethod: Implicit staticmethod ********************* One surprising feature of Graphene is that all resolver methods are treated implicitly as staticmethods. This means that, unlike other methods in Python, the first argument of a resolver is *never* ``self`` while it is being executed by Graphene. Instead, the first argument is always :ref:`ResolverParamParent`. In practice, this is very convenient as, in GraphQL, we are almost always more concerned with the using the parent value object to resolve queries than attributes on the Python object itself. The two resolvers in this example are effectively the same. .. code:: python from graphene import ObjectType, String class Person(ObjectType): first_name = String() last_name = String() @staticmethod def resolve_first_name(parent, info): ''' Decorating a Python method with `staticmethod` ensures that `self` will not be provided as an argument. However, Graphene does not need this decorator for this behavior. 
''' return parent.first_name def resolve_last_name(parent, info): ''' Normally the first argument for this method would be `self`, but Graphene executes this as a staticmethod implicitly. ''' return parent.last_name # ... If you prefer your code to be more explicit, feel free to use ``@staticmethod`` decorators. Otherwise, your code may be cleaner without them! .. _DefaultResolver: Default Resolver **************** If a resolver method is not defined for a **Field** attribute on our *ObjectType*, Graphene supplies a default resolver. If the :ref:`ResolverParamParent` is a dictionary, the resolver will look for a dictionary key matching the field name. Otherwise, the resolver will get the attribute from the parent value object matching the field name. .. code:: python from collections import namedtuple from graphene import ObjectType, String, Field, Schema PersonValueObject = namedtuple("Person", ["first_name", "last_name"]) class Person(ObjectType): first_name = String() last_name = String() class Query(ObjectType): me = Field(Person) my_best_friend = Field(Person) def resolve_me(parent, info): # always pass an object for `me` field return PersonValueObject(first_name="Luke", last_name="Skywalker") def resolve_my_best_friend(parent, info): # always pass a dictionary for `my_best_fiend_field` return {"first_name": "R2", "last_name": "D2"} schema = Schema(query=Query) result = schema.execute(''' { me { firstName lastName } myBestFriend { firstName lastName } } ''') # With default resolvers we can resolve attributes from an object.. assert result.data["me"] == {"firstName": "Luke", "lastName": "Skywalker"} # With default resolvers, we can also resolve keys from a dictionary.. 
assert result.data["myBestFriend"] == {"firstName": "R2", "lastName": "D2"} Advanced ~~~~~~~~ GraphQL Argument defaults ************************* If you define an argument for a field that is not required (and in a query execution it is not provided as an argument) it will not be passed to the resolver function at all. This is so that the developer can differentiate between a ``undefined`` value for an argument and an explicit ``null`` value. For example, given this schema: .. code:: python from graphene import ObjectType, String class Query(ObjectType): hello = String(required=True, name=String()) def resolve_hello(parent, info, name): return name if name else 'World' And this query: .. code:: query { hello } An error will be thrown: .. code:: TypeError: resolve_hello() missing 1 required positional argument: 'name' You can fix this error in several ways. Either by combining all keyword arguments into a dict: .. code:: python from graphene import ObjectType, String class Query(ObjectType): hello = String(required=True, name=String()) def resolve_hello(parent, info, **kwargs): name = kwargs.get('name', 'World') return f'Hello, {name}!' Or by setting a default value for the keyword argument: .. code:: python from graphene import ObjectType, String class Query(ObjectType): hello = String(required=True, name=String()) def resolve_hello(parent, info, name='World'): return f'Hello, {name}!' One can also set a default value for an Argument in the GraphQL schema itself using Graphene! .. code:: python from graphene import ObjectType, String class Query(ObjectType): hello = String( required=True, name=String(default_value='World') ) def resolve_hello(parent, info, name): return f'Hello, {name}!' Resolvers outside the class *************************** A field can use a custom resolver from outside the class: .. 
code:: python from graphene import ObjectType, String def resolve_full_name(person, info): return f"{person.first_name} {person.last_name}" class Person(ObjectType): first_name = String() last_name = String() full_name = String(resolver=resolve_full_name) Instances as value objects ************************** Graphene ``ObjectType``\ s can act as value objects too. So with the previous example you could use ``Person`` to capture data for each of the *ObjectType*'s fields. .. code:: python peter = Person(first_name='Peter', last_name='Griffin') peter.first_name # prints "Peter" peter.last_name # prints "Griffin" Field camelcasing ***************** Graphene automatically camelcases fields on *ObjectType* from ``field_name`` to ``fieldName`` to conform with GraphQL standards. See :ref:`SchemaAutoCamelCase` for more information. *ObjectType* Configuration - Meta class --------------------------------------- Graphene uses a Meta inner class on *ObjectType* to set different options. GraphQL type name ~~~~~~~~~~~~~~~~~ By default the type name in the GraphQL schema will be the same as the class name that defines the ``ObjectType``. This can be changed by setting the ``name`` property on the ``Meta`` class: .. code:: python from graphene import ObjectType class MyGraphQlSong(ObjectType): class Meta: name = 'Song' GraphQL Description ~~~~~~~~~~~~~~~~~~~ The schema description of an *ObjectType* can be set as a docstring on the Python object or on the Meta inner class. .. code:: python from graphene import ObjectType class MyGraphQlSong(ObjectType): ''' We can set the schema description for an Object Type here on a docstring ''' class Meta: description = 'But if we set the description in Meta, this value is used instead' Interfaces & Possible Types ~~~~~~~~~~~~~~~~~~~~~~~~~~~ Setting ``interfaces`` in Meta inner class specifies the GraphQL Interfaces that this Object implements. Providing ``possible_types`` helps Graphene resolve ambiguous types such as interfaces or Unions. 
See :ref:`Interfaces` for more information. .. code:: python from graphene import ObjectType, Node Song = namedtuple('Song', ('title', 'artist')) class MyGraphQlSong(ObjectType): class Meta: interfaces = (Node, ) possible_types = (Song, ) .. _Interface: /docs/interfaces/ python-graphene-3.4.3/docs/types/scalars.rst000066400000000000000000000176301471374454500211340ustar00rootroot00000000000000.. _Scalars: Scalars ======= Scalar types represent concrete values at the leaves of a query. There are several built in types that Graphene provides out of the box which represent common values in Python. You can also create your own Scalar types to better express values that you might have in your data model. All Scalar types accept the following arguments. All are optional: ``name``: *string* Override the name of the Field. ``description``: *string* A description of the type to show in the GraphiQL browser. ``required``: *boolean* If ``True``, the server will enforce a value for this field. See `NonNull <../list-and-nonnull.html#nonnull>`_. Default is ``False``. ``deprecation_reason``: *string* Provide a deprecation reason for the Field. ``default_value``: *any* Provide a default value for the Field. Built in scalars ---------------- Graphene defines the following base Scalar Types that match the default `GraphQL types `_: ``graphene.String`` ^^^^^^^^^^^^^^^^^^^ Represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text. ``graphene.Int`` ^^^^^^^^^^^^^^^^ Represents non-fractional signed whole numeric values. Int is a signed 32‐bit integer per the `GraphQL spec `_ ``graphene.Float`` ^^^^^^^^^^^^^^^^^^ Represents signed double-precision fractional values as specified by `IEEE 754 `_. ``graphene.Boolean`` ^^^^^^^^^^^^^^^^^^^^ Represents `true` or `false`. ``graphene.ID`` ^^^^^^^^^^^^^^^ Represents a unique identifier, often used to refetch an object or as key for a cache. 
The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `"4"`) or integer (such as `4`) input value will be accepted as an ID. ---- Graphene also provides custom scalars for common values: ``graphene.Date`` ^^^^^^^^^^^^^^^^^ Represents a Date value as specified by `iso8601 `_. .. code:: python import datetime from graphene import Schema, ObjectType, Date class Query(ObjectType): one_week_from = Date(required=True, date_input=Date(required=True)) def resolve_one_week_from(root, info, date_input): assert date_input == datetime.date(2006, 1, 2) return date_input + datetime.timedelta(weeks=1) schema = Schema(query=Query) results = schema.execute(""" query { oneWeekFrom(dateInput: "2006-01-02") } """) assert results.data == {"oneWeekFrom": "2006-01-09"} ``graphene.DateTime`` ^^^^^^^^^^^^^^^^^^^^^ Represents a DateTime value as specified by `iso8601 `_. .. code:: python import datetime from graphene import Schema, ObjectType, DateTime class Query(ObjectType): one_hour_from = DateTime(required=True, datetime_input=DateTime(required=True)) def resolve_one_hour_from(root, info, datetime_input): assert datetime_input == datetime.datetime(2006, 1, 2, 15, 4, 5) return datetime_input + datetime.timedelta(hours=1) schema = Schema(query=Query) results = schema.execute(""" query { oneHourFrom(datetimeInput: "2006-01-02T15:04:05") } """) assert results.data == {"oneHourFrom": "2006-01-02T16:04:05"} ``graphene.Time`` ^^^^^^^^^^^^^^^^^ Represents a Time value as specified by `iso8601 `_. .. 
code:: python import datetime from graphene import Schema, ObjectType, Time class Query(ObjectType): one_hour_from = Time(required=True, time_input=Time(required=True)) def resolve_one_hour_from(root, info, time_input): assert time_input == datetime.time(15, 4, 5) tmp_time_input = datetime.datetime.combine(datetime.date(1, 1, 1), time_input) return (tmp_time_input + datetime.timedelta(hours=1)).time() schema = Schema(query=Query) results = schema.execute(""" query { oneHourFrom(timeInput: "15:04:05") } """) assert results.data == {"oneHourFrom": "16:04:05"} ``graphene.Decimal`` ^^^^^^^^^^^^^^^^^^^^ Represents a Python Decimal value. .. code:: python import decimal from graphene import Schema, ObjectType, Decimal class Query(ObjectType): add_one_to = Decimal(required=True, decimal_input=Decimal(required=True)) def resolve_add_one_to(root, info, decimal_input): assert decimal_input == decimal.Decimal("10.50") return decimal_input + decimal.Decimal("1") schema = Schema(query=Query) results = schema.execute(""" query { addOneTo(decimalInput: "10.50") } """) assert results.data == {"addOneTo": "11.50"} ``graphene.JSONString`` ^^^^^^^^^^^^^^^^^^^^^^^ Represents a JSON string. .. code:: python from graphene import Schema, ObjectType, JSONString, String class Query(ObjectType): update_json_key = JSONString( required=True, json_input=JSONString(required=True), key=String(required=True), value=String(required=True) ) def resolve_update_json_key(root, info, json_input, key, value): assert json_input == {"name": "Jane"} json_input[key] = value return json_input schema = Schema(query=Query) results = schema.execute(""" query { updateJsonKey(jsonInput: "{\\"name\\": \\"Jane\\"}", key: "name", value: "Beth") } """) assert results.data == {"updateJsonKey": "{\"name\": \"Beth\"}"} ``graphene.Base64`` ^^^^^^^^^^^^^^^^^^^ Represents a Base64 encoded string. .. 
code:: python from graphene import Schema, ObjectType, Base64 class Query(ObjectType): increment_encoded_id = Base64( required=True, base64_input=Base64(required=True), ) def resolve_increment_encoded_id(root, info, base64_input): assert base64_input == "4" return int(base64_input) + 1 schema = Schema(query=Query) results = schema.execute(""" query { incrementEncodedId(base64Input: "NA==") } """) assert results.data == {"incrementEncodedId": "NQ=="} Custom scalars -------------- You can create custom scalars for your schema. The following is an example for creating a DateTime scalar: .. code:: python import datetime from graphene.types import Scalar from graphql.language import ast class DateTime(Scalar): '''DateTime Scalar Description''' @staticmethod def serialize(dt): return dt.isoformat() @staticmethod def parse_literal(node, _variables=None): if isinstance(node, ast.StringValueNode): return datetime.datetime.strptime( node.value, "%Y-%m-%dT%H:%M:%S.%f") @staticmethod def parse_value(value): return datetime.datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%f") Mounting Scalars ---------------- Scalars mounted in a ``ObjectType``, ``Interface`` or ``Mutation`` act as ``Field``\ s. .. code:: python class Person(graphene.ObjectType): name = graphene.String() # Is equivalent to: class Person(graphene.ObjectType): name = graphene.Field(graphene.String) **Note:** when using the ``Field`` constructor directly, pass the type and not an instance. Types mounted in a ``Field`` act as ``Argument``\ s. .. code:: python graphene.Field(graphene.String, to=graphene.String()) # Is equivalent to: graphene.Field(graphene.String, to=graphene.Argument(graphene.String)) python-graphene-3.4.3/docs/types/schema.rst000066400000000000000000000051741471374454500207440ustar00rootroot00000000000000Schema ====== A GraphQL **Schema** defines the types and relationships between **Fields** in your API. 
A Schema is created by supplying the root :ref:`ObjectType` of each operation, query (mandatory), mutation and subscription. Schema will collect all type definitions related to the root operations and then supply them to the validator and executor. .. code:: python my_schema = Schema( query=MyRootQuery, mutation=MyRootMutation, subscription=MyRootSubscription ) A Root Query is just a special :ref:`ObjectType` that defines the fields that are the entrypoint for your API. Root Mutation and Root Subscription are similar to Root Query, but for different operation types: * Query fetches data * Mutation changes data and retrieves the changes * Subscription sends changes to clients in real-time Review the `GraphQL documentation on Schema`_ for a brief overview of fields, schema and operations. .. _GraphQL documentation on Schema: https://graphql.org/learn/schema/ Querying -------- To query a schema, call the ``execute`` method on it. See :ref:`SchemaExecute` for more details. .. code:: python query_string = 'query whoIsMyBestFriend { myBestFriend { lastName } }' my_schema.execute(query_string) Types ----- There are some cases where the schema cannot access all of the types that we plan to have. For example, when a field returns an ``Interface``, the schema doesn't know about any of the implementations. In this case, we need to use the ``types`` argument when creating the Schema: .. code:: python my_schema = Schema( query=MyRootQuery, types=[SomeExtraObjectType, ] ) .. _SchemaAutoCamelCase: Auto camelCase field names -------------------------- By default all field and argument names (that are not explicitly set with the ``name`` arg) will be converted from ``snake_case`` to ``camelCase`` (as the API is usually being consumed by a js/mobile client) For example with the ObjectType the ``last_name`` field name is converted to ``lastName``: .. 
code:: python class Person(graphene.ObjectType): last_name = graphene.String() other_name = graphene.String(name='_other_Name') In case you don't want to apply this transformation, provide a ``name`` argument to the field constructor. ``other_name`` converts to ``_other_Name`` (without further transformations). Your query should look like: .. code:: { lastName _other_Name } To disable this behavior, set the ``auto_camelcase`` to ``False`` upon schema instantiation: .. code:: python my_schema = Schema( query=MyRootQuery, auto_camelcase=False, ) python-graphene-3.4.3/docs/types/unions.rst000066400000000000000000000027321471374454500210140ustar00rootroot00000000000000Unions ====== Union types are very similar to interfaces, but they don't get to specify any common fields between the types. The basics: - Each Union is a Python class that inherits from ``graphene.Union``. - Unions don't have any fields on it, just links to the possible ObjectTypes. Quick example ------------- This example model defines several ObjectTypes with their own fields. ``SearchResult`` is the implementation of ``Union`` of this object types. .. code:: python import graphene class Human(graphene.ObjectType): name = graphene.String() born_in = graphene.String() class Droid(graphene.ObjectType): name = graphene.String() primary_function = graphene.String() class Starship(graphene.ObjectType): name = graphene.String() length = graphene.Int() class SearchResult(graphene.Union): class Meta: types = (Human, Droid, Starship) Wherever we return a SearchResult type in our schema, we might get a Human, a Droid, or a Starship. Note that members of a union type need to be concrete object types; you can't create a union type out of interfaces or other unions. The above types have the following representation in a schema: .. 
code:: type Droid { name: String primaryFunction: String } type Human { name: String bornIn: String } type Ship { name: String length: Int } union SearchResult = Human | Droid | Starship python-graphene-3.4.3/examples/000077500000000000000000000000001471374454500164655ustar00rootroot00000000000000python-graphene-3.4.3/examples/__init__.py000066400000000000000000000000001471374454500205640ustar00rootroot00000000000000python-graphene-3.4.3/examples/complex_example.py000066400000000000000000000026651471374454500222320ustar00rootroot00000000000000import graphene class GeoInput(graphene.InputObjectType): lat = graphene.Float(required=True) lng = graphene.Float(required=True) @property def latlng(self): return f"({self.lat},{self.lng})" class Address(graphene.ObjectType): latlng = graphene.String() class Query(graphene.ObjectType): address = graphene.Field(Address, geo=GeoInput(required=True)) def resolve_address(root, info, geo): return Address(latlng=geo.latlng) class CreateAddress(graphene.Mutation): class Arguments: geo = GeoInput(required=True) Output = Address def mutate(root, info, geo): return Address(latlng=geo.latlng) class Mutation(graphene.ObjectType): create_address = CreateAddress.Field() schema = graphene.Schema(query=Query, mutation=Mutation) query = """ query something{ address(geo: {lat:32.2, lng:12}) { latlng } } """ mutation = """ mutation addAddress{ createAddress(geo: {lat:32.2, lng:12}) { latlng } } """ def test_query(): result = schema.execute(query) assert not result.errors assert result.data == {"address": {"latlng": "(32.2,12.0)"}} def test_mutation(): result = schema.execute(mutation) assert not result.errors assert result.data == {"createAddress": {"latlng": "(32.2,12.0)"}} if __name__ == "__main__": result = schema.execute(query) print(result.data["address"]["latlng"]) python-graphene-3.4.3/examples/context_example.py000066400000000000000000000012721471374454500222400ustar00rootroot00000000000000import graphene class 
User(graphene.ObjectType): id = graphene.ID() name = graphene.String() class Query(graphene.ObjectType): me = graphene.Field(User) def resolve_me(root, info): return info.context["user"] schema = graphene.Schema(query=Query) query = """ query something{ me { id name } } """ def test_query(): result = schema.execute(query, context={"user": User(id="1", name="Syrus")}) assert not result.errors assert result.data == {"me": {"id": "1", "name": "Syrus"}} if __name__ == "__main__": result = schema.execute(query, context={"user": User(id="X", name="Console")}) print(result.data["me"]) python-graphene-3.4.3/examples/simple_example.py000066400000000000000000000012621471374454500220440ustar00rootroot00000000000000import graphene class Patron(graphene.ObjectType): id = graphene.ID() name = graphene.String() age = graphene.Int() class Query(graphene.ObjectType): patron = graphene.Field(Patron) def resolve_patron(root, info): return Patron(id=1, name="Syrus", age=27) schema = graphene.Schema(query=Query) query = """ query something{ patron { id name age } } """ def test_query(): result = schema.execute(query) assert not result.errors assert result.data == {"patron": {"id": "1", "name": "Syrus", "age": 27}} if __name__ == "__main__": result = schema.execute(query) print(result.data["patron"]) python-graphene-3.4.3/examples/starwars/000077500000000000000000000000001471374454500203335ustar00rootroot00000000000000python-graphene-3.4.3/examples/starwars/__init__.py000066400000000000000000000000001471374454500224320ustar00rootroot00000000000000python-graphene-3.4.3/examples/starwars/data.py000066400000000000000000000035311471374454500216200ustar00rootroot00000000000000human_data = {} droid_data = {} def setup(): from .schema import Human, Droid global human_data, droid_data luke = Human( id="1000", name="Luke Skywalker", friends=["1002", "1003", "2000", "2001"], appears_in=[4, 5, 6], home_planet="Tatooine", ) vader = Human( id="1001", name="Darth Vader", friends=["1004"], 
appears_in=[4, 5, 6], home_planet="Tatooine", ) han = Human( id="1002", name="Han Solo", friends=["1000", "1003", "2001"], appears_in=[4, 5, 6], home_planet=None, ) leia = Human( id="1003", name="Leia Organa", friends=["1000", "1002", "2000", "2001"], appears_in=[4, 5, 6], home_planet="Alderaan", ) tarkin = Human( id="1004", name="Wilhuff Tarkin", friends=["1001"], appears_in=[4], home_planet=None, ) human_data = { "1000": luke, "1001": vader, "1002": han, "1003": leia, "1004": tarkin, } c3po = Droid( id="2000", name="C-3PO", friends=["1000", "1002", "1003", "2001"], appears_in=[4, 5, 6], primary_function="Protocol", ) r2d2 = Droid( id="2001", name="R2-D2", friends=["1000", "1002", "1003"], appears_in=[4, 5, 6], primary_function="Astromech", ) droid_data = {"2000": c3po, "2001": r2d2} def get_character(id): return human_data.get(id) or droid_data.get(id) def get_friends(character): return map(get_character, character.friends) def get_hero(episode): if episode == 5: return human_data["1000"] return droid_data["2001"] def get_human(id): return human_data.get(id) def get_droid(id): return droid_data.get(id) python-graphene-3.4.3/examples/starwars/schema.py000066400000000000000000000022551471374454500221510ustar00rootroot00000000000000import graphene from .data import get_character, get_droid, get_hero, get_human class Episode(graphene.Enum): NEWHOPE = 4 EMPIRE = 5 JEDI = 6 class Character(graphene.Interface): id = graphene.ID() name = graphene.String() friends = graphene.List(lambda: Character) appears_in = graphene.List(Episode) def resolve_friends(self, info): # The character friends is a list of strings return [get_character(f) for f in self.friends] class Human(graphene.ObjectType): class Meta: interfaces = (Character,) home_planet = graphene.String() class Droid(graphene.ObjectType): class Meta: interfaces = (Character,) primary_function = graphene.String() class Query(graphene.ObjectType): hero = graphene.Field(Character, episode=Episode()) human = 
graphene.Field(Human, id=graphene.String()) droid = graphene.Field(Droid, id=graphene.String()) def resolve_hero(root, info, episode=None): return get_hero(episode) def resolve_human(root, info, id): return get_human(id) def resolve_droid(root, info, id): return get_droid(id) schema = graphene.Schema(query=Query) python-graphene-3.4.3/examples/starwars/tests/000077500000000000000000000000001471374454500214755ustar00rootroot00000000000000python-graphene-3.4.3/examples/starwars/tests/__init__.py000066400000000000000000000000001471374454500235740ustar00rootroot00000000000000python-graphene-3.4.3/examples/starwars/tests/test_query.py000066400000000000000000000135751471374454500242660ustar00rootroot00000000000000from graphene.test import Client from ..data import setup from ..schema import schema setup() client = Client(schema) def test_hero_name_query(): result = client.execute(""" query HeroNameQuery { hero { name } } """) assert result == {"data": {"hero": {"name": "R2-D2"}}} def test_hero_name_and_friends_query(): result = client.execute(""" query HeroNameAndFriendsQuery { hero { id name friends { name } } } """) assert result == { "data": { "hero": { "id": "2001", "name": "R2-D2", "friends": [ {"name": "Luke Skywalker"}, {"name": "Han Solo"}, {"name": "Leia Organa"}, ], } } } def test_nested_query(): result = client.execute(""" query NestedQuery { hero { name friends { name appearsIn friends { name } } } } """) assert result == { "data": { "hero": { "name": "R2-D2", "friends": [ { "name": "Luke Skywalker", "appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"], "friends": [ {"name": "Han Solo"}, {"name": "Leia Organa"}, {"name": "C-3PO"}, {"name": "R2-D2"}, ], }, { "name": "Han Solo", "appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"], "friends": [ {"name": "Luke Skywalker"}, {"name": "Leia Organa"}, {"name": "R2-D2"}, ], }, { "name": "Leia Organa", "appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"], "friends": [ {"name": "Luke Skywalker"}, {"name": "Han Solo"}, {"name": "C-3PO"}, {"name": 
"R2-D2"}, ], }, ], } } } def test_fetch_luke_query(): result = client.execute(""" query FetchLukeQuery { human(id: "1000") { name } } """) assert result == {"data": {"human": {"name": "Luke Skywalker"}}} def test_fetch_some_id_query(): result = client.execute( """ query FetchSomeIDQuery($someId: String!) { human(id: $someId) { name } } """, variables={"someId": "1000"}, ) assert result == {"data": {"human": {"name": "Luke Skywalker"}}} def test_fetch_some_id_query2(): result = client.execute( """ query FetchSomeIDQuery($someId: String!) { human(id: $someId) { name } } """, variables={"someId": "1002"}, ) assert result == {"data": {"human": {"name": "Han Solo"}}} def test_invalid_id_query(): result = client.execute( """ query humanQuery($id: String!) { human(id: $id) { name } } """, variables={"id": "not a valid id"}, ) assert result == {"data": {"human": None}} def test_fetch_luke_aliased(): result = client.execute(""" query FetchLukeAliased { luke: human(id: "1000") { name } } """) assert result == {"data": {"luke": {"name": "Luke Skywalker"}}} def test_fetch_luke_and_leia_aliased(): result = client.execute(""" query FetchLukeAndLeiaAliased { luke: human(id: "1000") { name } leia: human(id: "1003") { name } } """) assert result == { "data": {"luke": {"name": "Luke Skywalker"}, "leia": {"name": "Leia Organa"}} } def test_duplicate_fields(): result = client.execute(""" query DuplicateFields { luke: human(id: "1000") { name homePlanet } leia: human(id: "1003") { name homePlanet } } """) assert result == { "data": { "luke": {"name": "Luke Skywalker", "homePlanet": "Tatooine"}, "leia": {"name": "Leia Organa", "homePlanet": "Alderaan"}, } } def test_use_fragment(): result = client.execute(""" query UseFragment { luke: human(id: "1000") { ...HumanFragment } leia: human(id: "1003") { ...HumanFragment } } fragment HumanFragment on Human { name homePlanet } """) assert result == { "data": { "luke": {"name": "Luke Skywalker", "homePlanet": "Tatooine"}, "leia": {"name": "Leia 
Organa", "homePlanet": "Alderaan"}, } } def test_check_type_of_r2(): result = client.execute(""" query CheckTypeOfR2 { hero { __typename name } } """) assert result == {"data": {"hero": {"__typename": "Droid", "name": "R2-D2"}}} def test_check_type_of_luke(): result = client.execute(""" query CheckTypeOfLuke { hero(episode: EMPIRE) { __typename name } } """) assert result == { "data": {"hero": {"__typename": "Human", "name": "Luke Skywalker"}} } python-graphene-3.4.3/examples/starwars/tests/test_schema.py000066400000000000000000000000001471374454500243340ustar00rootroot00000000000000python-graphene-3.4.3/examples/starwars_relay/000077500000000000000000000000001471374454500215275ustar00rootroot00000000000000python-graphene-3.4.3/examples/starwars_relay/__init__.py000066400000000000000000000000001471374454500236260ustar00rootroot00000000000000python-graphene-3.4.3/examples/starwars_relay/data.py000066400000000000000000000030711471374454500230130ustar00rootroot00000000000000data = {} def setup(): global data from .schema import Ship, Faction xwing = Ship(id="1", name="X-Wing") ywing = Ship(id="2", name="Y-Wing") awing = Ship(id="3", name="A-Wing") # Yeah, technically it's Corellian. But it flew in the service of the rebels, # so for the purposes of this demo it's a rebel ship. 
falcon = Ship(id="4", name="Millennium Falcon") homeOne = Ship(id="5", name="Home One") tieFighter = Ship(id="6", name="TIE Fighter") tieInterceptor = Ship(id="7", name="TIE Interceptor") executor = Ship(id="8", name="Executor") rebels = Faction( id="1", name="Alliance to Restore the Republic", ships=["1", "2", "3", "4", "5"] ) empire = Faction(id="2", name="Galactic Empire", ships=["6", "7", "8"]) data = { "Faction": {"1": rebels, "2": empire}, "Ship": { "1": xwing, "2": ywing, "3": awing, "4": falcon, "5": homeOne, "6": tieFighter, "7": tieInterceptor, "8": executor, }, } def create_ship(ship_name, faction_id): from .schema import Ship next_ship = len(data["Ship"].keys()) + 1 new_ship = Ship(id=str(next_ship), name=ship_name) data["Ship"][new_ship.id] = new_ship data["Faction"][faction_id].ships.append(new_ship.id) return new_ship def get_ship(_id): return data["Ship"][_id] def get_faction(_id): return data["Faction"][_id] def get_rebels(): return get_faction("1") def get_empire(): return get_faction("2") python-graphene-3.4.3/examples/starwars_relay/schema.py000066400000000000000000000036431471374454500233470ustar00rootroot00000000000000import graphene from graphene import relay from .data import create_ship, get_empire, get_faction, get_rebels, get_ship class Ship(graphene.ObjectType): """A ship in the Star Wars saga""" class Meta: interfaces = (relay.Node,) name = graphene.String(description="The name of the ship.") @classmethod def get_node(cls, info, id): return get_ship(id) class ShipConnection(relay.Connection): class Meta: node = Ship class Faction(graphene.ObjectType): """A faction in the Star Wars saga""" class Meta: interfaces = (relay.Node,) name = graphene.String(description="The name of the faction.") ships = relay.ConnectionField( ShipConnection, description="The ships used by the faction." 
) def resolve_ships(self, info, **args): # Transform the instance ship_ids into real instances return [get_ship(ship_id) for ship_id in self.ships] @classmethod def get_node(cls, info, id): return get_faction(id) class IntroduceShip(relay.ClientIDMutation): class Input: ship_name = graphene.String(required=True) faction_id = graphene.String(required=True) ship = graphene.Field(Ship) faction = graphene.Field(Faction) @classmethod def mutate_and_get_payload( cls, root, info, ship_name, faction_id, client_mutation_id=None ): ship = create_ship(ship_name, faction_id) faction = get_faction(faction_id) return IntroduceShip(ship=ship, faction=faction) class Query(graphene.ObjectType): rebels = graphene.Field(Faction) empire = graphene.Field(Faction) node = relay.Node.Field() def resolve_rebels(root, info): return get_rebels() def resolve_empire(root, info): return get_empire() class Mutation(graphene.ObjectType): introduce_ship = IntroduceShip.Field() schema = graphene.Schema(query=Query, mutation=Mutation) python-graphene-3.4.3/examples/starwars_relay/tests/000077500000000000000000000000001471374454500226715ustar00rootroot00000000000000python-graphene-3.4.3/examples/starwars_relay/tests/__init__.py000066400000000000000000000000001471374454500247700ustar00rootroot00000000000000python-graphene-3.4.3/examples/starwars_relay/tests/test_connections.py000066400000000000000000000024711471374454500266300ustar00rootroot00000000000000from graphene.test import Client from ..data import setup from ..schema import schema setup() client = Client(schema) def test_correct_fetch_first_ship_rebels(): result = client.execute(""" query RebelsShipsQuery { rebels { name, ships(first: 1) { pageInfo { startCursor endCursor hasNextPage hasPreviousPage } edges { cursor node { name } } } } } """) assert result == { "data": { "rebels": { "name": "Alliance to Restore the Republic", "ships": { "pageInfo": { "startCursor": "YXJyYXljb25uZWN0aW9uOjA=", "endCursor": "YXJyYXljb25uZWN0aW9uOjA=", 
"hasNextPage": True, "hasPreviousPage": False, }, "edges": [ { "cursor": "YXJyYXljb25uZWN0aW9uOjA=", "node": {"name": "X-Wing"}, } ], }, } } } python-graphene-3.4.3/examples/starwars_relay/tests/test_mutation.py000066400000000000000000000027161471374454500261500ustar00rootroot00000000000000from graphene.test import Client from ..data import setup from ..schema import schema setup() client = Client(schema) def test_mutations(): result = client.execute(""" mutation MyMutation { introduceShip(input:{clientMutationId:"abc", shipName: "Peter", factionId: "1"}) { ship { id name } faction { name ships { edges { node { id name } } } } } } """) assert result == { "data": { "introduceShip": { "ship": {"id": "U2hpcDo5", "name": "Peter"}, "faction": { "name": "Alliance to Restore the Republic", "ships": { "edges": [ {"node": {"id": "U2hpcDox", "name": "X-Wing"}}, {"node": {"id": "U2hpcDoy", "name": "Y-Wing"}}, {"node": {"id": "U2hpcDoz", "name": "A-Wing"}}, {"node": {"id": "U2hpcDo0", "name": "Millennium Falcon"}}, {"node": {"id": "U2hpcDo1", "name": "Home One"}}, {"node": {"id": "U2hpcDo5", "name": "Peter"}}, ] }, }, } } } python-graphene-3.4.3/examples/starwars_relay/tests/test_objectidentification.py000066400000000000000000000100141471374454500304560ustar00rootroot00000000000000import textwrap from graphene.test import Client from ..data import setup from ..schema import schema setup() client = Client(schema) def test_str_schema(): assert str(schema).strip() == textwrap.dedent( '''\ type Query { rebels: Faction empire: Faction node( """The ID of the object""" id: ID! ): Node } """A faction in the Star Wars saga""" type Faction implements Node { """The ID of the object""" id: ID! """The name of the faction.""" name: String """The ships used by the faction.""" ships(before: String, after: String, first: Int, last: Int): ShipConnection } """An object with an ID""" interface Node { """The ID of the object""" id: ID! 
} type ShipConnection { """Pagination data for this connection.""" pageInfo: PageInfo! """Contains the nodes in this connection.""" edges: [ShipEdge]! } """ The Relay compliant `PageInfo` type, containing data necessary to paginate this connection. """ type PageInfo { """When paginating forwards, are there more items?""" hasNextPage: Boolean! """When paginating backwards, are there more items?""" hasPreviousPage: Boolean! """When paginating backwards, the cursor to continue.""" startCursor: String """When paginating forwards, the cursor to continue.""" endCursor: String } """A Relay edge containing a `Ship` and its cursor.""" type ShipEdge { """The item at the end of the edge""" node: Ship """A cursor for use in pagination""" cursor: String! } """A ship in the Star Wars saga""" type Ship implements Node { """The ID of the object""" id: ID! """The name of the ship.""" name: String } type Mutation { introduceShip(input: IntroduceShipInput!): IntroduceShipPayload } type IntroduceShipPayload { ship: Ship faction: Faction clientMutationId: String } input IntroduceShipInput { shipName: String! factionId: String! clientMutationId: String }''' ) def test_correctly_fetches_id_name_rebels(): result = client.execute(""" query RebelsQuery { rebels { id name } } """) assert result == { "data": { "rebels": {"id": "RmFjdGlvbjox", "name": "Alliance to Restore the Republic"} } } def test_correctly_refetches_rebels(): result = client.execute(""" query RebelsRefetchQuery { node(id: "RmFjdGlvbjox") { id ... on Faction { name } } } """) assert result == { "data": { "node": {"id": "RmFjdGlvbjox", "name": "Alliance to Restore the Republic"} } } def test_correctly_fetches_id_name_empire(): result = client.execute(""" query EmpireQuery { empire { id name } } """) assert result == { "data": {"empire": {"id": "RmFjdGlvbjoy", "name": "Galactic Empire"}} } def test_correctly_refetches_empire(): result = client.execute(""" query EmpireRefetchQuery { node(id: "RmFjdGlvbjoy") { id ... 
on Faction { name } } } """) assert result == { "data": {"node": {"id": "RmFjdGlvbjoy", "name": "Galactic Empire"}} } def test_correctly_refetches_xwing(): result = client.execute(""" query XWingRefetchQuery { node(id: "U2hpcDox") { id ... on Ship { name } } } """) assert result == {"data": {"node": {"id": "U2hpcDox", "name": "X-Wing"}}} python-graphene-3.4.3/graphene/000077500000000000000000000000001471374454500164405ustar00rootroot00000000000000python-graphene-3.4.3/graphene/__init__.py000066400000000000000000000030261471374454500205520ustar00rootroot00000000000000from .pyutils.version import get_version from .relay import ( BaseGlobalIDType, ClientIDMutation, Connection, ConnectionField, DefaultGlobalIDType, GlobalID, Node, PageInfo, SimpleGlobalIDType, UUIDGlobalIDType, is_node, ) from .types import ( ID, UUID, Argument, Base64, BigInt, Boolean, Context, Date, DateTime, Decimal, Dynamic, Enum, Field, Float, InputField, InputObjectType, Int, Interface, JSONString, List, Mutation, NonNull, ObjectType, ResolveInfo, Scalar, Schema, String, Time, Union, ) from .utils.module_loading import lazy_import from .utils.resolve_only_args import resolve_only_args VERSION = (3, 4, 3, "final", 0) __version__ = get_version(VERSION) __all__ = [ "__version__", "Argument", "Base64", "BigInt", "BaseGlobalIDType", "Boolean", "ClientIDMutation", "Connection", "ConnectionField", "Context", "Date", "DateTime", "Decimal", "DefaultGlobalIDType", "Dynamic", "Enum", "Field", "Float", "GlobalID", "ID", "InputField", "InputObjectType", "Int", "Interface", "JSONString", "List", "Mutation", "Node", "NonNull", "ObjectType", "PageInfo", "ResolveInfo", "Scalar", "Schema", "SimpleGlobalIDType", "String", "Time", "Union", "UUID", "UUIDGlobalIDType", "is_node", "lazy_import", "resolve_only_args", ] 
python-graphene-3.4.3/graphene/pyutils/000077500000000000000000000000001471374454500201515ustar00rootroot00000000000000python-graphene-3.4.3/graphene/pyutils/__init__.py000066400000000000000000000000001471374454500222500ustar00rootroot00000000000000python-graphene-3.4.3/graphene/pyutils/version.py000066400000000000000000000045571471374454500222230ustar00rootroot00000000000000import datetime import os import subprocess def get_version(version=None): "Returns a PEP 440-compliant version number from VERSION." version = get_complete_version(version) # Now build the two parts of the version number: # main = X.Y[.Z] # sub = .devN - for pre-alpha releases # | {a|b|rc}N - for alpha, beta, and rc releases main = get_main_version(version) sub = "" if version[3] == "alpha" and version[4] == 0: git_changeset = get_git_changeset() sub = ".dev%s" % git_changeset if git_changeset else ".dev" elif version[3] != "final": mapping = {"alpha": "a", "beta": "b", "rc": "rc"} sub = mapping[version[3]] + str(version[4]) return str(main + sub) def get_main_version(version=None): "Returns main version (X.Y[.Z]) from VERSION." version = get_complete_version(version) parts = 2 if version[2] == 0 else 3 return ".".join(str(x) for x in version[:parts]) def get_complete_version(version=None): """Returns a tuple of the graphene version. If version argument is non-empty, then checks for correctness of the tuple provided. """ if version is None: from graphene import VERSION as version else: assert len(version) == 5 assert version[3] in ("alpha", "beta", "rc", "final") return version def get_docs_version(version=None): version = get_complete_version(version) if version[3] != "final": return "dev" else: return "%d.%d" % version[:2] def get_git_changeset(): """Returns a numeric identifier of the latest git changeset. The result is the UTC timestamp of the changeset in YYYYMMDDHHMMSS format. 
This value isn't guaranteed to be unique, but collisions are very unlikely, so it's sufficient for generating the development version numbers. """ repo_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) try: git_log = subprocess.Popen( "git log --pretty=format:%ct --quiet -1 HEAD", stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, cwd=repo_dir, universal_newlines=True, ) timestamp = git_log.communicate()[0] timestamp = datetime.datetime.utcfromtimestamp(int(timestamp)) except Exception: return None return timestamp.strftime("%Y%m%d%H%M%S") python-graphene-3.4.3/graphene/relay/000077500000000000000000000000001471374454500175545ustar00rootroot00000000000000python-graphene-3.4.3/graphene/relay/__init__.py000066400000000000000000000007651471374454500216750ustar00rootroot00000000000000from .node import Node, is_node, GlobalID from .mutation import ClientIDMutation from .connection import Connection, ConnectionField, PageInfo from .id_type import ( BaseGlobalIDType, DefaultGlobalIDType, SimpleGlobalIDType, UUIDGlobalIDType, ) __all__ = [ "BaseGlobalIDType", "ClientIDMutation", "Connection", "ConnectionField", "DefaultGlobalIDType", "GlobalID", "Node", "PageInfo", "SimpleGlobalIDType", "UUIDGlobalIDType", "is_node", ] python-graphene-3.4.3/graphene/relay/connection.py000066400000000000000000000146131471374454500222720ustar00rootroot00000000000000import re from collections.abc import Iterable from functools import partial from typing import Type from graphql_relay import connection_from_array from ..types import Boolean, Enum, Int, Interface, List, NonNull, Scalar, String, Union from ..types.field import Field from ..types.objecttype import ObjectType, ObjectTypeOptions from ..utils.thenables import maybe_thenable from .node import is_node, AbstractNode def get_edge_class( connection_class: Type["Connection"], _node: Type[AbstractNode], base_name: str, strict_types: bool = False, ): edge_class = getattr(connection_class, "Edge", None) class 
EdgeBase: node = Field( NonNull(_node) if strict_types else _node, description="The item at the end of the edge", ) cursor = String(required=True, description="A cursor for use in pagination") class EdgeMeta: description = f"A Relay edge containing a `{base_name}` and its cursor." edge_name = f"{base_name}Edge" edge_bases = [edge_class, EdgeBase] if edge_class else [EdgeBase] if not isinstance(edge_class, ObjectType): edge_bases = [*edge_bases, ObjectType] return type(edge_name, tuple(edge_bases), {"Meta": EdgeMeta}) class PageInfo(ObjectType): class Meta: description = ( "The Relay compliant `PageInfo` type, containing data necessary to" " paginate this connection." ) has_next_page = Boolean( required=True, name="hasNextPage", description="When paginating forwards, are there more items?", ) has_previous_page = Boolean( required=True, name="hasPreviousPage", description="When paginating backwards, are there more items?", ) start_cursor = String( name="startCursor", description="When paginating backwards, the cursor to continue.", ) end_cursor = String( name="endCursor", description="When paginating forwards, the cursor to continue.", ) # noinspection PyPep8Naming def page_info_adapter(startCursor, endCursor, hasPreviousPage, hasNextPage): """Adapter for creating PageInfo instances""" return PageInfo( start_cursor=startCursor, end_cursor=endCursor, has_previous_page=hasPreviousPage, has_next_page=hasNextPage, ) class ConnectionOptions(ObjectTypeOptions): node = None class Connection(ObjectType): class Meta: abstract = True @classmethod def __init_subclass_with_meta__( cls, node=None, name=None, strict_types=False, _meta=None, **options ): if not _meta: _meta = ConnectionOptions(cls) assert node, f"You have to provide a node in {cls.__name__}.Meta" assert isinstance(node, NonNull) or issubclass( node, (Scalar, Enum, ObjectType, Interface, Union, NonNull) ), f'Received incompatible node "{node}" for Connection {cls.__name__}.' 
base_name = re.sub("Connection$", "", name or cls.__name__) or node._meta.name if not name: name = f"{base_name}Connection" options["name"] = name _meta.node = node if not _meta.fields: _meta.fields = {} if "page_info" not in _meta.fields: _meta.fields["page_info"] = Field( PageInfo, name="pageInfo", required=True, description="Pagination data for this connection.", ) if "edges" not in _meta.fields: edge_class = get_edge_class(cls, node, base_name, strict_types) # type: ignore cls.Edge = edge_class _meta.fields["edges"] = Field( NonNull(List(NonNull(edge_class) if strict_types else edge_class)), description="Contains the nodes in this connection.", ) return super(Connection, cls).__init_subclass_with_meta__( _meta=_meta, **options ) # noinspection PyPep8Naming def connection_adapter(cls, edges, pageInfo): """Adapter for creating Connection instances""" return cls(edges=edges, page_info=pageInfo) class IterableConnectionField(Field): def __init__(self, type_, *args, **kwargs): kwargs.setdefault("before", String()) kwargs.setdefault("after", String()) kwargs.setdefault("first", Int()) kwargs.setdefault("last", Int()) super(IterableConnectionField, self).__init__(type_, *args, **kwargs) @property def type(self): type_ = super(IterableConnectionField, self).type connection_type = type_ if isinstance(type_, NonNull): connection_type = type_.of_type if is_node(connection_type): raise Exception( "ConnectionFields now need a explicit ConnectionType for Nodes.\n" "Read more: https://github.com/graphql-python/graphene/blob/v2.0.0/UPGRADE-v2.0.md#node-connections" ) assert issubclass( connection_type, Connection ), f'{self.__class__.__name__} type has to be a subclass of Connection. Received "{connection_type}".' 
return type_ @classmethod def resolve_connection(cls, connection_type, args, resolved): if isinstance(resolved, connection_type): return resolved assert isinstance(resolved, Iterable), ( f"Resolved value from the connection field has to be an iterable or instance of {connection_type}. " f'Received "{resolved}"' ) connection = connection_from_array( resolved, args, connection_type=partial(connection_adapter, connection_type), edge_type=connection_type.Edge, page_info_type=page_info_adapter, ) connection.iterable = resolved return connection @classmethod def connection_resolver(cls, resolver, connection_type, root, info, **args): resolved = resolver(root, info, **args) if isinstance(connection_type, NonNull): connection_type = connection_type.of_type on_resolve = partial(cls.resolve_connection, connection_type, args) return maybe_thenable(resolved, on_resolve) def wrap_resolve(self, parent_resolver): resolver = super(IterableConnectionField, self).wrap_resolve(parent_resolver) return partial(self.connection_resolver, resolver, self.type) ConnectionField = IterableConnectionField python-graphene-3.4.3/graphene/relay/id_type.py000066400000000000000000000043071471374454500215670ustar00rootroot00000000000000from graphql_relay import from_global_id, to_global_id from ..types import ID, UUID from ..types.base import BaseType from typing import Type class BaseGlobalIDType: """ Base class that define the required attributes/method for a type. """ graphene_type: Type[BaseType] = ID @classmethod def resolve_global_id(cls, info, global_id): # return _type, _id raise NotImplementedError @classmethod def to_global_id(cls, _type, _id): # return _id raise NotImplementedError class DefaultGlobalIDType(BaseGlobalIDType): """ Default global ID type: base64 encoded version of ": ". 
""" graphene_type = ID @classmethod def resolve_global_id(cls, info, global_id): try: _type, _id = from_global_id(global_id) if not _type: raise ValueError("Invalid Global ID") return _type, _id except Exception as e: raise Exception( f'Unable to parse global ID "{global_id}". ' 'Make sure it is a base64 encoded string in the format: "TypeName:id". ' f"Exception message: {e}" ) @classmethod def to_global_id(cls, _type, _id): return to_global_id(_type, _id) class SimpleGlobalIDType(BaseGlobalIDType): """ Simple global ID type: simply the id of the object. To be used carefully as the user is responsible for ensuring that the IDs are indeed global (otherwise it could cause request caching issues). """ graphene_type = ID @classmethod def resolve_global_id(cls, info, global_id): _type = info.return_type.graphene_type._meta.name return _type, global_id @classmethod def to_global_id(cls, _type, _id): return _id class UUIDGlobalIDType(BaseGlobalIDType): """ UUID global ID type. By definition UUID are global so they are used as they are. 
""" graphene_type = UUID @classmethod def resolve_global_id(cls, info, global_id): _type = info.return_type.graphene_type._meta.name return _type, global_id @classmethod def to_global_id(cls, _type, _id): return _id python-graphene-3.4.3/graphene/relay/mutation.py000066400000000000000000000042451471374454500217730ustar00rootroot00000000000000import re from ..types import Field, InputObjectType, String from ..types.mutation import Mutation from ..utils.thenables import maybe_thenable class ClientIDMutation(Mutation): class Meta: abstract = True @classmethod def __init_subclass_with_meta__( cls, output=None, input_fields=None, arguments=None, name=None, **options ): input_class = getattr(cls, "Input", None) base_name = re.sub("Payload$", "", name or cls.__name__) assert not output, "Can't specify any output" assert not arguments, "Can't specify any arguments" bases = (InputObjectType,) if input_class: bases += (input_class,) if not input_fields: input_fields = {} cls.Input = type( f"{base_name}Input", bases, dict(input_fields, client_mutation_id=String(name="clientMutationId")), ) arguments = dict( input=cls.Input(required=True) # 'client_mutation_id': String(name='clientMutationId') ) mutate_and_get_payload = getattr(cls, "mutate_and_get_payload", None) if cls.mutate and cls.mutate.__func__ == ClientIDMutation.mutate.__func__: assert mutate_and_get_payload, ( f"{name or cls.__name__}.mutate_and_get_payload method is required" " in a ClientIDMutation." 
) if not name: name = f"{base_name}Payload" super(ClientIDMutation, cls).__init_subclass_with_meta__( output=None, arguments=arguments, name=name, **options ) cls._meta.fields["client_mutation_id"] = Field(String, name="clientMutationId") @classmethod def mutate(cls, root, info, input): def on_resolve(payload): try: payload.client_mutation_id = input.get("client_mutation_id") except Exception: raise Exception( f"Cannot set client_mutation_id in the payload object {repr(payload)}" ) return payload result = cls.mutate_and_get_payload(root, info, **input) return maybe_thenable(result, on_resolve) python-graphene-3.4.3/graphene/relay/node.py000066400000000000000000000104071471374454500210550ustar00rootroot00000000000000from functools import partial from inspect import isclass from ..types import Field, Interface, ObjectType from ..types.interface import InterfaceOptions from ..types.utils import get_type from .id_type import BaseGlobalIDType, DefaultGlobalIDType def is_node(objecttype): """ Check if the given objecttype has Node as an interface """ if not isclass(objecttype): return False if not issubclass(objecttype, ObjectType): return False return any(issubclass(i, Node) for i in objecttype._meta.interfaces) class GlobalID(Field): def __init__( self, node=None, parent_type=None, required=True, global_id_type=DefaultGlobalIDType, *args, **kwargs, ): super(GlobalID, self).__init__( global_id_type.graphene_type, required=required, *args, **kwargs ) self.node = node or Node self.parent_type_name = parent_type._meta.name if parent_type else None @staticmethod def id_resolver(parent_resolver, node, root, info, parent_type_name=None, **args): type_id = parent_resolver(root, info, **args) parent_type_name = parent_type_name or info.parent_type.name return node.to_global_id(parent_type_name, type_id) # root._meta.name def wrap_resolve(self, parent_resolver): return partial( self.id_resolver, parent_resolver, self.node, parent_type_name=self.parent_type_name, ) class 
NodeField(Field): def __init__(self, node, type_=False, **kwargs): assert issubclass(node, Node), "NodeField can only operate in Nodes" self.node_type = node self.field_type = type_ global_id_type = node._meta.global_id_type super(NodeField, self).__init__( # If we don't specify a type, the field type will be the node interface type_ or node, id=global_id_type.graphene_type( required=True, description="The ID of the object" ), **kwargs, ) def wrap_resolve(self, parent_resolver): return partial(self.node_type.node_resolver, get_type(self.field_type)) class AbstractNode(Interface): class Meta: abstract = True @classmethod def __init_subclass_with_meta__(cls, global_id_type=DefaultGlobalIDType, **options): assert issubclass( global_id_type, BaseGlobalIDType ), "Custom ID type need to be implemented as a subclass of BaseGlobalIDType." _meta = InterfaceOptions(cls) _meta.global_id_type = global_id_type _meta.fields = { "id": GlobalID( cls, global_id_type=global_id_type, description="The ID of the object" ) } super(AbstractNode, cls).__init_subclass_with_meta__(_meta=_meta, **options) @classmethod def resolve_global_id(cls, info, global_id): return cls._meta.global_id_type.resolve_global_id(info, global_id) class Node(AbstractNode): """An object with an ID""" @classmethod def Field(cls, *args, **kwargs): # noqa: N802 return NodeField(cls, *args, **kwargs) @classmethod def node_resolver(cls, only_type, root, info, id): return cls.get_node_from_global_id(info, id, only_type=only_type) @classmethod def get_node_from_global_id(cls, info, global_id, only_type=None): _type, _id = cls.resolve_global_id(info, global_id) graphene_type = info.schema.get_type(_type) if graphene_type is None: raise Exception(f'Relay Node "{_type}" not found in schema') graphene_type = graphene_type.graphene_type if only_type: assert ( graphene_type == only_type ), f"Must receive a {only_type._meta.name} id." 
# We make sure the ObjectType implements the "Node" interface if cls not in graphene_type._meta.interfaces: raise Exception( f'ObjectType "{_type}" does not implement the "{cls}" interface.' ) get_node = getattr(graphene_type, "get_node", None) if get_node: return get_node(info, _id) @classmethod def to_global_id(cls, type_, id): return cls._meta.global_id_type.to_global_id(type_, id) python-graphene-3.4.3/graphene/relay/tests/000077500000000000000000000000001471374454500207165ustar00rootroot00000000000000python-graphene-3.4.3/graphene/relay/tests/__init__.py000066400000000000000000000000001471374454500230150ustar00rootroot00000000000000python-graphene-3.4.3/graphene/relay/tests/test_connection.py000066400000000000000000000221571471374454500244750ustar00rootroot00000000000000import re from pytest import raises from ...types import Argument, Field, Int, List, NonNull, ObjectType, Schema, String from ..connection import ( Connection, ConnectionField, PageInfo, ConnectionOptions, get_edge_class, ) from ..node import Node class MyObject(ObjectType): class Meta: interfaces = [Node] field = String() def test_connection(): class MyObjectConnection(Connection): extra = String() class Meta: node = MyObject class Edge: other = String() assert MyObjectConnection._meta.name == "MyObjectConnection" fields = MyObjectConnection._meta.fields assert list(fields) == ["page_info", "edges", "extra"] edge_field = fields["edges"] pageinfo_field = fields["page_info"] assert isinstance(edge_field, Field) assert isinstance(edge_field.type, NonNull) assert isinstance(edge_field.type.of_type, List) assert edge_field.type.of_type.of_type == MyObjectConnection.Edge assert isinstance(pageinfo_field, Field) assert isinstance(pageinfo_field.type, NonNull) assert pageinfo_field.type.of_type == PageInfo def test_connection_inherit_abstracttype(): class BaseConnection: extra = String() class MyObjectConnection(BaseConnection, Connection): class Meta: node = MyObject assert 
MyObjectConnection._meta.name == "MyObjectConnection" fields = MyObjectConnection._meta.fields assert list(fields) == ["page_info", "edges", "extra"] def test_connection_extra_abstract_fields(): class ConnectionWithNodes(Connection): class Meta: abstract = True @classmethod def __init_subclass_with_meta__(cls, node=None, name=None, **options): _meta = ConnectionOptions(cls) _meta.fields = { "nodes": Field( NonNull(List(node)), description="Contains all the nodes in this connection.", ), } return super(ConnectionWithNodes, cls).__init_subclass_with_meta__( node=node, name=name, _meta=_meta, **options ) class MyObjectConnection(ConnectionWithNodes): class Meta: node = MyObject class Edge: other = String() assert MyObjectConnection._meta.name == "MyObjectConnection" fields = MyObjectConnection._meta.fields assert list(fields) == ["nodes", "page_info", "edges"] edge_field = fields["edges"] pageinfo_field = fields["page_info"] nodes_field = fields["nodes"] assert isinstance(edge_field, Field) assert isinstance(edge_field.type, NonNull) assert isinstance(edge_field.type.of_type, List) assert edge_field.type.of_type.of_type == MyObjectConnection.Edge assert isinstance(pageinfo_field, Field) assert isinstance(pageinfo_field.type, NonNull) assert pageinfo_field.type.of_type == PageInfo assert isinstance(nodes_field, Field) assert isinstance(nodes_field.type, NonNull) assert isinstance(nodes_field.type.of_type, List) assert nodes_field.type.of_type.of_type == MyObject def test_connection_override_fields(): class ConnectionWithNodes(Connection): class Meta: abstract = True @classmethod def __init_subclass_with_meta__(cls, node=None, name=None, **options): _meta = ConnectionOptions(cls) base_name = ( re.sub("Connection$", "", name or cls.__name__) or node._meta.name ) edge_class = get_edge_class(cls, node, base_name) _meta.fields = { "page_info": Field( NonNull( PageInfo, name="pageInfo", required=True, description="Pagination data for this connection.", ) ), "edges": Field( 
NonNull(List(NonNull(edge_class))), description="Contains the nodes in this connection.", ), } return super(ConnectionWithNodes, cls).__init_subclass_with_meta__( node=node, name=name, _meta=_meta, **options ) class MyObjectConnection(ConnectionWithNodes): class Meta: node = MyObject assert MyObjectConnection._meta.name == "MyObjectConnection" fields = MyObjectConnection._meta.fields assert list(fields) == ["page_info", "edges"] edge_field = fields["edges"] pageinfo_field = fields["page_info"] assert isinstance(edge_field, Field) assert isinstance(edge_field.type, NonNull) assert isinstance(edge_field.type.of_type, List) assert isinstance(edge_field.type.of_type.of_type, NonNull) assert edge_field.type.of_type.of_type.of_type.__name__ == "MyObjectEdge" # This page info is NonNull assert isinstance(pageinfo_field, Field) assert isinstance(edge_field.type, NonNull) assert pageinfo_field.type.of_type == PageInfo def test_connection_name(): custom_name = "MyObjectCustomNameConnection" class BaseConnection: extra = String() class MyObjectConnection(BaseConnection, Connection): class Meta: node = MyObject name = custom_name assert MyObjectConnection._meta.name == custom_name def test_edge(): class MyObjectConnection(Connection): class Meta: node = MyObject class Edge: other = String() Edge = MyObjectConnection.Edge assert Edge._meta.name == "MyObjectEdge" edge_fields = Edge._meta.fields assert list(edge_fields) == ["node", "cursor", "other"] assert isinstance(edge_fields["node"], Field) assert edge_fields["node"].type == MyObject assert isinstance(edge_fields["other"], Field) assert edge_fields["other"].type == String def test_edge_with_bases(): class BaseEdge: extra = String() class MyObjectConnection(Connection): class Meta: node = MyObject class Edge(BaseEdge): other = String() Edge = MyObjectConnection.Edge assert Edge._meta.name == "MyObjectEdge" edge_fields = Edge._meta.fields assert list(edge_fields) == ["node", "cursor", "extra", "other"] assert 
isinstance(edge_fields["node"], Field) assert edge_fields["node"].type == MyObject assert isinstance(edge_fields["other"], Field) assert edge_fields["other"].type == String def test_edge_with_nonnull_node(): class MyObjectConnection(Connection): class Meta: node = NonNull(MyObject) edge_fields = MyObjectConnection.Edge._meta.fields assert isinstance(edge_fields["node"], Field) assert isinstance(edge_fields["node"].type, NonNull) assert edge_fields["node"].type.of_type == MyObject def test_pageinfo(): assert PageInfo._meta.name == "PageInfo" fields = PageInfo._meta.fields assert list(fields) == [ "has_next_page", "has_previous_page", "start_cursor", "end_cursor", ] def test_connectionfield(): class MyObjectConnection(Connection): class Meta: node = MyObject field = ConnectionField(MyObjectConnection) assert field.args == { "before": Argument(String), "after": Argument(String), "first": Argument(Int), "last": Argument(Int), } def test_connectionfield_node_deprecated(): field = ConnectionField(MyObject) with raises(Exception) as exc_info: field.type assert "ConnectionFields now need a explicit ConnectionType for Nodes." 
in str( exc_info.value ) def test_connectionfield_custom_args(): class MyObjectConnection(Connection): class Meta: node = MyObject field = ConnectionField( MyObjectConnection, before=String(required=True), extra=String() ) assert field.args == { "before": Argument(NonNull(String)), "after": Argument(String), "first": Argument(Int), "last": Argument(Int), "extra": Argument(String), } def test_connectionfield_required(): class MyObjectConnection(Connection): class Meta: node = MyObject class Query(ObjectType): test_connection = ConnectionField(MyObjectConnection, required=True) def resolve_test_connection(root, info, **args): return [] schema = Schema(query=Query) executed = schema.execute("{ testConnection { edges { cursor } } }") assert not executed.errors assert executed.data == {"testConnection": {"edges": []}} def test_connectionfield_strict_types(): class MyObjectConnection(Connection): class Meta: node = MyObject strict_types = True connection_field = ConnectionField(MyObjectConnection) edges_field_type = connection_field.type._meta.fields["edges"].type assert isinstance(edges_field_type, NonNull) edges_list_element_type = edges_field_type.of_type.of_type assert isinstance(edges_list_element_type, NonNull) node_field = edges_list_element_type.of_type._meta.fields["node"] assert isinstance(node_field.type, NonNull) python-graphene-3.4.3/graphene/relay/tests/test_connection_async.py000066400000000000000000000053021471374454500256630ustar00rootroot00000000000000from pytest import mark from graphql_relay.utils import base64 from graphene.types import ObjectType, Schema, String from graphene.relay.connection import Connection, ConnectionField, PageInfo from graphene.relay.node import Node letter_chars = ["A", "B", "C", "D", "E"] class Letter(ObjectType): class Meta: interfaces = (Node,) letter = String() class LetterConnection(Connection): class Meta: node = Letter class Query(ObjectType): letters = ConnectionField(LetterConnection) connection_letters = 
ConnectionField(LetterConnection) async_letters = ConnectionField(LetterConnection) node = Node.Field() def resolve_letters(self, info, **args): return list(letters.values()) async def resolve_async_letters(self, info, **args): return list(letters.values()) def resolve_connection_letters(self, info, **args): return LetterConnection( page_info=PageInfo(has_next_page=True, has_previous_page=False), edges=[ LetterConnection.Edge(node=Letter(id=0, letter="A"), cursor="a-cursor") ], ) schema = Schema(Query) letters = {letter: Letter(id=i, letter=letter) for i, letter in enumerate(letter_chars)} def edges(selected_letters): return [ { "node": {"id": base64("Letter:%s" % letter.id), "letter": letter.letter}, "cursor": base64("arrayconnection:%s" % letter.id), } for letter in [letters[i] for i in selected_letters] ] def cursor_for(ltr): letter = letters[ltr] return base64("arrayconnection:%s" % letter.id) def execute(args=""): if args: args = "(" + args + ")" return schema.execute( """ { letters%s { edges { node { id letter } cursor } pageInfo { hasPreviousPage hasNextPage startCursor endCursor } } } """ % args ) @mark.asyncio async def test_connection_async(): result = await schema.execute_async( """ { asyncLetters(first:1) { edges { node { id letter } } pageInfo { hasPreviousPage hasNextPage } } } """ ) assert not result.errors assert result.data == { "asyncLetters": { "edges": [{"node": {"id": "TGV0dGVyOjA=", "letter": "A"}}], "pageInfo": {"hasPreviousPage": False, "hasNextPage": True}, } } python-graphene-3.4.3/graphene/relay/tests/test_connection_query.py000066400000000000000000000155451471374454500257250ustar00rootroot00000000000000from pytest import mark from graphql_relay.utils import base64 from ...types import ObjectType, Schema, String from ..connection import Connection, ConnectionField, PageInfo from ..node import Node letter_chars = ["A", "B", "C", "D", "E"] class Letter(ObjectType): class Meta: interfaces = (Node,) letter = String() class 
LetterConnection(Connection): class Meta: node = Letter class Query(ObjectType): letters = ConnectionField(LetterConnection) connection_letters = ConnectionField(LetterConnection) async_letters = ConnectionField(LetterConnection) node = Node.Field() def resolve_letters(self, info, **args): return list(letters.values()) async def resolve_async_letters(self, info, **args): return list(letters.values()) def resolve_connection_letters(self, info, **args): return LetterConnection( page_info=PageInfo(has_next_page=True, has_previous_page=False), edges=[ LetterConnection.Edge(node=Letter(id=0, letter="A"), cursor="a-cursor") ], ) schema = Schema(Query) letters = {letter: Letter(id=i, letter=letter) for i, letter in enumerate(letter_chars)} def edges(selected_letters): return [ { "node": {"id": base64("Letter:%s" % letter.id), "letter": letter.letter}, "cursor": base64("arrayconnection:%s" % letter.id), } for letter in [letters[i] for i in selected_letters] ] def cursor_for(ltr): letter = letters[ltr] return base64("arrayconnection:%s" % letter.id) async def execute(args=""): if args: args = "(" + args + ")" return await schema.execute_async( """ { letters%s { edges { node { id letter } cursor } pageInfo { hasPreviousPage hasNextPage startCursor endCursor } } } """ % args ) async def check(args, letters, has_previous_page=False, has_next_page=False): result = await execute(args) expected_edges = edges(letters) expected_page_info = { "hasPreviousPage": has_previous_page, "hasNextPage": has_next_page, "endCursor": expected_edges[-1]["cursor"] if expected_edges else None, "startCursor": expected_edges[0]["cursor"] if expected_edges else None, } assert not result.errors assert result.data == { "letters": {"edges": expected_edges, "pageInfo": expected_page_info} } @mark.asyncio async def test_returns_all_elements_without_filters(): await check("", "ABCDE") @mark.asyncio async def test_respects_a_smaller_first(): await check("first: 2", "AB", has_next_page=True) @mark.asyncio 
async def test_respects_an_overly_large_first(): await check("first: 10", "ABCDE") @mark.asyncio async def test_respects_a_smaller_last(): await check("last: 2", "DE", has_previous_page=True) @mark.asyncio async def test_respects_an_overly_large_last(): await check("last: 10", "ABCDE") @mark.asyncio async def test_respects_first_and_after(): await check(f'first: 2, after: "{cursor_for("B")}"', "CD", has_next_page=True) @mark.asyncio async def test_respects_first_and_after_with_long_first(): await check(f'first: 10, after: "{cursor_for("B")}"', "CDE") @mark.asyncio async def test_respects_last_and_before(): await check(f'last: 2, before: "{cursor_for("D")}"', "BC", has_previous_page=True) @mark.asyncio async def test_respects_last_and_before_with_long_last(): await check(f'last: 10, before: "{cursor_for("D")}"', "ABC") @mark.asyncio async def test_respects_first_and_after_and_before_too_few(): await check( f'first: 2, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BC", has_next_page=True, ) @mark.asyncio async def test_respects_first_and_after_and_before_too_many(): await check( f'first: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD" ) @mark.asyncio async def test_respects_first_and_after_and_before_exactly_right(): await check( f'first: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD" ) @mark.asyncio async def test_respects_last_and_after_and_before_too_few(): await check( f'last: 2, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "CD", has_previous_page=True, ) @mark.asyncio async def test_respects_last_and_after_and_before_too_many(): await check( f'last: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD" ) @mark.asyncio async def test_respects_last_and_after_and_before_exactly_right(): await check( f'last: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD" ) @mark.asyncio async def test_returns_no_elements_if_first_is_0(): await check("first: 0", "", has_next_page=True) 
@mark.asyncio async def test_returns_all_elements_if_cursors_are_invalid(): await check('before: "invalid" after: "invalid"', "ABCDE") @mark.asyncio async def test_returns_all_elements_if_cursors_are_on_the_outside(): await check( f'before: "{base64("arrayconnection:%s" % 6)}" after: "{base64("arrayconnection:%s" % -1)}"', "ABCDE", ) @mark.asyncio async def test_returns_no_elements_if_cursors_cross(): await check( f'before: "{base64("arrayconnection:%s" % 2)}" after: "{base64("arrayconnection:%s" % 4)}"', "", ) @mark.asyncio async def test_connection_type_nodes(): result = await schema.execute_async( """ { connectionLetters { edges { node { id letter } cursor } pageInfo { hasPreviousPage hasNextPage } } } """ ) assert not result.errors assert result.data == { "connectionLetters": { "edges": [ {"node": {"id": "TGV0dGVyOjA=", "letter": "A"}, "cursor": "a-cursor"} ], "pageInfo": {"hasPreviousPage": False, "hasNextPage": True}, } } @mark.asyncio async def test_connection_async(): result = await schema.execute_async( """ { asyncLetters(first:1) { edges { node { id letter } } pageInfo { hasPreviousPage hasNextPage } } } """ ) assert not result.errors assert result.data == { "asyncLetters": { "edges": [{"node": {"id": "TGV0dGVyOjA=", "letter": "A"}}], "pageInfo": {"hasPreviousPage": False, "hasNextPage": True}, } } python-graphene-3.4.3/graphene/relay/tests/test_custom_global_id.py000066400000000000000000000241551471374454500256440ustar00rootroot00000000000000import re from uuid import uuid4 from graphql import graphql_sync from ..id_type import BaseGlobalIDType, SimpleGlobalIDType, UUIDGlobalIDType from ..node import Node from ...types import Int, ObjectType, Schema, String class TestUUIDGlobalID: def setup_method(self): self.user_list = [ {"id": uuid4(), "name": "First"}, {"id": uuid4(), "name": "Second"}, {"id": uuid4(), "name": "Third"}, {"id": uuid4(), "name": "Fourth"}, ] self.users = {user["id"]: user for user in self.user_list} class CustomNode(Node): class Meta: 
global_id_type = UUIDGlobalIDType class User(ObjectType): class Meta: interfaces = [CustomNode] name = String() @classmethod def get_node(cls, _type, _id): return self.users[_id] class RootQuery(ObjectType): user = CustomNode.Field(User) self.schema = Schema(query=RootQuery, types=[User]) self.graphql_schema = self.schema.graphql_schema def test_str_schema_correct(self): """ Check that the schema has the expected and custom node interface and user type and that they both use UUIDs """ parsed = re.findall(r"(.+) \{\n\s*([\w\W]*?)\n\}", str(self.schema)) types = [t for t, f in parsed] fields = [f for t, f in parsed] custom_node_interface = "interface CustomNode" assert custom_node_interface in types assert ( '"""The ID of the object"""\n id: UUID!' == fields[types.index(custom_node_interface)] ) user_type = "type User implements CustomNode" assert user_type in types assert ( '"""The ID of the object"""\n id: UUID!\n name: String' == fields[types.index(user_type)] ) def test_get_by_id(self): query = """query userById($id: UUID!) 
{ user(id: $id) { id name } }""" # UUID need to be converted to string for serialization result = graphql_sync( self.graphql_schema, query, variable_values={"id": str(self.user_list[0]["id"])}, ) assert not result.errors assert result.data["user"]["id"] == str(self.user_list[0]["id"]) assert result.data["user"]["name"] == self.user_list[0]["name"] class TestSimpleGlobalID: def setup_method(self): self.user_list = [ {"id": "my global primary key in clear 1", "name": "First"}, {"id": "my global primary key in clear 2", "name": "Second"}, {"id": "my global primary key in clear 3", "name": "Third"}, {"id": "my global primary key in clear 4", "name": "Fourth"}, ] self.users = {user["id"]: user for user in self.user_list} class CustomNode(Node): class Meta: global_id_type = SimpleGlobalIDType class User(ObjectType): class Meta: interfaces = [CustomNode] name = String() @classmethod def get_node(cls, _type, _id): return self.users[_id] class RootQuery(ObjectType): user = CustomNode.Field(User) self.schema = Schema(query=RootQuery, types=[User]) self.graphql_schema = self.schema.graphql_schema def test_str_schema_correct(self): """ Check that the schema has the expected and custom node interface and user type and that they both use UUIDs """ parsed = re.findall(r"(.+) \{\n\s*([\w\W]*?)\n\}", str(self.schema)) types = [t for t, f in parsed] fields = [f for t, f in parsed] custom_node_interface = "interface CustomNode" assert custom_node_interface in types assert ( '"""The ID of the object"""\n id: ID!' 
== fields[types.index(custom_node_interface)] ) user_type = "type User implements CustomNode" assert user_type in types assert ( '"""The ID of the object"""\n id: ID!\n name: String' == fields[types.index(user_type)] ) def test_get_by_id(self): query = """query { user(id: "my global primary key in clear 3") { id name } }""" result = graphql_sync(self.graphql_schema, query) assert not result.errors assert result.data["user"]["id"] == self.user_list[2]["id"] assert result.data["user"]["name"] == self.user_list[2]["name"] class TestCustomGlobalID: def setup_method(self): self.user_list = [ {"id": 1, "name": "First"}, {"id": 2, "name": "Second"}, {"id": 3, "name": "Third"}, {"id": 4, "name": "Fourth"}, ] self.users = {user["id"]: user for user in self.user_list} class CustomGlobalIDType(BaseGlobalIDType): """ Global id that is simply and integer in clear. """ graphene_type = Int @classmethod def resolve_global_id(cls, info, global_id): _type = info.return_type.graphene_type._meta.name return _type, global_id @classmethod def to_global_id(cls, _type, _id): return _id class CustomNode(Node): class Meta: global_id_type = CustomGlobalIDType class User(ObjectType): class Meta: interfaces = [CustomNode] name = String() @classmethod def get_node(cls, _type, _id): return self.users[_id] class RootQuery(ObjectType): user = CustomNode.Field(User) self.schema = Schema(query=RootQuery, types=[User]) self.graphql_schema = self.schema.graphql_schema def test_str_schema_correct(self): """ Check that the schema has the expected and custom node interface and user type and that they both use UUIDs """ parsed = re.findall(r"(.+) \{\n\s*([\w\W]*?)\n\}", str(self.schema)) types = [t for t, f in parsed] fields = [f for t, f in parsed] custom_node_interface = "interface CustomNode" assert custom_node_interface in types assert ( '"""The ID of the object"""\n id: Int!' 
== fields[types.index(custom_node_interface)] ) user_type = "type User implements CustomNode" assert user_type in types assert ( '"""The ID of the object"""\n id: Int!\n name: String' == fields[types.index(user_type)] ) def test_get_by_id(self): query = """query { user(id: 2) { id name } }""" result = graphql_sync(self.graphql_schema, query) assert not result.errors assert result.data["user"]["id"] == self.user_list[1]["id"] assert result.data["user"]["name"] == self.user_list[1]["name"] class TestIncompleteCustomGlobalID: def setup_method(self): self.user_list = [ {"id": 1, "name": "First"}, {"id": 2, "name": "Second"}, {"id": 3, "name": "Third"}, {"id": 4, "name": "Fourth"}, ] self.users = {user["id"]: user for user in self.user_list} def test_must_define_to_global_id(self): """ Test that if the `to_global_id` method is not defined, we can query the object, but we can't request its ID. """ class CustomGlobalIDType(BaseGlobalIDType): graphene_type = Int @classmethod def resolve_global_id(cls, info, global_id): _type = info.return_type.graphene_type._meta.name return _type, global_id class CustomNode(Node): class Meta: global_id_type = CustomGlobalIDType class User(ObjectType): class Meta: interfaces = [CustomNode] name = String() @classmethod def get_node(cls, _type, _id): return self.users[_id] class RootQuery(ObjectType): user = CustomNode.Field(User) self.schema = Schema(query=RootQuery, types=[User]) self.graphql_schema = self.schema.graphql_schema query = """query { user(id: 2) { name } }""" result = graphql_sync(self.graphql_schema, query) assert not result.errors assert result.data["user"]["name"] == self.user_list[1]["name"] query = """query { user(id: 2) { id name } }""" result = graphql_sync(self.graphql_schema, query) assert result.errors is not None assert len(result.errors) == 1 assert result.errors[0].path == ["user", "id"] def test_must_define_resolve_global_id(self): """ Test that if the `resolve_global_id` method is not defined, we can't query the 
object by ID. """ class CustomGlobalIDType(BaseGlobalIDType): graphene_type = Int @classmethod def to_global_id(cls, _type, _id): return _id class CustomNode(Node): class Meta: global_id_type = CustomGlobalIDType class User(ObjectType): class Meta: interfaces = [CustomNode] name = String() @classmethod def get_node(cls, _type, _id): return self.users[_id] class RootQuery(ObjectType): user = CustomNode.Field(User) self.schema = Schema(query=RootQuery, types=[User]) self.graphql_schema = self.schema.graphql_schema query = """query { user(id: 2) { id name } }""" result = graphql_sync(self.graphql_schema, query) assert result.errors is not None assert len(result.errors) == 1 assert result.errors[0].path == ["user"] python-graphene-3.4.3/graphene/relay/tests/test_global_id.py000066400000000000000000000030361471374454500242450ustar00rootroot00000000000000from graphql_relay import to_global_id from ...types import ID, NonNull, ObjectType, String from ...types.definitions import GrapheneObjectType from ..node import GlobalID, Node class CustomNode(Node): class Meta: name = "Node" class User(ObjectType): class Meta: interfaces = [CustomNode] name = String() class Info: def __init__(self, parent_type): self.parent_type = GrapheneObjectType( graphene_type=parent_type, name=parent_type._meta.name, description=parent_type._meta.description, fields=None, is_type_of=parent_type.is_type_of, interfaces=None, ) def test_global_id_defaults_to_required_and_node(): gid = GlobalID() assert isinstance(gid.type, NonNull) assert gid.type.of_type == ID assert gid.node == Node def test_global_id_allows_overriding_of_node_and_required(): gid = GlobalID(node=CustomNode, required=False) assert gid.type == ID assert gid.node == CustomNode def test_global_id_defaults_to_info_parent_type(): my_id = "1" gid = GlobalID() id_resolver = gid.wrap_resolve(lambda *_: my_id) my_global_id = id_resolver(None, Info(User)) assert my_global_id == to_global_id(User._meta.name, my_id) def 
test_global_id_allows_setting_customer_parent_type(): my_id = "1" gid = GlobalID(parent_type=User) id_resolver = gid.wrap_resolve(lambda *_: my_id) my_global_id = id_resolver(None, None) assert my_global_id == to_global_id(User._meta.name, my_id) python-graphene-3.4.3/graphene/relay/tests/test_mutation.py000066400000000000000000000131661471374454500241760ustar00rootroot00000000000000from pytest import mark, raises from ...types import ( ID, Argument, Field, InputField, InputObjectType, NonNull, ObjectType, Schema, ) from ...types.scalars import String from ..mutation import ClientIDMutation class SharedFields: shared = String() class MyNode(ObjectType): # class Meta: # interfaces = (Node, ) id = ID() name = String() class SaySomething(ClientIDMutation): class Input: what = String() phrase = String() @staticmethod def mutate_and_get_payload(self, info, what, client_mutation_id=None): return SaySomething(phrase=str(what)) class FixedSaySomething: __slots__ = ("phrase",) def __init__(self, phrase): self.phrase = phrase class SaySomethingFixed(ClientIDMutation): class Input: what = String() phrase = String() @staticmethod def mutate_and_get_payload(self, info, what, client_mutation_id=None): return FixedSaySomething(phrase=str(what)) class SaySomethingAsync(ClientIDMutation): class Input: what = String() phrase = String() @staticmethod async def mutate_and_get_payload(self, info, what, client_mutation_id=None): return SaySomething(phrase=str(what)) # MyEdge = MyNode.Connection.Edge class MyEdge(ObjectType): node = Field(MyNode) cursor = String() class OtherMutation(ClientIDMutation): class Input(SharedFields): additional_field = String() name = String() my_node_edge = Field(MyEdge) @staticmethod def mutate_and_get_payload( self, info, shared="", additional_field="", client_mutation_id=None ): edge_type = MyEdge return OtherMutation( name=shared + additional_field, my_node_edge=edge_type(cursor="1", node=MyNode(name="name")), ) class RootQuery(ObjectType): something = 
String() class Mutation(ObjectType): say = SaySomething.Field() say_fixed = SaySomethingFixed.Field() say_async = SaySomethingAsync.Field() other = OtherMutation.Field() schema = Schema(query=RootQuery, mutation=Mutation) def test_no_mutate_and_get_payload(): with raises(AssertionError) as excinfo: class MyMutation(ClientIDMutation): pass assert ( "MyMutation.mutate_and_get_payload method is required in a ClientIDMutation." == str(excinfo.value) ) def test_mutation(): fields = SaySomething._meta.fields assert list(fields) == ["phrase", "client_mutation_id"] assert SaySomething._meta.name == "SaySomethingPayload" assert isinstance(fields["phrase"], Field) field = SaySomething.Field() assert field.type == SaySomething assert list(field.args) == ["input"] assert isinstance(field.args["input"], Argument) assert isinstance(field.args["input"].type, NonNull) assert field.args["input"].type.of_type == SaySomething.Input assert isinstance(fields["client_mutation_id"], Field) assert fields["client_mutation_id"].name == "clientMutationId" assert fields["client_mutation_id"].type == String def test_mutation_input(): Input = SaySomething.Input assert issubclass(Input, InputObjectType) fields = Input._meta.fields assert list(fields) == ["what", "client_mutation_id"] assert isinstance(fields["what"], InputField) assert fields["what"].type == String assert isinstance(fields["client_mutation_id"], InputField) assert fields["client_mutation_id"].type == String def test_subclassed_mutation(): fields = OtherMutation._meta.fields assert list(fields) == ["name", "my_node_edge", "client_mutation_id"] assert isinstance(fields["name"], Field) field = OtherMutation.Field() assert field.type == OtherMutation assert list(field.args) == ["input"] assert isinstance(field.args["input"], Argument) assert isinstance(field.args["input"].type, NonNull) assert field.args["input"].type.of_type == OtherMutation.Input def test_subclassed_mutation_input(): Input = OtherMutation.Input assert 
issubclass(Input, InputObjectType) fields = Input._meta.fields assert list(fields) == ["shared", "additional_field", "client_mutation_id"] assert isinstance(fields["shared"], InputField) assert fields["shared"].type == String assert isinstance(fields["additional_field"], InputField) assert fields["additional_field"].type == String assert isinstance(fields["client_mutation_id"], InputField) assert fields["client_mutation_id"].type == String def test_node_query(): executed = schema.execute( 'mutation a { say(input: {what:"hello", clientMutationId:"1"}) { phrase } }' ) assert not executed.errors assert executed.data == {"say": {"phrase": "hello"}} def test_node_query_fixed(): executed = schema.execute( 'mutation a { sayFixed(input: {what:"hello", clientMutationId:"1"}) { phrase } }' ) assert "Cannot set client_mutation_id in the payload object" in str( executed.errors[0] ) @mark.asyncio async def test_node_query_async(): executed = await schema.execute_async( 'mutation a { sayAsync(input: {what:"hello", clientMutationId:"1"}) { phrase } }' ) assert not executed.errors assert executed.data == {"sayAsync": {"phrase": "hello"}} def test_edge_query(): executed = schema.execute( 'mutation a { other(input: {clientMutationId:"1"}) { clientMutationId, myNodeEdge { cursor node { name }} } }' ) assert not executed.errors assert dict(executed.data) == { "other": { "clientMutationId": "1", "myNodeEdge": {"cursor": "1", "node": {"name": "name"}}, } } python-graphene-3.4.3/graphene/relay/tests/test_mutation_async.py000066400000000000000000000043261471374454500253710ustar00rootroot00000000000000from pytest import mark from graphene.types import ID, Field, ObjectType, Schema from graphene.types.scalars import String from graphene.relay.mutation import ClientIDMutation from graphene.test import Client class SharedFields(object): shared = String() class MyNode(ObjectType): # class Meta: # interfaces = (Node, ) id = ID() name = String() class SaySomethingAsync(ClientIDMutation): class 
Input: what = String() phrase = String() @staticmethod async def mutate_and_get_payload(self, info, what, client_mutation_id=None): return SaySomethingAsync(phrase=str(what)) # MyEdge = MyNode.Connection.Edge class MyEdge(ObjectType): node = Field(MyNode) cursor = String() class OtherMutation(ClientIDMutation): class Input(SharedFields): additional_field = String() name = String() my_node_edge = Field(MyEdge) @staticmethod def mutate_and_get_payload( self, info, shared="", additional_field="", client_mutation_id=None ): edge_type = MyEdge return OtherMutation( name=shared + additional_field, my_node_edge=edge_type(cursor="1", node=MyNode(name="name")), ) class RootQuery(ObjectType): something = String() class Mutation(ObjectType): say_promise = SaySomethingAsync.Field() other = OtherMutation.Field() schema = Schema(query=RootQuery, mutation=Mutation) client = Client(schema) @mark.asyncio async def test_node_query_promise(): executed = await client.execute_async( 'mutation a { sayPromise(input: {what:"hello", clientMutationId:"1"}) { phrase } }' ) assert isinstance(executed, dict) assert "errors" not in executed assert executed["data"] == {"sayPromise": {"phrase": "hello"}} @mark.asyncio async def test_edge_query(): executed = await client.execute_async( 'mutation a { other(input: {clientMutationId:"1"}) { clientMutationId, myNodeEdge { cursor node { name }} } }' ) assert isinstance(executed, dict) assert "errors" not in executed assert executed["data"] == { "other": { "clientMutationId": "1", "myNodeEdge": {"cursor": "1", "node": {"name": "name"}}, } } python-graphene-3.4.3/graphene/relay/tests/test_node.py000066400000000000000000000127501471374454500232610ustar00rootroot00000000000000import re from textwrap import dedent from graphql_relay import to_global_id from ...types import ObjectType, Schema, String from ..node import Node, is_node class SharedNodeFields: shared = String() something_else = String() def resolve_something_else(*_): return "----" class 
MyNode(ObjectType): class Meta: interfaces = (Node,) name = String() @staticmethod def get_node(info, id): return MyNode(name=str(id)) class MyOtherNode(SharedNodeFields, ObjectType): extra_field = String() class Meta: interfaces = (Node,) def resolve_extra_field(self, *_): return "extra field info." @staticmethod def get_node(info, id): return MyOtherNode(shared=str(id)) class RootQuery(ObjectType): first = String() node = Node.Field() only_node = Node.Field(MyNode) only_node_lazy = Node.Field(lambda: MyNode) schema = Schema(query=RootQuery, types=[MyNode, MyOtherNode]) def test_node_good(): assert "id" in MyNode._meta.fields assert is_node(MyNode) assert not is_node(object) assert not is_node("node") def test_node_query(): executed = schema.execute( '{ node(id:"%s") { ... on MyNode { name } } }' % Node.to_global_id("MyNode", 1) ) assert not executed.errors assert executed.data == {"node": {"name": "1"}} def test_subclassed_node_query(): executed = schema.execute( '{ node(id:"%s") { ... on MyOtherNode { shared, extraField, somethingElse } } }' % to_global_id("MyOtherNode", 1) ) assert not executed.errors assert executed.data == { "node": { "shared": "1", "extraField": "extra field info.", "somethingElse": "----", } } def test_node_requesting_non_node(): executed = schema.execute( '{ node(id:"%s") { __typename } } ' % Node.to_global_id("RootQuery", 1) ) assert executed.errors assert re.match( r"ObjectType .* does not implement the .* interface.", executed.errors[0].message, ) assert executed.data == {"node": None} def test_node_requesting_unknown_type(): executed = schema.execute( '{ node(id:"%s") { __typename } } ' % Node.to_global_id("UnknownType", 1) ) assert executed.errors assert re.match(r"Relay Node .* not found in schema", executed.errors[0].message) assert executed.data == {"node": None} def test_node_query_incorrect_id(): executed = schema.execute( '{ node(id:"%s") { ... 
on MyNode { name } } }' % "something:2" ) assert executed.errors assert re.match(r"Unable to parse global ID .*", executed.errors[0].message) assert executed.data == {"node": None} def test_node_field(): node_field = Node.Field() assert node_field.type == Node assert node_field.node_type == Node def test_node_field_custom(): node_field = Node.Field(MyNode) assert node_field.type == MyNode assert node_field.node_type == Node def test_node_field_args(): field_args = { "name": "my_custom_name", "description": "my_custom_description", "deprecation_reason": "my_custom_deprecation_reason", } node_field = Node.Field(**field_args) for field_arg, value in field_args.items(): assert getattr(node_field, field_arg) == value def test_node_field_only_type(): executed = schema.execute( '{ onlyNode(id:"%s") { __typename, name } } ' % Node.to_global_id("MyNode", 1) ) assert not executed.errors assert executed.data == {"onlyNode": {"__typename": "MyNode", "name": "1"}} def test_node_field_only_type_wrong(): executed = schema.execute( '{ onlyNode(id:"%s") { __typename, name } } ' % Node.to_global_id("MyOtherNode", 1) ) assert len(executed.errors) == 1 assert str(executed.errors[0]).startswith("Must receive a MyNode id.") assert executed.data == {"onlyNode": None} def test_node_field_only_lazy_type(): executed = schema.execute( '{ onlyNodeLazy(id:"%s") { __typename, name } } ' % Node.to_global_id("MyNode", 1) ) assert not executed.errors assert executed.data == {"onlyNodeLazy": {"__typename": "MyNode", "name": "1"}} def test_node_field_only_lazy_type_wrong(): executed = schema.execute( '{ onlyNodeLazy(id:"%s") { __typename, name } } ' % Node.to_global_id("MyOtherNode", 1) ) assert len(executed.errors) == 1 assert str(executed.errors[0]).startswith("Must receive a MyNode id.") assert executed.data == {"onlyNodeLazy": None} def test_str_schema(): assert ( str(schema).strip() == dedent( ''' schema { query: RootQuery } type MyNode implements Node { """The ID of the object""" id: ID! 
name: String } """An object with an ID""" interface Node { """The ID of the object""" id: ID! } type MyOtherNode implements Node { """The ID of the object""" id: ID! shared: String somethingElse: String extraField: String } type RootQuery { first: String node( """The ID of the object""" id: ID! ): Node onlyNode( """The ID of the object""" id: ID! ): MyNode onlyNodeLazy( """The ID of the object""" id: ID! ): MyNode } ''' ).strip() ) python-graphene-3.4.3/graphene/relay/tests/test_node_custom.py000066400000000000000000000151251471374454500246520ustar00rootroot00000000000000from textwrap import dedent from graphql import graphql_sync from ...types import Interface, ObjectType, Schema from ...types.scalars import Int, String from ..node import Node class CustomNode(Node): class Meta: name = "Node" @staticmethod def to_global_id(type_, id): return id @staticmethod def get_node_from_global_id(info, id, only_type=None): assert info.schema is graphql_schema if id in user_data: return user_data.get(id) else: return photo_data.get(id) class BasePhoto(Interface): width = Int(description="The width of the photo in pixels") class User(ObjectType): class Meta: interfaces = [CustomNode] name = String(description="The full name of the user") class Photo(ObjectType): class Meta: interfaces = [CustomNode, BasePhoto] user_data = {"1": User(id="1", name="John Doe"), "2": User(id="2", name="Jane Smith")} photo_data = {"3": Photo(id="3", width=300), "4": Photo(id="4", width=400)} class RootQuery(ObjectType): node = CustomNode.Field() schema = Schema(query=RootQuery, types=[User, Photo]) graphql_schema = schema.graphql_schema def test_str_schema_correct(): assert ( str(schema).strip() == dedent( ''' schema { query: RootQuery } type User implements Node { """The ID of the object""" id: ID! """The full name of the user""" name: String } interface Node { """The ID of the object""" id: ID! } type Photo implements Node & BasePhoto { """The ID of the object""" id: ID! 
"""The width of the photo in pixels""" width: Int } interface BasePhoto { """The width of the photo in pixels""" width: Int } type RootQuery { node( """The ID of the object""" id: ID! ): Node } ''' ).strip() ) def test_gets_the_correct_id_for_users(): query = """ { node(id: "1") { id } } """ expected = {"node": {"id": "1"}} result = graphql_sync(graphql_schema, query) assert not result.errors assert result.data == expected def test_gets_the_correct_id_for_photos(): query = """ { node(id: "4") { id } } """ expected = {"node": {"id": "4"}} result = graphql_sync(graphql_schema, query) assert not result.errors assert result.data == expected def test_gets_the_correct_name_for_users(): query = """ { node(id: "1") { id ... on User { name } } } """ expected = {"node": {"id": "1", "name": "John Doe"}} result = graphql_sync(graphql_schema, query) assert not result.errors assert result.data == expected def test_gets_the_correct_width_for_photos(): query = """ { node(id: "4") { id ... on Photo { width } } } """ expected = {"node": {"id": "4", "width": 400}} result = graphql_sync(graphql_schema, query) assert not result.errors assert result.data == expected def test_gets_the_correct_typename_for_users(): query = """ { node(id: "1") { id __typename } } """ expected = {"node": {"id": "1", "__typename": "User"}} result = graphql_sync(graphql_schema, query) assert not result.errors assert result.data == expected def test_gets_the_correct_typename_for_photos(): query = """ { node(id: "4") { id __typename } } """ expected = {"node": {"id": "4", "__typename": "Photo"}} result = graphql_sync(graphql_schema, query) assert not result.errors assert result.data == expected def test_ignores_photo_fragments_on_user(): query = """ { node(id: "1") { id ... 
on Photo { width } } } """ expected = {"node": {"id": "1"}} result = graphql_sync(graphql_schema, query) assert not result.errors assert result.data == expected def test_returns_null_for_bad_ids(): query = """ { node(id: "5") { id } } """ expected = {"node": None} result = graphql_sync(graphql_schema, query) assert not result.errors assert result.data == expected def test_have_correct_node_interface(): query = """ { __type(name: "Node") { name kind fields { name type { kind ofType { name kind } } } } } """ expected = { "__type": { "name": "Node", "kind": "INTERFACE", "fields": [ { "name": "id", "type": { "kind": "NON_NULL", "ofType": {"name": "ID", "kind": "SCALAR"}, }, } ], } } result = graphql_sync(graphql_schema, query) assert not result.errors assert result.data == expected def test_has_correct_node_root_field(): query = """ { __schema { queryType { fields { name type { name kind } args { name type { kind ofType { name kind } } } } } } } """ expected = { "__schema": { "queryType": { "fields": [ { "name": "node", "type": {"name": "Node", "kind": "INTERFACE"}, "args": [ { "name": "id", "type": { "kind": "NON_NULL", "ofType": {"name": "ID", "kind": "SCALAR"}, }, } ], } ] } } } result = graphql_sync(graphql_schema, query) assert not result.errors assert result.data == expected python-graphene-3.4.3/graphene/test/000077500000000000000000000000001471374454500174175ustar00rootroot00000000000000python-graphene-3.4.3/graphene/test/__init__.py000066400000000000000000000024061471374454500215320ustar00rootroot00000000000000from graphql.error import GraphQLError from graphene.types.schema import Schema def default_format_error(error): if isinstance(error, GraphQLError): return error.formatted return {"message": str(error)} def format_execution_result(execution_result, format_error): if execution_result: response = {} if execution_result.errors: response["errors"] = [format_error(e) for e in execution_result.errors] response["data"] = execution_result.data return response 
class Client: def __init__(self, schema, format_error=None, **execute_options): assert isinstance(schema, Schema) self.schema = schema self.execute_options = execute_options self.format_error = format_error or default_format_error def format_result(self, result): return format_execution_result(result, self.format_error) def execute(self, *args, **kwargs): executed = self.schema.execute(*args, **dict(self.execute_options, **kwargs)) return self.format_result(executed) async def execute_async(self, *args, **kwargs): executed = await self.schema.execute_async( *args, **dict(self.execute_options, **kwargs) ) return self.format_result(executed) python-graphene-3.4.3/graphene/tests/000077500000000000000000000000001471374454500176025ustar00rootroot00000000000000python-graphene-3.4.3/graphene/tests/__init__.py000066400000000000000000000000001471374454500217010ustar00rootroot00000000000000python-graphene-3.4.3/graphene/tests/issues/000077500000000000000000000000001471374454500211155ustar00rootroot00000000000000python-graphene-3.4.3/graphene/tests/issues/__init__.py000066400000000000000000000000001471374454500232140ustar00rootroot00000000000000python-graphene-3.4.3/graphene/tests/issues/test_1293.py000066400000000000000000000020521471374454500231230ustar00rootroot00000000000000# https://github.com/graphql-python/graphene/issues/1293 from datetime import datetime, timezone import graphene from graphql.utilities import print_schema class Filters(graphene.InputObjectType): datetime_after = graphene.DateTime( required=False, default_value=datetime.fromtimestamp(1434549820.776, timezone.utc), ) datetime_before = graphene.DateTime( required=False, default_value=datetime.fromtimestamp(1444549820.776, timezone.utc), ) class SetDatetime(graphene.Mutation): class Arguments: filters = Filters(required=True) ok = graphene.Boolean() def mutate(root, info, filters): return SetDatetime(ok=True) class Query(graphene.ObjectType): goodbye = graphene.String() class 
Mutations(graphene.ObjectType): set_datetime = SetDatetime.Field() def test_schema_printable_with_default_datetime_value(): schema = graphene.Schema(query=Query, mutation=Mutations) schema_str = print_schema(schema.graphql_schema) assert schema_str, "empty schema printed" python-graphene-3.4.3/graphene/tests/issues/test_1394.py000066400000000000000000000016631471374454500231340ustar00rootroot00000000000000from ...types import ObjectType, Schema, String, NonNull class Query(ObjectType): hello = String(input=NonNull(String)) def resolve_hello(self, info, input): if input == "nothing": return None return f"Hello {input}!" schema = Schema(query=Query) def test_required_input_provided(): """ Test that a required argument works when provided. """ input_value = "Potato" result = schema.execute('{ hello(input: "%s") }' % input_value) assert not result.errors assert result.data == {"hello": "Hello Potato!"} def test_required_input_missing(): """ Test that a required argument raised an error if not provided. """ result = schema.execute("{ hello }") assert result.errors assert len(result.errors) == 1 assert ( result.errors[0].message == "Field 'hello' argument 'input' of type 'String!' is required, but it was not provided." 
) python-graphene-3.4.3/graphene/tests/issues/test_1419.py000066400000000000000000000030611471374454500231240ustar00rootroot00000000000000import pytest from ...types.base64 import Base64 from ...types.datetime import Date, DateTime from ...types.decimal import Decimal from ...types.generic import GenericScalar from ...types.json import JSONString from ...types.objecttype import ObjectType from ...types.scalars import ID, BigInt, Boolean, Float, Int, String from ...types.schema import Schema from ...types.uuid import UUID @pytest.mark.parametrize( "input_type,input_value", [ (Date, '"2022-02-02"'), (GenericScalar, '"foo"'), (Int, "1"), (BigInt, "12345678901234567890"), (Float, "1.1"), (String, '"foo"'), (Boolean, "true"), (ID, "1"), (DateTime, '"2022-02-02T11:11:11"'), (UUID, '"cbebbc62-758e-4f75-a890-bc73b5017d81"'), (Decimal, '"1.1"'), (JSONString, '"{\\"key\\":\\"foo\\",\\"value\\":\\"bar\\"}"'), (Base64, '"Q2hlbG8gd29ycmxkCg=="'), ], ) def test_parse_literal_with_variables(input_type, input_value): # input_b needs to be evaluated as literal while the variable dict for # input_a is passed along. 
class Query(ObjectType): generic = GenericScalar(input_a=GenericScalar(), input_b=input_type()) def resolve_generic(self, info, input_a=None, input_b=None): return input schema = Schema(query=Query) query = f""" query Test($a: GenericScalar){{ generic(inputA: $a, inputB: {input_value}) }} """ result = schema.execute( query, variables={"a": "bar"}, ) assert not result.errors python-graphene-3.4.3/graphene/tests/issues/test_313.py000066400000000000000000000021221471374454500230310ustar00rootroot00000000000000# https://github.com/graphql-python/graphene/issues/313 import graphene class Query(graphene.ObjectType): rand = graphene.String() class Success(graphene.ObjectType): yeah = graphene.String() class Error(graphene.ObjectType): message = graphene.String() class CreatePostResult(graphene.Union): class Meta: types = [Success, Error] class CreatePost(graphene.Mutation): class Arguments: text = graphene.String(required=True) result = graphene.Field(CreatePostResult) def mutate(self, info, text): result = Success(yeah="yeah") return CreatePost(result=result) class Mutations(graphene.ObjectType): create_post = CreatePost.Field() # tests.py def test_create_post(): query_string = """ mutation { createPost(text: "Try this out") { result { __typename } } } """ schema = graphene.Schema(query=Query, mutation=Mutations) result = schema.execute(query_string) assert not result.errors assert result.data["createPost"]["result"]["__typename"] == "Success" python-graphene-3.4.3/graphene/tests/issues/test_356.py000066400000000000000000000012721471374454500230450ustar00rootroot00000000000000# https://github.com/graphql-python/graphene/issues/356 from pytest import raises import graphene from graphene import relay class SomeTypeOne(graphene.ObjectType): pass class SomeTypeTwo(graphene.ObjectType): pass class MyUnion(graphene.Union): class Meta: types = (SomeTypeOne, SomeTypeTwo) def test_issue(): class Query(graphene.ObjectType): things = relay.ConnectionField(MyUnion) with 
raises(Exception) as exc_info: graphene.Schema(query=Query) assert str(exc_info.value) == ( "Query fields cannot be resolved." " IterableConnectionField type has to be a subclass of Connection." ' Received "MyUnion".' ) python-graphene-3.4.3/graphene/tests/issues/test_425.py000066400000000000000000000060351471374454500230440ustar00rootroot00000000000000# https://github.com/graphql-python/graphene/issues/425 # Adapted for Graphene 2.0 from graphene.types.enum import Enum, EnumOptions from graphene.types.inputobjecttype import InputObjectType from graphene.types.objecttype import ObjectType, ObjectTypeOptions # ObjectType class SpecialOptions(ObjectTypeOptions): other_attr = None class SpecialObjectType(ObjectType): @classmethod def __init_subclass_with_meta__(cls, other_attr="default", **options): _meta = SpecialOptions(cls) _meta.other_attr = other_attr super(SpecialObjectType, cls).__init_subclass_with_meta__( _meta=_meta, **options ) def test_special_objecttype_could_be_subclassed(): class MyType(SpecialObjectType): class Meta: other_attr = "yeah!" assert MyType._meta.other_attr == "yeah!" def test_special_objecttype_could_be_subclassed_default(): class MyType(SpecialObjectType): pass assert MyType._meta.other_attr == "default" def test_special_objecttype_inherit_meta_options(): class MyType(SpecialObjectType): pass assert MyType._meta.name == "MyType" assert MyType._meta.default_resolver is None assert MyType._meta.interfaces == () # InputObjectType class SpecialInputObjectTypeOptions(ObjectTypeOptions): other_attr = None class SpecialInputObjectType(InputObjectType): @classmethod def __init_subclass_with_meta__(cls, other_attr="default", **options): _meta = SpecialInputObjectTypeOptions(cls) _meta.other_attr = other_attr super(SpecialInputObjectType, cls).__init_subclass_with_meta__( _meta=_meta, **options ) def test_special_inputobjecttype_could_be_subclassed(): class MyInputObjectType(SpecialInputObjectType): class Meta: other_attr = "yeah!" 
assert MyInputObjectType._meta.other_attr == "yeah!" def test_special_inputobjecttype_could_be_subclassed_default(): class MyInputObjectType(SpecialInputObjectType): pass assert MyInputObjectType._meta.other_attr == "default" def test_special_inputobjecttype_inherit_meta_options(): class MyInputObjectType(SpecialInputObjectType): pass assert MyInputObjectType._meta.name == "MyInputObjectType" # Enum class SpecialEnumOptions(EnumOptions): other_attr = None class SpecialEnum(Enum): @classmethod def __init_subclass_with_meta__(cls, other_attr="default", **options): _meta = SpecialEnumOptions(cls) _meta.other_attr = other_attr super(SpecialEnum, cls).__init_subclass_with_meta__(_meta=_meta, **options) def test_special_enum_could_be_subclassed(): class MyEnum(SpecialEnum): class Meta: other_attr = "yeah!" assert MyEnum._meta.other_attr == "yeah!" def test_special_enum_could_be_subclassed_default(): class MyEnum(SpecialEnum): pass assert MyEnum._meta.other_attr == "default" def test_special_enum_inherit_meta_options(): class MyEnum(SpecialEnum): pass assert MyEnum._meta.name == "MyEnum" python-graphene-3.4.3/graphene/tests/issues/test_490.py000066400000000000000000000010041471374454500230350ustar00rootroot00000000000000# https://github.com/graphql-python/graphene/issues/313 import graphene class Query(graphene.ObjectType): some_field = graphene.String(from_=graphene.String(name="from")) def resolve_some_field(self, info, from_=None): return from_ def test_issue(): query_string = """ query myQuery { someField(from: "Oh") } """ schema = graphene.Schema(query=Query) result = schema.execute(query_string) assert not result.errors assert result.data["someField"] == "Oh" python-graphene-3.4.3/graphene/tests/issues/test_720.py000066400000000000000000000022551471374454500230420ustar00rootroot00000000000000# https://github.com/graphql-python/graphene/issues/720 # InputObjectTypes overwrite the "fields" attribute of the provided # _meta object, so even if dynamic fields are 
provided with a standard # InputObjectTypeOptions, they are ignored. import graphene class MyInputClass(graphene.InputObjectType): @classmethod def __init_subclass_with_meta__( cls, container=None, _meta=None, fields=None, **options ): if _meta is None: _meta = graphene.types.inputobjecttype.InputObjectTypeOptions(cls) _meta.fields = fields super(MyInputClass, cls).__init_subclass_with_meta__( container=container, _meta=_meta, **options ) class MyInput(MyInputClass): class Meta: fields = dict(x=graphene.Field(graphene.Int)) class Query(graphene.ObjectType): myField = graphene.Field(graphene.String, input=graphene.Argument(MyInput)) def resolve_myField(parent, info, input): return "ok" def test_issue(): query_string = """ query myQuery { myField(input: {x: 1}) } """ schema = graphene.Schema(query=Query) result = schema.execute(query_string) assert not result.errors python-graphene-3.4.3/graphene/tests/issues/test_881.py000066400000000000000000000012271471374454500230500ustar00rootroot00000000000000import pickle from ...types.enum import Enum class PickleEnum(Enum): # is defined outside of test because pickle unable to dump class inside ot pytest function A = "a" B = 1 def test_enums_pickling(): a = PickleEnum.A pickled = pickle.dumps(a) restored = pickle.loads(pickled) assert type(a) is type(restored) assert a == restored assert a.value == restored.value assert a.name == restored.name b = PickleEnum.B pickled = pickle.dumps(b) restored = pickle.loads(pickled) assert type(a) is type(restored) assert b == restored assert b.value == restored.value assert b.name == restored.name python-graphene-3.4.3/graphene/tests/issues/test_956.py000066400000000000000000000005141471374454500230510ustar00rootroot00000000000000import graphene def test_issue(): options = {"description": "This my enum", "deprecation_reason": "For the funs"} new_enum = graphene.Enum("MyEnum", [("some", "data")], **options) assert new_enum._meta.description == options["description"] assert 
new_enum._meta.deprecation_reason == options["deprecation_reason"] python-graphene-3.4.3/graphene/types/000077500000000000000000000000001471374454500176045ustar00rootroot00000000000000python-graphene-3.4.3/graphene/types/__init__.py000066400000000000000000000021421471374454500217140ustar00rootroot00000000000000from graphql import GraphQLResolveInfo as ResolveInfo from .argument import Argument from .base64 import Base64 from .context import Context from .datetime import Date, DateTime, Time from .decimal import Decimal from .dynamic import Dynamic from .enum import Enum from .field import Field from .inputfield import InputField from .inputobjecttype import InputObjectType from .interface import Interface from .json import JSONString from .mutation import Mutation from .objecttype import ObjectType from .scalars import ID, BigInt, Boolean, Float, Int, Scalar, String from .schema import Schema from .structures import List, NonNull from .union import Union from .uuid import UUID __all__ = [ "Argument", "Base64", "BigInt", "Boolean", "Context", "Date", "DateTime", "Decimal", "Dynamic", "Enum", "Field", "Float", "ID", "InputField", "InputObjectType", "Int", "Interface", "JSONString", "List", "Mutation", "NonNull", "ObjectType", "ResolveInfo", "Scalar", "Schema", "String", "Time", "UUID", "Union", ] python-graphene-3.4.3/graphene/types/argument.py000066400000000000000000000101441471374454500220000ustar00rootroot00000000000000from itertools import chain from graphql import Undefined from .dynamic import Dynamic from .mountedtype import MountedType from .structures import NonNull from .utils import get_type class Argument(MountedType): """ Makes an Argument available on a Field in the GraphQL schema. Arguments will be parsed and provided to resolver methods for fields as keyword arguments. All ``arg`` and ``**extra_args`` for a ``graphene.Field`` are implicitly mounted as Argument using the below parameters. .. 
code:: python from graphene import String, Boolean, Argument age = String( # Boolean implicitly mounted as Argument dog_years=Boolean(description="convert to dog years"), # Boolean explicitly mounted as Argument decades=Argument(Boolean, default_value=False), ) args: type (class for a graphene.UnmountedType): must be a class (not an instance) of an unmounted graphene type (ex. scalar or object) which is used for the type of this argument in the GraphQL schema. required (optional, bool): indicates this argument as not null in the graphql schema. Same behavior as graphene.NonNull. Default False. name (optional, str): the name of the GraphQL argument. Defaults to parameter name. description (optional, str): the description of the GraphQL argument in the schema. default_value (optional, Any): The value to be provided if the user does not set this argument in the operation. deprecation_reason (optional, str): Setting this value indicates that the argument is depreciated and may provide instruction or reason on how for clients to proceed. Cannot be set if the argument is required (see spec). """ def __init__( self, type_, default_value=Undefined, deprecation_reason=None, description=None, name=None, required=False, _creation_counter=None, ): super(Argument, self).__init__(_creation_counter=_creation_counter) if required: assert ( deprecation_reason is None ), f"Argument {name} is required, cannot deprecate it." 
type_ = NonNull(type_) self.name = name self._type = type_ self.default_value = default_value self.description = description self.deprecation_reason = deprecation_reason @property def type(self): return get_type(self._type) def __eq__(self, other): return isinstance(other, Argument) and ( self.name == other.name and self.type == other.type and self.default_value == other.default_value and self.description == other.description and self.deprecation_reason == other.deprecation_reason ) def to_arguments(args, extra_args=None): from .unmountedtype import UnmountedType from .field import Field from .inputfield import InputField if extra_args: extra_args = sorted(extra_args.items(), key=lambda f: f[1]) else: extra_args = [] iter_arguments = chain(args.items(), extra_args) arguments = {} for default_name, arg in iter_arguments: if isinstance(arg, Dynamic): arg = arg.get_type() if arg is None: # If the Dynamic type returned None # then we skip the Argument continue if isinstance(arg, UnmountedType): arg = Argument.mounted(arg) if isinstance(arg, (InputField, Field)): raise ValueError( f"Expected {default_name} to be Argument, " f"but received {type(arg).__name__}. Try using Argument({arg.type})." ) if not isinstance(arg, Argument): raise ValueError(f'Unknown argument "{default_name}".') arg_name = default_name or arg.name assert ( arg_name not in arguments ), f'More than one Argument have same name "{arg_name}".' 
arguments[arg_name] = arg return arguments python-graphene-3.4.3/graphene/types/base.py000066400000000000000000000025521471374454500210740ustar00rootroot00000000000000from typing import Type, Optional from ..utils.subclass_with_meta import SubclassWithMeta, SubclassWithMeta_Meta from ..utils.trim_docstring import trim_docstring class BaseOptions: name: Optional[str] = None description: Optional[str] = None _frozen: bool = False def __init__(self, class_type: Type): self.class_type: Type = class_type def freeze(self): self._frozen = True def __setattr__(self, name, value): if not self._frozen: super(BaseOptions, self).__setattr__(name, value) else: raise Exception(f"Can't modify frozen Options {self}") def __repr__(self): return f"<{self.__class__.__name__} name={repr(self.name)}>" BaseTypeMeta = SubclassWithMeta_Meta class BaseType(SubclassWithMeta): @classmethod def create_type(cls, class_name, **options): return type(class_name, (cls,), {"Meta": options}) @classmethod def __init_subclass_with_meta__( cls, name=None, description=None, _meta=None, **_kwargs ): assert "_meta" not in cls.__dict__, "Can't assign meta directly" if not _meta: return _meta.name = name or cls.__name__ _meta.description = description or trim_docstring(cls.__doc__) _meta.freeze() cls._meta = _meta super(BaseType, cls).__init_subclass_with_meta__() python-graphene-3.4.3/graphene/types/base64.py000066400000000000000000000025301471374454500212420ustar00rootroot00000000000000from binascii import Error as _Error from base64 import b64decode, b64encode from graphql.error import GraphQLError from graphql.language import StringValueNode, print_ast from .scalars import Scalar class Base64(Scalar): """ The `Base64` scalar type represents a base64-encoded String. 
""" @staticmethod def serialize(value): if not isinstance(value, bytes): if isinstance(value, str): value = value.encode("utf-8") else: value = str(value).encode("utf-8") return b64encode(value).decode("utf-8") @classmethod def parse_literal(cls, node, _variables=None): if not isinstance(node, StringValueNode): raise GraphQLError( f"Base64 cannot represent non-string value: {print_ast(node)}" ) return cls.parse_value(node.value) @staticmethod def parse_value(value): if not isinstance(value, bytes): if not isinstance(value, str): raise GraphQLError( f"Base64 cannot represent non-string value: {repr(value)}" ) value = value.encode("utf-8") try: return b64decode(value, validate=True).decode("utf-8") except _Error: raise GraphQLError(f"Base64 cannot decode value: {repr(value)}") python-graphene-3.4.3/graphene/types/context.py000066400000000000000000000014271471374454500216460ustar00rootroot00000000000000class Context: """ Context can be used to make a convenient container for attributes to provide for execution for resolvers of a GraphQL operation like a query. .. code:: python from graphene import Context context = Context(loaders=build_dataloaders(), request=my_web_request) schema.execute('{ hello(name: "world") }', context=context) def resolve_hello(parent, info, name): info.context.request # value set in Context info.context.loaders # value set in Context # ... args: **params (Dict[str, Any]): values to make available on Context instance as attributes. 
""" def __init__(self, **params): for key, value in params.items(): setattr(self, key, value) python-graphene-3.4.3/graphene/types/datetime.py000066400000000000000000000066771471374454500217720ustar00rootroot00000000000000import datetime from dateutil.parser import isoparse from graphql.error import GraphQLError from graphql.language import StringValueNode, print_ast from .scalars import Scalar class Date(Scalar): """ The `Date` scalar type represents a Date value as specified by [iso8601](https://en.wikipedia.org/wiki/ISO_8601). """ @staticmethod def serialize(date): if isinstance(date, datetime.datetime): date = date.date() if not isinstance(date, datetime.date): raise GraphQLError(f"Date cannot represent value: {repr(date)}") return date.isoformat() @classmethod def parse_literal(cls, node, _variables=None): if not isinstance(node, StringValueNode): raise GraphQLError( f"Date cannot represent non-string value: {print_ast(node)}" ) return cls.parse_value(node.value) @staticmethod def parse_value(value): if isinstance(value, datetime.date): return value if not isinstance(value, str): raise GraphQLError(f"Date cannot represent non-string value: {repr(value)}") try: return datetime.date.fromisoformat(value) except ValueError: raise GraphQLError(f"Date cannot represent value: {repr(value)}") class DateTime(Scalar): """ The `DateTime` scalar type represents a DateTime value as specified by [iso8601](https://en.wikipedia.org/wiki/ISO_8601). 
""" @staticmethod def serialize(dt): if not isinstance(dt, (datetime.datetime, datetime.date)): raise GraphQLError(f"DateTime cannot represent value: {repr(dt)}") return dt.isoformat() @classmethod def parse_literal(cls, node, _variables=None): if not isinstance(node, StringValueNode): raise GraphQLError( f"DateTime cannot represent non-string value: {print_ast(node)}" ) return cls.parse_value(node.value) @staticmethod def parse_value(value): if isinstance(value, datetime.datetime): return value if not isinstance(value, str): raise GraphQLError( f"DateTime cannot represent non-string value: {repr(value)}" ) try: return isoparse(value) except ValueError: raise GraphQLError(f"DateTime cannot represent value: {repr(value)}") class Time(Scalar): """ The `Time` scalar type represents a Time value as specified by [iso8601](https://en.wikipedia.org/wiki/ISO_8601). """ @staticmethod def serialize(time): if not isinstance(time, datetime.time): raise GraphQLError(f"Time cannot represent value: {repr(time)}") return time.isoformat() @classmethod def parse_literal(cls, node, _variables=None): if not isinstance(node, StringValueNode): raise GraphQLError( f"Time cannot represent non-string value: {print_ast(node)}" ) return cls.parse_value(node.value) @classmethod def parse_value(cls, value): if isinstance(value, datetime.time): return value if not isinstance(value, str): raise GraphQLError(f"Time cannot represent non-string value: {repr(value)}") try: return datetime.time.fromisoformat(value) except ValueError: raise GraphQLError(f"Time cannot represent value: {repr(value)}") python-graphene-3.4.3/graphene/types/decimal.py000066400000000000000000000015351471374454500215600ustar00rootroot00000000000000from decimal import Decimal as _Decimal from graphql import Undefined from graphql.language.ast import StringValueNode, IntValueNode from .scalars import Scalar class Decimal(Scalar): """ The `Decimal` scalar type represents a python Decimal. 
""" @staticmethod def serialize(dec): if isinstance(dec, str): dec = _Decimal(dec) assert isinstance( dec, _Decimal ), f'Received not compatible Decimal "{repr(dec)}"' return str(dec) @classmethod def parse_literal(cls, node, _variables=None): if isinstance(node, (StringValueNode, IntValueNode)): return cls.parse_value(node.value) return Undefined @staticmethod def parse_value(value): try: return _Decimal(value) except Exception: return Undefined python-graphene-3.4.3/graphene/types/definitions.py000066400000000000000000000030731471374454500224740ustar00rootroot00000000000000from enum import Enum as PyEnum from graphql import ( GraphQLEnumType, GraphQLInputObjectType, GraphQLInterfaceType, GraphQLObjectType, GraphQLScalarType, GraphQLUnionType, ) class GrapheneGraphQLType: """ A class for extending the base GraphQLType with the related graphene_type """ def __init__(self, *args, **kwargs): self.graphene_type = kwargs.pop("graphene_type") super(GrapheneGraphQLType, self).__init__(*args, **kwargs) def __copy__(self): result = GrapheneGraphQLType(graphene_type=self.graphene_type) result.__dict__.update(self.__dict__) return result class GrapheneInterfaceType(GrapheneGraphQLType, GraphQLInterfaceType): pass class GrapheneUnionType(GrapheneGraphQLType, GraphQLUnionType): pass class GrapheneObjectType(GrapheneGraphQLType, GraphQLObjectType): pass class GrapheneScalarType(GrapheneGraphQLType, GraphQLScalarType): pass class GrapheneEnumType(GrapheneGraphQLType, GraphQLEnumType): def serialize(self, value): if not isinstance(value, PyEnum): enum = self.graphene_type._meta.enum try: # Try and get enum by value value = enum(value) except ValueError: # Try and get enum by name try: value = enum[value] except KeyError: pass return super(GrapheneEnumType, self).serialize(value) class GrapheneInputObjectType(GrapheneGraphQLType, GraphQLInputObjectType): pass 
python-graphene-3.4.3/graphene/types/dynamic.py000066400000000000000000000012431471374454500216020ustar00rootroot00000000000000import inspect from functools import partial from .mountedtype import MountedType class Dynamic(MountedType): """ A Dynamic Type let us get the type in runtime when we generate the schema. So we can have lazy fields. """ def __init__(self, type_, with_schema=False, _creation_counter=None): super(Dynamic, self).__init__(_creation_counter=_creation_counter) assert inspect.isfunction(type_) or isinstance(type_, partial) self.type = type_ self.with_schema = with_schema def get_type(self, schema=None): if schema and self.with_schema: return self.type(schema=schema) return self.type() python-graphene-3.4.3/graphene/types/enum.py000066400000000000000000000073521471374454500211310ustar00rootroot00000000000000from enum import Enum as PyEnum from graphene.utils.subclass_with_meta import SubclassWithMeta_Meta from .base import BaseOptions, BaseType from .unmountedtype import UnmountedType def eq_enum(self, other): if isinstance(other, self.__class__): return self is other return self.value is other def hash_enum(self): return hash(self.name) EnumType = type(PyEnum) class EnumOptions(BaseOptions): enum = None # type: Enum deprecation_reason = None class EnumMeta(SubclassWithMeta_Meta): def __new__(cls, name_, bases, classdict, **options): enum_members = dict(classdict, __eq__=eq_enum, __hash__=hash_enum) # We remove the Meta attribute from the class to not collide # with the enum values. 
enum_members.pop("Meta", None) enum = PyEnum(cls.__name__, enum_members) obj = SubclassWithMeta_Meta.__new__( cls, name_, bases, dict(classdict, __enum__=enum), **options ) globals()[name_] = obj.__enum__ return obj def get(cls, value): return cls._meta.enum(value) def __getitem__(cls, value): return cls._meta.enum[value] def __prepare__(name, bases, **kwargs): # noqa: N805 return {} def __call__(cls, *args, **kwargs): # noqa: N805 if cls is Enum: description = kwargs.pop("description", None) deprecation_reason = kwargs.pop("deprecation_reason", None) return cls.from_enum( PyEnum(*args, **kwargs), description=description, deprecation_reason=deprecation_reason, ) return super(EnumMeta, cls).__call__(*args, **kwargs) # return cls._meta.enum(*args, **kwargs) def __iter__(cls): return cls._meta.enum.__iter__() def from_enum(cls, enum, name=None, description=None, deprecation_reason=None): # noqa: N805 name = name or enum.__name__ description = description or enum.__doc__ or "An enumeration." meta_dict = { "enum": enum, "description": description, "deprecation_reason": deprecation_reason, } meta_class = type("Meta", (object,), meta_dict) return type(name, (Enum,), {"Meta": meta_class}) class Enum(UnmountedType, BaseType, metaclass=EnumMeta): """ Enum type definition Defines a static set of values that can be provided as a Field, Argument or InputField. .. code:: python from graphene import Enum class NameFormat(Enum): FIRST_LAST = "first_last" LAST_FIRST = "last_first" Meta: enum (optional, Enum): Python enum to use as a base for GraphQL Enum. name (optional, str): Name of the GraphQL type (must be unique in schema). Defaults to class name. description (optional, str): Description of the GraphQL type in the schema. Defaults to class docstring. deprecation_reason (optional, str): Setting this value indicates that the enum is depreciated and may provide instruction or reason on how for clients to proceed. 
""" @classmethod def __init_subclass_with_meta__(cls, enum=None, _meta=None, **options): if not _meta: _meta = EnumOptions(cls) _meta.enum = enum or cls.__enum__ _meta.deprecation_reason = options.pop("deprecation_reason", None) for key, value in _meta.enum.__members__.items(): setattr(cls, key, value) super(Enum, cls).__init_subclass_with_meta__(_meta=_meta, **options) @classmethod def get_type(cls): """ This function is called when the unmounted type (Enum instance) is mounted (as a Field, InputField or Argument) """ return cls python-graphene-3.4.3/graphene/types/field.py000066400000000000000000000125401471374454500212430ustar00rootroot00000000000000import inspect from collections.abc import Mapping from functools import partial from .argument import Argument, to_arguments from .mountedtype import MountedType from .resolver import default_resolver from .structures import NonNull from .unmountedtype import UnmountedType from .utils import get_type from ..utils.deprecated import warn_deprecation base_type = type def source_resolver(source, root, info, **args): resolved = default_resolver(source, None, root, info, **args) if inspect.isfunction(resolved) or inspect.ismethod(resolved): return resolved() return resolved class Field(MountedType): """ Makes a field available on an ObjectType in the GraphQL schema. Any type can be mounted as a Field: - Object Type - Scalar Type - Enum - Interface - Union All class attributes of ``graphene.ObjectType`` are implicitly mounted as Field using the below arguments. .. code:: python class Person(ObjectType): first_name = graphene.String(required=True) # implicitly mounted as Field last_name = graphene.Field(String, description='Surname') # explicitly mounted as Field args: type (class for a graphene.UnmountedType): Must be a class (not an instance) of an unmounted graphene type (ex. scalar or object) which is used for the type of this field in the GraphQL schema. 
You can provide a dotted module import path (string) to the class instead of the class itself (e.g. to avoid circular import issues). args (optional, Dict[str, graphene.Argument]): Arguments that can be input to the field. Prefer to use ``**extra_args``, unless you use an argument name that clashes with one of the Field arguments presented here (see :ref:`example`). resolver (optional, Callable): A function to get the value for a Field from the parent value object. If not set, the default resolver method for the schema is used. source (optional, str): attribute name to resolve for this field from the parent value object. Alternative to resolver (cannot set both source and resolver). deprecation_reason (optional, str): Setting this value indicates that the field is depreciated and may provide instruction or reason on how for clients to proceed. required (optional, bool): indicates this field as not null in the graphql schema. Same behavior as graphene.NonNull. Default False. name (optional, str): the name of the GraphQL field (must be unique in a type). Defaults to attribute name. description (optional, str): the description of the GraphQL field in the schema. default_value (optional, Any): Default value to resolve if none set from schema. **extra_args (optional, Dict[str, Union[graphene.Argument, graphene.UnmountedType]): any additional arguments to mount on the field. """ def __init__( self, type_, args=None, resolver=None, source=None, deprecation_reason=None, name=None, description=None, required=False, _creation_counter=None, default_value=None, **extra_args, ): super(Field, self).__init__(_creation_counter=_creation_counter) assert not args or isinstance( args, Mapping ), f'Arguments in a field have to be a mapping, received "{args}".' assert not ( source and resolver ), "A Field cannot have a source and a resolver in at the same time." assert not callable( default_value ), f'The default value can not be a function but received "{base_type(default_value)}".' 
if required: type_ = NonNull(type_) # Check if name is actually an argument of the field if isinstance(name, (Argument, UnmountedType)): extra_args["name"] = name name = None # Check if source is actually an argument of the field if isinstance(source, (Argument, UnmountedType)): extra_args["source"] = source source = None self.name = name self._type = type_ self.args = to_arguments(args or {}, extra_args) if source: resolver = partial(source_resolver, source) self.resolver = resolver self.deprecation_reason = deprecation_reason self.description = description self.default_value = default_value @property def type(self): return get_type(self._type) get_resolver = None def wrap_resolve(self, parent_resolver): """ Wraps a function resolver, using the ObjectType resolve_{FIELD_NAME} (parent_resolver) if the Field definition has no resolver. """ if self.get_resolver is not None: warn_deprecation( "The get_resolver method is being deprecated, please rename it to wrap_resolve." ) return self.get_resolver(parent_resolver) return self.resolver or parent_resolver def wrap_subscribe(self, parent_subscribe): """ Wraps a function subscribe, using the ObjectType subscribe_{FIELD_NAME} (parent_subscribe) if the Field definition has no subscribe. """ return parent_subscribe python-graphene-3.4.3/graphene/types/generic.py000066400000000000000000000024211471374454500215710ustar00rootroot00000000000000from graphql.language.ast import ( BooleanValueNode, FloatValueNode, IntValueNode, ListValueNode, ObjectValueNode, StringValueNode, ) from graphene.types.scalars import MAX_INT, MIN_INT from .scalars import Scalar class GenericScalar(Scalar): """ The `GenericScalar` scalar type represents a generic GraphQL scalar value that could be: String, Boolean, Int, Float, List or Object. 
""" @staticmethod def identity(value): return value serialize = identity parse_value = identity @staticmethod def parse_literal(ast, _variables=None): if isinstance(ast, (StringValueNode, BooleanValueNode)): return ast.value elif isinstance(ast, IntValueNode): num = int(ast.value) if MIN_INT <= num <= MAX_INT: return num elif isinstance(ast, FloatValueNode): return float(ast.value) elif isinstance(ast, ListValueNode): return [GenericScalar.parse_literal(value) for value in ast.values] elif isinstance(ast, ObjectValueNode): return { field.name.value: GenericScalar.parse_literal(field.value) for field in ast.fields } else: return None python-graphene-3.4.3/graphene/types/inputfield.py000066400000000000000000000053001471374454500223170ustar00rootroot00000000000000from graphql import Undefined from .mountedtype import MountedType from .structures import NonNull from .utils import get_type class InputField(MountedType): """ Makes a field available on an ObjectType in the GraphQL schema. Any type can be mounted as a Input Field except Interface and Union: - Object Type - Scalar Type - Enum Input object types also can't have arguments on their input fields, unlike regular ``graphene.Field``. All class attributes of ``graphene.InputObjectType`` are implicitly mounted as InputField using the below arguments. .. code:: python from graphene import InputObjectType, String, InputField class Person(InputObjectType): # implicitly mounted as Input Field first_name = String(required=True) # explicitly mounted as Input Field last_name = InputField(String, description="Surname") args: type (class for a graphene.UnmountedType): Must be a class (not an instance) of an unmounted graphene type (ex. scalar or object) which is used for the type of this field in the GraphQL schema. name (optional, str): Name of the GraphQL input field (must be unique in a type). Defaults to attribute name. 
default_value (optional, Any): Default value to use as input if none set in user operation ( query, mutation, etc.). deprecation_reason (optional, str): Setting this value indicates that the field is depreciated and may provide instruction or reason on how for clients to proceed. description (optional, str): Description of the GraphQL field in the schema. required (optional, bool): Indicates this input field as not null in the graphql schema. Raises a validation error if argument not provided. Same behavior as graphene.NonNull. Default False. **extra_args (optional, Dict): Not used. """ def __init__( self, type_, name=None, default_value=Undefined, deprecation_reason=None, description=None, required=False, _creation_counter=None, **extra_args, ): super(InputField, self).__init__(_creation_counter=_creation_counter) self.name = name if required: assert ( deprecation_reason is None ), f"InputField {name} is required, cannot deprecate it." type_ = NonNull(type_) self._type = type_ self.deprecation_reason = deprecation_reason self.default_value = default_value self.description = description @property def type(self): return get_type(self._type) python-graphene-3.4.3/graphene/types/inputobjecttype.py000066400000000000000000000111531471374454500234070ustar00rootroot00000000000000from typing import TYPE_CHECKING from .base import BaseOptions, BaseType from .inputfield import InputField from .unmountedtype import UnmountedType from .utils import yank_fields_from_attrs # For static type checking with type checker if TYPE_CHECKING: from typing import Dict, Callable # NOQA class InputObjectTypeOptions(BaseOptions): fields = None # type: Dict[str, InputField] container = None # type: InputObjectTypeContainer # Currently in Graphene, we get a `None` whenever we access an (optional) field that was not set in an InputObjectType # using the InputObjectType. dot access syntax. 
This is ambiguous, because in this current (Graphene # historical) arrangement, we cannot distinguish between a field not being set and a field being set to None. # At the same time, we shouldn't break existing code that expects a `None` when accessing a field that was not set. _INPUT_OBJECT_TYPE_DEFAULT_VALUE = None # To mitigate this, we provide the function `set_input_object_type_default_value` to allow users to change the default # value returned in non-specified fields in InputObjectType to another meaningful sentinel value (e.g. Undefined) # if they want to. This way, we can keep code that expects a `None` working while we figure out a better solution (or # a well-documented breaking change) for this issue. def set_input_object_type_default_value(default_value): """ Change the sentinel value returned by non-specified fields in an InputObjectType Useful to differentiate between a field not being set and a field being set to None by using a sentinel value (e.g. Undefined is a good sentinel value for this purpose) This function should be called at the beginning of the app or in some other place where it is guaranteed to be called before any InputObjectType is defined. """ global _INPUT_OBJECT_TYPE_DEFAULT_VALUE _INPUT_OBJECT_TYPE_DEFAULT_VALUE = default_value class InputObjectTypeContainer(dict, BaseType): # type: ignore class Meta: abstract = True def __init__(self, *args, **kwargs): dict.__init__(self, *args, **kwargs) for key in self._meta.fields: setattr(self, key, self.get(key, _INPUT_OBJECT_TYPE_DEFAULT_VALUE)) def __init_subclass__(cls, *args, **kwargs): pass class InputObjectType(UnmountedType, BaseType): """ Input Object Type Definition An input object defines a structured collection of fields which may be supplied to a field argument. Using ``graphene.NonNull`` will ensure that a input value must be provided by the query. All class attributes of ``graphene.InputObjectType`` are implicitly mounted as InputField using the below Meta class options. .. 
code:: python from graphene import InputObjectType, String, InputField class Person(InputObjectType): # implicitly mounted as Input Field first_name = String(required=True) # explicitly mounted as Input Field last_name = InputField(String, description="Surname") The fields on an input object type can themselves refer to input object types, but you can't mix input and output types in your schema. Meta class options (optional): name (str): the name of the GraphQL type (must be unique in schema). Defaults to class name. description (str): the description of the GraphQL type in the schema. Defaults to class docstring. container (class): A class reference for a value object that allows for attribute initialization and access. Default InputObjectTypeContainer. fields (Dict[str, graphene.InputField]): Dictionary of field name to InputField. Not recommended to use (prefer class attributes). """ @classmethod def __init_subclass_with_meta__(cls, container=None, _meta=None, **options): if not _meta: _meta = InputObjectTypeOptions(cls) fields = {} for base in reversed(cls.__mro__): fields.update(yank_fields_from_attrs(base.__dict__, _as=InputField)) if _meta.fields: _meta.fields.update(fields) else: _meta.fields = fields if container is None: container = type(cls.__name__, (InputObjectTypeContainer, cls), {}) _meta.container = container super(InputObjectType, cls).__init_subclass_with_meta__(_meta=_meta, **options) @classmethod def get_type(cls): """ This function is called when the unmounted type (InputObjectType instance) is mounted (as a Field, InputField or Argument) """ return cls python-graphene-3.4.3/graphene/types/interface.py000066400000000000000000000045741471374454500221300ustar00rootroot00000000000000from typing import TYPE_CHECKING from .base import BaseOptions, BaseType from .field import Field from .utils import yank_fields_from_attrs # For static type checking with type checker if TYPE_CHECKING: from typing import Dict, Iterable, Type # NOQA class 
InterfaceOptions(BaseOptions): fields = None # type: Dict[str, Field] interfaces = () # type: Iterable[Type[Interface]] class Interface(BaseType): """ Interface Type Definition When a field can return one of a heterogeneous set of types, a Interface type is used to describe what types are possible, what fields are in common across all types, as well as a function to determine which type is actually used when the field is resolved. .. code:: python from graphene import Interface, String class HasAddress(Interface): class Meta: description = "Address fields" address1 = String() address2 = String() If a field returns an Interface Type, the ambiguous type of the object can be determined using ``resolve_type`` on Interface and an ObjectType with ``Meta.possible_types`` or ``is_type_of``. Meta: name (str): Name of the GraphQL type (must be unique in schema). Defaults to class name. description (str): Description of the GraphQL type in the schema. Defaults to class docstring. fields (Dict[str, graphene.Field]): Dictionary of field name to Field. Not recommended to use (prefer class attributes). 
""" @classmethod def __init_subclass_with_meta__(cls, _meta=None, interfaces=(), **options): if not _meta: _meta = InterfaceOptions(cls) fields = {} for base in reversed(cls.__mro__): fields.update(yank_fields_from_attrs(base.__dict__, _as=Field)) if _meta.fields: _meta.fields.update(fields) else: _meta.fields = fields if not _meta.interfaces: _meta.interfaces = interfaces super(Interface, cls).__init_subclass_with_meta__(_meta=_meta, **options) @classmethod def resolve_type(cls, instance, info): from .objecttype import ObjectType if isinstance(instance, ObjectType): return type(instance) def __init__(self, *args, **kwargs): raise Exception("An Interface cannot be initialized") python-graphene-3.4.3/graphene/types/json.py000066400000000000000000000015231471374454500211300ustar00rootroot00000000000000import json from graphql import Undefined from graphql.language.ast import StringValueNode from .scalars import Scalar class JSONString(Scalar): """ Allows use of a JSON String for input / output from the GraphQL schema. Use of this type is *not recommended* as you lose the benefits of having a defined, static schema (one of the key benefits of GraphQL). 
""" @staticmethod def serialize(dt): return json.dumps(dt) @staticmethod def parse_literal(node, _variables=None): if isinstance(node, StringValueNode): try: return json.loads(node.value) except Exception as error: raise ValueError(f"Badly formed JSONString: {str(error)}") return Undefined @staticmethod def parse_value(value): return json.loads(value) python-graphene-3.4.3/graphene/types/mountedtype.py000066400000000000000000000010531471374454500225320ustar00rootroot00000000000000from ..utils.orderedtype import OrderedType from .unmountedtype import UnmountedType class MountedType(OrderedType): @classmethod def mounted(cls, unmounted): # noqa: N802 """ Mount the UnmountedType instance """ assert isinstance( unmounted, UnmountedType ), f"{cls.__name__} can't mount {repr(unmounted)}" return cls( unmounted.get_type(), *unmounted.args, _creation_counter=unmounted.creation_counter, **unmounted.kwargs, ) python-graphene-3.4.3/graphene/types/mutation.py000066400000000000000000000120071471374454500220160ustar00rootroot00000000000000from typing import TYPE_CHECKING from ..utils.deprecated import warn_deprecation from ..utils.get_unbound_function import get_unbound_function from ..utils.props import props from .field import Field from .objecttype import ObjectType, ObjectTypeOptions from .utils import yank_fields_from_attrs from .interface import Interface # For static type checking with type checker if TYPE_CHECKING: from .argument import Argument # NOQA from typing import Dict, Type, Callable, Iterable # NOQA class MutationOptions(ObjectTypeOptions): arguments = None # type: Dict[str, Argument] output = None # type: Type[ObjectType] resolver = None # type: Callable interfaces = () # type: Iterable[Type[Interface]] class Mutation(ObjectType): """ Object Type Definition (mutation field) Mutation is a convenience type that helps us build a Field which takes Arguments and returns a mutation Output ObjectType. .. 
code:: python import graphene class CreatePerson(graphene.Mutation): class Arguments: name = graphene.String() ok = graphene.Boolean() person = graphene.Field(Person) def mutate(parent, info, name): person = Person(name=name) ok = True return CreatePerson(person=person, ok=ok) class Mutation(graphene.ObjectType): create_person = CreatePerson.Field() Meta class options (optional): output (graphene.ObjectType): Or ``Output`` inner class with attributes on Mutation class. Or attributes from Mutation class. Fields which can be returned from this mutation field. resolver (Callable resolver method): Or ``mutate`` method on Mutation class. Perform data change and return output. arguments (Dict[str, graphene.Argument]): Or ``Arguments`` inner class with attributes on Mutation class. Arguments to use for the mutation Field. name (str): Name of the GraphQL type (must be unique in schema). Defaults to class name. description (str): Description of the GraphQL type in the schema. Defaults to class docstring. interfaces (Iterable[graphene.Interface]): GraphQL interfaces to extend with the payload object. All fields from interface will be included in this object's schema. fields (Dict[str, graphene.Field]): Dictionary of field name to Field. Not recommended to use (prefer class attributes or ``Meta.output``). """ @classmethod def __init_subclass_with_meta__( cls, interfaces=(), resolver=None, output=None, arguments=None, _meta=None, **options, ): if not _meta: _meta = MutationOptions(cls) output = output or getattr(cls, "Output", None) fields = {} for interface in interfaces: assert issubclass( interface, Interface ), f'All interfaces of {cls.__name__} must be a subclass of Interface. Received "{interface}".' 
fields.update(interface._meta.fields) if not output: # If output is defined, we don't need to get the fields fields = {} for base in reversed(cls.__mro__): fields.update(yank_fields_from_attrs(base.__dict__, _as=Field)) output = cls if not arguments: input_class = getattr(cls, "Arguments", None) if not input_class: input_class = getattr(cls, "Input", None) if input_class: warn_deprecation( f"Please use {cls.__name__}.Arguments instead of {cls.__name__}.Input." " Input is now only used in ClientMutationID.\n" "Read more:" " https://github.com/graphql-python/graphene/blob/v2.0.0/UPGRADE-v2.0.md#mutation-input" ) arguments = props(input_class) if input_class else {} if not resolver: mutate = getattr(cls, "mutate", None) assert mutate, "All mutations must define a mutate method in it" resolver = get_unbound_function(mutate) if _meta.fields: _meta.fields.update(fields) else: _meta.fields = fields _meta.interfaces = interfaces _meta.output = output _meta.resolver = resolver _meta.arguments = arguments super(Mutation, cls).__init_subclass_with_meta__(_meta=_meta, **options) @classmethod def Field( cls, name=None, description=None, deprecation_reason=None, required=False ): """Mount instance of mutation Field.""" return Field( cls._meta.output, args=cls._meta.arguments, resolver=cls._meta.resolver, name=name, description=description or cls._meta.description, deprecation_reason=deprecation_reason, required=required, ) python-graphene-3.4.3/graphene/types/objecttype.py000066400000000000000000000132521471374454500223310ustar00rootroot00000000000000from typing import TYPE_CHECKING from .base import BaseOptions, BaseType, BaseTypeMeta from .field import Field from .interface import Interface from .utils import yank_fields_from_attrs from dataclasses import make_dataclass, field # For static type checking with type checker if TYPE_CHECKING: from typing import Dict, Iterable, Type # NOQA class ObjectTypeOptions(BaseOptions): fields = None # type: Dict[str, Field] interfaces = () 
# type: Iterable[Type[Interface]] class ObjectTypeMeta(BaseTypeMeta): def __new__(cls, name_, bases, namespace, **options): # Note: it's safe to pass options as keyword arguments as they are still type-checked by ObjectTypeOptions. # We create this type, to then overload it with the dataclass attrs class InterObjectType: pass base_cls = super().__new__( cls, name_, (InterObjectType,) + bases, namespace, **options ) if base_cls._meta: fields = [ ( key, "typing.Any", field( default=field_value.default_value if isinstance(field_value, Field) else None ), ) for key, field_value in base_cls._meta.fields.items() ] dataclass = make_dataclass(name_, fields, bases=()) InterObjectType.__init__ = dataclass.__init__ InterObjectType.__eq__ = dataclass.__eq__ InterObjectType.__repr__ = dataclass.__repr__ return base_cls class ObjectType(BaseType, metaclass=ObjectTypeMeta): """ Object Type Definition Almost all of the GraphQL types you define will be object types. Object types have a name, but most importantly describe their fields. The name of the type defined by an _ObjectType_ defaults to the class name. The type description defaults to the class docstring. This can be overridden by adding attributes to a Meta inner class. The class attributes of an _ObjectType_ are mounted as instances of ``graphene.Field``. Methods starting with ``resolve_`` are bound as resolvers of the matching Field name. If no resolver is provided, the default resolver is used. Ambiguous types with Interface and Union can be determined through ``is_type_of`` method and ``Meta.possible_types`` attribute. .. code:: python from graphene import ObjectType, String, Field class Person(ObjectType): class Meta: description = 'A human' # implicitly mounted as Field first_name = String() # explicitly mounted as Field last_name = Field(String) def resolve_last_name(parent, info): return last_name ObjectType must be mounted using ``graphene.Field``. .. 
code:: python from graphene import ObjectType, Field class Query(ObjectType): person = Field(Person, description="My favorite person") Meta class options (optional): name (str): Name of the GraphQL type (must be unique in schema). Defaults to class name. description (str): Description of the GraphQL type in the schema. Defaults to class docstring. interfaces (Iterable[graphene.Interface]): GraphQL interfaces to extend with this object. all fields from interface will be included in this object's schema. possible_types (Iterable[class]): Used to test parent value object via isinstance to see if this type can be used to resolve an ambiguous type (interface, union). default_resolver (any Callable resolver): Override the default resolver for this type. Defaults to graphene default resolver which returns an attribute or dictionary key with the same name as the field. fields (Dict[str, graphene.Field]): Dictionary of field name to Field. Not recommended to use (prefer class attributes). An _ObjectType_ can be used as a simple value object by creating an instance of the class. .. code:: python p = Person(first_name='Bob', last_name='Roberts') assert p.first_name == 'Bob' Args: *args (List[Any]): Positional values to use for Field values of value object **kwargs (Dict[str: Any]): Keyword arguments to use for Field values of value object """ @classmethod def __init_subclass_with_meta__( cls, interfaces=(), possible_types=(), default_resolver=None, _meta=None, **options, ): if not _meta: _meta = ObjectTypeOptions(cls) fields = {} for interface in interfaces: assert issubclass( interface, Interface ), f'All interfaces of {cls.__name__} must be a subclass of Interface. Received "{interface}".' fields.update(interface._meta.fields) for base in reversed(cls.__mro__): fields.update(yank_fields_from_attrs(base.__dict__, _as=Field)) assert not (possible_types and cls.is_type_of), ( f"{cls.__name__}.Meta.possible_types will cause type collision with {cls.__name__}.is_type_of. 
" "Please use one or other." ) if _meta.fields: _meta.fields.update(fields) else: _meta.fields = fields if not _meta.interfaces: _meta.interfaces = interfaces _meta.possible_types = possible_types _meta.default_resolver = default_resolver super(ObjectType, cls).__init_subclass_with_meta__(_meta=_meta, **options) is_type_of = None python-graphene-3.4.3/graphene/types/resolver.py000066400000000000000000000012701471374454500220170ustar00rootroot00000000000000def attr_resolver(attname, default_value, root, info, **args): return getattr(root, attname, default_value) def dict_resolver(attname, default_value, root, info, **args): return root.get(attname, default_value) def dict_or_attr_resolver(attname, default_value, root, info, **args): resolver = dict_resolver if isinstance(root, dict) else attr_resolver return resolver(attname, default_value, root, info, **args) default_resolver = dict_or_attr_resolver def set_default_resolver(resolver): global default_resolver assert callable(resolver), "Received non-callable resolver." default_resolver = resolver def get_default_resolver(): return default_resolver python-graphene-3.4.3/graphene/types/scalars.py000066400000000000000000000121121471374454500216030ustar00rootroot00000000000000from typing import Any from graphql import Undefined from graphql.language.ast import ( BooleanValueNode, FloatValueNode, IntValueNode, StringValueNode, ) from .base import BaseOptions, BaseType from .unmountedtype import UnmountedType class ScalarOptions(BaseOptions): pass class Scalar(UnmountedType, BaseType): """ Scalar Type Definition The leaf values of any request and input values to arguments are Scalars (or Enums) and are defined with a name and a series of functions used to parse input from ast or variables and to ensure validity. 
""" @classmethod def __init_subclass_with_meta__(cls, **options): _meta = ScalarOptions(cls) super(Scalar, cls).__init_subclass_with_meta__(_meta=_meta, **options) serialize = None parse_value = None parse_literal = None @classmethod def get_type(cls): """ This function is called when the unmounted type (Scalar instance) is mounted (as a Field, InputField or Argument) """ return cls # As per the GraphQL Spec, Integers are only treated as valid when a valid # 32-bit signed integer, providing the broadest support across platforms. # # n.b. JavaScript's integers are safe between -(2^53 - 1) and 2^53 - 1 because # they are internally represented as IEEE 754 doubles. MAX_INT = 2147483647 MIN_INT = -2147483648 class Int(Scalar): """ The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^53 - 1) and 2^53 - 1 since represented in JSON as double-precision floating point numbers specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point). """ @staticmethod def coerce_int(value): try: num = int(value) except ValueError: try: num = int(float(value)) except ValueError: return Undefined if MIN_INT <= num <= MAX_INT: return num return Undefined serialize = coerce_int parse_value = coerce_int @staticmethod def parse_literal(ast, _variables=None): if isinstance(ast, IntValueNode): num = int(ast.value) if MIN_INT <= num <= MAX_INT: return num return Undefined class BigInt(Scalar): """ The `BigInt` scalar type represents non-fractional whole numeric values. `BigInt` is not constrained to 32-bit like the `Int` type and thus is a less compatible type. 
""" @staticmethod def coerce_int(value): try: num = int(value) except ValueError: try: num = int(float(value)) except ValueError: return Undefined return num serialize = coerce_int parse_value = coerce_int @staticmethod def parse_literal(ast, _variables=None): if isinstance(ast, IntValueNode): return int(ast.value) return Undefined class Float(Scalar): """ The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point). """ @staticmethod def coerce_float(value: Any) -> float: try: return float(value) except ValueError: return Undefined serialize = coerce_float parse_value = coerce_float @staticmethod def parse_literal(ast, _variables=None): if isinstance(ast, (FloatValueNode, IntValueNode)): return float(ast.value) return Undefined class String(Scalar): """ The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text. """ @staticmethod def coerce_string(value): if isinstance(value, bool): return "true" if value else "false" return str(value) serialize = coerce_string parse_value = coerce_string @staticmethod def parse_literal(ast, _variables=None): if isinstance(ast, StringValueNode): return ast.value return Undefined class Boolean(Scalar): """ The `Boolean` scalar type represents `true` or `false`. """ serialize = bool parse_value = bool @staticmethod def parse_literal(ast, _variables=None): if isinstance(ast, BooleanValueNode): return ast.value return Undefined class ID(Scalar): """ The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `"4"`) or integer (such as `4`) input value will be accepted as an ID. 
""" serialize = str parse_value = str @staticmethod def parse_literal(ast, _variables=None): if isinstance(ast, (StringValueNode, IntValueNode)): return ast.value return Undefined python-graphene-3.4.3/graphene/types/schema.py000066400000000000000000000502151471374454500214210ustar00rootroot00000000000000from enum import Enum as PyEnum import inspect from functools import partial from graphql import ( default_type_resolver, get_introspection_query, graphql, graphql_sync, introspection_types, parse, print_schema, subscribe, validate, ExecutionResult, GraphQLArgument, GraphQLBoolean, GraphQLError, GraphQLEnumValue, GraphQLField, GraphQLFloat, GraphQLID, GraphQLInputField, GraphQLInt, GraphQLList, GraphQLNonNull, GraphQLObjectType, GraphQLSchema, GraphQLString, ) from ..utils.str_converters import to_camel_case from ..utils.get_unbound_function import get_unbound_function from .definitions import ( GrapheneEnumType, GrapheneGraphQLType, GrapheneInputObjectType, GrapheneInterfaceType, GrapheneObjectType, GrapheneScalarType, GrapheneUnionType, ) from .dynamic import Dynamic from .enum import Enum from .field import Field from .inputobjecttype import InputObjectType from .interface import Interface from .objecttype import ObjectType from .resolver import get_default_resolver from .scalars import ID, Boolean, Float, Int, Scalar, String from .structures import List, NonNull from .union import Union from .utils import get_field_as introspection_query = get_introspection_query() IntrospectionSchema = introspection_types["__Schema"] def assert_valid_root_type(type_): if type_ is None: return is_graphene_objecttype = inspect.isclass(type_) and issubclass(type_, ObjectType) is_graphql_objecttype = isinstance(type_, GraphQLObjectType) assert ( is_graphene_objecttype or is_graphql_objecttype ), f"Type {type_} is not a valid ObjectType." 
def is_graphene_type(type_): if isinstance(type_, (List, NonNull)): return True if inspect.isclass(type_) and issubclass( type_, (ObjectType, InputObjectType, Scalar, Interface, Union, Enum) ): return True def is_type_of_from_possible_types(possible_types, root, _info): return isinstance(root, possible_types) # We use this resolver for subscriptions def identity_resolve(root, info, **arguments): return root class TypeMap(dict): def __init__( self, query=None, mutation=None, subscription=None, types=None, auto_camelcase=True, ): assert_valid_root_type(query) assert_valid_root_type(mutation) assert_valid_root_type(subscription) if types is None: types = [] for type_ in types: assert is_graphene_type(type_) self.auto_camelcase = auto_camelcase create_graphql_type = self.add_type self.query = create_graphql_type(query) if query else None self.mutation = create_graphql_type(mutation) if mutation else None self.subscription = create_graphql_type(subscription) if subscription else None self.types = [create_graphql_type(graphene_type) for graphene_type in types] def add_type(self, graphene_type): if inspect.isfunction(graphene_type): graphene_type = graphene_type() if isinstance(graphene_type, List): return GraphQLList(self.add_type(graphene_type.of_type)) if isinstance(graphene_type, NonNull): return GraphQLNonNull(self.add_type(graphene_type.of_type)) try: name = graphene_type._meta.name except AttributeError: raise TypeError(f"Expected Graphene type, but received: {graphene_type}.") graphql_type = self.get(name) if graphql_type: return graphql_type if issubclass(graphene_type, ObjectType): graphql_type = self.create_objecttype(graphene_type) elif issubclass(graphene_type, InputObjectType): graphql_type = self.create_inputobjecttype(graphene_type) elif issubclass(graphene_type, Interface): graphql_type = self.create_interface(graphene_type) elif issubclass(graphene_type, Scalar): graphql_type = self.create_scalar(graphene_type) elif issubclass(graphene_type, Enum): 
graphql_type = self.create_enum(graphene_type) elif issubclass(graphene_type, Union): graphql_type = self.construct_union(graphene_type) else: raise TypeError(f"Expected Graphene type, but received: {graphene_type}.") self[name] = graphql_type return graphql_type @staticmethod def create_scalar(graphene_type): # We have a mapping to the original GraphQL types # so there are no collisions. _scalars = { String: GraphQLString, Int: GraphQLInt, Float: GraphQLFloat, Boolean: GraphQLBoolean, ID: GraphQLID, } if graphene_type in _scalars: return _scalars[graphene_type] return GrapheneScalarType( graphene_type=graphene_type, name=graphene_type._meta.name, description=graphene_type._meta.description, serialize=getattr(graphene_type, "serialize", None), parse_value=getattr(graphene_type, "parse_value", None), parse_literal=getattr(graphene_type, "parse_literal", None), ) @staticmethod def create_enum(graphene_type): values = {} for name, value in graphene_type._meta.enum.__members__.items(): description = getattr(value, "description", None) # if the "description" attribute is an Enum, it is likely an enum member # called description, not a description property if isinstance(description, PyEnum): description = None if not description and callable(graphene_type._meta.description): description = graphene_type._meta.description(value) deprecation_reason = getattr(value, "deprecation_reason", None) if isinstance(deprecation_reason, PyEnum): deprecation_reason = None if not deprecation_reason and callable( graphene_type._meta.deprecation_reason ): deprecation_reason = graphene_type._meta.deprecation_reason(value) values[name] = GraphQLEnumValue( value=value, description=description, deprecation_reason=deprecation_reason, ) type_description = ( graphene_type._meta.description(None) if callable(graphene_type._meta.description) else graphene_type._meta.description ) return GrapheneEnumType( graphene_type=graphene_type, values=values, name=graphene_type._meta.name, 
description=type_description, ) def create_objecttype(self, graphene_type): create_graphql_type = self.add_type def interfaces(): interfaces = [] for graphene_interface in graphene_type._meta.interfaces: interface = create_graphql_type(graphene_interface) assert interface.graphene_type == graphene_interface interfaces.append(interface) return interfaces if graphene_type._meta.possible_types: is_type_of = partial( is_type_of_from_possible_types, graphene_type._meta.possible_types ) else: is_type_of = graphene_type.is_type_of return GrapheneObjectType( graphene_type=graphene_type, name=graphene_type._meta.name, description=graphene_type._meta.description, fields=partial(self.create_fields_for_type, graphene_type), is_type_of=is_type_of, interfaces=interfaces, ) def create_interface(self, graphene_type): resolve_type = ( partial( self.resolve_type, graphene_type.resolve_type, graphene_type._meta.name ) if graphene_type.resolve_type else None ) def interfaces(): interfaces = [] for graphene_interface in graphene_type._meta.interfaces: interface = self.add_type(graphene_interface) assert interface.graphene_type == graphene_interface interfaces.append(interface) return interfaces return GrapheneInterfaceType( graphene_type=graphene_type, name=graphene_type._meta.name, description=graphene_type._meta.description, fields=partial(self.create_fields_for_type, graphene_type), interfaces=interfaces, resolve_type=resolve_type, ) def create_inputobjecttype(self, graphene_type): return GrapheneInputObjectType( graphene_type=graphene_type, name=graphene_type._meta.name, description=graphene_type._meta.description, out_type=graphene_type._meta.container, fields=partial( self.create_fields_for_type, graphene_type, is_input_type=True ), ) def construct_union(self, graphene_type): create_graphql_type = self.add_type def types(): union_types = [] for graphene_objecttype in graphene_type._meta.types: object_type = create_graphql_type(graphene_objecttype) assert object_type.graphene_type 
== graphene_objecttype union_types.append(object_type) return union_types resolve_type = ( partial( self.resolve_type, graphene_type.resolve_type, graphene_type._meta.name ) if graphene_type.resolve_type else None ) return GrapheneUnionType( graphene_type=graphene_type, name=graphene_type._meta.name, description=graphene_type._meta.description, types=types, resolve_type=resolve_type, ) def get_name(self, name): if self.auto_camelcase: return to_camel_case(name) return name def create_fields_for_type(self, graphene_type, is_input_type=False): create_graphql_type = self.add_type fields = {} for name, field in graphene_type._meta.fields.items(): if isinstance(field, Dynamic): field = get_field_as(field.get_type(self), _as=Field) if not field: continue field_type = create_graphql_type(field.type) if is_input_type: _field = GraphQLInputField( field_type, default_value=field.default_value, out_name=name, description=field.description, deprecation_reason=field.deprecation_reason, ) else: args = {} for arg_name, arg in field.args.items(): arg_type = create_graphql_type(arg.type) processed_arg_name = arg.name or self.get_name(arg_name) args[processed_arg_name] = GraphQLArgument( arg_type, out_name=arg_name, description=arg.description, default_value=arg.default_value, deprecation_reason=arg.deprecation_reason, ) subscribe = field.wrap_subscribe( self.get_function_for_type( graphene_type, f"subscribe_{name}", name, field.default_value ) ) # If we are in a subscription, we use (by default) an # identity-based resolver for the root, rather than the # default resolver for objects/dicts. 
if subscribe: field_default_resolver = identity_resolve elif issubclass(graphene_type, ObjectType): default_resolver = ( graphene_type._meta.default_resolver or get_default_resolver() ) field_default_resolver = partial( default_resolver, name, field.default_value ) else: field_default_resolver = None resolve = field.wrap_resolve( self.get_function_for_type( graphene_type, f"resolve_{name}", name, field.default_value ) or field_default_resolver ) _field = GraphQLField( field_type, args=args, resolve=resolve, subscribe=subscribe, deprecation_reason=field.deprecation_reason, description=field.description, ) field_name = field.name or self.get_name(name) fields[field_name] = _field return fields def get_function_for_type(self, graphene_type, func_name, name, default_value): """Gets a resolve or subscribe function for a given ObjectType""" if not issubclass(graphene_type, ObjectType): return resolver = getattr(graphene_type, func_name, None) if not resolver: # If we don't find the resolver in the ObjectType class, then try to # find it in each of the interfaces interface_resolver = None for interface in graphene_type._meta.interfaces: if name not in interface._meta.fields: continue interface_resolver = getattr(interface, func_name, None) if interface_resolver: break resolver = interface_resolver # Only if is not decorated with classmethod if resolver: return get_unbound_function(resolver) def resolve_type(self, resolve_type_func, type_name, root, info, _type): type_ = resolve_type_func(root, info) if inspect.isclass(type_) and issubclass(type_, ObjectType): return type_._meta.name return_type = self[type_name] return default_type_resolver(root, info, return_type) class Schema: """Schema Definition. A Graphene Schema can execute operations (query, mutation, subscription) against the defined types. For advanced purposes, the schema can be used to lookup type definitions and answer questions about the types through introspection. 
Args: query (Type[ObjectType]): Root query *ObjectType*. Describes entry point for fields to *read* data in your Schema. mutation (Optional[Type[ObjectType]]): Root mutation *ObjectType*. Describes entry point for fields to *create, update or delete* data in your API. subscription (Optional[Type[ObjectType]]): Root subscription *ObjectType*. Describes entry point for fields to receive continuous updates. types (Optional[List[Type[ObjectType]]]): List of any types to include in schema that may not be introspected through root types. directives (List[GraphQLDirective], optional): List of custom directives to include in the GraphQL schema. Defaults to only include directives defined by GraphQL spec (@include and @skip) [GraphQLIncludeDirective, GraphQLSkipDirective]. auto_camelcase (bool): Fieldnames will be transformed in Schema's TypeMap from snake_case to camelCase (preferred by GraphQL standard). Default True. """ def __init__( self, query=None, mutation=None, subscription=None, types=None, directives=None, auto_camelcase=True, ): self.query = query self.mutation = mutation self.subscription = subscription type_map = TypeMap( query, mutation, subscription, types, auto_camelcase=auto_camelcase ) self.graphql_schema = GraphQLSchema( type_map.query, type_map.mutation, type_map.subscription, type_map.types, directives, ) def __str__(self): return print_schema(self.graphql_schema) def __getattr__(self, type_name): """ This function let the developer select a type in a given schema by accessing its attrs. Example: using schema.Query for accessing the "Query" type in the Schema """ _type = self.graphql_schema.get_type(type_name) if _type is None: raise AttributeError(f'Type "{type_name}" not found in the Schema') if isinstance(_type, GrapheneGraphQLType): return _type.graphene_type return _type def lazy(self, _type): return lambda: self.get_type(_type) def execute(self, *args, **kwargs): """Execute a GraphQL query on the schema. 
Use the `graphql_sync` function from `graphql-core` to provide the result for a query string. Most of the time this method will be called by one of the Graphene :ref:`Integrations` via a web request. Args: request_string (str or Document): GraphQL request (query, mutation or subscription) as string or parsed AST form from `graphql-core`. root_value (Any, optional): Value to use as the parent value object when resolving root types. context_value (Any, optional): Value to be made available to all resolvers via `info.context`. Can be used to share authorization, dataloaders or other information needed to resolve an operation. variable_values (dict, optional): If variables are used in the request string, they can be provided in dictionary form mapping the variable name to the variable value. operation_name (str, optional): If multiple operations are provided in the request_string, an operation name must be provided for the result to be provided. middleware (List[SupportsGraphQLMiddleware]): Supply request level middleware as defined in `graphql-core`. execution_context_class (ExecutionContext, optional): The execution context class to use when resolving queries and mutations. Returns: :obj:`ExecutionResult` containing any data and errors for the operation. """ kwargs = normalize_execute_kwargs(kwargs) return graphql_sync(self.graphql_schema, *args, **kwargs) async def execute_async(self, *args, **kwargs): """Execute a GraphQL query on the schema asynchronously. Same as `execute`, but uses `graphql` instead of `graphql_sync`. 
""" kwargs = normalize_execute_kwargs(kwargs) return await graphql(self.graphql_schema, *args, **kwargs) async def subscribe(self, query, *args, **kwargs): """Execute a GraphQL subscription on the schema asynchronously.""" # Do parsing try: document = parse(query) except GraphQLError as error: return ExecutionResult(data=None, errors=[error]) # Do validation validation_errors = validate(self.graphql_schema, document) if validation_errors: return ExecutionResult(data=None, errors=validation_errors) # Execute the query kwargs = normalize_execute_kwargs(kwargs) return await subscribe(self.graphql_schema, document, *args, **kwargs) def introspect(self): introspection = self.execute(introspection_query) if introspection.errors: raise introspection.errors[0] return introspection.data def normalize_execute_kwargs(kwargs): """Replace alias names in keyword arguments for graphql()""" if "root" in kwargs and "root_value" not in kwargs: kwargs["root_value"] = kwargs.pop("root") if "context" in kwargs and "context_value" not in kwargs: kwargs["context_value"] = kwargs.pop("context") if "variables" in kwargs and "variable_values" not in kwargs: kwargs["variable_values"] = kwargs.pop("variables") if "operation" in kwargs and "operation_name" not in kwargs: kwargs["operation_name"] = kwargs.pop("operation") return kwargs python-graphene-3.4.3/graphene/types/structures.py000066400000000000000000000060611471374454500224040ustar00rootroot00000000000000from .unmountedtype import UnmountedType from .utils import get_type class Structure(UnmountedType): """ A structure is a GraphQL type instance that wraps a main type with certain structure. """ def __init__(self, of_type, *args, **kwargs): super(Structure, self).__init__(*args, **kwargs) if not isinstance(of_type, Structure) and isinstance(of_type, UnmountedType): cls_name = type(self).__name__ of_type_name = type(of_type).__name__ raise Exception( f"{cls_name} could not have a mounted {of_type_name}()" f" as inner type. 
Try with {cls_name}({of_type_name})." ) self._of_type = of_type @property def of_type(self): return get_type(self._of_type) def get_type(self): """ This function is called when the unmounted type (List or NonNull instance) is mounted (as a Field, InputField or Argument) """ return self class List(Structure): """ List Modifier A list is a kind of type marker, a wrapping type which points to another type. Lists are often created within the context of defining the fields of an object type. List indicates that many values will be returned (or input) for this field. .. code:: python from graphene import List, String field_name = List(String, description="There will be many values") """ def __str__(self): return f"[{self.of_type}]" def __eq__(self, other): return isinstance(other, List) and ( self.of_type == other.of_type and self.args == other.args and self.kwargs == other.kwargs ) class NonNull(Structure): """ Non-Null Modifier A non-null is a kind of type marker, a wrapping type which points to another type. Non-null types enforce that their values are never null and can ensure an error is raised if this ever occurs during a request. It is useful for fields which you can make a strong guarantee on non-nullability, for example usually the id field of a database row will never be null. Note: the enforcement of non-nullability occurs within the executor. NonNull can also be indicated on all Mounted types with the keyword argument ``required``. .. code:: python from graphene import NonNull, String field_name = NonNull(String, description='This field will not be null') another_field = String(required=True, description='This is equivalent to the above') """ def __init__(self, *args, **kwargs): super(NonNull, self).__init__(*args, **kwargs) assert not isinstance( self._of_type, NonNull ), f"Can only create NonNull of a Nullable GraphQLType but got: {self._of_type}." def __str__(self): return f"{self.of_type}!" 
def __eq__(self, other): return isinstance(other, NonNull) and ( self.of_type == other.of_type and self.args == other.args and self.kwargs == other.kwargs ) python-graphene-3.4.3/graphene/types/tests/000077500000000000000000000000001471374454500207465ustar00rootroot00000000000000python-graphene-3.4.3/graphene/types/tests/__init__.py000066400000000000000000000000001471374454500230450ustar00rootroot00000000000000python-graphene-3.4.3/graphene/types/tests/conftest.py000066400000000000000000000006421471374454500231470ustar00rootroot00000000000000import pytest from graphql import Undefined from graphene.types.inputobjecttype import set_input_object_type_default_value @pytest.fixture() def set_default_input_object_type_to_undefined(): """This fixture is used to change the default value of optional inputs in InputObjectTypes for specific tests""" set_input_object_type_default_value(Undefined) yield set_input_object_type_default_value(None) python-graphene-3.4.3/graphene/types/tests/test_argument.py000066400000000000000000000053401471374454500242030ustar00rootroot00000000000000from functools import partial from pytest import raises from ..argument import Argument, to_arguments from ..field import Field from ..inputfield import InputField from ..scalars import String from ..structures import NonNull def test_argument(): arg = Argument(String, default_value="a", description="desc", name="b") assert arg.type == String assert arg.default_value == "a" assert arg.description == "desc" assert arg.name == "b" def test_argument_comparasion(): arg1 = Argument( String, name="Hey", description="Desc", default_value="default", deprecation_reason="deprecated", ) arg2 = Argument( String, name="Hey", description="Desc", default_value="default", deprecation_reason="deprecated", ) assert arg1 == arg2 assert arg1 != String() def test_argument_required(): arg = Argument(String, required=True) assert arg.type == NonNull(String) def test_to_arguments(): args = {"arg_string": Argument(String), 
"unmounted_arg": String(required=True)} my_args = to_arguments(args) assert my_args == { "arg_string": Argument(String), "unmounted_arg": Argument(String, required=True), } def test_to_arguments_deprecated(): args = {"unmounted_arg": String(required=False, deprecation_reason="deprecated")} my_args = to_arguments(args) assert my_args == { "unmounted_arg": Argument( String, required=False, deprecation_reason="deprecated" ), } def test_to_arguments_required_deprecated(): args = { "unmounted_arg": String( required=True, name="arg", deprecation_reason="deprecated" ) } with raises(AssertionError) as exc_info: to_arguments(args) assert str(exc_info.value) == "Argument arg is required, cannot deprecate it." def test_to_arguments_raises_if_field(): args = {"arg_string": Field(String)} with raises(ValueError) as exc_info: to_arguments(args) assert str(exc_info.value) == ( "Expected arg_string to be Argument, but received Field. Try using " "Argument(String)." ) def test_to_arguments_raises_if_inputfield(): args = {"arg_string": InputField(String)} with raises(ValueError) as exc_info: to_arguments(args) assert str(exc_info.value) == ( "Expected arg_string to be Argument, but received InputField. Try " "using Argument(String)." 
) def test_argument_with_lazy_type(): MyType = object() arg = Argument(lambda: MyType) assert arg.type == MyType def test_argument_with_lazy_partial_type(): MyType = object() arg = Argument(partial(lambda: MyType)) assert arg.type == MyType python-graphene-3.4.3/graphene/types/tests/test_base.py000066400000000000000000000032301471374454500232670ustar00rootroot00000000000000from ..base import BaseOptions, BaseType class CustomOptions(BaseOptions): pass class CustomType(BaseType): @classmethod def __init_subclass_with_meta__(cls, **options): _meta = CustomOptions(cls) super(CustomType, cls).__init_subclass_with_meta__(_meta=_meta, **options) def test_basetype(): class MyBaseType(CustomType): pass assert isinstance(MyBaseType._meta, CustomOptions) assert MyBaseType._meta.name == "MyBaseType" assert MyBaseType._meta.description is None def test_basetype_nones(): class MyBaseType(CustomType): """Documentation""" class Meta: name = None description = None assert isinstance(MyBaseType._meta, CustomOptions) assert MyBaseType._meta.name == "MyBaseType" assert MyBaseType._meta.description == "Documentation" def test_basetype_custom(): class MyBaseType(CustomType): """Documentation""" class Meta: name = "Base" description = "Desc" assert isinstance(MyBaseType._meta, CustomOptions) assert MyBaseType._meta.name == "Base" assert MyBaseType._meta.description == "Desc" def test_basetype_create(): MyBaseType = CustomType.create_type("MyBaseType") assert isinstance(MyBaseType._meta, CustomOptions) assert MyBaseType._meta.name == "MyBaseType" assert MyBaseType._meta.description is None def test_basetype_create_extra(): MyBaseType = CustomType.create_type("MyBaseType", name="Base", description="Desc") assert isinstance(MyBaseType._meta, CustomOptions) assert MyBaseType._meta.name == "Base" assert MyBaseType._meta.description == "Desc" python-graphene-3.4.3/graphene/types/tests/test_base64.py000066400000000000000000000053561471374454500234540ustar00rootroot00000000000000import base64 
from graphql import GraphQLError from ..objecttype import ObjectType from ..scalars import String from ..schema import Schema from ..base64 import Base64 class Query(ObjectType): base64 = Base64(_in=Base64(name="input"), _match=String(name="match")) bytes_as_base64 = Base64() string_as_base64 = Base64() number_as_base64 = Base64() def resolve_base64(self, info, _in=None, _match=None): if _match: assert _in == _match return _in def resolve_bytes_as_base64(self, info): return b"Hello world" def resolve_string_as_base64(self, info): return "Spam and eggs" def resolve_number_as_base64(self, info): return 42 schema = Schema(query=Query) def test_base64_query(): base64_value = base64.b64encode(b"Random string").decode("utf-8") result = schema.execute( """{{ base64(input: "{}", match: "Random string") }}""".format(base64_value) ) assert not result.errors assert result.data == {"base64": base64_value} def test_base64_query_with_variable(): base64_value = base64.b64encode(b"Another string").decode("utf-8") # test datetime variable in string representation result = schema.execute( """ query GetBase64($base64: Base64) { base64(input: $base64, match: "Another string") } """, variables={"base64": base64_value}, ) assert not result.errors assert result.data == {"base64": base64_value} def test_base64_query_none(): result = schema.execute("""{ base64 }""") assert not result.errors assert result.data == {"base64": None} def test_base64_query_invalid(): bad_inputs = [dict(), 123, "This is not valid base64"] for input_ in bad_inputs: result = schema.execute( """{ base64(input: $input) }""", variables={"input": input_} ) assert isinstance(result.errors, list) assert len(result.errors) == 1 assert isinstance(result.errors[0], GraphQLError) assert result.data is None def test_base64_from_bytes(): base64_value = base64.b64encode(b"Hello world").decode("utf-8") result = schema.execute("""{ bytesAsBase64 }""") assert not result.errors assert result.data == {"bytesAsBase64": base64_value} 
def test_base64_from_string(): base64_value = base64.b64encode(b"Spam and eggs").decode("utf-8") result = schema.execute("""{ stringAsBase64 }""") assert not result.errors assert result.data == {"stringAsBase64": base64_value} def test_base64_from_number(): base64_value = base64.b64encode(b"42").decode("utf-8") result = schema.execute("""{ numberAsBase64 }""") assert not result.errors assert result.data == {"numberAsBase64": base64_value} python-graphene-3.4.3/graphene/types/tests/test_datetime.py000066400000000000000000000164721471374454500241650ustar00rootroot00000000000000import datetime from graphql import GraphQLError from pytest import fixture from ..datetime import Date, DateTime, Time from ..objecttype import ObjectType from ..schema import Schema class Query(ObjectType): datetime = DateTime(_in=DateTime(name="in")) date = Date(_in=Date(name="in")) time = Time(_at=Time(name="at")) def resolve_datetime(self, info, _in=None): return _in def resolve_date(self, info, _in=None): return _in def resolve_time(self, info, _at=None): return _at schema = Schema(query=Query) @fixture def sample_datetime(): utc_datetime = datetime.datetime(2019, 5, 25, 5, 30, 15, 10, datetime.timezone.utc) return utc_datetime @fixture def sample_time(sample_datetime): time = datetime.time( sample_datetime.hour, sample_datetime.minute, sample_datetime.second, sample_datetime.microsecond, sample_datetime.tzinfo, ) return time @fixture def sample_date(sample_datetime): date = sample_datetime.date() return date def test_datetime_query(sample_datetime): isoformat = sample_datetime.isoformat() result = schema.execute("""{ datetime(in: "%s") }""" % isoformat) assert not result.errors assert result.data == {"datetime": isoformat} def test_datetime_query_with_variables(sample_datetime): isoformat = sample_datetime.isoformat() result = schema.execute( """ query GetDate($datetime: DateTime) { literal: datetime(in: "%s") value: datetime(in: $datetime) } """ % isoformat, variable_values={"datetime": 
isoformat}, ) assert not result.errors assert result.data == {"literal": isoformat, "value": isoformat} def test_date_query(sample_date): isoformat = sample_date.isoformat() result = schema.execute("""{ date(in: "%s") }""" % isoformat) assert not result.errors assert result.data == {"date": isoformat} def test_date_query_with_variables(sample_date): isoformat = sample_date.isoformat() result = schema.execute( """ query GetDate($date: Date) { literal: date(in: "%s") value: date(in: $date) } """ % isoformat, variable_values={"date": isoformat}, ) assert not result.errors assert result.data == {"literal": isoformat, "value": isoformat} def test_time_query(sample_time): isoformat = sample_time.isoformat() result = schema.execute("""{ time(at: "%s") }""" % isoformat) assert not result.errors assert result.data == {"time": isoformat} def test_time_query_with_variables(sample_time): isoformat = sample_time.isoformat() result = schema.execute( """ query GetTime($time: Time) { literal: time(at: "%s") value: time(at: $time) } """ % isoformat, variable_values={"time": isoformat}, ) assert not result.errors assert result.data == {"literal": isoformat, "value": isoformat} def test_bad_datetime_query(): not_a_date = "Some string that's not a datetime" result = schema.execute("""{ datetime(in: "%s") }""" % not_a_date) assert result.errors and len(result.errors) == 1 error = result.errors[0] assert isinstance(error, GraphQLError) assert ( error.message == "DateTime cannot represent value:" ' "Some string that\'s not a datetime"' ) assert result.data is None def test_bad_date_query(): not_a_date = "Some string that's not a date" result = schema.execute("""{ date(in: "%s") }""" % not_a_date) error = result.errors[0] assert isinstance(error, GraphQLError) assert ( error.message == "Date cannot represent value:" ' "Some string that\'s not a date"' ) assert result.data is None def test_bad_time_query(): not_a_date = "Some string that's not a time" result = schema.execute("""{ time(at: 
"%s") }""" % not_a_date) error = result.errors[0] assert isinstance(error, GraphQLError) assert ( error.message == "Time cannot represent value:" ' "Some string that\'s not a time"' ) assert result.data is None def test_datetime_query_variable(sample_datetime): isoformat = sample_datetime.isoformat() # test datetime variable provided as Python datetime result = schema.execute( """query Test($date: DateTime){ datetime(in: $date) }""", variables={"date": sample_datetime}, ) assert not result.errors assert result.data == {"datetime": isoformat} # test datetime variable in string representation result = schema.execute( """query Test($date: DateTime){ datetime(in: $date) }""", variables={"date": isoformat}, ) assert not result.errors assert result.data == {"datetime": isoformat} def test_date_query_variable(sample_date): isoformat = sample_date.isoformat() # test date variable provided as Python date result = schema.execute( """query Test($date: Date){ date(in: $date) }""", variables={"date": sample_date}, ) assert not result.errors assert result.data == {"date": isoformat} # test date variable in string representation result = schema.execute( """query Test($date: Date){ date(in: $date) }""", variables={"date": isoformat} ) assert not result.errors assert result.data == {"date": isoformat} def test_time_query_variable(sample_time): isoformat = sample_time.isoformat() # test time variable provided as Python time result = schema.execute( """query Test($time: Time){ time(at: $time) }""", variables={"time": sample_time}, ) assert not result.errors assert result.data == {"time": isoformat} # test time variable in string representation result = schema.execute( """query Test($time: Time){ time(at: $time) }""", variables={"time": isoformat} ) assert not result.errors assert result.data == {"time": isoformat} def test_support_isoformat(): isoformat = "2011-11-04T00:05:23Z" # test time variable provided as Python time result = schema.execute( """query DateTime($time: DateTime){ 
datetime(in: $time) }""", variables={"time": isoformat}, ) assert not result.errors assert result.data == {"datetime": "2011-11-04T00:05:23+00:00"} def test_bad_variables(sample_date, sample_datetime, sample_time): def _test_bad_variables(type_, input_): result = schema.execute( f"""query Test($input: {type_}){{ {type_.lower()}(in: $input) }}""", variables={"input": input_}, ) assert isinstance(result.errors, list) assert len(result.errors) == 1 assert isinstance(result.errors[0], GraphQLError) assert result.data is None not_a_date = dict() not_a_date_str = "Some string that's not a date" today = sample_date now = sample_datetime time = sample_time bad_pairs = [ ("DateTime", not_a_date), ("DateTime", not_a_date_str), ("DateTime", today), ("DateTime", time), ("Date", not_a_date), ("Date", not_a_date_str), ("Date", time), ("Time", not_a_date), ("Time", not_a_date_str), ("Time", now), ("Time", today), ] for type_, input_ in bad_pairs: _test_bad_variables(type_, input_) python-graphene-3.4.3/graphene/types/tests/test_decimal.py000066400000000000000000000041651471374454500237630ustar00rootroot00000000000000import decimal from ..decimal import Decimal from ..objecttype import ObjectType from ..schema import Schema class Query(ObjectType): decimal = Decimal(input=Decimal()) def resolve_decimal(self, info, input): return input schema = Schema(query=Query) def test_decimal_string_query(): decimal_value = decimal.Decimal("1969.1974") result = schema.execute("""{ decimal(input: "%s") }""" % decimal_value) assert not result.errors assert result.data == {"decimal": str(decimal_value)} assert decimal.Decimal(result.data["decimal"]) == decimal_value def test_decimal_string_query_variable(): decimal_value = decimal.Decimal("1969.1974") result = schema.execute( """query Test($decimal: Decimal){ decimal(input: $decimal) }""", variables={"decimal": decimal_value}, ) assert not result.errors assert result.data == {"decimal": str(decimal_value)} assert 
decimal.Decimal(result.data["decimal"]) == decimal_value def test_bad_decimal_query(): not_a_decimal = "Nobody expects the Spanish Inquisition!" result = schema.execute("""{ decimal(input: "%s") }""" % not_a_decimal) assert result.errors assert len(result.errors) == 1 assert result.data is None assert ( result.errors[0].message == "Expected value of type 'Decimal', found \"Nobody expects the Spanish Inquisition!\"." ) result = schema.execute("{ decimal(input: true) }") assert result.errors assert len(result.errors) == 1 assert result.data is None assert result.errors[0].message == "Expected value of type 'Decimal', found true." result = schema.execute("{ decimal(input: 1.2) }") assert result.errors assert len(result.errors) == 1 assert result.data is None assert result.errors[0].message == "Expected value of type 'Decimal', found 1.2." def test_decimal_string_query_integer(): decimal_value = 1 result = schema.execute("""{ decimal(input: %s) }""" % decimal_value) assert not result.errors assert result.data == {"decimal": str(decimal_value)} assert decimal.Decimal(result.data["decimal"]) == decimal_value python-graphene-3.4.3/graphene/types/tests/test_definition.py000066400000000000000000000214611471374454500245130ustar00rootroot00000000000000import copy from ..argument import Argument from ..definitions import GrapheneGraphQLType from ..enum import Enum from ..field import Field from ..inputfield import InputField from ..inputobjecttype import InputObjectType from ..interface import Interface from ..objecttype import ObjectType from ..scalars import Boolean, Int, String from ..schema import Schema from ..structures import List, NonNull from ..union import Union class Image(ObjectType): url = String() width = Int() height = Int() class Author(ObjectType): id = String() name = String() pic = Field(Image, width=Int(), height=Int()) recent_article = Field(lambda: Article) class Article(ObjectType): id = String() is_published = Boolean() author = Field(Author) title = 
String() body = String() class Query(ObjectType): article = Field(Article, id=String()) feed = List(Article) class Mutation(ObjectType): write_article = Field(Article) class Subscription(ObjectType): article_subscribe = Field(Article, id=String()) class MyObjectType(ObjectType): pass class MyInterface(Interface): pass class MyUnion(Union): class Meta: types = (Article,) class MyEnum(Enum): foo = "foo" class MyInputObjectType(InputObjectType): pass def test_defines_a_query_only_schema(): blog_schema = Schema(Query) assert blog_schema.query == Query assert blog_schema.graphql_schema.query_type.graphene_type == Query article_field = Query._meta.fields["article"] assert article_field.type == Article assert article_field.type._meta.name == "Article" article_field_type = article_field.type assert issubclass(article_field_type, ObjectType) title_field = article_field_type._meta.fields["title"] assert title_field.type == String author_field = article_field_type._meta.fields["author"] author_field_type = author_field.type assert issubclass(author_field_type, ObjectType) recent_article_field = author_field_type._meta.fields["recent_article"] assert recent_article_field.type == Article feed_field = Query._meta.fields["feed"] assert feed_field.type.of_type == Article def test_defines_a_mutation_schema(): blog_schema = Schema(Query, mutation=Mutation) assert blog_schema.mutation == Mutation assert blog_schema.graphql_schema.mutation_type.graphene_type == Mutation write_mutation = Mutation._meta.fields["write_article"] assert write_mutation.type == Article assert write_mutation.type._meta.name == "Article" def test_defines_a_subscription_schema(): blog_schema = Schema(Query, subscription=Subscription) assert blog_schema.subscription == Subscription assert blog_schema.graphql_schema.subscription_type.graphene_type == Subscription subscription = Subscription._meta.fields["article_subscribe"] assert subscription.type == Article assert subscription.type._meta.name == "Article" def 
test_includes_nested_input_objects_in_the_map(): class NestedInputObject(InputObjectType): value = String() class SomeInputObject(InputObjectType): nested = InputField(NestedInputObject) class SomeMutation(Mutation): mutate_something = Field(Article, input=Argument(SomeInputObject)) class SomeSubscription(Mutation): subscribe_to_something = Field(Article, input=Argument(SomeInputObject)) schema = Schema(query=Query, mutation=SomeMutation, subscription=SomeSubscription) type_map = schema.graphql_schema.type_map assert type_map["NestedInputObject"].graphene_type is NestedInputObject def test_includes_interfaces_thunk_subtypes_in_the_type_map(): class SomeInterface(Interface): f = Int() class SomeSubtype(ObjectType): class Meta: interfaces = (SomeInterface,) class Query(ObjectType): iface = Field(lambda: SomeInterface) schema = Schema(query=Query, types=[SomeSubtype]) type_map = schema.graphql_schema.type_map assert type_map["SomeSubtype"].graphene_type is SomeSubtype def test_includes_types_in_union(): class SomeType(ObjectType): a = String() class OtherType(ObjectType): b = String() class MyUnion(Union): class Meta: types = (SomeType, OtherType) class Query(ObjectType): union = Field(MyUnion) schema = Schema(query=Query) type_map = schema.graphql_schema.type_map assert type_map["OtherType"].graphene_type is OtherType assert type_map["SomeType"].graphene_type is SomeType def test_maps_enum(): class SomeType(ObjectType): a = String() class OtherType(ObjectType): b = String() class MyUnion(Union): class Meta: types = (SomeType, OtherType) class Query(ObjectType): union = Field(MyUnion) schema = Schema(query=Query) type_map = schema.graphql_schema.type_map assert type_map["OtherType"].graphene_type is OtherType assert type_map["SomeType"].graphene_type is SomeType def test_includes_interfaces_subtypes_in_the_type_map(): class SomeInterface(Interface): f = Int() class SomeSubtype(ObjectType): class Meta: interfaces = (SomeInterface,) class Query(ObjectType): iface = 
Field(SomeInterface) schema = Schema(query=Query, types=[SomeSubtype]) type_map = schema.graphql_schema.type_map assert type_map["SomeSubtype"].graphene_type is SomeSubtype def test_stringifies_simple_types(): assert str(Int) == "Int" assert str(Article) == "Article" assert str(MyInterface) == "MyInterface" assert str(MyUnion) == "MyUnion" assert str(MyEnum) == "MyEnum" assert str(MyInputObjectType) == "MyInputObjectType" assert str(NonNull(Int)) == "Int!" assert str(List(Int)) == "[Int]" assert str(NonNull(List(Int))) == "[Int]!" assert str(List(NonNull(Int))) == "[Int!]" assert str(List(List(Int))) == "[[Int]]" # def test_identifies_input_types(): # expected = ( # (GraphQLInt, True), # (ObjectType, False), # (InterfaceType, False), # (UnionType, False), # (EnumType, True), # (InputObjectType, True) # ) # for type_, answer in expected: # assert is_input_type(type_) == answer # assert is_input_type(GraphQLList(type_)) == answer # assert is_input_type(GraphQLNonNull(type_)) == answer # def test_identifies_output_types(): # expected = ( # (GraphQLInt, True), # (ObjectType, True), # (InterfaceType, True), # (UnionType, True), # (EnumType, True), # (InputObjectType, False) # ) # for type, answer in expected: # assert is_output_type(type) == answer # assert is_output_type(GraphQLList(type)) == answer # assert is_output_type(GraphQLNonNull(type)) == answer # def test_prohibits_nesting_nonnull_inside_nonnull(): # with raises(Exception) as excinfo: # GraphQLNonNull(GraphQLNonNull(GraphQLInt)) # assert 'Can only create NonNull of a Nullable GraphQLType but got: Int!.' 
in str(excinfo.value) # def test_prohibits_putting_non_object_types_in_unions(): # bad_union_types = [ # GraphQLInt, # GraphQLNonNull(GraphQLInt), # GraphQLList(GraphQLInt), # InterfaceType, # UnionType, # EnumType, # InputObjectType # ] # for x in bad_union_types: # with raises(Exception) as excinfo: # GraphQLSchema( # GraphQLObjectType( # 'Root', # fields={ # 'union': GraphQLField(GraphQLUnionType('BadUnion', [x])) # } # ) # ) # assert 'BadUnion may only contain Object types, it cannot contain: ' + str(x) + '.' \ # == str(excinfo.value) def test_does_not_mutate_passed_field_definitions(): class CommonFields: field1 = String() field2 = String(id=String()) class TestObject1(CommonFields, ObjectType): pass class TestObject2(CommonFields, ObjectType): pass assert TestObject1._meta.fields == TestObject2._meta.fields class CommonFields: field1 = String() field2 = String() class TestInputObject1(CommonFields, InputObjectType): pass class TestInputObject2(CommonFields, InputObjectType): pass assert TestInputObject1._meta.fields == TestInputObject2._meta.fields def test_graphene_graphql_type_can_be_copied(): class Query(ObjectType): field = String() def resolve_field(self, info): return "" schema = Schema(query=Query) query_type_copy = copy.copy(schema.graphql_schema.query_type) assert query_type_copy.__dict__ == schema.graphql_schema.query_type.__dict__ assert isinstance(schema.graphql_schema.query_type, GrapheneGraphQLType) python-graphene-3.4.3/graphene/types/tests/test_dynamic.py000066400000000000000000000017351471374454500240110ustar00rootroot00000000000000from functools import partial from ..dynamic import Dynamic from ..scalars import String from ..structures import List, NonNull def test_dynamic(): dynamic = Dynamic(lambda: String) assert dynamic.get_type() == String assert str(dynamic.get_type()) == "String" def test_nonnull(): dynamic = Dynamic(lambda: NonNull(String)) assert dynamic.get_type().of_type == String assert str(dynamic.get_type()) == "String!" 
def test_list(): dynamic = Dynamic(lambda: List(String)) assert dynamic.get_type().of_type == String assert str(dynamic.get_type()) == "[String]" def test_list_non_null(): dynamic = Dynamic(lambda: List(NonNull(String))) assert dynamic.get_type().of_type.of_type == String assert str(dynamic.get_type()) == "[String!]" def test_partial(): def __type(_type): return _type dynamic = Dynamic(partial(__type, String)) assert dynamic.get_type() == String assert str(dynamic.get_type()) == "String" python-graphene-3.4.3/graphene/types/tests/test_enum.py000066400000000000000000000326361471374454500233350ustar00rootroot00000000000000from textwrap import dedent from ..argument import Argument from ..enum import Enum, PyEnum from ..field import Field from ..inputfield import InputField from ..inputobjecttype import InputObjectType from ..mutation import Mutation from ..scalars import String from ..schema import ObjectType, Schema def test_enum_construction(): class RGB(Enum): """Description""" RED = 1 GREEN = 2 BLUE = 3 @property def description(self): return f"Description {self.name}" assert RGB._meta.name == "RGB" assert RGB._meta.description == "Description" values = RGB._meta.enum.__members__.values() assert sorted(v.name for v in values) == ["BLUE", "GREEN", "RED"] assert sorted(v.description for v in values) == [ "Description BLUE", "Description GREEN", "Description RED", ] def test_enum_construction_meta(): class RGB(Enum): class Meta: name = "RGBEnum" description = "Description" RED = 1 GREEN = 2 BLUE = 3 assert RGB._meta.name == "RGBEnum" assert RGB._meta.description == "Description" def test_enum_instance_construction(): RGB = Enum("RGB", "RED,GREEN,BLUE") values = RGB._meta.enum.__members__.values() assert sorted(v.name for v in values) == ["BLUE", "GREEN", "RED"] def test_enum_from_builtin_enum(): PyRGB = PyEnum("RGB", "RED,GREEN,BLUE") RGB = Enum.from_enum(PyRGB) assert RGB._meta.enum == PyRGB assert RGB.RED assert RGB.GREEN assert RGB.BLUE def 
test_enum_custom_description_in_constructor(): description = "An enumeration, but with a custom description" RGB = Enum( "RGB", "RED,GREEN,BLUE", description=description, ) assert RGB._meta.description == description def test_enum_from_python3_enum_uses_default_builtin_doc(): RGB = Enum("RGB", "RED,GREEN,BLUE") assert RGB._meta.description == "An enumeration." def test_enum_from_builtin_enum_accepts_lambda_description(): def custom_description(value): if not value: return "StarWars Episodes" return "New Hope Episode" if value == Episode.NEWHOPE else "Other" def custom_deprecation_reason(value): return "meh" if value == Episode.NEWHOPE else None PyEpisode = PyEnum("PyEpisode", "NEWHOPE,EMPIRE,JEDI") Episode = Enum.from_enum( PyEpisode, description=custom_description, deprecation_reason=custom_deprecation_reason, ) class Query(ObjectType): foo = Episode() schema = Schema(query=Query).graphql_schema episode = schema.get_type("PyEpisode") assert episode.description == "StarWars Episodes" assert [ (name, value.description, value.deprecation_reason) for name, value in episode.values.items() ] == [ ("NEWHOPE", "New Hope Episode", "meh"), ("EMPIRE", "Other", None), ("JEDI", "Other", None), ] def test_enum_from_python3_enum_uses_enum_doc(): from enum import Enum as PyEnum class Color(PyEnum): """This is the description""" RED = 1 GREEN = 2 BLUE = 3 RGB = Enum.from_enum(Color) assert RGB._meta.enum == Color assert RGB._meta.description == "This is the description" assert RGB assert RGB.RED assert RGB.GREEN assert RGB.BLUE def test_enum_value_from_class(): class RGB(Enum): RED = 1 GREEN = 2 BLUE = 3 assert RGB.RED.value == 1 assert RGB.GREEN.value == 2 assert RGB.BLUE.value == 3 def test_enum_value_as_unmounted_field(): class RGB(Enum): RED = 1 GREEN = 2 BLUE = 3 unmounted = RGB() unmounted_field = unmounted.Field() assert isinstance(unmounted_field, Field) assert unmounted_field.type == RGB def test_enum_value_as_unmounted_inputfield(): class RGB(Enum): RED = 1 GREEN = 2 
BLUE = 3 unmounted = RGB() unmounted_field = unmounted.InputField() assert isinstance(unmounted_field, InputField) assert unmounted_field.type == RGB def test_enum_value_as_unmounted_argument(): class RGB(Enum): RED = 1 GREEN = 2 BLUE = 3 unmounted = RGB() unmounted_field = unmounted.Argument() assert isinstance(unmounted_field, Argument) assert unmounted_field.type == RGB def test_enum_can_be_compared(): class RGB(Enum): RED = 1 GREEN = 2 BLUE = 3 assert RGB.RED == 1 assert RGB.GREEN == 2 assert RGB.BLUE == 3 def test_enum_can_be_initialized(): class RGB(Enum): RED = 1 GREEN = 2 BLUE = 3 assert RGB.get(1) == RGB.RED assert RGB.get(2) == RGB.GREEN assert RGB.get(3) == RGB.BLUE def test_enum_can_retrieve_members(): class RGB(Enum): RED = 1 GREEN = 2 BLUE = 3 assert RGB["RED"] == RGB.RED assert RGB["GREEN"] == RGB.GREEN assert RGB["BLUE"] == RGB.BLUE def test_enum_to_enum_comparison_should_differ(): class RGB1(Enum): RED = 1 GREEN = 2 BLUE = 3 class RGB2(Enum): RED = 1 GREEN = 2 BLUE = 3 assert RGB1.RED != RGB2.RED assert RGB1.GREEN != RGB2.GREEN assert RGB1.BLUE != RGB2.BLUE def test_enum_skip_meta_from_members(): class RGB1(Enum): class Meta: name = "RGB" RED = 1 GREEN = 2 BLUE = 3 assert dict(RGB1._meta.enum.__members__) == { "RED": RGB1.RED, "GREEN": RGB1.GREEN, "BLUE": RGB1.BLUE, } def test_enum_types(): from enum import Enum as PyEnum class Color(PyEnum): """Primary colors""" RED = 1 YELLOW = 2 BLUE = 3 GColor = Enum.from_enum(Color) class Query(ObjectType): color = GColor(required=True) def resolve_color(_, info): return Color.RED schema = Schema(query=Query) assert ( str(schema).strip() == dedent( ''' type Query { color: Color! 
} """Primary colors""" enum Color { RED YELLOW BLUE } ''' ).strip() ) def test_enum_resolver(): from enum import Enum as PyEnum class Color(PyEnum): RED = 1 GREEN = 2 BLUE = 3 GColor = Enum.from_enum(Color) class Query(ObjectType): color = GColor(required=True) def resolve_color(_, info): return Color.RED schema = Schema(query=Query) results = schema.execute("query { color }") assert not results.errors assert results.data["color"] == Color.RED.name def test_enum_resolver_compat(): from enum import Enum as PyEnum class Color(PyEnum): RED = 1 GREEN = 2 BLUE = 3 GColor = Enum.from_enum(Color) class Query(ObjectType): color = GColor(required=True) color_by_name = GColor(required=True) def resolve_color(_, info): return Color.RED.value def resolve_color_by_name(_, info): return Color.RED.name schema = Schema(query=Query) results = schema.execute( """query { color colorByName }""" ) assert not results.errors assert results.data["color"] == Color.RED.name assert results.data["colorByName"] == Color.RED.name def test_enum_with_name(): from enum import Enum as PyEnum class Color(PyEnum): RED = 1 YELLOW = 2 BLUE = 3 GColor = Enum.from_enum(Color, description="original colors") UniqueGColor = Enum.from_enum( Color, name="UniqueColor", description="unique colors" ) class Query(ObjectType): color = GColor(required=True) unique_color = UniqueGColor(required=True) schema = Schema(query=Query) assert ( str(schema).strip() == dedent( ''' type Query { color: Color! uniqueColor: UniqueColor! 
} """original colors""" enum Color { RED YELLOW BLUE } """unique colors""" enum UniqueColor { RED YELLOW BLUE } ''' ).strip() ) def test_enum_resolver_invalid(): from enum import Enum as PyEnum class Color(PyEnum): RED = 1 GREEN = 2 BLUE = 3 GColor = Enum.from_enum(Color) class Query(ObjectType): color = GColor(required=True) def resolve_color(_, info): return "BLACK" schema = Schema(query=Query) results = schema.execute("query { color }") assert results.errors assert results.errors[0].message == "Enum 'Color' cannot represent value: 'BLACK'" def test_field_enum_argument(): class Color(Enum): RED = 1 GREEN = 2 BLUE = 3 class Brick(ObjectType): color = Color(required=True) color_filter = None class Query(ObjectType): bricks_by_color = Field(Brick, color=Color(required=True)) def resolve_bricks_by_color(_, info, color): nonlocal color_filter color_filter = color return Brick(color=color) schema = Schema(query=Query) results = schema.execute( """ query { bricksByColor(color: RED) { color } } """ ) assert not results.errors assert results.data == {"bricksByColor": {"color": "RED"}} assert color_filter == Color.RED def test_mutation_enum_input(): class RGB(Enum): """Available colors""" RED = 1 GREEN = 2 BLUE = 3 color_input = None class CreatePaint(Mutation): class Arguments: color = RGB(required=True) color = RGB(required=True) def mutate(_, info, color): nonlocal color_input color_input = color return CreatePaint(color=color) class MyMutation(ObjectType): create_paint = CreatePaint.Field() class Query(ObjectType): a = String() schema = Schema(query=Query, mutation=MyMutation) result = schema.execute( """ mutation MyMutation { createPaint(color: RED) { color } } """ ) assert not result.errors assert result.data == {"createPaint": {"color": "RED"}} assert color_input == RGB.RED def test_mutation_enum_input_type(): class RGB(Enum): """Available colors""" RED = 1 GREEN = 2 BLUE = 3 class ColorInput(InputObjectType): color = RGB(required=True) color_input_value = None 
class CreatePaint(Mutation): class Arguments: color_input = ColorInput(required=True) color = RGB(required=True) def mutate(_, info, color_input): nonlocal color_input_value color_input_value = color_input.color return CreatePaint(color=color_input.color) class MyMutation(ObjectType): create_paint = CreatePaint.Field() class Query(ObjectType): a = String() schema = Schema(query=Query, mutation=MyMutation) result = schema.execute( """ mutation MyMutation { createPaint(colorInput: { color: RED }) { color } } """ ) assert not result.errors assert result.data == {"createPaint": {"color": "RED"}} assert color_input_value == RGB.RED def test_hashable_enum(): class RGB(Enum): """Available colors""" RED = 1 GREEN = 2 BLUE = 3 color_map = {RGB.RED: "a", RGB.BLUE: "b", 1: "c"} assert color_map[RGB.RED] == "a" assert color_map[RGB.BLUE] == "b" assert color_map[1] == "c" def test_hashable_instance_creation_enum(): Episode = Enum("Episode", [("NEWHOPE", 4), ("EMPIRE", 5), ("JEDI", 6)]) trilogy_map = {Episode.NEWHOPE: "better", Episode.EMPIRE: "best", 5: "foo"} assert trilogy_map[Episode.NEWHOPE] == "better" assert trilogy_map[Episode.EMPIRE] == "best" assert trilogy_map[5] == "foo" def test_enum_iteration(): class TestEnum(Enum): FIRST = 1 SECOND = 2 result = [] expected_values = ["FIRST", "SECOND"] for c in TestEnum: result.append(c.name) assert result == expected_values def test_iterable_instance_creation_enum(): TestEnum = Enum("TestEnum", [("FIRST", 1), ("SECOND", 2)]) result = [] expected_values = ["FIRST", "SECOND"] for c in TestEnum: result.append(c.name) assert result == expected_values # https://github.com/graphql-python/graphene/issues/1321 def test_enum_description_member_not_interpreted_as_property(): class RGB(Enum): """Description""" red = "red" green = "green" blue = "blue" description = "description" deprecation_reason = "deprecation_reason" class Query(ObjectType): color = RGB() def resolve_color(_, info): return RGB.description values = 
RGB._meta.enum.__members__.values() assert sorted(v.name for v in values) == [ "blue", "deprecation_reason", "description", "green", "red", ] schema = Schema(query=Query) results = schema.execute("query { color }") assert not results.errors assert results.data["color"] == RGB.description.name python-graphene-3.4.3/graphene/types/tests/test_field.py000066400000000000000000000100101471374454500234320ustar00rootroot00000000000000from functools import partial from pytest import raises from ..argument import Argument from ..field import Field from ..scalars import String from ..structures import NonNull from .utils import MyLazyType class MyInstance: value = "value" value_func = staticmethod(lambda: "value_func") def value_method(self): return "value_method" def test_field_basic(): MyType = object() args = {"my arg": Argument(True)} def resolver(): return None deprecation_reason = "Deprecated now" description = "My Field" my_default = "something" field = Field( MyType, name="name", args=args, resolver=resolver, description=description, deprecation_reason=deprecation_reason, default_value=my_default, ) assert field.name == "name" assert field.args == args assert field.resolver == resolver assert field.deprecation_reason == deprecation_reason assert field.description == description assert field.default_value == my_default def test_field_required(): MyType = object() field = Field(MyType, required=True) assert isinstance(field.type, NonNull) assert field.type.of_type == MyType def test_field_default_value_not_callable(): MyType = object() try: Field(MyType, default_value=lambda: True) except AssertionError as e: # substring comparison for py 2/3 compatibility assert "The default value can not be a function but received" in str(e) def test_field_source(): MyType = object() field = Field(MyType, source="value") assert field.resolver(MyInstance(), None) == MyInstance.value def test_field_source_dict_or_attr(): MyType = object() field = Field(MyType, source="value") assert 
field.resolver(MyInstance(), None) == MyInstance.value assert field.resolver({"value": MyInstance.value}, None) == MyInstance.value def test_field_with_lazy_type(): MyType = object() field = Field(lambda: MyType) assert field.type == MyType def test_field_with_lazy_partial_type(): MyType = object() field = Field(partial(lambda: MyType)) assert field.type == MyType def test_field_with_string_type(): field = Field("graphene.types.tests.utils.MyLazyType") assert field.type == MyLazyType def test_field_not_source_and_resolver(): MyType = object() with raises(Exception) as exc_info: Field(MyType, source="value", resolver=lambda: None) assert ( str(exc_info.value) == "A Field cannot have a source and a resolver in at the same time." ) def test_field_source_func(): MyType = object() field = Field(MyType, source="value_func") assert field.resolver(MyInstance(), None) == MyInstance.value_func() def test_field_source_method(): MyType = object() field = Field(MyType, source="value_method") assert field.resolver(MyInstance(), None) == MyInstance().value_method() def test_field_source_as_argument(): MyType = object() field = Field(MyType, source=String()) assert "source" in field.args assert field.args["source"].type == String def test_field_name_as_argument(): MyType = object() field = Field(MyType, name=String()) assert "name" in field.args assert field.args["name"].type == String def test_field_source_argument_as_kw(): MyType = object() deprecation_reason = "deprecated" field = Field( MyType, b=NonNull(True), c=Argument(None, deprecation_reason=deprecation_reason), a=NonNull(False), ) assert list(field.args) == ["b", "c", "a"] assert isinstance(field.args["b"], Argument) assert isinstance(field.args["b"].type, NonNull) assert field.args["b"].type.of_type is True assert isinstance(field.args["c"], Argument) assert field.args["c"].type is None assert field.args["c"].deprecation_reason == deprecation_reason assert isinstance(field.args["a"], Argument) assert 
isinstance(field.args["a"].type, NonNull) assert field.args["a"].type.of_type is False python-graphene-3.4.3/graphene/types/tests/test_generic.py000066400000000000000000000042621471374454500237770ustar00rootroot00000000000000from ..generic import GenericScalar from ..objecttype import ObjectType from ..schema import Schema class Query(ObjectType): generic = GenericScalar(input=GenericScalar()) def resolve_generic(self, info, input=None): return input schema = Schema(query=Query) def test_generic_query_variable(): for generic_value in [ 1, 1.1, True, "str", [1, 2, 3], [1.1, 2.2, 3.3], [True, False], ["str1", "str2"], {"key_a": "a", "key_b": "b"}, { "int": 1, "float": 1.1, "boolean": True, "string": "str", "int_list": [1, 2, 3], "float_list": [1.1, 2.2, 3.3], "boolean_list": [True, False], "string_list": ["str1", "str2"], "nested_dict": {"key_a": "a", "key_b": "b"}, }, None, ]: result = schema.execute( """query Test($generic: GenericScalar){ generic(input: $generic) }""", variables={"generic": generic_value}, ) assert not result.errors assert result.data == {"generic": generic_value} def test_generic_parse_literal_query(): result = schema.execute( """ query { generic(input: { int: 1, float: 1.1 boolean: true, string: "str", int_list: [1, 2, 3], float_list: [1.1, 2.2, 3.3], boolean_list: [true, false] string_list: ["str1", "str2"], nested_dict: { key_a: "a", key_b: "b" }, empty_key: undefined }) } """ ) assert not result.errors assert result.data == { "generic": { "int": 1, "float": 1.1, "boolean": True, "string": "str", "int_list": [1, 2, 3], "float_list": [1.1, 2.2, 3.3], "boolean_list": [True, False], "string_list": ["str1", "str2"], "nested_dict": {"key_a": "a", "key_b": "b"}, "empty_key": None, } } python-graphene-3.4.3/graphene/types/tests/test_inputfield.py000066400000000000000000000025001471374454500245170ustar00rootroot00000000000000from functools import partial from pytest import raises from ..inputfield import InputField from ..structures import NonNull 
from .utils import MyLazyType def test_inputfield_required(): MyType = object() field = InputField(MyType, required=True) assert isinstance(field.type, NonNull) assert field.type.of_type == MyType def test_inputfield_deprecated(): MyType = object() deprecation_reason = "deprecated" field = InputField(MyType, required=False, deprecation_reason=deprecation_reason) assert isinstance(field.type, type(MyType)) assert field.deprecation_reason == deprecation_reason def test_inputfield_required_deprecated(): MyType = object() with raises(AssertionError) as exc_info: InputField(MyType, name="input", required=True, deprecation_reason="deprecated") assert str(exc_info.value) == "InputField input is required, cannot deprecate it." def test_inputfield_with_lazy_type(): MyType = object() field = InputField(lambda: MyType) assert field.type == MyType def test_inputfield_with_lazy_partial_type(): MyType = object() field = InputField(partial(lambda: MyType)) assert field.type == MyType def test_inputfield_with_string_type(): field = InputField("graphene.types.tests.utils.MyLazyType") assert field.type == MyLazyType python-graphene-3.4.3/graphene/types/tests/test_inputobjecttype.py000066400000000000000000000114341471374454500256120ustar00rootroot00000000000000from graphql import Undefined from ..argument import Argument from ..field import Field from ..inputfield import InputField from ..inputobjecttype import InputObjectType from ..objecttype import ObjectType from ..scalars import Boolean, String from ..schema import Schema from ..unmountedtype import UnmountedType from ... 
import NonNull class MyType: pass class MyScalar(UnmountedType): def get_type(self): return MyType def test_generate_inputobjecttype(): class MyInputObjectType(InputObjectType): """Documentation""" assert MyInputObjectType._meta.name == "MyInputObjectType" assert MyInputObjectType._meta.description == "Documentation" assert MyInputObjectType._meta.fields == {} def test_generate_inputobjecttype_with_meta(): class MyInputObjectType(InputObjectType): class Meta: name = "MyOtherInputObjectType" description = "Documentation" assert MyInputObjectType._meta.name == "MyOtherInputObjectType" assert MyInputObjectType._meta.description == "Documentation" def test_generate_inputobjecttype_with_fields(): class MyInputObjectType(InputObjectType): field = Field(MyType) assert "field" in MyInputObjectType._meta.fields def test_ordered_fields_in_inputobjecttype(): class MyInputObjectType(InputObjectType): b = InputField(MyType) a = InputField(MyType) field = MyScalar() asa = InputField(MyType) assert list(MyInputObjectType._meta.fields) == ["b", "a", "field", "asa"] def test_generate_inputobjecttype_unmountedtype(): class MyInputObjectType(InputObjectType): field = MyScalar(MyType) assert "field" in MyInputObjectType._meta.fields assert isinstance(MyInputObjectType._meta.fields["field"], InputField) def test_generate_inputobjecttype_as_argument(): class MyInputObjectType(InputObjectType): field = MyScalar() class MyObjectType(ObjectType): field = Field(MyType, input=MyInputObjectType()) assert "field" in MyObjectType._meta.fields field = MyObjectType._meta.fields["field"] assert isinstance(field, Field) assert field.type == MyType assert "input" in field.args assert isinstance(field.args["input"], Argument) assert field.args["input"].type == MyInputObjectType def test_generate_inputobjecttype_inherit_abstracttype(): class MyAbstractType: field1 = MyScalar(MyType) class MyInputObjectType(InputObjectType, MyAbstractType): field2 = MyScalar(MyType) assert 
list(MyInputObjectType._meta.fields) == ["field1", "field2"] assert [type(x) for x in MyInputObjectType._meta.fields.values()] == [ InputField, InputField, ] def test_generate_inputobjecttype_inherit_abstracttype_reversed(): class MyAbstractType: field1 = MyScalar(MyType) class MyInputObjectType(MyAbstractType, InputObjectType): field2 = MyScalar(MyType) assert list(MyInputObjectType._meta.fields) == ["field1", "field2"] assert [type(x) for x in MyInputObjectType._meta.fields.values()] == [ InputField, InputField, ] def test_inputobjecttype_of_input(): class Child(InputObjectType): first_name = String() last_name = String() @property def full_name(self): return f"{self.first_name} {self.last_name}" class Parent(InputObjectType): child = InputField(Child) class Query(ObjectType): is_child = Boolean(parent=Parent()) def resolve_is_child(self, info, parent): return ( isinstance(parent.child, Child) and parent.child.full_name == "Peter Griffin" ) schema = Schema(query=Query) result = schema.execute( """query basequery { isChild(parent: {child: {firstName: "Peter", lastName: "Griffin"}}) } """ ) assert not result.errors assert result.data == {"isChild": True} def test_inputobjecttype_default_input_as_undefined( set_default_input_object_type_to_undefined, ): class TestUndefinedInput(InputObjectType): required_field = String(required=True) optional_field = String() class Query(ObjectType): undefined_optionals_work = Field(NonNull(Boolean), input=TestUndefinedInput()) def resolve_undefined_optionals_work(self, info, input: TestUndefinedInput): # Confirm that optional_field comes as Undefined return ( input.required_field == "required" and input.optional_field is Undefined ) schema = Schema(query=Query) result = schema.execute( """query basequery { undefinedOptionalsWork(input: {requiredField: "required"}) } """ ) assert not result.errors assert result.data == {"undefinedOptionalsWork": True} 
python-graphene-3.4.3/graphene/types/tests/test_interface.py000066400000000000000000000117251471374454500243250ustar00rootroot00000000000000from ..field import Field from ..interface import Interface from ..objecttype import ObjectType from ..scalars import String from ..schema import Schema from ..unmountedtype import UnmountedType class MyType: pass class MyScalar(UnmountedType): def get_type(self): return MyType def test_generate_interface(): class MyInterface(Interface): """Documentation""" assert MyInterface._meta.name == "MyInterface" assert MyInterface._meta.description == "Documentation" assert MyInterface._meta.fields == {} def test_generate_interface_with_meta(): class MyFirstInterface(Interface): pass class MyInterface(Interface): class Meta: name = "MyOtherInterface" description = "Documentation" interfaces = [MyFirstInterface] assert MyInterface._meta.name == "MyOtherInterface" assert MyInterface._meta.description == "Documentation" assert MyInterface._meta.interfaces == [MyFirstInterface] def test_generate_interface_with_fields(): class MyInterface(Interface): field = Field(MyType) assert "field" in MyInterface._meta.fields def test_ordered_fields_in_interface(): class MyInterface(Interface): b = Field(MyType) a = Field(MyType) field = MyScalar() asa = Field(MyType) assert list(MyInterface._meta.fields) == ["b", "a", "field", "asa"] def test_generate_interface_unmountedtype(): class MyInterface(Interface): field = MyScalar() assert "field" in MyInterface._meta.fields assert isinstance(MyInterface._meta.fields["field"], Field) def test_generate_interface_inherit_abstracttype(): class MyAbstractType: field1 = MyScalar() class MyInterface(Interface, MyAbstractType): field2 = MyScalar() assert list(MyInterface._meta.fields) == ["field1", "field2"] assert [type(x) for x in MyInterface._meta.fields.values()] == [Field, Field] def test_generate_interface_inherit_interface(): class MyBaseInterface(Interface): field1 = MyScalar() class 
MyInterface(MyBaseInterface): field2 = MyScalar() assert MyInterface._meta.name == "MyInterface" assert list(MyInterface._meta.fields) == ["field1", "field2"] assert [type(x) for x in MyInterface._meta.fields.values()] == [Field, Field] def test_generate_interface_inherit_abstracttype_reversed(): class MyAbstractType: field1 = MyScalar() class MyInterface(MyAbstractType, Interface): field2 = MyScalar() assert list(MyInterface._meta.fields) == ["field1", "field2"] assert [type(x) for x in MyInterface._meta.fields.values()] == [Field, Field] def test_resolve_type_default(): class MyInterface(Interface): field2 = String() class MyTestType(ObjectType): class Meta: interfaces = (MyInterface,) class Query(ObjectType): test = Field(MyInterface) def resolve_test(_, info): return MyTestType() schema = Schema(query=Query, types=[MyTestType]) result = schema.execute( """ query { test { __typename } } """ ) assert not result.errors assert result.data == {"test": {"__typename": "MyTestType"}} def test_resolve_type_custom(): class MyInterface(Interface): field2 = String() @classmethod def resolve_type(cls, instance, info): if instance["type"] == 1: return MyTestType1 return MyTestType2 class MyTestType1(ObjectType): class Meta: interfaces = (MyInterface,) class MyTestType2(ObjectType): class Meta: interfaces = (MyInterface,) class Query(ObjectType): test = Field(MyInterface) def resolve_test(_, info): return {"type": 1} schema = Schema(query=Query, types=[MyTestType1, MyTestType2]) result = schema.execute( """ query { test { __typename } } """ ) assert not result.errors assert result.data == {"test": {"__typename": "MyTestType1"}} def test_resolve_type_custom_interferes(): class MyInterface(Interface): field2 = String() type_ = String(name="type") def resolve_type_(_, info): return "foo" class MyTestType1(ObjectType): class Meta: interfaces = (MyInterface,) class MyTestType2(ObjectType): class Meta: interfaces = (MyInterface,) class Query(ObjectType): test = Field(MyInterface) 
def resolve_test(_, info): return MyTestType1() schema = Schema(query=Query, types=[MyTestType1, MyTestType2]) result = schema.execute( """ query { test { __typename type } } """ ) assert not result.errors assert result.data == {"test": {"__typename": "MyTestType1", "type": "foo"}} python-graphene-3.4.3/graphene/types/tests/test_json.py000066400000000000000000000044531471374454500233360ustar00rootroot00000000000000from ..json import JSONString from ..objecttype import ObjectType from ..schema import Schema class Query(ObjectType): json = JSONString(input=JSONString()) def resolve_json(self, info, input): return input schema = Schema(query=Query) def test_jsonstring_query(): json_value = '{"key": "value"}' json_value_quoted = json_value.replace('"', '\\"') result = schema.execute("""{ json(input: "%s") }""" % json_value_quoted) assert not result.errors assert result.data == {"json": json_value} result = schema.execute("""{ json(input: "{}") }""") assert not result.errors assert result.data == {"json": "{}"} def test_jsonstring_query_variable(): json_value = '{"key": "value"}' result = schema.execute( """query Test($json: JSONString){ json(input: $json) }""", variables={"json": json_value}, ) assert not result.errors assert result.data == {"json": json_value} def test_jsonstring_optional_uuid_input(): """ Test that we can provide a null value to an optional input """ result = schema.execute("{ json(input: null) }") assert not result.errors assert result.data == {"json": None} def test_jsonstring_invalid_query(): """ Test that if an invalid type is provided we get an error """ result = schema.execute("{ json(input: 1) }") assert result.errors == [ {"message": "Expected value of type 'JSONString', found 1."}, ] result = schema.execute("{ json(input: {}) }") assert result.errors == [ {"message": "Expected value of type 'JSONString', found {}."}, ] result = schema.execute('{ json(input: "a") }') assert result.errors == [ { "message": "Expected value of type 'JSONString', 
found \"a\"; " "Badly formed JSONString: Expecting value: line 1 column 1 (char 0)", }, ] result = schema.execute("""{ json(input: "{\\'key\\': 0}") }""") assert result.errors == [ {"message": "Syntax Error: Invalid character escape sequence: '\\''."}, ] result = schema.execute("""{ json(input: "{\\"key\\": 0,}") }""") assert len(result.errors) == 1 assert result.errors[0].message.startswith( 'Expected value of type \'JSONString\', found "{\\"key\\": 0,}"; Badly formed JSONString:' ) python-graphene-3.4.3/graphene/types/tests/test_mountedtype.py000066400000000000000000000012121471374454500247300ustar00rootroot00000000000000from ..field import Field from ..scalars import String class CustomField(Field): def __init__(self, *args, **kwargs): self.metadata = kwargs.pop("metadata", None) super(CustomField, self).__init__(*args, **kwargs) def test_mounted_type(): unmounted = String() mounted = Field.mounted(unmounted) assert isinstance(mounted, Field) assert mounted.type == String def test_mounted_type_custom(): unmounted = String(metadata={"hey": "yo!"}) mounted = CustomField.mounted(unmounted) assert isinstance(mounted, CustomField) assert mounted.type == String assert mounted.metadata == {"hey": "yo!"} python-graphene-3.4.3/graphene/types/tests/test_mutation.py000066400000000000000000000131141471374454500242170ustar00rootroot00000000000000from pytest import raises from ..argument import Argument from ..dynamic import Dynamic from ..mutation import Mutation from ..objecttype import ObjectType from ..scalars import String from ..schema import Schema from ..structures import NonNull from ..interface import Interface class MyType(Interface): pass def test_generate_mutation_no_args(): class MyMutation(Mutation): """Documentation""" def mutate(self, info, **args): return args assert issubclass(MyMutation, ObjectType) assert MyMutation._meta.name == "MyMutation" assert MyMutation._meta.description == "Documentation" resolved = MyMutation.Field().resolver(None, None, 
name="Peter") assert resolved == {"name": "Peter"} def test_generate_mutation_with_meta(): class MyMutation(Mutation): class Meta: name = "MyOtherMutation" description = "Documentation" interfaces = (MyType,) def mutate(self, info, **args): return args assert MyMutation._meta.name == "MyOtherMutation" assert MyMutation._meta.description == "Documentation" assert MyMutation._meta.interfaces == (MyType,) resolved = MyMutation.Field().resolver(None, None, name="Peter") assert resolved == {"name": "Peter"} def test_mutation_raises_exception_if_no_mutate(): with raises(AssertionError) as excinfo: class MyMutation(Mutation): pass assert "All mutations must define a mutate method in it" == str(excinfo.value) def test_mutation_custom_output_type(): class User(ObjectType): name = String() class CreateUser(Mutation): class Arguments: name = String() Output = User def mutate(self, info, name): return User(name=name) field = CreateUser.Field() assert field.type == User assert field.args == {"name": Argument(String)} resolved = field.resolver(None, None, name="Peter") assert isinstance(resolved, User) assert resolved.name == "Peter" def test_mutation_execution(): class CreateUser(Mutation): class Arguments: name = String() dynamic = Dynamic(lambda: String()) dynamic_none = Dynamic(lambda: None) name = String() dynamic = Dynamic(lambda: String()) def mutate(self, info, name, dynamic): return CreateUser(name=name, dynamic=dynamic) class Query(ObjectType): a = String() class MyMutation(ObjectType): create_user = CreateUser.Field() schema = Schema(query=Query, mutation=MyMutation) result = schema.execute( """ mutation mymutation { createUser(name:"Peter", dynamic: "dynamic") { name dynamic } } """ ) assert not result.errors assert result.data == {"createUser": {"name": "Peter", "dynamic": "dynamic"}} def test_mutation_no_fields_output(): class CreateUser(Mutation): name = String() def mutate(self, info): return CreateUser() class Query(ObjectType): a = String() class 
MyMutation(ObjectType): create_user = CreateUser.Field() schema = Schema(query=Query, mutation=MyMutation) result = schema.execute( """ mutation mymutation { createUser { name } } """ ) assert not result.errors assert result.data == {"createUser": {"name": None}} def test_mutation_allow_to_have_custom_args(): class CreateUser(Mutation): class Arguments: name = String() name = String() def mutate(self, info, name): return CreateUser(name=name) class MyMutation(ObjectType): create_user = CreateUser.Field( name="createUser", description="Create a user", deprecation_reason="Is deprecated", required=True, ) field = MyMutation._meta.fields["create_user"] assert field.name == "createUser" assert field.description == "Create a user" assert field.deprecation_reason == "Is deprecated" assert field.type == NonNull(CreateUser) def test_mutation_default_args_output(): class CreateUser(Mutation): """Description.""" class Arguments: name = String() name = String() def mutate(self, info, name): return CreateUser(name=name) class MyMutation(ObjectType): create_user = CreateUser.Field() field = MyMutation._meta.fields["create_user"] assert field.name is None assert field.description == "Description." 
assert field.deprecation_reason is None assert field.type == CreateUser def test_mutation_as_subclass(): class BaseCreateUser(Mutation): class Arguments: name = String() name = String() def mutate(self, info, **args): return args class CreateUserWithPlanet(BaseCreateUser): class Arguments(BaseCreateUser.Arguments): planet = String() planet = String() def mutate(self, info, **args): return CreateUserWithPlanet(**args) class MyMutation(ObjectType): create_user_with_planet = CreateUserWithPlanet.Field() class Query(ObjectType): a = String() schema = Schema(query=Query, mutation=MyMutation) result = schema.execute( """ mutation mymutation { createUserWithPlanet(name:"Peter", planet: "earth") { name planet } } """ ) assert not result.errors assert result.data == {"createUserWithPlanet": {"name": "Peter", "planet": "earth"}} python-graphene-3.4.3/graphene/types/tests/test_objecttype.py000066400000000000000000000200721471374454500245300ustar00rootroot00000000000000from pytest import raises from ..field import Field from ..interface import Interface from ..objecttype import ObjectType from ..scalars import String from ..schema import Schema from ..structures import NonNull from ..unmountedtype import UnmountedType class MyType(Interface): pass class Container(ObjectType): field1 = Field(MyType) field2 = Field(MyType) class MyInterface(Interface): ifield = Field(MyType) class ContainerWithInterface(ObjectType): class Meta: interfaces = (MyInterface,) field1 = Field(MyType) field2 = Field(MyType) class MyScalar(UnmountedType): def get_type(self): return MyType def test_generate_objecttype(): class MyObjectType(ObjectType): """Documentation""" assert MyObjectType._meta.name == "MyObjectType" assert MyObjectType._meta.description == "Documentation" assert MyObjectType._meta.interfaces == tuple() assert MyObjectType._meta.fields == {} assert ( repr(MyObjectType) == ">" ) def test_generate_objecttype_with_meta(): class MyObjectType(ObjectType): class Meta: name = 
"MyOtherObjectType" description = "Documentation" interfaces = (MyType,) assert MyObjectType._meta.name == "MyOtherObjectType" assert MyObjectType._meta.description == "Documentation" assert MyObjectType._meta.interfaces == (MyType,) def test_generate_lazy_objecttype(): class MyObjectType(ObjectType): example = Field(lambda: InnerObjectType, required=True) class InnerObjectType(ObjectType): field = Field(MyType) assert MyObjectType._meta.name == "MyObjectType" example_field = MyObjectType._meta.fields["example"] assert isinstance(example_field.type, NonNull) assert example_field.type.of_type == InnerObjectType def test_generate_objecttype_with_fields(): class MyObjectType(ObjectType): field = Field(MyType) assert "field" in MyObjectType._meta.fields def test_generate_objecttype_with_private_attributes(): class MyObjectType(ObjectType): def __init__(self, _private_state=None, **kwargs): self._private_state = _private_state super().__init__(**kwargs) _private_state = None assert "_private_state" not in MyObjectType._meta.fields assert hasattr(MyObjectType, "_private_state") m = MyObjectType(_private_state="custom") assert m._private_state == "custom" with raises(TypeError): MyObjectType(_other_private_state="Wrong") def test_ordered_fields_in_objecttype(): class MyObjectType(ObjectType): b = Field(MyType) a = Field(MyType) field = MyScalar() asa = Field(MyType) assert list(MyObjectType._meta.fields) == ["b", "a", "field", "asa"] def test_generate_objecttype_inherit_abstracttype(): class MyAbstractType: field1 = MyScalar() class MyObjectType(ObjectType, MyAbstractType): field2 = MyScalar() assert MyObjectType._meta.description is None assert MyObjectType._meta.interfaces == () assert MyObjectType._meta.name == "MyObjectType" assert list(MyObjectType._meta.fields) == ["field1", "field2"] assert list(map(type, MyObjectType._meta.fields.values())) == [Field, Field] def test_generate_objecttype_inherit_abstracttype_reversed(): class MyAbstractType: field1 = MyScalar() 
class MyObjectType(MyAbstractType, ObjectType): field2 = MyScalar() assert MyObjectType._meta.description is None assert MyObjectType._meta.interfaces == () assert MyObjectType._meta.name == "MyObjectType" assert list(MyObjectType._meta.fields) == ["field1", "field2"] assert list(map(type, MyObjectType._meta.fields.values())) == [Field, Field] def test_generate_objecttype_unmountedtype(): class MyObjectType(ObjectType): field = MyScalar() assert "field" in MyObjectType._meta.fields assert isinstance(MyObjectType._meta.fields["field"], Field) def test_parent_container_get_fields(): assert list(Container._meta.fields) == ["field1", "field2"] def test_parent_container_interface_get_fields(): assert list(ContainerWithInterface._meta.fields) == ["ifield", "field1", "field2"] def test_objecttype_as_container_only_args(): container = Container("1", "2") assert container.field1 == "1" assert container.field2 == "2" def test_objecttype_repr(): container = Container("1", "2") assert repr(container) == "Container(field1='1', field2='2')" def test_objecttype_eq(): container1 = Container("1", "2") container2 = Container("1", "2") container3 = Container("2", "3") assert container1 == container1 assert container1 == container2 assert container2 != container3 def test_objecttype_as_container_args_kwargs(): container = Container("1", field2="2") assert container.field1 == "1" assert container.field2 == "2" def test_objecttype_as_container_few_kwargs(): container = Container(field2="2") assert container.field2 == "2" def test_objecttype_as_container_all_kwargs(): container = Container(field1="1", field2="2") assert container.field1 == "1" assert container.field2 == "2" def test_objecttype_as_container_extra_args(): msg = r"__init__\(\) takes from 1 to 3 positional arguments but 4 were given" with raises(TypeError, match=msg): Container("1", "2", "3") # type: ignore def test_objecttype_as_container_invalid_kwargs(): msg = r"__init__\(\) got an unexpected keyword argument 
'unexisting_field'" with raises(TypeError, match=msg): Container(unexisting_field="3") # type: ignore def test_objecttype_container_benchmark(benchmark): @benchmark def create_objecttype(): Container(field1="field1", field2="field2") def test_generate_objecttype_description(): class MyObjectType(ObjectType): """ Documentation Documentation line 2 """ assert MyObjectType._meta.description == "Documentation\n\nDocumentation line 2" def test_objecttype_with_possible_types(): class MyObjectType(ObjectType): class Meta: possible_types = (dict,) assert MyObjectType._meta.possible_types == (dict,) def test_objecttype_with_possible_types_and_is_type_of_should_raise(): with raises(AssertionError) as excinfo: class MyObjectType(ObjectType): class Meta: possible_types = (dict,) @classmethod def is_type_of(cls, root, context, info): return False assert str(excinfo.value) == ( "MyObjectType.Meta.possible_types will cause type collision with " "MyObjectType.is_type_of. Please use one or other." ) def test_objecttype_no_fields_output(): class User(ObjectType): name = String() class Query(ObjectType): user = Field(User) def resolve_user(self, info): return User() schema = Schema(query=Query) result = schema.execute( """ query basequery { user { name } } """ ) assert not result.errors assert result.data == {"user": {"name": None}} def test_abstract_objecttype_can_str(): class MyObjectType(ObjectType): class Meta: abstract = True field = MyScalar() assert str(MyObjectType) == "MyObjectType" def test_objecttype_meta_with_annotations(): class Query(ObjectType): class Meta: name: str = "oops" hello = String() def resolve_hello(self, info): return "Hello" schema = Schema(query=Query) assert schema is not None def test_objecttype_meta_arguments(): class MyInterface(Interface): foo = String() class MyType(ObjectType, interfaces=[MyInterface]): bar = String() assert MyType._meta.interfaces == [MyInterface] assert list(MyType._meta.fields.keys()) == ["foo", "bar"] def 
test_objecttype_type_name(): class MyObjectType(ObjectType, name="FooType"): pass assert MyObjectType._meta.name == "FooType" python-graphene-3.4.3/graphene/types/tests/test_query.py000066400000000000000000000321631471374454500235310ustar00rootroot00000000000000import json from functools import partial from graphql import ( GraphQLError, GraphQLResolveInfo as ResolveInfo, Source, execute, parse, ) from ..context import Context from ..dynamic import Dynamic from ..field import Field from ..inputfield import InputField from ..inputobjecttype import InputObjectType from ..interface import Interface from ..objecttype import ObjectType from ..scalars import Boolean, Int, String from ..schema import Schema from ..structures import List, NonNull from ..union import Union def test_query(): class Query(ObjectType): hello = String(resolver=lambda *_: "World") hello_schema = Schema(Query) executed = hello_schema.execute("{ hello }") assert not executed.errors assert executed.data == {"hello": "World"} def test_query_source(): class Root: _hello = "World" def hello(self): return self._hello class Query(ObjectType): hello = String(source="hello") hello_schema = Schema(Query) executed = hello_schema.execute("{ hello }", Root()) assert not executed.errors assert executed.data == {"hello": "World"} def test_query_union(): class one_object: pass class two_object: pass class One(ObjectType): one = String() @classmethod def is_type_of(cls, root, info): return isinstance(root, one_object) class Two(ObjectType): two = String() @classmethod def is_type_of(cls, root, info): return isinstance(root, two_object) class MyUnion(Union): class Meta: types = (One, Two) class Query(ObjectType): unions = List(MyUnion) def resolve_unions(self, info): return [one_object(), two_object()] hello_schema = Schema(Query) executed = hello_schema.execute("{ unions { __typename } }") assert not executed.errors assert executed.data == {"unions": [{"__typename": "One"}, {"__typename": "Two"}]} def 
test_query_interface(): class one_object: pass class two_object: pass class MyInterface(Interface): base = String() class One(ObjectType): class Meta: interfaces = (MyInterface,) one = String() @classmethod def is_type_of(cls, root, info): return isinstance(root, one_object) class Two(ObjectType): class Meta: interfaces = (MyInterface,) two = String() @classmethod def is_type_of(cls, root, info): return isinstance(root, two_object) class Query(ObjectType): interfaces = List(MyInterface) def resolve_interfaces(self, info): return [one_object(), two_object()] hello_schema = Schema(Query, types=[One, Two]) executed = hello_schema.execute("{ interfaces { __typename } }") assert not executed.errors assert executed.data == { "interfaces": [{"__typename": "One"}, {"__typename": "Two"}] } def test_query_dynamic(): class Query(ObjectType): hello = Dynamic(lambda: String(resolver=lambda *_: "World")) hellos = Dynamic(lambda: List(String, resolver=lambda *_: ["Worlds"])) hello_field = Dynamic(lambda: Field(String, resolver=lambda *_: "Field World")) hello_schema = Schema(Query) executed = hello_schema.execute("{ hello hellos helloField }") assert not executed.errors assert executed.data == { "hello": "World", "hellos": ["Worlds"], "helloField": "Field World", } def test_query_default_value(): class MyType(ObjectType): field = String() class Query(ObjectType): hello = Field(MyType, default_value=MyType(field="something else!")) hello_schema = Schema(Query) executed = hello_schema.execute("{ hello { field } }") assert not executed.errors assert executed.data == {"hello": {"field": "something else!"}} def test_query_wrong_default_value(): class MyType(ObjectType): field = String() @classmethod def is_type_of(cls, root, info): return isinstance(root, MyType) class Query(ObjectType): hello = Field(MyType, default_value="hello") hello_schema = Schema(Query) executed = hello_schema.execute("{ hello { field } }") assert len(executed.errors) == 1 assert ( executed.errors[0].message == 
GraphQLError("Expected value of type 'MyType' but got: 'hello'.").message ) assert executed.data == {"hello": None} def test_query_default_value_ignored_by_resolver(): class MyType(ObjectType): field = String() class Query(ObjectType): hello = Field( MyType, default_value="hello", resolver=lambda *_: MyType(field="no default."), ) hello_schema = Schema(Query) executed = hello_schema.execute("{ hello { field } }") assert not executed.errors assert executed.data == {"hello": {"field": "no default."}} def test_query_resolve_function(): class Query(ObjectType): hello = String() def resolve_hello(self, info): return "World" hello_schema = Schema(Query) executed = hello_schema.execute("{ hello }") assert not executed.errors assert executed.data == {"hello": "World"} def test_query_arguments(): class Query(ObjectType): test = String(a_str=String(), a_int=Int()) def resolve_test(self, info, **args): return json.dumps([self, args], separators=(",", ":")) test_schema = Schema(Query) result = test_schema.execute("{ test }", None) assert not result.errors assert result.data == {"test": "[null,{}]"} result = test_schema.execute('{ test(aStr: "String!") }', "Source!") assert not result.errors assert result.data == {"test": '["Source!",{"a_str":"String!"}]'} result = test_schema.execute('{ test(aInt: -123, aStr: "String!") }', "Source!") assert not result.errors assert result.data in [ {"test": '["Source!",{"a_str":"String!","a_int":-123}]'}, {"test": '["Source!",{"a_int":-123,"a_str":"String!"}]'}, ] def test_query_input_field(): class Input(InputObjectType): a_field = String() recursive_field = InputField(lambda: Input) class Query(ObjectType): test = String(a_input=Input()) def resolve_test(self, info, **args): return json.dumps([self, args], separators=(",", ":")) test_schema = Schema(Query) result = test_schema.execute("{ test }", None) assert not result.errors assert result.data == {"test": "[null,{}]"} result = test_schema.execute('{ test(aInput: {aField: "String!"} ) }', 
"Source!") assert not result.errors assert result.data == {"test": '["Source!",{"a_input":{"a_field":"String!"}}]'} result = test_schema.execute( '{ test(aInput: {recursiveField: {aField: "String!"}}) }', "Source!" ) assert not result.errors assert result.data == { "test": '["Source!",{"a_input":{"recursive_field":{"a_field":"String!"}}}]' } def test_query_middlewares(): class Query(ObjectType): hello = String() other = String() def resolve_hello(self, info): return "World" def resolve_other(self, info): return "other" def reversed_middleware(next, *args, **kwargs): return next(*args, **kwargs)[::-1] hello_schema = Schema(Query) executed = hello_schema.execute( "{ hello, other }", middleware=[reversed_middleware] ) assert not executed.errors assert executed.data == {"hello": "dlroW", "other": "rehto"} def test_objecttype_on_instances(): class Ship: def __init__(self, name): self.name = name class ShipType(ObjectType): name = String(description="Ship name", required=True) def resolve_name(self, info): # Here self will be the Ship instance returned in resolve_ship return self.name class Query(ObjectType): ship = Field(ShipType) def resolve_ship(self, info): return Ship(name="xwing") schema = Schema(query=Query) executed = schema.execute("{ ship { name } }") assert not executed.errors assert executed.data == {"ship": {"name": "xwing"}} def test_big_list_query_benchmark(benchmark): big_list = range(10000) class Query(ObjectType): all_ints = List(Int) def resolve_all_ints(self, info): return big_list hello_schema = Schema(Query) big_list_query = partial(hello_schema.execute, "{ allInts }") result = benchmark(big_list_query) assert not result.errors assert result.data == {"allInts": list(big_list)} def test_big_list_query_compiled_query_benchmark(benchmark): big_list = range(100000) class Query(ObjectType): all_ints = List(Int) def resolve_all_ints(self, info): return big_list hello_schema = Schema(Query) graphql_schema = hello_schema.graphql_schema source = Source("{ 
allInts }") query_ast = parse(source) big_list_query = partial(execute, graphql_schema, query_ast) result = benchmark(big_list_query) assert not result.errors assert result.data == {"allInts": list(big_list)} def test_big_list_of_containers_query_benchmark(benchmark): class Container(ObjectType): x = Int() big_container_list = [Container(x=x) for x in range(1000)] class Query(ObjectType): all_containers = List(Container) def resolve_all_containers(self, info): return big_container_list hello_schema = Schema(Query) big_list_query = partial(hello_schema.execute, "{ allContainers { x } }") result = benchmark(big_list_query) assert not result.errors assert result.data == {"allContainers": [{"x": c.x} for c in big_container_list]} def test_big_list_of_containers_multiple_fields_query_benchmark(benchmark): class Container(ObjectType): x = Int() y = Int() z = Int() o = Int() big_container_list = [Container(x=x, y=x, z=x, o=x) for x in range(1000)] class Query(ObjectType): all_containers = List(Container) def resolve_all_containers(self, info): return big_container_list hello_schema = Schema(Query) big_list_query = partial(hello_schema.execute, "{ allContainers { x, y, z, o } }") result = benchmark(big_list_query) assert not result.errors assert result.data == { "allContainers": [ {"x": c.x, "y": c.y, "z": c.z, "o": c.o} for c in big_container_list ] } def test_big_list_of_containers_multiple_fields_custom_resolvers_query_benchmark( benchmark, ): class Container(ObjectType): x = Int() y = Int() z = Int() o = Int() def resolve_x(self, info): return self.x def resolve_y(self, info): return self.y def resolve_z(self, info): return self.z def resolve_o(self, info): return self.o big_container_list = [Container(x=x, y=x, z=x, o=x) for x in range(1000)] class Query(ObjectType): all_containers = List(Container) def resolve_all_containers(self, info): return big_container_list hello_schema = Schema(Query) big_list_query = partial(hello_schema.execute, "{ allContainers { x, y, z, o 
} }") result = benchmark(big_list_query) assert not result.errors assert result.data == { "allContainers": [ {"x": c.x, "y": c.y, "z": c.z, "o": c.o} for c in big_container_list ] } def test_query_annotated_resolvers(): context = Context(key="context") class Query(ObjectType): annotated = String(id=String()) context = String() info = String() def resolve_annotated(self, info, id): return f"{self}-{id}" def resolve_context(self, info): assert isinstance(info.context, Context) return f"{self}-{info.context.key}" def resolve_info(self, info): assert isinstance(info, ResolveInfo) return f"{self}-{info.field_name}" test_schema = Schema(Query) result = test_schema.execute('{ annotated(id:"self") }', "base") assert not result.errors assert result.data == {"annotated": "base-self"} result = test_schema.execute("{ context }", "base", context=context) assert not result.errors assert result.data == {"context": "base-context"} result = test_schema.execute("{ info }", "base") assert not result.errors assert result.data == {"info": "base-info"} def test_default_as_kwarg_to_NonNull(): # Related to https://github.com/graphql-python/graphene/issues/702 class User(ObjectType): name = String() is_admin = NonNull(Boolean, default_value=False) class Query(ObjectType): user = Field(User) def resolve_user(self, *args, **kwargs): return User(name="foo") schema = Schema(query=Query) expected = {"user": {"name": "foo", "isAdmin": False}} result = schema.execute("{ user { name isAdmin } }") assert not result.errors assert result.data == expected python-graphene-3.4.3/graphene/types/tests/test_resolver.py000066400000000000000000000025501471374454500242220ustar00rootroot00000000000000from ..resolver import ( attr_resolver, dict_resolver, dict_or_attr_resolver, get_default_resolver, set_default_resolver, ) args = {} context = None info = None demo_dict = {"attr": "value"} class demo_obj: attr = "value" def test_attr_resolver(): resolved = attr_resolver("attr", None, demo_obj, info, **args) 
assert resolved == "value" def test_attr_resolver_default_value(): resolved = attr_resolver("attr2", "default", demo_obj, info, **args) assert resolved == "default" def test_dict_resolver(): resolved = dict_resolver("attr", None, demo_dict, info, **args) assert resolved == "value" def test_dict_resolver_default_value(): resolved = dict_resolver("attr2", "default", demo_dict, info, **args) assert resolved == "default" def test_dict_or_attr_resolver(): resolved = dict_or_attr_resolver("attr", None, demo_dict, info, **args) assert resolved == "value" resolved = dict_or_attr_resolver("attr", None, demo_obj, info, **args) assert resolved == "value" def test_get_default_resolver_is_attr_resolver(): assert get_default_resolver() == dict_or_attr_resolver def test_set_default_resolver_workd(): default_resolver = get_default_resolver() set_default_resolver(dict_resolver) assert get_default_resolver() == dict_resolver set_default_resolver(default_resolver) python-graphene-3.4.3/graphene/types/tests/test_scalar.py000066400000000000000000000240641471374454500236320ustar00rootroot00000000000000from ..objecttype import ObjectType, Field from ..scalars import Scalar, Int, BigInt, Float, String, Boolean from ..schema import Schema from graphql import Undefined from graphql.language.ast import IntValueNode def test_scalar(): class JSONScalar(Scalar): """Documentation""" assert JSONScalar._meta.name == "JSONScalar" assert JSONScalar._meta.description == "Documentation" def test_ints(): assert Int.parse_value(2**31 - 1) is not Undefined assert Int.parse_value("2.0") == 2 assert Int.parse_value(2**31) is Undefined assert Int.parse_literal(IntValueNode(value=str(2**31 - 1))) == 2**31 - 1 assert Int.parse_literal(IntValueNode(value=str(2**31))) is Undefined assert Int.parse_value(-(2**31)) is not Undefined assert Int.parse_value(-(2**31) - 1) is Undefined assert BigInt.parse_value(2**31) is not Undefined assert BigInt.parse_value("2.0") == 2 assert BigInt.parse_value(-(2**31) - 1) is not 
Undefined assert BigInt.parse_literal(IntValueNode(value=str(2**31 - 1))) == 2**31 - 1 assert BigInt.parse_literal(IntValueNode(value=str(2**31))) == 2**31 def return_input(_parent, _info, input): return input class Optional(ObjectType): int = Int(input=Int(), resolver=return_input) big_int = BigInt(input=BigInt(), resolver=return_input) float = Float(input=Float(), resolver=return_input) bool = Boolean(input=Boolean(), resolver=return_input) string = String(input=String(), resolver=return_input) class Query(ObjectType): optional = Field(Optional) def resolve_optional(self, info): return Optional() def resolve_required(self, info, input): return input schema = Schema(query=Query) class TestInt: def test_query(self): """ Test that a normal query works. """ result = schema.execute("{ optional { int(input: 20) } }") assert not result.errors assert result.data == {"optional": {"int": 20}} def test_optional_input(self): """ Test that we can provide a null value to an optional input """ result = schema.execute("{ optional { int(input: null) } }") assert not result.errors assert result.data == {"optional": {"int": None}} def test_invalid_input(self): """ Test that if an invalid type is provided we get an error """ result = schema.execute('{ optional { int(input: "20") } }') assert result.errors assert len(result.errors) == 1 assert ( result.errors[0].message == 'Int cannot represent non-integer value: "20"' ) result = schema.execute('{ optional { int(input: "a") } }') assert result.errors assert len(result.errors) == 1 assert result.errors[0].message == 'Int cannot represent non-integer value: "a"' result = schema.execute("{ optional { int(input: true) } }") assert result.errors assert len(result.errors) == 1 assert ( result.errors[0].message == "Int cannot represent non-integer value: true" ) class TestBigInt: def test_query(self): """ Test that a normal query works. 
""" value = 2**31 result = schema.execute("{ optional { bigInt(input: %s) } }" % value) assert not result.errors assert result.data == {"optional": {"bigInt": value}} def test_optional_input(self): """ Test that we can provide a null value to an optional input """ result = schema.execute("{ optional { bigInt(input: null) } }") assert not result.errors assert result.data == {"optional": {"bigInt": None}} def test_invalid_input(self): """ Test that if an invalid type is provided we get an error """ result = schema.execute('{ optional { bigInt(input: "20") } }') assert result.errors assert len(result.errors) == 1 assert ( result.errors[0].message == "Expected value of type 'BigInt', found \"20\"." ) result = schema.execute('{ optional { bigInt(input: "a") } }') assert result.errors assert len(result.errors) == 1 assert ( result.errors[0].message == "Expected value of type 'BigInt', found \"a\"." ) result = schema.execute("{ optional { bigInt(input: true) } }") assert result.errors assert len(result.errors) == 1 assert ( result.errors[0].message == "Expected value of type 'BigInt', found true." ) class TestFloat: def test_query(self): """ Test that a normal query works. 
""" result = schema.execute("{ optional { float(input: 20) } }") assert not result.errors assert result.data == {"optional": {"float": 20.0}} result = schema.execute("{ optional { float(input: 20.2) } }") assert not result.errors assert result.data == {"optional": {"float": 20.2}} def test_optional_input(self): """ Test that we can provide a null value to an optional input """ result = schema.execute("{ optional { float(input: null) } }") assert not result.errors assert result.data == {"optional": {"float": None}} def test_invalid_input(self): """ Test that if an invalid type is provided we get an error """ result = schema.execute('{ optional { float(input: "20") } }') assert result.errors assert len(result.errors) == 1 assert ( result.errors[0].message == 'Float cannot represent non numeric value: "20"' ) result = schema.execute('{ optional { float(input: "a") } }') assert result.errors assert len(result.errors) == 1 assert ( result.errors[0].message == 'Float cannot represent non numeric value: "a"' ) result = schema.execute("{ optional { float(input: true) } }") assert result.errors assert len(result.errors) == 1 assert ( result.errors[0].message == "Float cannot represent non numeric value: true" ) class TestBoolean: def test_query(self): """ Test that a normal query works. 
""" result = schema.execute("{ optional { bool(input: true) } }") assert not result.errors assert result.data == {"optional": {"bool": True}} result = schema.execute("{ optional { bool(input: false) } }") assert not result.errors assert result.data == {"optional": {"bool": False}} def test_optional_input(self): """ Test that we can provide a null value to an optional input """ result = schema.execute("{ optional { bool(input: null) } }") assert not result.errors assert result.data == {"optional": {"bool": None}} def test_invalid_input(self): """ Test that if an invalid type is provided we get an error """ result = schema.execute('{ optional { bool(input: "True") } }') assert result.errors assert len(result.errors) == 1 assert ( result.errors[0].message == 'Boolean cannot represent a non boolean value: "True"' ) result = schema.execute('{ optional { bool(input: "true") } }') assert result.errors assert len(result.errors) == 1 assert ( result.errors[0].message == 'Boolean cannot represent a non boolean value: "true"' ) result = schema.execute('{ optional { bool(input: "a") } }') assert result.errors assert len(result.errors) == 1 assert ( result.errors[0].message == 'Boolean cannot represent a non boolean value: "a"' ) result = schema.execute("{ optional { bool(input: 1) } }") assert result.errors assert len(result.errors) == 1 assert ( result.errors[0].message == "Boolean cannot represent a non boolean value: 1" ) result = schema.execute("{ optional { bool(input: 0) } }") assert result.errors assert len(result.errors) == 1 assert ( result.errors[0].message == "Boolean cannot represent a non boolean value: 0" ) class TestString: def test_query(self): """ Test that a normal query works. 
""" result = schema.execute('{ optional { string(input: "something something") } }') assert not result.errors assert result.data == {"optional": {"string": "something something"}} result = schema.execute('{ optional { string(input: "True") } }') assert not result.errors assert result.data == {"optional": {"string": "True"}} result = schema.execute('{ optional { string(input: "0") } }') assert not result.errors assert result.data == {"optional": {"string": "0"}} def test_optional_input(self): """ Test that we can provide a null value to an optional input """ result = schema.execute("{ optional { string(input: null) } }") assert not result.errors assert result.data == {"optional": {"string": None}} def test_invalid_input(self): """ Test that if an invalid type is provided we get an error """ result = schema.execute("{ optional { string(input: 1) } }") assert result.errors assert len(result.errors) == 1 assert ( result.errors[0].message == "String cannot represent a non string value: 1" ) result = schema.execute("{ optional { string(input: 3.2) } }") assert result.errors assert len(result.errors) == 1 assert ( result.errors[0].message == "String cannot represent a non string value: 3.2" ) result = schema.execute("{ optional { string(input: true) } }") assert result.errors assert len(result.errors) == 1 assert ( result.errors[0].message == "String cannot represent a non string value: true" ) python-graphene-3.4.3/graphene/types/tests/test_scalars_serialization.py000066400000000000000000000033471471374454500267530ustar00rootroot00000000000000from graphql import Undefined from ..scalars import Boolean, Float, Int, String def test_serializes_output_int(): assert Int.serialize(1) == 1 assert Int.serialize(0) == 0 assert Int.serialize(-1) == -1 assert Int.serialize(0.1) == 0 assert Int.serialize(1.1) == 1 assert Int.serialize(-1.1) == -1 assert Int.serialize(1e5) == 100000 assert Int.serialize(9876504321) is Undefined assert Int.serialize(-9876504321) is Undefined assert 
Int.serialize(1e100) is Undefined assert Int.serialize(-1e100) is Undefined assert Int.serialize("-1.1") == -1 assert Int.serialize("one") is Undefined assert Int.serialize(False) == 0 assert Int.serialize(True) == 1 def test_serializes_output_float(): assert Float.serialize(1) == 1.0 assert Float.serialize(0) == 0.0 assert Float.serialize(-1) == -1.0 assert Float.serialize(0.1) == 0.1 assert Float.serialize(1.1) == 1.1 assert Float.serialize(-1.1) == -1.1 assert Float.serialize("-1.1") == -1.1 assert Float.serialize("one") is Undefined assert Float.serialize(False) == 0 assert Float.serialize(True) == 1 def test_serializes_output_string(): assert String.serialize("string") == "string" assert String.serialize(1) == "1" assert String.serialize(-1.1) == "-1.1" assert String.serialize(True) == "true" assert String.serialize(False) == "false" assert String.serialize("\U0001f601") == "\U0001f601" def test_serializes_output_boolean(): assert Boolean.serialize("string") is True assert Boolean.serialize("") is False assert Boolean.serialize(1) is True assert Boolean.serialize(0) is False assert Boolean.serialize(True) is True assert Boolean.serialize(False) is False python-graphene-3.4.3/graphene/types/tests/test_schema.py000066400000000000000000000031251471374454500236200ustar00rootroot00000000000000from textwrap import dedent from pytest import raises from graphql.type import GraphQLObjectType, GraphQLSchema from ..field import Field from ..objecttype import ObjectType from ..scalars import String from ..schema import Schema class MyOtherType(ObjectType): field = String() class Query(ObjectType): inner = Field(MyOtherType) def test_schema(): schema = Schema(Query) graphql_schema = schema.graphql_schema assert isinstance(graphql_schema, GraphQLSchema) query_type = graphql_schema.query_type assert isinstance(query_type, GraphQLObjectType) assert query_type.name == "Query" assert query_type.graphene_type is Query def test_schema_get_type(): schema = Schema(Query) assert 
schema.Query == Query assert schema.MyOtherType == MyOtherType def test_schema_get_type_error(): schema = Schema(Query) with raises(AttributeError) as exc_info: schema.X assert str(exc_info.value) == 'Type "X" not found in the Schema' def test_schema_str(): schema = Schema(Query) assert ( str(schema).strip() == dedent( """ type Query { inner: MyOtherType } type MyOtherType { field: String } """ ).strip() ) def test_schema_introspect(): schema = Schema(Query) assert "__schema" in schema.introspect() def test_schema_requires_query_type(): schema = Schema() result = schema.execute("query {}") assert len(result.errors) == 1 error = result.errors[0] assert error.message == "Query root type must be provided." python-graphene-3.4.3/graphene/types/tests/test_structures.py000066400000000000000000000061601471374454500246050ustar00rootroot00000000000000from functools import partial from pytest import raises from ..scalars import String from ..structures import List, NonNull from .utils import MyLazyType def test_list(): _list = List(String) assert _list.of_type == String assert str(_list) == "[String]" def test_list_with_unmounted_type(): with raises(Exception) as exc_info: List(String()) assert ( str(exc_info.value) == "List could not have a mounted String() as inner type. Try with List(String)." 
) def test_list_with_lazy_type(): MyType = object() field = List(lambda: MyType) assert field.of_type == MyType def test_list_with_lazy_partial_type(): MyType = object() field = List(partial(lambda: MyType)) assert field.of_type == MyType def test_list_with_string_type(): field = List("graphene.types.tests.utils.MyLazyType") assert field.of_type == MyLazyType def test_list_inherited_works_list(): _list = List(List(String)) assert isinstance(_list.of_type, List) assert _list.of_type.of_type == String def test_list_inherited_works_nonnull(): _list = List(NonNull(String)) assert isinstance(_list.of_type, NonNull) assert _list.of_type.of_type == String def test_nonnull(): nonnull = NonNull(String) assert nonnull.of_type == String assert str(nonnull) == "String!" def test_nonnull_with_lazy_type(): MyType = object() field = NonNull(lambda: MyType) assert field.of_type == MyType def test_nonnull_with_lazy_partial_type(): MyType = object() field = NonNull(partial(lambda: MyType)) assert field.of_type == MyType def test_nonnull_with_string_type(): field = NonNull("graphene.types.tests.utils.MyLazyType") assert field.of_type == MyLazyType def test_nonnull_inherited_works_list(): _list = NonNull(List(String)) assert isinstance(_list.of_type, List) assert _list.of_type.of_type == String def test_nonnull_inherited_dont_work_nonnull(): with raises(Exception) as exc_info: NonNull(NonNull(String)) assert ( str(exc_info.value) == "Can only create NonNull of a Nullable GraphQLType but got: String!." ) def test_nonnull_with_unmounted_type(): with raises(Exception) as exc_info: NonNull(String()) assert ( str(exc_info.value) == "NonNull could not have a mounted String() as inner type. Try with NonNull(String)." 
) def test_list_comparasion(): list1 = List(String) list2 = List(String) list3 = List(None) list1_argskwargs = List(String, None, b=True) list2_argskwargs = List(String, None, b=True) assert list1 == list2 assert list1 != list3 assert list1_argskwargs == list2_argskwargs assert list1 != list1_argskwargs def test_nonnull_comparasion(): nonnull1 = NonNull(String) nonnull2 = NonNull(String) nonnull3 = NonNull(None) nonnull1_argskwargs = NonNull(String, None, b=True) nonnull2_argskwargs = NonNull(String, None, b=True) assert nonnull1 == nonnull2 assert nonnull1 != nonnull3 assert nonnull1_argskwargs == nonnull2_argskwargs assert nonnull1 != nonnull1_argskwargs python-graphene-3.4.3/graphene/types/tests/test_subscribe_async.py000066400000000000000000000041301471374454500255330ustar00rootroot00000000000000from pytest import mark from graphene import ObjectType, Int, String, Schema, Field class Query(ObjectType): hello = String() def resolve_hello(root, info): return "Hello, world!" class Subscription(ObjectType): count_to_ten = Field(Int) async def subscribe_count_to_ten(root, info): for count in range(1, 11): yield count schema = Schema(query=Query, subscription=Subscription) @mark.asyncio async def test_subscription(): subscription = "subscription { countToTen }" result = await schema.subscribe(subscription) count = 0 async for item in result: count = item.data["countToTen"] assert count == 10 @mark.asyncio async def test_subscription_fails_with_invalid_query(): # It fails if the provided query is invalid subscription = "subscription { " result = await schema.subscribe(subscription) assert not result.data assert result.errors assert "Syntax Error: Expected Name, found " in str(result.errors[0]) @mark.asyncio async def test_subscription_fails_when_query_is_not_valid(): # It can't subscribe to two fields at the same time, triggering a # validation error. 
subscription = "subscription { countToTen, b: countToTen }" result = await schema.subscribe(subscription) assert not result.data assert result.errors assert "Anonymous Subscription must select only one top level field." in str( result.errors[0] ) @mark.asyncio async def test_subscription_with_args(): class Query(ObjectType): hello = String() class Subscription(ObjectType): count_upwards = Field(Int, limit=Int(required=True)) async def subscribe_count_upwards(root, info, limit): count = 0 while count < limit: count += 1 yield count schema = Schema(query=Query, subscription=Subscription) subscription = "subscription { countUpwards(limit: 5) }" result = await schema.subscribe(subscription) count = 0 async for item in result: count = item.data["countUpwards"] assert count == 5 python-graphene-3.4.3/graphene/types/tests/test_type_map.py000066400000000000000000000242611471374454500242020ustar00rootroot00000000000000from graphql import Undefined from graphql.type import ( GraphQLArgument, GraphQLEnumType, GraphQLEnumValue, GraphQLField, GraphQLInputField, GraphQLInputObjectType, GraphQLInterfaceType, GraphQLNonNull, GraphQLObjectType, GraphQLString, ) from ..dynamic import Dynamic from ..enum import Enum from ..field import Field from ..inputfield import InputField from ..inputobjecttype import InputObjectType from ..interface import Interface from ..objecttype import ObjectType from ..scalars import Int, String from ..schema import Schema from ..structures import List, NonNull def create_type_map(types, auto_camelcase=True): query = type("Query", (ObjectType,), {}) schema = Schema(query, types=types, auto_camelcase=auto_camelcase) return schema.graphql_schema.type_map def test_enum(): class MyEnum(Enum): """Description""" foo = 1 bar = 2 @property def description(self): return f"Description {self.name}={self.value}" @property def deprecation_reason(self): if self == MyEnum.foo: return "Is deprecated" type_map = create_type_map([MyEnum]) assert "MyEnum" in type_map 
graphql_enum = type_map["MyEnum"] assert isinstance(graphql_enum, GraphQLEnumType) assert graphql_enum.name == "MyEnum" assert graphql_enum.description == "Description" assert graphql_enum.values == { "foo": GraphQLEnumValue( value=1, description="Description foo=1", deprecation_reason="Is deprecated" ), "bar": GraphQLEnumValue(value=2, description="Description bar=2"), } def test_objecttype(): class MyObjectType(ObjectType): """Description""" foo = String( bar=String(description="Argument description", default_value="x"), description="Field description", ) bar = String(name="gizmo") def resolve_foo(self, bar): return bar type_map = create_type_map([MyObjectType]) assert "MyObjectType" in type_map graphql_type = type_map["MyObjectType"] assert isinstance(graphql_type, GraphQLObjectType) assert graphql_type.name == "MyObjectType" assert graphql_type.description == "Description" fields = graphql_type.fields assert list(fields) == ["foo", "gizmo"] foo_field = fields["foo"] assert isinstance(foo_field, GraphQLField) assert foo_field.description == "Field description" assert foo_field.args == { "bar": GraphQLArgument( GraphQLString, description="Argument description", default_value="x", out_name="bar", ) } def test_required_argument_with_default_value(): class MyObjectType(ObjectType): foo = String(bar=String(required=True, default_value="x")) type_map = create_type_map([MyObjectType]) graphql_type = type_map["MyObjectType"] foo_field = graphql_type.fields["foo"] bar_argument = foo_field.args["bar"] assert bar_argument.default_value == "x" assert isinstance(bar_argument.type, GraphQLNonNull) assert bar_argument.type.of_type == GraphQLString def test_dynamic_objecttype(): class MyObjectType(ObjectType): """Description""" bar = Dynamic(lambda: Field(String)) own = Field(lambda: MyObjectType) type_map = create_type_map([MyObjectType]) assert "MyObjectType" in type_map assert list(MyObjectType._meta.fields) == ["bar", "own"] graphql_type = type_map["MyObjectType"] fields = 
graphql_type.fields assert list(fields) == ["bar", "own"] assert fields["bar"].type == GraphQLString assert fields["own"].type == graphql_type def test_interface(): class MyInterface(Interface): """Description""" foo = String( bar=String(description="Argument description", default_value="x"), description="Field description", ) bar = String(name="gizmo", first_arg=String(), other_arg=String(name="oth_arg")) own = Field(lambda: MyInterface) def resolve_foo(self, args, info): return args.get("bar") type_map = create_type_map([MyInterface]) assert "MyInterface" in type_map graphql_type = type_map["MyInterface"] assert isinstance(graphql_type, GraphQLInterfaceType) assert graphql_type.name == "MyInterface" assert graphql_type.description == "Description" fields = graphql_type.fields assert list(fields) == ["foo", "gizmo", "own"] assert fields["own"].type == graphql_type assert list(fields["gizmo"].args) == ["firstArg", "oth_arg"] foo_field = fields["foo"] assert isinstance(foo_field, GraphQLField) assert foo_field.description == "Field description" assert not foo_field.resolve # Resolver not attached in interfaces assert foo_field.args == { "bar": GraphQLArgument( GraphQLString, description="Argument description", default_value="x", out_name="bar", ) } def test_inputobject(): class OtherObjectType(InputObjectType): thingy = NonNull(Int) class MyInnerObjectType(InputObjectType): some_field = String() some_other_field = List(OtherObjectType) class MyInputObjectType(InputObjectType): """Description""" foo_bar = String(description="Field description") bar = String(name="gizmo") baz = NonNull(MyInnerObjectType) own = InputField(lambda: MyInputObjectType) def resolve_foo_bar(self, args, info): return args.get("bar") type_map = create_type_map([MyInputObjectType]) assert "MyInputObjectType" in type_map graphql_type = type_map["MyInputObjectType"] assert isinstance(graphql_type, GraphQLInputObjectType) assert graphql_type.name == "MyInputObjectType" assert 
graphql_type.description == "Description" other_graphql_type = type_map["OtherObjectType"] inner_graphql_type = type_map["MyInnerObjectType"] container = graphql_type.out_type( { "bar": "oh!", "baz": inner_graphql_type.out_type( { "some_other_field": [ other_graphql_type.out_type({"thingy": 1}), other_graphql_type.out_type({"thingy": 2}), ] } ), } ) assert isinstance(container, MyInputObjectType) assert "bar" in container assert container.bar == "oh!" assert "foo_bar" not in container assert container.foo_bar is None assert container.baz.some_field is None assert container.baz.some_other_field[0].thingy == 1 assert container.baz.some_other_field[1].thingy == 2 fields = graphql_type.fields assert list(fields) == ["fooBar", "gizmo", "baz", "own"] own_field = fields["own"] assert own_field.type == graphql_type foo_field = fields["fooBar"] assert isinstance(foo_field, GraphQLInputField) assert foo_field.description == "Field description" def test_inputobject_undefined(set_default_input_object_type_to_undefined): class OtherObjectType(InputObjectType): optional_field = String() type_map = create_type_map([OtherObjectType]) assert "OtherObjectType" in type_map graphql_type = type_map["OtherObjectType"] container = graphql_type.out_type({}) assert container.optional_field is Undefined def test_objecttype_camelcase(): class MyObjectType(ObjectType): """Description""" foo_bar = String(bar_foo=String()) type_map = create_type_map([MyObjectType]) assert "MyObjectType" in type_map graphql_type = type_map["MyObjectType"] assert isinstance(graphql_type, GraphQLObjectType) assert graphql_type.name == "MyObjectType" assert graphql_type.description == "Description" fields = graphql_type.fields assert list(fields) == ["fooBar"] foo_field = fields["fooBar"] assert isinstance(foo_field, GraphQLField) assert foo_field.args == { "barFoo": GraphQLArgument( GraphQLString, default_value=Undefined, out_name="bar_foo" ) } def test_objecttype_camelcase_disabled(): class 
MyObjectType(ObjectType): """Description""" foo_bar = String(bar_foo=String()) type_map = create_type_map([MyObjectType], auto_camelcase=False) assert "MyObjectType" in type_map graphql_type = type_map["MyObjectType"] assert isinstance(graphql_type, GraphQLObjectType) assert graphql_type.name == "MyObjectType" assert graphql_type.description == "Description" fields = graphql_type.fields assert list(fields) == ["foo_bar"] foo_field = fields["foo_bar"] assert isinstance(foo_field, GraphQLField) assert foo_field.args == { "bar_foo": GraphQLArgument( GraphQLString, default_value=Undefined, out_name="bar_foo" ) } def test_objecttype_with_possible_types(): class MyObjectType(ObjectType): """Description""" class Meta: possible_types = (dict,) foo_bar = String() type_map = create_type_map([MyObjectType]) graphql_type = type_map["MyObjectType"] assert graphql_type.is_type_of assert graphql_type.is_type_of({}, None) is True assert graphql_type.is_type_of(MyObjectType(), None) is False def test_interface_with_interfaces(): class FooInterface(Interface): foo = String() class BarInterface(Interface): class Meta: interfaces = [FooInterface] foo = String() bar = String() type_map = create_type_map([FooInterface, BarInterface]) assert "FooInterface" in type_map foo_graphql_type = type_map["FooInterface"] assert isinstance(foo_graphql_type, GraphQLInterfaceType) assert foo_graphql_type.name == "FooInterface" assert "BarInterface" in type_map bar_graphql_type = type_map["BarInterface"] assert isinstance(bar_graphql_type, GraphQLInterfaceType) assert bar_graphql_type.name == "BarInterface" fields = bar_graphql_type.fields assert list(fields) == ["foo", "bar"] assert isinstance(fields["foo"], GraphQLField) assert isinstance(fields["bar"], GraphQLField) assert list(bar_graphql_type.interfaces) == list([foo_graphql_type]) python-graphene-3.4.3/graphene/types/tests/test_union.py000066400000000000000000000026551471374454500235170ustar00rootroot00000000000000from pytest import raises from 
..field import Field from ..objecttype import ObjectType from ..union import Union from ..unmountedtype import UnmountedType class MyObjectType1(ObjectType): pass class MyObjectType2(ObjectType): pass def test_generate_union(): class MyUnion(Union): """Documentation""" class Meta: types = (MyObjectType1, MyObjectType2) assert MyUnion._meta.name == "MyUnion" assert MyUnion._meta.description == "Documentation" assert MyUnion._meta.types == (MyObjectType1, MyObjectType2) def test_generate_union_with_meta(): class MyUnion(Union): class Meta: name = "MyOtherUnion" description = "Documentation" types = (MyObjectType1, MyObjectType2) assert MyUnion._meta.name == "MyOtherUnion" assert MyUnion._meta.description == "Documentation" def test_generate_union_with_no_types(): with raises(Exception) as exc_info: class MyUnion(Union): pass assert str(exc_info.value) == "Must provide types for Union MyUnion." def test_union_can_be_mounted(): class MyUnion(Union): class Meta: types = (MyObjectType1, MyObjectType2) my_union_instance = MyUnion() assert isinstance(my_union_instance, UnmountedType) my_union_field = my_union_instance.mount_as(Field) assert isinstance(my_union_field, Field) assert my_union_field.type == MyUnion python-graphene-3.4.3/graphene/types/tests/test_uuid.py000066400000000000000000000045551471374454500233360ustar00rootroot00000000000000from ..objecttype import ObjectType from ..schema import Schema from ..uuid import UUID from ..structures import NonNull class Query(ObjectType): uuid = UUID(input=UUID()) required_uuid = UUID(input=NonNull(UUID), required=True) def resolve_uuid(self, info, input): return input def resolve_required_uuid(self, info, input): return input schema = Schema(query=Query) def test_uuidstring_query(): uuid_value = "dfeb3bcf-70fd-11e7-a61a-6003088f8204" result = schema.execute("""{ uuid(input: "%s") }""" % uuid_value) assert not result.errors assert result.data == {"uuid": uuid_value} def test_uuidstring_query_variable(): uuid_value = 
"dfeb3bcf-70fd-11e7-a61a-6003088f8204" result = schema.execute( """query Test($uuid: UUID){ uuid(input: $uuid) }""", variables={"uuid": uuid_value}, ) assert not result.errors assert result.data == {"uuid": uuid_value} def test_uuidstring_invalid_argument(): uuid_value = {"not": "a string"} result = schema.execute( """query Test($uuid: UUID){ uuid(input: $uuid) }""", variables={"uuid": uuid_value}, ) assert result.errors assert len(result.errors) == 1 assert ( result.errors[0].message == "Variable '$uuid' got invalid value {'not': 'a string'}; UUID cannot represent value: {'not': 'a string'}" ) def test_uuidstring_optional_uuid_input(): """ Test that we can provide a null value to an optional input """ result = schema.execute("{ uuid(input: null) }") assert not result.errors assert result.data == {"uuid": None} def test_uuidstring_invalid_query(): """ Test that if an invalid type is provided we get an error """ result = schema.execute("{ uuid(input: 1) }") assert result.errors assert len(result.errors) == 1 assert result.errors[0].message == "Expected value of type 'UUID', found 1." result = schema.execute('{ uuid(input: "a") }') assert result.errors assert len(result.errors) == 1 assert ( result.errors[0].message == "Expected value of type 'UUID', found \"a\"; badly formed hexadecimal UUID string" ) result = schema.execute("{ requiredUuid(input: null) }") assert result.errors assert len(result.errors) == 1 assert result.errors[0].message == "Expected value of type 'UUID!', found null." 
class UnionOptions(BaseOptions):
    # ObjectType classes this union may resolve to.
    types = ()  # type: Iterable[Type[ObjectType]]


class Union(UnmountedType, BaseType):
    """GraphQL Union type definition.

    A Union describes a field that can return one of a heterogeneous set of
    ObjectTypes, together with a way to decide which concrete type a resolved
    value belongs to. Which type is used can be decided per-ObjectType via
    ``Meta.possible_types`` / ``is_type_of``, or centrally by overriding the
    ``resolve_type`` class method on the Union itself.

    .. code:: python

        from graphene import Union, ObjectType, List

        class SearchResult(Union):
            class Meta:
                types = (Human, Droid, Starship)

        class Query(ObjectType):
            search = List(SearchResult.Field(
                search_text=String(description='Value to search for'))
            )

    Meta:
        types (Iterable[graphene.ObjectType]): Required. The ObjectTypes this
            union may return in the GraphQL schema.
        name (optional, str): unique GraphQL type name; defaults to the class name.
        description (optional, str): schema description; defaults to the class
            docstring.
    """

    @classmethod
    def __init_subclass_with_meta__(cls, types=None, _meta=None, **options):
        # A union is meaningless without at least one member type.
        assert isinstance(types, (list, tuple)) and len(types) > 0, (
            f"Must provide types for Union {cls.__name__}."
        )
        _meta = _meta or UnionOptions(cls)
        _meta.types = types
        super(Union, cls).__init_subclass_with_meta__(_meta=_meta, **options)

    @classmethod
    def get_type(cls):
        """
        This function is called when the unmounted type (Union instance)
        is mounted (as a Field, InputField or Argument)
        """
        return cls

    @classmethod
    def resolve_type(cls, instance, info):
        # Default type resolution: any ObjectType instance resolves to its
        # own class; anything else yields None (unresolved).
        from .objecttype import ObjectType  # NOQA

        if isinstance(instance, ObjectType):
            return type(instance)
        return None
""" def __init__(self, *args, **kwargs): super(UnmountedType, self).__init__() self.args = args self.kwargs = kwargs def get_type(self): """ This function is called when the UnmountedType instance is mounted (as a Field, InputField or Argument) """ raise NotImplementedError(f"get_type not implemented in {self}") def mount_as(self, _as): return _as.mounted(self) def Field(self): # noqa: N802 """ Mount the UnmountedType as Field """ from .field import Field return self.mount_as(Field) def InputField(self): # noqa: N802 """ Mount the UnmountedType as InputField """ from .inputfield import InputField return self.mount_as(InputField) def Argument(self): # noqa: N802 """ Mount the UnmountedType as Argument """ from .argument import Argument return self.mount_as(Argument) def __eq__(self, other): return self is other or ( isinstance(other, UnmountedType) and self.get_type() == other.get_type() and self.args == other.args and self.kwargs == other.kwargs ) python-graphene-3.4.3/graphene/types/utils.py000066400000000000000000000024441471374454500213220ustar00rootroot00000000000000import inspect from functools import partial from ..utils.module_loading import import_string from .mountedtype import MountedType from .unmountedtype import UnmountedType def get_field_as(value, _as=None): """ Get type mounted """ if isinstance(value, MountedType): return value elif isinstance(value, UnmountedType): if _as is None: return value return _as.mounted(value) def yank_fields_from_attrs(attrs, _as=None, sort=True): """ Extract all the fields in given attributes (dict) and return them ordered """ fields_with_names = [] for attname, value in list(attrs.items()): field = get_field_as(value, _as) if not field: continue fields_with_names.append((attname, field)) if sort: fields_with_names = sorted(fields_with_names, key=lambda f: f[1]) return dict(fields_with_names) def get_type(_type): if isinstance(_type, str): return import_string(_type) if inspect.isfunction(_type) or isinstance(_type, 
partial): return _type() return _type def get_underlying_type(_type): """Get the underlying type even if it is wrapped in structures like NonNull""" while hasattr(_type, "of_type"): _type = _type.of_type return _type python-graphene-3.4.3/graphene/types/uuid.py000066400000000000000000000017751471374454500211360ustar00rootroot00000000000000from uuid import UUID as _UUID from graphql.error import GraphQLError from graphql.language.ast import StringValueNode from graphql import Undefined from .scalars import Scalar class UUID(Scalar): """ Leverages the internal Python implementation of UUID (uuid.UUID) to provide native UUID objects in fields, resolvers and input. """ @staticmethod def serialize(uuid): if isinstance(uuid, str): uuid = _UUID(uuid) assert isinstance(uuid, _UUID), f"Expected UUID instance, received {uuid}" return str(uuid) @staticmethod def parse_literal(node, _variables=None): if isinstance(node, StringValueNode): return _UUID(node.value) return Undefined @staticmethod def parse_value(value): if isinstance(value, _UUID): return value try: return _UUID(value) except (ValueError, AttributeError): raise GraphQLError(f"UUID cannot represent value: {repr(value)}") python-graphene-3.4.3/graphene/utils/000077500000000000000000000000001471374454500176005ustar00rootroot00000000000000python-graphene-3.4.3/graphene/utils/__init__.py000066400000000000000000000000001471374454500216770ustar00rootroot00000000000000python-graphene-3.4.3/graphene/utils/crunch.py000066400000000000000000000013641471374454500214400ustar00rootroot00000000000000import json from collections.abc import Mapping def to_key(value): return json.dumps(value) def insert(value, index, values): key = to_key(value) if key not in index: index[key] = len(values) values.append(value) return len(values) - 1 return index.get(key) def flatten(data, index, values): if isinstance(data, (list, tuple)): flattened = [flatten(child, index, values) for child in data] elif isinstance(data, Mapping): flattened = 
{key: flatten(child, index, values) for key, child in data.items()} else: flattened = data return insert(flattened, index, values) def crunch(data): index = {} values = [] flatten(data, index, values) return values python-graphene-3.4.3/graphene/utils/dataloader.py000066400000000000000000000214621471374454500222570ustar00rootroot00000000000000from asyncio import ( gather, ensure_future, get_event_loop, iscoroutine, iscoroutinefunction, ) from collections import namedtuple from collections.abc import Iterable from functools import partial from typing import List Loader = namedtuple("Loader", "key,future") def iscoroutinefunctionorpartial(fn): return iscoroutinefunction(fn.func if isinstance(fn, partial) else fn) class DataLoader(object): batch = True max_batch_size = None # type: int cache = True def __init__( self, batch_load_fn=None, batch=None, max_batch_size=None, cache=None, get_cache_key=None, cache_map=None, loop=None, ): self._loop = loop if batch_load_fn is not None: self.batch_load_fn = batch_load_fn assert iscoroutinefunctionorpartial( self.batch_load_fn ), "batch_load_fn must be coroutine. Received: {}".format(self.batch_load_fn) if not callable(self.batch_load_fn): raise TypeError( # pragma: no cover ( "DataLoader must be have a batch_load_fn which accepts " "Iterable and returns Future>, but got: {}." ).format(batch_load_fn) ) if batch is not None: self.batch = batch # pragma: no cover if max_batch_size is not None: self.max_batch_size = max_batch_size if cache is not None: self.cache = cache # pragma: no cover self.get_cache_key = get_cache_key or (lambda x: x) self._cache = cache_map if cache_map is not None else {} self._queue: List[Loader] = [] @property def loop(self): if not self._loop: self._loop = get_event_loop() return self._loop def load(self, key=None): """ Loads a key, returning a `Future` for the value represented by that key. 
""" if key is None: raise TypeError( # pragma: no cover ( "The loader.load() function must be called with a value, " "but got: {}." ).format(key) ) cache_key = self.get_cache_key(key) # If caching and there is a cache-hit, return cached Future. if self.cache: cached_result = self._cache.get(cache_key) if cached_result: return cached_result # Otherwise, produce a new Future for this value. future = self.loop.create_future() # If caching, cache this Future. if self.cache: self._cache[cache_key] = future self.do_resolve_reject(key, future) return future def do_resolve_reject(self, key, future): # Enqueue this Future to be dispatched. self._queue.append(Loader(key=key, future=future)) # Determine if a dispatch of this queue should be scheduled. # A single dispatch should be scheduled per queue at the time when the # queue changes from "empty" to "full". if len(self._queue) == 1: if self.batch: # If batching, schedule a task to dispatch the queue. enqueue_post_future_job(self.loop, self) else: # Otherwise dispatch the (queue of one) immediately. dispatch_queue(self) # pragma: no cover def load_many(self, keys): """ Loads multiple keys, returning a list of values >>> a, b = await my_loader.load_many([ 'a', 'b' ]) This is equivalent to the more verbose: >>> a, b = await gather( >>> my_loader.load('a'), >>> my_loader.load('b') >>> ) """ if not isinstance(keys, Iterable): raise TypeError( # pragma: no cover ( "The loader.load_many() function must be called with Iterable " "but got: {}." ).format(keys) ) return gather(*[self.load(key) for key in keys]) def clear(self, key): """ Clears the value at `key` from the cache, if it exists. Returns itself for method chaining. """ cache_key = self.get_cache_key(key) self._cache.pop(cache_key, None) return self def clear_all(self): """ Clears the entire cache. To be used when some event results in unknown invalidations across this particular `DataLoader`. Returns itself for method chaining. 
""" self._cache.clear() return self def prime(self, key, value): """ Adds the provied key and value to the cache. If the key already exists, no change is made. Returns itself for method chaining. """ cache_key = self.get_cache_key(key) # Only add the key if it does not already exist. if cache_key not in self._cache: # Cache a rejected future if the value is an Error, in order to match # the behavior of load(key). future = self.loop.create_future() if isinstance(value, Exception): future.set_exception(value) else: future.set_result(value) self._cache[cache_key] = future return self def enqueue_post_future_job(loop, loader): async def dispatch(): dispatch_queue(loader) loop.call_soon(ensure_future, dispatch()) def get_chunks(iterable_obj, chunk_size=1): chunk_size = max(1, chunk_size) return ( iterable_obj[i : i + chunk_size] for i in range(0, len(iterable_obj), chunk_size) ) def dispatch_queue(loader): """ Given the current state of a Loader instance, perform a batch load from its current queue. """ # Take the current loader queue, replacing it with an empty queue. queue = loader._queue loader._queue = [] # If a max_batch_size was provided and the queue is longer, then segment the # queue into multiple batches, otherwise treat the queue as a single batch. max_batch_size = loader.max_batch_size if max_batch_size and max_batch_size < len(queue): chunks = get_chunks(queue, max_batch_size) for chunk in chunks: ensure_future(dispatch_queue_batch(loader, chunk)) else: ensure_future(dispatch_queue_batch(loader, queue)) async def dispatch_queue_batch(loader, queue): # Collect all keys to be loaded in this dispatch keys = [loaded.key for loaded in queue] # Call the provided batch_load_fn for this loader with the loader queue's keys. 
batch_future = loader.batch_load_fn(keys) # Assert the expected response from batch_load_fn if not batch_future or not iscoroutine(batch_future): return failed_dispatch( # pragma: no cover loader, queue, TypeError( ( "DataLoader must be constructed with a function which accepts " "Iterable and returns Future>, but the function did " "not return a Coroutine: {}." ).format(batch_future) ), ) try: values = await batch_future if not isinstance(values, Iterable): raise TypeError( # pragma: no cover ( "DataLoader must be constructed with a function which accepts " "Iterable and returns Future>, but the function did " "not return a Future of a Iterable: {}." ).format(values) ) values = list(values) if len(values) != len(keys): raise TypeError( # pragma: no cover ( "DataLoader must be constructed with a function which accepts " "Iterable and returns Future>, but the function did " "not return a Future of a Iterable with the same length as the Iterable " "of keys." "\n\nKeys:\n{}" "\n\nValues:\n{}" ).format(keys, values) ) # Step through the values, resolving or rejecting each Future in the # loaded queue. for loaded, value in zip(queue, values): if isinstance(value, Exception): loaded.future.set_exception(value) else: loaded.future.set_result(value) except Exception as e: return failed_dispatch(loader, queue, e) def failed_dispatch(loader, queue, error): """ Do not cache individual loads if the entire batch dispatch fails, but still reject each request so they do not hang. 
""" for loaded in queue: loader.clear(loaded.key) loaded.future.set_exception(error) python-graphene-3.4.3/graphene/utils/deduplicator.py000066400000000000000000000016271471374454500226370ustar00rootroot00000000000000from collections.abc import Mapping def deflate(node, index=None, path=None): if index is None: index = {} if path is None: path = [] if node and "id" in node and "__typename" in node: route = ",".join(path) cache_key = ":".join([route, str(node["__typename"]), str(node["id"])]) if index.get(cache_key) is True: return {"__typename": node["__typename"], "id": node["id"]} else: index[cache_key] = True result = {} for field_name in node: value = node[field_name] new_path = path + [field_name] if isinstance(value, (list, tuple)): result[field_name] = [deflate(child, index, new_path) for child in value] elif isinstance(value, Mapping): result[field_name] = deflate(value, index, new_path) else: result[field_name] = value return result python-graphene-3.4.3/graphene/utils/deprecated.py000066400000000000000000000001671471374454500222560ustar00rootroot00000000000000from warnings import warn def warn_deprecation(text: str): warn(text, category=DeprecationWarning, stacklevel=2) python-graphene-3.4.3/graphene/utils/get_unbound_function.py000066400000000000000000000001711471374454500243670ustar00rootroot00000000000000def get_unbound_function(func): if not getattr(func, "__self__", True): return func.__func__ return func python-graphene-3.4.3/graphene/utils/is_introspection_key.py000066400000000000000000000005071471374454500244170ustar00rootroot00000000000000def is_introspection_key(key): # from: https://spec.graphql.org/June2018/#sec-Schema # > All types and directives defined within a schema must not have a name which # > begins with "__" (two underscores), as this is used exclusively # > by GraphQL’s introspection system. 
return str(key).startswith("__") python-graphene-3.4.3/graphene/utils/module_loading.py000066400000000000000000000030631471374454500231360ustar00rootroot00000000000000from functools import partial from importlib import import_module def import_string(dotted_path, dotted_attributes=None): """ Import a dotted module path and return the attribute/class designated by the last name in the path. When a dotted attribute path is also provided, the dotted attribute path would be applied to the attribute/class retrieved from the first step, and return the corresponding value designated by the attribute path. Raise ImportError if the import failed. """ try: module_path, class_name = dotted_path.rsplit(".", 1) except ValueError: raise ImportError("%s doesn't look like a module path" % dotted_path) module = import_module(module_path) try: result = getattr(module, class_name) except AttributeError: raise ImportError( 'Module "%s" does not define a "%s" attribute/class' % (module_path, class_name) ) if not dotted_attributes: return result attributes = dotted_attributes.split(".") traveled_attributes = [] try: for attribute in attributes: traveled_attributes.append(attribute) result = getattr(result, attribute) return result except AttributeError: raise ImportError( 'Module "%s" does not define a "%s" attribute inside attribute/class "%s"' % (module_path, ".".join(traveled_attributes), class_name) ) def lazy_import(dotted_path, dotted_attributes=None): return partial(import_string, dotted_path, dotted_attributes) python-graphene-3.4.3/graphene/utils/orderedtype.py000066400000000000000000000023051471374454500225000ustar00rootroot00000000000000from functools import total_ordering @total_ordering class OrderedType: creation_counter = 1 def __init__(self, _creation_counter=None): self.creation_counter = _creation_counter or self.gen_counter() @staticmethod def gen_counter(): counter = OrderedType.creation_counter OrderedType.creation_counter += 1 return counter def 
reset_counter(self): self.creation_counter = self.gen_counter() def __eq__(self, other): # Needed for @total_ordering if isinstance(self, type(other)): return self.creation_counter == other.creation_counter return NotImplemented def __lt__(self, other): # This is needed because bisect does not take a comparison function. if isinstance(other, OrderedType): return self.creation_counter < other.creation_counter return NotImplemented def __gt__(self, other): # This is needed because bisect does not take a comparison function. if isinstance(other, OrderedType): return self.creation_counter > other.creation_counter return NotImplemented def __hash__(self): return hash(self.creation_counter) python-graphene-3.4.3/graphene/utils/props.py000066400000000000000000000003451471374454500213170ustar00rootroot00000000000000class _OldClass: pass class _NewClass: pass _all_vars = set(dir(_OldClass) + dir(_NewClass)) def props(x): return { key: vars(x).get(key, getattr(x, key)) for key in dir(x) if key not in _all_vars } python-graphene-3.4.3/graphene/utils/resolve_only_args.py000066400000000000000000000004051471374454500237050ustar00rootroot00000000000000from functools import wraps from typing_extensions import deprecated @deprecated("This function is deprecated") def resolve_only_args(func): @wraps(func) def wrapped_func(root, info, **args): return func(root, **args) return wrapped_func python-graphene-3.4.3/graphene/utils/str_converters.py000066400000000000000000000011531471374454500232340ustar00rootroot00000000000000import re # Adapted from this response in Stackoverflow # http://stackoverflow.com/a/19053800/1072990 def to_camel_case(snake_str): components = snake_str.split("_") # We capitalize the first letter of each component except the first one # with the 'capitalize' method and join them together. 
return components[0] + "".join(x.capitalize() if x else "_" for x in components[1:]) # From this response in Stackoverflow # http://stackoverflow.com/a/1176023/1072990 def to_snake_case(name): s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name) return re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1).lower() python-graphene-3.4.3/graphene/utils/subclass_with_meta.py000066400000000000000000000031201471374454500240260ustar00rootroot00000000000000from inspect import isclass from .props import props class SubclassWithMeta_Meta(type): _meta = None def __str__(cls): if cls._meta: return cls._meta.name return cls.__name__ def __repr__(cls): return f"<{cls.__name__} meta={repr(cls._meta)}>" class SubclassWithMeta(metaclass=SubclassWithMeta_Meta): """This class improves __init_subclass__ to receive automatically the options from meta""" def __init_subclass__(cls, **meta_options): """This method just terminates the super() chain""" _Meta = getattr(cls, "Meta", None) _meta_props = {} if _Meta: if isinstance(_Meta, dict): _meta_props = _Meta elif isclass(_Meta): _meta_props = props(_Meta) else: raise Exception( f"Meta have to be either a class or a dict. Received {_Meta}" ) delattr(cls, "Meta") options = dict(meta_options, **_meta_props) abstract = options.pop("abstract", False) if abstract: assert not options, ( "Abstract types can only contain the abstract attribute. 
" f"Received: abstract, {', '.join(options)}" ) else: super_class = super(cls, cls) if hasattr(super_class, "__init_subclass_with_meta__"): super_class.__init_subclass_with_meta__(**options) @classmethod def __init_subclass_with_meta__(cls, **meta_options): """This method just terminates the super() chain""" python-graphene-3.4.3/graphene/utils/tests/000077500000000000000000000000001471374454500207425ustar00rootroot00000000000000python-graphene-3.4.3/graphene/utils/tests/__init__.py000066400000000000000000000000001471374454500230410ustar00rootroot00000000000000python-graphene-3.4.3/graphene/utils/tests/test_crunch.py000066400000000000000000000033361471374454500236420ustar00rootroot00000000000000from pytest import mark from ..crunch import crunch @mark.parametrize( "description,uncrunched,crunched", [ ["number primitive", 0, [0]], ["boolean primitive", True, [True]], ["string primitive", "string", ["string"]], ["empty array", [], [[]]], ["single-item array", [None], [None, [0]]], [ "multi-primitive all distinct array", [None, 0, True, "string"], [None, 0, True, "string", [0, 1, 2, 3]], ], [ "multi-primitive repeated array", [True, True, True, True], [True, [0, 0, 0, 0]], ], ["one-level nested array", [[1, 2, 3]], [1, 2, 3, [0, 1, 2], [3]]], ["two-level nested array", [[[1, 2, 3]]], [1, 2, 3, [0, 1, 2], [3], [4]]], ["empty object", {}, [{}]], ["single-item object", {"a": None}, [None, {"a": 0}]], [ "multi-item all distinct object", {"a": None, "b": 0, "c": True, "d": "string"}, [None, 0, True, "string", {"a": 0, "b": 1, "c": 2, "d": 3}], ], [ "multi-item repeated object", {"a": True, "b": True, "c": True, "d": True}, [True, {"a": 0, "b": 0, "c": 0, "d": 0}], ], [ "complex array", [{"a": True, "b": [1, 2, 3]}, [1, 2, 3]], [True, 1, 2, 3, [1, 2, 3], {"a": 0, "b": 4}, [5, 4]], ], [ "complex object", {"a": True, "b": [1, 2, 3], "c": {"a": True, "b": [1, 2, 3]}}, [True, 1, 2, 3, [1, 2, 3], {"a": 0, "b": 4}, {"a": 0, "b": 4, "c": 5}], ], ], ) def test_crunch(description, 
uncrunched, crunched): assert crunch(uncrunched) == crunched python-graphene-3.4.3/graphene/utils/tests/test_dataloader.py000066400000000000000000000246461471374454500244670ustar00rootroot00000000000000from asyncio import gather from collections import namedtuple from functools import partial from unittest.mock import Mock from graphene.utils.dataloader import DataLoader from pytest import mark, raises from graphene import ObjectType, String, Schema, Field, List CHARACTERS = { "1": {"name": "Luke Skywalker", "sibling": "3"}, "2": {"name": "Darth Vader", "sibling": None}, "3": {"name": "Leia Organa", "sibling": "1"}, } get_character = Mock(side_effect=lambda character_id: CHARACTERS[character_id]) class CharacterType(ObjectType): name = String() sibling = Field(lambda: CharacterType) async def resolve_sibling(character, info): if character["sibling"]: return await info.context.character_loader.load(character["sibling"]) return None class Query(ObjectType): skywalker_family = List(CharacterType) async def resolve_skywalker_family(_, info): return await info.context.character_loader.load_many(["1", "2", "3"]) mock_batch_load_fn = Mock( side_effect=lambda character_ids: [get_character(id) for id in character_ids] ) class CharacterLoader(DataLoader): async def batch_load_fn(self, character_ids): return mock_batch_load_fn(character_ids) Context = namedtuple("Context", "character_loader") @mark.asyncio async def test_basic_dataloader(): schema = Schema(query=Query) character_loader = CharacterLoader() context = Context(character_loader=character_loader) query = """ { skywalkerFamily { name sibling { name } } } """ result = await schema.execute_async(query, context=context) assert not result.errors assert result.data == { "skywalkerFamily": [ {"name": "Luke Skywalker", "sibling": {"name": "Leia Organa"}}, {"name": "Darth Vader", "sibling": None}, {"name": "Leia Organa", "sibling": {"name": "Luke Skywalker"}}, ] } assert mock_batch_load_fn.call_count == 1 assert 
get_character.call_count == 3 def id_loader(**options): load_calls = [] async def default_resolve(x): return x resolve = options.pop("resolve", default_resolve) async def fn(keys): load_calls.append(keys) return await resolve(keys) # return keys identity_loader = DataLoader(fn, **options) return identity_loader, load_calls @mark.asyncio async def test_build_a_simple_data_loader(): async def call_fn(keys): return keys identity_loader = DataLoader(call_fn) promise1 = identity_loader.load(1) value1 = await promise1 assert value1 == 1 @mark.asyncio async def test_can_build_a_data_loader_from_a_partial(): value_map = {1: "one"} async def call_fn(context, keys): return [context.get(key) for key in keys] partial_fn = partial(call_fn, value_map) identity_loader = DataLoader(partial_fn) promise1 = identity_loader.load(1) value1 = await promise1 assert value1 == "one" @mark.asyncio async def test_supports_loading_multiple_keys_in_one_call(): async def call_fn(keys): return keys identity_loader = DataLoader(call_fn) promise_all = identity_loader.load_many([1, 2]) values = await promise_all assert values == [1, 2] promise_all = identity_loader.load_many([]) values = await promise_all assert values == [] @mark.asyncio async def test_batches_multiple_requests(): identity_loader, load_calls = id_loader() promise1 = identity_loader.load(1) promise2 = identity_loader.load(2) p = gather(promise1, promise2) value1, value2 = await p assert value1 == 1 assert value2 == 2 assert load_calls == [[1, 2]] @mark.asyncio async def test_batches_multiple_requests_with_max_batch_sizes(): identity_loader, load_calls = id_loader(max_batch_size=2) promise1 = identity_loader.load(1) promise2 = identity_loader.load(2) promise3 = identity_loader.load(3) p = gather(promise1, promise2, promise3) value1, value2, value3 = await p assert value1 == 1 assert value2 == 2 assert value3 == 3 assert load_calls == [[1, 2], [3]] @mark.asyncio async def test_coalesces_identical_requests(): identity_loader, 
load_calls = id_loader() promise1 = identity_loader.load(1) promise2 = identity_loader.load(1) assert promise1 == promise2 p = gather(promise1, promise2) value1, value2 = await p assert value1 == 1 assert value2 == 1 assert load_calls == [[1]] @mark.asyncio async def test_caches_repeated_requests(): identity_loader, load_calls = id_loader() a, b = await gather(identity_loader.load("A"), identity_loader.load("B")) assert a == "A" assert b == "B" assert load_calls == [["A", "B"]] a2, c = await gather(identity_loader.load("A"), identity_loader.load("C")) assert a2 == "A" assert c == "C" assert load_calls == [["A", "B"], ["C"]] a3, b2, c2 = await gather( identity_loader.load("A"), identity_loader.load("B"), identity_loader.load("C") ) assert a3 == "A" assert b2 == "B" assert c2 == "C" assert load_calls == [["A", "B"], ["C"]] @mark.asyncio async def test_clears_single_value_in_loader(): identity_loader, load_calls = id_loader() a, b = await gather(identity_loader.load("A"), identity_loader.load("B")) assert a == "A" assert b == "B" assert load_calls == [["A", "B"]] identity_loader.clear("A") a2, b2 = await gather(identity_loader.load("A"), identity_loader.load("B")) assert a2 == "A" assert b2 == "B" assert load_calls == [["A", "B"], ["A"]] @mark.asyncio async def test_clears_all_values_in_loader(): identity_loader, load_calls = id_loader() a, b = await gather(identity_loader.load("A"), identity_loader.load("B")) assert a == "A" assert b == "B" assert load_calls == [["A", "B"]] identity_loader.clear_all() a2, b2 = await gather(identity_loader.load("A"), identity_loader.load("B")) assert a2 == "A" assert b2 == "B" assert load_calls == [["A", "B"], ["A", "B"]] @mark.asyncio async def test_allows_priming_the_cache(): identity_loader, load_calls = id_loader() identity_loader.prime("A", "A") a, b = await gather(identity_loader.load("A"), identity_loader.load("B")) assert a == "A" assert b == "B" assert load_calls == [["B"]] @mark.asyncio async def 
test_does_not_prime_keys_that_already_exist(): identity_loader, load_calls = id_loader() identity_loader.prime("A", "X") a1 = await identity_loader.load("A") b1 = await identity_loader.load("B") assert a1 == "X" assert b1 == "B" identity_loader.prime("A", "Y") identity_loader.prime("B", "Y") a2 = await identity_loader.load("A") b2 = await identity_loader.load("B") assert a2 == "X" assert b2 == "B" assert load_calls == [["B"]] # # Represents Errors @mark.asyncio async def test_resolves_to_error_to_indicate_failure(): async def resolve(keys): mapped_keys = [ key if key % 2 == 0 else Exception("Odd: {}".format(key)) for key in keys ] return mapped_keys even_loader, load_calls = id_loader(resolve=resolve) with raises(Exception) as exc_info: await even_loader.load(1) assert str(exc_info.value) == "Odd: 1" value2 = await even_loader.load(2) assert value2 == 2 assert load_calls == [[1], [2]] @mark.asyncio async def test_can_represent_failures_and_successes_simultaneously(): async def resolve(keys): mapped_keys = [ key if key % 2 == 0 else Exception("Odd: {}".format(key)) for key in keys ] return mapped_keys even_loader, load_calls = id_loader(resolve=resolve) promise1 = even_loader.load(1) promise2 = even_loader.load(2) with raises(Exception) as exc_info: await promise1 assert str(exc_info.value) == "Odd: 1" value2 = await promise2 assert value2 == 2 assert load_calls == [[1, 2]] @mark.asyncio async def test_caches_failed_fetches(): async def resolve(keys): mapped_keys = [Exception("Error: {}".format(key)) for key in keys] return mapped_keys error_loader, load_calls = id_loader(resolve=resolve) with raises(Exception) as exc_info: await error_loader.load(1) assert str(exc_info.value) == "Error: 1" with raises(Exception) as exc_info: await error_loader.load(1) assert str(exc_info.value) == "Error: 1" assert load_calls == [[1]] @mark.asyncio async def test_caches_failed_fetches_2(): identity_loader, load_calls = id_loader() identity_loader.prime(1, Exception("Error: 1")) 
with raises(Exception) as _: await identity_loader.load(1) assert load_calls == [] # It is resilient to job queue ordering @mark.asyncio async def test_batches_loads_occuring_within_promises(): identity_loader, load_calls = id_loader() async def load_b_1(): return await load_b_2() async def load_b_2(): return await identity_loader.load("B") values = await gather(identity_loader.load("A"), load_b_1()) assert values == ["A", "B"] assert load_calls == [["A", "B"]] @mark.asyncio async def test_catches_error_if_loader_resolver_fails(): exc = Exception("AOH!") def do_resolve(x): raise exc a_loader, a_load_calls = id_loader(resolve=do_resolve) with raises(Exception) as exc_info: await a_loader.load("A1") assert exc_info.value == exc @mark.asyncio async def test_can_call_a_loader_from_a_loader(): deep_loader, deep_load_calls = id_loader() a_loader, a_load_calls = id_loader( resolve=lambda keys: deep_loader.load(tuple(keys)) ) b_loader, b_load_calls = id_loader( resolve=lambda keys: deep_loader.load(tuple(keys)) ) a1, b1, a2, b2 = await gather( a_loader.load("A1"), b_loader.load("B1"), a_loader.load("A2"), b_loader.load("B2"), ) assert a1 == "A1" assert b1 == "B1" assert a2 == "A2" assert b2 == "B2" assert a_load_calls == [["A1", "A2"]] assert b_load_calls == [["B1", "B2"]] assert deep_load_calls == [[("A1", "A2"), ("B1", "B2")]] @mark.asyncio async def test_dataloader_clear_with_missing_key_works(): async def do_resolve(x): return x a_loader, a_load_calls = id_loader(resolve=do_resolve) assert a_loader.clear("A1") == a_loader python-graphene-3.4.3/graphene/utils/tests/test_deduplicator.py000066400000000000000000000116741471374454500250430ustar00rootroot00000000000000import datetime import graphene from graphene import relay from graphene.types.resolver import dict_resolver from ..deduplicator import deflate def test_does_not_modify_object_without_typename_and_id(): response = {"foo": "bar"} deflated_response = deflate(response) assert deflated_response == {"foo": "bar"} 
def test_does_not_modify_first_instance_of_an_object(): response = { "data": [ {"__typename": "foo", "id": 1, "name": "foo"}, {"__typename": "foo", "id": 1, "name": "foo"}, ] } deflated_response = deflate(response) assert deflated_response == { "data": [ {"__typename": "foo", "id": 1, "name": "foo"}, {"__typename": "foo", "id": 1}, ] } def test_does_not_modify_first_instance_of_an_object_nested(): response = { "data": [ { "__typename": "foo", "bar1": {"__typename": "bar", "id": 1, "name": "bar"}, "bar2": {"__typename": "bar", "id": 1, "name": "bar"}, "id": 1, }, { "__typename": "foo", "bar1": {"__typename": "bar", "id": 1, "name": "bar"}, "bar2": {"__typename": "bar", "id": 1, "name": "bar"}, "id": 2, }, ] } deflated_response = deflate(response) assert deflated_response == { "data": [ { "__typename": "foo", "bar1": {"__typename": "bar", "id": 1, "name": "bar"}, "bar2": {"__typename": "bar", "id": 1, "name": "bar"}, "id": 1, }, { "__typename": "foo", "bar1": {"__typename": "bar", "id": 1}, "bar2": {"__typename": "bar", "id": 1}, "id": 2, }, ] } def test_does_not_modify_input(): response = { "data": [ {"__typename": "foo", "id": 1, "name": "foo"}, {"__typename": "foo", "id": 1, "name": "foo"}, ] } deflate(response) assert response == { "data": [ {"__typename": "foo", "id": 1, "name": "foo"}, {"__typename": "foo", "id": 1, "name": "foo"}, ] } TEST_DATA = { "events": [ {"id": "568", "date": datetime.date(2017, 5, 19), "movie": "1198359"}, {"id": "234", "date": datetime.date(2017, 5, 20), "movie": "1198359"}, ], "movies": { "1198359": { "id": "1198359", "name": "King Arthur: Legend of the Sword", "synopsis": ( "When the child Arthur's father is murdered, Vortigern, " "Arthur's uncle, seizes the crown. Robbed of his birthright and " "with no idea who he truly is..." 
), } }, } def test_example_end_to_end(): class Movie(graphene.ObjectType): class Meta: interfaces = (relay.Node,) default_resolver = dict_resolver name = graphene.String(required=True) synopsis = graphene.String(required=True) class Event(graphene.ObjectType): class Meta: interfaces = (relay.Node,) default_resolver = dict_resolver movie = graphene.Field(Movie, required=True) date = graphene.types.datetime.Date(required=True) def resolve_movie(event, info): return TEST_DATA["movies"][event["movie"]] class Query(graphene.ObjectType): events = graphene.List(graphene.NonNull(Event), required=True) def resolve_events(_, info): return TEST_DATA["events"] schema = graphene.Schema(query=Query) query = """\ { events { __typename id date movie { __typename id name synopsis } } } """ result = schema.execute(query) assert not result.errors data = deflate(result.data) assert data == { "events": [ { "__typename": "Event", "id": "RXZlbnQ6NTY4", "date": "2017-05-19", "movie": { "__typename": "Movie", "id": "TW92aWU6MTE5ODM1OQ==", "name": "King Arthur: Legend of the Sword", "synopsis": ( "When the child Arthur's father is murdered, Vortigern, " "Arthur's uncle, seizes the crown. Robbed of his birthright and " "with no idea who he truly is..." ), }, }, { "__typename": "Event", "id": "RXZlbnQ6MjM0", "date": "2017-05-20", "movie": {"__typename": "Movie", "id": "TW92aWU6MTE5ODM1OQ=="}, }, ] } python-graphene-3.4.3/graphene/utils/tests/test_deprecated.py000066400000000000000000000004131471374454500244510ustar00rootroot00000000000000from .. 
import deprecated from ..deprecated import warn_deprecation def test_warn_deprecation(mocker): mocker.patch.object(deprecated, "warn") warn_deprecation("OH!") deprecated.warn.assert_called_with("OH!", stacklevel=2, category=DeprecationWarning) python-graphene-3.4.3/graphene/utils/tests/test_module_loading.py000066400000000000000000000035161471374454500253420ustar00rootroot00000000000000from pytest import raises from graphene import ObjectType, String from ..module_loading import import_string, lazy_import def test_import_string(): MyString = import_string("graphene.String") assert MyString == String MyObjectTypeMeta = import_string("graphene.ObjectType", "__doc__") assert MyObjectTypeMeta == ObjectType.__doc__ def test_import_string_module(): with raises(Exception) as exc_info: import_string("graphenea") assert str(exc_info.value) == "graphenea doesn't look like a module path" def test_import_string_class(): with raises(Exception) as exc_info: import_string("graphene.Stringa") assert ( str(exc_info.value) == 'Module "graphene" does not define a "Stringa" attribute/class' ) def test_import_string_attributes(): with raises(Exception) as exc_info: import_string("graphene.String", "length") assert ( str(exc_info.value) == 'Module "graphene" does not define a "length" attribute inside attribute/class ' '"String"' ) with raises(Exception) as exc_info: import_string("graphene.ObjectType", "__class__.length") assert ( str(exc_info.value) == 'Module "graphene" does not define a "__class__.length" attribute inside ' 'attribute/class "ObjectType"' ) with raises(Exception) as exc_info: import_string("graphene.ObjectType", "__classa__.__base__") assert ( str(exc_info.value) == 'Module "graphene" does not define a "__classa__" attribute inside attribute/class ' '"ObjectType"' ) def test_lazy_import(): f = lazy_import("graphene.String") MyString = f() assert MyString == String f = lazy_import("graphene.ObjectType", "__doc__") MyObjectTypeMeta = f() assert MyObjectTypeMeta == 
ObjectType.__doc__ python-graphene-3.4.3/graphene/utils/tests/test_orderedtype.py000066400000000000000000000013531471374454500247030ustar00rootroot00000000000000from ..orderedtype import OrderedType def test_orderedtype(): one = OrderedType() two = OrderedType() three = OrderedType() assert one < two < three def test_orderedtype_eq(): one = OrderedType() two = OrderedType() assert one == one assert one != two def test_orderedtype_hash(): one = OrderedType() two = OrderedType() assert hash(one) == hash(one) assert hash(one) != hash(two) def test_orderedtype_resetcounter(): one = OrderedType() two = OrderedType() one.reset_counter() assert one > two def test_orderedtype_non_orderabletypes(): one = OrderedType() assert one.__lt__(1) == NotImplemented assert one.__gt__(1) == NotImplemented assert one != 1 python-graphene-3.4.3/graphene/utils/tests/test_resolve_only_args.py000066400000000000000000000005451471374454500261130ustar00rootroot00000000000000from .. import deprecated from ..resolve_only_args import resolve_only_args def test_resolve_only_args(mocker): mocker.patch.object(deprecated, "warn_deprecation") def resolver(root, **args): return root, args wrapped_resolver = resolve_only_args(resolver) result = wrapped_resolver(1, 2, a=3) assert result == (1, {"a": 3}) python-graphene-3.4.3/graphene/utils/tests/test_resolver_from_annotations.py000066400000000000000000000000001471374454500276420ustar00rootroot00000000000000python-graphene-3.4.3/graphene/utils/tests/test_str_converters.py000066400000000000000000000015171471374454500254410ustar00rootroot00000000000000# coding: utf-8 from ..str_converters import to_camel_case, to_snake_case def test_snake_case(): assert to_snake_case("snakesOnAPlane") == "snakes_on_a_plane" assert to_snake_case("SnakesOnAPlane") == "snakes_on_a_plane" assert to_snake_case("SnakesOnA_Plane") == "snakes_on_a__plane" assert to_snake_case("snakes_on_a_plane") == "snakes_on_a_plane" assert to_snake_case("snakes_on_a__plane") == 
"snakes_on_a__plane" assert to_snake_case("IPhoneHysteria") == "i_phone_hysteria" assert to_snake_case("iPhoneHysteria") == "i_phone_hysteria" def test_camel_case(): assert to_camel_case("snakes_on_a_plane") == "snakesOnAPlane" assert to_camel_case("snakes_on_a__plane") == "snakesOnA_Plane" assert to_camel_case("i_phone_hysteria") == "iPhoneHysteria" assert to_camel_case("field_i18n") == "fieldI18n" python-graphene-3.4.3/graphene/utils/tests/test_trim_docstring.py000066400000000000000000000010701471374454500254000ustar00rootroot00000000000000from ..trim_docstring import trim_docstring def test_trim_docstring(): class WellDocumentedObject: """ This object is very well-documented. It has multiple lines in its description. Multiple paragraphs too """ assert ( trim_docstring(WellDocumentedObject.__doc__) == "This object is very well-documented. It has multiple lines in its\n" "description.\n\nMultiple paragraphs too" ) class UndocumentedObject: pass assert trim_docstring(UndocumentedObject.__doc__) is None python-graphene-3.4.3/graphene/utils/thenables.py000066400000000000000000000012331471374454500221160ustar00rootroot00000000000000""" This file is used mainly as a bridge for thenable abstractions. """ from inspect import isawaitable def await_and_execute(obj, on_resolve): async def build_resolve_async(): return on_resolve(await obj) return build_resolve_async() def maybe_thenable(obj, on_resolve): """ Execute a on_resolve function once the thenable is resolved, returning the same type of object inputed. 
If the object is not thenable, it should return on_resolve(obj) """ if isawaitable(obj): return await_and_execute(obj, on_resolve) # If it's not awaitable, return the function executed over the object return on_resolve(obj) python-graphene-3.4.3/graphene/utils/trim_docstring.py000066400000000000000000000004511471374454500232010ustar00rootroot00000000000000import inspect def trim_docstring(docstring): # Cleans up whitespaces from an indented docstring # # See https://www.python.org/dev/peps/pep-0257/ # and https://docs.python.org/2/library/inspect.html#inspect.cleandoc return inspect.cleandoc(docstring) if docstring else None python-graphene-3.4.3/graphene/validation/000077500000000000000000000000001471374454500205725ustar00rootroot00000000000000python-graphene-3.4.3/graphene/validation/__init__.py000066400000000000000000000002451471374454500227040ustar00rootroot00000000000000from .depth_limit import depth_limit_validator from .disable_introspection import DisableIntrospection __all__ = ["DisableIntrospection", "depth_limit_validator"] python-graphene-3.4.3/graphene/validation/depth_limit.py000066400000000000000000000145501471374454500234530ustar00rootroot00000000000000# This is a Python port of https://github.com/stems/graphql-depth-limit # which is licensed under the terms of the MIT license, reproduced below. # # ----------- # # MIT License # # Copyright (c) 2017 Stem # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. 
# # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. try: from re import Pattern except ImportError: # backwards compatibility for v3.6 from typing import Pattern from typing import Callable, Dict, List, Optional, Union, Tuple from graphql import GraphQLError from graphql.validation import ValidationContext, ValidationRule from graphql.language import ( DefinitionNode, FieldNode, FragmentDefinitionNode, FragmentSpreadNode, InlineFragmentNode, Node, OperationDefinitionNode, ) from ..utils.is_introspection_key import is_introspection_key IgnoreType = Union[Callable[[str], bool], Pattern, str] def depth_limit_validator( max_depth: int, ignore: Optional[List[IgnoreType]] = None, callback: Optional[Callable[[Dict[str, int]], None]] = None, ): class DepthLimitValidator(ValidationRule): def __init__(self, validation_context: ValidationContext): document = validation_context.document definitions = document.definitions fragments = get_fragments(definitions) queries = get_queries_and_mutations(definitions) query_depths = {} for name in queries: query_depths[name] = determine_depth( node=queries[name], fragments=fragments, depth_so_far=0, max_depth=max_depth, context=validation_context, operation_name=name, ignore=ignore, ) if callable(callback): callback(query_depths) super().__init__(validation_context) return DepthLimitValidator def get_fragments( definitions: Tuple[DefinitionNode, ...], ) -> Dict[str, FragmentDefinitionNode]: fragments = {} for definition in definitions: if isinstance(definition, FragmentDefinitionNode): fragments[definition.name.value] = 
definition return fragments # This will actually get both queries and mutations. # We can basically treat those the same def get_queries_and_mutations( definitions: Tuple[DefinitionNode, ...], ) -> Dict[str, OperationDefinitionNode]: operations = {} for definition in definitions: if isinstance(definition, OperationDefinitionNode): operation = definition.name.value if definition.name else "anonymous" operations[operation] = definition return operations def determine_depth( node: Node, fragments: Dict[str, FragmentDefinitionNode], depth_so_far: int, max_depth: int, context: ValidationContext, operation_name: str, ignore: Optional[List[IgnoreType]] = None, ) -> int: if depth_so_far > max_depth: context.report_error( GraphQLError( f"'{operation_name}' exceeds maximum operation depth of {max_depth}.", [node], ) ) return depth_so_far if isinstance(node, FieldNode): should_ignore = is_introspection_key(node.name.value) or is_ignored( node, ignore ) if should_ignore or not node.selection_set: return 0 return 1 + max( map( lambda selection: determine_depth( node=selection, fragments=fragments, depth_so_far=depth_so_far + 1, max_depth=max_depth, context=context, operation_name=operation_name, ignore=ignore, ), node.selection_set.selections, ) ) elif isinstance(node, FragmentSpreadNode): return determine_depth( node=fragments[node.name.value], fragments=fragments, depth_so_far=depth_so_far, max_depth=max_depth, context=context, operation_name=operation_name, ignore=ignore, ) elif isinstance( node, (InlineFragmentNode, FragmentDefinitionNode, OperationDefinitionNode) ): return max( map( lambda selection: determine_depth( node=selection, fragments=fragments, depth_so_far=depth_so_far, max_depth=max_depth, context=context, operation_name=operation_name, ignore=ignore, ), node.selection_set.selections, ) ) else: raise Exception( f"Depth crawler cannot handle: {node.kind}." 
) # pragma: no cover def is_ignored(node: FieldNode, ignore: Optional[List[IgnoreType]] = None) -> bool: if ignore is None: return False for rule in ignore: field_name = node.name.value if isinstance(rule, str): if field_name == rule: return True elif isinstance(rule, Pattern): if rule.match(field_name): return True elif callable(rule): if rule(field_name): return True else: raise ValueError(f"Invalid ignore option: {rule}.") return False python-graphene-3.4.3/graphene/validation/disable_introspection.py000066400000000000000000000010331471374454500255240ustar00rootroot00000000000000from graphql import GraphQLError from graphql.language import FieldNode from graphql.validation import ValidationRule from ..utils.is_introspection_key import is_introspection_key class DisableIntrospection(ValidationRule): def enter_field(self, node: FieldNode, *_args): field_name = node.name.value if is_introspection_key(field_name): self.report_error( GraphQLError( f"Cannot query '{field_name}': introspection is disabled.", node ) ) python-graphene-3.4.3/graphene/validation/tests/000077500000000000000000000000001471374454500217345ustar00rootroot00000000000000python-graphene-3.4.3/graphene/validation/tests/__init__.py000066400000000000000000000000001471374454500240330ustar00rootroot00000000000000python-graphene-3.4.3/graphene/validation/tests/test_depth_limit_validator.py000066400000000000000000000114251471374454500277170ustar00rootroot00000000000000import re from pytest import raises from graphql import parse, get_introspection_query, validate from ...types import Schema, ObjectType, Interface from ...types import String, Int, List, Field from ..depth_limit import depth_limit_validator class PetType(Interface): name = String(required=True) class meta: name = "Pet" class CatType(ObjectType): class meta: name = "Cat" interfaces = (PetType,) class DogType(ObjectType): class meta: name = "Dog" interfaces = (PetType,) class AddressType(ObjectType): street = String(required=True) number = 
Int(required=True) city = String(required=True) country = String(required=True) class Meta: name = "Address" class HumanType(ObjectType): name = String(required=True) email = String(required=True) address = Field(AddressType, required=True) pets = List(PetType, required=True) class Meta: name = "Human" class Query(ObjectType): user = Field(HumanType, required=True, name=String()) version = String(required=True) user1 = Field(HumanType, required=True) user2 = Field(HumanType, required=True) user3 = Field(HumanType, required=True) @staticmethod def resolve_user(root, info, name=None): pass schema = Schema(query=Query) def run_query(query: str, max_depth: int, ignore=None): document = parse(query) result = None def callback(query_depths): nonlocal result result = query_depths errors = validate( schema=schema.graphql_schema, document_ast=document, rules=( depth_limit_validator( max_depth=max_depth, ignore=ignore, callback=callback ), ), ) return errors, result def test_should_count_depth_without_fragment(): query = """ query read0 { version } query read1 { version user { name } } query read2 { matt: user(name: "matt") { email } andy: user(name: "andy") { email address { city } } } query read3 { matt: user(name: "matt") { email } andy: user(name: "andy") { email address { city } pets { name owner { name } } } } """ expected = {"read0": 0, "read1": 1, "read2": 2, "read3": 3} errors, result = run_query(query, 10) assert not errors assert result == expected def test_should_count_with_fragments(): query = """ query read0 { ... on Query { version } } query read1 { version user { ... on Human { name } } } fragment humanInfo on Human { email } fragment petInfo on Pet { name owner { name } } query read2 { matt: user(name: "matt") { ...humanInfo } andy: user(name: "andy") { ...humanInfo address { city } } } query read3 { matt: user(name: "matt") { ...humanInfo } andy: user(name: "andy") { ... 
on Human { email } address { city } pets { ...petInfo } } } """ expected = {"read0": 0, "read1": 1, "read2": 2, "read3": 3} errors, result = run_query(query, 10) assert not errors assert result == expected def test_should_ignore_the_introspection_query(): errors, result = run_query(get_introspection_query(), 10) assert not errors assert result == {"IntrospectionQuery": 0} def test_should_catch_very_deep_query(): query = """{ user { pets { owner { pets { owner { pets { name } } } } } } } """ errors, result = run_query(query, 4) assert len(errors) == 1 assert errors[0].message == "'anonymous' exceeds maximum operation depth of 4." def test_should_ignore_field(): query = """ query read1 { user { address { city } } } query read2 { user1 { address { city } } user2 { address { city } } user3 { address { city } } } """ errors, result = run_query( query, 10, ignore=["user1", re.compile("user2"), lambda field_name: field_name == "user3"], ) expected = {"read1": 2, "read2": 0} assert not errors assert result == expected def test_should_raise_invalid_ignore(): query = """ query read1 { user { address { city } } } """ with raises(ValueError, match="Invalid ignore option:"): run_query(query, 10, ignore=[True]) python-graphene-3.4.3/graphene/validation/tests/test_disable_introspection.py000066400000000000000000000015121471374454500277270ustar00rootroot00000000000000from graphql import parse, validate from ...types import Schema, ObjectType, String from ..disable_introspection import DisableIntrospection class Query(ObjectType): name = String(required=True) @staticmethod def resolve_name(root, info): return "Hello world!" 
schema = Schema(query=Query) def run_query(query: str): document = parse(query) return validate( schema=schema.graphql_schema, document_ast=document, rules=(DisableIntrospection,), ) def test_disallows_introspection_queries(): errors = run_query("{ __schema { queryType { name } } }") assert len(errors) == 1 assert errors[0].message == "Cannot query '__schema': introspection is disabled." def test_allows_non_introspection_queries(): errors = run_query("{ name }") assert len(errors) == 0 python-graphene-3.4.3/mypy.ini000066400000000000000000000004451471374454500163510ustar00rootroot00000000000000[mypy] ignore_missing_imports = True [mypy-graphene.pyutils.*] ignore_errors = True [mypy-graphene.types.scalars] ignore_errors = True [mypy-graphene.types.generic] ignore_errors = True [mypy-graphene.types.tests.*] ignore_errors = True [mypy-graphene.relay.tests.*] ignore_errors = True python-graphene-3.4.3/setup.cfg000066400000000000000000000001501471374454500164640ustar00rootroot00000000000000[coverage:run] omit = graphene/pyutils/*,*/tests/*,graphene/types/scalars.py [bdist_wheel] universal=1 python-graphene-3.4.3/setup.py000066400000000000000000000051531471374454500163650ustar00rootroot00000000000000import ast import codecs import re import sys from setuptools import find_packages, setup from setuptools.command.test import test as TestCommand _version_re = re.compile(r"VERSION\s+=\s+(.*)") with open("graphene/__init__.py", "rb") as f: version = ast.literal_eval(_version_re.search(f.read().decode("utf-8")).group(1)) path_copy = sys.path[:] sys.path.append("graphene") try: from pyutils.version import get_version version = get_version(version) except Exception: version = ".".join([str(v) for v in version]) sys.path[:] = path_copy class PyTest(TestCommand): user_options = [("pytest-args=", "a", "Arguments to pass to py.test")] def initialize_options(self): TestCommand.initialize_options(self) self.pytest_args = [] def finalize_options(self): 
TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True def run_tests(self): # import here, cause outside the eggs aren't loaded import pytest errno = pytest.main(self.pytest_args) sys.exit(errno) tests_require = [ "pytest>=8,<9", "pytest-benchmark>=4,<5", "pytest-cov>=5,<6", "pytest-mock>=3,<4", "pytest-asyncio>=0.16,<2", "coveralls>=3.3,<5", ] dev_requires = [ "ruff==0.5.0", "types-python-dateutil>=2.8.1,<3", "mypy>=1.10,<2", ] + tests_require setup( name="graphene", version=version, description="GraphQL Framework for Python", long_description=codecs.open( "README.md", "r", encoding="ascii", errors="replace" ).read(), long_description_content_type="text/markdown", url="https://github.com/graphql-python/graphene", author="Syrus Akbary", author_email="me@syrusakbary.com", license="MIT", classifiers=[ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Topic :: Software Development :: Libraries", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", ], keywords="api graphql protocol rest relay graphene", packages=find_packages(exclude=["examples*"]), install_requires=[ "graphql-core>=3.1,<3.3", "graphql-relay>=3.1,<3.3", "python-dateutil>=2.7.0,<3", "typing-extensions>=4.7.1,<5", ], tests_require=tests_require, extras_require={"test": tests_require, "dev": dev_requires}, cmdclass={"test": PyTest}, ) python-graphene-3.4.3/tox.ini000066400000000000000000000007321471374454500161640ustar00rootroot00000000000000[tox] envlist = py3{8,9,10,11,12,13}, mypy, pre-commit skipsdist = true [testenv] deps = .[test] commands = pytest --cov=graphene graphene --cov-report=term --cov-report=xml examples {posargs} [testenv:pre-commit] basepython = python3.10 deps = pre-commit>=3.7,<4 setenv = LC_CTYPE=en_US.UTF-8 commands = pre-commit run 
--all-files --show-diff-on-failure [testenv:mypy] basepython = python3.10 deps = .[dev] commands = mypy graphene [pytest]