pax_global_header 0000666 0000000 0000000 00000000064 14535040553 0014516 g ustar 00root root 0000000 0000000 52 comment=a084895d270cb8f9865ee33daab5356fc3ab3ba1
graphene-mongo-0.4.1/ 0000775 0000000 0000000 00000000000 14535040553 0014426 5 ustar 00root root 0000000 0000000 graphene-mongo-0.4.1/.github/ 0000775 0000000 0000000 00000000000 14535040553 0015766 5 ustar 00root root 0000000 0000000 graphene-mongo-0.4.1/.github/workflows/ 0000775 0000000 0000000 00000000000 14535040553 0020023 5 ustar 00root root 0000000 0000000 graphene-mongo-0.4.1/.github/workflows/ci.yml 0000664 0000000 0000000 00000002072 14535040553 0021142 0 ustar 00root root 0000000 0000000 # This workflow will install Python dependencies, run tests and lint with a single version of Python
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python
name: Test Package
on:
push:
branches: [ "master" ]
pull_request:
branches: [ "master" ]
permissions:
contents: read
jobs:
build:
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
python: ["3.8", "3.9", "3.10", "3.11","3.12"]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python }}
uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python }}
- name: Lint with ruff
run: |
python -m pip install ruff
make lint
- name: Install dependencies
run: |
python -m pip install poetry
poetry config virtualenvs.create false
poetry install --with dev
- name: Run Tests
run: make test
- name: Build Package
run: |
poetry build graphene-mongo-0.4.1/.github/workflows/lint.yml 0000664 0000000 0000000 00000001176 14535040553 0021521 0 ustar 00root root 0000000 0000000 # This workflow will install Python dependencies, run tests and lint with a single version of Python
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python
name: Lint
on: [push, pull_request]
permissions:
contents: read
jobs:
build:
strategy:
matrix:
python: ["3.12"]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python }}
- name: Lint with ruff
run: |
python -m pip install ruff
make lint graphene-mongo-0.4.1/.github/workflows/publish.yml 0000664 0000000 0000000 00000002625 14535040553 0022221 0 ustar 00root root 0000000 0000000 name: Publish to PyPI
on:
release:
types: [published]
permissions:
contents: read
jobs:
build:
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
python: ["3.8", "3.9", "3.10", "3.11", "3.12"]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python }}
uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python }}
- name: Install dependencies
run: |
python -m pip install poetry
poetry config virtualenvs.create false
poetry install --with dev
- name: Lint
run: |
make lint
- name: Run Tests
run: make test
- name: Build Package
run: |
poetry build
publish:
needs: build
runs-on: ubuntu-latest
permissions:
# IMPORTANT: this permission is mandatory for trusted publishing
id-token: write
steps:
- uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v3
with:
python-version: '3.x'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install poetry
- name: Build package
run: |
poetry build
- name: Build package
run: |
poetry build
- name: Publish package distributions to PyPI
uses: pypa/gh-action-pypi-publish@release/v1 graphene-mongo-0.4.1/.gitignore 0000664 0000000 0000000 00000000167 14535040553 0016422 0 ustar 00root root 0000000 0000000 .coverage
.cache/
.eggs/
.idea/
_build/
build/
dist/
graphene_mongo.egg-info/
htmlcov/
*.pyc
*.swo
*.swp
venv/
.vscode/ graphene-mongo-0.4.1/LICENSE 0000664 0000000 0000000 00000002074 14535040553 0015436 0 ustar 00root root 0000000 0000000 The MIT License (MIT)
Copyright (c) 2018-Present Abaw Chen
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
graphene-mongo-0.4.1/MANIFEST.in 0000664 0000000 0000000 00000000171 14535040553 0016163 0 ustar 00root root 0000000 0000000 global-exclude *.py[cod]
recursive-exclude examples *
recursive-exclude graphene-mongo/tests *
include README.md LICENSE
graphene-mongo-0.4.1/Makefile 0000664 0000000 0000000 00000001430 14535040553 0016064 0 ustar 00root root 0000000 0000000 clean:
@rm -f .coverage 2> /dev/null
@rm -rf .eggs 2> /dev/null
@rm -rf .cache 2> /dev/null
@rm -rf ./graphene_mongo/.cache 2> /dev/null
@rm -rf build 2> /dev/null
@rm -rf dist 2> /dev/null
@rm -rf graphene_mongo.egg-info 2> /dev/null
@find . -name "*.pyc" -delete
@find . -name "*.swp" -delete
@find . -name "__pycache__" -delete
lint:
@ruff check graphene_mongo
@ruff format . --check
test: clean
pytest graphene_mongo/tests --cov=graphene_mongo --cov-report=html --cov-report=term
register-pypitest:
#python setup.py register -r pypitest
deploy-pypitest: clean
poetry build
#poetry publish --repository testpypi
twine upload --repository testpypi dist/*
register:
#python setup.py register -r pypi
deploy: clean
poetry build
twine upload dist/*
#poetry publish
graphene-mongo-0.4.1/README.md 0000664 0000000 0000000 00000007276 14535040553 0015721 0 ustar 00root root 0000000 0000000 [](https://travis-ci.org/graphql-python/graphene-mongo) [](https://coveralls.io/github/graphql-python/graphene-mongo?branch=master) [](http://graphene-mongo.readthedocs.io/en/latest/?badge=latest) [](https://badge.fury.io/py/graphene-mongo) [](https://pypi.python.org/pypi/graphene-mongo/) [](https://pepy.tech/project/graphene-mongo)
[](https://github.com/graphql-python/graphene-mongo/actions/workflows/lint.yml) [](https://github.com/graphql-python/graphene-mongo/actions/workflows/ci.yml)
# Graphene-Mongo
A [Mongoengine](https://mongoengine-odm.readthedocs.io/) integration for [Graphene](http://graphene-python.org/).
## Installation
For installing graphene-mongo, just run this command in your shell
```
pip install graphene-mongo
```
## Examples
Here is a simple Mongoengine model as `models.py`:
```python
from mongoengine import Document
from mongoengine.fields import StringField
class User(Document):
meta = {'collection': 'user'}
first_name = StringField(required=True)
last_name = StringField(required=True)
```
To create a GraphQL schema and sync executor; for it you simply have to write the following:
```python
import graphene
from graphene_mongo import MongoengineObjectType
from .models import User as UserModel
class User(MongoengineObjectType):
class Meta:
model = UserModel
class Query(graphene.ObjectType):
users = graphene.List(User)
def resolve_users(self, info):
return list(UserModel.objects.all())
schema = graphene.Schema(query=Query)
```
Then you can simply query the schema:
```python
query = '''
query {
users {
firstName,
lastName
}
}
'''
result = await schema.execute(query)
```
To create a GraphQL schema and async executor; for it you simply have to write the following:
```python
import graphene
from graphene_mongo import AsyncMongoengineObjectType
from graphene_mongo.utils import sync_to_async
from concurrent.futures import ThreadPoolExecutor
from .models import User as UserModel
class User(AsyncMongoengineObjectType):
class Meta:
model = UserModel
class Query(graphene.ObjectType):
users = graphene.List(User)
async def resolve_users(self, info):
return await sync_to_async(list, thread_sensitive=False,
executor=ThreadPoolExecutor())(UserModel.objects.all())
schema = graphene.Schema(query=Query)
```
Then you can simply query the schema:
```python
query = '''
query {
users {
firstName,
lastName
}
}
'''
result = await schema.execute_async(query)
```
To learn more check out the following [examples](examples/):
* [Flask MongoEngine example](examples/flask_mongoengine)
* [Django MongoEngine example](examples/django_mongoengine)
* [Falcon MongoEngine example](examples/falcon_mongoengine)
## Contributing
After cloning this repo, ensure dependencies are installed by running:
```sh
pip install -r requirements.txt
```
After developing, the full test suite can be evaluated by running:
```sh
make test
```
graphene-mongo-0.4.1/README.rst 0000664 0000000 0000000 00000004056 14535040553 0016122 0 ustar 00root root 0000000 0000000 .. image:: https://travis-ci.org/graphql-python/graphene-mongo.svg?branch=master
:target: https://travis-ci.org/graphql-python/graphene-mongo
.. image:: https://coveralls.io/repos/github/graphql-python/graphene-mongo/badge.svg?branch=master
:target: https://coveralls.io/github/graphql-python/graphene-mongo?branch=master
.. image:: https://badge.fury.io/py/graphene-mongo.svg
:target: https://badge.fury.io/py/graphene-mongo
.. image:: https://img.shields.io/pypi/pyversions/graphene-mongo.svg
:target: https://pypi.python.org/pypi/graphene-mongo/
Graphene-Mongo
==============
A `Mongoengine `__ integration for `Graphene `__.
Installation
------------
For installing graphene-mongo, just run this command in your shell
.. code:: bash
pip install graphene-mongo
Examples
--------
Here is a simple Mongoengine model as `models.py`:
.. code:: python
from mongoengine import Document
from mongoengine.fields import StringField
class User(Document):
meta = {'collection': 'user'}
first_name = StringField(required=True)
last_name = StringField(required=True)
To create a GraphQL schema for it you simply have to write the following:
.. code:: python
import graphene
from graphene_mongo import MongoengineObjectType
from .models import User as UserModel
class User(MongoengineObjectType):
class Meta:
model = UserModel
class Query(graphene.ObjectType):
users = graphene.List(User)
def resolve_users(self, info):
return list(UserModel.objects.all())
schema = graphene.Schema(query=Query)
Then you can simply query the schema:
.. code:: python
query = '''
query {
users {
firstName,
lastName
}
}
'''
result = await schema.execute_async(query)
To learn more check out the `Flask MongoEngine example `__
graphene-mongo-0.4.1/docs/ 0000775 0000000 0000000 00000000000 14535040553 0015356 5 ustar 00root root 0000000 0000000 graphene-mongo-0.4.1/docs/Makefile 0000664 0000000 0000000 00000016677 14535040553 0017037 0 ustar 00root root 0000000 0000000 # Makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
.PHONY: help
help:
@echo "Please use \`make ' where is one of"
@echo " html to make standalone HTML files"
@echo " dirhtml to make HTML files named index.html in directories"
@echo " singlehtml to make a single large HTML file"
@echo " pickle to make pickle files"
@echo " json to make JSON files"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " qthelp to make HTML files and a qthelp project"
@echo " applehelp to make an Apple Help Book"
@echo " devhelp to make HTML files and a Devhelp project"
@echo " epub to make an epub"
@echo " epub3 to make an epub3"
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
@echo " latexpdf to make LaTeX files and run them through pdflatex"
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
@echo " text to make text files"
@echo " man to make manual pages"
@echo " texinfo to make Texinfo files"
@echo " info to make Texinfo files and run them through makeinfo"
@echo " gettext to make PO message catalogs"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " xml to make Docutils-native XML files"
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
@echo " coverage to run coverage check of the documentation (if enabled)"
@echo " dummy to check syntax errors of document sources"
.PHONY: clean
clean:
rm -rf $(BUILDDIR)/*
.PHONY: html
html:
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
.PHONY: dirhtml
dirhtml:
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
.PHONY: singlehtml
singlehtml:
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
.PHONY: pickle
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
.PHONY: json
json:
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
.PHONY: htmlhelp
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
.PHONY: qthelp
qthelp:
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Graphene.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Graphene.qhc"
.PHONY: applehelp
applehelp:
$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
@echo
@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
@echo "N.B. You won't be able to view it unless you put it in" \
"~/Library/Documentation/Help or install it in your application" \
"bundle."
.PHONY: devhelp
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
@echo "# mkdir -p $$HOME/.local/share/devhelp/Graphene"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Graphene"
@echo "# devhelp"
.PHONY: epub
epub:
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
.PHONY: epub3
epub3:
$(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
@echo
@echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."
.PHONY: latex
latex:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
.PHONY: latexpdf
latexpdf:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
.PHONY: latexpdfja
latexpdfja:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through platex and dvipdfmx..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
.PHONY: text
text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
.PHONY: man
man:
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
.PHONY: texinfo
texinfo:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
.PHONY: info
info:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
.PHONY: gettext
gettext:
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
.PHONY: changes
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
.PHONY: linkcheck
linkcheck:
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
.PHONY: doctest
doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
.PHONY: coverage
coverage:
$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
@echo "Testing of coverage in the sources finished, look at the " \
"results in $(BUILDDIR)/coverage/python.txt."
.PHONY: xml
xml:
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
@echo
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
.PHONY: pseudoxml
pseudoxml:
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
@echo
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
.PHONY: dummy
dummy:
$(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
@echo
@echo "Build finished. Dummy builder generates no files."
graphene-mongo-0.4.1/docs/conf.py 0000664 0000000 0000000 00000030271 14535040553 0016660 0 ustar 00root root 0000000 0000000 import os
on_rtd = os.environ.get("READTHEDOCS", None) == "True"
# -*- coding: utf-8 -*-
#
# Graphene documentation build configuration file, created by
# sphinx-quickstart on Sun Sep 11 18:30:51 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.intersphinx",
"sphinx.ext.todo",
"sphinx.ext.coverage",
"sphinx.ext.viewcode",
]
if not on_rtd:
extensions += ["sphinx.ext.githubpages"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "Graphene Mongo"
copyright = "Graphene 2018"
author = "Abaw Chen"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "0.1"
# The full version, including alpha/beta/rc tags.
release = "0.1.2"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# html_theme = 'alabaster'
# if on_rtd:
# html_theme = 'sphinx_rtd_theme'
import sphinx_graphene_theme
html_theme = "sphinx_graphene_theme"
html_theme_path = [sphinx_graphene_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# " v documentation" by default.
#
# html_title = u'Graphene v1.0.dev'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = "Graphenedoc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [(master_doc, "Graphene.tex", "Graphene Documentation", "Syrus Akbary", "manual")]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# It false, will not define \strong, \code, itleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, "graphene_django", "Graphene Django Documentation", [author], 1)]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
"Graphene-Django",
"Graphene Django Documentation",
author,
"Graphene Django",
"One line description of project.",
"Miscellaneous",
)
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
# epub_basename = project
# The HTML theme for the epub output. Since the default themes are not
# optimized for small screen space, using the same theme for HTML and epub
# output is usually not wise. This defaults to 'epub', a theme designed to save
# visual space.
#
# epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or 'en' if the language is not set.
#
# epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
# epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#
# epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#
# epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#
# epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#
# epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ["search.html"]
# The depth of the table of contents in toc.ncx.
#
# epub_tocdepth = 3
# Allow duplicate toc entries.
#
# epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#
# epub_tocscope = 'default'
# Fix unsupported image types using the Pillow.
#
# epub_fix_images = False
# Scale large images.
#
# epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# epub_show_urls = 'inline'
# If false, no index is generated.
#
# epub_use_index = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {"https://docs.python.org/": None}
graphene-mongo-0.4.1/docs/fields.rst 0000664 0000000 0000000 00000001070 14535040553 0017354 0 ustar 00root root 0000000 0000000 Supported Fields
============================
Mongoengine Fields
------------------
- BooleanField
- DecimalField
- DateTimeField
- DictField
- EmailField
- EmbeddedDocumentField
- EmbeddedDocumentListField
- FileField
- FloatField
- GenericReferenceField
- IntField
- LazyReferenceField
- ListField
- LongField
- MapField
- MultiPolygonField
- ObjectIdField
- ReferenceField
- PointField
- PolygonField
- SequenceField
- StringField
- URLField
- UUIDField
Advanced
--------
- Self-reference relationship
- List of self-reference relationship
- Inheritance field
graphene-mongo-0.4.1/docs/index.rst 0000664 0000000 0000000 00000000146 14535040553 0017220 0 ustar 00root root 0000000 0000000 Graphene-Mongo
===================
Contents:
.. toctree::
:maxdepth: 0
tutorial
fields
graphene-mongo-0.4.1/docs/requirements.txt 0000664 0000000 0000000 00000000105 14535040553 0020636 0 ustar 00root root 0000000 0000000 # Docs template
http://graphene-python.org/sphinx_graphene_theme.zip
graphene-mongo-0.4.1/docs/tutorial.rst 0000664 0000000 0000000 00000011755 14535040553 0017764 0 ustar 00root root 0000000 0000000 Mongoengine + Flask Tutorial
==============================
Graphene comes with builtin support to Mongoengine, which makes quite
easy to operate with your current models.
Note: The code in this tutorial is pulled from the `Flask Mongoengine
example
app `__.
Setup the Project
-----------------
.. code:: bash
# Create the project directory
mkdir flask_graphene_mongo
cd flask_graphene_mongo
# [Optional but suggested] Create a virtualenv to isolate our package dependencies locally
virtualenv env
source env/bin/activate
# Install required packages
pip install Flask
pip install Flask-GraphQL
pip install graphene-mongo
# Install mongomock or you have to run a real mongo server instance somewhere.
pip install mongomock
Defining our models
-------------------
Let's get start with following models:
.. code:: python
# flask_graphene_mongo/models.py
from datetime import datetime
from mongoengine import Document
from mongoengine.fields import (
DateTimeField, ReferenceField, StringField,
)
class Department(Document):
meta = {'collection': 'department'}
name = StringField()
class Role(Document):
meta = {'collection': 'role'}
name = StringField()
class Employee(Document):
meta = {'collection': 'employee'}
name = StringField()
hired_on = DateTimeField(default=datetime.now)
department = ReferenceField(Department)
role = ReferenceField(Role)
Schema
------
Here I assume you have a basic knowledge of how schemas work in GraphQL; I define the *root type* as the `Query` class below, with the ability to list all employees.
.. code:: python
# flask_graphene_mongo/schema.py
import graphene
from graphene.relay import Node
from graphene_mongo import MongoengineConnectionField, MongoengineObjectType
from models import Department as DepartmentModel
from models import Employee as EmployeeModel
from models import Role as RoleModel
class Department(MongoengineObjectType):
class Meta:
model = DepartmentModel
interfaces = (Node,)
class Role(MongoengineObjectType):
class Meta:
model = RoleModel
interfaces = (Node,)
class Employee(MongoengineObjectType):
class Meta:
model = EmployeeModel
interfaces = (Node,)
class Query(graphene.ObjectType):
node = Node.Field()
all_employees = MongoengineConnectionField(Employee)
all_role = MongoengineConnectionField(Role)
role = graphene.Field(Role)
schema = graphene.Schema(query=Query, types=[Department, Employee, Role])
Creating some data
------------------
Let's insert some data so that this demo can run right away:
.. code:: python
# flask_graphene_mongo/database.py
from mongoengine import connect
from models import Department, Employee, Role
# You can connect to a real mongo server instance by your own.
connect('graphene-mongo-example', host='mongomock://localhost', alias='default')
def init_db():
# Create the fixtures
engineering = Department(name='Engineering')
engineering.save()
hr = Department(name='Human Resources')
hr.save()
manager = Role(name='manager')
manager.save()
engineer = Role(name='engineer')
engineer.save()
peter = Employee(name='Peter', department=engineering, role=engineer)
peter.save()
roy = Employee(name='Roy', department=engineering, role=engineer)
roy.save()
tracy = Employee(name='Tracy', department=hr, role=manager)
tracy.save()
Creating GraphQL and GraphiQL views in Flask
--------------------------------------------
There is only one URL from which GraphQL is accessed, and we take advantage of ``Flask-GraphQL`` to generate a GraphQL interface that can easily be accessed from a browser:
.. code:: python
# flask_graphene_mongo/app.py
from database import init_db
from flask import Flask
from flask_graphql import GraphQLView
from schema import schema
app = Flask(__name__)
app.debug = True
default_query = '''
{
allEmployees {
edges {
node {
id,
name,
department {
id,
name
},
role {
id,
name
}
}
}
}
}'''.strip()
app.add_url_rule(
'/graphql',
view_func=GraphQLView.as_view('graphql', schema=schema, graphiql=True)
)
if __name__ == '__main__':
init_db()
app.run()
Testing
-------
We are ready to launch the server!
.. code:: bash
$ python app.py
* Running on http://127.0.0.1:5000/ (Press CTRL+C to quit)
Then go to `http://localhost:5000/graphql `__ to test your first query.
graphene-mongo-0.4.1/examples/ 0000775 0000000 0000000 00000000000 14535040553 0016244 5 ustar 00root root 0000000 0000000 graphene-mongo-0.4.1/examples/django_mongoengine/ 0000775 0000000 0000000 00000000000 14535040553 0022073 5 ustar 00root root 0000000 0000000 graphene-mongo-0.4.1/examples/django_mongoengine/.gitignore 0000664 0000000 0000000 00000000012 14535040553 0024054 0 ustar 00root root 0000000 0000000 db.sqlite3 graphene-mongo-0.4.1/examples/django_mongoengine/README.md 0000664 0000000 0000000 00000002033 14535040553 0023350 0 ustar 00root root 0000000 0000000
Example Django+MongoEngine Project
================================
This example project demos integration between Graphene, Django and MongoEngine.
Getting started
---------------
First you'll need to get the source of the project. Do this by cloning the
whole Graphene repository:
```bash
# Get the example project code
git clone git@github.com:abawchen/graphene-mongo.git
cd graphene-mongo/examples/django_mongoengine
```
Create a virtual environment.
```bash
# Create a virtualenv in which we can install the dependencies
virtualenv env
source env/bin/activate
```
Now we can install our dependencies:
```bash
pip install -r requirements.txt
```
Run the following command:
```python
python manage.py migrate
```
Setup a mongodb connection and create a database.
See the mongoengine connection details in the *settings.py* file
Start the server:
```python
python manage.py runserver
```
Now head on over to
[http://127.0.0.1:8000/graphql](http://127.0.0.1:8000/graphql)
and run some queries!
For tests run:
```python
pytest -v
```
graphene-mongo-0.4.1/examples/django_mongoengine/__init__.py 0000664 0000000 0000000 00000000000 14535040553 0024172 0 ustar 00root root 0000000 0000000 graphene-mongo-0.4.1/examples/django_mongoengine/bike/ 0000775 0000000 0000000 00000000000 14535040553 0023005 5 ustar 00root root 0000000 0000000 graphene-mongo-0.4.1/examples/django_mongoengine/bike/__init__.py 0000664 0000000 0000000 00000000000 14535040553 0025104 0 ustar 00root root 0000000 0000000 graphene-mongo-0.4.1/examples/django_mongoengine/bike/apps.py 0000664 0000000 0000000 00000000123 14535040553 0024316 0 ustar 00root root 0000000 0000000 from django.apps import AppConfig
class BikeConfig(AppConfig):
    # Django application config for the `bike` example app.
    name = "bike"
graphene-mongo-0.4.1/examples/django_mongoengine/bike/fixtures.py 0000664 0000000 0000000 00000002347 14535040553 0025236 0 ustar 00root root 0000000 0000000 import pytest
from .models import Bike, Shop
def fixture_bike_data():
    """Reset the `bike` collection and insert three sample Bike documents."""
    Bike.drop_collection()
    bike_one = Bike(
        name="Level R",
        brand="Mondraker",
        year="2020",
        size=["S", "M", "L", "XL"],
        wheel_size=27.5,
        type="MTB",
    )
    bike_one.save()
    bike_two = Bike(
        name="CAADX ULTEGRA",
        brand="Cannondale",
        year="2019",
        size=["46", "51", "54", "58"],
        wheel_size=28,
        type="Gravel",
    )
    bike_two.save()
    # Fixed ObjectId so the update/delete mutation tests can address this document.
    bike_three = Bike(
        id="507f1f77bcf86cd799439011",
        name="Moterra Neo",
        brand="Cannondale",
        year="2019",
        size=["M", "L", "XL"],
        wheel_size=29,
        type="EBike",
    )
    bike_three.save()
def fixture_shop_data():
    """Reset the `shop` collection and insert two sample Shop documents."""
    Shop.drop_collection()
    shop_one = Shop(
        name="Big Wheel Bicycles",
        address="2438 Hart Ridge Road",
        website="https://www.bigwheelbike.test",
    )
    shop_one.save()
    shop_two = Shop(
        name="Bike Tech",
        address="2175 Pearl Street",
        website="https://www.biketech.test",
    )
    shop_two.save()
@pytest.fixture(scope="module")
def fixtures_data():
    """Module-scoped pytest fixture: seed both collections once per test module."""
    fixture_bike_data()
    fixture_shop_data()
    return True
graphene-mongo-0.4.1/examples/django_mongoengine/bike/migrations/ 0000775 0000000 0000000 00000000000 14535040553 0025161 5 ustar 00root root 0000000 0000000 graphene-mongo-0.4.1/examples/django_mongoengine/bike/migrations/__init__.py 0000664 0000000 0000000 00000000000 14535040553 0027260 0 ustar 00root root 0000000 0000000 graphene-mongo-0.4.1/examples/django_mongoengine/bike/models.py 0000664 0000000 0000000 00000001060 14535040553 0024637 0 ustar 00root root 0000000 0000000 from mongoengine import Document
from mongoengine.fields import (
FloatField,
StringField,
ListField,
URLField,
ObjectIdField,
)
class Shop(Document):
    # Bicycle shop document stored in the `shop` collection.
    meta = {"collection": "shop"}
    # NOTE(review): MongoEngine Documents already get an implicit `id` primary
    # key; this extra `ID` field looks redundant — confirm before relying on it.
    ID = ObjectIdField()
    name = StringField()
    address = StringField()
    website = URLField()
class Bike(Document):
    # Bicycle document stored in the `bike` collection.
    meta = {"collection": "bike"}
    # NOTE(review): likely redundant alongside the implicit `id` primary key.
    ID = ObjectIdField()
    name = StringField()
    brand = StringField()
    # Model year kept as a string (matches the GraphQL input type below).
    year = StringField()
    size = ListField(StringField())
    wheel_size = FloatField()
    type = StringField()
graphene-mongo-0.4.1/examples/django_mongoengine/bike/mutations.py 0000664 0000000 0000000 00000004043 14535040553 0025403 0 ustar 00root root 0000000 0000000 import graphene
from django.core.exceptions import ObjectDoesNotExist
from .models import Bike
from .types import BikeType
class BikeInput(graphene.InputObjectType):
    """GraphQL input payload shared by the create and update bike mutations."""
    # `id` is only meaningful for updates; unset fields resolve to None.
    id = graphene.ID()
    name = graphene.String()
    brand = graphene.String()
    year = graphene.String()
    size = graphene.List(graphene.String)
    wheel_size = graphene.Float()
    type = graphene.String()
class CreateBikeMutation(graphene.Mutation):
    """Create a new Bike document from the supplied input payload."""

    # The created document, echoed back to the client.
    bike = graphene.Field(BikeType)

    class Arguments:
        bike_data = BikeInput(required=True)

    def mutate(self, info, bike_data=None):
        # Any `id` in the payload is ignored; MongoDB assigns a new one on save.
        bike = Bike(
            name=bike_data.name,
            brand=bike_data.brand,
            year=bike_data.year,
            size=bike_data.size,
            wheel_size=bike_data.wheel_size,
            type=bike_data.type,
        )
        bike.save()
        return CreateBikeMutation(bike=bike)
class UpdateBikeMutation(graphene.Mutation):
    """Partially update an existing Bike; only fields present in the payload change."""

    # The updated document, echoed back to the client.
    bike = graphene.Field(BikeType)

    class Arguments:
        bike_data = BikeInput(required=True)

    @staticmethod
    def get_object(id):
        # Look up by primary key; raises the model's DoesNotExist when absent.
        return Bike.objects.get(pk=id)

    def mutate(self, info, bike_data=None):
        bike = UpdateBikeMutation.get_object(bike_data.id)
        # BUG FIX: the original used plain truthiness (`if bike_data.name:`),
        # which silently ignored valid falsy updates such as an empty string,
        # an empty size list, or wheel_size 0.0. Unset graphene input fields
        # resolve to None, so `is not None` is the correct "was provided" test.
        if bike_data.name is not None:
            bike.name = bike_data.name
        if bike_data.brand is not None:
            bike.brand = bike_data.brand
        if bike_data.year is not None:
            bike.year = bike_data.year
        if bike_data.size is not None:
            bike.size = bike_data.size
        if bike_data.wheel_size is not None:
            bike.wheel_size = bike_data.wheel_size
        if bike_data.type is not None:
            bike.type = bike_data.type
        bike.save()
        return UpdateBikeMutation(bike=bike)
class DeleteBikeMutation(graphene.Mutation):
    """Delete a Bike by primary key; reports the outcome as a boolean."""

    class Arguments:
        id = graphene.ID(required=True)

    success = graphene.Boolean()

    def mutate(self, info, id):
        try:
            Bike.objects.get(pk=id).delete()
            success = True
        except ObjectDoesNotExist:
            # Deleting a missing document is reported, not raised.
            # NOTE(review): MongoEngine raises Bike.DoesNotExist, which may not
            # inherit from Django's ObjectDoesNotExist — verify this clause
            # actually catches it.
            success = False
        return DeleteBikeMutation(success=success)
graphene-mongo-0.4.1/examples/django_mongoengine/bike/schema.py 0000664 0000000 0000000 00000001344 14535040553 0024621 0 ustar 00root root 0000000 0000000 import graphene
from graphene.relay import Node
from graphene_mongo.fields import MongoengineConnectionField
from .models import Shop
from .types import BikeType, ShopType
from .mutations import CreateBikeMutation, UpdateBikeMutation, DeleteBikeMutation
class Mutations(graphene.ObjectType):
    """Root mutation type wiring up the bike CRUD mutations."""
    create_bike = CreateBikeMutation.Field()
    update_bike = UpdateBikeMutation.Field()
    delete_bike = DeleteBikeMutation.Field()
class Query(graphene.ObjectType):
    """Root query: relay node lookup, paginated bikes, and a plain shop list."""
    node = Node.Field()
    bikes = MongoengineConnectionField(BikeType)
    shop_list = graphene.List(ShopType)

    def resolve_shop_list(self, info):
        # Return every shop; no pagination or filtering for this simple list.
        return Shop.objects.all()


schema = graphene.Schema(query=Query, mutation=Mutations, types=[BikeType, ShopType])
graphene-mongo-0.4.1/examples/django_mongoengine/bike/tests.py 0000664 0000000 0000000 00000014451 14535040553 0024526 0 ustar 00root root 0000000 0000000 import pytest
from django.urls import reverse
from django.test import RequestFactory
from graphene.test import Client
from .schema import schema
from .fixtures import fixtures_data
def test_bikes_first_item_query(fixtures_data):
query = """
{
bikes(first: 1){
edges {
node {
name
brand
year
size
wheelSize
type
}
}
}
}"""
expected = {
"data": {
"bikes": {
"edges": [
{
"node": {
"name": "Level R",
"brand": "Mondraker",
"year": "2020",
"size": ["S", "M", "L", "XL"],
"wheelSize": 27.5,
"type": "MTB",
}
}
]
}
}
}
client = Client(schema)
result = client.execute(query)
assert result == expected
def test_bikes_filter_by_type_item_query(fixtures_data):
query = """
{
bikes(first: 2, type: "Gravel"){
edges {
node {
name
brand
year
size
wheelSize
type
}
}
}
}"""
expected = {
"data": {
"bikes": {
"edges": [
{
"node": {
"name": "CAADX ULTEGRA",
"brand": "Cannondale",
"year": "2019",
"size": ["46", "51", "54", "58"],
"wheelSize": 28,
"type": "Gravel",
}
}
]
}
}
}
client = Client(schema)
result = client.execute(query)
assert result == expected
def test_shop_data_query(fixtures_data):
query = """{
shopList{
name
address
website
}
}"""
expected = {
"data": {
"shopList": [
{
"name": "Big Wheel Bicycles",
"address": "2438 Hart Ridge Road",
"website": "https://www.bigwheelbike.test",
},
{
"name": "Bike Tech",
"address": "2175 Pearl Street",
"website": "https://www.biketech.test",
},
]
}
}
client = Client(schema)
result = client.execute(query)
assert result == expected
@pytest.mark.django_db
def test_create_bike_mutation():
query = """
mutation {
createBike(bikeData:{
name:"Bullhorn",
brand:"Pegas",
year: "2019",
size: ["56", "58" ],
wheelSize: 28,
type: "Fixie"
}) {
bike {
name
brand
year
size
wheelSize
type
}
}
}
"""
expected = {
"data": {
"createBike": {
"bike": {
"name": "Bullhorn",
"brand": "Pegas",
"year": "2019",
"size": ["56", "58"],
"wheelSize": 28,
"type": "Fixie",
}
}
}
}
factory = RequestFactory()
request = factory.post(reverse("graphql-query"))
client = Client(schema)
result = client.execute(query, context=request)
assert result == expected
@pytest.mark.django_db
def test_update_bike_mutation():
    """Partial update via updateBike: untouched fields (brand, size) keep their values."""
    query = """
    mutation {
        updateBike(bikeData:{
            id: "507f1f77bcf86cd799439011",
            name:"Moterra Neo Updated",
            year: "2020",
            wheelSize: 27.5,
            type: "EBike Updated"
        }) {
            bike {
                name
                brand
                year
                size
                wheelSize
                type
            }
        }
    }
    """
    expected = {
        "data": {
            "updateBike": {
                "bike": {
                    "name": "Moterra Neo Updated",
                    "brand": "Cannondale",
                    "year": "2020",
                    "size": ["M", "L", "XL"],
                    "wheelSize": 27.5,
                    "type": "EBike Updated",
                }
            }
        }
    }
    factory = RequestFactory()
    request = factory.post(reverse("graphql-query"))
    client = Client(schema)
    result = client.execute(query, context=request)
    # Removed stray debug `print(result)` left over from development.
    assert result == expected
@pytest.mark.django_db
def test_delete_bike_mutation():
query = """
mutation {
deleteBike(id: "507f1f77bcf86cd799439011") {
success
}
}
"""
expected = {"data": {"deleteBike": {"success": True}}}
factory = RequestFactory()
request = factory.post(reverse("graphql-query"))
client = Client(schema)
result = client.execute(query, context=request)
assert result == expected
graphene-mongo-0.4.1/examples/django_mongoengine/bike/types.py 0000664 0000000 0000000 00000000452 14535040553 0024524 0 ustar 00root root 0000000 0000000 from graphene import relay
from graphene_mongo import MongoengineObjectType
from .models import Bike, Shop
class BikeType(MongoengineObjectType):
    """Relay-enabled GraphQL type generated from the Bike document."""
    class Meta:
        model = Bike
        interfaces = (relay.Node,)
class ShopType(MongoengineObjectType):
    # Plain (non-relay) GraphQL type for the Shop document.
    class Meta:
        model = Shop
graphene-mongo-0.4.1/examples/django_mongoengine/bike/urls.py 0000664 0000000 0000000 00000000356 14535040553 0024350 0 ustar 00root root 0000000 0000000 from django.urls import path
from django.views.decorators.csrf import csrf_exempt
from graphene_django.views import GraphQLView
# Single GraphiQL-enabled endpoint; CSRF-exempt so external GraphQL clients can POST.
urlpatterns = [
    path("graphql", csrf_exempt(GraphQLView.as_view(graphiql=True)), name="graphql-query")
]
graphene-mongo-0.4.1/examples/django_mongoengine/bike_catalog/ 0000775 0000000 0000000 00000000000 14535040553 0024477 5 ustar 00root root 0000000 0000000 graphene-mongo-0.4.1/examples/django_mongoengine/bike_catalog/__init__.py 0000664 0000000 0000000 00000000000 14535040553 0026576 0 ustar 00root root 0000000 0000000 graphene-mongo-0.4.1/examples/django_mongoengine/bike_catalog/settings.py 0000664 0000000 0000000 00000006735 14535040553 0026724 0 ustar 00root root 0000000 0000000 """
Django settings for bike_catalog project.
Generated by 'django-admin startproject' using Django 2.2.3.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import mongoengine
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "_@^#*p_+mm-p8+#k&8i0=dnvt03$ycqmwqs4os0-+@+u6k-f_m"
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"graphene_django",
"graphene_mongo",
"bike",
]
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
]
ROOT_URLCONF = "bike_catalog.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
]
},
}
]
WSGI_APPLICATION = "bike_catalog.wsgi.application"
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": os.path.join(BASE_DIR, "db.sqlite3"),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"},
{"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"},
{"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"},
{"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = "/static/"
# MONGO DB connection\
_MONGODB_USER = ""
_MONGODB_PASSWD = ""
_MONGODB_HOST = "localhost"
_MONGODB_NAME = "bike-catalog"
_MONGODB_PORT = 27017
_MONGODB_DATABASE_HOST = "mongodb://%s:%s@%s/%s" % (
_MONGODB_USER,
_MONGODB_PASSWD,
_MONGODB_HOST,
_MONGODB_NAME,
)
mongoengine.connect(_MONGODB_NAME, host=_MONGODB_HOST, port=_MONGODB_PORT)
GRAPHENE = {"SCHEMA": "bike.schema.schema"} # Where your Graphene schema lives
graphene-mongo-0.4.1/examples/django_mongoengine/bike_catalog/settings_test.py 0000664 0000000 0000000 00000000203 14535040553 0027743 0 ustar 00root root 0000000 0000000 from .settings import * # flake8: noqa
mongoengine.connect("graphene-mongo-test", host="mongomock://localhost", alias="default")
graphene-mongo-0.4.1/examples/django_mongoengine/bike_catalog/urls.py 0000664 0000000 0000000 00000001424 14535040553 0026037 0 ustar 00root root 0000000 0000000 """bike_catalog URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [path("admin/", admin.site.urls), path("", include("bike.urls"))]
graphene-mongo-0.4.1/examples/django_mongoengine/bike_catalog/wsgi.py 0000664 0000000 0000000 00000000621 14535040553 0026021 0 ustar 00root root 0000000 0000000 """
WSGI config for bike_catalog project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bike_catalog.settings")
application = get_wsgi_application()
graphene-mongo-0.4.1/examples/django_mongoengine/manage.py 0000664 0000000 0000000 00000001170 14535040553 0023674 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run Django's command-line utility with this project's settings module."""
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bike_catalog.settings")
    try:
        # Imported lazily so the friendly error below can fire when Django is absent.
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)


if __name__ == "__main__":
    main()
graphene-mongo-0.4.1/examples/django_mongoengine/pytest.ini 0000664 0000000 0000000 00000000151 14535040553 0024121 0 ustar 00root root 0000000 0000000 [pytest]
DJANGO_SETTINGS_MODULE = bike_catalog.settings_test
python_files = tests.py test_*.py *_tests.py graphene-mongo-0.4.1/examples/django_mongoengine/requirements.txt 0000664 0000000 0000000 00000000176 14535040553 0025363 0 ustar 00root root 0000000 0000000 Django==2.2.28
pytest==4.6.3
pytest-django==3.5.1
mongoengine==0.27.0
mongomock==3.16.0
graphene-django==2.4.0
graphene-mongo
graphene-mongo-0.4.1/examples/falcon_mongoengine/ 0000775 0000000 0000000 00000000000 14535040553 0022073 5 ustar 00root root 0000000 0000000 graphene-mongo-0.4.1/examples/falcon_mongoengine/README.md 0000664 0000000 0000000 00000003046 14535040553 0023355 0 ustar 00root root 0000000 0000000
Example Falcon+MongoEngine Project
================================
This example project demos integration between Graphene, Falcon and MongoEngine.
Getting started
---------------
First you'll need to get the source of the project. Do this by cloning the
whole Graphene repository:
```bash
# Get the example project code
git clone git@github.com:abawchen/graphene-mongo.git
cd graphene-mongo/examples/falcon_mongoengine
```
Create a virtual environment.
```bash
# Create a virtualenv in which we can install the dependencies
virtualenv env
source env/bin/activate
```
Now we can install our dependencies:
```bash
pip install -r requirements.txt
```
Setup a mongodb connection and create a database.
See the mongoengine connection details in the *app.py* file
Start the server:
On windows:
```
waitress-serve --port=9000 falcon_mongoengine.app:app
```
On Linux:
```
gunicorn -b 0.0.0.0:9000 falcon_mongoengine.app:app
```
Now head on over to
[http://127.0.0.1:9000/graphql?query=](http://127.0.0.1:9000/graphql?query=)
and run some queries!
Example:
```
http://127.0.0.1:9000/graphql?query=query
{
categories(first: 1, name: "Travel")
{
edges { node { name color } }
}
}
```
```
http://127.0.0.1:9000/graphql?query=query
{
bookmarks(first: 10)
{
pageInfo { startCursor endCursor hasNextPage hasPreviousPage }
edges {
node { name url category { name color } tags }
}
}
}
```
For tests run:
```python
pytest -v
```
graphene-mongo-0.4.1/examples/falcon_mongoengine/__init__.py 0000664 0000000 0000000 00000000000 14535040553 0024172 0 ustar 00root root 0000000 0000000 graphene-mongo-0.4.1/examples/falcon_mongoengine/api.py 0000664 0000000 0000000 00000002132 14535040553 0023214 0 ustar 00root root 0000000 0000000 import json
import falcon
from .schema import schema
def set_graphql_allow_header(req: falcon.Request, resp: falcon.Response, resource: object):
    """Falcon `after` hook advertising which HTTP methods the GraphQL endpoint allows."""
    resp.set_header("Allow", "GET, POST, OPTIONS")
class HelloWorldResource:
    """Trivial landing resource returning a JSON greeting."""

    def on_get(self, req, resp):
        name = "Hello World!"
        resp.status = falcon.HTTP_200
        # BUG FIX: the payload key was misspelled "respone" in the original.
        resp.body = json.dumps({"response": name, "status": resp.status})

    def on_post(self, req, resp):
        # Intentionally a no-op; only GET returns content.
        pass
@falcon.after(set_graphql_allow_header)
class GraphQLResource:
    """GraphQL endpoint; the query is read from the `query` URL parameter."""

    def on_get(self, req, resp):
        # BUG FIX: the original called `await schema.execute_async(query)` inside
        # a plain (non-async) `def`, which is a SyntaxError — the module could not
        # even be imported. falcon.API responders are synchronous WSGI handlers,
        # so execute the query synchronously instead.
        query = req.params["query"]
        result = schema.execute(query)
        if result.data:
            data_ret = {"data": result.data}
            resp.status = falcon.HTTP_200
            resp.body = json.dumps(data_ret, separators=(",", ":"))

    def on_post(self, req, resp):
        # POST accepts the same `query` parameter; reuse the GET handling.
        self.on_get(req, resp)
graphene-mongo-0.4.1/examples/falcon_mongoengine/app.py 0000664 0000000 0000000 00000000475 14535040553 0023233 0 ustar 00root root 0000000 0000000 import falcon
from mongoengine import connect
from .api import GraphQLResource, HelloWorldResource
connect("bookmarks_db", host="127.0.0.1", port=27017)
app = application = falcon.API()
helloWorld = HelloWorldResource()
graphQL = GraphQLResource()
app.add_route("/", helloWorld)
app.add_route("/graphql", graphQL)
graphene-mongo-0.4.1/examples/falcon_mongoengine/models.py 0000664 0000000 0000000 00000001042 14535040553 0023725 0 ustar 00root root 0000000 0000000 from mongoengine import Document, CASCADE
from mongoengine.fields import StringField, ListField, ReferenceField
class Category(Document):
    """Bookmark category with a display color (hex string, e.g. "#1769ff")."""
    meta = {"collection": "category"}
    name = StringField(max_length=140, required=True)
    color = StringField(max_length=7, required=True)
class Bookmark(Document):
    """Saved URL; deleting its Category cascades and removes the bookmark too."""
    meta = {"collection": "bookmark"}
    name = StringField(required=True)
    url = StringField(required=True)
    category = ReferenceField("Category", reverse_delete_rule=CASCADE)
    tags = ListField(StringField(max_length=50))
graphene-mongo-0.4.1/examples/falcon_mongoengine/pytest.ini 0000664 0000000 0000000 00000000065 14535040553 0024125 0 ustar 00root root 0000000 0000000 [pytest]
python_files = tests.py test_*.py *_tests.py graphene-mongo-0.4.1/examples/falcon_mongoengine/requirements.txt 0000664 0000000 0000000 00000000140 14535040553 0025352 0 ustar 00root root 0000000 0000000 falcon==2.0.0
mongoengine==0.17.0
graphene-mongo
waitress==2.1.2
pytest==4.6.3
mongomock==3.16.0 graphene-mongo-0.4.1/examples/falcon_mongoengine/schema.py 0000664 0000000 0000000 00000000536 14535040553 0023711 0 ustar 00root root 0000000 0000000 import graphene
from graphene_mongo.fields import MongoengineConnectionField
from .types import CategoryType, BookmarkType
class Query(graphene.ObjectType):
    """Root query exposing relay-style connections for both collections."""
    categories = MongoengineConnectionField(CategoryType)
    bookmarks = MongoengineConnectionField(BookmarkType)


schema = graphene.Schema(query=Query, types=[CategoryType, BookmarkType])
graphene-mongo-0.4.1/examples/falcon_mongoengine/tests/ 0000775 0000000 0000000 00000000000 14535040553 0023235 5 ustar 00root root 0000000 0000000 graphene-mongo-0.4.1/examples/falcon_mongoengine/tests/__init__.py 0000664 0000000 0000000 00000000000 14535040553 0025334 0 ustar 00root root 0000000 0000000 graphene-mongo-0.4.1/examples/falcon_mongoengine/tests/fixtures.py 0000664 0000000 0000000 00000002234 14535040553 0025461 0 ustar 00root root 0000000 0000000 import pytest
from examples.falcon_mongoengine.models import Category, Bookmark
def fixture_category_data():
    """Reset the `category` collection and return two freshly saved categories."""
    Category.drop_collection()
    category_one = Category(name="Travel", color="#ed008c")
    category_one.save()
    category_two = Category(name="Work", color="#1769ff")
    category_two.save()
    return category_one, category_two
@pytest.fixture(scope="module")
def fixtures_data():
    """Module-scoped pytest fixture: seed categories and three bookmarks."""
    category_one, category_two = fixture_category_data()
    Bookmark.drop_collection()
    bookmark_one = Bookmark(
        name="Travel tips",
        url="https://www.traveltips.test",
        category=category_one,
        tags=["travel", "tips", "howto"],
    )
    bookmark_one.save()
    bookmark_two = Bookmark(
        name="DIY vacation",
        url="https://www.diyvacation.test",
        category=category_one,
        tags=["travel", "diy", "holiday", "vacation"],
    )
    bookmark_two.save()
    bookmark_three = Bookmark(
        name="Awesome python",
        url="https://awesomelists.top/#repos/vinta/awesome-python",
        category=category_two,
        tags=["python", "dev", "awesome", "tutorial"],
    )
    bookmark_three.save()
    return True
graphene-mongo-0.4.1/examples/falcon_mongoengine/tests/tests.py 0000664 0000000 0000000 00000007753 14535040553 0024765 0 ustar 00root root 0000000 0000000 import mongoengine
from graphene.test import Client
from examples.falcon_mongoengine.schema import schema
from .fixtures import fixtures_data
mongoengine.connect("graphene-mongo-test", host="mongomock://localhost", alias="default")
def test_category_last_1_item_query(fixtures_data):
query = """
{
categories(last: 1){
edges {
node {
name
color
}
}
}
}"""
expected = {
"data": {
"categories": {
"edges": [
{
"node": {
"name": "Work",
"color": "#1769ff",
}
}
]
}
}
}
client = Client(schema)
result = client.execute(query)
assert result == expected
def test_category_filter_item_query(fixtures_data):
query = """
{
categories(name: "Work"){
edges {
node {
name
color
}
}
}
}"""
expected = {"data": {"categories": {"edges": [{"node": {"name": "Work", "color": "#1769ff"}}]}}}
client = Client(schema)
result = client.execute(query)
assert result == expected
def test_bookmarks_first_2_items_query(fixtures_data):
query = """
{
bookmarks(first: 2){
edges {
node {
name
url
category {
name
color
}
tags
}
}
}
}"""
expected = {
"data": {
"bookmarks": {
"edges": [
{
"node": {
"name": "Travel tips",
"url": "https://www.traveltips.test",
"category": {"name": "Travel", "color": "#ed008c"},
"tags": ["travel", "tips", "howto"],
}
},
{
"node": {
"name": "DIY vacation",
"url": "https://www.diyvacation.test",
"category": {"name": "Travel", "color": "#ed008c"},
"tags": ["travel", "diy", "holiday", "vacation"],
}
},
]
}
}
}
client = Client(schema)
result = client.execute(query)
assert result == expected
def test_bookmarks_filter_items_query(fixtures_data):
query = """
{
bookmarks(first: 1, name: "Awesome python"){
edges {
node {
name
url
category {
name
color
}
tags
}
}
}
}"""
expected = {
"data": {
"bookmarks": {
"edges": [
{
"node": {
"name": "Awesome python",
"url": "https://awesomelists.top/#repos/vinta/awesome-python",
"category": {"name": "Work", "color": "#1769ff"},
"tags": ["python", "dev", "awesome", "tutorial"],
}
}
]
}
}
}
client = Client(schema)
result = client.execute(query)
assert result == expected
graphene-mongo-0.4.1/examples/falcon_mongoengine/types.py 0000664 0000000 0000000 00000000546 14535040553 0023616 0 ustar 00root root 0000000 0000000 from graphene import relay
from graphene_mongo import MongoengineObjectType
from .models import Bookmark, Category
class CategoryType(MongoengineObjectType):
    """Relay-enabled GraphQL type generated from the Category document."""
    class Meta:
        model = Category
        interfaces = (relay.Node,)
class BookmarkType(MongoengineObjectType):
    """Relay-enabled GraphQL type generated from the Bookmark document."""
    class Meta:
        model = Bookmark
        interfaces = (relay.Node,)
graphene-mongo-0.4.1/examples/flask_mongoengine/ 0000775 0000000 0000000 00000000000 14535040553 0021731 5 ustar 00root root 0000000 0000000 graphene-mongo-0.4.1/examples/flask_mongoengine/README.md 0000664 0000000 0000000 00000003300 14535040553 0023204 0 ustar 00root root 0000000 0000000
Example Flask+MongoEngine Project
================================
This example project demos integration between Graphene, Flask and MongoEngine.
The project contains three models, which are `Department`, `Employee` and `Role`.
Getting started
---------------
First you'll need to get the source of the project. Do this by cloning the
whole Graphene repository:
```bash
# Get the example project code
git clone git@github.com:abawchen/graphene-mongo.git
cd graphene-mongo/examples/flask_mongoengine
```
It is a good idea (but not required) to create a virtual environment
for this project. We'll do this using
[virtualenv](http://docs.python-guide.org/en/latest/dev/virtualenvs/)
to keep things simple,
but you may also find something like
[virtualenvwrapper](https://virtualenvwrapper.readthedocs.org/en/latest/)
to be useful:
```bash
# Create a virtualenv in which we can install the dependencies
virtualenv env
source env/bin/activate
```
Now we can install our dependencies:
```bash
pip install -r requirements.txt
```
Now the following command will setup the database, and start the server:
```bash
python app.py
```
Now head on over to
[http://127.0.0.1:5000/graphql](http://127.0.0.1:5000/graphql)
and run some queries!
Sample query:
```
{
allEmployees {
edges {
node {
id,
name,
department {
id,
name
},
roles {
edges {
node {
id,
name
}
}
},
leader {
id,
name
}
tasks {
edges {
node {
name,
deadline
}
}
}
}
}
}
}
```
graphene-mongo-0.4.1/examples/flask_mongoengine/__init__.py 0000664 0000000 0000000 00000000000 14535040553 0024030 0 ustar 00root root 0000000 0000000 graphene-mongo-0.4.1/examples/flask_mongoengine/app.py 0000664 0000000 0000000 00000001455 14535040553 0023070 0 ustar 00root root 0000000 0000000 from database import init_db
from flask import Flask
from flask_graphql import GraphQLView
from schema import schema
app = Flask(__name__)
app.debug = True
default_query = """
{
allEmployees {
edges {
node {
id,
name,
department {
id,
name
},
roles {
edges {
node {
id,
name
}
}
},
leader {
id,
name
}
tasks {
edges {
node {
name,
deadline
}
}
}
}
}
}
}""".strip()
app.add_url_rule("/graphql", view_func=GraphQLView.as_view("graphql", schema=schema, graphiql=True))
if __name__ == "__main__":
init_db()
app.run()
graphene-mongo-0.4.1/examples/flask_mongoengine/database.py 0000664 0000000 0000000 00000001703 14535040553 0024050 0 ustar 00root root 0000000 0000000 from mongoengine import connect
from .models import Department, Employee, Role, Task
connect("graphene-mongo-example", host="mongomock://localhost", alias="default")
def init_db():
# Create the fixtures
engineering = Department(name="Engineering")
engineering.save()
hr = Department(name="Human Resources")
hr.save()
manager = Role(name="manager")
manager.save()
engineer = Role(name="engineer")
engineer.save()
debug = Task(name="Debug")
test = Task(name="Test")
tracy = Employee(name="Tracy", department=hr, roles=[engineer, manager], tasks=[])
tracy.save()
peter = Employee(
name="Peter",
department=engineering,
leader=tracy,
roles=[engineer],
tasks=[debug, test],
)
peter.save()
roy = Employee(
name="Roy",
department=engineering,
leader=tracy,
roles=[engineer],
tasks=[debug],
)
roy.save()
graphene-mongo-0.4.1/examples/flask_mongoengine/models.py 0000664 0000000 0000000 00000001465 14535040553 0023574 0 ustar 00root root 0000000 0000000 from datetime import datetime
from mongoengine import Document, EmbeddedDocument
from mongoengine.fields import (
DateTimeField,
EmbeddedDocumentField,
ListField,
ReferenceField,
StringField,
)
class Department(Document):
meta = {"collection": "department"}
name = StringField()
class Role(Document):
meta = {"collection": "role"}
name = StringField()
class Task(EmbeddedDocument):
name = StringField()
deadline = DateTimeField(default=datetime.now)
class Employee(Document):
meta = {"collection": "employee"}
name = StringField()
hired_on = DateTimeField(default=datetime.now)
department = ReferenceField(Department)
roles = ListField(ReferenceField(Role))
leader = ReferenceField("Employee")
tasks = ListField(EmbeddedDocumentField(Task))
graphene-mongo-0.4.1/examples/flask_mongoengine/requirements.txt 0000664 0000000 0000000 00000000104 14535040553 0025210 0 ustar 00root root 0000000 0000000 Flask>=1.0.0
Flask-GraphQL==2.0.0
graphene-mongo
mongomock==3.14.0
graphene-mongo-0.4.1/examples/flask_mongoengine/schema.py 0000664 0000000 0000000 00000002472 14535040553 0023550 0 ustar 00root root 0000000 0000000 import graphene
from graphene.relay import Node
from graphene_mongo import MongoengineConnectionField, MongoengineObjectType
from .models import Department as DepartmentModel
from .models import Employee as EmployeeModel
from .models import Role as RoleModel
from .models import Task as TaskModel
class Department(MongoengineObjectType):
class Meta:
model = DepartmentModel
interfaces = (Node,)
class Role(MongoengineObjectType):
class Meta:
model = RoleModel
interfaces = (Node,)
filter_fields = {
"name": [
"exact",
"icontains",
"istartswith",
]
}
class Task(MongoengineObjectType):
class Meta:
model = TaskModel
interfaces = (Node,)
class Employee(MongoengineObjectType):
class Meta:
model = EmployeeModel
interfaces = (Node,)
filter_fields = {
"name": [
"exact",
"icontains",
"istartswith",
]
}
class Query(graphene.ObjectType):
node = Node.Field()
all_employees = MongoengineConnectionField(Employee)
all_roles = MongoengineConnectionField(Role)
role = graphene.Field(Role)
schema = graphene.Schema(query=Query, types=[Department, Employee, Role])
graphene-mongo-0.4.1/graphene_mongo/ 0000775 0000000 0000000 00000000000 14535040553 0017416 5 ustar 00root root 0000000 0000000 graphene-mongo-0.4.1/graphene_mongo/__init__.py 0000664 0000000 0000000 00000000764 14535040553 0021536 0 ustar 00root root 0000000 0000000 from .fields import MongoengineConnectionField
from .fields_async import AsyncMongoengineConnectionField
from .types import MongoengineObjectType, MongoengineInputType, MongoengineInterfaceType
from .types_async import AsyncMongoengineObjectType
__version__ = "0.1.1"
__all__ = [
"__version__",
"MongoengineObjectType",
"AsyncMongoengineObjectType",
"MongoengineInputType",
"MongoengineInterfaceType",
"MongoengineConnectionField",
"AsyncMongoengineConnectionField",
]
graphene-mongo-0.4.1/graphene_mongo/advanced_types.py 0000664 0000000 0000000 00000003676 14535040553 0022775 0 ustar 00root root 0000000 0000000 import base64
import graphene
class FileFieldType(graphene.ObjectType):
content_type = graphene.String()
md5 = graphene.String()
chunk_size = graphene.Int()
length = graphene.Int()
data = graphene.String()
# Support Graphene Federation v2
_shareable = True
@classmethod
def _resolve_fs_field(cls, field, name, default_value=None):
v = getattr(field.instance, field.key)
return getattr(v, name, default_value)
def resolve_content_type(self, info):
return FileFieldType._resolve_fs_field(self, "content_type")
def resolve_md5(self, info):
return FileFieldType._resolve_fs_field(self, "md5")
def resolve_chunk_size(self, info):
return FileFieldType._resolve_fs_field(self, "chunk_size", 0)
def resolve_length(self, info):
return FileFieldType._resolve_fs_field(self, "length", 0)
def resolve_data(self, info):
v = getattr(self.instance, self.key)
data = v.read()
if data is not None:
return base64.b64encode(data).decode("utf-8")
return None
class _CoordinatesTypeField(graphene.ObjectType):
type = graphene.String()
# Support Graphene Federation v2
_shareable = True
def resolve_type(self, info):
return self["type"]
def resolve_coordinates(self, info):
return self["coordinates"]
class PointFieldType(_CoordinatesTypeField):
coordinates = graphene.List(graphene.Float)
class PointFieldInputType(graphene.InputObjectType):
type = graphene.String(default_value="Point")
coordinates = graphene.List(graphene.Float, required=True)
class PolygonFieldType(_CoordinatesTypeField):
coordinates = graphene.List(graphene.List(graphene.List(graphene.Float)))
class MultiPolygonFieldType(_CoordinatesTypeField):
coordinates = graphene.List(
graphene.List(
graphene.List(
graphene.List(graphene.Float),
)
)
)
graphene-mongo-0.4.1/graphene_mongo/converter.py 0000664 0000000 0000000 00000103410 14535040553 0021776 0 ustar 00root root 0000000 0000000 import asyncio
import sys
import graphene
import mongoengine
from graphene.types.json import JSONString
from graphene.utils.str_converters import to_snake_case, to_camel_case
from mongoengine.base import get_document, LazyReference
from . import advanced_types
from .utils import (
import_single_dispatch,
get_field_description,
get_query_fields,
ExecutorEnum,
sync_to_async,
)
from concurrent.futures import ThreadPoolExecutor, as_completed
singledispatch = import_single_dispatch()
class MongoEngineConversionError(Exception):
pass
@singledispatch
def convert_mongoengine_field(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC):
raise MongoEngineConversionError(
"Don't know how to convert the MongoEngine field %s (%s)" % (field, field.__class__)
)
@convert_mongoengine_field.register(mongoengine.EmailField)
@convert_mongoengine_field.register(mongoengine.StringField)
@convert_mongoengine_field.register(mongoengine.URLField)
def convert_field_to_string(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC):
return graphene.String(
description=get_field_description(field, registry), required=field.required
)
@convert_mongoengine_field.register(mongoengine.UUIDField)
@convert_mongoengine_field.register(mongoengine.ObjectIdField)
def convert_field_to_id(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC):
return graphene.ID(description=get_field_description(field, registry), required=field.required)
@convert_mongoengine_field.register(mongoengine.IntField)
@convert_mongoengine_field.register(mongoengine.LongField)
@convert_mongoengine_field.register(mongoengine.SequenceField)
def convert_field_to_int(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC):
return graphene.Int(description=get_field_description(field, registry), required=field.required)
@convert_mongoengine_field.register(mongoengine.BooleanField)
def convert_field_to_boolean(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC):
return graphene.Boolean(
description=get_field_description(field, registry), required=field.required
)
@convert_mongoengine_field.register(mongoengine.FloatField)
def convert_field_to_float(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC):
return graphene.Float(
description=get_field_description(field, registry), required=field.required
)
@convert_mongoengine_field.register(mongoengine.Decimal128Field)
@convert_mongoengine_field.register(mongoengine.DecimalField)
def convert_field_to_decimal(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC):
return graphene.Decimal(
description=get_field_description(field, registry), required=field.required
)
@convert_mongoengine_field.register(mongoengine.DateTimeField)
def convert_field_to_datetime(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC):
return graphene.DateTime(
description=get_field_description(field, registry), required=field.required
)
@convert_mongoengine_field.register(mongoengine.DateField)
def convert_field_to_date(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC):
return graphene.Date(
description=get_field_description(field, registry), required=field.required
)
@convert_mongoengine_field.register(mongoengine.DictField)
@convert_mongoengine_field.register(mongoengine.MapField)
def convert_field_to_jsonstring(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC):
return JSONString(description=get_field_description(field, registry), required=field.required)
@convert_mongoengine_field.register(mongoengine.PointField)
def convert_point_to_field(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC):
return graphene.Field(
advanced_types.PointFieldType,
description=get_field_description(field, registry),
required=field.required,
)
@convert_mongoengine_field.register(mongoengine.PolygonField)
def convert_polygon_to_field(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC):
return graphene.Field(
advanced_types.PolygonFieldType,
description=get_field_description(field, registry),
required=field.required,
)
@convert_mongoengine_field.register(mongoengine.MultiPolygonField)
def convert_multipolygon_to_field(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC):
return graphene.Field(
advanced_types.MultiPolygonFieldType,
description=get_field_description(field, registry),
required=field.required,
)
@convert_mongoengine_field.register(mongoengine.FileField)
def convert_file_to_field(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC):
return graphene.Field(
advanced_types.FileFieldType,
description=get_field_description(field, registry),
required=field.required,
)
@convert_mongoengine_field.register(mongoengine.ListField)
@convert_mongoengine_field.register(mongoengine.EmbeddedDocumentListField)
@convert_mongoengine_field.register(mongoengine.GeoPointField)
def convert_field_to_list(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC):
base_type = convert_mongoengine_field(field.field, registry=registry, executor=executor)
if isinstance(base_type, graphene.Field):
if isinstance(field.field, mongoengine.GenericReferenceField):
def get_reference_objects(*args, **kwargs):
document = get_document(args[0][0])
document_field = mongoengine.ReferenceField(document)
document_field = convert_mongoengine_field(document_field, registry)
document_field_type = document_field.get_type().type
queried_fields = list()
filter_args = list()
if document_field_type._meta.filter_fields:
for key, values in document_field_type._meta.filter_fields.items():
for each in values:
filter_args.append(key + "__" + each)
for each in get_query_fields(args[0][3][0])[document_field_type._meta.name].keys():
item = to_snake_case(each)
if item in document._fields_ordered + tuple(filter_args):
queried_fields.append(item)
return (
document.objects()
.no_dereference()
.only(*set(list(document_field_type._meta.required_fields) + queried_fields))
.filter(pk__in=args[0][1])
)
def get_non_querying_object(*args, **kwargs):
model = get_document(args[0][0])
return [model(pk=each) for each in args[0][1]]
def reference_resolver(root, *args, **kwargs):
to_resolve = getattr(root, field.name or field.db_name)
if to_resolve:
choice_to_resolve = dict()
querying_union_types = list(get_query_fields(args[0]).keys())
if "__typename" in querying_union_types:
querying_union_types.remove("__typename")
to_resolve_models = list()
for each in querying_union_types:
if executor == ExecutorEnum.SYNC:
to_resolve_models.append(registry._registry_string_map[each])
else:
to_resolve_models.append(registry._registry_async_string_map[each])
to_resolve_object_ids = list()
for each in to_resolve:
if isinstance(each, LazyReference):
to_resolve_object_ids.append(each.pk)
model = each.document_type._class_name
if model not in choice_to_resolve:
choice_to_resolve[model] = list()
choice_to_resolve[model].append(each.pk)
else:
to_resolve_object_ids.append(each["_ref"].id)
if each["_cls"] not in choice_to_resolve:
choice_to_resolve[each["_cls"]] = list()
choice_to_resolve[each["_cls"]].append(each["_ref"].id)
pool = ThreadPoolExecutor(5)
futures = list()
for model, object_id_list in choice_to_resolve.items():
if model in to_resolve_models:
futures.append(
pool.submit(
get_reference_objects,
(model, object_id_list, registry, args),
)
)
else:
futures.append(
pool.submit(
get_non_querying_object,
(model, object_id_list, registry, args),
)
)
result = list()
for x in as_completed(futures):
result += x.result()
result_object_ids = list()
for each in result:
result_object_ids.append(each.id)
ordered_result = list()
for each in to_resolve_object_ids:
ordered_result.append(result[result_object_ids.index(each)])
return ordered_result
return None
async def get_reference_objects_async(*args, **kwargs):
document = get_document(args[0])
document_field = mongoengine.ReferenceField(document)
document_field = convert_mongoengine_field(
document_field, registry, executor=ExecutorEnum.ASYNC
)
document_field_type = document_field.get_type().type
queried_fields = list()
filter_args = list()
if document_field_type._meta.filter_fields:
for key, values in document_field_type._meta.filter_fields.items():
for each in values:
filter_args.append(key + "__" + each)
for each in get_query_fields(args[3][0])[document_field_type._meta.name].keys():
item = to_snake_case(each)
if item in document._fields_ordered + tuple(filter_args):
queried_fields.append(item)
return await sync_to_async(list)(
document.objects()
.no_dereference()
.only(*set(list(document_field_type._meta.required_fields) + queried_fields))
.filter(pk__in=args[1])
)
async def get_non_querying_object_async(*args, **kwargs):
model = get_document(args[0])
return [model(pk=each) for each in args[1]]
async def reference_resolver_async(root, *args, **kwargs):
to_resolve = getattr(root, field.name or field.db_name)
if to_resolve:
choice_to_resolve = dict()
querying_union_types = list(get_query_fields(args[0]).keys())
if "__typename" in querying_union_types:
querying_union_types.remove("__typename")
to_resolve_models = list()
for each in querying_union_types:
if executor == ExecutorEnum.SYNC:
to_resolve_models.append(registry._registry_string_map[each])
else:
to_resolve_models.append(registry._registry_async_string_map[each])
to_resolve_object_ids = list()
for each in to_resolve:
if isinstance(each, LazyReference):
to_resolve_object_ids.append(each.pk)
model = each.document_type._class_name
if model not in choice_to_resolve:
choice_to_resolve[model] = list()
choice_to_resolve[model].append(each.pk)
else:
to_resolve_object_ids.append(each["_ref"].id)
if each["_cls"] not in choice_to_resolve:
choice_to_resolve[each["_cls"]] = list()
choice_to_resolve[each["_cls"]].append(each["_ref"].id)
loop = asyncio.get_event_loop()
tasks = []
for model, object_id_list in choice_to_resolve.items():
if model in to_resolve_models:
task = loop.create_task(
get_reference_objects_async(model, object_id_list, registry, args)
)
else:
task = loop.create_task(
get_non_querying_object_async(model, object_id_list, registry, args)
)
tasks.append(task)
result = await asyncio.gather(*tasks)
result_object = {}
for items in result:
for item in items:
result_object[item.id] = item
ordered_result = list()
for each in to_resolve_object_ids:
ordered_result.append(result_object[each])
return ordered_result
return None
return graphene.List(
base_type._type,
description=get_field_description(field, registry),
required=field.required,
resolver=reference_resolver
if executor == ExecutorEnum.SYNC
else reference_resolver_async,
)
return graphene.List(
base_type._type,
description=get_field_description(field, registry),
required=field.required,
)
if isinstance(base_type, (graphene.Dynamic)):
base_type = base_type.get_type()
if base_type is None:
return
base_type = base_type._type
if graphene.is_node(base_type):
return base_type._meta.connection_field_class(base_type)
# Non-relationship field
relations = (mongoengine.ReferenceField, mongoengine.EmbeddedDocumentField)
if not isinstance(base_type, (graphene.List, graphene.NonNull)) and not isinstance(
field.field, relations
):
base_type = type(base_type)
return graphene.List(
base_type,
description=get_field_description(field, registry),
required=field.required,
)
@convert_mongoengine_field.register(mongoengine.GenericEmbeddedDocumentField)
@convert_mongoengine_field.register(mongoengine.GenericReferenceField)
def convert_field_to_union(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC):
_types = []
for choice in field.choices:
if isinstance(field, mongoengine.GenericReferenceField):
_field = mongoengine.ReferenceField(get_document(choice))
elif isinstance(field, mongoengine.GenericEmbeddedDocumentField):
_field = mongoengine.EmbeddedDocumentField(choice)
_field = convert_mongoengine_field(_field, registry, executor=executor)
_type = _field.get_type()
if _type:
_types.append(_type.type)
else:
# TODO: Register type auto-matically here.
pass
if len(_types) == 0:
return None
field_name = field.db_field
if field_name is None:
# Get db_field name from parent mongo_field
for db_field_name, _mongo_parent_field in field.owner_document._fields.items():
if hasattr(_mongo_parent_field, "field") and _mongo_parent_field.field == field:
field_name = db_field_name
break
name = to_camel_case(
"{}_{}_union_type".format(
field._owner_document.__name__,
field_name,
)
)
Meta = type("Meta", (object,), {"types": tuple(_types)})
_union = type(name, (graphene.Union,), {"Meta": Meta})
def reference_resolver(root, *args, **kwargs):
de_referenced = getattr(root, field.name or field.db_name)
if de_referenced:
document = get_document(de_referenced["_cls"])
document_field = mongoengine.ReferenceField(document)
document_field = convert_mongoengine_field(document_field, registry, executor=executor)
_type = document_field.get_type().type
filter_args = list()
if _type._meta.filter_fields:
for key, values in _type._meta.filter_fields.items():
for each in values:
filter_args.append(key + "__" + each)
querying_types = list(get_query_fields(args[0]).keys())
if _type.__name__ in querying_types:
queried_fields = list()
for each in get_query_fields(args[0])[_type._meta.name].keys():
item = to_snake_case(each)
if item in document._fields_ordered + tuple(filter_args):
queried_fields.append(item)
return (
document.objects()
.no_dereference()
.only(*list(set(list(_type._meta.required_fields) + queried_fields)))
.get(pk=de_referenced["_ref"].id)
)
return document()
return None
def lazy_reference_resolver(root, *args, **kwargs):
document = getattr(root, field.name or field.db_name)
if document:
if document._cached_doc:
return document._cached_doc
queried_fields = list()
document_field_type = registry.get_type_for_model(
document.document_type, executor=executor
)
querying_types = list(get_query_fields(args[0]).keys())
filter_args = list()
if document_field_type._meta.filter_fields:
for key, values in document_field_type._meta.filter_fields.items():
for each in values:
filter_args.append(key + "__" + each)
if document_field_type._meta.name in querying_types:
for each in get_query_fields(args[0])[document_field_type._meta.name].keys():
item = to_snake_case(each)
if item in document.document_type._fields_ordered + tuple(filter_args):
queried_fields.append(item)
_type = registry.get_type_for_model(document.document_type, executor=executor)
return (
document.document_type.objects()
.no_dereference()
.only(*(set((list(_type._meta.required_fields) + queried_fields))))
.get(pk=document.pk)
)
return document.document_type()
return None
async def reference_resolver_async(root, *args, **kwargs):
de_referenced = getattr(root, field.name or field.db_name)
if de_referenced:
document = get_document(de_referenced["_cls"])
document_field = mongoengine.ReferenceField(document)
document_field = convert_mongoengine_field(
document_field, registry, executor=ExecutorEnum.ASYNC
)
_type = document_field.get_type().type
filter_args = list()
if _type._meta.filter_fields:
for key, values in _type._meta.filter_fields.items():
for each in values:
filter_args.append(key + "__" + each)
querying_types = list(get_query_fields(args[0]).keys())
if _type.__name__ in querying_types:
queried_fields = list()
for each in get_query_fields(args[0])[_type._meta.name].keys():
item = to_snake_case(each)
if item in document._fields_ordered + tuple(filter_args):
queried_fields.append(item)
return await sync_to_async(
document.objects()
.no_dereference()
.only(*list(set(list(_type._meta.required_fields) + queried_fields)))
.get
)(pk=de_referenced["_ref"].id)
return await sync_to_async(document)()
return None
async def lazy_reference_resolver_async(root, *args, **kwargs):
document = getattr(root, field.name or field.db_name)
if document:
if document._cached_doc:
return document._cached_doc
queried_fields = list()
document_field_type = registry.get_type_for_model(
document.document_type, executor=executor
)
querying_types = list(get_query_fields(args[0]).keys())
filter_args = list()
if document_field_type._meta.filter_fields:
for key, values in document_field_type._meta.filter_fields.items():
for each in values:
filter_args.append(key + "__" + each)
if document_field_type._meta.name in querying_types:
for each in get_query_fields(args[0])[document_field_type._meta.name].keys():
item = to_snake_case(each)
if item in document.document_type._fields_ordered + tuple(filter_args):
queried_fields.append(item)
_type = registry.get_type_for_model(document.document_type, executor=executor)
return await sync_to_async(
document.document_type.objects()
.no_dereference()
.only(*(set((list(_type._meta.required_fields) + queried_fields))))
.get
)(pk=document.pk)
return await sync_to_async(document.document_type)()
return None
if isinstance(field, mongoengine.GenericLazyReferenceField):
field_resolver = None
required = False
if field.db_field is not None:
required = field.required
resolver_function = getattr(
registry.get_type_for_model(field.owner_document, executor=executor),
"resolve_" + field.db_field,
None,
)
if resolver_function and callable(resolver_function):
field_resolver = resolver_function
return graphene.Field(
_union,
resolver=field_resolver
if field_resolver
else (
lazy_reference_resolver
if executor == ExecutorEnum.SYNC
else lazy_reference_resolver_async
),
description=get_field_description(field, registry),
required=required,
)
elif isinstance(field, mongoengine.GenericReferenceField):
field_resolver = None
required = False
if field.db_field is not None:
required = field.required
resolver_function = getattr(
registry.get_type_for_model(field.owner_document, executor=executor),
"resolve_" + field.db_field,
None,
)
if resolver_function and callable(resolver_function):
field_resolver = resolver_function
return graphene.Field(
_union,
resolver=field_resolver
if field_resolver
else (
reference_resolver if executor == ExecutorEnum.SYNC else reference_resolver_async
),
description=get_field_description(field, registry),
required=required,
)
return graphene.Field(_union)
@convert_mongoengine_field.register(mongoengine.EmbeddedDocumentField)
@convert_mongoengine_field.register(mongoengine.ReferenceField)
@convert_mongoengine_field.register(mongoengine.CachedReferenceField)
def convert_field_to_dynamic(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC):
model = field.document_type
def reference_resolver(root, *args, **kwargs):
document = getattr(root, field.name or field.db_name)
if document:
queried_fields = list()
_type = registry.get_type_for_model(field.document_type, executor=executor)
filter_args = list()
if _type._meta.filter_fields:
for key, values in _type._meta.filter_fields.items():
for each in values:
filter_args.append(key + "__" + each)
for each in get_query_fields(args[0]).keys():
item = to_snake_case(each)
if item in field.document_type._fields_ordered + tuple(filter_args):
queried_fields.append(item)
return (
field.document_type.objects()
.no_dereference()
.only(*(set(list(_type._meta.required_fields) + queried_fields)))
.get(pk=document.id)
)
return None
def cached_reference_resolver(root, *args, **kwargs):
if field:
queried_fields = list()
_type = registry.get_type_for_model(field.document_type, executor=executor)
filter_args = list()
if _type._meta.filter_fields:
for key, values in _type._meta.filter_fields.items():
for each in values:
filter_args.append(key + "__" + each)
for each in get_query_fields(args[0]).keys():
item = to_snake_case(each)
if item in field.document_type._fields_ordered + tuple(filter_args):
queried_fields.append(item)
return (
field.document_type.objects()
.no_dereference()
.only(*(set(list(_type._meta.required_fields) + queried_fields)))
.get(pk=getattr(root, field.name or field.db_name))
)
return None
async def reference_resolver_async(root, *args, **kwargs):
document = getattr(root, field.name or field.db_name)
if document:
queried_fields = list()
_type = registry.get_type_for_model(field.document_type, executor=executor)
filter_args = list()
if _type._meta.filter_fields:
for key, values in _type._meta.filter_fields.items():
for each in values:
filter_args.append(key + "__" + each)
for each in get_query_fields(args[0]).keys():
item = to_snake_case(each)
if item in field.document_type._fields_ordered + tuple(filter_args):
queried_fields.append(item)
return await sync_to_async(
field.document_type.objects()
.no_dereference()
.only(*(set(list(_type._meta.required_fields) + queried_fields)))
.get
)(pk=document.id)
return None
async def cached_reference_resolver_async(root, *args, **kwargs):
if field:
queried_fields = list()
_type = registry.get_type_for_model(field.document_type, executor=executor)
filter_args = list()
if _type._meta.filter_fields:
for key, values in _type._meta.filter_fields.items():
for each in values:
filter_args.append(key + "__" + each)
for each in get_query_fields(args[0]).keys():
item = to_snake_case(each)
if item in field.document_type._fields_ordered + tuple(filter_args):
queried_fields.append(item)
return await sync_to_async(
field.document_type.objects()
.no_dereference()
.only(*(set(list(_type._meta.required_fields) + queried_fields)))
.get
)(pk=getattr(root, field.name or field.db_name))
return None
def dynamic_type():
_type = registry.get_type_for_model(model, executor=executor)
if not _type:
return None
if isinstance(field, mongoengine.EmbeddedDocumentField):
return graphene.Field(
_type,
description=get_field_description(field, registry),
required=field.required,
)
field_resolver = None
required = False
if field.db_field is not None:
required = field.required
resolver_function = getattr(
registry.get_type_for_model(field.owner_document, executor=executor),
"resolve_" + field.db_field,
None,
)
if resolver_function and callable(resolver_function):
field_resolver = resolver_function
if isinstance(field, mongoengine.ReferenceField):
return graphene.Field(
_type,
resolver=field_resolver
if field_resolver
else (
reference_resolver
if executor == ExecutorEnum.SYNC
else reference_resolver_async
),
description=get_field_description(field, registry),
required=required,
)
else:
return graphene.Field(
_type,
resolver=field_resolver
if field_resolver
else (
cached_reference_resolver
if executor == ExecutorEnum.SYNC
else cached_reference_resolver_async
),
description=get_field_description(field, registry),
required=required,
)
return graphene.Dynamic(dynamic_type)
@convert_mongoengine_field.register(mongoengine.LazyReferenceField)
def convert_lazy_field_to_dynamic(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC):
model = field.document_type
def lazy_resolver(root, *args, **kwargs):
document = getattr(root, field.name or field.db_name)
if document:
if document._cached_doc:
return document._cached_doc
queried_fields = list()
_type = registry.get_type_for_model(document.document_type, executor=executor)
filter_args = list()
if _type._meta.filter_fields:
for key, values in _type._meta.filter_fields.items():
for each in values:
filter_args.append(key + "__" + each)
for each in get_query_fields(args[0]).keys():
item = to_snake_case(each)
if item in document.document_type._fields_ordered + tuple(filter_args):
queried_fields.append(item)
return (
document.document_type.objects()
.no_dereference()
.only(*(set((list(_type._meta.required_fields) + queried_fields))))
.get(pk=document.pk)
)
return None
async def lazy_resolver_async(root, *args, **kwargs):
document = getattr(root, field.name or field.db_name)
if document:
if document._cached_doc:
return document._cached_doc
queried_fields = list()
_type = registry.get_type_for_model(document.document_type, executor=executor)
filter_args = list()
if _type._meta.filter_fields:
for key, values in _type._meta.filter_fields.items():
for each in values:
filter_args.append(key + "__" + each)
for each in get_query_fields(args[0]).keys():
item = to_snake_case(each)
if item in document.document_type._fields_ordered + tuple(filter_args):
queried_fields.append(item)
return await sync_to_async(
document.document_type.objects()
.no_dereference()
.only(*(set((list(_type._meta.required_fields) + queried_fields))))
.get
)(pk=document.pk)
return None
def dynamic_type():
_type = registry.get_type_for_model(model, executor=executor)
if not _type:
return None
field_resolver = None
required = False
if field.db_field is not None:
required = field.required
resolver_function = getattr(
registry.get_type_for_model(field.owner_document, executor=executor),
"resolve_" + field.db_field,
None,
)
if resolver_function and callable(resolver_function):
field_resolver = resolver_function
return graphene.Field(
_type,
resolver=field_resolver
if field_resolver
else (lazy_resolver if executor == ExecutorEnum.SYNC else lazy_resolver_async),
description=get_field_description(field, registry),
required=required,
)
return graphene.Dynamic(dynamic_type)
if sys.version_info >= (3, 6):
@convert_mongoengine_field.register(mongoengine.EnumField)
def convert_field_to_enum(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC):
if not registry.check_enum_already_exist(field._enum_cls):
registry.register_enum(field._enum_cls)
_type = registry.get_type_for_enum(field._enum_cls)
return graphene.Field(
_type,
description=get_field_description(field, registry),
required=field.required,
)
graphene-mongo-0.4.1/graphene_mongo/fields.py 0000664 0000000 0000000 00000070207 14535040553 0021244 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
import logging
from collections import OrderedDict
from functools import partial, reduce
from itertools import filterfalse
import bson
import graphene
import mongoengine
import pymongo
from bson import DBRef, ObjectId
from graphene import Context
from graphene.relay import ConnectionField
from graphene.types.argument import to_arguments
from graphene.types.dynamic import Dynamic
from graphene.types.structures import Structure
from graphene.types.utils import get_type
from graphene.utils.str_converters import to_snake_case
from graphql import GraphQLResolveInfo
from graphql_relay import cursor_to_offset, from_global_id
from mongoengine import QuerySet
from mongoengine.base import get_document
from promise import Promise
from pymongo.errors import OperationFailure
from .advanced_types import (
FileFieldType,
MultiPolygonFieldType,
PointFieldInputType,
PointFieldType,
PolygonFieldType,
)
from .converter import MongoEngineConversionError, convert_mongoengine_field
from .registry import get_global_registry
from .utils import (
ExecutorEnum,
connection_from_iterables,
find_skip_and_limit,
get_model_reference_fields,
get_query_fields,
)
PYMONGO_VERSION = tuple(pymongo.version_tuple[:2])
class MongoengineConnectionField(ConnectionField):
    """Relay connection field backed by a mongoengine Document queryset."""

    def __init__(self, type, *args, **kwargs):
        # Optional hook: callers may supply ``get_queryset(model, info, **args)``
        # returning either a QuerySet (used as-is) or a dict of extra filters.
        get_queryset = kwargs.pop("get_queryset", None)
        if get_queryset:
            assert callable(
                get_queryset
            ), "Attribute `get_queryset` on {} must be callable.".format(self)
        self._get_queryset = get_queryset
        super(MongoengineConnectionField, self).__init__(type, *args, **kwargs)
    @property
    def executor(self) -> ExecutorEnum:
        # Sync by default; AsyncMongoengineConnectionField overrides this.
        return ExecutorEnum.SYNC

    @property
    def type(self):
        """The connection type; asserts the node type is a MongoengineObjectType."""
        from .types import MongoengineObjectType

        _type = super(ConnectionField, self).type
        assert issubclass(
            _type, MongoengineObjectType
        ), "MongoengineConnectionField only accepts MongoengineObjectType types"
        assert _type._meta.connection, "The type {} doesn't have a connection".format(
            _type.__name__
        )
        return _type._meta.connection

    @property
    def node_type(self):
        return self.type._meta.node

    @property
    def model(self):
        # The mongoengine Document class behind the node type.
        return self.node_type._meta.model

    @property
    def order_by(self):
        return self.node_type._meta.order_by

    @property
    def required_fields(self):
        # Fields always fetched from Mongo regardless of the GraphQL selection.
        return tuple(set(self.node_type._meta.required_fields + self.node_type._meta.only_fields))

    @property
    def registry(self):
        return getattr(self.node_type._meta, "registry", get_global_registry())
@property
def args(self):
_field_args = self.field_args
_advance_args = self.advance_args
_filter_args = self.filter_args
_extended_args = self.extended_args
if self._type._meta.non_filter_fields:
for _field in self._type._meta.non_filter_fields:
if _field in _field_args:
_field_args.pop(_field)
if _field in _advance_args:
_advance_args.pop(_field)
if _field in _filter_args:
_filter_args.pop(_field)
if _field in _extended_args:
_filter_args.pop(_field)
extra_args = dict(
dict(dict(_field_args, **_advance_args), **_filter_args), **_extended_args
)
for key in list(self._base_args.keys()):
extra_args.pop(key, None)
return to_arguments(self._base_args or OrderedDict(), extra_args)
@args.setter
def args(self, args):
self._base_args = args
    def _field_args(self, items):
        """Build a name -> scalar-instance map of filterable field arguments,
        excluding connections, dynamics, unions, federated (``_sdl``) types and
        fields redefined as custom graphene types.
        """

        def is_filterable(k):
            """
            Remove complex columns from input args at this moment.
            Args:
                k (str): field name.
            Returns:
                bool
            """
            if hasattr(self.fields[k].type, "_sdl"):
                return False
            if not hasattr(self.model, k):
                return False
            else:
                # else section is a patch for federated field error
                field_ = self.fields[k]
                type_ = field_.type
                # Unwrap NonNull/List wrappers to reach the named type.
                while hasattr(type_, "of_type"):
                    type_ = type_.of_type
                if hasattr(type_, "_sdl") and "@key" in type_._sdl:
                    return False
            if isinstance(getattr(self.model, k), property):
                return False
            try:
                converted = convert_mongoengine_field(
                    getattr(self.model, k), self.registry, self.executor
                )
            except MongoEngineConversionError:
                return False
            if isinstance(converted, (ConnectionField, Dynamic)):
                return False
            # Advanced/geo/file/union types cannot be used as plain equality filters.
            if callable(getattr(converted, "type", None)) and isinstance(
                converted.type(),
                (
                    FileFieldType,
                    PointFieldType,
                    MultiPolygonFieldType,
                    graphene.Union,
                    PolygonFieldType,
                ),
            ):
                return False
            if (
                getattr(converted, "type", None)
                and getattr(converted.type, "_of_type", None)
                and issubclass((get_type(converted.type.of_type)), graphene.Union)
            ):
                return False
            if isinstance(converted, (graphene.List)) and issubclass(
                getattr(converted, "_of_type", None), graphene.Union
            ):
                return False
            # below if condition: workaround for DB filterable field redefined as custom graphene type
            if (
                hasattr(field_, "type")
                and hasattr(converted, "type")
                and converted.type != field_.type
            ):
                return False
            return True

        def get_filter_type(_type):
            """
            Returns the scalar type.
            """
            # Recurse through List/NonNull structures to the innermost scalar.
            if isinstance(_type, Structure):
                return get_filter_type(_type.of_type)
            return _type()

        return {k: get_filter_type(v.type) for k, v in items if is_filterable(k)}

    @property
    def field_args(self):
        return self._field_args(self.fields.items())
    @property
    def filter_args(self):
        """Arguments derived from ``Meta.filter_fields``: for each declared
        operator (``in``, ``gte``, ``max_distance``, ...) expose a
        ``<field>__<op>`` argument of the appropriate graphene type.
        """
        filter_args = dict()
        if self._type._meta.filter_fields:
            for field, filter_collection in self._type._meta.filter_fields.items():
                for each in filter_collection:
                    if str(self._type._meta.fields[field].type) in (
                        "PointFieldType",
                        "PointFieldType!",
                    ):
                        # Geo filters take a point input, except max_distance (meters).
                        if each == "max_distance":
                            filter_type = graphene.Int
                        else:
                            filter_type = PointFieldInputType
                    else:
                        # Look the scalar up by its GraphQL type name, minus NonNull "!".
                        filter_type = getattr(
                            graphene,
                            str(self._type._meta.fields[field].type).replace("!", ""),
                        )
                    # handle special cases
                    advanced_filter_types = {
                        "in": graphene.List(filter_type),
                        "nin": graphene.List(filter_type),
                        "all": graphene.List(filter_type),
                    }
                    filter_type = advanced_filter_types.get(each, filter_type)
                    filter_args[field + "__" + each] = graphene.Argument(type_=filter_type)
        return filter_args
@property
def advance_args(self):
def get_advance_field(r, kv):
field = kv[1]
mongo_field = getattr(self.model, kv[0], None)
if isinstance(mongo_field, mongoengine.PointField):
r.update({kv[0]: graphene.Argument(PointFieldInputType)})
return r
if isinstance(
mongo_field,
(
mongoengine.LazyReferenceField,
mongoengine.ReferenceField,
mongoengine.GenericReferenceField,
),
):
r.update({kv[0]: graphene.ID()})
return r
if isinstance(mongo_field, mongoengine.GenericReferenceField):
r.update({kv[0]: graphene.ID()})
return r
if callable(getattr(field, "get_type", None)):
_type = field.get_type()
if _type:
node = (
_type.type._meta
if hasattr(_type.type, "_meta")
else _type.type._of_type._meta
)
if "id" in node.fields and not issubclass(
node.model, (mongoengine.EmbeddedDocument,)
):
r.update({kv[0]: node.fields["id"]._type.of_type()})
return r
return reduce(get_advance_field, self.fields.items(), {})
    @property
    def extended_args(self):
        """ID arguments for federated (``_sdl``-annotated) field types."""
        args = OrderedDict()
        for k, each in self.fields.items():
            if hasattr(each.type, "_sdl"):
                args.update({k: graphene.ID()})
        return args

    @property
    def fields(self):
        # Resolve the possibly-lazy type reference before reading its fields.
        self._type = get_type(self._type)
        return self._type._meta.fields
def get_queryset(
self, model, info, required_fields=None, skip=None, limit=None, reversed=False, **args
) -> QuerySet:
if required_fields is None:
required_fields = list()
if args:
reference_fields = get_model_reference_fields(self.model)
hydrated_references = {}
for arg_name, arg in args.copy().items():
if arg_name in reference_fields and not isinstance(
arg, mongoengine.base.metaclasses.TopLevelDocumentMetaclass
):
try:
reference_obj = reference_fields[arg_name].document_type(
pk=from_global_id(arg)[1]
)
except TypeError:
reference_obj = reference_fields[arg_name].document_type(pk=arg)
hydrated_references[arg_name] = reference_obj
elif arg_name in self.model._fields_ordered and isinstance(
getattr(self.model, arg_name),
mongoengine.fields.GenericReferenceField,
):
try:
reference_obj = get_document(
self.registry._registry_string_map[from_global_id(arg)[0]]
)(pk=from_global_id(arg)[1])
except TypeError:
reference_obj = get_document(arg["_cls"])(pk=arg["_ref"].id)
hydrated_references[arg_name] = reference_obj
elif "__near" in arg_name and isinstance(
getattr(self.model, arg_name.split("__")[0]),
mongoengine.fields.PointField,
):
location = args.pop(arg_name, None)
hydrated_references[arg_name] = location["coordinates"]
if (arg_name.split("__")[0] + "__max_distance") not in args:
hydrated_references[arg_name.split("__")[0] + "__max_distance"] = 10000
elif arg_name == "id":
hydrated_references["id"] = from_global_id(args.pop("id", None))[1]
args.update(hydrated_references)
if self._get_queryset:
queryset_or_filters = self._get_queryset(model, info, **args)
if isinstance(queryset_or_filters, mongoengine.QuerySet):
return queryset_or_filters
else:
args.update(queryset_or_filters)
if limit is not None:
if reversed:
if self.order_by:
order_by = self.order_by + ",-pk"
else:
order_by = "-pk"
return (
model.objects(**args)
.no_dereference()
.only(*required_fields)
.order_by(order_by)
.skip(skip if skip else 0)
.limit(limit)
)
else:
return (
model.objects(**args)
.no_dereference()
.only(*required_fields)
.order_by(self.order_by)
.skip(skip if skip else 0)
.limit(limit)
)
elif skip is not None:
if reversed:
if self.order_by:
order_by = self.order_by + ",-pk"
else:
order_by = "-pk"
return (
model.objects(**args)
.no_dereference()
.only(*required_fields)
.order_by(order_by)
.skip(skip)
)
else:
return (
model.objects(**args)
.no_dereference()
.only(*required_fields)
.order_by(self.order_by)
.skip(skip)
)
return model.objects(**args).no_dereference().only(*required_fields).order_by(self.order_by)
    def default_resolver(self, _root, info, required_fields=None, resolved=None, **args):
        """Resolve the connection: compute skip/limit from relay args
        (first/last/before/after), fetch or slice the items, and wrap them in a
        connection object with pagination info.

        Three sources, in priority order: a pre-``resolved`` iterable/queryset,
        a fresh query via ``self.model.objects``, or a list attribute on
        ``_root``.
        """
        if required_fields is None:
            required_fields = list()
        args = args or {}
        for key, value in dict(args).items():
            if value is None:
                del args[key]
        if _root is not None and not resolved:
            # Resolving a reference-list field on a parent document: constrain
            # the query to the referenced pks.
            field_name = to_snake_case(info.field_name)
            if not hasattr(_root, "_fields_ordered"):
                if isinstance(getattr(_root, field_name, []), list):
                    args["pk__in"] = [r.id for r in getattr(_root, field_name, [])]
            elif field_name in _root._fields_ordered and not (
                isinstance(_root._fields[field_name].field, mongoengine.EmbeddedDocumentField)
                or isinstance(
                    _root._fields[field_name].field,
                    mongoengine.GenericEmbeddedDocumentField,
                )
            ):
                if getattr(_root, field_name, []) is not None:
                    args["pk__in"] = [r.id for r in getattr(_root, field_name, [])]
        _id = args.pop("id", None)
        if _id is not None:
            args["pk"] = from_global_id(_id)[-1]
        iterables = []
        list_length = 0
        skip = 0
        count = 0
        limit = None
        reverse = False
        # Decode relay cursors into integer offsets.
        first = args.pop("first", None)
        after = args.pop("after", None)
        if after:
            after = cursor_to_offset(after)
        last = args.pop("last", None)
        before = args.pop("before", None)
        if before:
            before = cursor_to_offset(before)
        has_next_page = False
        if resolved is not None:
            items = resolved
            if isinstance(items, QuerySet):
                try:
                    # A full count is only needed to page backwards from an offset.
                    if last is not None and after is not None:
                        count = items.count(with_limit_and_skip=False)
                    else:
                        count = None
                except OperationFailure:
                    count = len(items)
            else:
                count = len(items)
            skip, limit, reverse = find_skip_and_limit(
                first=first, last=last, after=after, before=before, count=count
            )
            if isinstance(items, QuerySet):
                if limit:
                    _base_query: QuerySet = (
                        items.order_by("-pk").skip(skip) if reverse else items.skip(skip)
                    )
                    items = _base_query.limit(limit)
                    # Probe one extra document to detect a following page.
                    has_next_page = len(_base_query.skip(limit).only("id").limit(1)) != 0
                elif skip:
                    items = items.skip(skip)
            else:
                if limit:
                    if reverse:
                        _base_query = items[::-1]
                        items = _base_query[skip : skip + limit]
                        has_next_page = (skip + limit) < len(_base_query)
                    else:
                        _base_query = items
                        items = items[skip : skip + limit]
                        has_next_page = (skip + limit) < len(_base_query)
                elif skip:
                    items = items[skip:]
            iterables = list(items)
            list_length = len(iterables)
        elif callable(getattr(self.model, "objects", None)):
            if (
                _root is None
                or args
                or isinstance(getattr(_root, field_name, []), MongoengineConnectionField)
            ):
                # Build a raw-pymongo filter dict for an accurate count.
                args_copy = args.copy()
                for key in args.copy():
                    if key not in self.model._fields_ordered:
                        args_copy.pop(key)
                    elif (
                        isinstance(getattr(self.model, key), mongoengine.fields.ReferenceField)
                        or isinstance(
                            getattr(self.model, key),
                            mongoengine.fields.GenericReferenceField,
                        )
                        or isinstance(
                            getattr(self.model, key),
                            mongoengine.fields.LazyReferenceField,
                        )
                        or isinstance(
                            getattr(self.model, key),
                            mongoengine.fields.CachedReferenceField,
                        )
                    ):
                        if not isinstance(args_copy[key], ObjectId):
                            _from_global_id = from_global_id(args_copy[key])[1]
                            if bson.objectid.ObjectId.is_valid(_from_global_id):
                                args_copy[key] = ObjectId(_from_global_id)
                            else:
                                args_copy[key] = _from_global_id
                    elif isinstance(getattr(self.model, key), mongoengine.fields.EnumField):
                        if getattr(args_copy[key], "value", None):
                            args_copy[key] = args_copy[key].value
                if PYMONGO_VERSION >= (3, 7):
                    if hasattr(self.model, "_meta") and "db_alias" in self.model._meta:
                        count = (
                            mongoengine.get_db(self.model._meta["db_alias"])[
                                self.model._get_collection_name()
                            ]
                        ).count_documents(args_copy)
                    else:
                        count = (
                            mongoengine.get_db()[self.model._get_collection_name()]
                        ).count_documents(args_copy)
                else:
                    count = self.model.objects(args_copy).count()
                if count != 0:
                    skip, limit, reverse = find_skip_and_limit(
                        first=first, after=after, last=last, before=before, count=count
                    )
                    iterables = self.get_queryset(
                        self.model, info, required_fields, skip, limit, reverse, **args
                    )
                    list_length = len(iterables)
                    if isinstance(info, GraphQLResolveInfo):
                        if not info.context:
                            info = info._replace(context=Context())
                        # Expose the unsliced queryset to downstream resolvers.
                        info.context.queryset = self.get_queryset(
                            self.model, info, required_fields, **args
                        )
            elif "pk__in" in args and args["pk__in"]:
                # Paginate by slicing the pk list before querying.
                count = len(args["pk__in"])
                skip, limit, reverse = find_skip_and_limit(
                    first=first, last=last, after=after, before=before, count=count
                )
                if limit:
                    if reverse:
                        args["pk__in"] = args["pk__in"][::-1][skip : skip + limit]
                    else:
                        args["pk__in"] = args["pk__in"][skip : skip + limit]
                elif skip:
                    args["pk__in"] = args["pk__in"][skip:]
                iterables = self.get_queryset(self.model, info, required_fields, **args)
                list_length = len(iterables)
                if isinstance(info, GraphQLResolveInfo):
                    if not info.context:
                        info = info._replace(context=Context())
                    info.context.queryset = self.get_queryset(
                        self.model, info, required_fields, **args
                    )
        elif _root is not None:
            # Plain in-memory list attribute on the parent document.
            field_name = to_snake_case(info.field_name)
            items = getattr(_root, field_name, [])
            count = len(items)
            skip, limit, reverse = find_skip_and_limit(
                first=first, last=last, after=after, before=before, count=count
            )
            if limit:
                if reverse:
                    _base_query = items[::-1]
                    items = _base_query[skip : skip + limit]
                    has_next_page = (skip + limit) < len(_base_query)
                else:
                    _base_query = items
                    items = items[skip : skip + limit]
                    has_next_page = (skip + limit) < len(_base_query)
            elif skip:
                items = items[skip:]
            iterables = items
            list_length = len(iterables)
        if count:
            has_next_page = (
                True
                if (0 if limit is None else limit) + (0 if skip is None else skip) < count
                else False
            )
        has_previous_page = True if skip else False
        if reverse:
            # "last"-style queries fetched in reverse order; restore natural order.
            iterables = list(iterables)
            iterables.reverse()
            skip = limit
        connection = connection_from_iterables(
            edges=iterables,
            start_offset=skip,
            has_previous_page=has_previous_page,
            has_next_page=has_next_page,
            connection_type=self.type,
            edge_type=self.type.Edge,
            pageinfo_type=graphene.PageInfo,
        )
        connection.iterable = iterables
        connection.list_length = list_length
        return connection
    def chained_resolver(self, resolver, is_partial, root, info, **args):
        """Run the user/parent ``resolver`` first and adapt its result; fall
        back to :meth:`default_resolver` when it yields nothing usable.

        A returned list of ``DBRef`` is re-resolved through the default
        resolver; a returned ``QuerySet`` has its raw mongo ``_query``
        translated back into mongoengine-style keyword filters.
        """
        for key, value in dict(args).items():
            if value is None:
                del args[key]
        # Projection: meta-required fields plus whatever the query selects.
        required_fields = list()
        for field in self.required_fields:
            if field in self.model._fields_ordered:
                required_fields.append(field)
        for field in get_query_fields(info):
            if to_snake_case(field) in self.model._fields_ordered:
                required_fields.append(to_snake_case(field))
        args_copy = args.copy()
        if not bool(args) or not is_partial:
            if isinstance(self.model, mongoengine.Document) or isinstance(
                self.model, mongoengine.base.metaclasses.TopLevelDocumentMetaclass
            ):
                connection_fields = [
                    field
                    for field in self.fields
                    if isinstance(self.fields[field], MongoengineConnectionField)
                ]

                def filter_connection(x):
                    # True for fields that must not become queryset filters.
                    return any(
                        [
                            connection_fields.__contains__(x),
                            self._type._meta.non_filter_fields.__contains__(x),
                        ]
                    )

                filterable_args = tuple(
                    filterfalse(filter_connection, list(self.model._fields_ordered))
                )
                for arg_name, arg in args.copy().items():
                    if arg_name not in filterable_args + tuple(self.filter_args.keys()):
                        args_copy.pop(arg_name)
                if isinstance(info, GraphQLResolveInfo):
                    if not info.context:
                        info = info._replace(context=Context())
                    info.context.queryset = self.get_queryset(
                        self.model, info, required_fields, **args_copy
                    )
            # XXX: Filter nested args
            resolved = resolver(root, info, **args)
            if resolved is not None:
                if isinstance(resolved, list):
                    if resolved == list():
                        return resolved
                    elif not isinstance(resolved[0], DBRef):
                        return resolved
                    else:
                        # List of DBRefs: let the default resolver dereference.
                        return self.default_resolver(root, info, required_fields, **args_copy)
                elif isinstance(resolved, QuerySet):
                    # Translate the queryset's raw mongo query back into
                    # mongoengine "field__op" keyword filters.
                    args.update(resolved._query)
                    args_copy = args.copy()
                    for arg_name, arg in args.copy().items():
                        if "." in arg_name or arg_name not in self.model._fields_ordered + (
                            "first",
                            "last",
                            "before",
                            "after",
                        ) + tuple(self.filter_args.keys()):
                            args_copy.pop(arg_name)
                            if arg_name == "_id" and isinstance(arg, dict):
                                operation = list(arg.keys())[0]
                                args_copy["pk" + operation.replace("$", "__")] = arg[operation]
                            if not isinstance(arg, ObjectId) and "." in arg_name:
                                if isinstance(arg, dict):
                                    operation = list(arg.keys())[0]
                                    args_copy[
                                        arg_name.replace(".", "__") + operation.replace("$", "__")
                                    ] = arg[operation]
                                else:
                                    args_copy[arg_name.replace(".", "__")] = arg
                            elif "." in arg_name and isinstance(arg, ObjectId):
                                args_copy[arg_name.replace(".", "__")] = arg
                        else:
                            operations = ["$lte", "$gte", "$ne", "$in"]
                            if isinstance(arg, dict) and any(op in arg for op in operations):
                                operation = list(arg.keys())[0]
                                args_copy[arg_name + operation.replace("$", "__")] = arg[operation]
                                del args_copy[arg_name]
                    return self.default_resolver(
                        root, info, required_fields, resolved=resolved, **args_copy
                    )
                elif isinstance(resolved, Promise):
                    return resolved.value
                else:
                    return resolved
        return self.default_resolver(root, info, required_fields, **args)
    @classmethod
    def connection_resolver(cls, resolver, connection_type, root, info, **args):
        """Top-level resolver entry: normalize global IDs on ``root``, run the
        chained resolver and wrap the result into a connection.
        """
        if root:
            # Best-effort: decode any global-ID-looking attribute back to a pk.
            for key, value in root.__dict__.items():
                if value:
                    try:
                        setattr(root, key, from_global_id(value)[1])
                    except Exception as error:
                        logging.debug("Exception Occurred: ", exc_info=error)
        iterable = resolver(root, info, **args)
        if isinstance(connection_type, graphene.NonNull):
            connection_type = connection_type.of_type
        on_resolve = partial(cls.resolve_connection, connection_type, args)
        if Promise.is_thenable(iterable):
            return Promise.resolve(iterable).then(on_resolve)
        return on_resolve(iterable)
def wrap_resolve(self, parent_resolver):
super_resolver = self.resolver or parent_resolver
resolver = partial(
self.chained_resolver, super_resolver, isinstance(super_resolver, partial)
)
return partial(self.connection_resolver, resolver, self.type)
graphene-mongo-0.4.1/graphene_mongo/fields_async.py 0000664 0000000 0000000 00000041472 14535040553 0022443 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
from functools import partial
from itertools import filterfalse
from typing import Coroutine
import bson
import graphene
import mongoengine
import pymongo
from bson import DBRef, ObjectId
from graphene import Context
from graphene.relay import ConnectionField
from graphene.utils.str_converters import to_snake_case
from graphql import GraphQLResolveInfo
from graphql_relay import cursor_to_offset, from_global_id
from mongoengine import QuerySet
from promise import Promise
from pymongo.errors import OperationFailure
from . import MongoengineConnectionField
from .registry import get_global_async_registry
from .utils import (
ExecutorEnum,
connection_from_iterables,
find_skip_and_limit,
get_query_fields,
sync_to_async,
has_page_info,
)
PYMONGO_VERSION = tuple(pymongo.version_tuple[:2])
class AsyncMongoengineConnectionField(MongoengineConnectionField):
    """Async variant of MongoengineConnectionField; queryset work is delegated
    to threads via ``sync_to_async``.
    """

    def __init__(self, type, *args, **kwargs):
        super(AsyncMongoengineConnectionField, self).__init__(type, *args, **kwargs)

    @property
    def executor(self):
        # Drives converter/registry lookups toward the async type registry.
        return ExecutorEnum.ASYNC

    @property
    def type(self):
        """The connection type; asserts the node is an AsyncMongoengineObjectType."""
        from .types_async import AsyncMongoengineObjectType

        _type = super(ConnectionField, self).type
        assert issubclass(
            _type, AsyncMongoengineObjectType
        ), "AsyncMongoengineConnectionField only accepts AsyncMongoengineObjectType types"
        assert _type._meta.connection, "The type {} doesn't have a connection".format(
            _type.__name__
        )
        return _type._meta.connection

    @property
    def fields(self):
        return super(AsyncMongoengineConnectionField, self).fields

    @property
    def registry(self):
        return getattr(self.node_type._meta, "registry", get_global_async_registry())
    async def default_resolver(self, _root, info, required_fields=None, resolved=None, **args):
        """Async counterpart of the sync ``default_resolver``: identical
        pagination logic, with blocking mongoengine calls wrapped in
        ``sync_to_async`` and page-info work skipped when the query does not
        select ``pageInfo`` (``requires_page_info``).

        NOTE(review): unlike the sync version, the count path does not honour
        ``db_alias`` — always uses the default connection; confirm intended.
        """
        if required_fields is None:
            required_fields = list()
        args = args or {}
        for key, value in dict(args).items():
            if value is None:
                del args[key]
        if _root is not None and not resolved:
            # Resolving a reference-list field on a parent document.
            field_name = to_snake_case(info.field_name)
            if not hasattr(_root, "_fields_ordered"):
                if isinstance(getattr(_root, field_name, []), list):
                    args["pk__in"] = [r.id for r in getattr(_root, field_name, [])]
            elif field_name in _root._fields_ordered and not (
                isinstance(_root._fields[field_name].field, mongoengine.EmbeddedDocumentField)
                or isinstance(
                    _root._fields[field_name].field,
                    mongoengine.GenericEmbeddedDocumentField,
                )
            ):
                if getattr(_root, field_name, []) is not None:
                    args["pk__in"] = [r.id for r in getattr(_root, field_name, [])]
        _id = args.pop("id", None)
        if _id is not None:
            args["pk"] = from_global_id(_id)[-1]
        iterables = []
        list_length = 0
        skip = 0
        count = 0
        limit = None
        reverse = False
        first = args.pop("first", None)
        after = args.pop("after", None)
        if after:
            after = cursor_to_offset(after)
        last = args.pop("last", None)
        before = args.pop("before", None)
        if before:
            before = cursor_to_offset(before)
        # Skip expensive has-next probes when pageInfo is not requested.
        requires_page_info = has_page_info(info)
        has_next_page = False
        if resolved is not None:
            items = resolved
            if isinstance(items, QuerySet):
                try:
                    if last is not None and after is not None:
                        count = await sync_to_async(items.count)(with_limit_and_skip=False)
                    else:
                        count = None
                except OperationFailure:
                    count = await sync_to_async(len)(items)
            else:
                count = len(items)
            skip, limit, reverse = find_skip_and_limit(
                first=first, last=last, after=after, before=before, count=count
            )
            if isinstance(items, QuerySet):
                if limit:
                    _base_query: QuerySet = (
                        await sync_to_async(items.order_by("-pk").skip)(skip)
                        if reverse
                        else await sync_to_async(items.skip)(skip)
                    )
                    items = await sync_to_async(_base_query.limit)(limit)
                    has_next_page = (
                        (
                            await sync_to_async(len)(
                                await sync_to_async(_base_query.skip(limit).only("id").limit)(1)
                            )
                            != 0
                        )
                        if requires_page_info
                        else False
                    )
                elif skip:
                    items = await sync_to_async(items.skip)(skip)
            else:
                if limit:
                    if reverse:
                        _base_query = items[::-1]
                        items = _base_query[skip : skip + limit]
                    else:
                        _base_query = items
                        items = items[skip : skip + limit]
                    has_next_page = (
                        (skip + limit) < len(_base_query) if requires_page_info else False
                    )
                elif skip:
                    items = items[skip:]
            iterables = await sync_to_async(list)(items)
            list_length = len(iterables)
        elif callable(getattr(self.model, "objects", None)):
            if (
                _root is None
                or args
                or isinstance(getattr(_root, field_name, []), AsyncMongoengineConnectionField)
            ):
                # Build a raw-pymongo filter dict for an accurate count.
                args_copy = args.copy()
                for key in args.copy():
                    if key not in self.model._fields_ordered:
                        args_copy.pop(key)
                    elif (
                        isinstance(getattr(self.model, key), mongoengine.fields.ReferenceField)
                        or isinstance(
                            getattr(self.model, key),
                            mongoengine.fields.GenericReferenceField,
                        )
                        or isinstance(
                            getattr(self.model, key),
                            mongoengine.fields.LazyReferenceField,
                        )
                        or isinstance(
                            getattr(self.model, key),
                            mongoengine.fields.CachedReferenceField,
                        )
                    ):
                        if not isinstance(args_copy[key], ObjectId):
                            _from_global_id = from_global_id(args_copy[key])[1]
                            if bson.objectid.ObjectId.is_valid(_from_global_id):
                                args_copy[key] = ObjectId(_from_global_id)
                            else:
                                args_copy[key] = _from_global_id
                    elif isinstance(getattr(self.model, key), mongoengine.fields.EnumField):
                        if getattr(args_copy[key], "value", None):
                            args_copy[key] = args_copy[key].value
                if PYMONGO_VERSION >= (3, 7):
                    count = await sync_to_async(
                        (mongoengine.get_db()[self.model._get_collection_name()]).count_documents
                    )(args_copy)
                else:
                    count = await sync_to_async(self.model.objects(args_copy).count)()
                if count != 0:
                    skip, limit, reverse = find_skip_and_limit(
                        first=first, after=after, last=last, before=before, count=count
                    )
                    iterables = self.get_queryset(
                        self.model, info, required_fields, skip, limit, reverse, **args
                    )
                    iterables = await sync_to_async(list)(iterables)
                    list_length = len(iterables)
                    if isinstance(info, GraphQLResolveInfo):
                        if not info.context:
                            info = info._replace(context=Context())
                        info.context.queryset = self.get_queryset(
                            self.model, info, required_fields, **args
                        )
            elif "pk__in" in args and args["pk__in"]:
                # Paginate by slicing the pk list before querying.
                count = len(args["pk__in"])
                skip, limit, reverse = find_skip_and_limit(
                    first=first, last=last, after=after, before=before, count=count
                )
                if limit:
                    if reverse:
                        args["pk__in"] = args["pk__in"][::-1][skip : skip + limit]
                    else:
                        args["pk__in"] = args["pk__in"][skip : skip + limit]
                elif skip:
                    args["pk__in"] = args["pk__in"][skip:]
                iterables = self.get_queryset(self.model, info, required_fields, **args)
                iterables = await sync_to_async(list)(iterables)
                list_length = len(iterables)
                if isinstance(info, GraphQLResolveInfo):
                    if not info.context:
                        info = info._replace(context=Context())
                    info.context.queryset = self.get_queryset(
                        self.model, info, required_fields, **args
                    )
        elif _root is not None:
            # Plain in-memory list attribute on the parent document.
            field_name = to_snake_case(info.field_name)
            items = getattr(_root, field_name, [])
            count = len(items)
            skip, limit, reverse = find_skip_and_limit(
                first=first, last=last, after=after, before=before, count=count
            )
            if limit:
                if reverse:
                    _base_query = items[::-1]
                    items = _base_query[skip : skip + limit]
                else:
                    _base_query = items
                    items = items[skip : skip + limit]
                has_next_page = (skip + limit) < len(_base_query) if requires_page_info else False
            elif skip:
                items = items[skip:]
            iterables = items
            iterables = await sync_to_async(list)(iterables)
            list_length = len(iterables)
        if requires_page_info and count:
            has_next_page = (
                True
                if (0 if limit is None else limit) + (0 if skip is None else skip) < count
                else False
            )
        has_previous_page = True if requires_page_info and skip else False
        if reverse:
            # "last"-style queries fetched in reverse order; restore natural order.
            iterables = await sync_to_async(list)(iterables)
            iterables.reverse()
            skip = limit
        connection = connection_from_iterables(
            edges=iterables,
            start_offset=skip,
            has_previous_page=has_previous_page,
            has_next_page=has_next_page,
            connection_type=self.type,
            edge_type=self.type.Edge,
            pageinfo_type=graphene.PageInfo,
        )
        connection.iterable = iterables
        connection.list_length = list_length
        return connection
async def chained_resolver(self, resolver, is_partial, root, info, **args):
for key, value in dict(args).items():
if value is None:
del args[key]
required_fields = list()
for field in self.required_fields:
if field in self.model._fields_ordered:
required_fields.append(field)
for field in get_query_fields(info):
if to_snake_case(field) in self.model._fields_ordered:
required_fields.append(to_snake_case(field))
args_copy = args.copy()
if not bool(args) or not is_partial:
if isinstance(self.model, mongoengine.Document) or isinstance(
self.model, mongoengine.base.metaclasses.TopLevelDocumentMetaclass
):
connection_fields = [
field
for field in self.fields
if isinstance(self.fields[field], AsyncMongoengineConnectionField)
]
def filter_connection(x):
return any(
[
connection_fields.__contains__(x),
self._type._meta.non_filter_fields.__contains__(x),
]
)
filterable_args = tuple(
filterfalse(filter_connection, list(self.model._fields_ordered))
)
for arg_name, arg in args.copy().items():
if arg_name not in filterable_args + tuple(self.filter_args.keys()):
args_copy.pop(arg_name)
if isinstance(info, GraphQLResolveInfo):
if not info.context:
info = info._replace(context=Context())
info.context.queryset = self.get_queryset(
self.model, info, required_fields, **args_copy
)
# XXX: Filter nested args
resolved = resolver(root, info, **args)
if isinstance(resolved, Coroutine):
resolved = await resolved
if resolved is not None:
# if isinstance(resolved, Coroutine):
# resolved = await resolved
if isinstance(resolved, list):
if resolved == list():
return resolved
elif not isinstance(resolved[0], DBRef):
return resolved
else:
return await self.default_resolver(root, info, required_fields, **args_copy)
elif isinstance(resolved, QuerySet):
args.update(resolved._query)
args_copy = args.copy()
for arg_name, arg in args.copy().items():
if "." in arg_name or arg_name not in self.model._fields_ordered + (
"first",
"last",
"before",
"after",
) + tuple(self.filter_args.keys()):
args_copy.pop(arg_name)
if arg_name == "_id" and isinstance(arg, dict):
operation = list(arg.keys())[0]
args_copy["pk" + operation.replace("$", "__")] = arg[operation]
if not isinstance(arg, ObjectId) and "." in arg_name:
if isinstance(arg, dict):
operation = list(arg.keys())[0]
args_copy[
arg_name.replace(".", "__") + operation.replace("$", "__")
] = arg[operation]
else:
args_copy[arg_name.replace(".", "__")] = arg
elif "." in arg_name and isinstance(arg, ObjectId):
args_copy[arg_name.replace(".", "__")] = arg
else:
operations = ["$lte", "$gte", "$ne", "$in"]
if isinstance(arg, dict) and any(op in arg for op in operations):
operation = list(arg.keys())[0]
args_copy[arg_name + operation.replace("$", "__")] = arg[operation]
del args_copy[arg_name]
return await self.default_resolver(
root, info, required_fields, resolved=resolved, **args_copy
)
elif isinstance(resolved, Promise):
return resolved.value
else:
return await resolved
return await self.default_resolver(root, info, required_fields, **args)
    @classmethod
    async def connection_resolver(cls, resolver, connection_type, root, info, **args):
        """Top-level async resolver entry: normalize global IDs on ``root``,
        await the chained resolver and wrap the result into a connection.
        """
        if root:
            # Best-effort: decode any global-ID-looking attribute back to a pk.
            for key, value in root.__dict__.items():
                if value:
                    try:
                        setattr(root, key, from_global_id(value)[1])
                    except Exception:
                        pass
        iterable = await resolver(root=root, info=info, **args)
        if isinstance(connection_type, graphene.NonNull):
            connection_type = connection_type.of_type
        on_resolve = partial(cls.resolve_connection, connection_type, args)
        if Promise.is_thenable(iterable):
            # NOTE(review): a thenable here goes through on_resolve twice
            # (once via .then, once below) — confirm this is intentional.
            iterable = Promise.resolve(iterable).then(on_resolve).value
        return on_resolve(iterable)
graphene-mongo-0.4.1/graphene_mongo/registry.py 0000664 0000000 0000000 00000005773 14535040553 0021654 0 ustar 00root root 0000000 0000000 from graphene import Enum
from graphene_mongo.utils import ExecutorEnum
class Registry(object):
    """Maps mongoengine Document models to their graphene object types (sync
    and async kept in separate maps) and mongoengine enum classes to graphene
    Enums.
    """

    def __init__(self):
        self._registry = {}
        self._registry_async = {}
        # Graphene type name -> model class name, used to reverse global IDs.
        self._registry_string_map = {}
        self._registry_async_string_map = {}
        self._registry_enum = {}

    def register(self, cls):
        """Register a (sync or async) Mongoengine object type and trigger a
        field rescan on every already-registered sync type.

        Fix: the rescan loop previously rebound ``cls`` (shadowing the
        parameter) and an unused ``model`` variable.
        """
        from .types import GrapheneMongoengineObjectTypes
        from .types_async import AsyncGrapheneMongoengineObjectTypes

        assert (
            issubclass(cls, GrapheneMongoengineObjectTypes)
            or issubclass(cls, AsyncGrapheneMongoengineObjectTypes)
        ), 'Only Mongoengine/Async Mongoengine object types can be registered, received "{}"'.format(
            cls.__name__
        )
        assert cls._meta.registry == self, "Registry for a Model have to match."
        if issubclass(cls, GrapheneMongoengineObjectTypes):
            self._registry[cls._meta.model] = cls
            self._registry_string_map[cls.__name__] = cls._meta.model.__name__
        else:
            self._registry_async[cls._meta.model] = cls
            self._registry_async_string_map[cls.__name__] = cls._meta.model.__name__
        # Rescan all fields
        for registered_type in self._registry.values():
            registered_type.rescan_fields()

    def register_enum(self, cls):
        """Register a Python Enum class, normalizing its name to end in "Enum".

        NOTE: mutates ``cls.__name__`` as a side effect.
        """
        from enum import EnumMeta

        assert isinstance(
            cls, EnumMeta
        ), f'Only EnumMeta can be registered, received "{cls.__name__}"'
        if not cls.__name__.endswith("Enum"):
            name = cls.__name__ + "Enum"
        else:
            name = cls.__name__
        cls.__name__ = name
        self._registry_enum[cls] = Enum.from_enum(cls)

    def get_type_for_model(self, model, executor: ExecutorEnum = ExecutorEnum.SYNC):
        """Return the registered type for ``model`` in the requested registry."""
        if executor == ExecutorEnum.SYNC:
            return self._registry.get(model)
        else:
            return self._registry_async.get(model)

    def check_enum_already_exist(self, cls):
        return cls in self._registry_enum

    def get_type_for_enum(self, cls):
        return self._registry_enum.get(cls)
registry = None
async_registry = None
inputs_registry = None
async_inputs_registry = None
def get_inputs_registry():
    """Return the process-wide inputs registry, creating it on first use."""
    global inputs_registry
    if inputs_registry is None:
        inputs_registry = Registry()
    return inputs_registry
def get_inputs_async_registry():
    """Return the process-wide async inputs registry, creating it on first use."""
    global async_inputs_registry
    if async_inputs_registry is None:
        async_inputs_registry = Registry()
    return async_inputs_registry
def get_global_registry():
    """Return the process-wide sync type registry, creating it on first use."""
    global registry
    if registry is None:
        registry = Registry()
    return registry
def get_global_async_registry():
    """Return the process-wide async type registry, creating it on first use."""
    global async_registry
    if async_registry is None:
        async_registry = Registry()
    return async_registry
def reset_global_registry():
    """Discard the cached sync registries; the next accessor call rebuilds them."""
    global registry, inputs_registry
    registry = None
    inputs_registry = None
def reset_global_async_registry():
    """Discard the cached async registries; the next accessor call rebuilds them."""
    global async_registry, async_inputs_registry
    async_registry = None
    async_inputs_registry = None
graphene-mongo-0.4.1/graphene_mongo/tests/ 0000775 0000000 0000000 00000000000 14535040553 0020560 5 ustar 00root root 0000000 0000000 graphene-mongo-0.4.1/graphene_mongo/tests/__init__.py 0000664 0000000 0000000 00000000000 14535040553 0022657 0 ustar 00root root 0000000 0000000 graphene-mongo-0.4.1/graphene_mongo/tests/conftest.py 0000664 0000000 0000000 00000011427 14535040553 0022764 0 ustar 00root root 0000000 0000000 import os
from datetime import datetime
import pytest
from .models import (
AnotherChild,
Article,
CellTower,
Child,
ChildRegisteredAfter,
ChildRegisteredBefore,
Editor,
EmbeddedArticle,
ParentWithRelationship,
Player,
ProfessorMetadata,
ProfessorVector,
Publisher,
Reporter,
)
# Absolute path of the directory containing this conftest module.
current_dirname = os.path.dirname(os.path.abspath(__file__))


@pytest.fixture()
def fixtures_dirname():
    """Path of the on-disk ``fixtures`` directory next to this file."""
    return os.path.join(current_dirname, "fixtures")
@pytest.fixture(scope="module")
def fixtures():
    """Drop and repopulate every test collection with a fixed data set.

    Seeds Publisher, Editor, Article, Reporter, Player, Child, AnotherChild,
    CellTower, ProfessorVector and the ParentWithRelationship family of
    models. Module-scoped, so the data is built once per test module.
    Returns True once all documents are saved.
    """
    Publisher.drop_collection()
    publisher1 = Publisher(name="Newsco")
    publisher1.save()
    Editor.drop_collection()
    editor1 = Editor(
        id="1",
        first_name="Penny",
        last_name="Hardaway",
        metadata={"age": "20", "nickname": "$1"},
        company=publisher1,
    )
    # Attach binary avatar data from the bundled fixture image.
    image_filename = os.path.join(current_dirname, "fixtures", "image.jpg")
    with open(image_filename, "rb") as f:
        editor1.avatar.put(f, content_type="image/jpeg")
    editor1.save()
    editor2 = Editor(id="2", first_name="Grant", last_name="Hill")
    editor2.save()
    editor3 = Editor(id="3", first_name="Dennis", last_name="Rodman")
    editor3.save()
    Article.drop_collection()
    # All three articles share the same publication date.
    pub_date = datetime.strptime("2020-01-01", "%Y-%m-%d")
    article1 = Article(headline="Hello", editor=editor1, pub_date=pub_date)
    article1.save()
    article2 = Article(headline="World", editor=editor2, pub_date=pub_date)
    article2.save()
    article3 = Article(headline="Bye", editor=editor2, pub_date=pub_date)
    article3.save()
    Reporter.drop_collection()
    reporter1 = Reporter(
        id="1",
        first_name="Allen",
        last_name="Iverson",
        email="ai@gmail.com",
        awards=["2010-mvp"],
        generic_references=[article1],
    )
    reporter1.articles = [article1, article2]
    embedded_article1 = EmbeddedArticle(headline="Real", editor=editor1)
    embedded_article2 = EmbeddedArticle(headline="World", editor=editor2)
    reporter1.embedded_articles = [embedded_article1, embedded_article2]
    # Deliberately the reverse order of embedded_articles.
    reporter1.embedded_list_articles = [embedded_article2, embedded_article1]
    reporter1.generic_reference = article1
    reporter1.save()
    Player.drop_collection()
    player1 = Player(
        first_name="Michael",
        last_name="Jordan",
        articles=[article1, article2],
    )
    player1.save()
    player2 = Player(
        first_name="Magic",
        last_name="Johnson",
        opponent=player1,
        articles=[article3],
    )
    player2.save()
    player3 = Player(first_name="Larry", last_name="Bird", players=[player1, player2])
    player3.save()
    # player1 and player2 reference each other, so the cyclic `players`
    # links are assigned only after both documents already exist.
    player1.players = [player2]
    player1.save()
    player2.players = [player1]
    player2.save()
    player4 = Player(first_name="Chris", last_name="Webber")
    player4.save()
    Child.drop_collection()
    child1 = Child(bar="BAR", baz="BAZ")
    child1.save()
    child2 = Child(bar="bar", baz="baz", loc=[10, 20])
    child2.save()
    another_child1 = AnotherChild(bar="BAR", qux="QUX")
    another_child1.save()
    another_child2 = AnotherChild(bar="bar", qux="qux", loc=[20, 10])
    another_child2.save()
    CellTower.drop_collection()
    # `base` is a single polygon ring; `coverage_area` wraps the same ring
    # one level deeper (multi-polygon shape).
    ct = CellTower(
        code="bar",
        base=[
            [
                [-43.36556, -22.99669],
                [-43.36539, -23.01928],
                [-43.26583, -23.01802],
                [-43.36717, -22.98855],
                [-43.36636, -22.99351],
                [-43.36556, -22.99669],
            ]
        ],
        coverage_area=[
            [
                [
                    [-43.36556, -22.99669],
                    [-43.36539, -23.01928],
                    [-43.26583, -23.01802],
                    [-43.36717, -22.98855],
                    [-43.36636, -22.99351],
                    [-43.36556, -22.99669],
                ]
            ]
        ],
    )
    ct.save()
    ProfessorVector.drop_collection()
    professor_metadata = ProfessorMetadata(
        id="5e06aa20-6805-4eef-a144-5615dedbe32b",
        first_name="Steven",
        last_name="Curry",
        departments=["NBA", "MLB"],
    )
    professor_vector = ProfessorVector(vec=[1.0, 2.3], metadata=professor_metadata)
    professor_vector.save()
    ParentWithRelationship.drop_collection()
    ChildRegisteredAfter.drop_collection()
    ChildRegisteredBefore.drop_collection()
    # This is one messed up family
    # She'd better have presence this time
    child3 = ChildRegisteredBefore(name="Akari")
    child4 = ChildRegisteredAfter(name="Kyouko")
    child3.save()
    child4.save()
    parent = ParentWithRelationship(
        name="Yui",
        before_child=[child3],
        after_child=[child4],
    )
    parent.save()
    # The back-reference to the parent can only be set (and re-saved)
    # once the parent document itself has been saved.
    child3.parent = child4.parent = parent
    child3.save()
    child4.save()
    return True
graphene-mongo-0.4.1/graphene_mongo/tests/fixtures/ 0000775 0000000 0000000 00000000000 14535040553 0022431 5 ustar 00root root 0000000 0000000 graphene-mongo-0.4.1/graphene_mongo/tests/fixtures/image.jpg 0000664 0000000 0000000 00000133520 14535040553 0024221 0 ustar 00root root 0000000 0000000 ‰PNG
IHDR • “ 8iCCPICC Profile X…•Y 8UÝ×ßçÜ×<Ïó˜y&ó<Ïc*×<Ó5«$$SI†RxEŠFSB$%™2”"E¡TL!ßAõ¾ÿ÷ÿ<ß÷|ûyö9¿»öÚk¯µöÚÃ: RXXL@pHÙÖH—×ÙÅ•7 @Žä¦cmmŽ`ðûýŸeiáFÊSÉ-YÿÝþ¿:/ïpO k{x…{#ø h%Ï0r ˜y„.†`,¢%`$#
"Xpûî`•-챃ͷyìmõì žŠD"û@½¥o”§/"‡:i£ñòAX/ XÓÓä û$³+88ÁTõø‡ßÿéñG&‰äûïØ²]ðúþáaA¤Øÿ§;þïù{¤Rù‘m·lÞò[`¨ÙFt‡†xXZ!˜ÁCþ^Ûü[ø_¤±Ã/þÏp=Äg€ ˜Ê‹¤o†`Nó‡Yšÿ¢kúøš ñ=lïab¿Óö"‡Úþ’Çx‡ØýÆ$òöX[<é‘:¿dž÷ó6ù-³)ÎÏÞiGO¸?ÊßÑÁÔ~hgö‹ç}œŸžåor¤í–ÎÈœ£€ÙÐv‡%þÛ.”šŸ¿‰å/lágo¼ÓµÏ“´+‚¼ÃÍëéåo°c*Ñ;Äá—þ¨Óaº¶¿øË¬ñ£š½ƒŒ¶èüî
²ûÝw! ¶{Ñ ,ÂÚ~G74c ÉÔzG´80z@ð‚H¤z€P ü{çëç‘_;-†€ÈÀxÉ_”ß=œ¶[B§ˆä
ÂÿôÓÝnõQ}ãuç) |¶[£¶{‚wf ù¹Ý+äÏhŽ`¡øÿ×螈®AHÝjû//ÍoÖ «5ÆbÅÐìhM´:Úyj#U‚Vý×ßü˜w˜ÌÌ0fó|¿"ù_šó0‰èhøË:Z‡F¤*¢uш|D6šÍ$Ñ
ÈH:h-dlE„úO]#ÿXü·/É"È`A› úo
¨Å©ÿHÙòÔ?}±£—Çoéýiù·zÿðŸò6û7'*uÕ…º‡z„jFÕ^T+ªÕƒº»…ÿÄÆôvlüÍv[Ÿ@DŽÿGú5æ–×ÂeªeædֵŢKö÷õ‹àÕAvko^“O©]¼r2²ª líý;[Ë7Ûí=bîû›FBö89 (tÿ¦…"k»& ù³Ó„‘uȆH»aëIŽÚ¡¡·@h•¸‘½K±H(u
€)°öÀìCüì‡Ä)DƒCà(HàÈç@ (•à*¸êA3¸€Ç ƒH¬¼ÀXká "Ä ±A<$ÉA*&d