Mirror of https://github.com/graphql-python/graphene.git (synced 2025-04-25 03:43:42 +03:00)

Compare commits

No commits in common. "master" and "v3.0.0a1" have entirely different histories.

34  .github/ISSUE_TEMPLATE/bug_report.md (vendored)

@@ -1,34 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: "\U0001F41B bug"
assignees: ''

---

**Note: for support questions, please use stackoverflow**. This repository's issues are reserved for feature requests and bug reports.

* **What is the current behavior?**

* **If the current behavior is a bug, please provide the steps to reproduce and if possible a minimal demo of the problem** via
a github repo, https://repl.it or similar.

* **What is the expected behavior?**

* **What is the motivation / use case for changing the behavior?**

* **Please tell us about your environment:**

  - Version:
  - Platform:

* **Other information** (e.g. detailed explanation, stacktraces, related issues, suggestions how to fix, links for us to have context, eg. stackoverflow)

1  .github/ISSUE_TEMPLATE/config.yml (vendored)

@@ -1 +0,0 @@
blank_issues_enabled: false

20  .github/ISSUE_TEMPLATE/feature_request.md (vendored)

@@ -1,20 +0,0 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: "✨ enhancement"
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.

19  .github/stale.yml (vendored)

@@ -1,24 +1,17 @@
 # Number of days of inactivity before an issue becomes stale
-daysUntilStale: false
+daysUntilStale: 90
 # Number of days of inactivity before a stale issue is closed
-daysUntilClose: false
+daysUntilClose: 14
 # Issues with these labels will never be considered stale
 exemptLabels:
   - pinned
   - security
-  - 🐛 bug
-  - 📖 documentation
-  - 🙋 help wanted
-  - ✨ enhancement
-  - good first issue
-  - work in progress
 # Label to use when marking an issue as stale
 staleLabel: wontfix
 # Comment to post when marking an issue as stale. Set to `false` to disable
-markComment: false
-# markComment: >
-# This issue has been automatically marked as stale because it has not had
-# recent activity. It will be closed if no further activity occurs. Thank you
-# for your contributions.
+markComment: >
+  This issue has been automatically marked as stale because it has not had
+  recent activity. It will be closed if no further activity occurs. Thank you
+  for your contributions.
 # Comment to post when closing a stale issue. Set to `false` to disable
 closeComment: false

21  .github/workflows/build.yaml (vendored)

@@ -1,21 +0,0 @@
name: 📦 Build

on: [push, pull_request]

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.10
        uses: actions/setup-python@v5
        with:
          python-version: "3.10"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install build twine
      - name: Building package
        run: python3 -m build
      - name: Check package with Twine
        run: twine check dist/*

26  .github/workflows/deploy.yml (vendored)

@@ -1,26 +0,0 @@
name: 🚀 Deploy to PyPI

on:
  push:
    tags:
      - 'v*'

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.10
        uses: actions/setup-python@v5
        with:
          python-version: "3.10"
      - name: Build wheel and source tarball
        run: |
          pip install wheel
          python setup.py sdist bdist_wheel
      - name: Publish a Python distribution to PyPI
        uses: pypa/gh-action-pypi-publish@v1.1.0
        with:
          user: __token__
          password: ${{ secrets.pypi_password }}

26  .github/workflows/lint.yml (vendored)

@@ -1,26 +0,0 @@
name: 💅 Lint

on: [push, pull_request]

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.10
        uses: actions/setup-python@v5
        with:
          python-version: "3.10"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install tox
      - name: Run lint
        run: tox
        env:
          TOXENV: pre-commit
      - name: Run mypy
        run: tox
        env:
          TOXENV: mypy

64  .github/workflows/tests.yml (vendored)

@@ -1,64 +0,0 @@
name: 📄 Tests
on:
  push:
    branches:
      - master
      - '*.x'
    paths-ignore:
      - 'docs/**'
      - '*.md'
      - '*.rst'
  pull_request:
    branches:
      - master
      - '*.x'
    paths-ignore:
      - 'docs/**'
      - '*.md'
      - '*.rst'
jobs:
  tests:
    # runs the test suite
    name: ${{ matrix.name }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - {name: '3.13', python: '3.13', os: ubuntu-latest, tox: py313}
          - {name: '3.12', python: '3.12', os: ubuntu-latest, tox: py312}
          - {name: '3.11', python: '3.11', os: ubuntu-latest, tox: py311}
          - {name: '3.10', python: '3.10', os: ubuntu-latest, tox: py310}
          - {name: '3.9', python: '3.9', os: ubuntu-latest, tox: py39}
          - {name: '3.8', python: '3.8', os: ubuntu-latest, tox: py38}
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python }}

      - name: update pip
        run: |
          python -m pip install --upgrade pip
          pip install --upgrade setuptools wheel

      - name: get pip cache dir
        id: pip-cache
        run: echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
      - name: cache pip dependencies
        uses: actions/cache@v3
        with:
          path: ${{ steps.pip-cache.outputs.dir }}
          key: pip|${{ runner.os }}|${{ matrix.python }}|${{ hashFiles('setup.py') }}
      - run: pip install tox
      - run: tox -e ${{ matrix.tox }}
      - name: Upload coverage.xml
        if: ${{ matrix.python == '3.10' }}
        uses: actions/upload-artifact@v4
        with:
          name: graphene-coverage
          path: coverage.xml
          if-no-files-found: error
      - name: Upload coverage.xml to codecov
        if: ${{ matrix.python == '3.10' }}
        uses: codecov/codecov-action@v4

1  .gitignore (vendored)

@@ -90,4 +90,3 @@ venv/
 *.sqlite3
 .vscode
 .mypy_cache
-.ruff_cache

2  .isort.cfg (Normal file)

@@ -0,0 +1,2 @@
[settings]
known_third_party = aniso8601,graphql,graphql_relay,promise,pytest,pytz,pyutils,setuptools,six,snapshottest,sphinx_graphene_theme

@@ -1,9 +1,6 @@
-default_language_version:
-  python: python3.10
-
 repos:
-- repo: https://github.com/pre-commit/pre-commit-hooks
-  rev: v4.3.0
+- repo: git://github.com/pre-commit/pre-commit-hooks
+  rev: v2.1.0
   hooks:
   - id: check-merge-conflict
   - id: check-json
@@ -17,13 +14,15 @@ repos:
   - id: trailing-whitespace
     exclude: README.md
 - repo: https://github.com/asottile/pyupgrade
-  rev: v2.37.3
+  rev: v1.12.0
   hooks:
   - id: pyupgrade
-- repo: https://github.com/astral-sh/ruff-pre-commit
-  # Ruff version.
-  rev: v0.5.0
+- repo: https://github.com/ambv/black
+  rev: 19.3b0
   hooks:
-  - id: ruff
-  - id: ruff-format
-    args: [ --check ]
+  - id: black
+    language_version: python3
+- repo: https://github.com/PyCQA/flake8
+  rev: 3.7.8
+  hooks:
+  - id: flake8

42  .travis.yml (Normal file)

@@ -0,0 +1,42 @@
language: python
dist: xenial

python:
  - "3.6"
  - "3.7"
  - "3.8"

install:
  - pip install tox tox-travis
script: tox
after_success:
  - pip install coveralls
  - coveralls
cache:
  directories:
    - $HOME/.cache/pip
    - $HOME/.cache/pre-commit

stages:
  - test
  - name: deploy
    if: tag IS present

jobs:
  fast_finish: true
  include:
    - env: TOXENV=pre-commit
      python: 3.7
    - env: TOXENV=mypy
      python: 3.7
    - stage: deploy
      python: 3.7
      after_success: true
      deploy:
        provider: pypi
        user: syrusakbary
        on:
          tags: true
        password:
          secure: LHOp9DvYR+70vj4YVY8+JRNCKUOfYZREEUY3+4lMUpY7Zy5QwDfgEMXG64ybREH9dFldpUqVXRj53eeU3spfudSfh8NHkgqW7qihez2AhSnRc4dK6ooNfB+kLcSoJ4nUFGxdYImABc4V1hJvflGaUkTwDNYVxJF938bPaO797IvSbuI86llwqkvuK2Vegv9q/fy9sVGaF9VZIs4JgXwR5AyDR7FBArl+S84vWww4vTFD33hoE88VR4QvFY3/71BwRtQrnCMm7AOm31P9u29yi3bpzQpiOR2rHsgrsYdm597QzFKVxYwsmf9uAx2bpbSPy2WibunLePIvOFwm8xcfwnz4/J4ONBc5PSFmUytTWpzEnxb0bfUNLuYloIS24V6OZ8BfAhiYZ1AwySeJCQDM4Vk1V8IF6trTtyx5EW/uV9jsHCZ3LFsAD7UnFRTosIgN3SAK3ZWCEk5oF2IvjecsolEfkRXB3q9EjMkkuXRUeFDH2lWJLgNE27BzY6myvZVzPmfwZUsPBlPD/6w+WLSp97Rjgr9zS3T1d4ddqFM4ZYu04f2i7a/UUQqG+itzzuX5DWLPvzuNt37JB45mB9IsvxPyXZ6SkAcLl48NGyKok1f3vQnvphkfkl4lni29woKhaau8xlsuEDrcwOoeAsVcZXiItg+l+z2SlIwM0A06EvQ=
      distributions: "sdist bdist_wheel"

3  CODEOWNERS (Normal file)

@@ -0,0 +1,3 @@
* @ekampf @dan98765 @projectcheshire @jkimbo
/docs/ @dvndrsn @phalt @changeling
/examples/ @dvndrsn @phalt @changeling

7  Makefile

@@ -7,9 +7,8 @@ help:
 install-dev:
 	pip install -e ".[dev]"

-.PHONY: test ## Run tests
 test:
-	py.test graphene examples
+	py.test graphene examples tests_asyncio

 .PHONY: docs ## Generate docs
 docs: install-dev
@@ -21,8 +20,8 @@ docs-live: install-dev

 .PHONY: format
 format:
-	black graphene examples setup.py
+	black graphene examples setup.py tests_asyncio

 .PHONY: lint
 lint:
-	flake8 graphene examples setup.py
+	flake8 graphene examples setup.py tests_asyncio

41  README.md

@@ -1,8 +1,14 @@
-#  [Graphene](http://graphene-python.org) [](https://badge.fury.io/py/graphene) [](https://coveralls.io/github/graphql-python/graphene?branch=master) [](https://discord.gg/T6Gp6NFYHe)
+#  [Graphene](http://graphene-python.org) [](https://travis-ci.org/graphql-python/graphene) [](https://badge.fury.io/py/graphene) [](https://coveralls.io/github/graphql-python/graphene?branch=master)

-[💬 Join the community on Discord](https://discord.gg/T6Gp6NFYHe)
+[💬 Join the community on Slack](https://join.slack.com/t/graphenetools/shared_invite/enQtOTE2MDQ1NTg4MDM1LTA4Nzk0MGU0NGEwNzUxZGNjNDQ4ZjAwNDJjMjY0OGE1ZDgxZTg4YjM2ZTc4MjE2ZTAzZjE2ZThhZTQzZTkyMmM)

-**We are looking for contributors**! Please check the current issues to see how you can help ❤️
+**We are looking for contributors**! Please check the [ROADMAP](https://github.com/graphql-python/graphene/blob/master/ROADMAP.md) to see how you can help ❤️

+---
+
+**The below readme is the documentation for the `dev` (prerelease) version of Graphene. To view the documentation for the latest stable Graphene version go to the [v2 docs](https://docs.graphene-python.org/en/stable/)**
+
+---
+
 ## Introduction

@@ -10,7 +16,7 @@

 - **Easy to use:** Graphene helps you use GraphQL in Python without effort.
 - **Relay:** Graphene has builtin support for Relay.
-- **Data agnostic:** Graphene supports any kind of data source: SQL (Django, SQLAlchemy), Mongo, custom Python objects, etc.
+- **Data agnostic:** Graphene supports any kind of data source: SQL (Django, SQLAlchemy), NoSQL, custom Python objects, etc.
 We believe that by providing a complete API you could plug Graphene anywhere your data lives and make your data available
 through GraphQL.

@@ -20,19 +26,18 @@ Graphene has multiple integrations with different frameworks:

 | integration       | Package |
 | ----------------- | --------------------------------------------------------------------------------------- |
-| SQLAlchemy        | [graphene-sqlalchemy](https://github.com/graphql-python/graphene-sqlalchemy/) |
-| Mongo             | [graphene-mongo](https://github.com/graphql-python/graphene-mongo/) |
-| Apollo Federation | [graphene-federation](https://github.com/graphql-python/graphene-federation/) |
 | Django            | [graphene-django](https://github.com/graphql-python/graphene-django/) |
+| SQLAlchemy        | [graphene-sqlalchemy](https://github.com/graphql-python/graphene-sqlalchemy/) |
+| Google App Engine | [graphene-gae](https://github.com/graphql-python/graphene-gae/) |

 Also, Graphene is fully compatible with the GraphQL spec, working seamlessly with all GraphQL clients, such as [Relay](https://github.com/facebook/relay), [Apollo](https://github.com/apollographql/apollo-client) and [gql](https://github.com/graphql-python/gql).

 ## Installation

-To install `graphene`, just run this command in your shell
+For instaling graphene, just run this command in your shell

 ```bash
-pip install "graphene>=3.1"
+pip install "graphene>=2.0"
 ```

 ## Examples

@@ -40,8 +45,6 @@ pip install "graphene>=3.1"
 Here is one example for you to get started:

 ```python
-import graphene
-
 class Query(graphene.ObjectType):
     hello = graphene.String(description='A typical hello world')

@@ -85,24 +88,18 @@ pip install -e ".[test]"
 Well-written tests and maintaining good test coverage is important to this project. While developing, run new and existing tests with:

 ```sh
-pytest graphene/relay/tests/test_node.py # Single file
-pytest graphene/relay # All tests in directory
+py.test graphene/relay/tests/test_node.py # Single file
+py.test graphene/relay # All tests in directory
 ```

 Add the `-s` flag if you have introduced breakpoints into the code for debugging.
 Add the `-v` ("verbose") flag to get more detailed test output. For even more detailed output, use `-vv`.
 Check out the [pytest documentation](https://docs.pytest.org/en/latest/) for more options and test running controls.

-Regularly ensure your `pre-commit` hooks are up to date and enabled:
-
-```sh
-pre-commit install
-```
-
 You can also run the benchmarks with:

 ```sh
-pytest graphene --benchmark-only
+py.test graphene --benchmark-only
 ```

 Graphene supports several versions of Python. To make sure that changes do not break compatibility with any of those versions, we use `tox` to create virtualenvs for each Python version and run tests with that version. To run against all Python versions defined in the `tox.ini` config file, just run:

@@ -114,10 +111,10 @@ tox
 If you wish to run against a specific version defined in the `tox.ini` file:

 ```sh
-tox -e py39
+tox -e py36
 ```

-Tox can only use whatever versions of Python are installed on your system. When you create a pull request, GitHub Actions pipelines will also be running the same tests and report the results, so there is no need for potential contributors to try to install every single version of Python on their own system ahead of time. We appreciate opening issues and pull requests to make graphene even more stable & useful!
+Tox can only use whatever versions of Python are installed on your system. When you create a pull request, Travis will also be running the same tests and report the results, so there is no need for potential contributors to try to install every single version of Python on their own system ahead of time. We appreciate opening issues and pull requests to make graphene even more stable & useful!

 ### Building Documentation

177  README.rst (Normal file)

@@ -0,0 +1,177 @@
**We are looking for contributors**! Please check the
`ROADMAP <https://github.com/graphql-python/graphene/blob/master/ROADMAP.md>`__
to see how you can help ❤️

--------------

|Graphene Logo| `Graphene <http://graphene-python.org>`__ |Build Status| |PyPI version| |Coverage Status|
=========================================================================================================

Introduction
------------

`Graphene <http://graphene-python.org>`__ is a Python library for
building GraphQL schemas/types fast and easily.

- **Easy to use:** Graphene helps you use GraphQL in Python without
  effort.
- **Relay:** Graphene has builtin support for Relay.
- **Data agnostic:** Graphene supports any kind of data source: SQL
  (Django, SQLAlchemy), NoSQL, custom Python objects, etc. We believe
  that by providing a complete API you could plug Graphene anywhere
  your data lives and make your data available through GraphQL.

Integrations
------------

Graphene has multiple integrations with different frameworks:

+---------------------+----------------------------------------------------------------------------------------------+
| integration         | Package                                                                                      |
+=====================+==============================================================================================+
| Django              | `graphene-django <https://github.com/graphql-python/graphene-django/>`__                    |
+---------------------+----------------------------------------------------------------------------------------------+
| SQLAlchemy          | `graphene-sqlalchemy <https://github.com/graphql-python/graphene-sqlalchemy/>`__            |
+---------------------+----------------------------------------------------------------------------------------------+
| Google App Engine   | `graphene-gae <https://github.com/graphql-python/graphene-gae/>`__                          |
+---------------------+----------------------------------------------------------------------------------------------+
| Peewee              | *In progress* (`Tracking Issue <https://github.com/graphql-python/graphene/issues/289>`__)  |
+---------------------+----------------------------------------------------------------------------------------------+

Also, Graphene is fully compatible with the GraphQL spec, working
seamlessly with all GraphQL clients, such as
`Relay <https://github.com/facebook/relay>`__,
`Apollo <https://github.com/apollographql/apollo-client>`__ and
`gql <https://github.com/graphql-python/gql>`__.

Installation
------------

For instaling graphene, just run this command in your shell

.. code:: bash

    pip install "graphene>=2.0"

2.0 Upgrade Guide
-----------------

Please read `UPGRADE-v2.0.md </UPGRADE-v2.0.md>`__ to learn how to
upgrade.

Examples
--------

Here is one example for you to get started:

.. code:: python

    class Query(graphene.ObjectType):
        hello = graphene.String(description='A typical hello world')

        def resolve_hello(self, info):
            return 'World'

    schema = graphene.Schema(query=Query)

Then Querying ``graphene.Schema`` is as simple as:

.. code:: python

    query = '''
        query SayHello {
          hello
        }
    '''
    result = schema.execute(query)

If you want to learn even more, you can also check the following
`examples <examples/>`__:

- **Basic Schema**: `Starwars example <examples/starwars>`__
- **Relay Schema**: `Starwars Relay
  example <examples/starwars_relay>`__

Documentation
-------------

Documentation and links to additional resources are available at
https://docs.graphene-python.org/en/latest/

Contributing
------------

After cloning this repo, create a
`virtualenv <https://virtualenv.pypa.io/en/stable/>`__ and ensure
dependencies are installed by running:

.. code:: sh

    virtualenv venv
    source venv/bin/activate
    pip install -e ".[test]"

Well-written tests and maintaining good test coverage is important to
this project. While developing, run new and existing tests with:

.. code:: sh

    py.test graphene/relay/tests/test_node.py # Single file
    py.test graphene/relay # All tests in directory

Add the ``-s`` flag if you have introduced breakpoints into the code for
debugging. Add the ``-v`` ("verbose") flag to get more detailed test
output. For even more detailed output, use ``-vv``. Check out the
`pytest documentation <https://docs.pytest.org/en/latest/>`__ for more
options and test running controls.

You can also run the benchmarks with:

.. code:: sh

    py.test graphene --benchmark-only

Graphene supports several versions of Python. To make sure that changes
do not break compatibility with any of those versions, we use ``tox`` to
create virtualenvs for each Python version and run tests with that
version. To run against all Python versions defined in the ``tox.ini``
config file, just run:

.. code:: sh

    tox

If you wish to run against a specific version defined in the ``tox.ini``
file:

.. code:: sh

    tox -e py36

Tox can only use whatever versions of Python are installed on your
system. When you create a pull request, Travis will also be running the
same tests and report the results, so there is no need for potential
contributors to try to install every single version of Python on their
own system ahead of time. We appreciate opening issues and pull requests
to make graphene even more stable & useful!

Building Documentation
~~~~~~~~~~~~~~~~~~~~~~

The documentation is generated using the excellent
`Sphinx <http://www.sphinx-doc.org/>`__ and a custom theme.

An HTML version of the documentation is produced by running:

.. code:: sh

    make docs

.. |Graphene Logo| image:: http://graphene-python.org/favicon.png
.. |Build Status| image:: https://travis-ci.org/graphql-python/graphene.svg?branch=master
   :target: https://travis-ci.org/graphql-python/graphene
.. |PyPI version| image:: https://badge.fury.io/py/graphene.svg
   :target: https://badge.fury.io/py/graphene
.. |Coverage Status| image:: https://coveralls.io/repos/graphql-python/graphene/badge.svg?branch=master&service=github
   :target: https://coveralls.io/github/graphql-python/graphene?branch=master

54  ROADMAP.md (Normal file)

@@ -0,0 +1,54 @@
# GraphQL Python Roadmap

In order to move Graphene and the GraphQL Python ecosystem forward it's essential to be clear with the community on next steps, so we can move uniformly.

_👋 If you have more ideas on how to move the Graphene ecosystem forward, don't hesistate to [open a PR](https://github.com/graphql-python/graphene/edit/master/ROADMAP.md)_

## Now
- [ ] Continue to support v2.x with security releases
- [ ] Last major/feature release is cut and graphene-* libraries should pin to that version number

## Next
New features will only be developed on version 3 of ecosystem libraries.

### [Core-Next](https://github.com/graphql-python/graphql-core-next)
Targeted as v3 of [graphql-core](https://pypi.org/project/graphql-core/), Python 3 only

### Graphene
- [ ] Integrate with the core-next API and resolve all breaking changes
- [ ] GraphQL types from type annotations - [See issue](https://github.com/graphql-python/graphene/issues/729)
- [ ] Add support for coroutines in Connection, Mutation (abstracting out Promise requirement) - [See PR](https://github.com/graphql-python/graphene/pull/824)

### Graphene-*
- [ ] Integrate with the graphene core-next API and resolve all breaking changes

### *-graphql
- [ ] Integrate with the graphql core-next API and resolve all breaking changes

## Ongoing Initiatives
- [ ] Improve documentation, especially for new users to the library
- [ ] Recipes for “quick start” that people can ideally use/run

## Dependent Libraries
| Repo | Release Manager | CODEOWNERS | Pinned | next/master created | Labels Standardized |
| ---- | --------------- | ---------- | ---------- | ------------------- | ------------------- |
| [graphene](https://github.com/graphql-python/graphene) | ekampf | ✅ | | ✅ | |
| [graphql-core](https://github.com/graphql-python/graphql-core) | Cito | ✅ | N/A | N/A | |
| [graphql-core-next](https://github.com/graphql-python/graphql-core-next) | Cito | ✅ | N/A | N/A | |
| [graphql-server-core](https://github.com/graphql-python/graphql-server-core) | Cito | | ✅ | ✅ | |
| [gql](https://github.com/graphql-python/gql) | ekampf | | | | |
| [gql-next](https://github.com/graphql-python/gql-next) | ekampf | | N/A | N/A | |
| ...[aiohttp](https://github.com/graphql-python/aiohttp-graphql) | | | | | |
| ...[django](https://github.com/graphql-python/graphene-django) | mvanlonden | | ✅ | ✅ | |
| ...[sanic](https://github.com/graphql-python/sanic-graphql) | ekampf | | | | |
| ...[flask](https://github.com/graphql-python/flask-graphql) | | | | | |
| ...[webob](https://github.com/graphql-python/webob-graphql) | | | | | |
| ...[tornado](https://github.com/graphql-python/graphene-tornado) | ewhauser | | PR created | ✅ | |
| ...[ws](https://github.com/graphql-python/graphql-ws) | Cito/dfee | | ✅ | ✅ | |
| ...[gae](https://github.com/graphql-python/graphene-gae) | ekampf | | PR created | ✅ | |
| ...[sqlalchemy](https://github.com/graphql-python/graphene-sqlalchemy) | jnak/Nabell | ✅ | ✅ | ✅ | |
| ...[mongo](https://github.com/graphql-python/graphene-mongo) | | | ✅ | ✅ | |
| ...[relay-py](https://github.com/graphql-python/graphql-relay-py) | Cito | | | | |
| ...[wsgi](https://github.com/moritzmhmk/wsgi-graphql) | | | | | |

15  SECURITY.md

@@ -1,15 +0,0 @@
# Security Policy

## Supported Versions

Support for security issues is currently provided for Graphene 3.0 and above. Support on earlier versions cannot be guaranteed by the maintainers of this library, but community PRs may be accepted in critical cases.
The preferred mitigation strategy is via an upgrade to Graphene 3.

| Version | Supported          |
| ------- | ------------------ |
| 3.x     | :white_check_mark: |
| <3.x    | :x:                |

## Reporting a Vulnerability

Please use responsible disclosure by contacting a core maintainer via Discord or E-Mail.

@@ -153,7 +153,7 @@ class Query(ObjectType):
 ```

 Also, if you wanted to create an `ObjectType` that implements `Node`, you have to do it
-explicitly.
+explicity.

 ## Django

@@ -123,7 +123,7 @@ def resolve_my_field(root, info, my_arg):
     return ...
 ```

-**PS.: Take care with receiving args like `my_arg` as above. This doesn't work for optional (non-required) arguments as standard `Connection`'s arguments (first, last, after, before).**
+**PS.: Take care with receiving args like `my_arg` as above. This doesn't work for optional (non-required) arguments as stantard `Connection`'s arguments (first, before, after, before).**
 You may need something like this:

 ```python
@@ -377,7 +377,10 @@ class Base(ObjectType):
     id = ID()

     def resolve_id(root, info):
-        return f"{root.__class__.__name__}_{root.id}"
+        return "{type}_{id}".format(
+            type=root.__class__.__name__,
+            id=root.id
+        )
 ```

 ### UUID Scalar

7  bin/autolinter (Executable file)

@@ -0,0 +1,7 @@
#!/bin/bash

# Install the required scripts with
# pip install autoflake autopep8 isort
autoflake ./examples/ ./graphene/ -r --remove-unused-variables --remove-all-unused-imports --in-place
autopep8 ./examples/ ./graphene/ -r --in-place --experimental --aggressive --max-line-length 120
isort -rc ./examples/ ./graphene/

@@ -20,8 +20,6 @@ Object types
 .. autoclass:: graphene.Mutation
    :members:

-.. _fields-mounted-types:
-
 Fields (Mounted Types)
 ----------------------

@@ -64,8 +62,6 @@ Graphene Scalars

 .. autoclass:: graphene.JSONString()

-.. autoclass:: graphene.Base64()
-
 Enum
 ----

@@ -92,7 +88,7 @@ Execution Metadata

 .. autoclass:: graphene.Context

-.. autoclass:: graphql.ExecutionResult
+.. autoclass:: graphql.execution.base.ExecutionResult

 .. Relay
 .. -----

22  docs/conf.py

@@ -1,5 +1,4 @@
 import os
-import sys

 import sphinx_graphene_theme

@@ -23,6 +22,8 @@ on_rtd = os.environ.get("READTHEDOCS", None) == "True"
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
 #
+import os
+import sys

 sys.path.insert(0, os.path.abspath(".."))

@@ -63,25 +64,25 @@ source_suffix = ".rst"
 master_doc = "index"

 # General information about the project.
-project = "Graphene"
-copyright = "Graphene 2016"
-author = "Syrus Akbary"
+project = u"Graphene"
+copyright = u"Graphene 2016"
+author = u"Syrus Akbary"

 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
 #
 # The short X.Y version.
-version = "1.0"
+version = u"1.0"
 # The full version, including alpha/beta/rc tags.
-release = "1.0"
+release = u"1.0"

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
 #
 # This is also used if you do content translation via gettext catalogs.
 # Usually you set "language" from the command line for these cases.
-# language = None
+language = None

 # There are two options for replacing |today|: either, you set today to some
 # non-false value, then it is used:

@@ -277,7 +278,7 @@ latex_elements = {
 # (source start file, target name, title,
 # author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    (master_doc, "Graphene.tex", "Graphene Documentation", "Syrus Akbary", "manual")
+    (master_doc, "Graphene.tex", u"Graphene Documentation", u"Syrus Akbary", "manual")
 ]

 # The name of an image file (relative to this directory) to place at the top of

@@ -317,7 +318,7 @@ latex_documents = [

 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
-man_pages = [(master_doc, "graphene", "Graphene Documentation", [author], 1)]
+man_pages = [(master_doc, "graphene", u"Graphene Documentation", [author], 1)]

 # If true, show URL addresses after external links.
 #

@@ -333,7 +334,7 @@ texinfo_documents = [
 (
     master_doc,
     "Graphene",
-    "Graphene Documentation",
+    u"Graphene Documentation",
     author,
     "Graphene",
     "One line description of project.",

@@ -455,4 +456,5 @@ intersphinx_mapping = {
     "http://docs.graphene-python.org/projects/sqlalchemy/en/latest/",
     None,
 ),
+    "graphene_gae": ("http://docs.graphene-python.org/projects/gae/en/latest/", None),
 }

@@ -4,7 +4,7 @@ Dataloader
 DataLoader is a generic utility to be used as part of your application's
 data fetching layer to provide a simplified and consistent API over
 various remote data sources such as databases or web services via batching
-and caching. It is provided by a separate package `aiodataloader <https://pypi.org/project/aiodataloader/>`.
+and caching.

 Batching

@@ -15,31 +15,32 @@ Create loaders by providing a batch loading function.

 .. code:: python

-    from aiodataloader import DataLoader
+    from promise import Promise
+    from promise.dataloader import DataLoader

     class UserLoader(DataLoader):
-        async def batch_load_fn(self, keys):
-            # Here we call a function to return a user for each key in keys
-            return [get_user(id=key) for key in keys]
+        def batch_load_fn(self, keys):
+            # Here we return a promise that will result on the
+            # corresponding user for each key in keys
+            return Promise.resolve([get_user(id=key) for key in keys])

-A batch loading async function accepts a list of keys, and returns a list of ``values``.
+A batch loading function accepts a list of keys, and returns a ``Promise``
+which resolves to a list of ``values``.

-``DataLoader`` will coalesce all individual loads which occur within a
-single frame of execution (executed once the wrapping event loop is resolved)
-and then call your batch function with all requested keys.
+Then load individual values from the loader. ``DataLoader`` will coalesce all
+individual loads which occur within a single frame of execution (executed once
+the wrapping promise is resolved) and then call your batch function with all
+requested keys.

 .. code:: python

     user_loader = UserLoader()

-    user1 = await user_loader.load(1)
-    user1_best_friend = await user_loader.load(user1.best_friend_id)
+    user_loader.load(1).then(lambda user: user_loader.load(user.best_friend_id))

-    user2 = await user_loader.load(2)
-    user2_best_friend = await user_loader.load(user2.best_friend_id)
+    user_loader.load(2).then(lambda user: user_loader.load(user.best_friend_id))

 A naive application may have issued *four* round-trips to a backend for the

@@ -53,9 +54,9 @@ make sure that you then order the query result for the results to match the keys
 .. code:: python

     class UserLoader(DataLoader):
-        async def batch_load_fn(self, keys):
+        def batch_load_fn(self, keys):
             users = {user.id: user for user in User.objects.filter(id__in=keys)}
-            return [users.get(user_id) for user_id in keys]
+            return Promise.resolve([users.get(user_id) for user_id in keys])

 ``DataLoader`` allows you to decouple unrelated parts of your application without

@@ -95,7 +96,7 @@ Consider the following GraphQL request:
 }

-If ``me``, ``bestFriend`` and ``friends`` each need to send a request to the backend,
+Naively, if ``me``, ``bestFriend`` and ``friends`` each need to request the backend,
 there could be at most 13 database requests!

@@ -110,8 +111,8 @@ leaner code and at most 4 database requests, and possibly fewer if there are cac
     best_friend = graphene.Field(lambda: User)
     friends = graphene.List(lambda: User)

-    async def resolve_best_friend(root, info):
-        return await user_loader.load(root.best_friend_id)
+    def resolve_best_friend(root, info):
+        return user_loader.load(root.best_friend_id)

-    async def resolve_friends(root, info):
-        return await user_loader.load_many(root.friend_ids)
+    def resolve_friends(root, info):
+        return user_loader.load_many(root.friend_ids)

@@ -3,6 +3,7 @@
 Executing a query
 =================

 For executing a query against a schema, you can directly call the ``execute`` method on it.

@@ -85,7 +86,7 @@ Value used for :ref:`ResolverParamParent` in root queries and mutations can be o
     return {'id': root.id, 'firstName': root.name}

 schema = Schema(Query)
-user_root = User(id=12, name='bob')
+user_root = User(id=12, name='bob'}
 result = schema.execute(
     '''
     query getUser {

@@ -110,7 +111,7 @@ If there are multiple operations defined in a query string, ``operation_name`` s
 from graphene import ObjectType, Field, Schema

 class Query(ObjectType):
-    user = Field(User)
+    me = Field(User)

     def resolve_user(root, info):
         return get_user_by_id(12)

@@ -4,5 +4,5 @@ File uploading
 File uploading is not part of the official GraphQL spec yet and is not natively
 implemented in Graphene.

-If your server needs to support file uploading then you can use the library: `graphene-file-upload <https://github.com/lmcgartland/graphene-file-upload>`_ which enhances Graphene to add file
+If your server needs to support file uploading then you can use the libary: `graphene-file-upload <https://github.com/lmcgartland/graphene-file-upload>`_ which enhances Graphene to add file
 uploads and conforms to the unoffical GraphQL `multipart request spec <https://github.com/jaydenseric/graphql-multipart-request-spec>`_.

@@ -9,5 +9,3 @@ Execution
    middleware
    dataloader
    fileuploading
-   subscriptions
-   queryvalidation

@@ -41,14 +41,12 @@ And then execute it with:

 result = schema.execute('THE QUERY', middleware=[AuthorizationMiddleware()])

-If the ``middleware`` argument includes multiple middlewares,
-these middlewares will be executed bottom-up, i.e. from last to first.

 Functional example
 ------------------

 Middleware can also be defined as a function. Here we define a middleware that
-logs the time it takes to resolve each field:
+logs the time it takes to resolve each field

 .. code:: python

@@ -57,9 +55,12 @@ logs the time it takes to resolve each field:
 def timing_middleware(next, root, info, **args):
     start = timer()
     return_value = next(root, info, **args)
-    duration = round((timer() - start) * 1000, 2)
-    parent_type_name = root._meta.name if root and hasattr(root, '_meta') else ''
-    logger.debug(f"{parent_type_name}.{info.field_name}: {duration} ms")
+    duration = timer() - start
+    logger.debug("{parent_type}.{field_name}: {duration} ms".format(
+        parent_type=root._meta.name if root and hasattr(root, '_meta') else '',
+        field_name=info.field_name,
+        duration=round(duration * 1000, 2)
+    ))
     return return_value

@@ -1,123 +0,0 @@
Query Validation
================
GraphQL uses query validators to check if Query AST is valid and can be executed. Every GraphQL server implements
standard query validators. For example, there is an validator that tests if queried field exists on queried type, that
makes query fail with "Cannot query field on type" error if it doesn't.

To help with common use cases, graphene provides a few validation rules out of the box.


Depth limit Validator
---------------------
The depth limit validator helps to prevent execution of malicious
queries. It takes in the following arguments.

- ``max_depth`` is the maximum allowed depth for any operation in a GraphQL document.
- ``ignore`` Stops recursive depth checking based on a field name. Either a string or regexp to match the name, or a function that returns a boolean
- ``callback`` Called each time validation runs. Receives an Object which is a map of the depths for each operation.

Usage
-----

Here is how you would implement depth-limiting on your schema.

.. code:: python

    from graphql import validate, parse
    from graphene import ObjectType, Schema, String
    from graphene.validation import depth_limit_validator


    class MyQuery(ObjectType):
        name = String(required=True)


    schema = Schema(query=MyQuery)

    # queries which have a depth more than 20
    # will not be executed.

    validation_errors = validate(
        schema=schema.graphql_schema,
        document_ast=parse('THE QUERY'),
        rules=(
            depth_limit_validator(
                max_depth=20
            ),
        )
    )


Disable Introspection
---------------------
the disable introspection validation rule ensures that your schema cannot be introspected.
This is a useful security measure in production environments.

Usage
-----

Here is how you would disable introspection for your schema.

.. code:: python

    from graphql import validate, parse
    from graphene import ObjectType, Schema, String
    from graphene.validation import DisableIntrospection


    class MyQuery(ObjectType):
        name = String(required=True)


    schema = Schema(query=MyQuery)

    # introspection queries will not be executed.

    validation_errors = validate(
        schema=schema.graphql_schema,
        document_ast=parse('THE QUERY'),
        rules=(
            DisableIntrospection,
        )
    )


Implementing custom validators
------------------------------
All custom query validators should extend the `ValidationRule <https://github.com/graphql-python/graphql-core/blob/v3.0.5/src/graphql/validation/rules/__init__.py#L37>`_
base class importable from the graphql.validation.rules module. Query validators are visitor classes. They are
instantiated at the time of query validation with one required argument (context: ASTValidationContext). In order to
perform validation, your validator class should define one or more of enter_* and leave_* methods. For possible
enter/leave items as well as details on function documentation, please see contents of the visitor module. To make
validation fail, you should call validator's report_error method with the instance of GraphQLError describing failure
reason. Here is an example query validator that visits field definitions in GraphQL query and fails query validation
if any of those fields are blacklisted:

.. code:: python

    from graphql import GraphQLError
    from graphql.language import FieldNode
    from graphql.validation import ValidationRule


    my_blacklist = (
        "disallowed_field",
    )


    def is_blacklisted_field(field_name: str):
        return field_name.lower() in my_blacklist


    class BlackListRule(ValidationRule):
        def enter_field(self, node: FieldNode, *_args):
            field_name = node.name.value
            if not is_blacklisted_field(field_name):
                return

            self.report_error(
                GraphQLError(
                    f"Cannot query '{field_name}': field is blacklisted.", node,
                )
            )

@@ -1,40 +0,0 @@
.. _SchemaSubscription:

Subscriptions
=============

To create a subscription, you can directly call the ``subscribe`` method on the
schema. This method is async and must be awaited.

.. code:: python

    import asyncio
    from datetime import datetime
    from graphene import ObjectType, String, Schema, Field

    # Every schema requires a query.
    class Query(ObjectType):
        hello = String()

        def resolve_hello(root, info):
            return "Hello, world!"

    class Subscription(ObjectType):
        time_of_day = String()

        async def subscribe_time_of_day(root, info):
            while True:
                yield datetime.now().isoformat()
                await asyncio.sleep(1)

    schema = Schema(query=Query, subscription=Subscription)

    async def main(schema):
        subscription = 'subscription { timeOfDay }'
        result = await schema.subscribe(subscription)
        async for item in result:
            print(item.data['timeOfDay'])

    asyncio.run(main(schema))

The ``result`` is an async iterator which yields items in the same manner as a query.

@ -1,6 +1,12 @@
|
||||||
Graphene
|
Graphene
|
||||||
========
|
========
|
||||||
|
|
||||||
|
------------
|
||||||
|
|
||||||
|
The documentation below is for the ``dev`` (prerelease) version of Graphene. To view the documentation for the latest stable Graphene version go to the `v2 docs <https://docs.graphene-python.org/en/stable/>`_.
|
||||||
|
|
||||||
|
------------
|
||||||
|
|
||||||
Contents:
|
Contents:
|
||||||
|
|
||||||
.. toctree::
|
.. toctree::
|
||||||
|
@ -21,6 +27,7 @@ Integrations
|
||||||
* `Graphene-Django <http://docs.graphene-python.org/projects/django/en/latest/>`_ (`source <https://github.com/graphql-python/graphene-django/>`_)
|
* `Graphene-Django <http://docs.graphene-python.org/projects/django/en/latest/>`_ (`source <https://github.com/graphql-python/graphene-django/>`_)
|
||||||
* Flask-Graphql (`source <https://github.com/graphql-python/flask-graphql>`_)
|
* Flask-Graphql (`source <https://github.com/graphql-python/flask-graphql>`_)
|
||||||
* `Graphene-SQLAlchemy <http://docs.graphene-python.org/projects/sqlalchemy/en/latest/>`_ (`source <https://github.com/graphql-python/graphene-sqlalchemy/>`_)
|
* `Graphene-SQLAlchemy <http://docs.graphene-python.org/projects/sqlalchemy/en/latest/>`_ (`source <https://github.com/graphql-python/graphene-sqlalchemy/>`_)
|
||||||
|
* `Graphene-GAE <http://docs.graphene-python.org/projects/gae/en/latest/>`_ (`source <https://github.com/graphql-python/graphene-gae/>`_)
|
||||||
* `Graphene-Mongo <http://graphene-mongo.readthedocs.io/en/latest/>`_ (`source <https://github.com/graphql-python/graphene-mongo>`_)
|
* `Graphene-Mongo <http://graphene-mongo.readthedocs.io/en/latest/>`_ (`source <https://github.com/graphql-python/graphene-mongo>`_)
|
||||||
* `Starlette <https://www.starlette.io/graphql/>`_ (`source <https://github.com/encode/starlette>`_)
|
* `Starlette <https://www.starlette.io/graphql/>`_ (`source <https://github.com/encode/starlette>`_)
|
||||||
* `FastAPI <https://fastapi.tiangolo.com/advanced/graphql/>`_ (`source <https://github.com/tiangolo/fastapi>`_)
|
* `FastAPI <https://fastapi.tiangolo.com/tutorial/graphql/>`_ (`source <https://github.com/tiangolo/fastapi>`_)
|
||||||
|
|
|
@ -28,7 +28,7 @@ Compare Graphene's *code-first* approach to building a GraphQL API with *schema-
|
||||||
|
|
||||||
.. _Apollo Server: https://www.apollographql.com/docs/apollo-server/
|
.. _Apollo Server: https://www.apollographql.com/docs/apollo-server/
|
||||||
|
|
||||||
.. _Ariadne: https://ariadnegraphql.org/
|
.. _Ariadne: https://ariadne.readthedocs.io
|
||||||
|
|
||||||
Graphene is fully featured with integrations for the most popular web frameworks and ORMs. Graphene produces schemas that are fully compliant with the GraphQL spec and provides tools and patterns for building a Relay-Compliant API as well.
|
Graphene is fully featured with integrations for the most popular web frameworks and ORMs. Graphene produces schemas that are fully compliant with the GraphQL spec and provides tools and patterns for building a Relay-Compliant API as well.
|
||||||
|
|
||||||
|
@ -37,12 +37,12 @@ An example in Graphene
|
||||||
|
|
||||||
Let’s build a basic GraphQL schema to say "hello" and "goodbye" in Graphene.
|
Let’s build a basic GraphQL schema to say "hello" and "goodbye" in Graphene.
|
||||||
|
|
||||||
When we send a **Query** requesting only one **Field**, ``hello``, and specify a value for the ``firstName`` **Argument**...
|
When we send a **Query** requesting only one **Field**, ``hello``, and specify a value for the ``name`` **Argument**...
|
||||||
|
|
||||||
.. code::
|
.. code::
|
||||||
|
|
||||||
{
|
{
|
||||||
hello(firstName: "friend")
|
hello(name: "friend")
|
||||||
}
|
}
|
||||||
|
|
||||||
...we would expect the following Response containing only the data requested (the ``goodbye`` field is not resolved).
|
...we would expect the following Response containing only the data requested (the ``goodbye`` field is not resolved).
|
||||||
|
@ -59,15 +59,15 @@ When we send a **Query** requesting only one **Field**, ``hello``, and specify a
|
||||||
Requirements
|
Requirements
|
||||||
~~~~~~~~~~~~
|
~~~~~~~~~~~~
|
||||||
|
|
||||||
- Python (3.8, 3.9, 3.10, 3.11, 3.12, pypy)
|
- Python (2.7, 3.4, 3.5, 3.6, pypy)
|
||||||
- Graphene (3.0)
|
- Graphene (2.0)
|
||||||
|
|
||||||
Project setup
|
Project setup
|
||||||
~~~~~~~~~~~~~
|
~~~~~~~~~~~~~
|
||||||
|
|
||||||
.. code:: bash
|
.. code:: bash
|
||||||
|
|
||||||
pip install "graphene>=3.0"
|
pip install "graphene>=2.0"
|
||||||
|
|
||||||
Creating a basic Schema
|
Creating a basic Schema
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
@ -79,15 +79,14 @@ In Graphene, we can define a simple schema using the following code:
|
||||||
from graphene import ObjectType, String, Schema
|
from graphene import ObjectType, String, Schema
|
||||||
|
|
||||||
class Query(ObjectType):
|
class Query(ObjectType):
|
||||||
# this defines a Field `hello` in our Schema with a single Argument `first_name`
|
# this defines a Field `hello` in our Schema with a single Argument `name`
|
||||||
# By default, the argument name will automatically be camelcased into firstName in the generated schema
|
hello = String(name=String(default_value="stranger"))
|
||||||
hello = String(first_name=String(default_value="stranger"))
|
|
||||||
goodbye = String()
|
goodbye = String()
|
||||||
|
|
||||||
# our Resolver method takes the GraphQL context (root, info) as well as
|
# our Resolver method takes the GraphQL context (root, info) as well as
|
||||||
# Argument (first_name) for the Field and returns data for the query Response
|
# Argument (name) for the Field and returns data for the query Response
|
||||||
def resolve_hello(root, info, first_name):
|
def resolve_hello(root, info, name):
|
||||||
return f'Hello {first_name}!'
|
return f'Hello {name}!'
|
||||||
|
|
||||||
def resolve_goodbye(root, info):
|
def resolve_goodbye(root, info):
|
||||||
return 'See ya!'
|
return 'See ya!'
|
||||||
|
@ -104,14 +103,14 @@ For each **Field** in our **Schema**, we write a **Resolver** method to fetch da
|
||||||
Schema Definition Language (SDL)
|
Schema Definition Language (SDL)
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
In the `GraphQL Schema Definition Language`_, we could describe the fields defined by our example code as shown below.
|
In the `GraphQL Schema Definition Language`_, we could describe the fields defined by our example code as show below.
|
||||||
|
|
||||||
.. _GraphQL Schema Definition Language: https://graphql.org/learn/schema/
|
.. _GraphQL Schema Definition Language: https://graphql.org/learn/schema/
|
||||||
|
|
||||||
.. code::
|
.. code::
|
||||||
|
|
||||||
type Query {
|
type Query {
|
||||||
hello(firstName: String = "stranger"): String
|
hello(name: String = "stranger"): String
|
||||||
goodbye: String
|
goodbye: String
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -131,7 +130,7 @@ Then we can start querying our **Schema** by passing a GraphQL query string to `
|
||||||
# "Hello stranger!"
|
# "Hello stranger!"
|
||||||
|
|
||||||
# or passing the argument in the query
|
# or passing the argument in the query
|
||||||
query_with_argument = '{ hello(firstName: "GraphQL") }'
|
query_with_argument = '{ hello(name: "GraphQL") }'
|
||||||
result = schema.execute(query_with_argument)
|
result = schema.execute(query_with_argument)
|
||||||
print(result.data['hello'])
|
print(result.data['hello'])
|
||||||
# "Hello GraphQL!"
|
# "Hello GraphQL!"
|
||||||
|
|
|
@ -19,8 +19,11 @@ Useful links
|
||||||
- `Getting started with Relay`_
|
- `Getting started with Relay`_
|
||||||
- `Relay Global Identification Specification`_
|
- `Relay Global Identification Specification`_
|
||||||
- `Relay Cursor Connection Specification`_
|
- `Relay Cursor Connection Specification`_
|
||||||
|
- `Relay input Object Mutation`_
|
||||||
|
|
||||||
.. _Relay: https://relay.dev/docs/guides/graphql-server-specification/
|
.. _Relay: https://facebook.github.io/relay/docs/en/graphql-server-specification.html
|
||||||
.. _Getting started with Relay: https://relay.dev/docs/getting-started/step-by-step-guide/
|
.. _Relay specification: https://facebook.github.io/relay/graphql/objectidentification.htm#sec-Node-root-field
|
||||||
.. _Relay Global Identification Specification: https://relay.dev/graphql/objectidentification.htm
|
.. _Getting started with Relay: https://facebook.github.io/relay/docs/en/quick-start-guide.html
|
||||||
.. _Relay Cursor Connection Specification: https://relay.dev/graphql/connections.htm
|
.. _Relay Global Identification Specification: https://facebook.github.io/relay/graphql/objectidentification.htm
|
||||||
|
.. _Relay Cursor Connection Specification: https://facebook.github.io/relay/graphql/connections.htm
|
||||||
|
.. _Relay input Object Mutation: https://facebook.github.io/relay/graphql/mutations.htm
|
||||||
|
|
|
@ -51,20 +51,20 @@ Example of a custom node:
|
||||||
name = 'Node'
|
name = 'Node'
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def to_global_id(type_, id):
|
def to_global_id(type, id):
|
||||||
return f"{type_}:{id}"
|
return '{}:{}'.format(type, id)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_node_from_global_id(info, global_id, only_type=None):
|
def get_node_from_global_id(info, global_id, only_type=None):
|
||||||
type_, id = global_id.split(':')
|
type, id = global_id.split(':')
|
||||||
if only_type:
|
if only_type:
|
||||||
# We assure that the node type that we want to retrieve
|
# We assure that the node type that we want to retrieve
|
||||||
# is the same that was indicated in the field type
|
# is the same that was indicated in the field type
|
||||||
assert type_ == only_type._meta.name, 'Received not compatible node.'
|
assert type == only_type._meta.name, 'Received not compatible node.'
|
||||||
|
|
||||||
if type_ == 'User':
|
if type == 'User':
|
||||||
return get_user(id)
|
return get_user(id)
|
||||||
elif type_ == 'Photo':
|
elif type == 'Photo':
|
||||||
return get_photo(id)
|
return get_photo(id)
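
With this custom node, global IDs are plain ``"Type:id"`` strings instead of the default base64-encoded ones. A small usage sketch, assuming the class above is named ``CustomNode`` as in the full example (its name is not shown in this excerpt):

.. code:: python

    # Produces the readable global ID "User:1"
    global_id = CustomNode.to_global_id('User', 1)

    # Inside a resolver, the same ID resolves back to an object;
    # only_type is an optional sanity check on the node's type.
    # user = CustomNode.get_node_from_global_id(info, global_id, only_type=User)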
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
# Required library
|
# Required library
|
||||||
Sphinx==6.1.3
|
Sphinx==1.5.3
|
||||||
sphinx-autobuild==2021.3.14
|
sphinx-autobuild==0.7.1
|
||||||
# Docs template
|
# Docs template
|
||||||
http://graphene-python.org/sphinx_graphene_theme.zip
|
http://graphene-python.org/sphinx_graphene_theme.zip
|
||||||
|
|
|
@ -69,3 +69,43 @@ You can also add extra keyword arguments to the ``execute`` method, such as
|
||||||
'hey': 'hello Peter!'
|
'hey': 'hello Peter!'
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
Snapshot testing
|
||||||
|
~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
As our APIs evolve, we need to know when our changes introduce any breaking changes that might break
|
||||||
|
some of the clients of our GraphQL app.
|
||||||
|
|
||||||
|
However, writing tests and replicating the same response we expect from our GraphQL application can be
a tedious and repetitive task, and sometimes it's easier to skip this process.
|
||||||
|
|
||||||
|
Because of that, we recommend the usage of `SnapshotTest <https://github.com/syrusakbary/snapshottest/>`_.
|
||||||
|
|
||||||
|
SnapshotTest lets us write all these tests in a breeze, as it automatically creates the ``snapshots`` for us
the first time the test is executed.
|
||||||
|
|
||||||
|
|
||||||
|
Here is a simple example of how our tests will look if we use ``pytest``:
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
def test_hey(snapshot):
|
||||||
|
client = Client(my_schema)
|
||||||
|
# This will create a snapshot dir and a snapshot file
|
||||||
|
# the first time the test is executed, with the response
|
||||||
|
# of the execution.
|
||||||
|
snapshot.assert_match(client.execute('''{ hey }'''))
|
||||||
|
|
||||||
|
|
||||||
|
If we are using ``unittest``:
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
from snapshottest import TestCase
|
||||||
|
|
||||||
|
class APITestCase(TestCase):
|
||||||
|
def test_api_me(self):
|
||||||
|
"""Testing the API for /me"""
|
||||||
|
client = Client(my_schema)
|
||||||
|
self.assertMatchSnapshot(client.execute('''{ hey }'''))
|
||||||
|
|
|
@ -61,8 +61,7 @@ you can add description etc. to your enum without changing the original:
|
||||||
|
|
||||||
graphene.Enum.from_enum(
|
graphene.Enum.from_enum(
|
||||||
AlreadyExistingPyEnum,
|
AlreadyExistingPyEnum,
|
||||||
description=lambda v: 'foo' if v == AlreadyExistingPyEnum.Foo else 'bar'
description=lambda v: 'foo' if v == AlreadyExistingPyEnum.Foo else 'bar')
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
Notes
|
Notes
|
||||||
|
@ -77,7 +76,6 @@ In the Python ``Enum`` implementation you can access a member by initing the Enu
|
||||||
.. code:: python
|
.. code:: python
|
||||||
|
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
|
|
||||||
class Color(Enum):
|
class Color(Enum):
|
||||||
RED = 1
|
RED = 1
|
||||||
GREEN = 2
|
GREEN = 2
|
||||||
|
@ -86,12 +84,11 @@ In the Python ``Enum`` implementation you can access a member by initing the Enu
|
||||||
assert Color(1) == Color.RED
|
assert Color(1) == Color.RED
|
||||||
|
|
||||||
|
|
||||||
However, in Graphene ``Enum`` you need to call `.get` to have the same effect:
|
However, in Graphene ``Enum`` you need to call get to have the same effect:
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
|
||||||
|
|
||||||
from graphene import Enum
|
from graphene import Enum
|
||||||
|
|
||||||
class Color(Enum):
|
class Color(Enum):
|
||||||
RED = 1
|
RED = 1
|
||||||
GREEN = 2
|
GREEN = 2
|
||||||
|
|
|
@ -44,7 +44,7 @@ Both of these types have all of the fields from the ``Character`` interface,
|
||||||
but also bring in extra fields, ``home_planet``, ``starships`` and
|
but also bring in extra fields, ``home_planet``, ``starships`` and
|
||||||
``primary_function``, that are specific to that particular type of character.
|
``primary_function``, that are specific to that particular type of character.
|
||||||
|
|
||||||
The full GraphQL schema definition will look like this:
|
The full GraphQL schema defition will look like this:
|
||||||
|
|
||||||
.. code::
|
.. code::
|
||||||
|
|
||||||
|
|
|
@ -85,9 +85,9 @@ We should receive:
|
||||||
|
|
||||||
InputFields and InputObjectTypes
|
InputFields and InputObjectTypes
|
||||||
----------------------------------
|
----------------------------------
|
||||||
InputFields are used in mutations to allow nested input data for mutations.
|
InputFields are used in mutations to allow nested input data for mutations
|
||||||
|
|
||||||
To use an InputField you define an InputObjectType that specifies the structure of your input data:
|
To use an InputField you define an InputObjectType that specifies the structure of your input data
|
||||||
|
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
|
||||||
|
@ -104,6 +104,7 @@ To use an InputField you define an InputObjectType that specifies the structure
|
||||||
|
|
||||||
person = graphene.Field(Person)
|
person = graphene.Field(Person)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
def mutate(root, info, person_data=None):
|
def mutate(root, info, person_data=None):
|
||||||
person = Person(
|
person = Person(
|
||||||
name=person_data.name,
|
name=person_data.name,
|
||||||
|
@ -112,7 +113,7 @@ To use an InputField you define an InputObjectType that specifies the structure
|
||||||
return CreatePerson(person=person)
|
return CreatePerson(person=person)
|
||||||
|
|
||||||
|
|
||||||
Note that **name** and **age** are part of **person_data** now.
|
Note that **name** and **age** are part of **person_data** now
|
||||||
|
|
||||||
Using the above mutation your new query would look like this:
|
Using the above mutation your new query would look like this:
|
||||||
|
|
||||||
|
@ -128,7 +129,7 @@ Using the above mutation your new query would look like this:
|
||||||
}
|
}
|
||||||
|
|
||||||
InputObjectTypes can also be fields of InputObjectTypes allowing you to have
|
InputObjectTypes can also be fields of InputObjectTypes allowing you to have
|
||||||
as complex of input data as you need:
|
as complex of input data as you need
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
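
    # A sketch with hypothetical input types (not part of the excerpt above):
    # one InputObjectType is mounted inside another via graphene.InputField.
    class LatLngInput(graphene.InputObjectType):
        lat = graphene.Float()
        lng = graphene.Float()

    class LocationInput(graphene.InputObjectType):
        name = graphene.String()
        latlng = graphene.InputField(LatLngInput)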
|
||||||
|
|
||||||
|
@ -160,7 +161,7 @@ To return an existing ObjectType instead of a mutation-specific type, set the **
|
||||||
def mutate(root, info, name):
|
def mutate(root, info, name):
|
||||||
return Person(name=name)
|
return Person(name=name)
|
||||||
|
|
||||||
Then, if we query (``schema.execute(query_str)``) with the following:
|
Then, if we query (``schema.execute(query_str)``) the following:
|
||||||
|
|
||||||
.. code::
|
.. code::
|
||||||
|
|
||||||
|
|
|
@ -52,7 +52,6 @@ Resolvers are lazily executed, so if a field is not included in a query, its res
|
||||||
Each field on an *ObjectType* in Graphene should have a corresponding resolver method to fetch data. This resolver method should match the field name. For example, in the ``Person`` type above, the ``full_name`` field is resolved by the method ``resolve_full_name``.
|
Each field on an *ObjectType* in Graphene should have a corresponding resolver method to fetch data. This resolver method should match the field name. For example, in the ``Person`` type above, the ``full_name`` field is resolved by the method ``resolve_full_name``.
|
||||||
|
|
||||||
Each resolver method takes the parameters:
|
Each resolver method takes the parameters:
|
||||||
|
|
||||||
* :ref:`ResolverParamParent` for the value object use to resolve most fields
|
* :ref:`ResolverParamParent` for the value object use to resolve most fields
|
||||||
* :ref:`ResolverParamInfo` for query and schema meta information and per-request context
|
* :ref:`ResolverParamInfo` for query and schema meta information and per-request context
|
||||||
* :ref:`ResolverParamGraphQLArguments` as defined on the **Field**.
|
* :ref:`ResolverParamGraphQLArguments` as defined on the **Field**.
|
||||||
|
@ -80,10 +79,6 @@ If we have a schema with Person type and one field on the root query.
|
||||||
|
|
||||||
from graphene import ObjectType, String, Field
|
from graphene import ObjectType, String, Field
|
||||||
|
|
||||||
def get_human(name):
|
|
||||||
first_name, last_name = name.split()
|
|
||||||
return Person(first_name, last_name)
|
|
||||||
|
|
||||||
class Person(ObjectType):
|
class Person(ObjectType):
|
||||||
full_name = String()
|
full_name = String()
|
||||||
|
|
||||||
|
@ -106,7 +101,7 @@ When we execute a query against that schema.
|
||||||
query_string = "{ me { fullName } }"
|
query_string = "{ me { fullName } }"
|
||||||
result = schema.execute(query_string)
|
result = schema.execute(query_string)
|
||||||
|
|
||||||
assert result.data["me"] == {"fullName": "Luke Skywalker"}
|
assert result.data["me"] == {"fullName": "Luke Skywalker")
|
||||||
|
|
||||||
Then we go through the following steps to resolve this query:
|
Then we go through the following steps to resolve this query:
|
||||||
|
|
||||||
|
@ -163,22 +158,6 @@ You can then execute the following query:
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
*Note:* There are several arguments to a field that are "reserved" by Graphene
|
|
||||||
(see :ref:`fields-mounted-types`).
|
|
||||||
You can still define an argument that clashes with one of these fields by using
|
|
||||||
the ``args`` parameter like so:
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
from graphene import ObjectType, Field, String
|
|
||||||
|
|
||||||
class Query(ObjectType):
|
|
||||||
answer = String(args={'description': String()})
|
|
||||||
|
|
||||||
def resolve_answer(parent, info, description):
|
|
||||||
return description
|
|
||||||
|
|
||||||
|
|
||||||
Convenience Features of Graphene Resolvers
|
Convenience Features of Graphene Resolvers
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
@ -352,7 +331,7 @@ A field can use a custom resolver from outside the class:
|
||||||
from graphene import ObjectType, String
|
from graphene import ObjectType, String
|
||||||
|
|
||||||
def resolve_full_name(person, info):
|
def resolve_full_name(person, info):
|
||||||
return f"{person.first_name} {person.last_name}"
|
return '{} {}'.format(person.first_name, person.last_name)
|
||||||
|
|
||||||
class Person(ObjectType):
|
class Person(ObjectType):
|
||||||
first_name = String()
|
first_name = String()
|
||||||
|
|
|
@ -3,11 +3,6 @@
|
||||||
Scalars
|
Scalars
|
||||||
=======
|
=======
|
||||||
|
|
||||||
Scalar types represent concrete values at the leaves of a query. There are
|
|
||||||
several built in types that Graphene provides out of the box which represent common
|
|
||||||
values in Python. You can also create your own Scalar types to better express
|
|
||||||
values that you might have in your data model.
|
|
||||||
|
|
||||||
All Scalar types accept the following arguments. All are optional:
|
All Scalar types accept the following arguments. All are optional:
|
||||||
|
|
||||||
``name``: *string*
|
``name``: *string*
|
||||||
|
@ -32,39 +27,34 @@ All Scalar types accept the following arguments. All are optional:
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
Built in scalars
|
Base scalars
|
||||||
----------------
|
------------
|
||||||
|
|
||||||
Graphene defines the following base Scalar Types that match the default `GraphQL types <https://graphql.org/learn/schema/#scalar-types>`_:
|
Graphene defines the following base Scalar Types:
|
||||||
|
|
||||||
``graphene.String``
|
``graphene.String``
|
||||||
^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
Represents textual data, represented as UTF-8
|
Represents textual data, represented as UTF-8
|
||||||
character sequences. The String type is most often used by GraphQL to
|
character sequences. The String type is most often used by GraphQL to
|
||||||
represent free-form human-readable text.
|
represent free-form human-readable text.
|
||||||
|
|
||||||
``graphene.Int``
|
``graphene.Int``
|
||||||
^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
Represents non-fractional signed whole numeric
|
Represents non-fractional signed whole numeric
|
||||||
values. Int is a signed 32‐bit integer per the
|
values. Int is a signed 32‐bit integer per the
|
||||||
`GraphQL spec <https://facebook.github.io/graphql/June2018/#sec-Int>`_
|
`GraphQL spec <https://facebook.github.io/graphql/June2018/#sec-Int>`_
|
||||||
|
|
||||||
``graphene.Float``
|
``graphene.Float``
|
||||||
^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
Represents signed double-precision fractional
|
Represents signed double-precision fractional
|
||||||
values as specified by
|
values as specified by
|
||||||
`IEEE 754 <http://en.wikipedia.org/wiki/IEEE_floating_point>`_.
|
`IEEE 754 <http://en.wikipedia.org/wiki/IEEE_floating_point>`_.
|
||||||
|
|
||||||
``graphene.Boolean``
|
``graphene.Boolean``
|
||||||
^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
Represents `true` or `false`.
|
Represents `true` or `false`.
|
||||||
|
|
||||||
``graphene.ID``
|
``graphene.ID``
|
||||||
^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
Represents a unique identifier, often used to
|
Represents a unique identifier, often used to
|
||||||
refetch an object or as key for a cache. The ID type appears in a JSON
|
refetch an object or as key for a cache. The ID type appears in a JSON
|
||||||
|
@ -72,183 +62,24 @@ Graphene defines the following base Scalar Types that match the default `GraphQL
|
||||||
When expected as an input type, any string (such as `"4"`) or integer
|
When expected as an input type, any string (such as `"4"`) or integer
|
||||||
(such as `4`) input value will be accepted as an ID.
|
(such as `4`) input value will be accepted as an ID.
|
||||||
|
|
||||||
----
|
Graphene also provides custom scalars for Dates, Times, and JSON:
|
||||||
|
|
||||||
Graphene also provides custom scalars for common values:
|
``graphene.types.datetime.Date``
|
||||||
|
|
||||||
``graphene.Date``
|
|
||||||
^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
Represents a Date value as specified by `iso8601 <https://en.wikipedia.org/wiki/ISO_8601>`_.
|
Represents a Date value as specified by `iso8601 <https://en.wikipedia.org/wiki/ISO_8601>`_.
|
||||||
|
|
||||||
.. code:: python
|
``graphene.types.datetime.DateTime``
|
||||||
|
|
||||||
import datetime
|
|
||||||
from graphene import Schema, ObjectType, Date
|
|
||||||
|
|
||||||
class Query(ObjectType):
|
|
||||||
one_week_from = Date(required=True, date_input=Date(required=True))
|
|
||||||
|
|
||||||
def resolve_one_week_from(root, info, date_input):
|
|
||||||
assert date_input == datetime.date(2006, 1, 2)
|
|
||||||
return date_input + datetime.timedelta(weeks=1)
|
|
||||||
|
|
||||||
schema = Schema(query=Query)
|
|
||||||
|
|
||||||
results = schema.execute("""
|
|
||||||
query {
|
|
||||||
oneWeekFrom(dateInput: "2006-01-02")
|
|
||||||
}
|
|
||||||
""")
|
|
||||||
|
|
||||||
assert results.data == {"oneWeekFrom": "2006-01-09"}
|
|
||||||
|
|
||||||
|
|
||||||
``graphene.DateTime``
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
Represents a DateTime value as specified by `iso8601 <https://en.wikipedia.org/wiki/ISO_8601>`_.
|
Represents a DateTime value as specified by `iso8601 <https://en.wikipedia.org/wiki/ISO_8601>`_.
|
||||||
|
|
||||||
.. code:: python
|
``graphene.types.datetime.Time``
|
||||||
|
|
||||||
import datetime
|
|
||||||
from graphene import Schema, ObjectType, DateTime
|
|
||||||
|
|
||||||
class Query(ObjectType):
|
|
||||||
one_hour_from = DateTime(required=True, datetime_input=DateTime(required=True))
|
|
||||||
|
|
||||||
def resolve_one_hour_from(root, info, datetime_input):
|
|
||||||
assert datetime_input == datetime.datetime(2006, 1, 2, 15, 4, 5)
|
|
||||||
return datetime_input + datetime.timedelta(hours=1)
|
|
||||||
|
|
||||||
schema = Schema(query=Query)
|
|
||||||
|
|
||||||
results = schema.execute("""
|
|
||||||
query {
|
|
||||||
oneHourFrom(datetimeInput: "2006-01-02T15:04:05")
|
|
||||||
}
|
|
||||||
""")
|
|
||||||
|
|
||||||
assert results.data == {"oneHourFrom": "2006-01-02T16:04:05"}
|
|
||||||
|
|
||||||
``graphene.Time``
|
|
||||||
^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
Represents a Time value as specified by `iso8601 <https://en.wikipedia.org/wiki/ISO_8601>`_.
|
Represents a Time value as specified by `iso8601 <https://en.wikipedia.org/wiki/ISO_8601>`_.
|
||||||
|
|
||||||
.. code:: python
|
``graphene.types.json.JSONString``
|
||||||
|
|
||||||
import datetime
|
|
||||||
from graphene import Schema, ObjectType, Time
|
|
||||||
|
|
||||||
class Query(ObjectType):
|
|
||||||
one_hour_from = Time(required=True, time_input=Time(required=True))
|
|
||||||
|
|
||||||
def resolve_one_hour_from(root, info, time_input):
|
|
||||||
assert time_input == datetime.time(15, 4, 5)
|
|
||||||
tmp_time_input = datetime.datetime.combine(datetime.date(1, 1, 1), time_input)
|
|
||||||
return (tmp_time_input + datetime.timedelta(hours=1)).time()
|
|
||||||
|
|
||||||
schema = Schema(query=Query)
|
|
||||||
|
|
||||||
results = schema.execute("""
|
|
||||||
query {
|
|
||||||
oneHourFrom(timeInput: "15:04:05")
|
|
||||||
}
|
|
||||||
""")
|
|
||||||
|
|
||||||
assert results.data == {"oneHourFrom": "16:04:05"}
|
|
||||||
|
|
||||||
``graphene.Decimal``
|
|
||||||
^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
Represents a Python Decimal value.
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
import decimal
|
|
||||||
from graphene import Schema, ObjectType, Decimal
|
|
||||||
|
|
||||||
class Query(ObjectType):
|
|
||||||
add_one_to = Decimal(required=True, decimal_input=Decimal(required=True))
|
|
||||||
|
|
||||||
def resolve_add_one_to(root, info, decimal_input):
|
|
||||||
assert decimal_input == decimal.Decimal("10.50")
|
|
||||||
return decimal_input + decimal.Decimal("1")
|
|
||||||
|
|
||||||
schema = Schema(query=Query)
|
|
||||||
|
|
||||||
results = schema.execute("""
|
|
||||||
query {
|
|
||||||
addOneTo(decimalInput: "10.50")
|
|
||||||
}
|
|
||||||
""")
|
|
||||||
|
|
||||||
assert results.data == {"addOneTo": "11.50"}
|
|
||||||
|
|
||||||
``graphene.JSONString``
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
Represents a JSON string.
|
Represents a JSON string.
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
from graphene import Schema, ObjectType, JSONString, String
|
|
||||||
|
|
||||||
class Query(ObjectType):
|
|
||||||
update_json_key = JSONString(
|
|
||||||
required=True,
|
|
||||||
json_input=JSONString(required=True),
|
|
||||||
key=String(required=True),
|
|
||||||
value=String(required=True)
|
|
||||||
)
|
|
||||||
|
|
||||||
def resolve_update_json_key(root, info, json_input, key, value):
|
|
||||||
assert json_input == {"name": "Jane"}
|
|
||||||
json_input[key] = value
|
|
||||||
return json_input
|
|
||||||
|
|
||||||
schema = Schema(query=Query)
|
|
||||||
|
|
||||||
results = schema.execute("""
|
|
||||||
query {
|
|
||||||
updateJsonKey(jsonInput: "{\\"name\\": \\"Jane\\"}", key: "name", value: "Beth")
|
|
||||||
}
|
|
||||||
""")
|
|
||||||
|
|
||||||
assert results.data == {"updateJsonKey": "{\"name\": \"Beth\"}"}
|
|
||||||
|
|
||||||
|
|
||||||
``graphene.Base64``
|
|
||||||
^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
Represents a Base64 encoded string.
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
from graphene import Schema, ObjectType, Base64
|
|
||||||
|
|
||||||
class Query(ObjectType):
|
|
||||||
increment_encoded_id = Base64(
|
|
||||||
required=True,
|
|
||||||
base64_input=Base64(required=True),
|
|
||||||
)
|
|
||||||
|
|
||||||
def resolve_increment_encoded_id(root, info, base64_input):
|
|
||||||
assert base64_input == "4"
|
|
||||||
return int(base64_input) + 1
|
|
||||||
|
|
||||||
schema = Schema(query=Query)
|
|
||||||
|
|
||||||
results = schema.execute("""
|
|
||||||
query {
|
|
||||||
incrementEncodedId(base64Input: "NA==")
|
|
||||||
}
|
|
||||||
""")
|
|
||||||
|
|
||||||
assert results.data == {"incrementEncodedId": "NQ=="}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
Custom scalars
|
Custom scalars
|
||||||
--------------
|
--------------
|
||||||
|
@ -270,8 +101,8 @@ The following is an example for creating a DateTime scalar:
|
||||||
return dt.isoformat()
|
return dt.isoformat()
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def parse_literal(node, _variables=None):
|
def parse_literal(node):
|
||||||
if isinstance(node, ast.StringValueNode):
|
if isinstance(node, ast.StringValue):
|
||||||
return datetime.datetime.strptime(
|
return datetime.datetime.strptime(
|
||||||
node.value, "%Y-%m-%dT%H:%M:%S.%f")
|
node.value, "%Y-%m-%dT%H:%M:%S.%f")
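
A self-contained sketch of such a scalar, assuming the surrounding class follows the usual ``Scalar`` pattern with ``serialize``, ``parse_value`` and ``parse_literal`` static methods:

.. code:: python

    import datetime

    from graphene.types import Scalar
    from graphql.language import ast


    class DateTime(Scalar):
        """A custom DateTime scalar serialized as an ISO 8601 string."""

        @staticmethod
        def serialize(dt):
            return dt.isoformat()

        @staticmethod
        def parse_value(value):
            return datetime.datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%f")

        @staticmethod
        def parse_literal(node, _variables=None):
            if isinstance(node, ast.StringValueNode):
                return datetime.datetime.strptime(
                    node.value, "%Y-%m-%dT%H:%M:%S.%f")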
|
||||||
|
|
||||||
|
|
|
@ -1,11 +1,11 @@
|
||||||
Schema
|
Schema
|
||||||
======
|
======
|
||||||
|
|
||||||
A GraphQL **Schema** defines the types and relationships between **Fields** in your API.
|
A GraphQL **Schema** defines the types and relationship between **Fields** in your API.
|
||||||
|
|
||||||
A Schema is created by supplying the root :ref:`ObjectType` of each operation, query (mandatory), mutation and subscription.
|
A Schema is created by supplying the root :ref:`ObjectType` of each operation, query (mandatory), mutation and subscription.
|
||||||
|
|
||||||
Schema will collect all type definitions related to the root operations and then supply them to the validator and executor.
|
Schema will collect all type definitions related to the root operations and then supplied to the validator and executor.
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
|
||||||
|
|
||||||
|
@ -15,11 +15,11 @@ Schema will collect all type definitions related to the root operations and then
|
||||||
subscription=MyRootSubscription
|
subscription=MyRootSubscription
|
||||||
)
|
)
|
||||||
|
|
||||||
A Root Query is just a special :ref:`ObjectType` that defines the fields that are the entrypoint for your API. Root Mutation and Root Subscription are similar to Root Query, but for different operation types:
|
A Root Query is just a special :ref:`ObjectType` that :ref:`defines the fields <Scalars>` that are the entrypoint for your API. Root Mutation and Root Subscription are similar to Root Query, but for different operation types:
|
||||||
|
|
||||||
* Query fetches data
|
* Query fetches data
|
||||||
* Mutation changes data and retrieves the changes
|
* Mutation to changes data and retrieve the changes
|
||||||
* Subscription sends changes to clients in real-time
|
* Subscription to sends changes to clients in real time
|
||||||
|
|
||||||
Review the `GraphQL documentation on Schema`_ for a brief overview of fields, schema and operations.
|
Review the `GraphQL documentation on Schema`_ for a brief overview of fields, schema and operations.
|
||||||
|
|
||||||
|
@ -44,7 +44,7 @@ There are some cases where the schema cannot access all of the types that we pla
|
||||||
For example, when a field returns an ``Interface``, the schema doesn't know about any of the
|
For example, when a field returns an ``Interface``, the schema doesn't know about any of the
|
||||||
implementations.
|
implementations.
|
||||||
|
|
||||||
In this case, we need to use the ``types`` argument when creating the Schema:
|
In this case, we need to use the ``types`` argument when creating the Schema.
|
||||||
|
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
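
    # A minimal sketch with placeholder type names: types that are not
    # reachable from the root query can be registered explicitly via `types`.
    my_schema = Schema(
        query=MyRootQuery,
        types=[SomeExtraObjectType]
    )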
|
||||||
|
@ -56,14 +56,14 @@ In this case, we need to use the ``types`` argument when creating the Schema:
|
||||||
|
|
||||||
.. _SchemaAutoCamelCase:
|
.. _SchemaAutoCamelCase:
|
||||||
|
|
||||||
Auto camelCase field names
|
Auto CamelCase field names
|
||||||
--------------------------
|
--------------------------
|
||||||
|
|
||||||
By default all field and argument names (that are not
|
By default all field and argument names (that are not
|
||||||
explicitly set with the ``name`` arg) will be converted from
|
explicitly set with the ``name`` arg) will be converted from
|
||||||
``snake_case`` to ``camelCase`` (as the API is usually being consumed by a js/mobile client)
|
``snake_case`` to ``camelCase`` (as the API is usually being consumed by a js/mobile client)
|
||||||
|
|
||||||
For example with the ObjectType the ``last_name`` field name is converted to ``lastName``:
|
For example with the ObjectType
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
|
||||||
|
|
||||||
|
@ -71,10 +71,12 @@ For example with the ObjectType the ``last_name`` field name is converted to ``l
|
||||||
last_name = graphene.String()
|
last_name = graphene.String()
|
||||||
other_name = graphene.String(name='_other_Name')
|
other_name = graphene.String(name='_other_Name')
|
||||||
|
|
||||||
|
the ``last_name`` field name is converted to ``lastName``.
|
||||||
|
|
||||||
In case you don't want to apply this transformation, provide a ``name`` argument to the field constructor.
|
In case you don't want to apply this transformation, provide a ``name`` argument to the field constructor.
|
||||||
``other_name`` converts to ``_other_Name`` (without further transformations).
|
``other_name`` converts to ``_other_Name`` (without further transformations).
|
||||||
|
|
||||||
Your query should look like:
|
Your query should look like
|
||||||
|
|
||||||
.. code::
|
.. code::
|
||||||
|
|
||||||
|
@ -84,7 +86,7 @@ Your query should look like:
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
To disable this behavior, set the ``auto_camelcase`` to ``False`` upon schema instantiation:
|
To disable this behavior, set the ``auto_camelcase`` to ``False`` upon schema instantiation.
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
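
    # A minimal sketch, assuming the ObjectType above is named Person:
    # with auto_camelcase disabled, fields keep their snake_case names
    # (e.g. `last_name` instead of `lastName`).
    my_schema = graphene.Schema(
        query=Person,
        auto_camelcase=False,
    )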
|
||||||
|
|
||||||
|
|
|
@ -7,7 +7,7 @@ to specify any common fields between the types.
|
||||||
The basics:
|
The basics:
|
||||||
|
|
||||||
- Each Union is a Python class that inherits from ``graphene.Union``.
|
- Each Union is a Python class that inherits from ``graphene.Union``.
|
||||||
- Unions don't have any fields on them, just links to the possible ObjectTypes.
- Unions don't have any fields on them, just links to the possible objecttypes.
|
||||||
|
|
||||||
Quick example
|
Quick example
|
||||||
-------------
|
-------------
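
A minimal sketch of a Union, using two hypothetical ObjectTypes (the full example from the docs is not shown here):

.. code:: python

    import graphene

    class Human(graphene.ObjectType):
        name = graphene.String()

    class Droid(graphene.ObjectType):
        name = graphene.String()
        primary_function = graphene.String()

    class SearchResult(graphene.Union):
        class Meta:
            types = (Human, Droid)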
|
||||||
|
|
|
@ -7,7 +7,7 @@ class GeoInput(graphene.InputObjectType):
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def latlng(self):
|
def latlng(self):
|
||||||
return f"({self.lat},{self.lng})"
|
return "({},{})".format(self.lat, self.lng)
|
||||||
|
|
||||||
|
|
||||||
class Address(graphene.ObjectType):
|
class Address(graphene.ObjectType):
|
||||||
|
@ -17,7 +17,7 @@ class Address(graphene.ObjectType):
|
||||||
class Query(graphene.ObjectType):
|
class Query(graphene.ObjectType):
|
||||||
address = graphene.Field(Address, geo=GeoInput(required=True))
|
address = graphene.Field(Address, geo=GeoInput(required=True))
|
||||||
|
|
||||||
def resolve_address(root, info, geo):
|
def resolve_address(self, info, geo):
|
||||||
return Address(latlng=geo.latlng)
|
return Address(latlng=geo.latlng)
|
||||||
|
|
||||||
|
|
||||||
|
@ -27,7 +27,7 @@ class CreateAddress(graphene.Mutation):
|
||||||
|
|
||||||
Output = Address
|
Output = Address
|
||||||
|
|
||||||
def mutate(root, info, geo):
|
def mutate(self, info, geo):
|
||||||
return Address(latlng=geo.latlng)
|
return Address(latlng=geo.latlng)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -9,7 +9,7 @@ class User(graphene.ObjectType):
|
||||||
class Query(graphene.ObjectType):
|
class Query(graphene.ObjectType):
|
||||||
me = graphene.Field(User)
|
me = graphene.Field(User)
|
||||||
|
|
||||||
def resolve_me(root, info):
|
def resolve_me(self, info):
|
||||||
return info.context["user"]
|
return info.context["user"]
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -8,9 +8,10 @@ class Patron(graphene.ObjectType):
|
||||||
|
|
||||||
|
|
||||||
class Query(graphene.ObjectType):
|
class Query(graphene.ObjectType):
|
||||||
|
|
||||||
patron = graphene.Field(Patron)
|
patron = graphene.Field(Patron)
|
||||||
|
|
||||||
def resolve_patron(root, info):
|
def resolve_patron(self, info):
|
||||||
return Patron(id=1, name="Syrus", age=27)
|
return Patron(id=1, name="Syrus", age=27)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -39,13 +39,13 @@ class Query(graphene.ObjectType):
|
||||||
human = graphene.Field(Human, id=graphene.String())
|
human = graphene.Field(Human, id=graphene.String())
|
||||||
droid = graphene.Field(Droid, id=graphene.String())
|
droid = graphene.Field(Droid, id=graphene.String())
|
||||||
|
|
||||||
def resolve_hero(root, info, episode=None):
|
def resolve_hero(self, info, episode=None):
|
||||||
return get_hero(episode)
|
return get_hero(episode)
|
||||||
|
|
||||||
def resolve_human(root, info, id):
|
def resolve_human(self, info, id):
|
||||||
return get_human(id)
|
return get_human(id)
|
||||||
|
|
||||||
def resolve_droid(root, info, id):
|
def resolve_droid(self, info, id):
|
||||||
return get_droid(id)
|
return get_droid(id)
|
||||||
|
|
||||||
|
|
||||||
|
|
100
examples/starwars/tests/snapshots/snap_test_query.py
Normal file
100
examples/starwars/tests/snapshots/snap_test_query.py
Normal file
|
@ -0,0 +1,100 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# snapshottest: v1 - https://goo.gl/zC4yUc
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
from snapshottest import Snapshot
|
||||||
|
|
||||||
|
snapshots = Snapshot()
|
||||||
|
|
||||||
|
snapshots["test_hero_name_query 1"] = {"data": {"hero": {"name": "R2-D2"}}}
|
||||||
|
|
||||||
|
snapshots["test_hero_name_and_friends_query 1"] = {
|
||||||
|
"data": {
|
||||||
|
"hero": {
|
||||||
|
"id": "2001",
|
||||||
|
"name": "R2-D2",
|
||||||
|
"friends": [
|
||||||
|
{"name": "Luke Skywalker"},
|
||||||
|
{"name": "Han Solo"},
|
||||||
|
{"name": "Leia Organa"},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
snapshots["test_nested_query 1"] = {
|
||||||
|
"data": {
|
||||||
|
"hero": {
|
||||||
|
"name": "R2-D2",
|
||||||
|
"friends": [
|
||||||
|
{
|
||||||
|
"name": "Luke Skywalker",
|
||||||
|
"appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"],
|
||||||
|
"friends": [
|
||||||
|
{"name": "Han Solo"},
|
||||||
|
{"name": "Leia Organa"},
|
||||||
|
{"name": "C-3PO"},
|
||||||
|
{"name": "R2-D2"},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Han Solo",
|
||||||
|
"appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"],
|
||||||
|
"friends": [
|
||||||
|
{"name": "Luke Skywalker"},
|
||||||
|
{"name": "Leia Organa"},
|
||||||
|
{"name": "R2-D2"},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Leia Organa",
|
||||||
|
"appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"],
|
||||||
|
"friends": [
|
||||||
|
{"name": "Luke Skywalker"},
|
||||||
|
{"name": "Han Solo"},
|
||||||
|
{"name": "C-3PO"},
|
||||||
|
{"name": "R2-D2"},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
snapshots["test_fetch_luke_query 1"] = {"data": {"human": {"name": "Luke Skywalker"}}}
|
||||||
|
|
||||||
|
snapshots["test_fetch_some_id_query 1"] = {
|
||||||
|
"data": {"human": {"name": "Luke Skywalker"}}
|
||||||
|
}
|
||||||
|
|
||||||
|
snapshots["test_fetch_some_id_query2 1"] = {"data": {"human": {"name": "Han Solo"}}}
|
||||||
|
|
||||||
|
snapshots["test_invalid_id_query 1"] = {"data": {"human": None}}
|
||||||
|
|
||||||
|
snapshots["test_fetch_luke_aliased 1"] = {"data": {"luke": {"name": "Luke Skywalker"}}}
|
||||||
|
|
||||||
|
snapshots["test_fetch_luke_and_leia_aliased 1"] = {
|
||||||
|
"data": {"luke": {"name": "Luke Skywalker"}, "leia": {"name": "Leia Organa"}}
|
||||||
|
}
|
||||||
|
|
||||||
|
snapshots["test_duplicate_fields 1"] = {
|
||||||
|
"data": {
|
||||||
|
"luke": {"name": "Luke Skywalker", "homePlanet": "Tatooine"},
|
||||||
|
"leia": {"name": "Leia Organa", "homePlanet": "Alderaan"},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
snapshots["test_use_fragment 1"] = {
|
||||||
|
"data": {
|
||||||
|
"luke": {"name": "Luke Skywalker", "homePlanet": "Tatooine"},
|
||||||
|
"leia": {"name": "Leia Organa", "homePlanet": "Alderaan"},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
snapshots["test_check_type_of_r2 1"] = {
|
||||||
|
"data": {"hero": {"__typename": "Droid", "name": "R2-D2"}}
|
||||||
|
}
|
||||||
|
|
||||||
|
snapshots["test_check_type_of_luke 1"] = {
|
||||||
|
"data": {"hero": {"__typename": "Human", "name": "Luke Skywalker"}}
|
||||||
|
}
|
|
@ -8,19 +8,19 @@ setup()
|
||||||
client = Client(schema)
|
client = Client(schema)
|
||||||
|
|
||||||
|
|
||||||
def test_hero_name_query():
|
def test_hero_name_query(snapshot):
|
||||||
result = client.execute("""
|
query = """
|
||||||
query HeroNameQuery {
|
query HeroNameQuery {
|
||||||
hero {
|
hero {
|
||||||
name
|
name
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
""")
|
"""
|
||||||
assert result == {"data": {"hero": {"name": "R2-D2"}}}
|
snapshot.assert_match(client.execute(query))
|
||||||
|
|
||||||
|
|
||||||
def test_hero_name_and_friends_query():
|
def test_hero_name_and_friends_query(snapshot):
|
||||||
result = client.execute("""
|
query = """
|
||||||
query HeroNameAndFriendsQuery {
|
query HeroNameAndFriendsQuery {
|
||||||
hero {
|
hero {
|
||||||
id
|
id
|
||||||
|
@ -30,24 +30,12 @@ def test_hero_name_and_friends_query():
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
""")
|
"""
|
||||||
assert result == {
|
snapshot.assert_match(client.execute(query))
|
||||||
"data": {
|
|
||||||
"hero": {
|
|
||||||
"id": "2001",
|
|
||||||
"name": "R2-D2",
|
|
||||||
"friends": [
|
|
||||||
{"name": "Luke Skywalker"},
|
|
||||||
{"name": "Han Solo"},
|
|
||||||
{"name": "Leia Organa"},
|
|
||||||
],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def test_nested_query():
|
def test_nested_query(snapshot):
|
||||||
result = client.execute("""
|
query = """
|
||||||
query NestedQuery {
|
query NestedQuery {
|
||||||
hero {
|
hero {
|
||||||
name
|
name
|
||||||
|
@ -60,113 +48,70 @@ def test_nested_query():
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
""")
|
"""
|
||||||
assert result == {
|
snapshot.assert_match(client.execute(query))
|
||||||
"data": {
|
|
||||||
"hero": {
|
|
||||||
"name": "R2-D2",
|
|
||||||
"friends": [
|
|
||||||
{
|
|
||||||
"name": "Luke Skywalker",
|
|
||||||
"appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"],
|
|
||||||
"friends": [
|
|
||||||
{"name": "Han Solo"},
|
|
||||||
{"name": "Leia Organa"},
|
|
||||||
{"name": "C-3PO"},
|
|
||||||
{"name": "R2-D2"},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Han Solo",
|
|
||||||
"appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"],
|
|
||||||
"friends": [
|
|
||||||
{"name": "Luke Skywalker"},
|
|
||||||
{"name": "Leia Organa"},
|
|
||||||
{"name": "R2-D2"},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Leia Organa",
|
|
||||||
"appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"],
|
|
||||||
"friends": [
|
|
||||||
{"name": "Luke Skywalker"},
|
|
||||||
{"name": "Han Solo"},
|
|
||||||
{"name": "C-3PO"},
|
|
||||||
{"name": "R2-D2"},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def test_fetch_luke_query():
|
def test_fetch_luke_query(snapshot):
|
||||||
result = client.execute("""
|
query = """
|
||||||
query FetchLukeQuery {
|
query FetchLukeQuery {
|
||||||
human(id: "1000") {
|
human(id: "1000") {
|
||||||
name
|
name
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
""")
|
"""
|
||||||
assert result == {"data": {"human": {"name": "Luke Skywalker"}}}
|
snapshot.assert_match(client.execute(query))
|
||||||
|
|
||||||
|
|
||||||
def test_fetch_some_id_query():
|
def test_fetch_some_id_query(snapshot):
|
||||||
result = client.execute(
|
query = """
|
||||||
"""
|
|
||||||
query FetchSomeIDQuery($someId: String!) {
|
query FetchSomeIDQuery($someId: String!) {
|
||||||
human(id: $someId) {
|
human(id: $someId) {
|
||||||
name
|
name
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
""",
|
"""
|
||||||
variables={"someId": "1000"},
|
params = {"someId": "1000"}
|
||||||
)
|
snapshot.assert_match(client.execute(query, variables=params))
|
||||||
assert result == {"data": {"human": {"name": "Luke Skywalker"}}}
|
|
||||||
|
|
||||||
|
|
||||||
def test_fetch_some_id_query2():
|
def test_fetch_some_id_query2(snapshot):
|
||||||
result = client.execute(
|
query = """
|
||||||
"""
|
|
||||||
query FetchSomeIDQuery($someId: String!) {
|
query FetchSomeIDQuery($someId: String!) {
|
||||||
human(id: $someId) {
|
human(id: $someId) {
|
||||||
name
|
name
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
""",
|
"""
|
||||||
variables={"someId": "1002"},
|
params = {"someId": "1002"}
|
||||||
)
|
snapshot.assert_match(client.execute(query, variables=params))
|
||||||
assert result == {"data": {"human": {"name": "Han Solo"}}}
|
|
||||||
|
|
||||||
|
|
||||||
def test_invalid_id_query():
|
def test_invalid_id_query(snapshot):
|
||||||
result = client.execute(
|
query = """
|
||||||
"""
|
|
||||||
query humanQuery($id: String!) {
|
query humanQuery($id: String!) {
|
||||||
human(id: $id) {
|
human(id: $id) {
|
||||||
name
|
name
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
""",
|
"""
|
||||||
variables={"id": "not a valid id"},
|
params = {"id": "not a valid id"}
|
||||||
)
|
snapshot.assert_match(client.execute(query, variables=params))
|
||||||
assert result == {"data": {"human": None}}
|
|
||||||
|
|
||||||
|
|
||||||
def test_fetch_luke_aliased():
|
def test_fetch_luke_aliased(snapshot):
|
||||||
result = client.execute("""
|
query = """
|
||||||
query FetchLukeAliased {
|
query FetchLukeAliased {
|
||||||
luke: human(id: "1000") {
|
luke: human(id: "1000") {
|
||||||
name
|
name
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
""")
|
"""
|
||||||
assert result == {"data": {"luke": {"name": "Luke Skywalker"}}}
|
snapshot.assert_match(client.execute(query))
|
||||||
|
|
||||||
|
|
||||||
def test_fetch_luke_and_leia_aliased():
|
def test_fetch_luke_and_leia_aliased(snapshot):
|
||||||
result = client.execute("""
|
query = """
|
||||||
query FetchLukeAndLeiaAliased {
|
query FetchLukeAndLeiaAliased {
|
||||||
luke: human(id: "1000") {
|
luke: human(id: "1000") {
|
||||||
name
|
name
|
||||||
|
@ -175,14 +120,12 @@ def test_fetch_luke_and_leia_aliased():
|
||||||
name
|
name
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
""")
|
"""
|
||||||
assert result == {
|
snapshot.assert_match(client.execute(query))
|
||||||
"data": {"luke": {"name": "Luke Skywalker"}, "leia": {"name": "Leia Organa"}}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def test_duplicate_fields():
|
def test_duplicate_fields(snapshot):
|
||||||
result = client.execute("""
|
query = """
|
||||||
query DuplicateFields {
|
query DuplicateFields {
|
||||||
luke: human(id: "1000") {
|
luke: human(id: "1000") {
|
||||||
name
|
name
|
||||||
|
@ -193,17 +136,12 @@ def test_duplicate_fields():
|
||||||
homePlanet
|
homePlanet
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
""")
|
"""
|
||||||
assert result == {
|
snapshot.assert_match(client.execute(query))
|
||||||
"data": {
|
|
||||||
"luke": {"name": "Luke Skywalker", "homePlanet": "Tatooine"},
|
|
||||||
"leia": {"name": "Leia Organa", "homePlanet": "Alderaan"},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def test_use_fragment():
|
def test_use_fragment(snapshot):
|
||||||
result = client.execute("""
|
query = """
|
||||||
query UseFragment {
|
query UseFragment {
|
||||||
luke: human(id: "1000") {
|
luke: human(id: "1000") {
|
||||||
...HumanFragment
|
...HumanFragment
|
||||||
|
@ -216,36 +154,29 @@ def test_use_fragment():
|
||||||
name
|
name
|
||||||
homePlanet
|
homePlanet
|
||||||
}
|
}
|
||||||
""")
|
"""
|
||||||
assert result == {
|
snapshot.assert_match(client.execute(query))
|
||||||
"data": {
|
|
||||||
"luke": {"name": "Luke Skywalker", "homePlanet": "Tatooine"},
|
|
||||||
"leia": {"name": "Leia Organa", "homePlanet": "Alderaan"},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def test_check_type_of_r2():
|
def test_check_type_of_r2(snapshot):
|
||||||
result = client.execute("""
|
query = """
|
||||||
query CheckTypeOfR2 {
|
query CheckTypeOfR2 {
|
||||||
hero {
|
hero {
|
||||||
__typename
|
__typename
|
||||||
name
|
name
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
""")
|
"""
|
||||||
assert result == {"data": {"hero": {"__typename": "Droid", "name": "R2-D2"}}}
|
snapshot.assert_match(client.execute(query))
|
||||||
|
|
||||||
|
|
||||||
def test_check_type_of_luke():
|
def test_check_type_of_luke(snapshot):
|
||||||
result = client.execute("""
|
query = """
|
||||||
query CheckTypeOfLuke {
|
query CheckTypeOfLuke {
|
||||||
hero(episode: EMPIRE) {
|
hero(episode: EMPIRE) {
|
||||||
__typename
|
__typename
|
||||||
name
|
name
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
""")
|
"""
|
||||||
assert result == {
|
snapshot.assert_match(client.execute(query))
|
||||||
"data": {"hero": {"__typename": "Human", "name": "Luke Skywalker"}}
|
|
||||||
}
|
|
||||||
|
|
|
@ -64,10 +64,10 @@ class Query(graphene.ObjectType):
|
||||||
empire = graphene.Field(Faction)
|
empire = graphene.Field(Faction)
|
||||||
node = relay.Node.Field()
|
node = relay.Node.Field()
|
||||||
|
|
||||||
def resolve_rebels(root, info):
|
def resolve_rebels(self, info):
|
||||||
return get_rebels()
|
return get_rebels()
|
||||||
|
|
||||||
def resolve_empire(root, info):
|
def resolve_empire(self, info):
|
||||||
return get_empire()
|
return get_empire()
|
||||||
|
|
||||||
|
|
||||||
|
|
0
examples/starwars_relay/tests/snapshots/__init__.py
Normal file
0
examples/starwars_relay/tests/snapshots/__init__.py
Normal file
|
@ -0,0 +1,26 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# snapshottest: v1 - https://goo.gl/zC4yUc
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
from snapshottest import Snapshot
|
||||||
|
|
||||||
|
snapshots = Snapshot()
|
||||||
|
|
||||||
|
snapshots["test_correct_fetch_first_ship_rebels 1"] = {
|
||||||
|
"data": {
|
||||||
|
"rebels": {
|
||||||
|
"name": "Alliance to Restore the Republic",
|
||||||
|
"ships": {
|
||||||
|
"pageInfo": {
|
||||||
|
"startCursor": "YXJyYXljb25uZWN0aW9uOjA=",
|
||||||
|
"endCursor": "YXJyYXljb25uZWN0aW9uOjA=",
|
||||||
|
"hasNextPage": True,
|
||||||
|
"hasPreviousPage": False,
|
||||||
|
},
|
||||||
|
"edges": [
|
||||||
|
{"cursor": "YXJyYXljb25uZWN0aW9uOjA=", "node": {"name": "X-Wing"}}
|
||||||
|
],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,28 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# snapshottest: v1 - https://goo.gl/zC4yUc
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
from snapshottest import Snapshot
|
||||||
|
|
||||||
|
snapshots = Snapshot()
|
||||||
|
|
||||||
|
snapshots["test_mutations 1"] = {
|
||||||
|
"data": {
|
||||||
|
"introduceShip": {
|
||||||
|
"ship": {"id": "U2hpcDo5", "name": "Peter"},
|
||||||
|
"faction": {
|
||||||
|
"name": "Alliance to Restore the Republic",
|
||||||
|
"ships": {
|
||||||
|
"edges": [
|
||||||
|
{"node": {"id": "U2hpcDox", "name": "X-Wing"}},
|
||||||
|
{"node": {"id": "U2hpcDoy", "name": "Y-Wing"}},
|
||||||
|
{"node": {"id": "U2hpcDoz", "name": "A-Wing"}},
|
||||||
|
{"node": {"id": "U2hpcDo0", "name": "Millennium Falcon"}},
|
||||||
|
{"node": {"id": "U2hpcDo1", "name": "Home One"}},
|
||||||
|
{"node": {"id": "U2hpcDo5", "name": "Peter"}},
|
||||||
|
]
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@@ -0,0 +1,119 @@
# -*- coding: utf-8 -*-
# snapshottest: v1 - https://goo.gl/zC4yUc
from __future__ import unicode_literals

from snapshottest import Snapshot


snapshots = Snapshot()

snapshots["test_correctly_fetches_id_name_rebels 1"] = {
    "data": {
        "rebels": {"id": "RmFjdGlvbjox", "name": "Alliance to Restore the Republic"}
    }
}

snapshots["test_correctly_refetches_rebels 1"] = {
    "data": {"node": {"id": "RmFjdGlvbjox", "name": "Alliance to Restore the Republic"}}
}

snapshots["test_correctly_fetches_id_name_empire 1"] = {
    "data": {"empire": {"id": "RmFjdGlvbjoy", "name": "Galactic Empire"}}
}

snapshots["test_correctly_refetches_empire 1"] = {
    "data": {"node": {"id": "RmFjdGlvbjoy", "name": "Galactic Empire"}}
}

snapshots["test_correctly_refetches_xwing 1"] = {
    "data": {"node": {"id": "U2hpcDox", "name": "X-Wing"}}
}

snapshots[
    "test_str_schema 1"
] = '''"""A faction in the Star Wars saga"""
type Faction implements Node {
  """The ID of the object"""
  id: ID!

  """The name of the faction."""
  name: String

  """The ships used by the faction."""
  ships(before: String = null, after: String = null, first: Int = null, last: Int = null): ShipConnection
}

input IntroduceShipInput {
  shipName: String!
  factionId: String!
  clientMutationId: String
}

type IntroduceShipPayload {
  ship: Ship
  faction: Faction
  clientMutationId: String
}

type Mutation {
  introduceShip(input: IntroduceShipInput!): IntroduceShipPayload
}

"""An object with an ID"""
interface Node {
  """The ID of the object"""
  id: ID!
}

"""
The Relay compliant `PageInfo` type, containing data necessary to paginate this connection.
"""
type PageInfo {
  """When paginating forwards, are there more items?"""
  hasNextPage: Boolean!

  """When paginating backwards, are there more items?"""
  hasPreviousPage: Boolean!

  """When paginating backwards, the cursor to continue."""
  startCursor: String

  """When paginating forwards, the cursor to continue."""
  endCursor: String
}

type Query {
  rebels: Faction
  empire: Faction
  node(
    """The ID of the object"""
    id: ID!
  ): Node
}

"""A ship in the Star Wars saga"""
type Ship implements Node {
  """The ID of the object"""
  id: ID!

  """The name of the ship."""
  name: String
}

type ShipConnection {
  """Pagination data for this connection."""
  pageInfo: PageInfo!

  """Contains the nodes in this connection."""
  edges: [ShipEdge]!
}

"""A Relay edge containing a `Ship` and its cursor."""
type ShipEdge {
  """The item at the end of the edge"""
  node: Ship

  """A cursor for use in pagination"""
  cursor: String!
}
'''
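For context, a minimal sketch (not part of the compare, schema import path assumed) of how snapshottest drives files like the one above: the pytest `snapshot` fixture writes `snap_*.py` modules on the first run and compares against them afterwards.

# Hypothetical test module; the `snapshot` fixture is provided by snapshottest's pytest plugin.
from graphene.test import Client
from .schema import schema  # assumed to live next to the tests

client = Client(schema)


def test_fetch_rebels(snapshot):
    # First run records the result into the snapshot file; later runs assert equality.
    snapshot.assert_match(client.execute("{ rebels { name } }"))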
@@ -8,46 +8,26 @@ setup()
client = Client(schema)


def test_correct_fetch_first_ship_rebels():
def test_correct_fetch_first_ship_rebels(snapshot):
    result = client.execute("""
    query = """
    query RebelsShipsQuery {
      rebels {
        name,
        ships(first: 1) {
          pageInfo {
            startCursor
            endCursor
            hasNextPage
            hasPreviousPage
          }
          edges {
            cursor
            node {
              name
            }
          }
        }
      }
    }
    """)
    """
    assert result == {
        "data": {
            "rebels": {
                "name": "Alliance to Restore the Republic",
                "ships": {
                    "pageInfo": {
                        "startCursor": "YXJyYXljb25uZWN0aW9uOjA=",
                        "endCursor": "YXJyYXljb25uZWN0aW9uOjA=",
                        "hasNextPage": True,
                        "hasPreviousPage": False,
                    },
                    "edges": [
                        {
                            "cursor": "YXJyYXljb25uZWN0aW9uOjA=",
                            "node": {"name": "X-Wing"},
                        }
                    ],
                },
            }
        }
    }
    snapshot.assert_match(client.execute(query))
@@ -8,45 +8,26 @@ setup()
client = Client(schema)


def test_mutations():
def test_mutations(snapshot):
    result = client.execute("""
    query = """
    mutation MyMutation {
      introduceShip(input:{clientMutationId:"abc", shipName: "Peter", factionId: "1"}) {
        ship {
          id
          name
        }
        faction {
          name
          ships {
            edges {
              node {
                id
                name
              }
            }
          }
        }
      }
    }
    """)
    """
    assert result == {
        "data": {
            "introduceShip": {
                "ship": {"id": "U2hpcDo5", "name": "Peter"},
                "faction": {
                    "name": "Alliance to Restore the Republic",
                    "ships": {
                        "edges": [
                            {"node": {"id": "U2hpcDox", "name": "X-Wing"}},
                            {"node": {"id": "U2hpcDoy", "name": "Y-Wing"}},
                            {"node": {"id": "U2hpcDoz", "name": "A-Wing"}},
                            {"node": {"id": "U2hpcDo0", "name": "Millennium Falcon"}},
                            {"node": {"id": "U2hpcDo1", "name": "Home One"}},
                            {"node": {"id": "U2hpcDo5", "name": "Peter"}},
                        ]
                    },
                },
            }
        }
    }
    snapshot.assert_match(client.execute(query))
@@ -1,5 +1,3 @@
import textwrap

from graphene.test import Client

from ..data import setup
@@ -10,115 +8,24 @@ setup()
client = Client(schema)


def test_str_schema():
def test_str_schema(snapshot):
    assert str(schema).strip() == textwrap.dedent(
    snapshot.assert_match(str(schema))
        '''\
        type Query {
          rebels: Faction
          empire: Faction
          node(
            """The ID of the object"""
            id: ID!
          ): Node
        }

        """A faction in the Star Wars saga"""
        type Faction implements Node {
          """The ID of the object"""
          id: ID!

          """The name of the faction."""
          name: String

          """The ships used by the faction."""
          ships(before: String, after: String, first: Int, last: Int): ShipConnection
        }

        """An object with an ID"""
        interface Node {
          """The ID of the object"""
          id: ID!
        }

        type ShipConnection {
          """Pagination data for this connection."""
          pageInfo: PageInfo!

          """Contains the nodes in this connection."""
          edges: [ShipEdge]!
        }

        """
        The Relay compliant `PageInfo` type, containing data necessary to paginate this connection.
        """
        type PageInfo {
          """When paginating forwards, are there more items?"""
          hasNextPage: Boolean!

          """When paginating backwards, are there more items?"""
          hasPreviousPage: Boolean!

          """When paginating backwards, the cursor to continue."""
          startCursor: String

          """When paginating forwards, the cursor to continue."""
          endCursor: String
        }

        """A Relay edge containing a `Ship` and its cursor."""
        type ShipEdge {
          """The item at the end of the edge"""
          node: Ship

          """A cursor for use in pagination"""
          cursor: String!
        }

        """A ship in the Star Wars saga"""
        type Ship implements Node {
          """The ID of the object"""
          id: ID!

          """The name of the ship."""
          name: String
        }

        type Mutation {
          introduceShip(input: IntroduceShipInput!): IntroduceShipPayload
        }

        type IntroduceShipPayload {
          ship: Ship
          faction: Faction
          clientMutationId: String
        }

        input IntroduceShipInput {
          shipName: String!
          factionId: String!
          clientMutationId: String
        }'''
    )


def test_correctly_fetches_id_name_rebels():
def test_correctly_fetches_id_name_rebels(snapshot):
    result = client.execute("""
    query = """
    query RebelsQuery {
      rebels {
        id
        name
      }
    }
    """)
    """
    assert result == {
    snapshot.assert_match(client.execute(query))
        "data": {
            "rebels": {"id": "RmFjdGlvbjox", "name": "Alliance to Restore the Republic"}
        }
    }


def test_correctly_refetches_rebels():
def test_correctly_refetches_rebels(snapshot):
    result = client.execute("""
    query = """
    query RebelsRefetchQuery {
      node(id: "RmFjdGlvbjox") {
        id
@@ -127,30 +34,24 @@ def test_correctly_refetches_rebels():
      }
    }
    """)
    """
    assert result == {
    snapshot.assert_match(client.execute(query))
        "data": {
            "node": {"id": "RmFjdGlvbjox", "name": "Alliance to Restore the Republic"}
        }
    }


def test_correctly_fetches_id_name_empire():
def test_correctly_fetches_id_name_empire(snapshot):
    result = client.execute("""
    query = """
    query EmpireQuery {
      empire {
        id
        name
      }
    }
    """)
    """
    assert result == {
    snapshot.assert_match(client.execute(query))
        "data": {"empire": {"id": "RmFjdGlvbjoy", "name": "Galactic Empire"}}
    }


def test_correctly_refetches_empire():
def test_correctly_refetches_empire(snapshot):
    result = client.execute("""
    query = """
    query EmpireRefetchQuery {
      node(id: "RmFjdGlvbjoy") {
        id
@@ -159,14 +60,12 @@ def test_correctly_refetches_empire():
      }
    }
    """)
    """
    assert result == {
    snapshot.assert_match(client.execute(query))
        "data": {"node": {"id": "RmFjdGlvbjoy", "name": "Galactic Empire"}}
    }


def test_correctly_refetches_xwing():
def test_correctly_refetches_xwing(snapshot):
    result = client.execute("""
    query = """
    query XWingRefetchQuery {
      node(id: "U2hpcDox") {
        id
@@ -175,5 +74,5 @@ def test_correctly_refetches_xwing():
      }
    }
    """)
    """
    assert result == {"data": {"node": {"id": "U2hpcDox", "name": "X-Wing"}}}
    snapshot.assert_match(client.execute(query))
@@ -1,98 +1,88 @@
from .pyutils.version import get_version

from .types import (
    ObjectType,
    InputObjectType,
    Interface,
    Mutation,
    Field,
    InputField,
    Schema,
    Scalar,
    String,
    ID,
    Int,
    Float,
    Boolean,
    Date,
    DateTime,
    Time,
    Decimal,
    JSONString,
    UUID,
    List,
    NonNull,
    Enum,
    Argument,
    Dynamic,
    Union,
    Context,
    ResolveInfo,
)
from .relay import (
    BaseGlobalIDType,
    Node,
    is_node,
    GlobalID,
    ClientIDMutation,
    Connection,
    ConnectionField,
    DefaultGlobalIDType,
    GlobalID,
    Node,
    PageInfo,
    SimpleGlobalIDType,
    UUIDGlobalIDType,
    is_node,
)
from .types import (
    ID,
    UUID,
    Argument,
    Base64,
    BigInt,
    Boolean,
    Context,
    Date,
    DateTime,
    Decimal,
    Dynamic,
    Enum,
    Field,
    Float,
    InputField,
    InputObjectType,
    Int,
    Interface,
    JSONString,
    List,
    Mutation,
    NonNull,
    ObjectType,
    ResolveInfo,
    Scalar,
    Schema,
    String,
    Time,
    Union,
)
from .utils.module_loading import lazy_import
from .utils.resolve_only_args import resolve_only_args
from .utils.module_loading import lazy_import

VERSION = (3, 4, 3, "final", 0)
VERSION = (3, 0, 0, "alpha", 1)

__version__ = get_version(VERSION)

__all__ = [
    "__version__",
    "Argument",
    "ObjectType",
    "Base64",
    "InputObjectType",
    "BigInt",
    "Interface",
    "BaseGlobalIDType",
    "Mutation",
    "Field",
    "InputField",
    "Schema",
    "Scalar",
    "String",
    "ID",
    "Int",
    "Float",
    "Enum",
    "Boolean",
    "Date",
    "DateTime",
    "Time",
    "Decimal",
    "JSONString",
    "UUID",
    "List",
    "NonNull",
    "Argument",
    "Dynamic",
    "Union",
    "resolve_only_args",
    "Node",
    "is_node",
    "GlobalID",
    "ClientIDMutation",
    "Connection",
    "ConnectionField",
    "Context",
    "Date",
    "DateTime",
    "Decimal",
    "DefaultGlobalIDType",
    "Dynamic",
    "Enum",
    "Field",
    "Float",
    "GlobalID",
    "ID",
    "InputField",
    "InputObjectType",
    "Int",
    "Interface",
    "JSONString",
    "List",
    "Mutation",
    "Node",
    "NonNull",
    "ObjectType",
    "PageInfo",
    "ResolveInfo",
    "Scalar",
    "Schema",
    "SimpleGlobalIDType",
    "String",
    "Time",
    "Union",
    "UUID",
    "UUIDGlobalIDType",
    "is_node",
    "lazy_import",
    "resolve_only_args",
    "Context",
    "ResolveInfo",
]
23 graphene/pyutils/init_subclass.py Normal file
@@ -0,0 +1,23 @@
is_init_subclass_available = hasattr(object, "__init_subclass__")

if not is_init_subclass_available:

    class InitSubclassMeta(type):
        """Metaclass that implements PEP 487 protocol"""

        def __new__(cls, name, bases, ns, **kwargs):
            __init_subclass__ = ns.pop("__init_subclass__", None)
            if __init_subclass__:
                __init_subclass__ = classmethod(__init_subclass__)
                ns["__init_subclass__"] = __init_subclass__
            return super(InitSubclassMeta, cls).__new__(cls, name, bases, ns, **kwargs)

        def __init__(cls, name, bases, ns, **kwargs):
            super(InitSubclassMeta, cls).__init__(name, bases, ns)
            super_class = super(cls, cls)
            if hasattr(super_class, "__init_subclass__"):
                super_class.__init_subclass__.__func__(cls, **kwargs)


else:
    InitSubclassMeta = type  # type: ignore
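For readers unfamiliar with PEP 487: the shim above only matters on interpreters that lack `object.__init_subclass__`. A minimal sketch (class names are illustrative, not from the diff) of the behaviour it emulates on modern Python:

class PluginBase:
    # Each subclass definition triggers this hook, without a custom metaclass.
    registry = []

    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        PluginBase.registry.append(cls)


class MyPlugin(PluginBase):
    pass


assert PluginBase.registry == [MyPlugin]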
@@ -1,3 +1,5 @@
from __future__ import unicode_literals

import datetime
import os
import subprocess
@@ -17,7 +19,10 @@ def get_version(version=None):
    sub = ""
    if version[3] == "alpha" and version[4] == 0:
        git_changeset = get_git_changeset()
        sub = ".dev%s" % git_changeset if git_changeset else ".dev"
        if git_changeset:
            sub = ".dev%s" % git_changeset
        else:
            sub = ".dev"
    elif version[3] != "final":
        mapping = {"alpha": "a", "beta": "b", "rc": "rc"}
        sub = mapping[version[3]] + str(version[4])
@@ -71,6 +76,6 @@ def get_git_changeset():
        )
        timestamp = git_log.communicate()[0]
        timestamp = datetime.datetime.utcfromtimestamp(int(timestamp))
    except Exception:
    except:
        return None
    return timestamp.strftime("%Y%m%d%H%M%S")
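To make the suffix logic shown in this hunk concrete, here is the "sub" computation isolated into a standalone sketch (the helper name is illustrative):

def release_suffix(version, git_changeset=None):
    # Mirrors the branch structure of get_version() above.
    if version[3] == "alpha" and version[4] == 0:
        return ".dev%s" % git_changeset if git_changeset else ".dev"
    if version[3] != "final":
        mapping = {"alpha": "a", "beta": "b", "rc": "rc"}
        return mapping[version[3]] + str(version[4])
    return ""


assert release_suffix((3, 0, 0, "alpha", 1)) == "a1"
assert release_suffix((3, 4, 3, "final", 0)) == ""
assert release_suffix((3, 0, 0, "alpha", 0), "20190701120000") == ".dev20190701120000"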
@@ -1,23 +1,13 @@
from .node import Node, is_node, GlobalID
from .mutation import ClientIDMutation
from .connection import Connection, ConnectionField, PageInfo
from .id_type import (
    BaseGlobalIDType,
    DefaultGlobalIDType,
    SimpleGlobalIDType,
    UUIDGlobalIDType,
)

__all__ = [
    "BaseGlobalIDType",
    "Node",
    "is_node",
    "GlobalID",
    "ClientIDMutation",
    "Connection",
    "ConnectionField",
    "DefaultGlobalIDType",
    "GlobalID",
    "Node",
    "PageInfo",
    "SimpleGlobalIDType",
    "UUIDGlobalIDType",
    "is_node",
]
@@ -1,7 +1,6 @@
import re
from collections.abc import Iterable
from functools import partial
from typing import Type

from graphql_relay import connection_from_array

@@ -9,34 +8,7 @@ from ..types import Boolean, Enum, Int, Interface, List, NonNull, Scalar, String
from ..types.field import Field
from ..types.objecttype import ObjectType, ObjectTypeOptions
from ..utils.thenables import maybe_thenable
from .node import is_node, AbstractNode
from .node import is_node


def get_edge_class(
    connection_class: Type["Connection"],
    _node: Type[AbstractNode],
    base_name: str,
    strict_types: bool = False,
):
    edge_class = getattr(connection_class, "Edge", None)

    class EdgeBase:
        node = Field(
            NonNull(_node) if strict_types else _node,
            description="The item at the end of the edge",
        )
        cursor = String(required=True, description="A cursor for use in pagination")

    class EdgeMeta:
        description = f"A Relay edge containing a `{base_name}` and its cursor."

    edge_name = f"{base_name}Edge"

    edge_bases = [edge_class, EdgeBase] if edge_class else [EdgeBase]
    if not isinstance(edge_class, ObjectType):
        edge_bases = [*edge_bases, ObjectType]

    return type(edge_name, tuple(edge_bases), {"Meta": EdgeMeta})


class PageInfo(ObjectType):
@@ -89,43 +61,54 @@ class Connection(ObjectType):
        abstract = True

    @classmethod
    def __init_subclass_with_meta__(
        cls, node=None, name=None, strict_types=False, _meta=None, **options
    ):
        if not _meta:
            _meta = ConnectionOptions(cls)
        assert node, f"You have to provide a node in {cls.__name__}.Meta"
    def __init_subclass_with_meta__(cls, node=None, name=None, **options):
        _meta = ConnectionOptions(cls)
        assert node, "You have to provide a node in {}.Meta".format(cls.__name__)
        assert isinstance(node, NonNull) or issubclass(
            node, (Scalar, Enum, ObjectType, Interface, Union, NonNull)
        ), f'Received incompatible node "{node}" for Connection {cls.__name__}.'
        ), ('Received incompatible node "{}" for Connection {}.').format(
            node, cls.__name__
        )

        base_name = re.sub("Connection$", "", name or cls.__name__) or node._meta.name
        if not name:
            name = f"{base_name}Connection"
            name = "{}Connection".format(base_name)

        edge_class = getattr(cls, "Edge", None)
        _node = node

        class EdgeBase:
            node = Field(_node, description="The item at the end of the edge")
            cursor = String(required=True, description="A cursor for use in pagination")

        class EdgeMeta:
            description = "A Relay edge containing a `{}` and its cursor.".format(
                base_name
            )

        edge_name = "{}Edge".format(base_name)
        if edge_class:
            edge_bases = (edge_class, EdgeBase, ObjectType)
        else:
            edge_bases = (EdgeBase, ObjectType)

        edge = type(edge_name, edge_bases, {"Meta": EdgeMeta})
        cls.Edge = edge

        options["name"] = name

        _meta.node = node
        if not _meta.fields:
            _meta.fields = {}

        if "page_info" not in _meta.fields:
            _meta.fields["page_info"] = Field(
                PageInfo,
                name="pageInfo",
                required=True,
                description="Pagination data for this connection.",
            )

        if "edges" not in _meta.fields:
            edge_class = get_edge_class(cls, node, base_name, strict_types)  # type: ignore
            cls.Edge = edge_class
            _meta.fields["edges"] = Field(
                NonNull(List(NonNull(edge_class) if strict_types else edge_class)),
                description="Contains the nodes in this connection.",
            )
        _meta.fields = {
            "page_info": Field(
                PageInfo,
                name="pageInfo",
                required=True,
                description="Pagination data for this connection.",
            ),
            "edges": Field(
                NonNull(List(edge)),
                description="Contains the nodes in this connection.",
            ),
        }
        return super(Connection, cls).__init_subclass_with_meta__(
            _meta=_meta, **options
        )
@@ -138,19 +121,19 @@ def connection_adapter(cls, edges, pageInfo):


class IterableConnectionField(Field):
    def __init__(self, type_, *args, **kwargs):
    def __init__(self, type, *args, **kwargs):
        kwargs.setdefault("before", String())
        kwargs.setdefault("after", String())
        kwargs.setdefault("first", Int())
        kwargs.setdefault("last", Int())
        super(IterableConnectionField, self).__init__(type_, *args, **kwargs)
        super(IterableConnectionField, self).__init__(type, *args, **kwargs)

    @property
    def type(self):
        type_ = super(IterableConnectionField, self).type
        type = super(IterableConnectionField, self).type
        connection_type = type_
        connection_type = type
        if isinstance(type_, NonNull):
        if isinstance(type, NonNull):
            connection_type = type_.of_type
            connection_type = type.of_type

        if is_node(connection_type):
            raise Exception(
@@ -158,10 +141,10 @@ class IterableConnectionField(Field):
                "Read more: https://github.com/graphql-python/graphene/blob/v2.0.0/UPGRADE-v2.0.md#node-connections"
            )

        assert issubclass(
            connection_type, Connection
        ), f'{self.__class__.__name__} type has to be a subclass of Connection. Received "{connection_type}".'
        assert issubclass(connection_type, Connection), (
            '{} type has to be a subclass of Connection. Received "{}".'
        ).format(self.__class__.__name__, connection_type)
        return type_
        return type

    @classmethod
    def resolve_connection(cls, connection_type, args, resolved):
@@ -169,9 +152,9 @@ class IterableConnectionField(Field):
            return resolved

        assert isinstance(resolved, Iterable), (
            f"Resolved value from the connection field has to be an iterable or instance of {connection_type}. "
            "Resolved value from the connection field has to be an iterable or instance of {}. "
            f'Received "{resolved}"'
            'Received "{}"'
        )
        ).format(connection_type, resolved)
        connection = connection_from_array(
            resolved,
            args,
@@ -192,8 +175,8 @@ class IterableConnectionField(Field):
        on_resolve = partial(cls.resolve_connection, connection_type, args)
        return maybe_thenable(resolved, on_resolve)

    def wrap_resolve(self, parent_resolver):
    def get_resolver(self, parent_resolver):
        resolver = super(IterableConnectionField, self).wrap_resolve(parent_resolver)
        resolver = super(IterableConnectionField, self).get_resolver(parent_resolver)
        return partial(self.connection_resolver, resolver, self.type)
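A short sketch of the newer API visible on the master side of this hunk: with `strict_types=True` the generated edges field becomes a non-null list of non-null edges and `Edge.node` becomes NonNull (class names below are illustrative):

from graphene import ObjectType, String
from graphene.relay import Connection


class Ship(ObjectType):
    name = String()


class ShipConnection(Connection):
    class Meta:
        node = Ship
        # Only supported by the get_edge_class() path shown above (master side).
        strict_types = True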
@@ -1,87 +0,0 @@
from graphql_relay import from_global_id, to_global_id

from ..types import ID, UUID
from ..types.base import BaseType

from typing import Type


class BaseGlobalIDType:
    """
    Base class that define the required attributes/method for a type.
    """

    graphene_type: Type[BaseType] = ID

    @classmethod
    def resolve_global_id(cls, info, global_id):
        # return _type, _id
        raise NotImplementedError

    @classmethod
    def to_global_id(cls, _type, _id):
        # return _id
        raise NotImplementedError


class DefaultGlobalIDType(BaseGlobalIDType):
    """
    Default global ID type: base64 encoded version of "<node type name>: <node id>".
    """

    graphene_type = ID

    @classmethod
    def resolve_global_id(cls, info, global_id):
        try:
            _type, _id = from_global_id(global_id)
            if not _type:
                raise ValueError("Invalid Global ID")
            return _type, _id
        except Exception as e:
            raise Exception(
                f'Unable to parse global ID "{global_id}". '
                'Make sure it is a base64 encoded string in the format: "TypeName:id". '
                f"Exception message: {e}"
            )

    @classmethod
    def to_global_id(cls, _type, _id):
        return to_global_id(_type, _id)


class SimpleGlobalIDType(BaseGlobalIDType):
    """
    Simple global ID type: simply the id of the object.
    To be used carefully as the user is responsible for ensuring that the IDs are indeed global
    (otherwise it could cause request caching issues).
    """

    graphene_type = ID

    @classmethod
    def resolve_global_id(cls, info, global_id):
        _type = info.return_type.graphene_type._meta.name
        return _type, global_id

    @classmethod
    def to_global_id(cls, _type, _id):
        return _id


class UUIDGlobalIDType(BaseGlobalIDType):
    """
    UUID global ID type.
    By definition UUID are global so they are used as they are.
    """

    graphene_type = UUID

    @classmethod
    def resolve_global_id(cls, info, global_id):
        _type = info.return_type.graphene_type._meta.name
        return _type, global_id

    @classmethod
    def to_global_id(cls, _type, _id):
        return _id
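For orientation, a minimal sketch of how these ID types plug into a schema on the master side (type names are illustrative): a Node subclass selects the global ID flavour, and every ObjectType implementing it exposes IDs in that format.

from graphene import ObjectType, Schema, String
from graphene.relay import Node, UUIDGlobalIDType


class CustomNode(Node):
    class Meta:
        # IDs are plain UUIDs instead of base64 "TypeName:id" strings.
        global_id_type = UUIDGlobalIDType


class User(ObjectType):
    class Meta:
        interfaces = [CustomNode]

    name = String()


class Query(ObjectType):
    user = CustomNode.Field(User)


schema = Schema(query=Query, types=[User])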
@@ -27,7 +27,7 @@ class ClientIDMutation(Mutation):
            input_fields = {}

        cls.Input = type(
            f"{base_name}Input",
            "{}Input".format(base_name),
            bases,
            dict(input_fields, client_mutation_id=String(name="clientMutationId")),
        )
@@ -39,12 +39,12 @@ class ClientIDMutation(Mutation):
        mutate_and_get_payload = getattr(cls, "mutate_and_get_payload", None)
        if cls.mutate and cls.mutate.__func__ == ClientIDMutation.mutate.__func__:
            assert mutate_and_get_payload, (
                f"{name or cls.__name__}.mutate_and_get_payload method is required"
                "{name}.mutate_and_get_payload method is required"
                " in a ClientIDMutation."
            )
            ).format(name=name or cls.__name__)

        if not name:
            name = f"{base_name}Payload"
            name = "{}Payload".format(base_name)

        super(ClientIDMutation, cls).__init_subclass_with_meta__(
            output=None, arguments=arguments, name=name, **options
@@ -58,7 +58,9 @@ class ClientIDMutation(Mutation):
                payload.client_mutation_id = input.get("client_mutation_id")
            except Exception:
                raise Exception(
                    f"Cannot set client_mutation_id in the payload object {repr(payload)}"
                    ("Cannot set client_mutation_id in the payload object {}").format(
                        repr(payload)
                    )
                )
            return payload
@@ -1,10 +1,11 @@
from functools import partial
from inspect import isclass

from ..types import Field, Interface, ObjectType
from graphql_relay import from_global_id, to_global_id

from ..types import ID, Field, Interface, ObjectType
from ..types.interface import InterfaceOptions
from ..types.utils import get_type
from .id_type import BaseGlobalIDType, DefaultGlobalIDType


def is_node(objecttype):
@@ -17,22 +18,16 @@ def is_node(objecttype):
    if not issubclass(objecttype, ObjectType):
        return False

    return any(issubclass(i, Node) for i in objecttype._meta.interfaces)
    for i in objecttype._meta.interfaces:
        if issubclass(i, Node):
            return True

    return False


class GlobalID(Field):
    def __init__(
        self,
        node=None,
        parent_type=None,
        required=True,
        global_id_type=DefaultGlobalIDType,
        *args,
        **kwargs,
    ):
        super(GlobalID, self).__init__(
            global_id_type.graphene_type, required=required, *args, **kwargs
        )
    def __init__(self, node=None, parent_type=None, required=True, *args, **kwargs):
        super(GlobalID, self).__init__(ID, required=required, *args, **kwargs)
        self.node = node or Node
        self.parent_type_name = parent_type._meta.name if parent_type else None

@@ -42,7 +37,7 @@ class GlobalID(Field):
        parent_type_name = parent_type_name or info.parent_type.name
        return node.to_global_id(parent_type_name, type_id)  # root._meta.name

    def wrap_resolve(self, parent_resolver):
    def get_resolver(self, parent_resolver):
        return partial(
            self.id_resolver,
            parent_resolver,
@@ -52,22 +47,20 @@ class GlobalID(Field):


class NodeField(Field):
    def __init__(self, node, type_=False, **kwargs):
    def __init__(self, node, type=False, **kwargs):
        assert issubclass(node, Node), "NodeField can only operate in Nodes"
        self.node_type = node
        self.field_type = type_
        self.field_type = type
        global_id_type = node._meta.global_id_type

        super(NodeField, self).__init__(
            # If we don't specify a type, the field type will be the node interface
            type_ or node,
            id=global_id_type.graphene_type(
                required=True, description="The ID of the object"
            ),
            **kwargs,
            # If we don's specify a type, the field type will be the node
            # interface
            type or node,
            id=ID(required=True, description="The ID of the object"),
            **kwargs
        )

    def wrap_resolve(self, parent_resolver):
    def get_resolver(self, parent_resolver):
        return partial(self.node_type.node_resolver, get_type(self.field_type))


@@ -76,23 +69,11 @@ class AbstractNode(Interface):
        abstract = True

    @classmethod
    def __init_subclass_with_meta__(cls, global_id_type=DefaultGlobalIDType, **options):
    def __init_subclass_with_meta__(cls, **options):
        assert issubclass(
            global_id_type, BaseGlobalIDType
        ), "Custom ID type need to be implemented as a subclass of BaseGlobalIDType."
        _meta = InterfaceOptions(cls)
        _meta.global_id_type = global_id_type
        _meta.fields = {
            "id": GlobalID(
                cls, global_id_type=global_id_type, description="The ID of the object"
            )
        }
        _meta.fields = {"id": GlobalID(cls, description="The ID of the object")}
        super(AbstractNode, cls).__init_subclass_with_meta__(_meta=_meta, **options)

    @classmethod
    def resolve_global_id(cls, info, global_id):
        return cls._meta.global_id_type.resolve_global_id(info, global_id)


class Node(AbstractNode):
    """An object with an ID"""
@@ -107,23 +88,38 @@ class Node(AbstractNode):
    @classmethod
    def get_node_from_global_id(cls, info, global_id, only_type=None):
        _type, _id = cls.resolve_global_id(info, global_id)
        try:
            _type, _id = cls.from_global_id(global_id)
        except Exception as e:
            raise Exception(
                (
                    'Unable to parse global ID "{global_id}". '
                    'Make sure it is a base64 encoded string in the format: "TypeName:id". '
                    "Exception message: {exception}".format(
                        global_id=global_id, exception=str(e)
                    )
                )
            )

        graphene_type = info.schema.get_type(_type)
        if graphene_type is None:
            raise Exception(f'Relay Node "{_type}" not found in schema')
            raise Exception(
                'Relay Node "{_type}" not found in schema'.format(_type=_type)
            )

        graphene_type = graphene_type.graphene_type

        if only_type:
            assert (
                graphene_type == only_type
            ), f"Must receive a {only_type._meta.name} id."
            assert graphene_type == only_type, ("Must receive a {} id.").format(
                only_type._meta.name
            )

        # We make sure the ObjectType implements the "Node" interface
        if cls not in graphene_type._meta.interfaces:
            raise Exception(
                f'ObjectType "{_type}" does not implement the "{cls}" interface.'
                'ObjectType "{_type}" does not implement the "{cls}" interface.'.format(
                    _type=_type, cls=cls
                )
            )

        get_node = getattr(graphene_type, "get_node", None)
@@ -131,5 +127,9 @@ class Node(AbstractNode):
            return get_node(info, _id)

    @classmethod
    def to_global_id(cls, type_, id):
    def from_global_id(cls, global_id):
        return cls._meta.global_id_type.to_global_id(type_, id)
        return from_global_id(global_id)

    @classmethod
    def to_global_id(cls, type, id):
        return to_global_id(type, id)
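Both sides of this hunk implement the same round-trip; a quick illustration with a value that also appears in the snapshots above (the "Ship"/1 pairing is assumed for the example):

from graphene.relay import Node

# base64("Ship:1") -> "U2hpcDox"; on master this delegates to the node's
# global_id_type, on v3.0.0a1 it calls graphql_relay.to_global_id directly.
global_id = Node.to_global_id("Ship", 1)
assert global_id == "U2hpcDox"
# get_node_from_global_id(info, "U2hpcDox") then splits it back into
# ("Ship", "1") and calls Ship.get_node(info, "1").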
@@ -1,15 +1,7 @@
import re

from pytest import raises

from ...types import Argument, Field, Int, List, NonNull, ObjectType, Schema, String
from ..connection import (
from ..connection import Connection, ConnectionField, PageInfo
    Connection,
    ConnectionField,
    PageInfo,
    ConnectionOptions,
    get_edge_class,
)
from ..node import Node

@@ -59,111 +51,6 @@ def test_connection_inherit_abstracttype():
    assert list(fields) == ["page_info", "edges", "extra"]


def test_connection_extra_abstract_fields():
    class ConnectionWithNodes(Connection):
        class Meta:
            abstract = True

        @classmethod
        def __init_subclass_with_meta__(cls, node=None, name=None, **options):
            _meta = ConnectionOptions(cls)

            _meta.fields = {
                "nodes": Field(
                    NonNull(List(node)),
                    description="Contains all the nodes in this connection.",
                ),
            }

            return super(ConnectionWithNodes, cls).__init_subclass_with_meta__(
                node=node, name=name, _meta=_meta, **options
            )

    class MyObjectConnection(ConnectionWithNodes):
        class Meta:
            node = MyObject

        class Edge:
            other = String()

    assert MyObjectConnection._meta.name == "MyObjectConnection"
    fields = MyObjectConnection._meta.fields
    assert list(fields) == ["nodes", "page_info", "edges"]
    edge_field = fields["edges"]
    pageinfo_field = fields["page_info"]
    nodes_field = fields["nodes"]

    assert isinstance(edge_field, Field)
    assert isinstance(edge_field.type, NonNull)
    assert isinstance(edge_field.type.of_type, List)
    assert edge_field.type.of_type.of_type == MyObjectConnection.Edge

    assert isinstance(pageinfo_field, Field)
    assert isinstance(pageinfo_field.type, NonNull)
    assert pageinfo_field.type.of_type == PageInfo

    assert isinstance(nodes_field, Field)
    assert isinstance(nodes_field.type, NonNull)
    assert isinstance(nodes_field.type.of_type, List)
    assert nodes_field.type.of_type.of_type == MyObject


def test_connection_override_fields():
    class ConnectionWithNodes(Connection):
        class Meta:
            abstract = True

        @classmethod
        def __init_subclass_with_meta__(cls, node=None, name=None, **options):
            _meta = ConnectionOptions(cls)
            base_name = (
                re.sub("Connection$", "", name or cls.__name__) or node._meta.name
            )

            edge_class = get_edge_class(cls, node, base_name)

            _meta.fields = {
                "page_info": Field(
                    NonNull(
                        PageInfo,
                        name="pageInfo",
                        required=True,
                        description="Pagination data for this connection.",
                    )
                ),
                "edges": Field(
                    NonNull(List(NonNull(edge_class))),
                    description="Contains the nodes in this connection.",
                ),
            }

            return super(ConnectionWithNodes, cls).__init_subclass_with_meta__(
                node=node, name=name, _meta=_meta, **options
            )

    class MyObjectConnection(ConnectionWithNodes):
        class Meta:
            node = MyObject

    assert MyObjectConnection._meta.name == "MyObjectConnection"
    fields = MyObjectConnection._meta.fields
    assert list(fields) == ["page_info", "edges"]
    edge_field = fields["edges"]
    pageinfo_field = fields["page_info"]

    assert isinstance(edge_field, Field)
    assert isinstance(edge_field.type, NonNull)
    assert isinstance(edge_field.type.of_type, List)
    assert isinstance(edge_field.type.of_type.of_type, NonNull)

    assert edge_field.type.of_type.of_type.of_type.__name__ == "MyObjectEdge"

    # This page info is NonNull
    assert isinstance(pageinfo_field, Field)
    assert isinstance(edge_field.type, NonNull)
    assert pageinfo_field.type.of_type == PageInfo


def test_connection_name():
    custom_name = "MyObjectCustomNameConnection"

@@ -299,20 +186,3 @@ def test_connectionfield_required():
    executed = schema.execute("{ testConnection { edges { cursor } } }")
    assert not executed.errors
    assert executed.data == {"testConnection": {"edges": []}}


def test_connectionfield_strict_types():
    class MyObjectConnection(Connection):
        class Meta:
            node = MyObject
            strict_types = True

    connection_field = ConnectionField(MyObjectConnection)
    edges_field_type = connection_field.type._meta.fields["edges"].type
    assert isinstance(edges_field_type, NonNull)

    edges_list_element_type = edges_field_type.of_type.of_type
    assert isinstance(edges_list_element_type, NonNull)

    node_field = edges_list_element_type.of_type._meta.fields["node"]
    assert isinstance(node_field.type, NonNull)
@@ -51,10 +51,10 @@ letters = {letter: Letter(id=i, letter=letter) for i, letter in enumerate(letter
def edges(selected_letters):
    return [
        {
            "node": {"id": base64("Letter:%s" % letter.id), "letter": letter.letter},
            "node": {"id": base64("Letter:%s" % l.id), "letter": l.letter},
            "cursor": base64("arrayconnection:%s" % letter.id),
            "cursor": base64("arrayconnection:%s" % l.id),
        }
        for letter in [letters[i] for i in selected_letters]
        for l in [letters[i] for i in selected_letters]
    ]

@@ -66,6 +66,7 @@ def cursor_for(ltr):
async def execute(args=""):
    if args:
        args = "(" + args + ")"

    return await schema.execute_async(
        """
        {
@@ -133,28 +134,32 @@ async def test_respects_an_overly_large_last():

@mark.asyncio
async def test_respects_first_and_after():
    await check(f'first: 2, after: "{cursor_for("B")}"', "CD", has_next_page=True)
    await check(
        'first: 2, after: "{}"'.format(cursor_for("B")), "CD", has_next_page=True
    )


@mark.asyncio
async def test_respects_first_and_after_with_long_first():
    await check(f'first: 10, after: "{cursor_for("B")}"', "CDE")
    await check('first: 10, after: "{}"'.format(cursor_for("B")), "CDE")


@mark.asyncio
async def test_respects_last_and_before():
    await check(f'last: 2, before: "{cursor_for("D")}"', "BC", has_previous_page=True)
    await check(
        'last: 2, before: "{}"'.format(cursor_for("D")), "BC", has_previous_page=True
    )


@mark.asyncio
async def test_respects_last_and_before_with_long_last():
    await check(f'last: 10, before: "{cursor_for("D")}"', "ABC")
    await check('last: 10, before: "{}"'.format(cursor_for("D")), "ABC")


@mark.asyncio
async def test_respects_first_and_after_and_before_too_few():
    await check(
        f'first: 2, after: "{cursor_for("A")}", before: "{cursor_for("E")}"',
        'first: 2, after: "{}", before: "{}"'.format(cursor_for("A"), cursor_for("E")),
        "BC",
        has_next_page=True,
    )
@@ -163,21 +168,23 @@ async def test_respects_first_and_after_and_before_too_few():
@mark.asyncio
async def test_respects_first_and_after_and_before_too_many():
    await check(
        f'first: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD"
        'first: 4, after: "{}", before: "{}"'.format(cursor_for("A"), cursor_for("E")),
        "BCD",
    )


@mark.asyncio
async def test_respects_first_and_after_and_before_exactly_right():
    await check(
        f'first: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD"
        'first: 3, after: "{}", before: "{}"'.format(cursor_for("A"), cursor_for("E")),
        "BCD",
    )


@mark.asyncio
async def test_respects_last_and_after_and_before_too_few():
    await check(
        f'last: 2, after: "{cursor_for("A")}", before: "{cursor_for("E")}"',
        'last: 2, after: "{}", before: "{}"'.format(cursor_for("A"), cursor_for("E")),
        "CD",
        has_previous_page=True,
    )
@@ -186,14 +193,16 @@ async def test_respects_last_and_after_and_before_too_few():
@mark.asyncio
async def test_respects_last_and_after_and_before_too_many():
    await check(
        f'last: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD"
        'last: 4, after: "{}", before: "{}"'.format(cursor_for("A"), cursor_for("E")),
        "BCD",
    )


@mark.asyncio
async def test_respects_last_and_after_and_before_exactly_right():
    await check(
        f'last: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD"
        'last: 3, after: "{}", before: "{}"'.format(cursor_for("A"), cursor_for("E")),
        "BCD",
    )
@@ -210,7 +219,9 @@ async def test_returns_all_elements_if_cursors_are_invalid():
@mark.asyncio
async def test_returns_all_elements_if_cursors_are_on_the_outside():
    await check(
        f'before: "{base64("arrayconnection:%s" % 6)}" after: "{base64("arrayconnection:%s" % -1)}"',
        'before: "{}" after: "{}"'.format(
            base64("arrayconnection:%s" % 6), base64("arrayconnection:%s" % -1)
        ),
        "ABCDE",
    )

@@ -218,7 +229,9 @@ async def test_returns_all_elements_if_cursors_are_on_the_outside():
@mark.asyncio
async def test_returns_no_elements_if_cursors_cross():
    await check(
        f'before: "{base64("arrayconnection:%s" % 2)}" after: "{base64("arrayconnection:%s" % 4)}"',
        'before: "{}" after: "{}"'.format(
            base64("arrayconnection:%s" % 2), base64("arrayconnection:%s" % 4)
        ),
        "",
    )
@ -1,325 +0,0 @@
|
||||||
import re
|
|
||||||
from uuid import uuid4
|
|
||||||
|
|
||||||
from graphql import graphql_sync
|
|
||||||
|
|
||||||
from ..id_type import BaseGlobalIDType, SimpleGlobalIDType, UUIDGlobalIDType
|
|
||||||
from ..node import Node
|
|
||||||
from ...types import Int, ObjectType, Schema, String
|
|
||||||
|
|
||||||
|
|
||||||
class TestUUIDGlobalID:
|
|
||||||
def setup_method(self):
|
|
||||||
self.user_list = [
|
|
||||||
{"id": uuid4(), "name": "First"},
|
|
||||||
{"id": uuid4(), "name": "Second"},
|
|
||||||
{"id": uuid4(), "name": "Third"},
|
|
||||||
{"id": uuid4(), "name": "Fourth"},
|
|
||||||
]
|
|
||||||
self.users = {user["id"]: user for user in self.user_list}
|
|
||||||
|
|
||||||
class CustomNode(Node):
|
|
||||||
class Meta:
|
|
||||||
global_id_type = UUIDGlobalIDType
|
|
||||||
|
|
||||||
class User(ObjectType):
|
|
||||||
class Meta:
|
|
||||||
interfaces = [CustomNode]
|
|
||||||
|
|
||||||
name = String()
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def get_node(cls, _type, _id):
|
|
||||||
return self.users[_id]
|
|
||||||
|
|
||||||
class RootQuery(ObjectType):
|
|
||||||
user = CustomNode.Field(User)
|
|
||||||
|
|
||||||
self.schema = Schema(query=RootQuery, types=[User])
|
|
||||||
self.graphql_schema = self.schema.graphql_schema
|
|
||||||
|
|
||||||
def test_str_schema_correct(self):
|
|
||||||
"""
|
|
||||||
Check that the schema has the expected and custom node interface and user type and that they both use UUIDs
|
|
||||||
"""
|
|
||||||
parsed = re.findall(r"(.+) \{\n\s*([\w\W]*?)\n\}", str(self.schema))
|
|
||||||
types = [t for t, f in parsed]
|
|
||||||
fields = [f for t, f in parsed]
|
|
||||||
custom_node_interface = "interface CustomNode"
|
|
||||||
assert custom_node_interface in types
|
|
||||||
assert (
|
|
||||||
'"""The ID of the object"""\n id: UUID!'
|
|
||||||
== fields[types.index(custom_node_interface)]
|
|
||||||
)
|
|
||||||
user_type = "type User implements CustomNode"
|
|
||||||
assert user_type in types
|
|
||||||
assert (
|
|
||||||
'"""The ID of the object"""\n id: UUID!\n name: String'
|
|
||||||
== fields[types.index(user_type)]
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_get_by_id(self):
|
|
||||||
query = """query userById($id: UUID!) {
|
|
||||||
user(id: $id) {
|
|
||||||
id
|
|
||||||
name
|
|
||||||
}
|
|
||||||
}"""
|
|
||||||
# UUID need to be converted to string for serialization
|
|
||||||
result = graphql_sync(
|
|
||||||
self.graphql_schema,
|
|
||||||
query,
|
|
||||||
variable_values={"id": str(self.user_list[0]["id"])},
|
|
||||||
)
|
|
||||||
assert not result.errors
|
|
||||||
assert result.data["user"]["id"] == str(self.user_list[0]["id"])
|
|
||||||
assert result.data["user"]["name"] == self.user_list[0]["name"]
|
|
||||||
|
|
||||||
|
|
||||||
class TestSimpleGlobalID:
|
|
||||||
def setup_method(self):
|
|
||||||
self.user_list = [
|
|
||||||
{"id": "my global primary key in clear 1", "name": "First"},
|
|
||||||
{"id": "my global primary key in clear 2", "name": "Second"},
|
|
||||||
{"id": "my global primary key in clear 3", "name": "Third"},
|
|
||||||
{"id": "my global primary key in clear 4", "name": "Fourth"},
|
|
||||||
]
|
|
||||||
self.users = {user["id"]: user for user in self.user_list}
|
|
||||||
|
|
||||||
class CustomNode(Node):
|
|
||||||
class Meta:
|
|
||||||
global_id_type = SimpleGlobalIDType
|
|
||||||
|
|
||||||
class User(ObjectType):
|
|
||||||
class Meta:
|
|
||||||
interfaces = [CustomNode]
|
|
||||||
|
|
||||||
name = String()
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def get_node(cls, _type, _id):
|
|
||||||
return self.users[_id]
|
|
||||||
|
|
||||||
class RootQuery(ObjectType):
|
|
||||||
user = CustomNode.Field(User)
|
|
||||||
|
|
||||||
self.schema = Schema(query=RootQuery, types=[User])
|
|
||||||
self.graphql_schema = self.schema.graphql_schema
|
|
||||||
|
|
||||||
def test_str_schema_correct(self):
|
|
||||||
"""
|
|
||||||
Check that the schema has the expected and custom node interface and user type and that they both use UUIDs
|
|
||||||
"""
|
|
||||||
parsed = re.findall(r"(.+) \{\n\s*([\w\W]*?)\n\}", str(self.schema))
|
|
||||||
types = [t for t, f in parsed]
|
|
||||||
fields = [f for t, f in parsed]
|
|
||||||
custom_node_interface = "interface CustomNode"
|
|
||||||
assert custom_node_interface in types
|
|
||||||
assert (
|
|
||||||
'"""The ID of the object"""\n id: ID!'
|
|
||||||
== fields[types.index(custom_node_interface)]
|
|
||||||
)
|
|
||||||
user_type = "type User implements CustomNode"
|
|
||||||
assert user_type in types
|
|
||||||
assert (
|
|
||||||
'"""The ID of the object"""\n id: ID!\n name: String'
|
|
||||||
== fields[types.index(user_type)]
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_get_by_id(self):
|
|
||||||
query = """query {
|
|
||||||
user(id: "my global primary key in clear 3") {
|
|
||||||
id
|
|
||||||
name
|
|
||||||
}
|
|
||||||
}"""
|
|
||||||
result = graphql_sync(self.graphql_schema, query)
|
|
||||||
assert not result.errors
|
|
||||||
assert result.data["user"]["id"] == self.user_list[2]["id"]
|
|
||||||
assert result.data["user"]["name"] == self.user_list[2]["name"]
|
|
||||||
|
|
||||||
|
|
||||||
class TestCustomGlobalID:
    def setup_method(self):
        self.user_list = [
            {"id": 1, "name": "First"},
            {"id": 2, "name": "Second"},
            {"id": 3, "name": "Third"},
            {"id": 4, "name": "Fourth"},
        ]
        self.users = {user["id"]: user for user in self.user_list}

        class CustomGlobalIDType(BaseGlobalIDType):
            """
            Global id that is simply an integer in clear.
            """

            graphene_type = Int

            @classmethod
            def resolve_global_id(cls, info, global_id):
                _type = info.return_type.graphene_type._meta.name
                return _type, global_id

            @classmethod
            def to_global_id(cls, _type, _id):
                return _id

        class CustomNode(Node):
            class Meta:
                global_id_type = CustomGlobalIDType

        class User(ObjectType):
            class Meta:
                interfaces = [CustomNode]

            name = String()

            @classmethod
            def get_node(cls, _type, _id):
                return self.users[_id]

        class RootQuery(ObjectType):
            user = CustomNode.Field(User)

        self.schema = Schema(query=RootQuery, types=[User])
        self.graphql_schema = self.schema.graphql_schema

    def test_str_schema_correct(self):
        """
        Check that the schema has the expected custom node interface and user type and that they both
        use plain integer IDs.
        """
        parsed = re.findall(r"(.+) \{\n\s*([\w\W]*?)\n\}", str(self.schema))
        types = [t for t, f in parsed]
        fields = [f for t, f in parsed]
        custom_node_interface = "interface CustomNode"
        assert custom_node_interface in types
        assert (
            '"""The ID of the object"""\n  id: Int!'
            == fields[types.index(custom_node_interface)]
        )
        user_type = "type User implements CustomNode"
        assert user_type in types
        assert (
            '"""The ID of the object"""\n  id: Int!\n  name: String'
            == fields[types.index(user_type)]
        )

    def test_get_by_id(self):
        query = """query {
            user(id: 2) {
                id
                name
            }
        }"""
        result = graphql_sync(self.graphql_schema, query)
        assert not result.errors
        assert result.data["user"]["id"] == self.user_list[1]["id"]
        assert result.data["user"]["name"] == self.user_list[1]["name"]


class TestIncompleteCustomGlobalID:
    def setup_method(self):
        self.user_list = [
            {"id": 1, "name": "First"},
            {"id": 2, "name": "Second"},
            {"id": 3, "name": "Third"},
            {"id": 4, "name": "Fourth"},
        ]
        self.users = {user["id"]: user for user in self.user_list}

    def test_must_define_to_global_id(self):
        """
        Test that if the `to_global_id` method is not defined, we can query the object, but we can't request its ID.
        """

        class CustomGlobalIDType(BaseGlobalIDType):
            graphene_type = Int

            @classmethod
            def resolve_global_id(cls, info, global_id):
                _type = info.return_type.graphene_type._meta.name
                return _type, global_id

        class CustomNode(Node):
            class Meta:
                global_id_type = CustomGlobalIDType

        class User(ObjectType):
            class Meta:
                interfaces = [CustomNode]

            name = String()

            @classmethod
            def get_node(cls, _type, _id):
                return self.users[_id]

        class RootQuery(ObjectType):
            user = CustomNode.Field(User)

        self.schema = Schema(query=RootQuery, types=[User])
        self.graphql_schema = self.schema.graphql_schema

        query = """query {
            user(id: 2) {
                name
            }
        }"""
        result = graphql_sync(self.graphql_schema, query)
        assert not result.errors
        assert result.data["user"]["name"] == self.user_list[1]["name"]

        query = """query {
            user(id: 2) {
                id
                name
            }
        }"""
        result = graphql_sync(self.graphql_schema, query)
        assert result.errors is not None
        assert len(result.errors) == 1
        assert result.errors[0].path == ["user", "id"]

    def test_must_define_resolve_global_id(self):
        """
        Test that if the `resolve_global_id` method is not defined, we can't query the object by ID.
        """

        class CustomGlobalIDType(BaseGlobalIDType):
            graphene_type = Int

            @classmethod
            def to_global_id(cls, _type, _id):
                return _id

        class CustomNode(Node):
            class Meta:
                global_id_type = CustomGlobalIDType

        class User(ObjectType):
            class Meta:
                interfaces = [CustomNode]

            name = String()

            @classmethod
            def get_node(cls, _type, _id):
                return self.users[_id]

        class RootQuery(ObjectType):
            user = CustomNode.Field(User)

        self.schema = Schema(query=RootQuery, types=[User])
        self.graphql_schema = self.schema.graphql_schema

        query = """query {
            user(id: 2) {
                id
                name
            }
        }"""
        result = graphql_sync(self.graphql_schema, query)
        assert result.errors is not None
        assert len(result.errors) == 1
        assert result.errors[0].path == ["user"]
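The three test classes above exercise the pluggable global ID machinery present on the master side of this compare: a Node subclass chooses its ID scheme through Meta.global_id_type, and a custom scheme has to supply both to_global_id and resolve_global_id to be fully queryable. As a rough usage sketch outside the test harness (the graphene.relay.id_type module path mirrors the imports these tests rely on and should be treated as an assumption, not documented public API):

    # Hedged sketch: wiring an integer global ID type into a schema,
    # mirroring what TestCustomGlobalID.setup_method does above.
    from graphene import Int
    from graphene.relay import Node
    from graphene.relay.id_type import BaseGlobalIDType  # assumed module path


    class IntGlobalIDType(BaseGlobalIDType):
        graphene_type = Int

        @classmethod
        def to_global_id(cls, _type, _id):
            return _id  # expose the raw integer instead of an opaque string

        @classmethod
        def resolve_global_id(cls, info, global_id):
            return info.return_type.graphene_type._meta.name, global_id


    class IntNode(Node):
        class Meta:
            global_id_type = IntGlobalIDType

Leaving out either classmethod degrades gracefully, as the incomplete-type tests above show: without to_global_id the object is queryable but its id field errors, and without resolve_global_id the lookup by ID fails outright.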
@@ -45,7 +45,7 @@ def test_global_id_allows_overriding_of_node_and_required():

def test_global_id_defaults_to_info_parent_type():
    my_id = "1"
    gid = GlobalID()
    id_resolver = gid.wrap_resolve(lambda *_: my_id)
    id_resolver = gid.get_resolver(lambda *_: my_id)
    my_global_id = id_resolver(None, Info(User))
    assert my_global_id == to_global_id(User._meta.name, my_id)

@@ -53,6 +53,6 @@ def test_global_id_defaults_to_info_parent_type():

def test_global_id_allows_setting_customer_parent_type():
    my_id = "1"
    gid = GlobalID(parent_type=User)
    id_resolver = gid.wrap_resolve(lambda *_: my_id)
    id_resolver = gid.get_resolver(lambda *_: my_id)
    my_global_id = id_resolver(None, None)
    assert my_global_id == to_global_id(User._meta.name, my_id)

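The only functional difference between the two sides of these hunks is the name of the hook that wraps a field's resolver: wrap_resolve on master versus get_resolver in v3.0.0a1. A minimal sketch of what the assertions above check, using the master-side name (the value in the comment is just the base64 encoding of "User:1"):

    from graphql_relay import to_global_id

    from graphene import ObjectType, String
    from graphene.relay.node import GlobalID


    class User(ObjectType):
        name = String()


    gid = GlobalID(parent_type=User)
    # wrap_resolve decorates the raw id resolver so it returns a type-scoped global ID
    resolver = gid.wrap_resolve(lambda *_: "1")
    assert resolver(None, None) == to_global_id("User", "1")  # "VXNlcjox"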
@@ -1,13 +1,14 @@
import re
from textwrap import dedent

from graphql_relay import to_global_id

from graphql.pyutils import dedent

from ...types import ObjectType, Schema, String
from ..node import Node, is_node


class SharedNodeFields:

    shared = String()
    something_else = String()

@@ -54,7 +55,6 @@ def test_node_good():
    assert "id" in MyNode._meta.fields
    assert is_node(MyNode)
    assert not is_node(object)
    assert not is_node("node")


def test_node_query():
@@ -171,10 +171,8 @@ def test_node_field_only_lazy_type_wrong():


def test_str_schema():
    assert (
    assert str(schema) == dedent(
        str(schema).strip()
        '''
        == dedent(
            '''
            schema {
              query: RootQuery
            }
@@ -185,12 +183,6 @@ def test_str_schema():
              name: String
            }

            """An object with an ID"""
            interface Node {
              """The ID of the object"""
              id: ID!
            }

            type MyOtherNode implements Node {
              """The ID of the object"""
              id: ID!
@@ -199,6 +191,12 @@ def test_str_schema():
              extraField: String
            }

            """An object with an ID"""
            interface Node {
              """The ID of the object"""
              id: ID!
            }

            type RootQuery {
              first: String
              node(
@@ -215,5 +213,4 @@ def test_str_schema():
            ): MyNode
            }
            '''
        ).strip()
    )

@@ -1,6 +1,5 @@
from textwrap import dedent

from graphql import graphql_sync
from graphql.pyutils import dedent

from ...types import Interface, ObjectType, Schema
from ...types.scalars import Int, String
@@ -12,7 +11,7 @@ class CustomNode(Node):
        name = "Node"

    @staticmethod
    def to_global_id(type_, id):
    def to_global_id(type, id):
        return id

    @staticmethod
@@ -54,20 +53,15 @@ graphql_schema = schema.graphql_schema


def test_str_schema_correct():
    assert (
    assert str(schema) == dedent(
        str(schema).strip()
        '''
        == dedent(
            '''
            schema {
              query: RootQuery
            }

            type User implements Node {
            interface BasePhoto {
              """The ID of the object"""
              """The width of the photo in pixels"""
              id: ID!
              width: Int

              """The full name of the user"""
              name: String
            }

            interface Node {
@@ -83,19 +77,21 @@ def test_str_schema_correct():
              width: Int
            }

            interface BasePhoto {
              """The width of the photo in pixels"""
              width: Int
            }

            type RootQuery {
              node(
                """The ID of the object"""
                id: ID!
              ): Node
            }

            type User implements Node {
              """The ID of the object"""
              id: ID!

              """The full name of the user"""
              name: String
            }
            '''
        ).strip()
    )


@@ -1,3 +1,5 @@
from promise import Promise, is_thenable
from graphql.error import format_error as format_graphql_error
from graphql.error import GraphQLError

from graphene.types.schema import Schema

@@ -5,7 +7,7 @@ from graphene.types.schema import Schema

def default_format_error(error):
    if isinstance(error, GraphQLError):
        return error.formatted
        return format_graphql_error(error)
    return {"message": str(error)}


@@ -30,10 +32,7 @@ class Client:

    def execute(self, *args, **kwargs):
        executed = self.schema.execute(*args, **dict(self.execute_options, **kwargs))
        return self.format_result(executed)
        if is_thenable(executed):
            return Promise.resolve(executed).then(self.format_result)

    async def execute_async(self, *args, **kwargs):
        executed = await self.schema.execute_async(
            *args, **dict(self.execute_options, **kwargs)
        )
        return self.format_result(executed)
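For context, the master side of this hunk keeps Client.execute synchronous and adds a separate execute_async coroutine, while the v3.0.0a1 side still funnels thenables through the promise library. A hedged usage sketch of the synchronous path (the Query type here is illustrative, not part of the diff):

    from graphene import ObjectType, Schema, String
    from graphene.test import Client


    class Query(ObjectType):
        hello = String()

        def resolve_hello(root, info):
            return "world"


    client = Client(Schema(query=Query))
    # Returns the formatted result dict, e.g. {"data": {"hello": "world"}}
    result = client.execute("{ hello }")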
@@ -1,41 +0,0 @@
# https://github.com/graphql-python/graphene/issues/1293

from datetime import datetime, timezone

import graphene
from graphql.utilities import print_schema


class Filters(graphene.InputObjectType):
    datetime_after = graphene.DateTime(
        required=False,
        default_value=datetime.fromtimestamp(1434549820.776, timezone.utc),
    )
    datetime_before = graphene.DateTime(
        required=False,
        default_value=datetime.fromtimestamp(1444549820.776, timezone.utc),
    )


class SetDatetime(graphene.Mutation):
    class Arguments:
        filters = Filters(required=True)

    ok = graphene.Boolean()

    def mutate(root, info, filters):
        return SetDatetime(ok=True)


class Query(graphene.ObjectType):
    goodbye = graphene.String()


class Mutations(graphene.ObjectType):
    set_datetime = SetDatetime.Field()


def test_schema_printable_with_default_datetime_value():
    schema = graphene.Schema(query=Query, mutation=Mutations)
    schema_str = print_schema(schema.graphql_schema)
    assert schema_str, "empty schema printed"
@@ -1,36 +0,0 @@
from ...types import ObjectType, Schema, String, NonNull


class Query(ObjectType):
    hello = String(input=NonNull(String))

    def resolve_hello(self, info, input):
        if input == "nothing":
            return None
        return f"Hello {input}!"


schema = Schema(query=Query)


def test_required_input_provided():
    """
    Test that a required argument works when provided.
    """
    input_value = "Potato"
    result = schema.execute('{ hello(input: "%s") }' % input_value)
    assert not result.errors
    assert result.data == {"hello": "Hello Potato!"}


def test_required_input_missing():
    """
    Test that a required argument raises an error if not provided.
    """
    result = schema.execute("{ hello }")
    assert result.errors
    assert len(result.errors) == 1
    assert (
        result.errors[0].message
        == "Field 'hello' argument 'input' of type 'String!' is required, but it was not provided."
    )
@@ -1,53 +0,0 @@
import pytest

from ...types.base64 import Base64
from ...types.datetime import Date, DateTime
from ...types.decimal import Decimal
from ...types.generic import GenericScalar
from ...types.json import JSONString
from ...types.objecttype import ObjectType
from ...types.scalars import ID, BigInt, Boolean, Float, Int, String
from ...types.schema import Schema
from ...types.uuid import UUID


@pytest.mark.parametrize(
    "input_type,input_value",
    [
        (Date, '"2022-02-02"'),
        (GenericScalar, '"foo"'),
        (Int, "1"),
        (BigInt, "12345678901234567890"),
        (Float, "1.1"),
        (String, '"foo"'),
        (Boolean, "true"),
        (ID, "1"),
        (DateTime, '"2022-02-02T11:11:11"'),
        (UUID, '"cbebbc62-758e-4f75-a890-bc73b5017d81"'),
        (Decimal, '"1.1"'),
        (JSONString, '"{\\"key\\":\\"foo\\",\\"value\\":\\"bar\\"}"'),
        (Base64, '"Q2hlbG8gd29ycmxkCg=="'),
    ],
)
def test_parse_literal_with_variables(input_type, input_value):
    # input_b needs to be evaluated as literal while the variable dict for
    # input_a is passed along.

    class Query(ObjectType):
        generic = GenericScalar(input_a=GenericScalar(), input_b=input_type())

        def resolve_generic(self, info, input_a=None, input_b=None):
            return input

    schema = Schema(query=Query)

    query = f"""
    query Test($a: GenericScalar){{
        generic(inputA: $a, inputB: {input_value})
    }}
    """
    result = schema.execute(
        query,
        variables={"a": "bar"},
    )
    assert not result.errors
@@ -27,7 +27,7 @@ def test_issue():
        graphene.Schema(query=Query)

    assert str(exc_info.value) == (
        "Query fields cannot be resolved."
        "Query fields cannot be resolved:"
        " IterableConnectionField type has to be a subclass of Connection."
        ' Received "MyUnion".'
    )

@@ -1,27 +0,0 @@
import pickle

from ...types.enum import Enum


class PickleEnum(Enum):
    # defined outside the test because pickle is unable to dump a class defined inside a pytest function
    A = "a"
    B = 1


def test_enums_pickling():
    a = PickleEnum.A
    pickled = pickle.dumps(a)
    restored = pickle.loads(pickled)
    assert type(a) is type(restored)
    assert a == restored
    assert a.value == restored.value
    assert a.name == restored.name

    b = PickleEnum.B
    pickled = pickle.dumps(b)
    restored = pickle.loads(pickled)
    assert type(a) is type(restored)
    assert b == restored
    assert b.value == restored.value
    assert b.name == restored.name
@@ -1,53 +1,52 @@
# flake8: noqa
from graphql import GraphQLResolveInfo as ResolveInfo

from .argument import Argument
from .objecttype import ObjectType
from .base64 import Base64
from .interface import Interface
from .context import Context
from .mutation import Mutation
from .scalars import Scalar, String, ID, Int, Float, Boolean
from .datetime import Date, DateTime, Time
from .decimal import Decimal
from .dynamic import Dynamic
from .json import JSONString
from .uuid import UUID
from .schema import Schema
from .structures import List, NonNull
from .enum import Enum
from .field import Field
from .inputfield import InputField
from .argument import Argument
from .inputobjecttype import InputObjectType
from .interface import Interface
from .dynamic import Dynamic
from .json import JSONString
from .mutation import Mutation
from .objecttype import ObjectType
from .scalars import ID, BigInt, Boolean, Float, Int, Scalar, String
from .schema import Schema
from .structures import List, NonNull
from .union import Union
from .uuid import UUID
from .context import Context


__all__ = [
    "Argument",
    "ObjectType",
    "Base64",
    "InputObjectType",
    "BigInt",
    "Interface",
    "Boolean",
    "Mutation",
    "Context",
    "Date",
    "DateTime",
    "Decimal",
    "Dynamic",
    "Enum",
    "Field",
    "Float",
    "ID",
    "InputField",
    "InputObjectType",
    "Int",
    "Interface",
    "JSONString",
    "List",
    "Mutation",
    "NonNull",
    "ObjectType",
    "ResolveInfo",
    "Scalar",
    "Schema",
    "Scalar",
    "String",
    "ID",
    "Int",
    "Float",
    "Date",
    "DateTime",
    "Time",
    "Decimal",
    "JSONString",
    "UUID",
    "Boolean",
    "List",
    "NonNull",
    "Argument",
    "Dynamic",
    "Union",
    "Context",
    "ResolveInfo",
]

@@ -1,5 +1,4 @@
from itertools import chain
from graphql import Undefined

from .dynamic import Dynamic
from .mountedtype import MountedType
@@ -31,22 +30,18 @@ class Argument(MountedType):
        type (class for a graphene.UnmountedType): must be a class (not an instance) of an
            unmounted graphene type (ex. scalar or object) which is used for the type of this
            argument in the GraphQL schema.
        required (optional, bool): indicates this argument as not null in the graphql schema. Same behavior
        required (bool): indicates this argument as not null in the graphql schema. Same behavior
            as graphene.NonNull. Default False.
        name (optional, str): the name of the GraphQL argument. Defaults to parameter name.
        name (str): the name of the GraphQL argument. Defaults to parameter name.
        description (optional, str): the description of the GraphQL argument in the schema.
        description (str): the description of the GraphQL argument in the schema.
        default_value (optional, Any): The value to be provided if the user does not set this argument in
        default_value (Any): The value to be provided if the user does not set this argument in
            the operation.
        deprecation_reason (optional, str): Setting this value indicates that the argument is
            deprecated and may provide instruction or a reason on how clients should proceed. Cannot be
            set if the argument is required (see spec).
    """

    def __init__(
        self,
        type_,
        type,
        default_value=Undefined,
        default_value=None,
        deprecation_reason=None,
        description=None,
        name=None,
        required=False,
@@ -55,16 +50,12 @@ class Argument(MountedType):
        super(Argument, self).__init__(_creation_counter=_creation_counter)

        if required:
            assert (
            type = NonNull(type)
                deprecation_reason is None
            ), f"Argument {name} is required, cannot deprecate it."
            type_ = NonNull(type_)

        self.name = name
        self._type = type_
        self._type = type
        self.default_value = default_value
        self.description = description
        self.deprecation_reason = deprecation_reason

    @property
    def type(self):
@@ -76,7 +67,6 @@ class Argument(MountedType):
            and self.type == other.type
            and self.default_value == other.default_value
            and self.description == other.description
            and self.deprecation_reason == other.deprecation_reason
        )

@@ -104,17 +94,18 @@ def to_arguments(args, extra_args=None):

        if isinstance(arg, (InputField, Field)):
            raise ValueError(
                f"Expected {default_name} to be Argument, "
                "Expected {} to be Argument, but received {}. Try using Argument({}).".format(
                f"but received {type(arg).__name__}. Try using Argument({arg.type})."
                    default_name, type(arg).__name__, arg.type
                )
            )

        if not isinstance(arg, Argument):
            raise ValueError(f'Unknown argument "{default_name}".')
            raise ValueError('Unknown argument "{}".'.format(default_name))

        arg_name = default_name or arg.name
        assert (
            arg_name not in arguments
        ), f'More than one Argument have same name "{arg_name}".'
        ), 'More than one Argument have same name "{}".'.format(arg_name)
        arguments[arg_name] = arg

    return arguments

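The master side of this file adds deprecation_reason to Argument and forbids combining it with required, as the assertion in __init__ above shows. A short hedged sketch of both behaviours (the field and argument names are invented for illustration):

    from graphene import Argument, Field, ObjectType, String


    class Query(ObjectType):
        # Deprecating an *optional* argument is allowed on the master side.
        greet = Field(
            String,
            who=Argument(String, deprecation_reason="Use greetPerson instead."),
        )

        def resolve_greet(root, info, who=None):
            return f"Hello {who or 'anonymous'}!"


    # Marking a *required* argument as deprecated trips the assertion instead:
    # Argument(String, required=True, deprecation_reason="...")  -> AssertionError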
@@ -1,17 +1,17 @@
from typing import Type, Optional
from typing import Type

from ..utils.subclass_with_meta import SubclassWithMeta, SubclassWithMeta_Meta
from ..utils.subclass_with_meta import SubclassWithMeta
from ..utils.trim_docstring import trim_docstring


class BaseOptions:
    name: Optional[str] = None
    name = None  # type: str
    description: Optional[str] = None
    description = None  # type: str

    _frozen: bool = False
    _frozen = False  # type: bool

    def __init__(self, class_type: Type):
    def __init__(self, class_type):
        self.class_type: Type = class_type
        self.class_type = class_type  # type: Type

    def freeze(self):
        self._frozen = True
@@ -20,13 +20,10 @@ class BaseOptions:
        if not self._frozen:
            super(BaseOptions, self).__setattr__(name, value)
        else:
            raise Exception(f"Can't modify frozen Options {self}")
            raise Exception("Can't modify frozen Options {}".format(self))

    def __repr__(self):
        return f"<{self.__class__.__name__} name={repr(self.name)}>"
        return "<{} name={}>".format(self.__class__.__name__, repr(self.name))


BaseTypeMeta = SubclassWithMeta_Meta


class BaseType(SubclassWithMeta):
@@ -38,7 +35,7 @@ class BaseType(SubclassWithMeta):
    def __init_subclass_with_meta__(
        cls, name=None, description=None, _meta=None, **_kwargs
    ):
        assert "_meta" not in cls.__dict__, "Can't assign meta directly"
        assert "_meta" not in cls.__dict__, "Can't assign directly meta"
        if not _meta:
            return
        _meta.name = name or cls.__name__

@@ -1,43 +0,0 @@
from binascii import Error as _Error
from base64 import b64decode, b64encode

from graphql.error import GraphQLError
from graphql.language import StringValueNode, print_ast

from .scalars import Scalar


class Base64(Scalar):
    """
    The `Base64` scalar type represents a base64-encoded String.
    """

    @staticmethod
    def serialize(value):
        if not isinstance(value, bytes):
            if isinstance(value, str):
                value = value.encode("utf-8")
            else:
                value = str(value).encode("utf-8")
        return b64encode(value).decode("utf-8")

    @classmethod
    def parse_literal(cls, node, _variables=None):
        if not isinstance(node, StringValueNode):
            raise GraphQLError(
                f"Base64 cannot represent non-string value: {print_ast(node)}"
            )
        return cls.parse_value(node.value)

    @staticmethod
    def parse_value(value):
        if not isinstance(value, bytes):
            if not isinstance(value, str):
                raise GraphQLError(
                    f"Base64 cannot represent non-string value: {repr(value)}"
                )
            value = value.encode("utf-8")
        try:
            return b64decode(value, validate=True).decode("utf-8")
        except _Error:
            raise GraphQLError(f"Base64 cannot decode value: {repr(value)}")
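Since the Base64 scalar only exists on the master side (the whole file is absent from v3.0.0a1), here is a brief hedged sketch of how it behaves when mounted on a field; the query and field names are illustrative only:

    from graphene import Base64, Field, ObjectType, Schema


    class Query(ObjectType):
        echo = Field(Base64, value=Base64())

        def resolve_echo(root, info, value=None):
            # parse_value has already decoded the argument; serialize re-encodes it.
            return value


    schema = Schema(query=Query)
    result = schema.execute('{ echo(value: "aGVsbG8=") }')
    assert result.data == {"echo": "aGVsbG8="}  # round-trips the base64 payload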
@@ -1,9 +1,10 @@
from __future__ import absolute_import

import datetime

from dateutil.parser import isoparse
from aniso8601 import parse_date, parse_datetime, parse_time
from graphql import Undefined
from graphql.error import GraphQLError
from graphql.language import StringValueNode
from graphql.language import StringValueNode, print_ast

from .scalars import Scalar

@@ -19,28 +20,25 @@ class Date(Scalar):
    def serialize(date):
        if isinstance(date, datetime.datetime):
            date = date.date()
        if not isinstance(date, datetime.date):
        assert isinstance(
            raise GraphQLError(f"Date cannot represent value: {repr(date)}")
            date, datetime.date
        ), 'Received not compatible date "{}"'.format(repr(date))
        return date.isoformat()

    @classmethod
    def parse_literal(cls, node, _variables=None):
    def parse_literal(cls, node):
        if not isinstance(node, StringValueNode):
        if isinstance(node, StringValueNode):
            raise GraphQLError(
            return cls.parse_value(node.value)
                f"Date cannot represent non-string value: {print_ast(node)}"
            )
        return cls.parse_value(node.value)

    @staticmethod
    def parse_value(value):
        if isinstance(value, datetime.date):
            return value
        if not isinstance(value, str):
            raise GraphQLError(f"Date cannot represent non-string value: {repr(value)}")
        try:
            return datetime.date.fromisoformat(value)
            if isinstance(value, datetime.date):
                return value
            elif isinstance(value, str):
                return parse_date(value)
        except ValueError:
            raise GraphQLError(f"Date cannot represent value: {repr(value)}")
            return Undefined


class DateTime(Scalar):

@@ -52,30 +50,25 @@ class DateTime(Scalar):

    @staticmethod
    def serialize(dt):
        if not isinstance(dt, (datetime.datetime, datetime.date)):
        assert isinstance(
            raise GraphQLError(f"DateTime cannot represent value: {repr(dt)}")
            dt, (datetime.datetime, datetime.date)
        ), 'Received not compatible datetime "{}"'.format(repr(dt))
        return dt.isoformat()

    @classmethod
    def parse_literal(cls, node, _variables=None):
    def parse_literal(cls, node):
        if not isinstance(node, StringValueNode):
        if isinstance(node, StringValueNode):
            raise GraphQLError(
            return cls.parse_value(node.value)
                f"DateTime cannot represent non-string value: {print_ast(node)}"
            )
        return cls.parse_value(node.value)

    @staticmethod
    def parse_value(value):
        if isinstance(value, datetime.datetime):
            return value
        if not isinstance(value, str):
            raise GraphQLError(
                f"DateTime cannot represent non-string value: {repr(value)}"
            )
        try:
            return isoparse(value)
            if isinstance(value, datetime.datetime):
                return value
            elif isinstance(value, str):
                return parse_datetime(value)
        except ValueError:
            raise GraphQLError(f"DateTime cannot represent value: {repr(value)}")
            return Undefined


class Time(Scalar):

@@ -87,25 +80,22 @@ class Time(Scalar):

    @staticmethod
    def serialize(time):
        if not isinstance(time, datetime.time):
        assert isinstance(
            raise GraphQLError(f"Time cannot represent value: {repr(time)}")
            time, datetime.time
        ), 'Received not compatible time "{}"'.format(repr(time))
        return time.isoformat()

    @classmethod
    def parse_literal(cls, node, _variables=None):
    def parse_literal(cls, node):
        if not isinstance(node, StringValueNode):
        if isinstance(node, StringValueNode):
            raise GraphQLError(
            return cls.parse_value(node.value)
                f"Time cannot represent non-string value: {print_ast(node)}"
            )
        return cls.parse_value(node.value)

    @classmethod
    def parse_value(cls, value):
        if isinstance(value, datetime.time):
            return value
        if not isinstance(value, str):
            raise GraphQLError(f"Time cannot represent non-string value: {repr(value)}")
        try:
            return datetime.time.fromisoformat(value)
            if isinstance(value, datetime.time):
                return value
            elif isinstance(value, str):
                return parse_time(value)
        except ValueError:
            raise GraphQLError(f"Time cannot represent value: {repr(value)}")
            return Undefined

@@ -1,7 +1,8 @@
from __future__ import absolute_import

from decimal import Decimal as _Decimal

from graphql import Undefined
from graphql.language.ast import StringValueNode
from graphql.language.ast import StringValueNode, IntValueNode

from .scalars import Scalar
@@ -15,20 +16,19 @@ class Decimal(Scalar):
    def serialize(dec):
        if isinstance(dec, str):
            dec = _Decimal(dec)
        assert isinstance(
        assert isinstance(dec, _Decimal), 'Received not compatible Decimal "{}"'.format(
            dec, _Decimal
            repr(dec)
        ), f'Received not compatible Decimal "{repr(dec)}"'
        )
        return str(dec)

    @classmethod
    def parse_literal(cls, node, _variables=None):
    def parse_literal(cls, node):
        if isinstance(node, (StringValueNode, IntValueNode)):
        if isinstance(node, StringValueNode):
            return cls.parse_value(node.value)
        return Undefined

    @staticmethod
    def parse_value(value):
        try:
            return _Decimal(value)
        except Exception:
        except ValueError:
            return Undefined
            return None

@@ -1,5 +1,3 @@
from enum import Enum as PyEnum

from graphql import (
    GraphQLEnumType,
    GraphQLInputObjectType,
@@ -20,11 +18,6 @@ class GrapheneGraphQLType:
        self.graphene_type = kwargs.pop("graphene_type")
        super(GrapheneGraphQLType, self).__init__(*args, **kwargs)

    def __copy__(self):
        result = GrapheneGraphQLType(graphene_type=self.graphene_type)
        result.__dict__.update(self.__dict__)
        return result


class GrapheneInterfaceType(GrapheneGraphQLType, GraphQLInterfaceType):
    pass
@@ -43,19 +36,7 @@ class GrapheneScalarType(GrapheneGraphQLType, GraphQLScalarType):


class GrapheneEnumType(GrapheneGraphQLType, GraphQLEnumType):
    def serialize(self, value):
    pass
        if not isinstance(value, PyEnum):
            enum = self.graphene_type._meta.enum
            try:
                # Try and get enum by value
                value = enum(value)
            except ValueError:
                # Try and get enum by name
                try:
                    value = enum[value]
                except KeyError:
                    pass
        return super(GrapheneEnumType, self).serialize(value)


class GrapheneInputObjectType(GrapheneGraphQLType, GraphQLInputObjectType):

@@ -10,10 +10,10 @@ class Dynamic(MountedType):
    the schema. So we can have lazy fields.
    """

    def __init__(self, type_, with_schema=False, _creation_counter=None):
    def __init__(self, type, with_schema=False, _creation_counter=None):
        super(Dynamic, self).__init__(_creation_counter=_creation_counter)
        assert inspect.isfunction(type_) or isinstance(type_, partial)
        assert inspect.isfunction(type) or isinstance(type, partial)
        self.type = type_
        self.type = type
        self.with_schema = with_schema

    def get_type(self, schema=None):

@@ -12,10 +12,6 @@ def eq_enum(self, other):
    return self.value is other


def hash_enum(self):
    return hash(self.name)


EnumType = type(PyEnum)

@@ -25,17 +21,15 @@ class EnumOptions(BaseOptions):


class EnumMeta(SubclassWithMeta_Meta):
    def __new__(cls, name_, bases, classdict, **options):
    def __new__(cls, name, bases, classdict, **options):
        enum_members = dict(classdict, __eq__=eq_enum, __hash__=hash_enum)
        enum_members = dict(classdict, __eq__=eq_enum)
        # We remove the Meta attribute from the class to not collide
        # with the enum values.
        enum_members.pop("Meta", None)
        enum = PyEnum(cls.__name__, enum_members)
        obj = SubclassWithMeta_Meta.__new__(
        return SubclassWithMeta_Meta.__new__(
            cls, name_, bases, dict(classdict, __enum__=enum), **options
            cls, name, bases, dict(classdict, __enum__=enum), **options
        )
        globals()[name_] = obj.__enum__
        return obj

    def get(cls, value):
        return cls._meta.enum(value)

@@ -58,19 +52,15 @@ class EnumMeta(SubclassWithMeta_Meta):
        return super(EnumMeta, cls).__call__(*args, **kwargs)
        # return cls._meta.enum(*args, **kwargs)

    def __iter__(cls):
    def from_enum(cls, enum, description=None, deprecation_reason=None):  # noqa: N805
        return cls._meta.enum.__iter__()
        description = description or enum.__doc__

    def from_enum(cls, enum, name=None, description=None, deprecation_reason=None):  # noqa: N805
        name = name or enum.__name__
        description = description or enum.__doc__ or "An enumeration."
        meta_dict = {
            "enum": enum,
            "description": description,
            "deprecation_reason": deprecation_reason,
        }
        meta_class = type("Meta", (object,), meta_dict)
        return type(name, (Enum,), {"Meta": meta_class})
        return type(meta_class.enum.__name__, (Enum,), {"Meta": meta_class})


class Enum(UnmountedType, BaseType, metaclass=EnumMeta):

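Two master-side additions stand out in this hunk: Enum classes become iterable (__iter__ delegates to the underlying PyEnum) and from_enum accepts an explicit name plus a default description. A hedged sketch of from_enum, assuming the helpers behave as the hunk shows; the Python enum itself is invented for illustration:

    from enum import Enum as PyEnum

    import graphene


    class Color(PyEnum):
        RED = 1
        GREEN = 2


    # On the master side the graphene type can be renamed at conversion time.
    ColorType = graphene.Enum.from_enum(Color, name="PaletteColor")
    assert ColorType._meta.name == "PaletteColor"
    assert [e.name for e in ColorType] == ["RED", "GREEN"]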
@@ -4,17 +4,15 @@ from functools import partial

from .argument import Argument, to_arguments
from .mountedtype import MountedType
from .resolver import default_resolver
from .structures import NonNull
from .unmountedtype import UnmountedType
from .utils import get_type
from ..utils.deprecated import warn_deprecation

base_type = type


def source_resolver(source, root, info, **args):
    resolved = default_resolver(source, None, root, info, **args)
    resolved = getattr(root, source, None)
    if inspect.isfunction(resolved) or inspect.ismethod(resolved):
        return resolved()
    return resolved

@@ -41,13 +39,11 @@ class Field(MountedType):
        last_name = graphene.Field(String, description='Surname')  # explicitly mounted as Field

    args:
        type (class for a graphene.UnmountedType): Must be a class (not an instance) of an
        type (class for a graphene.UnmountedType): must be a class (not an instance) of an
            unmounted graphene type (ex. scalar or object) which is used for the type of this
            field in the GraphQL schema. You can provide a dotted module import path (string)
            field in the GraphQL schema.
            to the class instead of the class itself (e.g. to avoid circular import issues).
            args (optional, Dict[str, graphene.Argument]): arguments that can be input to the field.
        args (optional, Dict[str, graphene.Argument]): Arguments that can be input to the field.
            Prefer to use **extra_args.
            Prefer to use ``**extra_args``, unless you use an argument name that clashes with one
            of the Field arguments presented here (see :ref:`example<ResolverParamGraphQLArguments>`).
        resolver (optional, Callable): A function to get the value for a Field from the parent
            value object. If not set, the default resolver method for the schema is used.
        source (optional, str): attribute name to resolve for this field from the parent value
@@ -66,7 +62,7 @@ class Field(MountedType):

    def __init__(
        self,
        type_,
        type,
        args=None,
        resolver=None,
        source=None,
@@ -76,21 +72,21 @@ class Field(MountedType):
        required=False,
        _creation_counter=None,
        default_value=None,
        **extra_args,
        **extra_args
    ):
        super(Field, self).__init__(_creation_counter=_creation_counter)
        assert not args or isinstance(
        assert not args or isinstance(args, Mapping), (
            args, Mapping
            'Arguments in a field have to be a mapping, received "{}".'
        ), f'Arguments in a field have to be a mapping, received "{args}".'
        ).format(args)
        assert not (
            source and resolver
        ), "A Field cannot have a source and a resolver in at the same time."
        assert not callable(
        assert not callable(default_value), (
            default_value
            'The default value can not be a function but received "{}".'
        ), f'The default value can not be a function but received "{base_type(default_value)}".'
        ).format(base_type(default_value))

        if required:
            type_ = NonNull(type_)
            type = NonNull(type)

        # Check if name is actually an argument of the field
        if isinstance(name, (Argument, UnmountedType)):
@@ -103,7 +99,7 @@ class Field(MountedType):
            source = None

        self.name = name
        self._type = type_
        self._type = type
        self.args = to_arguments(args or {}, extra_args)
        if source:
            resolver = partial(source_resolver, source)
@@ -116,24 +112,5 @@ class Field(MountedType):
    def type(self):
        return get_type(self._type)

    get_resolver = None
    def get_resolver(self, parent_resolver):

    def wrap_resolve(self, parent_resolver):
        """
        Wraps a function resolver, using the ObjectType resolve_{FIELD_NAME}
        (parent_resolver) if the Field definition has no resolver.
        """
        if self.get_resolver is not None:
            warn_deprecation(
                "The get_resolver method is being deprecated, please rename it to wrap_resolve."
            )
            return self.get_resolver(parent_resolver)

        return self.resolver or parent_resolver

    def wrap_subscribe(self, parent_subscribe):
        """
        Wraps a function subscribe, using the ObjectType subscribe_{FIELD_NAME}
        (parent_subscribe) if the Field definition has no subscribe.
        """
        return parent_subscribe

@@ -1,3 +1,5 @@
from __future__ import unicode_literals

from graphql.language.ast import (
    BooleanValueNode,
    FloatValueNode,
@@ -27,7 +29,7 @@ class GenericScalar(Scalar):
    parse_value = identity

    @staticmethod
    def parse_literal(ast, _variables=None):
    def parse_literal(ast):
        if isinstance(ast, (StringValueNode, BooleanValueNode)):
            return ast.value
        elif isinstance(ast, IntValueNode):

@@ -1,5 +1,4 @@
from graphql import Undefined

from .mountedtype import MountedType
from .structures import NonNull
from .utils import get_type
@@ -48,23 +47,20 @@ class InputField(MountedType):

    def __init__(
        self,
        type_,
        type,
        name=None,
        default_value=Undefined,
        deprecation_reason=None,
        description=None,
        required=False,
        _creation_counter=None,
        **extra_args,
        **extra_args
    ):
        super(InputField, self).__init__(_creation_counter=_creation_counter)
        self.name = name
        if required:
            assert (
            type = NonNull(type)
                deprecation_reason is None
            self._type = type
            ), f"InputField {name} is required, cannot deprecate it."
            type_ = NonNull(type_)
        self._type = type_
        self.deprecation_reason = deprecation_reason
        self.default_value = default_value
        self.description = description

@@ -1,12 +1,11 @@
from typing import TYPE_CHECKING

from .base import BaseOptions, BaseType
from .inputfield import InputField
from .unmountedtype import UnmountedType
from .utils import yank_fields_from_attrs

# For static type checking with type checker
# For static type checking with Mypy
if TYPE_CHECKING:
MYPY = False
if MYPY:
    from typing import Dict, Callable  # NOQA


@@ -15,39 +14,14 @@ class InputObjectTypeOptions(BaseOptions):
    container = None  # type: InputObjectTypeContainer


# Currently in Graphene, we get a `None` whenever we access an (optional) field that was not set in an InputObjectType
class InputObjectTypeContainer(dict, BaseType):
# using the InputObjectType.<attribute> dot access syntax. This is ambiguous, because in this current (Graphene
# historical) arrangement, we cannot distinguish between a field not being set and a field being set to None.
# At the same time, we shouldn't break existing code that expects a `None` when accessing a field that was not set.
_INPUT_OBJECT_TYPE_DEFAULT_VALUE = None

# To mitigate this, we provide the function `set_input_object_type_default_value` to allow users to change the default
# value returned in non-specified fields in InputObjectType to another meaningful sentinel value (e.g. Undefined)
# if they want to. This way, we can keep code that expects a `None` working while we figure out a better solution (or
# a well-documented breaking change) for this issue.


def set_input_object_type_default_value(default_value):
    """
    Change the sentinel value returned by non-specified fields in an InputObjectType
    Useful to differentiate between a field not being set and a field being set to None by using a sentinel value
    (e.g. Undefined is a good sentinel value for this purpose)

    This function should be called at the beginning of the app or in some other place where it is guaranteed to
    be called before any InputObjectType is defined.
    """
    global _INPUT_OBJECT_TYPE_DEFAULT_VALUE
    _INPUT_OBJECT_TYPE_DEFAULT_VALUE = default_value


class InputObjectTypeContainer(dict, BaseType):  # type: ignore
    class Meta:
        abstract = True

    def __init__(self, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)
        for key in self._meta.fields:
            setattr(self, key, self.get(key, _INPUT_OBJECT_TYPE_DEFAULT_VALUE))
            setattr(self, key, self.get(key, None))

    def __init_subclass__(cls, *args, **kwargs):
        pass

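The long master-side comment block above explains the ambiguity this helper addresses: unset optional input fields historically read back as None, which is indistinguishable from an explicit null. A hedged sketch of opting into Undefined as the sentinel, called before any InputObjectType is defined as the docstring asks (the schema and field names are illustrative):

    from graphql import Undefined

    import graphene
    from graphene.types.inputobjecttype import set_input_object_type_default_value

    # Switch the sentinel once, at import/startup time.
    set_input_object_type_default_value(Undefined)


    class PersonInput(graphene.InputObjectType):
        name = graphene.String()
        age = graphene.Int()


    class Query(graphene.ObjectType):
        check = graphene.Boolean(person=PersonInput())

        def resolve_check(root, info, person):
            # "age" was not supplied in the query, so it now reads as Undefined
            # instead of being conflated with None.
            return person.age is Undefined


    schema = graphene.Schema(query=Query)
    result = schema.execute('{ check(person: {name: "Ada"}) }')
    assert result.data == {"check": True}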
@@ -1,17 +1,15 @@
from typing import TYPE_CHECKING

from .base import BaseOptions, BaseType
from .field import Field
from .utils import yank_fields_from_attrs

# For static type checking with type checker
# For static type checking with Mypy
if TYPE_CHECKING:
MYPY = False
    from typing import Dict, Iterable, Type  # NOQA
if MYPY:
    from typing import Dict  # NOQA


class InterfaceOptions(BaseOptions):
    fields = None  # type: Dict[str, Field]
    interfaces = ()  # type: Iterable[Type[Interface]]


class Interface(BaseType):

@@ -47,7 +45,7 @@ class Interface(BaseType):
    """

    @classmethod
    def __init_subclass_with_meta__(cls, _meta=None, interfaces=(), **options):
    def __init_subclass_with_meta__(cls, _meta=None, **options):
        if not _meta:
            _meta = InterfaceOptions(cls)

@@ -60,9 +58,6 @@ class Interface(BaseType):
        else:
            _meta.fields = fields

        if not _meta.interfaces:
            _meta.interfaces = interfaces

        super(Interface, cls).__init_subclass_with_meta__(_meta=_meta, **options)

    @classmethod

graphene/types/json.py

@@ -1,6 +1,7 @@
+from __future__ import absolute_import
+
 import json

-from graphql import Undefined
 from graphql.language.ast import StringValueNode

 from .scalars import Scalar
@@ -19,13 +20,9 @@ class JSONString(Scalar):
         return json.dumps(dt)

     @staticmethod
-    def parse_literal(node, _variables=None):
+    def parse_literal(node):
         if isinstance(node, StringValueNode):
-            try:
-                return json.loads(node.value)
-            except Exception as error:
-                raise ValueError(f"Badly formed JSONString: {str(error)}")
-        return Undefined
+            return json.loads(node.value)

     @staticmethod
     def parse_value(value):
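A short usage sketch of the JSONString scalar shown above, using graphene's public API; the field name and payload are illustrative. The scalar serializes resolver return values with json.dumps and parses string literals with json.loads.

import graphene


class Query(graphene.ObjectType):
    settings = graphene.JSONString()

    def resolve_settings(root, info):
        # Serialized to a JSON string by the scalar.
        return {"theme": "dark", "retries": 3}


schema = graphene.Schema(query=Query)
result = schema.execute("{ settings }")
print(result.data["settings"])  # '{"theme": "dark", "retries": 3}'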
graphene/types/mounted.py

@@ -8,13 +8,13 @@ class MountedType(OrderedType):
         """
         Mount the UnmountedType instance
         """
-        assert isinstance(
-            unmounted, UnmountedType
-        ), f"{cls.__name__} can't mount {repr(unmounted)}"
+        assert isinstance(unmounted, UnmountedType), ("{} can't mount {}").format(
+            cls.__name__, repr(unmounted)
+        )

         return cls(
             unmounted.get_type(),
             *unmounted.args,
             _creation_counter=unmounted.creation_counter,
-            **unmounted.kwargs,
+            **unmounted.kwargs
         )
graphene/types/mutation.py

@@ -1,5 +1,3 @@
-from typing import TYPE_CHECKING
-
 from ..utils.deprecated import warn_deprecation
 from ..utils.get_unbound_function import get_unbound_function
 from ..utils.props import props
@@ -8,8 +6,9 @@ from .objecttype import ObjectType, ObjectTypeOptions
 from .utils import yank_fields_from_attrs
 from .interface import Interface

-# For static type checking with type checker
-if TYPE_CHECKING:
+# For static type checking with Mypy
+MYPY = False
+if MYPY:
     from .argument import Argument  # NOQA
     from typing import Dict, Type, Callable, Iterable  # NOQA

@@ -30,21 +29,21 @@ class Mutation(ObjectType):

     .. code:: python

-        import graphene
+        from graphene import Mutation, ObjectType, String, Boolean, Field

-        class CreatePerson(graphene.Mutation):
+        class CreatePerson(Mutation):
             class Arguments:
-                name = graphene.String()
+                name = String()

-            ok = graphene.Boolean()
-            person = graphene.Field(Person)
+            ok = Boolean()
+            person = Field(Person)

             def mutate(parent, info, name):
                 person = Person(name=name)
                 ok = True
                 return CreatePerson(person=person, ok=ok)

-        class Mutation(graphene.ObjectType):
+        class Mutation(ObjectType):
             create_person = CreatePerson.Field()

     Meta class options (optional):
@@ -73,44 +72,56 @@ class Mutation(ObjectType):
         output=None,
         arguments=None,
         _meta=None,
-        **options,
+        **options
     ):
         if not _meta:
             _meta = MutationOptions(cls)

         output = output or getattr(cls, "Output", None)
         fields = {}

         for interface in interfaces:
-            assert issubclass(
-                interface, Interface
-            ), f'All interfaces of {cls.__name__} must be a subclass of Interface. Received "{interface}".'
+            assert issubclass(interface, Interface), (
+                'All interfaces of {} must be a subclass of Interface. Received "{}".'
+            ).format(cls.__name__, interface)
             fields.update(interface._meta.fields)

         if not output:
             # If output is defined, we don't need to get the fields
             fields = {}
             for base in reversed(cls.__mro__):
                 fields.update(yank_fields_from_attrs(base.__dict__, _as=Field))
             output = cls

         if not arguments:
             input_class = getattr(cls, "Arguments", None)
             if not input_class:
                 input_class = getattr(cls, "Input", None)
                 if input_class:
                     warn_deprecation(
-                        f"Please use {cls.__name__}.Arguments instead of {cls.__name__}.Input."
-                        " Input is now only used in ClientMutationID.\n"
-                        "Read more:"
-                        " https://github.com/graphql-python/graphene/blob/v2.0.0/UPGRADE-v2.0.md#mutation-input"
+                        (
+                            "Please use {name}.Arguments instead of {name}.Input."
+                            " Input is now only used in ClientMutationID.\n"
+                            "Read more:"
+                            " https://github.com/graphql-python/graphene/blob/v2.0.0/UPGRADE-v2.0.md#mutation-input"
+                        ).format(name=cls.__name__)
                     )
-            arguments = props(input_class) if input_class else {}
+
+            if input_class:
+                arguments = props(input_class)
+            else:
+                arguments = {}

         if not resolver:
             mutate = getattr(cls, "mutate", None)
             assert mutate, "All mutations must define a mutate method in it"
             resolver = get_unbound_function(mutate)

         if _meta.fields:
             _meta.fields.update(fields)
         else:
             _meta.fields = fields

         _meta.interfaces = interfaces
         _meta.output = output
         _meta.resolver = resolver
@@ -122,7 +133,7 @@ class Mutation(ObjectType):
     def Field(
         cls, name=None, description=None, deprecation_reason=None, required=False
     ):
-        """Mount instance of mutation Field."""
+        """ Mount instance of mutation Field. """
         return Field(
             cls._meta.output,
             args=cls._meta.arguments,
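The docstring example above can be run end to end. A hedged sketch, assuming a trivial Person ObjectType and the graphene-level Schema API; names are illustrative.

import graphene


class Person(graphene.ObjectType):
    name = graphene.String()


class CreatePerson(graphene.Mutation):
    class Arguments:
        name = graphene.String()

    ok = graphene.Boolean()
    person = graphene.Field(Person)

    def mutate(parent, info, name):
        return CreatePerson(person=Person(name=name), ok=True)


class Query(graphene.ObjectType):
    ping = graphene.String(default_value="pong")


class Mutation(graphene.ObjectType):
    create_person = CreatePerson.Field()


schema = graphene.Schema(query=Query, mutation=Mutation)
result = schema.execute('mutation { createPerson(name: "Ada") { ok person { name } } }')
print(result.data)  # {'createPerson': {'ok': True, 'person': {'name': 'Ada'}}}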
graphene/types/objecttype.py

@@ -1,14 +1,11 @@
-from typing import TYPE_CHECKING
-
-from .base import BaseOptions, BaseType, BaseTypeMeta
+from .base import BaseOptions, BaseType
 from .field import Field
 from .interface import Interface
 from .utils import yank_fields_from_attrs

-from dataclasses import make_dataclass, field
-
-# For static type checking with type checker
-if TYPE_CHECKING:
+# For static type checking with Mypy
+MYPY = False
+if MYPY:
     from typing import Dict, Iterable, Type  # NOQA


@@ -17,38 +14,7 @@ class ObjectTypeOptions(BaseOptions):
     interfaces = ()  # type: Iterable[Type[Interface]]


-class ObjectTypeMeta(BaseTypeMeta):
-    def __new__(cls, name_, bases, namespace, **options):
-        # Note: it's safe to pass options as keyword arguments as they are still type-checked by ObjectTypeOptions.
-
-        # We create this type, to then overload it with the dataclass attrs
-        class InterObjectType:
-            pass
-
-        base_cls = super().__new__(
-            cls, name_, (InterObjectType,) + bases, namespace, **options
-        )
-        if base_cls._meta:
-            fields = [
-                (
-                    key,
-                    "typing.Any",
-                    field(
-                        default=field_value.default_value
-                        if isinstance(field_value, Field)
-                        else None
-                    ),
-                )
-                for key, field_value in base_cls._meta.fields.items()
-            ]
-            dataclass = make_dataclass(name_, fields, bases=())
-            InterObjectType.__init__ = dataclass.__init__
-            InterObjectType.__eq__ = dataclass.__eq__
-            InterObjectType.__repr__ = dataclass.__repr__
-        return base_cls
-
-
-class ObjectType(BaseType, metaclass=ObjectTypeMeta):
+class ObjectType(BaseType):
     """
     Object Type Definition

@@ -64,7 +30,7 @@ class ObjectType(BaseType, metaclass=ObjectTypeMeta):
     Methods starting with ``resolve_<field_name>`` are bound as resolvers of the matching Field
     name. If no resolver is provided, the default resolver is used.

-    Ambiguous types with Interface and Union can be determined through ``is_type_of`` method and
+    Ambiguous types with Interface and Union can be determined through``is_type_of`` method and
     ``Meta.possible_types`` attribute.

     .. code:: python
@@ -127,28 +93,32 @@ class ObjectType(BaseType, metaclass=ObjectTypeMeta):
         possible_types=(),
         default_resolver=None,
         _meta=None,
-        **options,
+        **options
     ):
         if not _meta:
             _meta = ObjectTypeOptions(cls)

         fields = {}

         for interface in interfaces:
-            assert issubclass(
-                interface, Interface
-            ), f'All interfaces of {cls.__name__} must be a subclass of Interface. Received "{interface}".'
+            assert issubclass(interface, Interface), (
+                'All interfaces of {} must be a subclass of Interface. Received "{}".'
+            ).format(cls.__name__, interface)
             fields.update(interface._meta.fields)

         for base in reversed(cls.__mro__):
             fields.update(yank_fields_from_attrs(base.__dict__, _as=Field))

         assert not (possible_types and cls.is_type_of), (
-            f"{cls.__name__}.Meta.possible_types will cause type collision with {cls.__name__}.is_type_of. "
+            "{name}.Meta.possible_types will cause type collision with {name}.is_type_of. "
             "Please use one or other."
-        )
+        ).format(name=cls.__name__)

         if _meta.fields:
             _meta.fields.update(fields)
         else:
             _meta.fields = fields

         if not _meta.interfaces:
             _meta.interfaces = interfaces
         _meta.possible_types = possible_types
@@ -157,3 +127,45 @@ class ObjectType(BaseType, metaclass=ObjectTypeMeta):
         super(ObjectType, cls).__init_subclass_with_meta__(_meta=_meta, **options)

     is_type_of = None

+    def __init__(self, *args, **kwargs):
+        # ObjectType acting as container
+        args_len = len(args)
+        fields = self._meta.fields.items()
+        if args_len > len(fields):
+            # Daft, but matches old exception sans the err msg.
+            raise IndexError("Number of args exceeds number of fields")
+        fields_iter = iter(fields)
+
+        if not kwargs:
+            for val, (name, field) in zip(args, fields_iter):
+                setattr(self, name, val)
+        else:
+            for val, (name, field) in zip(args, fields_iter):
+                setattr(self, name, val)
+                kwargs.pop(name, None)
+
+        for name, field in fields_iter:
+            try:
+                val = kwargs.pop(
+                    name, field.default_value if isinstance(field, Field) else None
+                )
+                setattr(self, name, val)
+            except KeyError:
+                pass
+
+        if kwargs:
+            for prop in list(kwargs):
+                try:
+                    if isinstance(
+                        getattr(self.__class__, prop), property
+                    ) or prop.startswith("_"):
+                        setattr(self, prop, kwargs.pop(prop))
+                except AttributeError:
+                    pass
+            if kwargs:
+                raise TypeError(
+                    "'{}' is an invalid keyword argument for {}".format(
+                        list(kwargs)[0], self.__class__.__name__
+                    )
+                )
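At the call site both versions let an ObjectType act as a plain value container, but master wires __init__, __eq__ and __repr__ through a generated dataclass while v3.0.0a1 hand-rolls __init__. A small sketch; the equality comment reflects the master-side dataclass behaviour.

import graphene


class Person(graphene.ObjectType):
    first_name = graphene.String()
    last_name = graphene.String()

    @property
    def full_name(self):
        return f"{self.first_name} {self.last_name}"


p = Person(first_name="Ada", last_name="Lovelace")
print(p.full_name)  # Ada Lovelace
# Expected to be True on the master side, where __eq__ comes from the generated dataclass.
print(p == Person(first_name="Ada", last_name="Lovelace"))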
graphene/types/resolver.py

@@ -7,7 +7,9 @@ def dict_resolver(attname, default_value, root, info, **args):


 def dict_or_attr_resolver(attname, default_value, root, info, **args):
-    resolver = dict_resolver if isinstance(root, dict) else attr_resolver
+    resolver = attr_resolver
+    if isinstance(root, dict):
+        resolver = dict_resolver
     return resolver(attname, default_value, root, info, **args)
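The default resolver therefore reads dict roots by key and every other root by attribute. A standalone sketch that mirrors this logic with stand-in functions (not an import of the graphene module itself):

def attr_resolver(attname, default_value, root, info, **args):
    return getattr(root, attname, default_value)


def dict_resolver(attname, default_value, root, info, **args):
    return root.get(attname, default_value)


def dict_or_attr_resolver(attname, default_value, root, info, **args):
    resolver = dict_resolver if isinstance(root, dict) else attr_resolver
    return resolver(attname, default_value, root, info, **args)


class Root:
    name = "attr-based"


print(dict_or_attr_resolver("name", None, {"name": "dict-based"}, None))  # dict-based
print(dict_or_attr_resolver("name", None, Root(), None))                  # attr-based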
graphene/types/scalars.py

@@ -1,6 +1,5 @@
 from typing import Any

-from graphql import Undefined
 from graphql.language.ast import (
     BooleanValueNode,
     FloatValueNode,
@@ -68,49 +67,19 @@ class Int(Scalar):
         try:
             num = int(float(value))
         except ValueError:
-            return Undefined
+            return None
         if MIN_INT <= num <= MAX_INT:
             return num
-        return Undefined

     serialize = coerce_int
     parse_value = coerce_int

     @staticmethod
-    def parse_literal(ast, _variables=None):
+    def parse_literal(ast):
         if isinstance(ast, IntValueNode):
             num = int(ast.value)
             if MIN_INT <= num <= MAX_INT:
                 return num
-        return Undefined
-
-
-class BigInt(Scalar):
-    """
-    The `BigInt` scalar type represents non-fractional whole numeric values.
-    `BigInt` is not constrained to 32-bit like the `Int` type and thus is a less
-    compatible type.
-    """
-
-    @staticmethod
-    def coerce_int(value):
-        try:
-            num = int(value)
-        except ValueError:
-            try:
-                num = int(float(value))
-            except ValueError:
-                return Undefined
-        return num
-
-    serialize = coerce_int
-    parse_value = coerce_int
-
-    @staticmethod
-    def parse_literal(ast, _variables=None):
-        if isinstance(ast, IntValueNode):
-            return int(ast.value)
-        return Undefined


 class Float(Scalar):
@@ -121,20 +90,20 @@ class Float(Scalar):
     """

     @staticmethod
-    def coerce_float(value: Any) -> float:
+    def coerce_float(value):
+        # type: (Any) -> float
         try:
             return float(value)
         except ValueError:
-            return Undefined
+            return None

     serialize = coerce_float
     parse_value = coerce_float

     @staticmethod
-    def parse_literal(ast, _variables=None):
+    def parse_literal(ast):
         if isinstance(ast, (FloatValueNode, IntValueNode)):
             return float(ast.value)
-        return Undefined


 class String(Scalar):
@@ -147,17 +116,16 @@ class String(Scalar):
     @staticmethod
     def coerce_string(value):
         if isinstance(value, bool):
-            return "true" if value else "false"
+            return u"true" if value else u"false"
         return str(value)

     serialize = coerce_string
     parse_value = coerce_string

     @staticmethod
-    def parse_literal(ast, _variables=None):
+    def parse_literal(ast):
         if isinstance(ast, StringValueNode):
             return ast.value
-        return Undefined


 class Boolean(Scalar):
@@ -169,10 +137,9 @@ class Boolean(Scalar):
     parse_value = bool

     @staticmethod
-    def parse_literal(ast, _variables=None):
+    def parse_literal(ast):
         if isinstance(ast, BooleanValueNode):
             return ast.value
-        return Undefined


 class ID(Scalar):
@@ -188,7 +155,6 @@ class ID(Scalar):
     parse_value = str

     @staticmethod
-    def parse_literal(ast, _variables=None):
+    def parse_literal(ast):
         if isinstance(ast, (StringValueNode, IntValueNode)):
             return ast.value
-        return Undefined
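On the master side Int stays within the 32-bit GraphQL range and the separate BigInt scalar passes larger integers through; BigInt does not exist on the v3.0.0a1 side of this compare. A hedged sketch; field names and values are illustrative.

import graphene
from graphene.types.scalars import BigInt  # master-side scalar defined above


class Query(graphene.ObjectType):
    small = graphene.Int()
    big = BigInt()

    def resolve_small(root, info):
        return 2 ** 31 - 1  # largest value a GraphQL Int may carry

    def resolve_big(root, info):
        return 2 ** 63      # fine for BigInt, out of range for Int


schema = graphene.Schema(query=Query)
result = schema.execute("{ small big }")
print(result.data)  # {'small': 2147483647, 'big': 9223372036854775808}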
graphene/types/schema.py

@@ -1,4 +1,3 @@
-from enum import Enum as PyEnum
 import inspect
 from functools import partial

@@ -8,14 +7,10 @@ from graphql import (
     graphql,
     graphql_sync,
     introspection_types,
-    parse,
+    is_type,
     print_schema,
-    subscribe,
-    validate,
-    ExecutionResult,
     GraphQLArgument,
     GraphQLBoolean,
-    GraphQLError,
     GraphQLEnumValue,
     GraphQLField,
     GraphQLFloat,
@@ -27,6 +22,7 @@ from graphql import (
     GraphQLObjectType,
     GraphQLSchema,
     GraphQLString,
+    Undefined,
 )

 from ..utils.str_converters import to_camel_case
@@ -61,9 +57,9 @@ def assert_valid_root_type(type_):
         return
     is_graphene_objecttype = inspect.isclass(type_) and issubclass(type_, ObjectType)
     is_graphql_objecttype = isinstance(type_, GraphQLObjectType)
-    assert (
-        is_graphene_objecttype or is_graphql_objecttype
-    ), f"Type {type_} is not a valid ObjectType."
+    assert is_graphene_objecttype or is_graphql_objecttype, (
+        "Type {} is not a valid ObjectType."
+    ).format(type_)


 def is_graphene_type(type_):
@@ -75,75 +71,118 @@ def is_graphene_type(type_):
         return True


+def resolve_type(resolve_type_func, map_, type_name, root, info, _type):
+    type_ = resolve_type_func(root, info)
+
+    if not type_:
+        return_type = map_[type_name]
+        return default_type_resolver(root, info, return_type)
+
+    if inspect.isclass(type_) and issubclass(type_, ObjectType):
+        graphql_type = map_.get(type_._meta.name)
+        assert graphql_type, "Can't find type {} in schema".format(type_._meta.name)
+        assert graphql_type.graphene_type == type_, (
+            "The type {} does not match with the associated graphene type {}."
+        ).format(type_, graphql_type.graphene_type)
+        return graphql_type
+
+    return type_
+
+
 def is_type_of_from_possible_types(possible_types, root, _info):
     return isinstance(root, possible_types)


-# We use this resolver for subscriptions
-def identity_resolve(root, info, **arguments):
-    return root
-
-
-class TypeMap(dict):
+class GrapheneGraphQLSchema(GraphQLSchema):
+    """A GraphQLSchema that can deal with Graphene types as well."""
+
     def __init__(
         self,
         query=None,
         mutation=None,
         subscription=None,
         types=None,
+        directives=None,
         auto_camelcase=True,
     ):
         assert_valid_root_type(query)
         assert_valid_root_type(mutation)
         assert_valid_root_type(subscription)
-        if types is None:
-            types = []
-        for type_ in types:
-            assert is_graphene_type(type_)

         self.auto_camelcase = auto_camelcase
+        super().__init__(query, mutation, subscription, types, directives)

-        create_graphql_type = self.add_type
+        if query:
+            self.query_type = self.get_type(
+                query.name if isinstance(query, GraphQLObjectType) else query._meta.name
+            )
+        if mutation:
+            self.mutation_type = self.get_type(
+                mutation.name
+                if isinstance(mutation, GraphQLObjectType)
+                else mutation._meta.name
+            )
+        if subscription:
+            self.subscription_type = self.get_type(
+                subscription.name
+                if isinstance(subscription, GraphQLObjectType)
+                else subscription._meta.name
+            )

-        self.query = create_graphql_type(query) if query else None
-        self.mutation = create_graphql_type(mutation) if mutation else None
-        self.subscription = create_graphql_type(subscription) if subscription else None
-
-        self.types = [create_graphql_type(graphene_type) for graphene_type in types]
-
-    def add_type(self, graphene_type):
-        if inspect.isfunction(graphene_type):
-            graphene_type = graphene_type()
-        if isinstance(graphene_type, List):
-            return GraphQLList(self.add_type(graphene_type.of_type))
-        if isinstance(graphene_type, NonNull):
-            return GraphQLNonNull(self.add_type(graphene_type.of_type))
-        try:
-            name = graphene_type._meta.name
-        except AttributeError:
-            raise TypeError(f"Expected Graphene type, but received: {graphene_type}.")
-        graphql_type = self.get(name)
-        if graphql_type:
+    def get_graphql_type(self, _type):
+        if not _type:
+            return _type
+        if is_type(_type):
+            return _type
+        if is_graphene_type(_type):
+            graphql_type = self.get_type(_type._meta.name)
+            assert graphql_type, "Type {} not found in this schema.".format(
+                _type._meta.name
+            )
+            assert graphql_type.graphene_type == _type
             return graphql_type
-        if issubclass(graphene_type, ObjectType):
-            graphql_type = self.create_objecttype(graphene_type)
-        elif issubclass(graphene_type, InputObjectType):
-            graphql_type = self.create_inputobjecttype(graphene_type)
-        elif issubclass(graphene_type, Interface):
-            graphql_type = self.create_interface(graphene_type)
-        elif issubclass(graphene_type, Scalar):
-            graphql_type = self.create_scalar(graphene_type)
-        elif issubclass(graphene_type, Enum):
-            graphql_type = self.create_enum(graphene_type)
-        elif issubclass(graphene_type, Union):
-            graphql_type = self.construct_union(graphene_type)
+        raise Exception("{} is not a valid GraphQL type.".format(_type))
+
+    # noinspection PyMethodOverriding
+    def type_map_reducer(self, map_, type_):
+        if not type_:
+            return map_
+        if inspect.isfunction(type_):
+            type_ = type_()
+        if is_graphene_type(type_):
+            return self.graphene_reducer(map_, type_)
+        return super().type_map_reducer(map_, type_)
+
+    def graphene_reducer(self, map_, type_):
+        if isinstance(type_, (List, NonNull)):
+            return self.type_map_reducer(map_, type_.of_type)
+        if type_._meta.name in map_:
+            _type = map_[type_._meta.name]
+            if isinstance(_type, GrapheneGraphQLType):
+                assert _type.graphene_type == type_, (
+                    "Found different types with the same name in the schema: {}, {}."
+                ).format(_type.graphene_type, type_)
+            return map_
+
+        if issubclass(type_, ObjectType):
+            internal_type = self.construct_objecttype(map_, type_)
+        elif issubclass(type_, InputObjectType):
+            internal_type = self.construct_inputobjecttype(map_, type_)
+        elif issubclass(type_, Interface):
+            internal_type = self.construct_interface(map_, type_)
+        elif issubclass(type_, Scalar):
+            internal_type = self.construct_scalar(type_)
+        elif issubclass(type_, Enum):
+            internal_type = self.construct_enum(type_)
+        elif issubclass(type_, Union):
+            internal_type = self.construct_union(map_, type_)
         else:
-            raise TypeError(f"Expected Graphene type, but received: {graphene_type}.")
-        self[name] = graphql_type
-        return graphql_type
+            raise Exception("Expected Graphene type, but received: {}.".format(type_))
+        return super().type_map_reducer(map_, internal_type)

     @staticmethod
-    def create_scalar(graphene_type):
+    def construct_scalar(type_):
         # We have a mapping to the original GraphQL types
         # so there are no collisions.
         _scalars = {
@@ -153,146 +192,138 @@ class TypeMap(dict):
             Boolean: GraphQLBoolean,
             ID: GraphQLID,
         }
-        if graphene_type in _scalars:
-            return _scalars[graphene_type]
+        if type_ in _scalars:
+            return _scalars[type_]

         return GrapheneScalarType(
-            graphene_type=graphene_type,
-            name=graphene_type._meta.name,
-            description=graphene_type._meta.description,
-            serialize=getattr(graphene_type, "serialize", None),
-            parse_value=getattr(graphene_type, "parse_value", None),
-            parse_literal=getattr(graphene_type, "parse_literal", None),
+            graphene_type=type_,
+            name=type_._meta.name,
+            description=type_._meta.description,
+            serialize=getattr(type_, "serialize", None),
+            parse_value=getattr(type_, "parse_value", None),
+            parse_literal=getattr(type_, "parse_literal", None),
         )

     @staticmethod
-    def create_enum(graphene_type):
+    def construct_enum(type_):
         values = {}
-        for name, value in graphene_type._meta.enum.__members__.items():
+        for name, value in type_._meta.enum.__members__.items():
             description = getattr(value, "description", None)
-            # if the "description" attribute is an Enum, it is likely an enum member
-            # called description, not a description property
-            if isinstance(description, PyEnum):
-                description = None
-            if not description and callable(graphene_type._meta.description):
-                description = graphene_type._meta.description(value)
-
             deprecation_reason = getattr(value, "deprecation_reason", None)
-            if isinstance(deprecation_reason, PyEnum):
-                deprecation_reason = None
-            if not deprecation_reason and callable(
-                graphene_type._meta.deprecation_reason
-            ):
-                deprecation_reason = graphene_type._meta.deprecation_reason(value)
+            if not description and callable(type_._meta.description):
+                description = type_._meta.description(value)
+            if not deprecation_reason and callable(type_._meta.deprecation_reason):
+                deprecation_reason = type_._meta.deprecation_reason(value)

             values[name] = GraphQLEnumValue(
-                value=value,
+                value=value.value,
                 description=description,
                 deprecation_reason=deprecation_reason,
             )

         type_description = (
-            graphene_type._meta.description(None)
-            if callable(graphene_type._meta.description)
-            else graphene_type._meta.description
+            type_._meta.description(None)
+            if callable(type_._meta.description)
+            else type_._meta.description
         )

         return GrapheneEnumType(
-            graphene_type=graphene_type,
+            graphene_type=type_,
             values=values,
-            name=graphene_type._meta.name,
+            name=type_._meta.name,
             description=type_description,
         )

-    def create_objecttype(self, graphene_type):
-        create_graphql_type = self.add_type
+    def construct_objecttype(self, map_, type_):
+        if type_._meta.name in map_:
+            _type = map_[type_._meta.name]
+            if isinstance(_type, GrapheneGraphQLType):
+                assert _type.graphene_type == type_, (
+                    "Found different types with the same name in the schema: {}, {}."
+                ).format(_type.graphene_type, type_)
+            return _type

         def interfaces():
             interfaces = []
-            for graphene_interface in graphene_type._meta.interfaces:
-                interface = create_graphql_type(graphene_interface)
-                assert interface.graphene_type == graphene_interface
-                interfaces.append(interface)
+            for interface in type_._meta.interfaces:
+                self.graphene_reducer(map_, interface)
+                internal_type = map_[interface._meta.name]
+                assert internal_type.graphene_type == interface
+                interfaces.append(internal_type)
             return interfaces

-        if graphene_type._meta.possible_types:
+        if type_._meta.possible_types:
             is_type_of = partial(
-                is_type_of_from_possible_types, graphene_type._meta.possible_types
+                is_type_of_from_possible_types, type_._meta.possible_types
             )
         else:
-            is_type_of = graphene_type.is_type_of
+            is_type_of = type_.is_type_of

         return GrapheneObjectType(
-            graphene_type=graphene_type,
-            name=graphene_type._meta.name,
-            description=graphene_type._meta.description,
-            fields=partial(self.create_fields_for_type, graphene_type),
+            graphene_type=type_,
+            name=type_._meta.name,
+            description=type_._meta.description,
+            fields=partial(self.construct_fields_for_type, map_, type_),
             is_type_of=is_type_of,
             interfaces=interfaces,
         )

-    def create_interface(self, graphene_type):
-        resolve_type = (
-            partial(
-                self.resolve_type, graphene_type.resolve_type, graphene_type._meta.name
+    def construct_interface(self, map_, type_):
+        if type_._meta.name in map_:
+            _type = map_[type_._meta.name]
+            if isinstance(_type, GrapheneInterfaceType):
+                assert _type.graphene_type == type_, (
+                    "Found different types with the same name in the schema: {}, {}."
+                ).format(_type.graphene_type, type_)
+            return _type
+
+        _resolve_type = None
+        if type_.resolve_type:
+            _resolve_type = partial(
+                resolve_type, type_.resolve_type, map_, type_._meta.name
             )
-            if graphene_type.resolve_type
-            else None
-        )
-
-        def interfaces():
-            interfaces = []
-            for graphene_interface in graphene_type._meta.interfaces:
-                interface = self.add_type(graphene_interface)
-                assert interface.graphene_type == graphene_interface
-                interfaces.append(interface)
-            return interfaces
-
         return GrapheneInterfaceType(
-            graphene_type=graphene_type,
-            name=graphene_type._meta.name,
-            description=graphene_type._meta.description,
-            fields=partial(self.create_fields_for_type, graphene_type),
-            interfaces=interfaces,
-            resolve_type=resolve_type,
+            graphene_type=type_,
+            name=type_._meta.name,
+            description=type_._meta.description,
+            fields=partial(self.construct_fields_for_type, map_, type_),
+            resolve_type=_resolve_type,
         )

-    def create_inputobjecttype(self, graphene_type):
+    def construct_inputobjecttype(self, map_, type_):
         return GrapheneInputObjectType(
-            graphene_type=graphene_type,
-            name=graphene_type._meta.name,
-            description=graphene_type._meta.description,
-            out_type=graphene_type._meta.container,
+            graphene_type=type_,
+            name=type_._meta.name,
+            description=type_._meta.description,
+            out_type=type_._meta.container,
             fields=partial(
-                self.create_fields_for_type, graphene_type, is_input_type=True
+                self.construct_fields_for_type, map_, type_, is_input_type=True
             ),
         )

-    def construct_union(self, graphene_type):
-        create_graphql_type = self.add_type
+    def construct_union(self, map_, type_):
+        _resolve_type = None
+        if type_.resolve_type:
+            _resolve_type = partial(
+                resolve_type, type_.resolve_type, map_, type_._meta.name
+            )

         def types():
             union_types = []
-            for graphene_objecttype in graphene_type._meta.types:
-                object_type = create_graphql_type(graphene_objecttype)
-                assert object_type.graphene_type == graphene_objecttype
-                union_types.append(object_type)
+            for objecttype in type_._meta.types:
+                self.graphene_reducer(map_, objecttype)
+                internal_type = map_[objecttype._meta.name]
+                assert internal_type.graphene_type == objecttype
+                union_types.append(internal_type)
             return union_types

-        resolve_type = (
-            partial(
-                self.resolve_type, graphene_type.resolve_type, graphene_type._meta.name
-            )
-            if graphene_type.resolve_type
-            else None
-        )
-
         return GrapheneUnionType(
-            graphene_type=graphene_type,
-            name=graphene_type._meta.name,
-            description=graphene_type._meta.description,
+            graphene_type=type_,
+            name=type_._meta.name,
+            description=type_._meta.description,
             types=types,
-            resolve_type=resolve_type,
+            resolve_type=_resolve_type,
         )

     def get_name(self, name):
@@ -300,69 +331,42 @@ class TypeMap(dict):
             return to_camel_case(name)
         return name

-    def create_fields_for_type(self, graphene_type, is_input_type=False):
-        create_graphql_type = self.add_type
-
+    def construct_fields_for_type(self, map_, type_, is_input_type=False):
         fields = {}
-        for name, field in graphene_type._meta.fields.items():
+        for name, field in type_._meta.fields.items():
             if isinstance(field, Dynamic):
                 field = get_field_as(field.get_type(self), _as=Field)
                 if not field:
                     continue
-            field_type = create_graphql_type(field.type)
+            map_ = self.type_map_reducer(map_, field.type)
+            field_type = self.get_field_type(map_, field.type)
             if is_input_type:
                 _field = GraphQLInputField(
                     field_type,
                     default_value=field.default_value,
                     out_name=name,
                     description=field.description,
-                    deprecation_reason=field.deprecation_reason,
                 )
             else:
                 args = {}
                 for arg_name, arg in field.args.items():
-                    arg_type = create_graphql_type(arg.type)
+                    map_ = self.type_map_reducer(map_, arg.type)
+                    arg_type = self.get_field_type(map_, arg.type)
                     processed_arg_name = arg.name or self.get_name(arg_name)
                     args[processed_arg_name] = GraphQLArgument(
                         arg_type,
                         out_name=arg_name,
                         description=arg.description,
-                        default_value=arg.default_value,
-                        deprecation_reason=arg.deprecation_reason,
+                        default_value=Undefined
+                        if isinstance(arg.type, NonNull)
+                        else arg.default_value,
                     )
-                subscribe = field.wrap_subscribe(
-                    self.get_function_for_type(
-                        graphene_type, f"subscribe_{name}", name, field.default_value
-                    )
-                )
-
-                # If we are in a subscription, we use (by default) an
-                # identity-based resolver for the root, rather than the
-                # default resolver for objects/dicts.
-                if subscribe:
-                    field_default_resolver = identity_resolve
-                elif issubclass(graphene_type, ObjectType):
-                    default_resolver = (
-                        graphene_type._meta.default_resolver or get_default_resolver()
-                    )
-                    field_default_resolver = partial(
-                        default_resolver, name, field.default_value
-                    )
-                else:
-                    field_default_resolver = None
-
-                resolve = field.wrap_resolve(
-                    self.get_function_for_type(
-                        graphene_type, f"resolve_{name}", name, field.default_value
-                    )
-                    or field_default_resolver
-                )
-
                 _field = GraphQLField(
                     field_type,
                     args=args,
-                    resolve=resolve,
-                    subscribe=subscribe,
+                    resolve=field.get_resolver(
+                        self.get_resolver_for_type(type_, name, field.default_value)
+                    ),
                     deprecation_reason=field.deprecation_reason,
                     description=field.description,
                 )
@@ -370,19 +374,18 @@ class TypeMap(dict):
             fields[field_name] = _field
         return fields

-    def get_function_for_type(self, graphene_type, func_name, name, default_value):
-        """Gets a resolve or subscribe function for a given ObjectType"""
-        if not issubclass(graphene_type, ObjectType):
+    def get_resolver_for_type(self, type_, name, default_value):
+        if not issubclass(type_, ObjectType):
             return
-        resolver = getattr(graphene_type, func_name, None)
+        resolver = getattr(type_, "resolve_{}".format(name), None)
         if not resolver:
             # If we don't find the resolver in the ObjectType class, then try to
             # find it in each of the interfaces
             interface_resolver = None
-            for interface in graphene_type._meta.interfaces:
+            for interface in type_._meta.interfaces:
                 if name not in interface._meta.fields:
                     continue
-                interface_resolver = getattr(interface, func_name, None)
+                interface_resolver = getattr(interface, "resolve_{}".format(name), None)
                 if interface_resolver:
                     break
             resolver = interface_resolver
@@ -391,33 +394,36 @@ class TypeMap(dict):
         if resolver:
             return get_unbound_function(resolver)

-    def resolve_type(self, resolve_type_func, type_name, root, info, _type):
-        type_ = resolve_type_func(root, info)
+        default_resolver = type_._meta.default_resolver or get_default_resolver()
+        return partial(default_resolver, name, default_value)

-        if inspect.isclass(type_) and issubclass(type_, ObjectType):
-            return type_._meta.name
-
-        return_type = self[type_name]
-        return default_type_resolver(root, info, return_type)
+    def get_field_type(self, map_, type_):
+        if isinstance(type_, List):
+            return GraphQLList(self.get_field_type(map_, type_.of_type))
+        if isinstance(type_, NonNull):
+            return GraphQLNonNull(self.get_field_type(map_, type_.of_type))
+        return map_.get(type_._meta.name)


 class Schema:
     """Schema Definition.

     A Graphene Schema can execute operations (query, mutation, subscription) against the defined
     types. For advanced purposes, the schema can be used to lookup type definitions and answer
     questions about the types through introspection.

     Args:
-        query (Type[ObjectType]): Root query *ObjectType*. Describes entry point for fields to *read*
+        query (ObjectType): Root query *ObjectType*. Describes entry point for fields to *read*
             data in your Schema.
-        mutation (Optional[Type[ObjectType]]): Root mutation *ObjectType*. Describes entry point for
+        mutation (ObjectType, optional): Root mutation *ObjectType*. Describes entry point for
             fields to *create, update or delete* data in your API.
-        subscription (Optional[Type[ObjectType]]): Root subscription *ObjectType*. Describes entry point
+        subscription (ObjectType, optional): Root subscription *ObjectType*. Describes entry point
             for fields to receive continuous updates.
-        types (Optional[List[Type[ObjectType]]]): List of any types to include in schema that
-            may not be introspected through root types.
         directives (List[GraphQLDirective], optional): List of custom directives to include in the
            GraphQL schema. Defaults to only include directives defined by GraphQL spec (@include
            and @skip) [GraphQLIncludeDirective, GraphQLSkipDirective].
+        types (List[GraphQLType], optional): List of any types to include in schema that
+            may not be introspected through root types.
        auto_camelcase (bool): Fieldnames will be transformed in Schema's TypeMap from snake_case
            to camelCase (preferred by GraphQL standard). Default True.
     """
@@ -434,15 +440,13 @@ class Schema:
         self.query = query
         self.mutation = mutation
         self.subscription = subscription
-        type_map = TypeMap(
-            query, mutation, subscription, types, auto_camelcase=auto_camelcase
-        )
-        self.graphql_schema = GraphQLSchema(
-            type_map.query,
-            type_map.mutation,
-            type_map.subscription,
-            type_map.types,
+        self.graphql_schema = GrapheneGraphQLSchema(
+            query,
+            mutation,
+            subscription,
+            types,
             directives,
+            auto_camelcase=auto_camelcase,
         )

     def __str__(self):
@@ -452,11 +456,12 @@ class Schema:
        """
        This function let the developer select a type in a given schema
        by accessing its attrs.

        Example: using schema.Query for accessing the "Query" type in the Schema
        """
        _type = self.graphql_schema.get_type(type_name)
        if _type is None:
-            raise AttributeError(f'Type "{type_name}" not found in the Schema')
+            raise AttributeError('Type "{}" not found in the Schema'.format(type_name))
        if isinstance(_type, GrapheneGraphQLType):
            return _type.graphene_type
        return _type
@@ -466,9 +471,11 @@ class Schema:

     def execute(self, *args, **kwargs):
         """Execute a GraphQL query on the schema.

         Use the `graphql_sync` function from `graphql-core` to provide the result
         for a query string. Most of the time this method will be called by one of the Graphene
         :ref:`Integrations` via a web request.

         Args:
             request_string (str or Document): GraphQL request (query, mutation or subscription)
                 as string or parsed AST form from `graphql-core`.
@@ -483,8 +490,7 @@ class Schema:
                 request_string, an operation name must be provided for the result to be provided.
             middleware (List[SupportsGraphQLMiddleware]): Supply request level middleware as
                 defined in `graphql-core`.
-            execution_context_class (ExecutionContext, optional): The execution context class
-                to use when resolving queries and mutations.
         Returns:
             :obj:`ExecutionResult` containing any data and errors for the operation.
         """
@@ -493,28 +499,12 @@ class Schema:

     async def execute_async(self, *args, **kwargs):
         """Execute a GraphQL query on the schema asynchronously.

         Same as `execute`, but uses `graphql` instead of `graphql_sync`.
         """
         kwargs = normalize_execute_kwargs(kwargs)
         return await graphql(self.graphql_schema, *args, **kwargs)

-    async def subscribe(self, query, *args, **kwargs):
-        """Execute a GraphQL subscription on the schema asynchronously."""
-        # Do parsing
-        try:
-            document = parse(query)
-        except GraphQLError as error:
-            return ExecutionResult(data=None, errors=[error])
-
-        # Do validation
-        validation_errors = validate(self.graphql_schema, document)
-        if validation_errors:
-            return ExecutionResult(data=None, errors=validation_errors)
-
-        # Execute the query
-        kwargs = normalize_execute_kwargs(kwargs)
-        return await subscribe(self.graphql_schema, document, *args, **kwargs)
-
     def introspect(self):
         introspection = self.execute(introspection_query)
         if introspection.errors:
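Schema.execute runs synchronous queries, and on the master side the subscribe() coroutine shown above parses, validates and then delegates to graphql-core's subscribe. A hedged sketch of both entry points; the Query and Subscription types and field names are illustrative.

import asyncio

import graphene


class Query(graphene.ObjectType):
    hello = graphene.String(default_value="world")


class Subscription(graphene.ObjectType):
    count = graphene.Int(upto=graphene.Int())

    async def subscribe_count(root, info, upto=3):
        # An async generator; each yielded value becomes one result.
        for i in range(upto):
            yield i


schema = graphene.Schema(query=Query, subscription=Subscription)

print(schema.execute("{ hello }").data)  # {'hello': 'world'}


async def main():
    results = await schema.subscribe("subscription { count(upto: 2) }")
    async for result in results:
        print(result.data)  # {'count': 0}, then {'count': 1}


asyncio.run(main())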
graphene/types/structures.py

@@ -14,8 +14,9 @@ class Structure(UnmountedType):
             cls_name = type(self).__name__
             of_type_name = type(of_type).__name__
             raise Exception(
-                f"{cls_name} could not have a mounted {of_type_name}()"
-                f" as inner type. Try with {cls_name}({of_type_name})."
+                "{} could not have a mounted {}() as inner type. Try with {}({}).".format(
+                    cls_name, of_type_name, cls_name, of_type_name
+                )
             )
         self._of_type = of_type

@@ -49,7 +50,7 @@ class List(Structure):
     """

     def __str__(self):
-        return f"[{self.of_type}]"
+        return "[{}]".format(self.of_type)

     def __eq__(self, other):
         return isinstance(other, List) and (
@@ -84,12 +85,12 @@ class NonNull(Structure):

     def __init__(self, *args, **kwargs):
         super(NonNull, self).__init__(*args, **kwargs)
-        assert not isinstance(
-            self._of_type, NonNull
-        ), f"Can only create NonNull of a Nullable GraphQLType but got: {self._of_type}."
+        assert not isinstance(self._of_type, NonNull), (
+            "Can only create NonNull of a Nullable GraphQLType but got: {}."
+        ).format(self._of_type)

     def __str__(self):
-        return f"{self.of_type}!"
+        return "{}!".format(self.of_type)

     def __eq__(self, other):
         return isinstance(other, NonNull) and (
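List and NonNull wrap another type and render as [T] and T!, and NonNull refuses to wrap another NonNull. A small sketch of that behaviour:

from graphene import List, NonNull, String

print(str(List(String)))           # [String]
print(str(NonNull(String)))        # String!
print(str(NonNull(List(String))))  # [String]!

# Wrapping NonNull in NonNull is rejected by the assertion above.
try:
    NonNull(NonNull(String))
except AssertionError as error:
    print(error)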
Some files were not shown because too many files have changed in this diff.