Mirror of https://github.com/graphql-python/graphene.git, synced 2025-04-24 03:02:00 +03:00.
Compare view: 397 commits.
.github/ISSUE_TEMPLATE/bug_report.md (vendored, new file, 34 lines)
@@ -0,0 +1,34 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: "\U0001F41B bug"
assignees: ''

---

**Note: for support questions, please use stackoverflow**. This repository's issues are reserved for feature requests and bug reports.

* **What is the current behavior?**


* **If the current behavior is a bug, please provide the steps to reproduce and if possible a minimal demo of the problem** via
a github repo, https://repl.it or similar.


* **What is the expected behavior?**


* **What is the motivation / use case for changing the behavior?**


* **Please tell us about your environment:**

  - Version:
  - Platform:

* **Other information** (e.g. detailed explanation, stacktraces, related issues, suggestions how to fix, links for us to have context, eg. stackoverflow)
.github/ISSUE_TEMPLATE/config.yml (vendored, new file, 1 line)
@@ -0,0 +1 @@
blank_issues_enabled: false
.github/ISSUE_TEMPLATE/feature_request.md (vendored, new file, 20 lines)
@@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: "✨ enhancement"
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.
.github/stale.yml (vendored, new file, 24 lines)
@@ -0,0 +1,24 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: false
# Number of days of inactivity before a stale issue is closed
daysUntilClose: false
# Issues with these labels will never be considered stale
exemptLabels:
  - pinned
  - security
  - 🐛 bug
  - 📖 documentation
  - 🙋 help wanted
  - ✨ enhancement
  - good first issue
  - work in progress
# Label to use when marking an issue as stale
staleLabel: wontfix
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: false
# markComment: >
#   This issue has been automatically marked as stale because it has not had
#   recent activity. It will be closed if no further activity occurs. Thank you
#   for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false
.github/workflows/build.yaml (vendored, new file, 21 lines)
@@ -0,0 +1,21 @@
name: 📦 Build

on: [push, pull_request]

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.10
        uses: actions/setup-python@v5
        with:
          python-version: "3.10"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install build twine
      - name: Building package
        run: python3 -m build
      - name: Check package with Twine
        run: twine check dist/*
.github/workflows/deploy.yml (vendored, new file, 26 lines)
@@ -0,0 +1,26 @@
name: 🚀 Deploy to PyPI

on:
  push:
    tags:
      - 'v*'

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.10
        uses: actions/setup-python@v5
        with:
          python-version: "3.10"
      - name: Build wheel and source tarball
        run: |
          pip install wheel
          python setup.py sdist bdist_wheel
      - name: Publish a Python distribution to PyPI
        uses: pypa/gh-action-pypi-publish@v1.1.0
        with:
          user: __token__
          password: ${{ secrets.pypi_password }}
.github/workflows/lint.yml (vendored, new file, 26 lines)
@@ -0,0 +1,26 @@
name: 💅 Lint

on: [push, pull_request]

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.10
        uses: actions/setup-python@v5
        with:
          python-version: "3.10"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install tox
      - name: Run lint
        run: tox
        env:
          TOXENV: pre-commit
      - name: Run mypy
        run: tox
        env:
          TOXENV: mypy
.github/workflows/tests.yml (vendored, new file, 64 lines)
@@ -0,0 +1,64 @@
name: 📄 Tests

on:
  push:
    branches:
      - master
      - '*.x'
    paths-ignore:
      - 'docs/**'
      - '*.md'
      - '*.rst'
  pull_request:
    branches:
      - master
      - '*.x'
    paths-ignore:
      - 'docs/**'
      - '*.md'
      - '*.rst'

jobs:
  tests:
    # runs the test suite
    name: ${{ matrix.name }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - {name: '3.13', python: '3.13', os: ubuntu-latest, tox: py313}
          - {name: '3.12', python: '3.12', os: ubuntu-latest, tox: py312}
          - {name: '3.11', python: '3.11', os: ubuntu-latest, tox: py311}
          - {name: '3.10', python: '3.10', os: ubuntu-latest, tox: py310}
          - {name: '3.9', python: '3.9', os: ubuntu-latest, tox: py39}
          - {name: '3.8', python: '3.8', os: ubuntu-latest, tox: py38}
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python }}

      - name: update pip
        run: |
          python -m pip install --upgrade pip
          pip install --upgrade setuptools wheel

      - name: get pip cache dir
        id: pip-cache
        run: echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
      - name: cache pip dependencies
        uses: actions/cache@v3
        with:
          path: ${{ steps.pip-cache.outputs.dir }}
          key: pip|${{ runner.os }}|${{ matrix.python }}|${{ hashFiles('setup.py') }}
      - run: pip install tox
      - run: tox -e ${{ matrix.tox }}
      - name: Upload coverage.xml
        if: ${{ matrix.python == '3.10' }}
        uses: actions/upload-artifact@v4
        with:
          name: graphene-coverage
          path: coverage.xml
          if-no-files-found: error
      - name: Upload coverage.xml to codecov
        if: ${{ matrix.python == '3.10' }}
        uses: codecov/codecov-action@v4
.gitignore (vendored, modified; 13 lines changed)
@@ -10,7 +10,6 @@ __pycache__/

 # Distribution / packaging
 .Python
-env/
 build/
 develop-eggs/
 dist/
@@ -45,7 +44,8 @@ htmlcov/
 .pytest_cache
 nosetests.xml
 coverage.xml
-*,cover
+*.cover
+.pytest_cache/

 # Translations
 *.mo
@@ -60,6 +60,14 @@ docs/_build/
 # PyBuilder
 target/

+# VirtualEnv
+.env
+.venv
+env/
+venv/
+
+# Typing
+.mypy_cache/

 /tests/django.sqlite

@@ -82,3 +90,4 @@ target/
 *.sqlite3
 .vscode
 .mypy_cache
+.ruff_cache
(deleted file; the file header was not captured in this mirror, content is the isort configuration)
@@ -1,2 +0,0 @@
[settings]
known_third_party = aniso8601,graphql,graphql_relay,promise,pytest,pytz,pyutils,setuptools,six,snapshottest,sphinx_graphene_theme
(modified file; the file header was not captured in this mirror, content is the pre-commit configuration)
@@ -1,24 +1,29 @@
+default_language_version:
+  python: python3.10
+
 repos:
-  - repo: git://github.com/pre-commit/pre-commit-hooks
-    rev: v1.3.0
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.3.0
     hooks:
+      - id: check-merge-conflict
       - id: check-json
       - id: check-yaml
       - id: debug-statements
      - id: end-of-file-fixer
         exclude: ^docs/.*$
-      - id: trailing-whitespace
-        exclude: README.md
       - id: pretty-format-json
         args:
           - --autofix
-      - id: flake8
+      - id: trailing-whitespace
+        exclude: README.md
   - repo: https://github.com/asottile/pyupgrade
-    rev: v1.4.0
+    rev: v2.37.3
     hooks:
       - id: pyupgrade
-  - repo: https://github.com/ambv/black
-    rev: 18.6b4
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    # Ruff version.
+    rev: v0.5.0
     hooks:
-      - id: black
-        language_version: python3.6
+      - id: ruff
+      - id: ruff-format
+        args: [ --check ]
.travis.yml (deleted, 33 lines)
@@ -1,33 +0,0 @@
language: python
matrix:
  include:
    - env: TOXENV=py27
      python: 2.7
    - env: TOXENV=py34
      python: 3.4
    - env: TOXENV=py35
      python: 3.5
    - env: TOXENV=py36
      python: 3.6
    - env: TOXENV=pypy
      python: pypy-5.7.1
    - env: TOXENV=pre-commit
      python: 3.6
    - env: TOXENV=mypy
      python: 3.6
install:
  - pip install coveralls tox
script: tox
after_success: coveralls
cache:
  directories:
    - $HOME/.cache/pip
    - $HOME/.cache/pre-commit
deploy:
  provider: pypi
  user: syrusakbary
  on:
    tags: true
  password:
    secure: LHOp9DvYR+70vj4YVY8+JRNCKUOfYZREEUY3+4lMUpY7Zy5QwDfgEMXG64ybREH9dFldpUqVXRj53eeU3spfudSfh8NHkgqW7qihez2AhSnRc4dK6ooNfB+kLcSoJ4nUFGxdYImABc4V1hJvflGaUkTwDNYVxJF938bPaO797IvSbuI86llwqkvuK2Vegv9q/fy9sVGaF9VZIs4JgXwR5AyDR7FBArl+S84vWww4vTFD33hoE88VR4QvFY3/71BwRtQrnCMm7AOm31P9u29yi3bpzQpiOR2rHsgrsYdm597QzFKVxYwsmf9uAx2bpbSPy2WibunLePIvOFwm8xcfwnz4/J4ONBc5PSFmUytTWpzEnxb0bfUNLuYloIS24V6OZ8BfAhiYZ1AwySeJCQDM4Vk1V8IF6trTtyx5EW/uV9jsHCZ3LFsAD7UnFRTosIgN3SAK3ZWCEk5oF2IvjecsolEfkRXB3q9EjMkkuXRUeFDH2lWJLgNE27BzY6myvZVzPmfwZUsPBlPD/6w+WLSp97Rjgr9zS3T1d4ddqFM4ZYu04f2i7a/UUQqG+itzzuX5DWLPvzuNt37JB45mB9IsvxPyXZ6SkAcLl48NGyKok1f3vQnvphkfkl4lni29woKhaau8xlsuEDrcwOoeAsVcZXiItg+l+z2SlIwM0A06EvQ=
  distributions: "sdist bdist_wheel"
Makefile (new file, 28 lines)
@@ -0,0 +1,28 @@
.PHONY: help
help:
	@echo "Please use \`make <target>' where <target> is one of"
	@grep -E '^\.PHONY: [a-zA-Z_-]+ .*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = "(: |##)"}; {printf "\033[36m%-30s\033[0m %s\n", $$2, $$3}'

.PHONY: install-dev ## Install development dependencies
install-dev:
	pip install -e ".[dev]"

.PHONY: test ## Run tests
test:
	py.test graphene examples

.PHONY: docs ## Generate docs
docs: install-dev
	cd docs && make install && make html

.PHONY: docs-live ## Generate docs with live reloading
docs-live: install-dev
	cd docs && make install && make livehtml

.PHONY: format
format:
	black graphene examples setup.py

.PHONY: lint
lint:
	flake8 graphene examples setup.py
README.md (modified; 86 lines changed)
@@ -1,50 +1,47 @@
-Please read [UPGRADE-v2.0.md](/UPGRADE-v2.0.md) to learn how to upgrade to Graphene `2.0`.
+#  [Graphene](http://graphene-python.org) [](https://badge.fury.io/py/graphene) [](https://coveralls.io/github/graphql-python/graphene?branch=master) [](https://discord.gg/T6Gp6NFYHe)

----
+[💬 Join the community on Discord](https://discord.gg/T6Gp6NFYHe)

-#  [Graphene](http://graphene-python.org) [](https://travis-ci.org/graphql-python/graphene) [](https://badge.fury.io/py/graphene) [](https://coveralls.io/github/graphql-python/graphene?branch=master)
+**We are looking for contributors**! Please check the current issues to see how you can help ❤️

+## Introduction

-[Graphene](http://graphene-python.org) is a Python library for building GraphQL schemas/types fast and easily.
+[Graphene](http://graphene-python.org) is an opinionated Python library for building GraphQL schemas/types fast and easily.

 - **Easy to use:** Graphene helps you use GraphQL in Python without effort.
 - **Relay:** Graphene has builtin support for Relay.
-- **Data agnostic:** Graphene supports any kind of data source: SQL (Django, SQLAlchemy), NoSQL, custom Python objects, etc.
+- **Data agnostic:** Graphene supports any kind of data source: SQL (Django, SQLAlchemy), Mongo, custom Python objects, etc.
 We believe that by providing a complete API you could plug Graphene anywhere your data lives and make your data available
 through GraphQL.


 ## Integrations

 Graphene has multiple integrations with different frameworks:

 | integration | Package |
-|---------------|-------------------|
-| Django | [graphene-django](https://github.com/graphql-python/graphene-django/) |
-| SQLAlchemy | [graphene-sqlalchemy](https://github.com/graphql-python/graphene-sqlalchemy/) |
-| Google App Engine | [graphene-gae](https://github.com/graphql-python/graphene-gae/) |
-| Peewee | *In progress* ([Tracking Issue](https://github.com/graphql-python/graphene/issues/289)) |
+| ----------------- | --------------------------------------------------------------------------------------- |
+| SQLAlchemy | [graphene-sqlalchemy](https://github.com/graphql-python/graphene-sqlalchemy/) |
+| Mongo | [graphene-mongo](https://github.com/graphql-python/graphene-mongo/) |
+| Apollo Federation | [graphene-federation](https://github.com/graphql-python/graphene-federation/) |
+| Django | [graphene-django](https://github.com/graphql-python/graphene-django/) |

 Also, Graphene is fully compatible with the GraphQL spec, working seamlessly with all GraphQL clients, such as [Relay](https://github.com/facebook/relay), [Apollo](https://github.com/apollographql/apollo-client) and [gql](https://github.com/graphql-python/gql).

 ## Installation

-For instaling graphene, just run this command in your shell
+To install `graphene`, just run this command in your shell

 ```bash
-pip install "graphene>=2.0"
+pip install "graphene>=3.1"
 ```

-## 2.0 Upgrade Guide
-
-Please read [UPGRADE-v2.0.md](/UPGRADE-v2.0.md) to learn how to upgrade.
-
-
 ## Examples

 Here is one example for you to get started:

 ```python
+import graphene
+
 class Query(graphene.ObjectType):
     hello = graphene.String(description='A typical hello world')

@@ -67,9 +64,13 @@ result = schema.execute(query)

 If you want to learn even more, you can also check the following [examples](examples/):

-* **Basic Schema**: [Starwars example](examples/starwars)
-* **Relay Schema**: [Starwars Relay example](examples/starwars_relay)
+- **Basic Schema**: [Starwars example](examples/starwars)
+- **Relay Schema**: [Starwars Relay example](examples/starwars_relay)

+## Documentation
+
+Documentation and links to additional resources are available at
+https://docs.graphene-python.org/en/latest/
+
 ## Contributing

@@ -84,43 +85,46 @@ pip install -e ".[test]"
 Well-written tests and maintaining good test coverage is important to this project. While developing, run new and existing tests with:

 ```sh
-py.test PATH/TO/MY/DIR/test_test.py # Single file
-py.test PATH/TO/MY/DIR/ # All tests in directory
+pytest graphene/relay/tests/test_node.py # Single file
+pytest graphene/relay # All tests in directory
 ```

 Add the `-s` flag if you have introduced breakpoints into the code for debugging.
 Add the `-v` ("verbose") flag to get more detailed test output. For even more detailed output, use `-vv`.
 Check out the [pytest documentation](https://docs.pytest.org/en/latest/) for more options and test running controls.

+Regularly ensure your `pre-commit` hooks are up to date and enabled:
+
+```sh
+pre-commit install
+```
+
 You can also run the benchmarks with:

 ```sh
-py.test graphene --benchmark-only
+pytest graphene --benchmark-only
 ```

-Graphene supports several versions of Python. To make sure that changes do not break compatibility with any of those versions, we use `tox` to create virtualenvs for each python version and run tests with that version. To run against all python versions defined in the `tox.ini` config file, just run:
+Graphene supports several versions of Python. To make sure that changes do not break compatibility with any of those versions, we use `tox` to create virtualenvs for each Python version and run tests with that version. To run against all Python versions defined in the `tox.ini` config file, just run:

 ```sh
 tox
 ```
-If you wish to run against a specific version defined in the `tox.ini` file:
-```sh
-tox -e py36
-```
-Tox can only use whatever versions of python are installed on your system. When you create a pull request, Travis will also be running the same tests and report the results, so there is no need for potential contributors to try to install every single version of python on their own system ahead of time. We appreciate opening issues and pull requests to make graphene even more stable & useful!

-### Documentation
+If you wish to run against a specific version defined in the `tox.ini` file:

+```sh
+tox -e py39
+```
+
+Tox can only use whatever versions of Python are installed on your system. When you create a pull request, GitHub Actions pipelines will also be running the same tests and report the results, so there is no need for potential contributors to try to install every single version of Python on their own system ahead of time. We appreciate opening issues and pull requests to make graphene even more stable & useful!
+
+### Building Documentation

 The documentation is generated using the excellent [Sphinx](http://www.sphinx-doc.org/) and a custom theme.

-The documentation dependencies are installed by running:
+An HTML version of the documentation is produced by running:

 ```sh
-cd docs
-pip install -r requirements.txt
-```
-
-Then to produce a HTML version of the documentation:
-
-```sh
-make html
+make docs
 ```
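For quick reference outside the diff view, here is a minimal, runnable version of the hello-world schema that the README shows in pieces above. It is a sketch that assumes Graphene 3.x is installed via the `pip install` command from the Installation section; the `SayHello` query name comes from the old README.rst reproduced below.

```python
import graphene


class Query(graphene.ObjectType):
    hello = graphene.String(description="A typical hello world")

    def resolve_hello(root, info):
        return "World"


schema = graphene.Schema(query=Query)

# Querying the schema returns an execution result; .data holds the resolved dict.
result = schema.execute("query SayHello { hello }")
print(result.data)  # {'hello': 'World'}
```

`Schema.execute` returning an execution result with a `data` attribute is what the README's `result = schema.execute(query)` line relies on.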
README.rst (deleted, 137 lines)
@@ -1,137 +0,0 @@
Please read `UPGRADE-v2.0.md </UPGRADE-v2.0.md>`__ to learn how to
upgrade to Graphene ``2.0``.

--------------

|Graphene Logo| `Graphene <http://graphene-python.org>`__ |Build Status| |PyPI version| |Coverage Status|
=========================================================================================================

`Graphene <http://graphene-python.org>`__ is a Python library for
building GraphQL schemas/types fast and easily.

- **Easy to use:** Graphene helps you use GraphQL in Python without
  effort.
- **Relay:** Graphene has builtin support for Relay.
- **Data agnostic:** Graphene supports any kind of data source: SQL
  (Django, SQLAlchemy), NoSQL, custom Python objects, etc. We believe
  that by providing a complete API you could plug Graphene anywhere
  your data lives and make your data available through GraphQL.

Integrations
------------

Graphene has multiple integrations with different frameworks:

+---------------------+----------------------------------------------------------------------------------------------+
| integration         | Package                                                                                      |
+=====================+==============================================================================================+
| Django              | `graphene-django <https://github.com/graphql-python/graphene-django/>`__                    |
+---------------------+----------------------------------------------------------------------------------------------+
| SQLAlchemy          | `graphene-sqlalchemy <https://github.com/graphql-python/graphene-sqlalchemy/>`__            |
+---------------------+----------------------------------------------------------------------------------------------+
| Google App Engine   | `graphene-gae <https://github.com/graphql-python/graphene-gae/>`__                          |
+---------------------+----------------------------------------------------------------------------------------------+
| Peewee              | *In progress* (`Tracking Issue <https://github.com/graphql-python/graphene/issues/289>`__)  |
+---------------------+----------------------------------------------------------------------------------------------+

Also, Graphene is fully compatible with the GraphQL spec, working
seamlessly with all GraphQL clients, such as
`Relay <https://github.com/facebook/relay>`__,
`Apollo <https://github.com/apollographql/apollo-client>`__ and
`gql <https://github.com/graphql-python/gql>`__.

Installation
------------

For instaling graphene, just run this command in your shell

.. code:: bash

    pip install "graphene>=2.0"

2.0 Upgrade Guide
-----------------

Please read `UPGRADE-v2.0.md </UPGRADE-v2.0.md>`__ to learn how to
upgrade.

Examples
--------

Here is one example for you to get started:

.. code:: python

    class Query(graphene.ObjectType):
        hello = graphene.String(description='A typical hello world')

        def resolve_hello(self, info):
            return 'World'

    schema = graphene.Schema(query=Query)

Then Querying ``graphene.Schema`` is as simple as:

.. code:: python

    query = '''
        query SayHello {
          hello
        }
    '''
    result = schema.execute(query)

If you want to learn even more, you can also check the following
`examples <examples/>`__:

- **Basic Schema**: `Starwars example <examples/starwars>`__
- **Relay Schema**: `Starwars Relay
  example <examples/starwars_relay>`__

Contributing
------------

After cloning this repo, ensure dependencies are installed by running:

.. code:: sh

    pip install -e ".[test]"

After developing, the full test suite can be evaluated by running:

.. code:: sh

    py.test graphene --cov=graphene --benchmark-skip # Use -v -s for verbose mode

You can also run the benchmarks with:

.. code:: sh

    py.test graphene --benchmark-only

Documentation
~~~~~~~~~~~~~

The documentation is generated using the excellent
`Sphinx <http://www.sphinx-doc.org/>`__ and a custom theme.

The documentation dependencies are installed by running:

.. code:: sh

    cd docs
    pip install -r requirements.txt

Then to produce a HTML version of the documentation:

.. code:: sh

    make html

.. |Graphene Logo| image:: http://graphene-python.org/favicon.png
.. |Build Status| image:: https://travis-ci.org/graphql-python/graphene.svg?branch=master
   :target: https://travis-ci.org/graphql-python/graphene
.. |PyPI version| image:: https://badge.fury.io/py/graphene.svg
   :target: https://badge.fury.io/py/graphene
.. |Coverage Status| image:: https://coveralls.io/repos/graphql-python/graphene/badge.svg?branch=master&service=github
   :target: https://coveralls.io/github/graphql-python/graphene?branch=master
SECURITY.md (new file, 15 lines)
@@ -0,0 +1,15 @@
# Security Policy

## Supported Versions

Support for security issues is currently provided for Graphene 3.0 and above. Support on earlier versions cannot be guaranteed by the maintainers of this library, but community PRs may be accepted in critical cases.
The preferred mitigation strategy is via an upgrade to Graphene 3.

| Version | Supported          |
| ------- | ------------------ |
| 3.x     | :white_check_mark: |
| <3.x    | :x:                |

## Reporting a Vulnerability

Please use responsible disclosure by contacting a core maintainer via Discord or E-Mail.
(modified file; the file header was not captured in this mirror, content is the v1.0 upgrade guide, UPGRADE-v1.0.md)
@@ -2,30 +2,29 @@

 Big changes from v0.10.x to 1.0. While on the surface a lot of this just looks like shuffling around API, the entire codebase has been rewritten to handle some really great use cases and improved performance.


 ## Backwards Compatibility and Deprecation Warnings

 This has been a community project from the start, we need your help making the upgrade as smooth as possible for everybody!
 We have done our best to provide backwards compatibility with deprecated APIs.

-
 ## Deprecations

-* `with_context` is no longer needed. Resolvers now always take the context argument.
+- `with_context` is no longer needed. Resolvers now always take the context argument.
 Before:

 ```python
-def resolve_xxx(self, args, info):
+def resolve_xxx(root, args, info):
     # ...
 ```

 With 1.0:

 ```python
-def resolve_xxx(self, args, context, info):
+def resolve_xxx(root, args, context, info):
     # ...
 ```

-* `ObjectType` and `Interface` no longer accept the `abstract` option in the `Meta`.
+- `ObjectType` and `Interface` no longer accept the `abstract` option in the `Meta`.
 Inheriting fields should be now achieved using `AbstractType` inheritance.

 Before:
@@ -42,6 +41,7 @@ We have done our best to provide backwards compatibility with deprecated APIs.
 ```

 With 1.0:
+
 ```python
 class MyBaseQuery(graphene.AbstractType):
     my_field = String()
@@ -50,9 +50,9 @@
     pass
 ```

-* The `type_name` option in the Meta in types is now `name`
+- The `type_name` option in the Meta in types is now `name`

-* Type references no longer work with strings, but with functions.
+- Type references no longer work with strings, but with functions.

 Before:
@@ -70,7 +70,6 @@
 users = graphene.List(lambda: User)
 ```

-
 ## Schema

 Schemas in graphene `1.0` are `Immutable`, that means that once you create a `graphene.Schema` any
@@ -80,7 +79,6 @@ The `name` argument is removed from the Schema.
 The arguments `executor` and `middlewares` are also removed from the `Schema` definition.
 You can still use them, but by calling explicitly in the `execute` method in `graphql`.

-
 ```python
 # Old way
 schema = graphene.Schema(name='My Schema')
@@ -94,7 +92,6 @@ schema = graphene.Schema(
 )
 ```

-
 ## Interfaces

 For implementing an Interface in an ObjectType, you have to add it onto `Meta.interfaces`.
@@ -131,7 +128,7 @@ class ReverseString(Mutation):

     reversed = String()

-    def mutate(self, args, context, info):
+    def mutate(root, args, context, info):
         reversed = args.get('input')[::-1]
         return ReverseString(reversed=reversed)

@@ -156,16 +153,15 @@ class Query(ObjectType):
 ```

 Also, if you wanted to create an `ObjectType` that implements `Node`, you have to do it
-explicity.
+explicitly.

-
 ## Django

 The Django integration with Graphene now has an independent package: `graphene-django`.
 For installing, you have to replace the old `graphene[django]` with `graphene-django`.

-* As the package is now independent, you now have to import from `graphene_django`.
-* **DjangoNode no longer exists**, please use `relay.Node` instead:
+- As the package is now independent, you now have to import from `graphene_django`.
+- **DjangoNode no longer exists**, please use `relay.Node` instead:

 ```python
 from graphene.relay import Node
@@ -181,8 +177,8 @@ For installing, you have to replace the old `graphene[django]` with `graphene-dj
 The SQLAlchemy integration with Graphene now has an independent package: `graphene-sqlalchemy`.
 For installing, you have to replace the old `graphene[sqlalchemy]` with `graphene-sqlalchemy`.

-* As the package is now independent, you have to import now from `graphene_sqlalchemy`.
-* **SQLAlchemyNode no longer exists**, please use `relay.Node` instead:
+- As the package is now independent, you have to import now from `graphene_sqlalchemy`.
+- **SQLAlchemyNode no longer exists**, please use `relay.Node` instead:

 ```python
 from graphene.relay import Node
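Since both integration hunks above break off right after `from graphene.relay import Node`, here is a hedged sketch of how a plain Graphene type implements `relay.Node` through `Meta.interfaces`. The `Ship` type and its in-memory `get_node` are invented for illustration, the signatures follow current Graphene (3.x) rather than the 1.0-era ones discussed in this guide, and graphene-django / graphene-sqlalchemy wire `get_node` to their ORMs instead.

```python
import graphene
from graphene import relay


class Ship(graphene.ObjectType):
    class Meta:
        # Instead of the removed DjangoNode/SQLAlchemyNode base classes,
        # declare relay.Node as an interface on the ObjectType.
        interfaces = (relay.Node,)

    name = graphene.String()

    @classmethod
    def get_node(cls, info, id):
        # Look the object up however your data layer requires; here it is faked.
        return Ship(id=id, name=f"Ship #{id}")


class Query(graphene.ObjectType):
    # The node root field resolves any type that implements relay.Node.
    node = relay.Node.Field()


schema = graphene.Schema(query=Query, types=[Ship])
```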
(modified file; the file header was not captured in this mirror, content is the v2.0 upgrade guide, UPGRADE-v2.0.md)
@@ -7,20 +7,22 @@ It also improves the field resolvers, [simplifying the code](#simpler-resolvers)
 developer has to write to use them.

 **Deprecations:**
-* [`AbstractType`](#abstracttype-deprecated)
-* [`resolve_only_args`](#resolve_only_args)
-* [`Mutation.Input`](#mutationinput)
+
+- [`AbstractType`](#abstracttype-deprecated)
+- [`resolve_only_args`](#resolve_only_args)
+- [`Mutation.Input`](#mutationinput)

 **Breaking changes:**
-* [`Simpler Resolvers`](#simpler-resolvers)
-* [`Node Connections`](#node-connections)
+
+- [`Simpler Resolvers`](#simpler-resolvers)
+- [`Node Connections`](#node-connections)

 **New Features!**
-* [`InputObjectType`](#inputobjecttype)
-* [`Meta as Class arguments`](#meta-ass-class-arguments) (_only available for Python 3_)
-
+
+- [`InputObjectType`](#inputobjecttype)
+- [`Meta as Class arguments`](#meta-as-class-arguments) (_only available for Python 3_)

 > The type metaclasses are now deleted as they are no longer necessary. If your code was depending
 > on this strategy for creating custom attrs, see an [example on how to do it in 2.0](https://github.com/graphql-python/graphene/blob/v2.0.0/graphene/tests/issues/test_425.py).

 ## Deprecations
@@ -49,7 +51,7 @@ class Pet(CommonFields, Interface):
     pass
 ```

-### resolve\_only\_args
+### resolve_only_args

 `resolve_only_args` is now deprecated as the resolver API has been simplified.

@@ -60,8 +62,8 @@ class User(ObjectType):
     name = String()

     @resolve_only_args
-    def resolve_name(self):
-        return self.name
+    def resolve_name(root):
+        return root.name
 ```

 With 2.0:

@@ -70,8 +72,8 @@ With 2.0:
 class User(ObjectType):
     name = String()

-    def resolve_name(self, info):
-        return self.name
+    def resolve_name(root, info):
+        return root.name
 ```

 ### Mutation.Input

@@ -94,7 +96,6 @@ class User(Mutation):
     name = String()
 ```

-
 ## Breaking Changes

 ### Simpler resolvers

@@ -108,7 +109,7 @@ Before:
 ```python
 my_field = graphene.String(my_arg=graphene.String())

-def resolve_my_field(self, args, context, info):
+def resolve_my_field(root, args, context, info):
     my_arg = args.get('my_arg')
     return ...
 ```

@@ -118,15 +119,15 @@ With 2.0:
 ```python
 my_field = graphene.String(my_arg=graphene.String())

-def resolve_my_field(self, info, my_arg):
+def resolve_my_field(root, info, my_arg):
     return ...
 ```

-**PS.: Take care with receiving args like `my_arg` as above. This doesn't work for optional (non-required) arguments as stantard `Connection`'s arguments (first, before, after, before).**
+**PS.: Take care with receiving args like `my_arg` as above. This doesn't work for optional (non-required) arguments as standard `Connection`'s arguments (first, last, after, before).**
 You may need something like this:

 ```python
-def resolve_my_field(self, info, known_field1, known_field2, **args): ## get other args with: args.get('arg_key')
+def resolve_my_field(root, info, known_field1, known_field2, **args): ## get other args with: args.get('arg_key')
 ```

 And, if you need the context in the resolver, you can use `info.context`:

@@ -134,7 +135,7 @@ And, if you need the context in the resolver, you can use `info.context`:
 ```python
 my_field = graphene.String(my_arg=graphene.String())

-def resolve_my_field(self, info, my_arg):
+def resolve_my_field(root, info, my_arg):
     context = info.context
     return ...
 ```
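To make the `**args` advice above concrete, here is a small self-contained sketch. The field and argument names mirror the guide's `my_field`/`my_arg` snippet but are otherwise illustrative; it assumes Graphene 2.0 or later with the default auto-camelcasing of names.

```python
import graphene


class Query(graphene.ObjectType):
    # An optional (non-required) argument: read it through **kwargs so the
    # resolver still works when the client omits it.
    my_field = graphene.String(my_arg=graphene.String())

    def resolve_my_field(root, info, **kwargs):
        my_arg = kwargs.get("my_arg", "default")  # absent when not supplied
        return f"my_arg was {my_arg}"


schema = graphene.Schema(query=Query)

# Query once with the argument and once without it.
print(schema.execute('{ myField(myArg: "hello") }').data)  # {'myField': 'my_arg was hello'}
print(schema.execute("{ myField }").data)                  # {'myField': 'my_arg was default'}
```

Reading the optional argument through `kwargs.get` keeps the resolver valid whether or not the client supplies it, which is exactly the pitfall the note above flags for the standard `Connection` arguments.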
@@ -188,6 +189,7 @@ class MyObject(ObjectType):
 ```

 To:
+
 ```python
 class MyObject(ObjectType):
     class Meta:

@@ -203,30 +205,32 @@ class MyObject(ObjectType):
 The parameters' order of `get_node_from_global_id` method has changed. You may need to adjust your [Node Root Field](http://docs.graphene-python.org/en/latest/relay/nodes/#node-root-field) and maybe other places that uses this method to obtain an object.

 Before:

 ```python
 class RootQuery(object):
     ...
     node = Field(relay.Node, id=ID(required=True))

-    def resolve_node(self, args, context, info):
+    def resolve_node(root, args, context, info):
         node = relay.Node.get_node_from_global_id(args['id'], context, info)
         return node
 ```

 Now:

 ```python
 class RootQuery(object):
     ...
     node = Field(relay.Node, id=ID(required=True))

-    def resolve_node(self, info, id):
+    def resolve_node(root, info, id):
         node = relay.Node.get_node_from_global_id(info, id)
         return node
 ```

 ## Mutation.mutate

-Now only receives (`self`, `info`, `**args`) and is not a @classmethod
+Now only receives (`root`, `info`, `**kwargs`) and is not a @classmethod

 Before:

@@ -245,7 +249,7 @@ With 2.0:
 class SomeMutation(Mutation):
     ...

-    def mutate(self, info, **args):
+    def mutate(root, info, **args):
         ...
 ```

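Putting the pieces of the new `mutate` signature together, here is a hedged, self-contained sketch. The `CreatePerson` mutation and its fields are invented for illustration (the guide's own example continues in the next hunk with `first_name`/`last_name` arguments), and it assumes Graphene 2.0 or later.

```python
import graphene


class CreatePerson(graphene.Mutation):
    class Arguments:
        # Declared arguments arrive as keyword arguments of mutate().
        first_name = graphene.String(required=True)
        last_name = graphene.String(required=True)

    # Fields declared on the mutation form its payload.
    full_name = graphene.String()

    def mutate(root, info, first_name, last_name):
        return CreatePerson(full_name=f"{first_name} {last_name}")


class Mutations(graphene.ObjectType):
    create_person = CreatePerson.Field()


class Query(graphene.ObjectType):
    ok = graphene.Boolean(default_value=True)


schema = graphene.Schema(query=Query, mutation=Mutations)

result = schema.execute(
    'mutation { createPerson(firstName: "Ada", lastName: "Lovelace") { fullName } }'
)
print(result.data)  # {'createPerson': {'fullName': 'Ada Lovelace'}}
```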
@@ -258,17 +262,14 @@ class SomeMutation(Mutation):
     last_name = String(required=True)
     ...

-    def mutate(self, info, first_name, last_name):
+    def mutate(root, info, first_name, last_name):
         ...
 ```

-
-
 ## ClientIDMutation.mutate_and_get_payload

 Now only receives (`root`, `info`, `**input`)

-
 ### Middlewares

 If you are using Middelwares, you need to some adjustments:
@@ -276,7 +277,7 @@ If you are using Middelwares, you need to some adjustments:
 Before:

 ```python
 class MyGrapheneMiddleware(object):
     def resolve(self, next_mw, root, args, context, info):

         ## Middleware code

@@ -287,17 +288,16 @@ class MyGrapheneMiddleware(object):
 With 2.0:

 ```python
 class MyGrapheneMiddleware(object):
     def resolve(self, next_mw, root, info, **args):
         context = info.context

         ## Middleware code

         info.context = context
-        return next_mw(root, info, **args)```
+        return next_mw(root, info, **args)
 ```

-
 ## New Features

 ### InputObjectType

@@ -321,7 +321,7 @@ class Query(ObjectType):
     user = graphene.Field(User, input=UserInput())

     @resolve_only_args
-    def resolve_user(self, input):
+    def resolve_user(root, input):
         user_id = input.get('id')
         if is_valid_input(user_id):
             return get_user(user_id)

@@ -334,18 +334,17 @@ class UserInput(InputObjectType):
     id = ID(required=True)

     @property
-    def is_valid(self):
-        return self.id.startswith('userid_')
+    def is_valid(root):
+        return root.id.startswith('userid_')

 class Query(ObjectType):
     user = graphene.Field(User, input=UserInput())

-    def resolve_user(self, info, input):
+    def resolve_user(root, info, input):
         if input.is_valid:
             return get_user(input.id)
 ```

-
 ### Meta as Class arguments

 Now you can use the meta options as class arguments (**ONLY PYTHON 3**).

@@ -366,7 +365,6 @@ class Dog(ObjectType, interfaces=[Pet]):
     name = String()
 ```

-
 ### Abstract types

 Now you can create abstact types super easily, without the need of subclassing the meta.

@@ -378,11 +376,8 @@ class Base(ObjectType):

     id = ID()

-    def resolve_id(self, info):
-        return "{type}_{id}".format(
-            type=self.__class__.__name__,
-            id=self.id
-        )
+    def resolve_id(root, info):
+        return f"{root.__class__.__name__}_{root.id}"
 ```

 ### UUID Scalar
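The UUID Scalar section is cut off in this mirror. As a hedged illustration of the feature it introduces (not the guide's own example), Graphene ships a `graphene.UUID` scalar that serializes Python `uuid.UUID` values:

```python
import uuid

import graphene


class Query(graphene.ObjectType):
    token = graphene.UUID()

    def resolve_token(root, info):
        # Returning a uuid.UUID (or a UUID-formatted string) is accepted.
        return uuid.uuid4()


schema = graphene.Schema(query=Query)
print(schema.execute("{ token }").data)  # e.g. {'token': '8b9f2c1e-...'}
```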
(deleted file; the file header was not captured in this mirror, content is a one-off formatting script)
@@ -1,7 +0,0 @@
#!/bin/bash

# Install the required scripts with
# pip install autoflake autopep8 isort
autoflake ./examples/ ./graphene/ -r --remove-unused-variables --remove-all-unused-imports --in-place
autopep8 ./examples/ ./graphene/ -r --in-place --experimental --aggressive --max-line-length 120
isort -rc ./examples/ ./graphene/
@@ -17,75 +17,54 @@ I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .

.PHONY: help
help:
	@echo "Please use \`make <target>' where <target> is one of"
	@grep -E '^\.PHONY: [a-zA-Z_-]+ .*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = "(: |##)"}; {printf "\033[36m%-30s\033[0m %s\n", $$2, $$3}'

.PHONY: install ## to install all documentation related requirements
install:
	pip install -r requirements.txt

.PHONY: clean ## to remove all built documentation
clean:
	rm -rf $(BUILDDIR)/*

.PHONY: html ## to make standalone HTML files
html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

The long list of per-target `@echo` descriptions in the old `help` target is replaced by the `grep`/`awk` one-liner above, which reads the descriptions from the `.PHONY` comments. The remaining Sphinx targets keep their existing recipes and only gain a `##` description on their `.PHONY` lines:

.PHONY: dirhtml ## to make HTML files named index.html in directories
.PHONY: singlehtml ## to make a single large HTML file
.PHONY: pickle ## to make pickle files
.PHONY: json ## to make JSON files
.PHONY: htmlhelp ## to make HTML files and a HTML help project
.PHONY: qthelp ## to make HTML files and a qthelp project
.PHONY: applehelp ## to make an Apple Help Book
.PHONY: devhelp ## to make HTML files and a Devhelp project
.PHONY: epub ## to make an epub
.PHONY: epub3 ## to make an epub3
.PHONY: latex ## to make LaTeX files, you can set PAPER=a4 or PAPER=letter
.PHONY: latexpdf ## to make LaTeX files and run them through pdflatex
.PHONY: latexpdfja ## to make LaTeX files and run them through platex/dvipdfmx
.PHONY: text ## to make text files
.PHONY: man ## to make manual pages
.PHONY: texinfo ## to make Texinfo files
.PHONY: info ## to make Texinfo files and run them through makeinfo
.PHONY: gettext ## to make PO message catalogs
.PHONY: changes ## to make an overview of all changed/added/deprecated items
.PHONY: linkcheck ## to check all external links for integrity
.PHONY: doctest ## to run all doctests embedded in the documentation (if enabled)
.PHONY: coverage ## to run coverage check of the documentation (if enabled)
.PHONY: xml ## to make Docutils-native XML files
.PHONY: pseudoxml ## to make pseudoxml-XML files for display purposes
.PHONY: dummy ## to check syntax errors of document sources

.PHONY: livehtml ## to build and serve live-reloading documentation
livehtml:
	sphinx-autobuild -b html --watch ../graphene $(ALLSPHINXOPTS) $(BUILDDIR)/html
docs/api/index.rst (new file, 110 lines)

@@ -0,0 +1,110 @@

API Reference
=============

Schema
------

.. autoclass:: graphene.types.schema.Schema
    :members:

.. Uncomment sections / types as API documentation is fleshed out
.. in each class

Object types
------------

.. autoclass:: graphene.ObjectType

.. autoclass:: graphene.InputObjectType

.. autoclass:: graphene.Mutation
    :members:

.. _fields-mounted-types:

Fields (Mounted Types)
----------------------

.. autoclass:: graphene.Field

.. autoclass:: graphene.Argument

.. autoclass:: graphene.InputField

Fields (Unmounted Types)
------------------------

.. autoclass:: graphene.types.unmountedtype.UnmountedType

GraphQL Scalars
---------------

.. autoclass:: graphene.Int()

.. autoclass:: graphene.Float()

.. autoclass:: graphene.String()

.. autoclass:: graphene.Boolean()

.. autoclass:: graphene.ID()

Graphene Scalars
----------------

.. autoclass:: graphene.Date()

.. autoclass:: graphene.DateTime()

.. autoclass:: graphene.Time()

.. autoclass:: graphene.Decimal()

.. autoclass:: graphene.UUID()

.. autoclass:: graphene.JSONString()

.. autoclass:: graphene.Base64()

Enum
----

.. autoclass:: graphene.Enum()

Structures
----------

.. autoclass:: graphene.List

.. autoclass:: graphene.NonNull

Type Extension
--------------

.. autoclass:: graphene.Interface()

.. autoclass:: graphene.Union()

Execution Metadata
------------------

.. autoclass:: graphene.ResolveInfo

.. autoclass:: graphene.Context

.. autoclass:: graphql.ExecutionResult

.. Relay
.. -----

.. .. autoclass:: graphene.Node

.. .. autoclass:: graphene.GlobalID

.. .. autoclass:: graphene.ClientIDMutation

.. .. autoclass:: graphene.Connection

.. .. autoclass:: graphene.ConnectionField

.. .. autoclass:: graphene.PageInfo
docs/conf.py

@@ -1,4 +1,5 @@
import os
import sys

import sphinx_graphene_theme

@@ -22,9 +23,8 @@ on_rtd = os.environ.get("READTHEDOCS", None) == "True"
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
sys.path.insert(0, os.path.abspath(".."))

# -- General configuration ------------------------------------------------

@@ -41,6 +41,7 @@ extensions = [
    "sphinx.ext.todo",
    "sphinx.ext.coverage",
    "sphinx.ext.viewcode",
    "sphinx.ext.napoleon",
]
if not on_rtd:
    extensions += ["sphinx.ext.githubpages"]

@@ -62,25 +63,25 @@ source_suffix = ".rst"
master_doc = "index"

# General information about the project.
project = "Graphene"
copyright = "Graphene 2016"
author = "Syrus Akbary"

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "1.0"
# The full version, including alpha/beta/rc tags.
release = "1.0"

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
# language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:

@@ -276,7 +277,7 @@ latex_elements = {
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, "Graphene.tex", "Graphene Documentation", "Syrus Akbary", "manual")
]

# The name of an image file (relative to this directory) to place at the top of

@@ -316,7 +317,7 @@ latex_documents = [
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, "graphene", "Graphene Documentation", [author], 1)]

# If true, show URL addresses after external links.
#

@@ -332,7 +333,7 @@ texinfo_documents = [
    (
        master_doc,
        "Graphene",
        "Graphene Documentation",
        author,
        "Graphene",
        "One line description of project.",

@@ -454,5 +455,4 @@ intersphinx_mapping = {
        "http://docs.graphene-python.org/projects/sqlalchemy/en/latest/",
        None,
    ),
}

(The ``graphene_gae`` intersphinx entry was removed.)
@@ -4,7 +4,7 @@ Dataloader

DataLoader is a generic utility to be used as part of your application's
data fetching layer to provide a simplified and consistent API over
various remote data sources such as databases or web services via batching
and caching. It is provided by a separate package `aiodataloader <https://pypi.org/project/aiodataloader/>`_.


Batching
--------

@@ -15,38 +15,49 @@ Create loaders by providing a batch loading function.

.. code:: python

    from aiodataloader import DataLoader

    class UserLoader(DataLoader):
        async def batch_load_fn(self, keys):
            # Here we call a function to return a user for each key in keys
            return [get_user(id=key) for key in keys]


A batch loading async function accepts a list of keys, and returns a list of ``values``.

``DataLoader`` will coalesce all individual loads which occur within a
single frame of execution (executed once the wrapping event loop is resolved)
and then call your batch function with all requested keys.

.. code:: python

    user_loader = UserLoader()

    user1 = await user_loader.load(1)
    user1_best_friend = await user_loader.load(user1.best_friend_id)

    user2 = await user_loader.load(2)
    user2_best_friend = await user_loader.load(user2.best_friend_id)


A naive application may have issued *four* round-trips to a backend for the
required information, but with ``DataLoader`` this application will make at most *two*.

Note that loaded values are one-to-one with the keys and must have the same
order. This means that if you load all values from a single query, you must
make sure that you then order the query result for the results to match the keys:

.. code:: python

    class UserLoader(DataLoader):
        async def batch_load_fn(self, keys):
            users = {user.id: user for user in User.objects.filter(id__in=keys)}
            return [users.get(user_id) for user_id in keys]


``DataLoader`` allows you to decouple unrelated parts of your application without
sacrificing the performance of batch data-loading. While the loader presents
an API that loads individual values, all concurrent requests will be coalesced

@@ -84,7 +95,7 @@ Consider the following GraphQL request:
    }

If ``me``, ``bestFriend`` and ``friends`` each need to send a request to the backend,
there could be at most 13 database requests!

@@ -99,8 +110,8 @@ leaner code and at most 4 database requests

    best_friend = graphene.Field(lambda: User)
    friends = graphene.List(lambda: User)

    async def resolve_best_friend(root, info):
        return await user_loader.load(root.best_friend_id)

    async def resolve_friends(root, info):
        return await user_loader.load_many(root.friend_ids)
@@ -1,59 +1,138 @@

.. _SchemaExecute:

Executing a query
=================

For executing a query against a schema, you can directly call the ``execute`` method on it.

.. code:: python

    from graphene import Schema

    schema = Schema(...)
    result = schema.execute('{ name }')

``result`` represents the result of execution. ``result.data`` is the result of executing the query, ``result.errors`` is ``None`` if no errors occurred, and is a non-empty list if an error occurred.

.. _SchemaExecuteContext:

Context
_______

You can pass context to a query via ``context``.

.. code:: python

    from graphene import ObjectType, String, Schema

    class Query(ObjectType):
        name = String()

        def resolve_name(root, info):
            return info.context.get('name')

    schema = Schema(Query)
    result = schema.execute('{ name }', context={'name': 'Syrus'})
    assert result.data['name'] == 'Syrus'


Variables
_________

You can pass variables to a query via ``variables``.

.. code:: python

    from graphene import ObjectType, Field, ID, Schema

    class Query(ObjectType):
        user = Field(User, id=ID(required=True))

        def resolve_user(root, info, id):
            return get_user_by_id(id)

    schema = Schema(Query)
    result = schema.execute(
        '''
        query getUser($id: ID) {
            user(id: $id) {
                id
                firstName
                lastName
            }
        }
        ''',
        variables={'id': 12},
    )

Root Value
__________

Value used for :ref:`ResolverParamParent` in root queries and mutations can be overridden using ``root`` parameter.

.. code:: python

    from graphene import ObjectType, Field, Schema

    class Query(ObjectType):
        user = Field(User)

        def resolve_user(root, info):
            return {'id': root.id, 'firstName': root.name}

    schema = Schema(Query)
    user_root = User(id=12, name='bob')
    result = schema.execute(
        '''
        query getUser {
            user {
                id
                firstName
                lastName
            }
        }
        ''',
        root=user_root
    )
    assert result.data['user']['id'] == user_root.id

Operation Name
______________

If there are multiple operations defined in a query string, ``operation_name`` should be used to indicate which should be executed.

.. code:: python

    from graphene import ObjectType, Field, Schema

    class Query(ObjectType):
        user = Field(User)

        def resolve_user(root, info):
            return get_user_by_id(12)

    schema = Schema(Query)
    query_string = '''
        query getUserWithFirstName {
            user {
                id
                firstName
                lastName
            }
        }
        query getUserWithFullName {
            user {
                id
                fullName
            }
        }
    '''
    result = schema.execute(
        query_string,
        operation_name='getUserWithFullName'
    )
    assert result.data['user']['fullName']
docs/execution/fileuploading.rst (new file, 8 lines)

@@ -0,0 +1,8 @@

File uploading
==============

File uploading is not part of the official GraphQL spec yet and is not natively
implemented in Graphene.

If your server needs to support file uploading then you can use the library `graphene-file-upload <https://github.com/lmcgartland/graphene-file-upload>`_, which enhances Graphene to add file
uploads and conforms to the unofficial GraphQL `multipart request spec <https://github.com/jaydenseric/graphql-multipart-request-spec>`_.
@@ -8,3 +8,6 @@ Execution

   execute
   middleware
   dataloader
   fileuploading
   subscriptions
   queryvalidation
@@ -16,7 +16,7 @@ Resolve arguments

A middleware's ``resolve`` method is invoked with several arguments:

- ``next`` represents the execution chain. Call ``next`` to continue evaluation.
- ``root`` is the root value object passed throughout the query.
- ``info`` is the resolver info.
- ``args`` is the dict of arguments passed to the field.

@@ -41,12 +41,14 @@ And then execute it with:

    result = schema.execute('THE QUERY', middleware=[AuthorizationMiddleware()])

If the ``middleware`` argument includes multiple middlewares,
these middlewares will be executed bottom-up, i.e. from last to first.
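As a short sketch of that ordering (``AMiddleware`` and ``BMiddleware`` are placeholder names, not part of the documentation above):

.. code:: python

    # Per the note above, the list is processed bottom-up:
    # BMiddleware (last in the list) is applied before AMiddleware.
    result = schema.execute(
        'THE QUERY',
        middleware=[AMiddleware(), BMiddleware()],
    )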
Functional example
------------------

Middleware can also be defined as a function. Here we define a middleware that
logs the time it takes to resolve each field:

.. code:: python

@@ -55,12 +57,9 @@ logs the time it takes to resolve each field

    def timing_middleware(next, root, info, **args):
        start = timer()
        return_value = next(root, info, **args)
        duration = round((timer() - start) * 1000, 2)
        parent_type_name = root._meta.name if root and hasattr(root, '_meta') else ''
        logger.debug(f"{parent_type_name}.{info.field_name}: {duration} ms")
        return return_value
docs/execution/queryvalidation.rst (new file, 123 lines)

@@ -0,0 +1,123 @@

Query Validation
================

GraphQL uses query validators to check if the query AST is valid and can be executed. Every GraphQL server implements
standard query validators. For example, there is a validator that tests whether a queried field exists on the queried type,
which makes the query fail with a "Cannot query field on type" error if it doesn't.

To help with common use cases, graphene provides a few validation rules out of the box.


Depth limit Validator
---------------------

The depth limit validator helps to prevent execution of malicious
queries. It takes in the following arguments.

- ``max_depth`` is the maximum allowed depth for any operation in a GraphQL document.
- ``ignore`` stops recursive depth checking based on a field name. Either a string or regexp to match the name, or a function that returns a boolean.
- ``callback`` is called each time validation runs. It receives an Object which is a map of the depths for each operation.

Usage
-----

Here is how you would implement depth-limiting on your schema.

.. code:: python

    from graphql import validate, parse
    from graphene import ObjectType, Schema, String
    from graphene.validation import depth_limit_validator


    class MyQuery(ObjectType):
        name = String(required=True)


    schema = Schema(query=MyQuery)

    # queries which have a depth more than 20
    # will not be executed.

    validation_errors = validate(
        schema=schema.graphql_schema,
        document_ast=parse('THE QUERY'),
        rules=(
            depth_limit_validator(
                max_depth=20
            ),
        )
    )


Disable Introspection
---------------------

The disable introspection validation rule ensures that your schema cannot be introspected.
This is a useful security measure in production environments.

Usage
-----

Here is how you would disable introspection for your schema.

.. code:: python

    from graphql import validate, parse
    from graphene import ObjectType, Schema, String
    from graphene.validation import DisableIntrospection


    class MyQuery(ObjectType):
        name = String(required=True)


    schema = Schema(query=MyQuery)

    # introspection queries will not be executed.

    validation_errors = validate(
        schema=schema.graphql_schema,
        document_ast=parse('THE QUERY'),
        rules=(
            DisableIntrospection,
        )
    )


Implementing custom validators
------------------------------

All custom query validators should extend the `ValidationRule <https://github.com/graphql-python/graphql-core/blob/v3.0.5/src/graphql/validation/rules/__init__.py#L37>`_
base class importable from the graphql.validation.rules module. Query validators are visitor classes. They are
instantiated at the time of query validation with one required argument (context: ASTValidationContext). In order to
perform validation, your validator class should define one or more of enter_* and leave_* methods. For possible
enter/leave items as well as details on function documentation, please see contents of the visitor module. To make
validation fail, you should call the validator's report_error method with an instance of GraphQLError describing the failure
reason. Here is an example query validator that visits field definitions in a GraphQL query and fails query validation
if any of those fields are blacklisted:

.. code:: python

    from graphql import GraphQLError
    from graphql.language import FieldNode
    from graphql.validation import ValidationRule


    my_blacklist = (
        "disallowed_field",
    )


    def is_blacklisted_field(field_name: str):
        return field_name.lower() in my_blacklist


    class BlackListRule(ValidationRule):
        def enter_field(self, node: FieldNode, *_args):
            field_name = node.name.value
            if not is_blacklisted_field(field_name):
                return

            self.report_error(
                GraphQLError(
                    f"Cannot query '{field_name}': field is blacklisted.", node,
                )
            )
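A custom rule like this is then passed to ``validate`` in the same way as the built-in rules shown earlier. A minimal sketch, reusing the ``schema`` and ``BlackListRule`` defined above:

.. code:: python

    from graphql import parse, validate

    # Returns a list of GraphQLError; empty if the query passes the rule.
    validation_errors = validate(
        schema=schema.graphql_schema,
        document_ast=parse('THE QUERY'),
        rules=(BlackListRule,),
    )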
docs/execution/subscriptions.rst (new file, 40 lines)

@@ -0,0 +1,40 @@

.. _SchemaSubscription:

Subscriptions
=============

To create a subscription, you can directly call the ``subscribe`` method on the
schema. This method is async and must be awaited.

.. code:: python

    import asyncio
    from datetime import datetime
    from graphene import ObjectType, String, Schema, Field

    # Every schema requires a query.
    class Query(ObjectType):
        hello = String()

        def resolve_hello(root, info):
            return "Hello, world!"

    class Subscription(ObjectType):
        time_of_day = String()

        async def subscribe_time_of_day(root, info):
            while True:
                yield datetime.now().isoformat()
                await asyncio.sleep(1)

    schema = Schema(query=Query, subscription=Subscription)

    async def main(schema):
        subscription = 'subscription { timeOfDay }'
        result = await schema.subscribe(subscription)
        async for item in result:
            print(item.data['timeOfDay'])

    asyncio.run(main(schema))

The ``result`` is an async iterator which yields items in the same manner as a query.
@@ -11,11 +11,16 @@ Contents:

   execution/index
   relay/index
   testing/index
   api/index

.. _Integrations:

Integrations
------------

* `Graphene-Django <http://docs.graphene-python.org/projects/django/en/latest/>`_ (`source <https://github.com/graphql-python/graphene-django/>`_)
* Flask-Graphql (`source <https://github.com/graphql-python/flask-graphql>`_)
* `Graphene-SQLAlchemy <http://docs.graphene-python.org/projects/sqlalchemy/en/latest/>`_ (`source <https://github.com/graphql-python/graphene-sqlalchemy/>`_)
* `Graphene-Mongo <http://graphene-mongo.readthedocs.io/en/latest/>`_ (`source <https://github.com/graphql-python/graphene-mongo>`_)
* `Starlette <https://www.starlette.io/graphql/>`_ (`source <https://github.com/encode/starlette>`_)
* `FastAPI <https://fastapi.tiangolo.com/advanced/graphql/>`_ (`source <https://github.com/tiangolo/fastapi>`_)

(The Graphene-GAE entry was removed.)
@@ -1,57 +1,144 @@

Getting started
===============

Introduction
------------

What is GraphQL?
~~~~~~~~~~~~~~~~

GraphQL is a query language for your API.

It provides a standard way to:

* *describe data provided by a server* in a statically typed **Schema**
* *request data* in a **Query** which exactly describes your data requirements and
* *receive data* in a **Response** containing only the data you requested.

For an introduction to GraphQL and an overview of its concepts, please refer to `the official GraphQL documentation`_.

.. _the official GraphQL documentation: http://graphql.org/learn/

What is Graphene?
~~~~~~~~~~~~~~~~~

Graphene is a library that provides tools to implement a GraphQL API in Python using a *code-first* approach.

Compare Graphene's *code-first* approach to building a GraphQL API with *schema-first* approaches like `Apollo Server`_ (JavaScript) or Ariadne_ (Python). Instead of writing GraphQL **Schema Definition Language (SDL)**, we write Python code to describe the data provided by your server.

.. _Apollo Server: https://www.apollographql.com/docs/apollo-server/

.. _Ariadne: https://ariadnegraphql.org/

Graphene is fully featured with integrations for the most popular web frameworks and ORMs. Graphene produces schemas that are fully compliant with the GraphQL spec and provides tools and patterns for building a Relay-Compliant API as well.

An example in Graphene
----------------------

Let's build a basic GraphQL schema to say "hello" and "goodbye" in Graphene.

When we send a **Query** requesting only one **Field**, ``hello``, and specify a value for the ``firstName`` **Argument**...

.. code::

    {
      hello(firstName: "friend")
    }

...we would expect the following Response containing only the data requested (the ``goodbye`` field is not resolved).

.. code::

    {
      "data": {
        "hello": "Hello friend!"
      }
    }

Requirements
~~~~~~~~~~~~

- Python (3.8, 3.9, 3.10, 3.11, 3.12, pypy)
- Graphene (3.0)

Project setup
~~~~~~~~~~~~~

.. code:: bash

    pip install "graphene>=3.0"

Creating a basic Schema
~~~~~~~~~~~~~~~~~~~~~~~

In Graphene, we can define a simple schema using the following code:

.. code:: python

    from graphene import ObjectType, String, Schema

    class Query(ObjectType):
        # this defines a Field `hello` in our Schema with a single Argument `first_name`
        # By default, the argument name will automatically be camel-cased into firstName in the generated schema
        hello = String(first_name=String(default_value="stranger"))
        goodbye = String()

        # our Resolver method takes the GraphQL context (root, info) as well as
        # Argument (first_name) for the Field and returns data for the query Response
        def resolve_hello(root, info, first_name):
            return f'Hello {first_name}!'

        def resolve_goodbye(root, info):
            return 'See ya!'

    schema = Schema(query=Query)

A GraphQL **Schema** describes each **Field** in the data model provided by the server using scalar types like *String*, *Int* and *Enum* and compound types like *List* and *Object*. For more details refer to the Graphene :ref:`TypesReference`.

Our schema can also define any number of **Arguments** for our **Fields**. This is a powerful way for a **Query** to describe the exact data requirements for each **Field**.

For each **Field** in our **Schema**, we write a **Resolver** method to fetch data requested by a client's **Query** using the current context and **Arguments**. For more details, refer to this section on :ref:`Resolvers`.

Schema Definition Language (SDL)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

In the `GraphQL Schema Definition Language`_, we could describe the fields defined by our example code as shown below.

.. _GraphQL Schema Definition Language: https://graphql.org/learn/schema/

.. code::

    type Query {
      hello(firstName: String = "stranger"): String
      goodbye: String
    }

Further examples in this documentation will use SDL to describe the schema created by ObjectTypes and other fields.

Querying
~~~~~~~~

Then we can start querying our **Schema** by passing a GraphQL query string to ``execute``:

.. code:: python

    # we can query for our field (with the default argument)
    query_string = '{ hello }'
    result = schema.execute(query_string)
    print(result.data['hello'])
    # "Hello stranger!"

    # or passing the argument in the query
    query_with_argument = '{ hello(firstName: "GraphQL") }'
    result = schema.execute(query_with_argument)
    print(result.data['hello'])
    # "Hello GraphQL!"

Next steps
~~~~~~~~~~

Congrats! You got your first Graphene schema working!

Normally, we don't need to directly execute a query string against our schema as Graphene provides many useful Integrations with popular web frameworks like Flask and Django. Check out :ref:`Integrations` for more information on how to get started serving your GraphQL API.
@@ -41,5 +41,5 @@ that implements ``Node`` will have a default Connection.

    name = graphene.String()
    ships = relay.ConnectionField(ShipConnection)

    def resolve_ships(root, info):
        return []
@@ -19,11 +19,8 @@ Useful links

- `Getting started with Relay`_
- `Relay Global Identification Specification`_
- `Relay Cursor Connection Specification`_

.. _Relay: https://relay.dev/docs/guides/graphql-server-specification/
.. _Getting started with Relay: https://relay.dev/docs/getting-started/step-by-step-guide/
.. _Relay Global Identification Specification: https://relay.dev/graphql/objectidentification.htm
.. _Relay Cursor Connection Specification: https://relay.dev/graphql/connections.htm

(The "Relay input Object Mutation" entry and the old facebook.github.io link targets were removed.)
@@ -51,20 +51,20 @@ Example of a custom node:

        name = 'Node'

        @staticmethod
        def to_global_id(type_, id):
            return f"{type_}:{id}"

        @staticmethod
        def get_node_from_global_id(info, global_id, only_type=None):
            type_, id = global_id.split(':')
            if only_type:
                # We assure that the node type that we want to retrieve
                # is the same that was indicated in the field type
                assert type_ == only_type._meta.name, 'Received not compatible node.'

            if type_ == 'User':
                return get_user(id)
            elif type_ == 'Photo':
                return get_photo(id)
@@ -1,4 +1,5 @@

# Required library
Sphinx==6.1.3
sphinx-autobuild==2021.3.14
# Docs template
http://graphene-python.org/sphinx_graphene_theme.zip
@ -54,7 +54,7 @@ Execute parameters
|
||||||
~~~~~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
You can also add extra keyword arguments to the ``execute`` method, such as
|
You can also add extra keyword arguments to the ``execute`` method, such as
|
||||||
``context_value``, ``root_value``, ``variable_values``, ...:
|
``context``, ``root``, ``variables``, ...:
|
||||||
|
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
|
||||||
|
@ -63,49 +63,9 @@ You can also add extra keyword arguments to the ``execute`` method, such as
|
||||||
|
|
||||||
def test_hey():
|
def test_hey():
|
||||||
client = Client(my_schema)
|
client = Client(my_schema)
|
||||||
executed = client.execute('''{ hey }''', context_value={'user': 'Peter'})
|
executed = client.execute('''{ hey }''', context={'user': 'Peter'})
|
||||||
assert executed == {
|
assert executed == {
|
||||||
'data': {
|
'data': {
|
||||||
'hey': 'hello Peter!'
|
'hey': 'hello Peter!'
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
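The other keyword arguments work the same way. A minimal sketch using ``variables`` (the ``hey(name: String)`` field here is a hypothetical variant of the schema above, added only for illustration):

.. code:: python

    def test_hey_with_variables():
        client = Client(my_schema)
        executed = client.execute(
            '''query Hey($name: String) { hey(name: $name) }''',
            variables={'name': 'Peter'},
        )
        assert executed == {
            'data': {
                'hey': 'hello Peter!'
            }
        }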
|
|
||||||
Snapshot testing
|
|
||||||
~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
As our APIs evolve, we need to know when our changes introduce any breaking changes that might break
|
|
||||||
some of the clients of our GraphQL app.
|
|
||||||
|
|
||||||
However, writing tests and replicate the same response we expect from our GraphQL application can be
|
|
||||||
tedious and repetitive task, and sometimes it's easier to skip this process.
|
|
||||||
|
|
||||||
Because of that, we recommend the usage of `SnapshotTest <https://github.com/syrusakbary/snapshottest/>`_.
|
|
||||||
|
|
||||||
SnapshotTest let us write all this tests in a breeze, as creates automatically the ``snapshots`` for us
|
|
||||||
the first time the test is executed.
|
|
||||||
|
|
||||||
|
|
||||||
Here is a simple example on how our tests will look if we use ``pytest``:
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
def test_hey(snapshot):
|
|
||||||
client = Client(my_schema)
|
|
||||||
# This will create a snapshot dir and a snapshot file
|
|
||||||
# the first time the test is executed, with the response
|
|
||||||
# of the execution.
|
|
||||||
snapshot.assert_match(client.execute('''{ hey }'''))
|
|
||||||
|
|
||||||
|
|
||||||
If we are using ``unittest``:
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
from snapshottest import TestCase
|
|
||||||
|
|
||||||
class APITestCase(TestCase):
|
|
||||||
def test_api_me(self):
|
|
||||||
"""Testing the API for /me"""
|
|
||||||
client = Client(my_schema)
|
|
||||||
self.assertMatchSnapshot(client.execute('''{ hey }'''))
|
|
||||||
|
|
|
@ -1,43 +0,0 @@
|
||||||
AbstractTypes
|
|
||||||
=============
|
|
||||||
|
|
||||||
An AbstractType contains fields that can be shared among
|
|
||||||
``graphene.ObjectType``, ``graphene.Interface``,
|
|
||||||
``graphene.InputObjectType`` or other ``graphene.AbstractType``.
|
|
||||||
|
|
||||||
The basics:
|
|
||||||
|
|
||||||
- Each AbstractType is a Python class that inherits from ``graphene.AbstractType``.
|
|
||||||
- Each attribute of the AbstractType represents a field (a ``graphene.Field`` or
|
|
||||||
``graphene.InputField`` depending on where it is mounted)
|
|
||||||
|
|
||||||
Quick example
|
|
||||||
-------------
|
|
||||||
|
|
||||||
In this example UserFields is an ``AbstractType`` with a name. ``User`` and
|
|
||||||
``UserInput`` are two types that have their own fields
|
|
||||||
plus the ones defined in ``UserFields``.
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
import graphene
|
|
||||||
|
|
||||||
class UserFields(graphene.AbstractType):
|
|
||||||
name = graphene.String()
|
|
||||||
|
|
||||||
class User(graphene.ObjectType, UserFields):
|
|
||||||
pass
|
|
||||||
|
|
||||||
class UserInput(graphene.InputObjectType, UserFields):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
.. code::
|
|
||||||
|
|
||||||
type User {
|
|
||||||
name: String
|
|
||||||
}
|
|
||||||
|
|
||||||
inputtype UserInput {
|
|
||||||
name: String
|
|
||||||
}
|
|
|
@ -1,7 +1,7 @@
|
||||||
Enums
|
Enums
|
||||||
=====
|
=====
|
||||||
|
|
||||||
A ``Enum`` is a special ``GraphQL`` type that represents a set of
|
An ``Enum`` is a special ``GraphQL`` type that represents a set of
|
||||||
symbolic names (members) bound to unique, constant values.
|
symbolic names (members) bound to unique, constant values.
|
||||||
|
|
||||||
Definition
|
Definition
|
||||||
|
@ -59,7 +59,10 @@ you can add description etc. to your enum without changing the original:
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
|
||||||
|
|
||||||
graphene.Enum.from_enum(AlreadyExistingPyEnum, description=lambda value: return 'foo' if value == AlreadyExistingPyEnum.Foo else 'bar')
|
graphene.Enum.from_enum(
|
||||||
|
AlreadyExistingPyEnum,
|
||||||
|
description=lambda v: 'foo' if v == AlreadyExistingPyEnum.Foo else 'bar'
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
Notes
|
Notes
|
||||||
|
@ -74,6 +77,7 @@ In the Python ``Enum`` implementation you can access a member by initing the Enu
|
||||||
.. code:: python
|
.. code:: python
|
||||||
|
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
|
|
||||||
class Color(Enum):
|
class Color(Enum):
|
||||||
RED = 1
|
RED = 1
|
||||||
GREEN = 2
|
GREEN = 2
|
||||||
|
@ -82,11 +86,12 @@ In the Python ``Enum`` implementation you can access a member by initing the Enu
|
||||||
assert Color(1) == Color.RED
|
assert Color(1) == Color.RED
|
||||||
|
|
||||||
|
|
||||||
However, in Graphene ``Enum`` you need to call get to have the same effect:
|
However, in Graphene ``Enum`` you need to call ``.get`` to have the same effect:
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
|
||||||
|
|
||||||
from graphene import Enum
|
from graphene import Enum
|
||||||
|
|
||||||
class Color(Enum):
|
class Color(Enum):
|
||||||
RED = 1
|
RED = 1
|
||||||
GREEN = 2
|
GREEN = 2
|
||||||
|
|
|
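A minimal sketch of that ``.get`` lookup, mirroring the plain ``Enum`` example above:

.. code:: python

    from graphene import Enum

    class Color(Enum):
        RED = 1
        GREEN = 2

    # Graphene enums are looked up with `.get` rather than by calling the class
    assert Color.get(1) == Color.RED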
@ -1,3 +1,5 @@
|
||||||
|
.. _TypesReference:
|
||||||
|
|
||||||
===============
|
===============
|
||||||
Types Reference
|
Types Reference
|
||||||
===============
|
===============
|
||||||
|
@ -5,12 +7,11 @@ Types Reference
|
||||||
.. toctree::
|
.. toctree::
|
||||||
:maxdepth: 1
|
:maxdepth: 1
|
||||||
|
|
||||||
enums
|
schema
|
||||||
scalars
|
scalars
|
||||||
list-and-nonnull
|
list-and-nonnull
|
||||||
objecttypes
|
objecttypes
|
||||||
|
enums
|
||||||
interfaces
|
interfaces
|
||||||
unions
|
unions
|
||||||
schema
|
|
||||||
mutations
|
mutations
|
||||||
abstracttypes
|
|
||||||
|
|
|
@ -1,3 +1,5 @@
|
||||||
|
.. _Interfaces:
|
||||||
|
|
||||||
Interfaces
|
Interfaces
|
||||||
==========
|
==========
|
||||||
|
|
||||||
|
@ -42,7 +44,7 @@ Both of these types have all of the fields from the ``Character`` interface,
|
||||||
but also bring in extra fields, ``home_planet``, ``starships`` and
|
but also bring in extra fields, ``home_planet``, ``starships`` and
|
||||||
``primary_function``, that are specific to that particular type of character.
|
``primary_function``, that are specific to that particular type of character.
|
||||||
|
|
||||||
The full GraphQL schema defition will look like this:
|
The full GraphQL schema definition will look like this:
|
||||||
|
|
||||||
.. code::
|
.. code::
|
||||||
|
|
||||||
|
@ -82,7 +84,7 @@ For example, you can define a field ``hero`` that resolves to any
|
||||||
episode=graphene.Int(required=True)
|
episode=graphene.Int(required=True)
|
||||||
)
|
)
|
||||||
|
|
||||||
def resolve_hero(_, info, episode):
|
def resolve_hero(root, info, episode):
|
||||||
# Luke is the hero of Episode V
|
# Luke is the hero of Episode V
|
||||||
if episode == 5:
|
if episode == 5:
|
||||||
return get_human(name='Luke Skywalker')
|
return get_human(name='Luke Skywalker')
|
||||||
|
@ -91,7 +93,7 @@ For example, you can define a field ``hero`` that resolves to any
|
||||||
schema = graphene.Schema(query=Query, types=[Human, Droid])
|
schema = graphene.Schema(query=Query, types=[Human, Droid])
|
||||||
|
|
||||||
This allows you to directly query for fields that exist on the Character interface
|
This allows you to directly query for fields that exist on the Character interface
|
||||||
as well as selecting specific fields on any type that implments the interface
|
as well as selecting specific fields on any type that implements the interface
|
||||||
using `inline fragments <https://graphql.org/learn/queries/#inline-fragments>`_.
|
using `inline fragments <https://graphql.org/learn/queries/#inline-fragments>`_.
|
||||||
|
|
||||||
For example, the following query:
|
For example, the following query:
|
||||||
|
|
|
@ -19,7 +19,7 @@ This example defines a Mutation:
|
||||||
ok = graphene.Boolean()
|
ok = graphene.Boolean()
|
||||||
person = graphene.Field(lambda: Person)
|
person = graphene.Field(lambda: Person)
|
||||||
|
|
||||||
def mutate(self, info, name):
|
def mutate(root, info, name):
|
||||||
person = Person(name=name)
|
person = Person(name=name)
|
||||||
ok = True
|
ok = True
|
||||||
return CreatePerson(person=person, ok=ok)
|
return CreatePerson(person=person, ok=ok)
|
||||||
|
@ -27,12 +27,13 @@ This example defines a Mutation:
|
||||||
**person** and **ok** are the output fields of the Mutation when it is
|
**person** and **ok** are the output fields of the Mutation when it is
|
||||||
resolved.
|
resolved.
|
||||||
|
|
||||||
**Input** attributes are the arguments that the Mutation
|
**Arguments** attributes are the arguments that the Mutation
|
||||||
``CreatePerson`` needs for resolving, in this case **name** will be the
|
``CreatePerson`` needs for resolving, in this case **name** will be the
|
||||||
only argument for the mutation.
|
only argument for the mutation.
|
||||||
|
|
||||||
**mutate** is the function that will be applied once the mutation is
|
**mutate** is the function that will be applied once the mutation is
|
||||||
called.
|
called. This method is just a special resolver that we can change
|
||||||
|
data within. It takes the same arguments as the standard query :ref:`ResolverArguments`.
|
||||||
|
|
||||||
So, we can finish our schema like this:
|
So, we can finish our schema like this:
|
||||||
|
|
||||||
|
@ -84,9 +85,9 @@ We should receive:
|
||||||
|
|
||||||
InputFields and InputObjectTypes
|
InputFields and InputObjectTypes
|
||||||
----------------------------------
|
----------------------------------
|
||||||
InputFields are used in mutations to allow nested input data for mutations
|
InputFields are used in mutations to allow nested input data for mutations.
|
||||||
|
|
||||||
To use an InputField you define an InputObjectType that specifies the structure of your input data
|
To use an InputField you define an InputObjectType that specifies the structure of your input data:
|
||||||
|
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
|
||||||
|
@ -103,7 +104,6 @@ To use an InputField you define an InputObjectType that specifies the structure
|
||||||
|
|
||||||
person = graphene.Field(Person)
|
person = graphene.Field(Person)
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def mutate(root, info, person_data=None):
|
def mutate(root, info, person_data=None):
|
||||||
person = Person(
|
person = Person(
|
||||||
name=person_data.name,
|
name=person_data.name,
|
||||||
|
@ -112,7 +112,7 @@ To use an InputField you define an InputObjectType that specifies the structure
|
||||||
return CreatePerson(person=person)
|
return CreatePerson(person=person)
|
||||||
|
|
||||||
|
|
||||||
Note that **name** and **age** are part of **person_data** now
|
Note that **name** and **age** are part of **person_data** now.
|
||||||
|
|
||||||
Using the above mutation your new query would look like this:
|
Using the above mutation your new query would look like this:
|
||||||
|
|
||||||
|
@ -128,7 +128,7 @@ Using the above mutation your new query would look like this:
|
||||||
}
|
}
|
||||||
|
|
||||||
InputObjectTypes can also be fields of InputObjectTypes allowing you to have
|
InputObjectTypes can also be fields of InputObjectTypes allowing you to have
|
||||||
as complex of input data as you need
|
as complex of input data as you need:
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
|
||||||
|
|
||||||
|
@ -157,10 +157,10 @@ To return an existing ObjectType instead of a mutation-specific type, set the **
|
||||||
|
|
||||||
Output = Person
|
Output = Person
|
||||||
|
|
||||||
def mutate(self, info, name):
|
def mutate(root, info, name):
|
||||||
return Person(name=name)
|
return Person(name=name)
|
||||||
|
|
||||||
Then, if we query (``schema.execute(query_str)``) the following:
|
Then, if we query (``schema.execute(query_str)``) with the following:
|
||||||
|
|
||||||
.. code::
|
.. code::
|
||||||
|
|
||||||
|
|
|
@ -1,15 +1,15 @@
|
||||||
ObjectTypes
|
.. _ObjectType:
|
||||||
===========
|
|
||||||
|
|
||||||
An ObjectType is the single, definitive source of information about your
|
ObjectType
|
||||||
data. It contains the essential fields and behaviors of the data you’re
|
==========
|
||||||
querying.
|
|
||||||
|
A Graphene *ObjectType* is the building block used to define the relationship between **Fields** in your **Schema** and how their data is retrieved.
|
||||||
|
|
||||||
The basics:
|
The basics:
|
||||||
|
|
||||||
- Each ObjectType is a Python class that inherits from
|
- Each ObjectType is a Python class that inherits from ``graphene.ObjectType``.
|
||||||
``graphene.ObjectType``.
|
|
||||||
- Each attribute of the ObjectType represents a ``Field``.
|
- Each attribute of the ObjectType represents a ``Field``.
|
||||||
|
- Each ``Field`` has a :ref:`resolver method<Resolvers>` to fetch data (or :ref:`DefaultResolver`).
|
||||||
|
|
||||||
Quick example
|
Quick example
|
||||||
-------------
|
-------------
|
||||||
|
@ -18,19 +18,17 @@ This example model defines a Person, with a first and a last name:
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
|
||||||
|
|
||||||
import graphene
|
from graphene import ObjectType, String
|
||||||
|
|
||||||
class Person(graphene.ObjectType):
|
class Person(ObjectType):
|
||||||
first_name = graphene.String()
|
first_name = String()
|
||||||
last_name = graphene.String()
|
last_name = String()
|
||||||
full_name = graphene.String()
|
full_name = String()
|
||||||
|
|
||||||
def resolve_full_name(self, info):
|
def resolve_full_name(parent, info):
|
||||||
return '{} {}'.format(self.first_name, self.last_name)
|
return f"{parent.first_name} {parent.last_name}"
|
||||||
|
|
||||||
**first\_name** and **last\_name** are fields of the ObjectType. Each
|
This *ObjectType* defines the fields **first\_name**, **last\_name**, and **full\_name**. Each field is specified as a class attribute, and each attribute maps to a Field. Data is fetched by our ``resolve_full_name`` :ref:`resolver method<Resolvers>` for the ``full_name`` field and by the :ref:`DefaultResolver` for the other fields.
|
||||||
field is specified as a class attribute, and each attribute maps to a
|
|
||||||
Field.
|
|
||||||
|
|
||||||
The above ``Person`` ObjectType has the following schema representation:
|
The above ``Person`` ObjectType has the following schema representation:
|
||||||
|
|
||||||
|
@ -42,65 +40,396 @@ The above ``Person`` ObjectType has the following schema representation:
|
||||||
fullName: String
|
fullName: String
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.. _Resolvers:
|
||||||
|
|
||||||
Resolvers
|
Resolvers
|
||||||
---------
|
---------
|
||||||
|
|
||||||
A resolver is a method that resolves certain fields within a
|
A **Resolver** is a method that helps us answer **Queries** by fetching data for a **Field** in our **Schema**.
|
||||||
``ObjectType``. If not specififed otherwise, the resolver of a
|
|
||||||
field is the ``resolve_{field_name}`` method on the ``ObjectType``.
|
|
||||||
|
|
||||||
By default resolvers take the arguments ``info`` and ``*args``.
|
Resolvers are lazily executed, so if a field is not included in a query, its resolver will not be executed.
|
||||||
|
|
||||||
NOTE: The resolvers on a ``ObjectType`` are always treated as ``staticmethod``\ s,
|
Each field on an *ObjectType* in Graphene should have a corresponding resolver method to fetch data. The resolver method name should match the field name, prefixed with ``resolve_``. For example, in the ``Person`` type above, the ``full_name`` field is resolved by the method ``resolve_full_name``.
|
||||||
so the first argument to the resolver method ``self`` (or ``root``) need
|
|
||||||
not be an actual instance of the ``ObjectType``.
|
|
||||||
|
|
||||||
|
Each resolver method takes the parameters:
|
||||||
|
|
||||||
Quick example
|
* :ref:`ResolverParamParent` for the value object use to resolve most fields
|
||||||
~~~~~~~~~~~~~
|
* :ref:`ResolverParamInfo` for query and schema meta information and per-request context
|
||||||
|
* :ref:`ResolverParamGraphQLArguments` as defined on the **Field**.
|
||||||
|
|
||||||
This example model defines a ``Query`` type, which has a reverse field
|
.. _ResolverArguments:
|
||||||
that reverses the given ``word`` argument using the ``resolve_reverse``
|
|
||||||
method in the class.
|
Resolver Parameters
|
||||||
|
~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
.. _ResolverParamParent:
|
||||||
|
|
||||||
|
Parent Value Object (*parent*)
|
||||||
|
******************************
|
||||||
|
|
||||||
|
This parameter is typically used to derive the values for most fields on an *ObjectType*.
|
||||||
|
|
||||||
|
The first parameter of a resolver method (*parent*) is the value object returned from the resolver of the parent field. If there is no parent field, such as a root Query field, then the value for *parent* is set to the ``root_value`` configured while executing the query (default ``None``). See :ref:`SchemaExecute` for more details on executing queries.
|
||||||
|
|
||||||
|
Resolver example
|
||||||
|
^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
If we have a schema with a Person type and one field on the root query:
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
|
||||||
|
|
||||||
import graphene
|
from graphene import ObjectType, String, Field
|
||||||
|
|
||||||
class Query(graphene.ObjectType):
|
def get_human(name):
|
||||||
reverse = graphene.String(word=graphene.String())
|
first_name, last_name = name.split()
|
||||||
|
return Person(first_name, last_name)
|
||||||
|
|
||||||
def resolve_reverse(self, info, word):
|
class Person(ObjectType):
|
||||||
return word[::-1]
|
full_name = String()
|
||||||
|
|
||||||
|
def resolve_full_name(parent, info):
|
||||||
|
return f"{parent.first_name} {parent.last_name}"
|
||||||
|
|
||||||
|
class Query(ObjectType):
|
||||||
|
me = Field(Person)
|
||||||
|
|
||||||
|
def resolve_me(parent, info):
|
||||||
|
# returns an object that represents a Person
|
||||||
|
return get_human(name="Luke Skywalker")
|
||||||
|
|
||||||
|
When we execute a query against that schema:
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
schema = Schema(query=Query)
|
||||||
|
|
||||||
|
query_string = "{ me { fullName } }"
|
||||||
|
result = schema.execute(query_string)
|
||||||
|
|
||||||
|
assert result.data["me"] == {"fullName": "Luke Skywalker"}
|
||||||
|
|
||||||
|
Then we go through the following steps to resolve this query:
|
||||||
|
|
||||||
|
* ``parent`` is set with the root_value from query execution (None).
|
||||||
|
* ``Query.resolve_me`` called with ``parent`` None which returns a value object ``Person("Luke", "Skywalker")``.
|
||||||
|
* This value object is then used as ``parent`` while calling ``Person.resolve_full_name`` to resolve the scalar String value "Luke Skywalker".
|
||||||
|
* The scalar value is serialized and sent back in the query response.
|
||||||
|
|
||||||
|
Each resolver returns the next :ref:`ResolverParamParent` to be used in executing the following resolver in the chain. If the Field is a Scalar type, that value will be serialized and sent in the **Response**. Otherwise, while resolving Compound types like *ObjectType*, the value will be passed forward as the next :ref:`ResolverParamParent`.
|
||||||
|
|
||||||
|
Naming convention
|
||||||
|
^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
This :ref:`ResolverParamParent` is sometimes named ``obj``, ``parent``, or ``source`` in other GraphQL documentation. It can also be named after the value object being resolved (e.g. ``root`` for a root Query or Mutation, and ``person`` for a Person value object). Sometimes this argument will be named ``self`` in Graphene code, but this can be misleading due to :ref:`ResolverImplicitStaticMethod` while executing queries in Graphene.
|
||||||
|
|
||||||
|
.. _ResolverParamInfo:
|
||||||
|
|
||||||
|
GraphQL Execution Info (*info*)
|
||||||
|
*******************************
|
||||||
|
|
||||||
|
The second parameter provides two things:
|
||||||
|
|
||||||
|
* reference to meta information about the execution of the current GraphQL Query (fields, schema, parsed query, etc.)
|
||||||
|
* access to per-request ``context`` which can be used to store user authentication, data loader instances or anything else useful for resolving the query.
|
||||||
|
|
||||||
|
Only context will be required for most applications. See :ref:`SchemaExecuteContext` for more information about setting context.
|
||||||
|
|
||||||
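A minimal sketch of reading per-request context inside a resolver (the ``user`` key is an assumption about what was supplied as ``context`` when executing the query):

.. code:: python

    from graphene import ObjectType, String

    class Query(ObjectType):
        greeting = String()

        def resolve_greeting(parent, info):
            # `info.context` holds whatever was passed as `context` to schema.execute(),
            # e.g. schema.execute(query_string, context={"user": "Peter"})
            user = info.context.get("user", "anonymous")
            return f"hello {user}!"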
|
.. _ResolverParamGraphQLArguments:
|
||||||
|
|
||||||
|
GraphQL Arguments (*\*\*kwargs*)
|
||||||
|
********************************
|
||||||
|
|
||||||
|
Any arguments that a field defines get passed to the resolver function as
|
||||||
|
keyword arguments. For example:
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
from graphene import ObjectType, Field, String
|
||||||
|
|
||||||
|
class Query(ObjectType):
|
||||||
|
human_by_name = Field(Human, name=String(required=True))
|
||||||
|
|
||||||
|
def resolve_human_by_name(parent, info, name):
|
||||||
|
return get_human(name=name)
|
||||||
|
|
||||||
|
You can then execute the following query:
|
||||||
|
|
||||||
|
.. code::
|
||||||
|
|
||||||
|
query {
|
||||||
|
humanByName(name: "Luke Skywalker") {
|
||||||
|
firstName
|
||||||
|
lastName
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
*Note:* There are several arguments to a field that are "reserved" by Graphene
|
||||||
|
(see :ref:`fields-mounted-types`).
|
||||||
|
You can still define an argument that clashes with one of these fields by using
|
||||||
|
the ``args`` parameter like so:
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
from graphene import ObjectType, Field, String
|
||||||
|
|
||||||
|
class Query(ObjectType):
|
||||||
|
answer = String(args={'description': String()})
|
||||||
|
|
||||||
|
def resolve_answer(parent, info, description):
|
||||||
|
return description
|
||||||
|
|
||||||
|
|
||||||
|
Convenience Features of Graphene Resolvers
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
.. _ResolverImplicitStaticMethod:
|
||||||
|
|
||||||
|
Implicit staticmethod
|
||||||
|
*********************
|
||||||
|
|
||||||
|
One surprising feature of Graphene is that all resolver methods are treated implicitly as staticmethods. This means that, unlike other methods in Python, the first argument of a resolver is *never* ``self`` while it is being executed by Graphene. Instead, the first argument is always :ref:`ResolverParamParent`. In practice, this is very convenient as, in GraphQL, we are almost always more concerned with using the parent value object to resolve queries than attributes on the Python object itself.
|
||||||
|
|
||||||
|
The two resolvers in this example are effectively the same.
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
from graphene import ObjectType, String
|
||||||
|
|
||||||
|
class Person(ObjectType):
|
||||||
|
first_name = String()
|
||||||
|
last_name = String()
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def resolve_first_name(parent, info):
|
||||||
|
'''
|
||||||
|
Decorating a Python method with `staticmethod` ensures that `self` will not be provided as an
|
||||||
|
argument. However, Graphene does not need this decorator for this behavior.
|
||||||
|
'''
|
||||||
|
return parent.first_name
|
||||||
|
|
||||||
|
def resolve_last_name(parent, info):
|
||||||
|
'''
|
||||||
|
Normally the first argument for this method would be `self`, but Graphene executes this as
|
||||||
|
a staticmethod implicitly.
|
||||||
|
'''
|
||||||
|
return parent.last_name
|
||||||
|
|
||||||
|
# ...
|
||||||
|
|
||||||
|
If you prefer your code to be more explicit, feel free to use ``@staticmethod`` decorators. Otherwise, your code may be cleaner without them!
|
||||||
|
|
||||||
|
.. _DefaultResolver:
|
||||||
|
|
||||||
|
Default Resolver
|
||||||
|
****************
|
||||||
|
|
||||||
|
If a resolver method is not defined for a **Field** attribute on our *ObjectType*, Graphene supplies a default resolver.
|
||||||
|
|
||||||
|
If the :ref:`ResolverParamParent` is a dictionary, the resolver will look for a dictionary key matching the field name. Otherwise, the resolver will get the attribute from the parent value object matching the field name.
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
from collections import namedtuple
|
||||||
|
|
||||||
|
from graphene import ObjectType, String, Field, Schema
|
||||||
|
|
||||||
|
PersonValueObject = namedtuple("Person", ["first_name", "last_name"])
|
||||||
|
|
||||||
|
class Person(ObjectType):
|
||||||
|
first_name = String()
|
||||||
|
last_name = String()
|
||||||
|
|
||||||
|
class Query(ObjectType):
|
||||||
|
me = Field(Person)
|
||||||
|
my_best_friend = Field(Person)
|
||||||
|
|
||||||
|
def resolve_me(parent, info):
|
||||||
|
# always pass an object for `me` field
|
||||||
|
return PersonValueObject(first_name="Luke", last_name="Skywalker")
|
||||||
|
|
||||||
|
def resolve_my_best_friend(parent, info):
|
||||||
|
# always pass a dictionary for the `my_best_friend` field
|
||||||
|
return {"first_name": "R2", "last_name": "D2"}
|
||||||
|
|
||||||
|
schema = Schema(query=Query)
|
||||||
|
result = schema.execute('''
|
||||||
|
{
|
||||||
|
me { firstName lastName }
|
||||||
|
myBestFriend { firstName lastName }
|
||||||
|
}
|
||||||
|
''')
|
||||||
|
# With default resolvers we can resolve attributes from an object.
|
||||||
|
assert result.data["me"] == {"firstName": "Luke", "lastName": "Skywalker"}
|
||||||
|
|
||||||
|
# With default resolvers, we can also resolve keys from a dictionary.
|
||||||
|
assert result.data["myBestFriend"] == {"firstName": "R2", "lastName": "D2"}
|
||||||
|
|
||||||
|
Advanced
|
||||||
|
~~~~~~~~
|
||||||
|
|
||||||
|
GraphQL Argument defaults
|
||||||
|
*************************
|
||||||
|
|
||||||
|
If you define an argument for a field that is not required (and in a query
|
||||||
|
execution it is not provided as an argument) it will not be passed to the
|
||||||
|
resolver function at all. This is so that the developer can differentiate
|
||||||
|
between an ``undefined`` value for an argument and an explicit ``null`` value.
|
||||||
|
|
||||||
|
For example, given this schema:
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
from graphene import ObjectType, String
|
||||||
|
|
||||||
|
class Query(ObjectType):
|
||||||
|
hello = String(required=True, name=String())
|
||||||
|
|
||||||
|
def resolve_hello(parent, info, name):
|
||||||
|
return name if name else 'World'
|
||||||
|
|
||||||
|
And this query:
|
||||||
|
|
||||||
|
.. code::
|
||||||
|
|
||||||
|
query {
|
||||||
|
hello
|
||||||
|
}
|
||||||
|
|
||||||
|
An error will be thrown:
|
||||||
|
|
||||||
|
.. code::
|
||||||
|
|
||||||
|
TypeError: resolve_hello() missing 1 required positional argument: 'name'
|
||||||
|
|
||||||
|
You can fix this error in several ways. Either by combining all keyword arguments
|
||||||
|
into a dict:
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
from graphene import ObjectType, String
|
||||||
|
|
||||||
|
class Query(ObjectType):
|
||||||
|
hello = String(required=True, name=String())
|
||||||
|
|
||||||
|
def resolve_hello(parent, info, **kwargs):
|
||||||
|
name = kwargs.get('name', 'World')
|
||||||
|
return f'Hello, {name}!'
|
||||||
|
|
||||||
|
Or by setting a default value for the keyword argument:
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
from graphene import ObjectType, String
|
||||||
|
|
||||||
|
class Query(ObjectType):
|
||||||
|
hello = String(required=True, name=String())
|
||||||
|
|
||||||
|
def resolve_hello(parent, info, name='World'):
|
||||||
|
return f'Hello, {name}!'
|
||||||
|
|
||||||
|
One can also set a default value for an Argument in the GraphQL schema itself using Graphene!
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
from graphene import ObjectType, String
|
||||||
|
|
||||||
|
class Query(ObjectType):
|
||||||
|
hello = String(
|
||||||
|
required=True,
|
||||||
|
name=String(default_value='World')
|
||||||
|
)
|
||||||
|
|
||||||
|
def resolve_hello(parent, info, name):
|
||||||
|
return f'Hello, {name}!'
|
||||||
|
|
||||||
Resolvers outside the class
|
Resolvers outside the class
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
***************************
|
||||||
|
|
||||||
A field can use a custom resolver from outside the class:
|
A field can use a custom resolver from outside the class:
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
|
||||||
|
|
||||||
import graphene
|
from graphene import ObjectType, String
|
||||||
|
|
||||||
def reverse(root, info, word):
|
def resolve_full_name(person, info):
|
||||||
return word[::-1]
|
return f"{person.first_name} {person.last_name}"
|
||||||
|
|
||||||
class Query(graphene.ObjectType):
|
class Person(ObjectType):
|
||||||
reverse = graphene.String(word=graphene.String(), resolver=reverse)
|
first_name = String()
|
||||||
|
last_name = String()
|
||||||
|
full_name = String(resolver=resolve_full_name)
|
||||||
|
|
||||||
|
|
||||||
Instances as data containers
|
Instances as value objects
|
||||||
----------------------------
|
**************************
|
||||||
|
|
||||||
Graphene ``ObjectType``\ s can act as containers too. So with the
|
Graphene ``ObjectType``\ s can act as value objects too. So with the
|
||||||
previous example you could do:
|
previous example you could use ``Person`` to capture data for each of the *ObjectType*'s fields.
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
|
||||||
|
|
||||||
peter = Person(first_name='Peter', last_name='Griffin')
|
peter = Person(first_name='Peter', last_name='Griffin')
|
||||||
|
|
||||||
peter.first_name # prints "Peter"
|
peter.first_name # prints "Peter"
|
||||||
peter.last_name # prints "Griffin"
|
peter.last_name # prints "Griffin"
|
||||||
|
|
||||||
|
Field camelcasing
|
||||||
|
*****************
|
||||||
|
|
||||||
|
Graphene automatically camelcases fields on *ObjectType* from ``field_name`` to ``fieldName`` to conform with GraphQL standards. See :ref:`SchemaAutoCamelCase` for more information.
|
||||||
|
|
||||||
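A minimal sketch of that behaviour, assuming nothing beyond the default ``auto_camelcase`` setting:

.. code:: python

    from graphene import ObjectType, String, Field, Schema

    class Person(ObjectType):
        first_name = String()

    class Query(ObjectType):
        me = Field(Person)

        def resolve_me(parent, info):
            return Person(first_name="Luke")

    schema = Schema(query=Query)

    # `first_name` is exposed as `firstName` in the GraphQL schema
    result = schema.execute("{ me { firstName } }")
    assert result.data == {"me": {"firstName": "Luke"}}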
|
*ObjectType* Configuration - Meta class
|
||||||
|
---------------------------------------
|
||||||
|
|
||||||
|
Graphene uses a Meta inner class on *ObjectType* to set different options.
|
||||||
|
|
||||||
|
GraphQL type name
|
||||||
|
~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
By default the type name in the GraphQL schema will be the same as the class name
|
||||||
|
that defines the ``ObjectType``. This can be changed by setting the ``name``
|
||||||
|
property on the ``Meta`` class:
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
from graphene import ObjectType
|
||||||
|
|
||||||
|
class MyGraphQlSong(ObjectType):
|
||||||
|
class Meta:
|
||||||
|
name = 'Song'
|
||||||
|
|
||||||
|
GraphQL Description
|
||||||
|
~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
The schema description of an *ObjectType* can be set as a docstring on the Python object or on the Meta inner class.
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
from graphene import ObjectType
|
||||||
|
|
||||||
|
class MyGraphQlSong(ObjectType):
|
||||||
|
''' We can set the schema description for an Object Type here on a docstring '''
|
||||||
|
class Meta:
|
||||||
|
description = 'But if we set the description in Meta, this value is used instead'
|
||||||
|
|
||||||
|
Interfaces & Possible Types
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Setting ``interfaces`` in the Meta inner class specifies the GraphQL Interfaces that this Object implements.
|
||||||
|
|
||||||
|
Providing ``possible_types`` helps Graphene resolve ambiguous types such as interfaces or Unions.
|
||||||
|
|
||||||
|
See :ref:`Interfaces` for more information.
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
from collections import namedtuple
from graphene import ObjectType, Node
|
||||||
|
|
||||||
|
Song = namedtuple('Song', ('title', 'artist'))
|
||||||
|
|
||||||
|
class MyGraphQlSong(ObjectType):
|
||||||
|
class Meta:
|
||||||
|
interfaces = (Node, )
|
||||||
|
possible_types = (Song, )
|
||||||
|
|
||||||
.. _Interface: /docs/interfaces/
|
.. _Interface: /docs/interfaces/
|
||||||
|
|
|
@ -1,6 +1,13 @@
|
||||||
|
.. _Scalars:
|
||||||
|
|
||||||
Scalars
|
Scalars
|
||||||
=======
|
=======
|
||||||
|
|
||||||
|
Scalar types represent concrete values at the leaves of a query. There are
|
||||||
|
several built in types that Graphene provides out of the box which represent common
|
||||||
|
values in Python. You can also create your own Scalar types to better express
|
||||||
|
values that you might have in your data model.
|
||||||
|
|
||||||
All Scalar types accept the following arguments. All are optional:
|
All Scalar types accept the following arguments. All are optional:
|
||||||
|
|
||||||
``name``: *string*
|
``name``: *string*
|
||||||
|
@ -13,7 +20,7 @@ All Scalar types accept the following arguments. All are optional:
|
||||||
|
|
||||||
``required``: *boolean*
|
``required``: *boolean*
|
||||||
|
|
||||||
If ``True``, the server will enforce a value for this field. See `NonNull <./list-and-nonnull.html#nonnull>`_. Default is ``False``.
|
If ``True``, the server will enforce a value for this field. See `NonNull <../list-and-nonnull.html#nonnull>`_. Default is ``False``.
|
||||||
|
|
||||||
``deprecation_reason``: *string*
|
``deprecation_reason``: *string*
|
||||||
|
|
||||||
|
@ -25,35 +32,39 @@ All Scalar types accept the following arguments. All are optional:
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
Base scalars
|
Built in scalars
|
||||||
------------
|
----------------
|
||||||
|
|
||||||
Graphene defines the following base Scalar Types:
|
Graphene defines the following base Scalar Types that match the default `GraphQL types <https://graphql.org/learn/schema/#scalar-types>`_:
|
||||||
|
|
||||||
``graphene.String``
|
``graphene.String``
|
||||||
|
^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
Represents textual data, represented as UTF-8
|
Represents textual data, represented as UTF-8
|
||||||
character sequences. The String type is most often used by GraphQL to
|
character sequences. The String type is most often used by GraphQL to
|
||||||
represent free-form human-readable text.
|
represent free-form human-readable text.
|
||||||
|
|
||||||
``graphene.Int``
|
``graphene.Int``
|
||||||
|
^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
Represents non-fractional signed whole numeric
|
Represents non-fractional signed whole numeric
|
||||||
values. Int can represent values between `-(2^53 - 1)` and `2^53 - 1` since
|
values. Int is a signed 32‐bit integer per the
|
||||||
represented in JSON as double-precision floating point numbers specified
|
`GraphQL spec <https://facebook.github.io/graphql/June2018/#sec-Int>`_
|
||||||
by `IEEE 754 <http://en.wikipedia.org/wiki/IEEE_floating_point>`_.
|
|
||||||
|
|
||||||
``graphene.Float``
|
``graphene.Float``
|
||||||
|
^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
Represents signed double-precision fractional
|
Represents signed double-precision fractional
|
||||||
values as specified by
|
values as specified by
|
||||||
`IEEE 754 <http://en.wikipedia.org/wiki/IEEE_floating_point>`_.
|
`IEEE 754 <http://en.wikipedia.org/wiki/IEEE_floating_point>`_.
|
||||||
|
|
||||||
``graphene.Boolean``
|
``graphene.Boolean``
|
||||||
|
^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
Represents `true` or `false`.
|
Represents `true` or `false`.
|
||||||
|
|
||||||
``graphene.ID``
|
``graphene.ID``
|
||||||
|
^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
Represents a unique identifier, often used to
|
Represents a unique identifier, often used to
|
||||||
refetch an object or as key for a cache. The ID type appears in a JSON
|
refetch an object or as key for a cache. The ID type appears in a JSON
|
||||||
|
@ -61,24 +72,183 @@ Graphene defines the following base Scalar Types:
|
||||||
When expected as an input type, any string (such as `"4"`) or integer
|
When expected as an input type, any string (such as `"4"`) or integer
|
||||||
(such as `4`) input value will be accepted as an ID.
|
(such as `4`) input value will be accepted as an ID.
|
||||||
|
|
||||||
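As a quick illustration (a minimal sketch with field names invented for this example), the built in scalars are mounted like any other field and accept the optional arguments listed at the top of this page:

.. code:: python

    from graphene import ObjectType, String, Int, Float, Boolean, ID

    class Person(ObjectType):
        id = ID(required=True)
        name = String(description="Full name")
        age = Int()
        height = Float(description="Height in meters")
        is_active = Boolean(deprecation_reason="No longer tracked")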
Graphene also provides custom scalars for Dates, Times, and JSON:
|
----
|
||||||
|
|
||||||
``graphene.types.datetime.Date``
|
Graphene also provides custom scalars for common values:
|
||||||
|
|
||||||
|
``graphene.Date``
|
||||||
|
^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
Represents a Date value as specified by `iso8601 <https://en.wikipedia.org/wiki/ISO_8601>`_.
|
Represents a Date value as specified by `iso8601 <https://en.wikipedia.org/wiki/ISO_8601>`_.
|
||||||
|
|
||||||
``graphene.types.datetime.DateTime``
|
.. code:: python
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
from graphene import Schema, ObjectType, Date
|
||||||
|
|
||||||
|
class Query(ObjectType):
|
||||||
|
one_week_from = Date(required=True, date_input=Date(required=True))
|
||||||
|
|
||||||
|
def resolve_one_week_from(root, info, date_input):
|
||||||
|
assert date_input == datetime.date(2006, 1, 2)
|
||||||
|
return date_input + datetime.timedelta(weeks=1)
|
||||||
|
|
||||||
|
schema = Schema(query=Query)
|
||||||
|
|
||||||
|
results = schema.execute("""
|
||||||
|
query {
|
||||||
|
oneWeekFrom(dateInput: "2006-01-02")
|
||||||
|
}
|
||||||
|
""")
|
||||||
|
|
||||||
|
assert results.data == {"oneWeekFrom": "2006-01-09"}
|
||||||
|
|
||||||
|
|
||||||
|
``graphene.DateTime``
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
Represents a DateTime value as specified by `iso8601 <https://en.wikipedia.org/wiki/ISO_8601>`_.
|
Represents a DateTime value as specified by `iso8601 <https://en.wikipedia.org/wiki/ISO_8601>`_.
|
||||||
|
|
||||||
``graphene.types.datetime.Time``
|
.. code:: python
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
from graphene import Schema, ObjectType, DateTime
|
||||||
|
|
||||||
|
class Query(ObjectType):
|
||||||
|
one_hour_from = DateTime(required=True, datetime_input=DateTime(required=True))
|
||||||
|
|
||||||
|
def resolve_one_hour_from(root, info, datetime_input):
|
||||||
|
assert datetime_input == datetime.datetime(2006, 1, 2, 15, 4, 5)
|
||||||
|
return datetime_input + datetime.timedelta(hours=1)
|
||||||
|
|
||||||
|
schema = Schema(query=Query)
|
||||||
|
|
||||||
|
results = schema.execute("""
|
||||||
|
query {
|
||||||
|
oneHourFrom(datetimeInput: "2006-01-02T15:04:05")
|
||||||
|
}
|
||||||
|
""")
|
||||||
|
|
||||||
|
assert results.data == {"oneHourFrom": "2006-01-02T16:04:05"}
|
||||||
|
|
||||||
|
``graphene.Time``
|
||||||
|
^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
Represents a Time value as specified by `iso8601 <https://en.wikipedia.org/wiki/ISO_8601>`_.
|
Represents a Time value as specified by `iso8601 <https://en.wikipedia.org/wiki/ISO_8601>`_.
|
||||||
|
|
||||||
``graphene.types.json.JSONString``
|
.. code:: python
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
from graphene import Schema, ObjectType, Time
|
||||||
|
|
||||||
|
class Query(ObjectType):
|
||||||
|
one_hour_from = Time(required=True, time_input=Time(required=True))
|
||||||
|
|
||||||
|
def resolve_one_hour_from(root, info, time_input):
|
||||||
|
assert time_input == datetime.time(15, 4, 5)
|
||||||
|
tmp_time_input = datetime.datetime.combine(datetime.date(1, 1, 1), time_input)
|
||||||
|
return (tmp_time_input + datetime.timedelta(hours=1)).time()
|
||||||
|
|
||||||
|
schema = Schema(query=Query)
|
||||||
|
|
||||||
|
results = schema.execute("""
|
||||||
|
query {
|
||||||
|
oneHourFrom(timeInput: "15:04:05")
|
||||||
|
}
|
||||||
|
""")
|
||||||
|
|
||||||
|
assert results.data == {"oneHourFrom": "16:04:05"}
|
||||||
|
|
||||||
|
``graphene.Decimal``
|
||||||
|
^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
Represents a Python Decimal value.
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
import decimal
|
||||||
|
from graphene import Schema, ObjectType, Decimal
|
||||||
|
|
||||||
|
class Query(ObjectType):
|
||||||
|
add_one_to = Decimal(required=True, decimal_input=Decimal(required=True))
|
||||||
|
|
||||||
|
def resolve_add_one_to(root, info, decimal_input):
|
||||||
|
assert decimal_input == decimal.Decimal("10.50")
|
||||||
|
return decimal_input + decimal.Decimal("1")
|
||||||
|
|
||||||
|
schema = Schema(query=Query)
|
||||||
|
|
||||||
|
results = schema.execute("""
|
||||||
|
query {
|
||||||
|
addOneTo(decimalInput: "10.50")
|
||||||
|
}
|
||||||
|
""")
|
||||||
|
|
||||||
|
assert results.data == {"addOneTo": "11.50"}
|
||||||
|
|
||||||
|
``graphene.JSONString``
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
Represents a JSON string.
|
Represents a JSON string.
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
from graphene import Schema, ObjectType, JSONString, String
|
||||||
|
|
||||||
|
class Query(ObjectType):
|
||||||
|
update_json_key = JSONString(
|
||||||
|
required=True,
|
||||||
|
json_input=JSONString(required=True),
|
||||||
|
key=String(required=True),
|
||||||
|
value=String(required=True)
|
||||||
|
)
|
||||||
|
|
||||||
|
def resolve_update_json_key(root, info, json_input, key, value):
|
||||||
|
assert json_input == {"name": "Jane"}
|
||||||
|
json_input[key] = value
|
||||||
|
return json_input
|
||||||
|
|
||||||
|
schema = Schema(query=Query)
|
||||||
|
|
||||||
|
results = schema.execute("""
|
||||||
|
query {
|
||||||
|
updateJsonKey(jsonInput: "{\\"name\\": \\"Jane\\"}", key: "name", value: "Beth")
|
||||||
|
}
|
||||||
|
""")
|
||||||
|
|
||||||
|
assert results.data == {"updateJsonKey": "{\"name\": \"Beth\"}"}
|
||||||
|
|
||||||
|
|
||||||
|
``graphene.Base64``
|
||||||
|
^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
Represents a Base64 encoded string.
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
from graphene import Schema, ObjectType, Base64
|
||||||
|
|
||||||
|
class Query(ObjectType):
|
||||||
|
increment_encoded_id = Base64(
|
||||||
|
required=True,
|
||||||
|
base64_input=Base64(required=True),
|
||||||
|
)
|
||||||
|
|
||||||
|
def resolve_increment_encoded_id(root, info, base64_input):
|
||||||
|
assert base64_input == "4"
|
||||||
|
return int(base64_input) + 1
|
||||||
|
|
||||||
|
schema = Schema(query=Query)
|
||||||
|
|
||||||
|
results = schema.execute("""
|
||||||
|
query {
|
||||||
|
incrementEncodedId(base64Input: "NA==")
|
||||||
|
}
|
||||||
|
""")
|
||||||
|
|
||||||
|
assert results.data == {"incrementEncodedId": "NQ=="}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
Custom scalars
|
Custom scalars
|
||||||
--------------
|
--------------
|
||||||
|
@ -100,8 +270,8 @@ The following is an example for creating a DateTime scalar:
|
||||||
return dt.isoformat()
|
return dt.isoformat()
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def parse_literal(node):
|
def parse_literal(node, _variables=None):
|
||||||
if isinstance(node, ast.StringValue):
|
if isinstance(node, ast.StringValueNode):
|
||||||
return datetime.datetime.strptime(
|
return datetime.datetime.strptime(
|
||||||
node.value, "%Y-%m-%dT%H:%M:%S.%f")
|
node.value, "%Y-%m-%dT%H:%M:%S.%f")
|
||||||
|
|
||||||
|
|
|
@ -1,16 +1,42 @@
|
||||||
Schema
|
Schema
|
||||||
======
|
======
|
||||||
|
|
||||||
A Schema is created by supplying the root types of each type of operation, query and mutation (optional).
|
A GraphQL **Schema** defines the types and relationships between **Fields** in your API.
|
||||||
A schema definition is then supplied to the validator and executor.
|
|
||||||
|
A Schema is created by supplying the root :ref:`ObjectType` of each operation, query (mandatory), mutation and subscription.
|
||||||
|
|
||||||
|
Schema will collect all type definitions related to the root operations and then supply them to the validator and executor.
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
|
||||||
|
|
||||||
my_schema = Schema(
|
my_schema = Schema(
|
||||||
query=MyRootQuery,
|
query=MyRootQuery,
|
||||||
mutation=MyRootMutation,
|
mutation=MyRootMutation,
|
||||||
|
subscription=MyRootSubscription
|
||||||
)
|
)
|
||||||
|
|
||||||
|
A Root Query is just a special :ref:`ObjectType` that defines the fields that are the entrypoint for your API. Root Mutation and Root Subscription are similar to Root Query, but for different operation types:
|
||||||
|
|
||||||
|
* Query fetches data
|
||||||
|
* Mutation changes data and retrieves the changes
|
||||||
|
* Subscription sends changes to clients in real-time
|
||||||
|
|
||||||
|
Review the `GraphQL documentation on Schema`_ for a brief overview of fields, schema and operations.
|
||||||
|
|
||||||
|
.. _GraphQL documentation on Schema: https://graphql.org/learn/schema/
|
||||||
|
|
||||||
|
|
||||||
|
Querying
|
||||||
|
--------
|
||||||
|
|
||||||
|
To query a schema, call the ``execute`` method on it. See :ref:`SchemaExecute` for more details.
|
||||||
|
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
query_string = 'query whoIsMyBestFriend { myBestFriend { lastName } }'
|
||||||
|
my_schema.execute(query_string)
|
||||||
|
|
||||||
Types
|
Types
|
||||||
-----
|
-----
|
||||||
|
|
||||||
|
@ -18,7 +44,7 @@ There are some cases where the schema cannot access all of the types that we pla
|
||||||
For example, when a field returns an ``Interface``, the schema doesn't know about any of the
|
For example, when a field returns an ``Interface``, the schema doesn't know about any of the
|
||||||
implementations.
|
implementations.
|
||||||
|
|
||||||
In this case, we need to use the ``types`` argument when creating the Schema.
|
In this case, we need to use the ``types`` argument when creating the Schema:
|
||||||
|
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
|
||||||
|
@ -28,26 +54,16 @@ In this case, we need to use the ``types`` argument when creating the Schema.
|
||||||
types=[SomeExtraObjectType, ]
|
types=[SomeExtraObjectType, ]
|
||||||
)
|
)
|
||||||
|
|
||||||
|
.. _SchemaAutoCamelCase:
|
||||||
|
|
||||||
Querying
|
Auto camelCase field names
|
||||||
--------
|
|
||||||
|
|
||||||
To query a schema, call the ``execute`` method on it.
|
|
||||||
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
my_schema.execute('{ lastName }')
|
|
||||||
|
|
||||||
|
|
||||||
Auto CamelCase field names
|
|
||||||
--------------------------
|
--------------------------
|
||||||
|
|
||||||
By default all field and argument names (that are not
|
By default all field and argument names (that are not
|
||||||
explicitly set with the ``name`` arg) will be converted from
|
explicitly set with the ``name`` arg) will be converted from
|
||||||
``snake_case`` to ``camelCase`` (as the API is usually being consumed by a js/mobile client)
|
``snake_case`` to ``camelCase`` (as the API is usually being consumed by a js/mobile client)
|
||||||
|
|
||||||
For example with the ObjectType
|
For example, with the ObjectType below, the ``last_name`` field name is converted to ``lastName``:
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
|
||||||
|
|
||||||
|
@ -55,12 +71,10 @@ For example with the ObjectType
|
||||||
last_name = graphene.String()
|
last_name = graphene.String()
|
||||||
other_name = graphene.String(name='_other_Name')
|
other_name = graphene.String(name='_other_Name')
|
||||||
|
|
||||||
the ``last_name`` field name is converted to ``lastName``.
|
|
||||||
|
|
||||||
In case you don't want to apply this transformation, provide a ``name`` argument to the field constructor.
|
In case you don't want to apply this transformation, provide a ``name`` argument to the field constructor.
|
||||||
``other_name`` converts to ``_other_Name`` (without further transformations).
|
``other_name`` converts to ``_other_Name`` (without further transformations).
|
||||||
|
|
||||||
Your query should look like
|
Your query should look like:
|
||||||
|
|
||||||
.. code::
|
.. code::
|
||||||
|
|
||||||
|
@ -70,7 +84,7 @@ Your query should look like
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
To disable this behavior, set the ``auto_camelcase`` to ``False`` upon schema instantiation.
|
To disable this behavior, set the ``auto_camelcase`` to ``False`` upon schema instantiation:
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
|
||||||
|
|
||||||
|
|
|
@ -7,7 +7,7 @@ to specify any common fields between the types.
|
||||||
The basics:
|
The basics:
|
||||||
|
|
||||||
- Each Union is a Python class that inherits from ``graphene.Union``.
|
- Each Union is a Python class that inherits from ``graphene.Union``.
|
||||||
- Unions don't have any fields on it, just links to the possible objecttypes.
|
- Unions don't have any fields on them, just links to the possible ObjectTypes.
|
||||||
|
|
||||||
Quick example
|
Quick example
|
||||||
-------------
|
-------------
|
||||||
|
|
|
@ -7,7 +7,7 @@ class GeoInput(graphene.InputObjectType):
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def latlng(self):
|
def latlng(self):
|
||||||
return "({},{})".format(self.lat, self.lng)
|
return f"({self.lat},{self.lng})"
|
||||||
|
|
||||||
|
|
||||||
class Address(graphene.ObjectType):
|
class Address(graphene.ObjectType):
|
||||||
|
@ -17,7 +17,7 @@ class Address(graphene.ObjectType):
|
||||||
class Query(graphene.ObjectType):
|
class Query(graphene.ObjectType):
|
||||||
address = graphene.Field(Address, geo=GeoInput(required=True))
|
address = graphene.Field(Address, geo=GeoInput(required=True))
|
||||||
|
|
||||||
def resolve_address(self, info, geo):
|
def resolve_address(root, info, geo):
|
||||||
return Address(latlng=geo.latlng)
|
return Address(latlng=geo.latlng)
|
||||||
|
|
||||||
|
|
||||||
|
@ -27,7 +27,7 @@ class CreateAddress(graphene.Mutation):
|
||||||
|
|
||||||
Output = Address
|
Output = Address
|
||||||
|
|
||||||
def mutate(self, info, geo):
|
def mutate(root, info, geo):
|
||||||
return Address(latlng=geo.latlng)
|
return Address(latlng=geo.latlng)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -9,7 +9,7 @@ class User(graphene.ObjectType):
|
||||||
class Query(graphene.ObjectType):
|
class Query(graphene.ObjectType):
|
||||||
me = graphene.Field(User)
|
me = graphene.Field(User)
|
||||||
|
|
||||||
def resolve_me(self, info):
|
def resolve_me(root, info):
|
||||||
return info.context["user"]
|
return info.context["user"]
|
||||||
|
|
||||||
|
|
||||||
|
@ -25,11 +25,11 @@ query = """
|
||||||
|
|
||||||
|
|
||||||
def test_query():
|
def test_query():
|
||||||
result = schema.execute(query, context_value={"user": User(id="1", name="Syrus")})
|
result = schema.execute(query, context={"user": User(id="1", name="Syrus")})
|
||||||
assert not result.errors
|
assert not result.errors
|
||||||
assert result.data == {"me": {"id": "1", "name": "Syrus"}}
|
assert result.data == {"me": {"id": "1", "name": "Syrus"}}
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
result = schema.execute(query, context_value={"user": User(id="X", name="Console")})
|
result = schema.execute(query, context={"user": User(id="X", name="Console")})
|
||||||
print(result.data["me"])
|
print(result.data["me"])
|
||||||
|
|
|
@ -8,10 +8,9 @@ class Patron(graphene.ObjectType):
|
||||||
|
|
||||||
|
|
||||||
class Query(graphene.ObjectType):
|
class Query(graphene.ObjectType):
|
||||||
|
|
||||||
patron = graphene.Field(Patron)
|
patron = graphene.Field(Patron)
|
||||||
|
|
||||||
def resolve_patron(self, info):
|
def resolve_patron(root, info):
|
||||||
return Patron(id=1, name="Syrus", age=27)
|
return Patron(id=1, name="Syrus", age=27)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -39,13 +39,13 @@ class Query(graphene.ObjectType):
|
||||||
human = graphene.Field(Human, id=graphene.String())
|
human = graphene.Field(Human, id=graphene.String())
|
||||||
droid = graphene.Field(Droid, id=graphene.String())
|
droid = graphene.Field(Droid, id=graphene.String())
|
||||||
|
|
||||||
def resolve_hero(self, info, episode=None):
|
def resolve_hero(root, info, episode=None):
|
||||||
return get_hero(episode)
|
return get_hero(episode)
|
||||||
|
|
||||||
def resolve_human(self, info, id):
|
def resolve_human(root, info, id):
|
||||||
return get_human(id)
|
return get_human(id)
|
||||||
|
|
||||||
def resolve_droid(self, info, id):
|
def resolve_droid(root, info, id):
|
||||||
return get_droid(id)
|
return get_droid(id)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,100 +0,0 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
# snapshottest: v1 - https://goo.gl/zC4yUc
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
from snapshottest import Snapshot
|
|
||||||
|
|
||||||
snapshots = Snapshot()
|
|
||||||
|
|
||||||
snapshots["test_hero_name_query 1"] = {"data": {"hero": {"name": "R2-D2"}}}
|
|
||||||
|
|
||||||
snapshots["test_hero_name_and_friends_query 1"] = {
|
|
||||||
"data": {
|
|
||||||
"hero": {
|
|
||||||
"id": "2001",
|
|
||||||
"name": "R2-D2",
|
|
||||||
"friends": [
|
|
||||||
{"name": "Luke Skywalker"},
|
|
||||||
{"name": "Han Solo"},
|
|
||||||
{"name": "Leia Organa"},
|
|
||||||
],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
snapshots["test_nested_query 1"] = {
|
|
||||||
"data": {
|
|
||||||
"hero": {
|
|
||||||
"name": "R2-D2",
|
|
||||||
"friends": [
|
|
||||||
{
|
|
||||||
"name": "Luke Skywalker",
|
|
||||||
"appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"],
|
|
||||||
"friends": [
|
|
||||||
{"name": "Han Solo"},
|
|
||||||
{"name": "Leia Organa"},
|
|
||||||
{"name": "C-3PO"},
|
|
||||||
{"name": "R2-D2"},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Han Solo",
|
|
||||||
"appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"],
|
|
||||||
"friends": [
|
|
||||||
{"name": "Luke Skywalker"},
|
|
||||||
{"name": "Leia Organa"},
|
|
||||||
{"name": "R2-D2"},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Leia Organa",
|
|
||||||
"appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"],
|
|
||||||
"friends": [
|
|
||||||
{"name": "Luke Skywalker"},
|
|
||||||
{"name": "Han Solo"},
|
|
||||||
{"name": "C-3PO"},
|
|
||||||
{"name": "R2-D2"},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
snapshots["test_fetch_luke_query 1"] = {"data": {"human": {"name": "Luke Skywalker"}}}
|
|
||||||
|
|
||||||
snapshots["test_fetch_some_id_query 1"] = {
|
|
||||||
"data": {"human": {"name": "Luke Skywalker"}}
|
|
||||||
}
|
|
||||||
|
|
||||||
snapshots["test_fetch_some_id_query2 1"] = {"data": {"human": {"name": "Han Solo"}}}
|
|
||||||
|
|
||||||
snapshots["test_invalid_id_query 1"] = {"data": {"human": None}}
|
|
||||||
|
|
||||||
snapshots["test_fetch_luke_aliased 1"] = {"data": {"luke": {"name": "Luke Skywalker"}}}
|
|
||||||
|
|
||||||
snapshots["test_fetch_luke_and_leia_aliased 1"] = {
|
|
||||||
"data": {"luke": {"name": "Luke Skywalker"}, "leia": {"name": "Leia Organa"}}
|
|
||||||
}
|
|
||||||
|
|
||||||
snapshots["test_duplicate_fields 1"] = {
|
|
||||||
"data": {
|
|
||||||
"luke": {"name": "Luke Skywalker", "homePlanet": "Tatooine"},
|
|
||||||
"leia": {"name": "Leia Organa", "homePlanet": "Alderaan"},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
snapshots["test_use_fragment 1"] = {
|
|
||||||
"data": {
|
|
||||||
"luke": {"name": "Luke Skywalker", "homePlanet": "Tatooine"},
|
|
||||||
"leia": {"name": "Leia Organa", "homePlanet": "Alderaan"},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
snapshots["test_check_type_of_r2 1"] = {
|
|
||||||
"data": {"hero": {"__typename": "Droid", "name": "R2-D2"}}
|
|
||||||
}
|
|
||||||
|
|
||||||
snapshots["test_check_type_of_luke 1"] = {
|
|
||||||
"data": {"hero": {"__typename": "Human", "name": "Luke Skywalker"}}
|
|
||||||
}
|
|
|
@ -8,19 +8,19 @@ setup()
|
||||||
client = Client(schema)
|
client = Client(schema)
|
||||||
|
|
||||||
|
|
||||||
def test_hero_name_query(snapshot):
|
def test_hero_name_query():
|
||||||
query = """
|
result = client.execute("""
|
||||||
query HeroNameQuery {
|
query HeroNameQuery {
|
||||||
hero {
|
hero {
|
||||||
name
|
name
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
"""
|
""")
|
||||||
snapshot.assert_match(client.execute(query))
|
assert result == {"data": {"hero": {"name": "R2-D2"}}}
|
||||||
|
|
||||||
|
|
||||||
def test_hero_name_and_friends_query(snapshot):
|
def test_hero_name_and_friends_query():
|
||||||
query = """
|
result = client.execute("""
|
||||||
query HeroNameAndFriendsQuery {
|
query HeroNameAndFriendsQuery {
|
||||||
hero {
|
hero {
|
||||||
id
|
id
|
||||||
|
@@ -30,12 +30,24 @@ def test_hero_name_and_friends_query(snapshot):
         }
       }
     }
-    """
-    snapshot.assert_match(client.execute(query))
+    """)
+    assert result == {
+        "data": {
+            "hero": {
+                "id": "2001",
+                "name": "R2-D2",
+                "friends": [
+                    {"name": "Luke Skywalker"},
+                    {"name": "Han Solo"},
+                    {"name": "Leia Organa"},
+                ],
+            }
+        }
+    }


-def test_nested_query(snapshot):
-    query = """
+def test_nested_query():
+    result = client.execute("""
     query NestedQuery {
       hero {
         name
@@ -48,70 +60,113 @@ def test_nested_query(snapshot):
         }
       }
     }
-    """
-    snapshot.assert_match(client.execute(query))
+    """)
+    assert result == {
+        "data": {
+            "hero": {
+                "name": "R2-D2",
+                "friends": [
+                    {
+                        "name": "Luke Skywalker",
+                        "appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"],
+                        "friends": [
+                            {"name": "Han Solo"},
+                            {"name": "Leia Organa"},
+                            {"name": "C-3PO"},
+                            {"name": "R2-D2"},
+                        ],
+                    },
+                    {
+                        "name": "Han Solo",
+                        "appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"],
+                        "friends": [
+                            {"name": "Luke Skywalker"},
+                            {"name": "Leia Organa"},
+                            {"name": "R2-D2"},
+                        ],
+                    },
+                    {
+                        "name": "Leia Organa",
+                        "appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"],
+                        "friends": [
+                            {"name": "Luke Skywalker"},
+                            {"name": "Han Solo"},
+                            {"name": "C-3PO"},
+                            {"name": "R2-D2"},
+                        ],
+                    },
+                ],
+            }
+        }
+    }


-def test_fetch_luke_query(snapshot):
-    query = """
+def test_fetch_luke_query():
+    result = client.execute("""
     query FetchLukeQuery {
       human(id: "1000") {
         name
       }
     }
-    """
-    snapshot.assert_match(client.execute(query))
+    """)
+    assert result == {"data": {"human": {"name": "Luke Skywalker"}}}


-def test_fetch_some_id_query(snapshot):
-    query = """
+def test_fetch_some_id_query():
+    result = client.execute(
+        """
     query FetchSomeIDQuery($someId: String!) {
       human(id: $someId) {
         name
       }
     }
-    """
-    params = {"someId": "1000"}
-    snapshot.assert_match(client.execute(query, variable_values=params))
+    """,
+        variables={"someId": "1000"},
+    )
+    assert result == {"data": {"human": {"name": "Luke Skywalker"}}}


-def test_fetch_some_id_query2(snapshot):
-    query = """
+def test_fetch_some_id_query2():
+    result = client.execute(
+        """
     query FetchSomeIDQuery($someId: String!) {
       human(id: $someId) {
         name
       }
     }
-    """
-    params = {"someId": "1002"}
-    snapshot.assert_match(client.execute(query, variable_values=params))
+    """,
+        variables={"someId": "1002"},
+    )
+    assert result == {"data": {"human": {"name": "Han Solo"}}}


-def test_invalid_id_query(snapshot):
-    query = """
+def test_invalid_id_query():
+    result = client.execute(
+        """
     query humanQuery($id: String!) {
       human(id: $id) {
         name
       }
     }
-    """
-    params = {"id": "not a valid id"}
-    snapshot.assert_match(client.execute(query, variable_values=params))
+    """,
+        variables={"id": "not a valid id"},
+    )
+    assert result == {"data": {"human": None}}


-def test_fetch_luke_aliased(snapshot):
-    query = """
+def test_fetch_luke_aliased():
+    result = client.execute("""
     query FetchLukeAliased {
       luke: human(id: "1000") {
         name
       }
     }
-    """
-    snapshot.assert_match(client.execute(query))
+    """)
+    assert result == {"data": {"luke": {"name": "Luke Skywalker"}}}


-def test_fetch_luke_and_leia_aliased(snapshot):
-    query = """
+def test_fetch_luke_and_leia_aliased():
+    result = client.execute("""
     query FetchLukeAndLeiaAliased {
       luke: human(id: "1000") {
         name
@@ -120,12 +175,14 @@ def test_fetch_luke_and_leia_aliased(snapshot):
         name
       }
     }
-    """
-    snapshot.assert_match(client.execute(query))
+    """)
+    assert result == {
+        "data": {"luke": {"name": "Luke Skywalker"}, "leia": {"name": "Leia Organa"}}
+    }


-def test_duplicate_fields(snapshot):
-    query = """
+def test_duplicate_fields():
+    result = client.execute("""
     query DuplicateFields {
       luke: human(id: "1000") {
         name
@@ -136,12 +193,17 @@ def test_duplicate_fields(snapshot):
         homePlanet
       }
     }
-    """
-    snapshot.assert_match(client.execute(query))
+    """)
+    assert result == {
+        "data": {
+            "luke": {"name": "Luke Skywalker", "homePlanet": "Tatooine"},
+            "leia": {"name": "Leia Organa", "homePlanet": "Alderaan"},
+        }
+    }


-def test_use_fragment(snapshot):
-    query = """
+def test_use_fragment():
+    result = client.execute("""
     query UseFragment {
       luke: human(id: "1000") {
         ...HumanFragment
@@ -154,29 +216,36 @@ def test_use_fragment(snapshot):
       name
       homePlanet
     }
-    """
-    snapshot.assert_match(client.execute(query))
+    """)
+    assert result == {
+        "data": {
+            "luke": {"name": "Luke Skywalker", "homePlanet": "Tatooine"},
+            "leia": {"name": "Leia Organa", "homePlanet": "Alderaan"},
+        }
+    }


-def test_check_type_of_r2(snapshot):
-    query = """
+def test_check_type_of_r2():
+    result = client.execute("""
     query CheckTypeOfR2 {
       hero {
         __typename
         name
       }
     }
-    """
-    snapshot.assert_match(client.execute(query))
+    """)
+    assert result == {"data": {"hero": {"__typename": "Droid", "name": "R2-D2"}}}


-def test_check_type_of_luke(snapshot):
-    query = """
+def test_check_type_of_luke():
+    result = client.execute("""
     query CheckTypeOfLuke {
       hero(episode: EMPIRE) {
         __typename
         name
       }
     }
-    """
-    snapshot.assert_match(client.execute(query))
+    """)
+    assert result == {
+        "data": {"hero": {"__typename": "Human", "name": "Luke Skywalker"}}
+    }
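Note on the pattern used throughout the converted test_query.py above: the snapshot fixtures are replaced by building the schema once, wrapping it in graphene.test.Client, executing a query string, and comparing the returned mapping against an inline expected value. A minimal self-contained sketch of that pattern (the Query type below is an illustrative stand-in, not the repository's Star Wars schema):

import graphene
from graphene.test import Client


class Query(graphene.ObjectType):
    hero_name = graphene.String()

    def resolve_hero_name(root, info):
        # Static data stands in for the Star Wars fixtures used by the real tests.
        return "R2-D2"


schema = graphene.Schema(query=Query)
client = Client(schema)


def test_hero_name():
    # Client.execute returns a dict shaped like a GraphQL response,
    # so it can be compared directly with ==, as the diff above does.
    result = client.execute("{ heroName }")
    assert result == {"data": {"heroName": "R2-D2"}}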
@@ -14,7 +14,7 @@ def setup():

     # Yeah, technically it's Corellian. But it flew in the service of the rebels,
     # so for the purposes of this demo it's a rebel ship.
-    falcon = Ship(id="4", name="Millenium Falcon")
+    falcon = Ship(id="4", name="Millennium Falcon")

     homeOne = Ship(id="5", name="Home One")

@@ -64,10 +64,10 @@ class Query(graphene.ObjectType):
     empire = graphene.Field(Faction)
     node = relay.Node.Field()

-    def resolve_rebels(self, info):
+    def resolve_rebels(root, info):
         return get_rebels()

-    def resolve_empire(self, info):
+    def resolve_empire(root, info):
         return get_empire()


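The resolve_rebels/resolve_empire change above tracks graphene's convention that resolvers defined on an ObjectType behave as implicit static methods: the first parameter is the parent (root) value rather than a bound self, so naming it root makes the intent explicit. A small sketch of the same idea, using a hypothetical schema rather than this repository's:

import graphene


class Query(graphene.ObjectType):
    rebels = graphene.String()

    def resolve_rebels(root, info):
        # No bound `self` here: for a top-level Query field, `root` is whatever
        # value was passed as the root of the execution (often None).
        return "Alliance to Restore the Republic"


schema = graphene.Schema(query=Query)
result = schema.execute("{ rebels }")
assert result.data == {"rebels": "Alliance to Restore the Republic"}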
@@ -1,26 +0,0 @@
-# -*- coding: utf-8 -*-
-# snapshottest: v1 - https://goo.gl/zC4yUc
-from __future__ import unicode_literals
-
-from snapshottest import Snapshot
-
-snapshots = Snapshot()
-
-snapshots["test_correct_fetch_first_ship_rebels 1"] = {
-    "data": {
-        "rebels": {
-            "name": "Alliance to Restore the Republic",
-            "ships": {
-                "pageInfo": {
-                    "startCursor": "YXJyYXljb25uZWN0aW9uOjA=",
-                    "endCursor": "YXJyYXljb25uZWN0aW9uOjA=",
-                    "hasNextPage": True,
-                    "hasPreviousPage": False,
-                },
-                "edges": [
-                    {"cursor": "YXJyYXljb25uZWN0aW9uOjA=", "node": {"name": "X-Wing"}}
-                ],
-            },
-        }
-    }
-}
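A note for readers decoding the deleted snapshot above: the opaque cursors produced by graphene's array connections are base64-encoded "arrayconnection:<offset>" strings (via graphql-relay), which is why the single edge's cursor matches both startCursor and endCursor here. A quick standalone check in Python:

import base64

cursor = "YXJyYXljb25uZWN0aW9uOjA="
# The cursor is just the base64 encoding of "arrayconnection:0" (offset 0).
assert base64.b64decode(cursor) == b"arrayconnection:0"
assert base64.b64encode(b"arrayconnection:0").decode() == cursor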
@@ -1,28 +0,0 @@
-# -*- coding: utf-8 -*-
-# snapshottest: v1 - https://goo.gl/zC4yUc
-from __future__ import unicode_literals
-
-from snapshottest import Snapshot
-
-snapshots = Snapshot()
-
-snapshots["test_mutations 1"] = {
-    "data": {
-        "introduceShip": {
-            "ship": {"id": "U2hpcDo5", "name": "Peter"},
-            "faction": {
-                "name": "Alliance to Restore the Republic",
-                "ships": {
-                    "edges": [
-                        {"node": {"id": "U2hpcDox", "name": "X-Wing"}},
-                        {"node": {"id": "U2hpcDoy", "name": "Y-Wing"}},
-                        {"node": {"id": "U2hpcDoz", "name": "A-Wing"}},
-                        {"node": {"id": "U2hpcDo0", "name": "Millenium Falcon"}},
-                        {"node": {"id": "U2hpcDo1", "name": "Home One"}},
-                        {"node": {"id": "U2hpcDo5", "name": "Peter"}},
-                    ]
-                },
-            },
-        }
-    }
-}
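Likewise, the node ids in the deleted mutation snapshot above are relay global IDs, i.e. base64 of "<TypeName>:<local id>" as produced by graphene's default Node implementation. For example (plain Python, independent of graphene):

import base64

assert base64.b64decode("U2hpcDo5") == b"Ship:9"           # the "Peter" ship
assert base64.b64encode(b"Ship:1").decode() == "U2hpcDox"  # the X-Wing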
@ -1,91 +0,0 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
# snapshottest: v1 - https://goo.gl/zC4yUc
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
from snapshottest import Snapshot
|
|
||||||
|
|
||||||
snapshots = Snapshot()
|
|
||||||
|
|
||||||
snapshots["test_correctly_fetches_id_name_rebels 1"] = {
|
|
||||||
"data": {
|
|
||||||
"rebels": {"id": "RmFjdGlvbjox", "name": "Alliance to Restore the Republic"}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
snapshots["test_correctly_refetches_rebels 1"] = {
|
|
||||||
"data": {"node": {"id": "RmFjdGlvbjox", "name": "Alliance to Restore the Republic"}}
|
|
||||||
}
|
|
||||||
|
|
||||||
snapshots["test_correctly_fetches_id_name_empire 1"] = {
|
|
||||||
"data": {"empire": {"id": "RmFjdGlvbjoy", "name": "Galactic Empire"}}
|
|
||||||
}
|
|
||||||
|
|
||||||
snapshots["test_correctly_refetches_empire 1"] = {
|
|
||||||
"data": {"node": {"id": "RmFjdGlvbjoy", "name": "Galactic Empire"}}
|
|
||||||
}
|
|
||||||
|
|
||||||
snapshots["test_correctly_refetches_xwing 1"] = {
|
|
||||||
"data": {"node": {"id": "U2hpcDox", "name": "X-Wing"}}
|
|
||||||
}
|
|
||||||
|
|
||||||
snapshots[
|
|
||||||
"test_str_schema 1"
|
|
||||||
] = """schema {
|
|
||||||
query: Query
|
|
||||||
mutation: Mutation
|
|
||||||
}
|
|
||||||
|
|
||||||
type Faction implements Node {
|
|
||||||
id: ID!
|
|
||||||
name: String
|
|
||||||
ships(before: String, after: String, first: Int, last: Int): ShipConnection
|
|
||||||
}
|
|
||||||
|
|
||||||
input IntroduceShipInput {
|
|
||||||
shipName: String!
|
|
||||||
factionId: String!
|
|
||||||
clientMutationId: String
|
|
||||||
}
|
|
||||||
|
|
||||||
type IntroduceShipPayload {
|
|
||||||
ship: Ship
|
|
||||||
faction: Faction
|
|
||||||
clientMutationId: String
|
|
||||||
}
|
|
||||||
|
|
||||||
type Mutation {
|
|
||||||
introduceShip(input: IntroduceShipInput!): IntroduceShipPayload
|
|
||||||
}
|
|
||||||
|
|
||||||
interface Node {
|
|
||||||
id: ID!
|
|
||||||
}
|
|
||||||
|
|
||||||
type PageInfo {
|
|
||||||
hasNextPage: Boolean!
|
|
||||||
hasPreviousPage: Boolean!
|
|
||||||
startCursor: String
|
|
||||||
endCursor: String
|
|
||||||
}
|
|
||||||
|
|
||||||
type Query {
|
|
||||||
rebels: Faction
|
|
||||||
empire: Faction
|
|
||||||
node(id: ID!): Node
|
|
||||||
}
|
|
||||||
|
|
||||||
type Ship implements Node {
|
|
||||||
id: ID!
|
|
||||||
name: String
|
|
||||||
}
|
|
||||||
|
|
||||||
type ShipConnection {
|
|
||||||
pageInfo: PageInfo!
|
|
||||||
edges: [ShipEdge]!
|
|
||||||
}
|
|
||||||
|
|
||||||
type ShipEdge {
|
|
||||||
node: Ship
|
|
||||||
cursor: String!
|
|
||||||
}
|
|
||||||
"""
|
|
|
@ -8,26 +8,46 @@ setup()
|
||||||
client = Client(schema)
|
client = Client(schema)
|
||||||
|
|
||||||
|
|
||||||
def test_correct_fetch_first_ship_rebels(snapshot):
|
def test_correct_fetch_first_ship_rebels():
|
||||||
query = """
|
result = client.execute("""
|
||||||
query RebelsShipsQuery {
|
query RebelsShipsQuery {
|
||||||
rebels {
|
rebels {
|
||||||
name,
|
name,
|
||||||
ships(first: 1) {
|
ships(first: 1) {
|
||||||
pageInfo {
|
pageInfo {
|
||||||
startCursor
|
startCursor
|
||||||
endCursor
|
endCursor
|
||||||
hasNextPage
|
hasNextPage
|
||||||
hasPreviousPage
|
hasPreviousPage
|
||||||
}
|
}
|
||||||
edges {
|
edges {
|
||||||
cursor
|
cursor
|
||||||
node {
|
node {
|
||||||
name
|
name
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
""")
|
||||||
|
assert result == {
|
||||||
|
"data": {
|
||||||
|
"rebels": {
|
||||||
|
"name": "Alliance to Restore the Republic",
|
||||||
|
"ships": {
|
||||||
|
"pageInfo": {
|
||||||
|
"startCursor": "YXJyYXljb25uZWN0aW9uOjA=",
|
||||||
|
"endCursor": "YXJyYXljb25uZWN0aW9uOjA=",
|
||||||
|
"hasNextPage": True,
|
||||||
|
"hasPreviousPage": False,
|
||||||
|
},
|
||||||
|
"edges": [
|
||||||
|
{
|
||||||
|
"cursor": "YXJyYXljb25uZWN0aW9uOjA=",
|
||||||
|
"node": {"name": "X-Wing"},
|
||||||
|
}
|
||||||
|
],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
"""
|
|
||||||
snapshot.assert_match(client.execute(query))
|
|
||||||
|
|
|
@ -8,26 +8,45 @@ setup()
|
||||||
client = Client(schema)
|
client = Client(schema)
|
||||||
|
|
||||||
|
|
||||||
def test_mutations(snapshot):
|
def test_mutations():
|
||||||
query = """
|
result = client.execute("""
|
||||||
mutation MyMutation {
|
mutation MyMutation {
|
||||||
introduceShip(input:{clientMutationId:"abc", shipName: "Peter", factionId: "1"}) {
|
introduceShip(input:{clientMutationId:"abc", shipName: "Peter", factionId: "1"}) {
|
||||||
ship {
|
ship {
|
||||||
id
|
id
|
||||||
name
|
name
|
||||||
}
|
}
|
||||||
faction {
|
faction {
|
||||||
name
|
name
|
||||||
ships {
|
ships {
|
||||||
edges {
|
edges {
|
||||||
node {
|
node {
|
||||||
id
|
id
|
||||||
name
|
name
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
""")
|
||||||
|
assert result == {
|
||||||
|
"data": {
|
||||||
|
"introduceShip": {
|
||||||
|
"ship": {"id": "U2hpcDo5", "name": "Peter"},
|
||||||
|
"faction": {
|
||||||
|
"name": "Alliance to Restore the Republic",
|
||||||
|
"ships": {
|
||||||
|
"edges": [
|
||||||
|
{"node": {"id": "U2hpcDox", "name": "X-Wing"}},
|
||||||
|
{"node": {"id": "U2hpcDoy", "name": "Y-Wing"}},
|
||||||
|
{"node": {"id": "U2hpcDoz", "name": "A-Wing"}},
|
||||||
|
{"node": {"id": "U2hpcDo0", "name": "Millennium Falcon"}},
|
||||||
|
{"node": {"id": "U2hpcDo1", "name": "Home One"}},
|
||||||
|
{"node": {"id": "U2hpcDo5", "name": "Peter"}},
|
||||||
|
]
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
"""
|
|
||||||
snapshot.assert_match(client.execute(query))
|
|
||||||
|
|
|
@ -1,3 +1,5 @@
|
||||||
|
import textwrap
|
||||||
|
|
||||||
from graphene.test import Client
|
from graphene.test import Client
|
||||||
|
|
||||||
from ..data import setup
|
from ..data import setup
|
||||||
|
@ -8,24 +10,115 @@ setup()
|
||||||
client = Client(schema)
|
client = Client(schema)
|
||||||
|
|
||||||
|
|
||||||
def test_str_schema(snapshot):
|
def test_str_schema():
|
||||||
snapshot.assert_match(str(schema))
|
assert str(schema).strip() == textwrap.dedent(
|
||||||
|
'''\
|
||||||
|
type Query {
|
||||||
|
rebels: Faction
|
||||||
|
empire: Faction
|
||||||
|
node(
|
||||||
|
"""The ID of the object"""
|
||||||
|
id: ID!
|
||||||
|
): Node
|
||||||
|
}
|
||||||
|
|
||||||
|
"""A faction in the Star Wars saga"""
|
||||||
|
type Faction implements Node {
|
||||||
|
"""The ID of the object"""
|
||||||
|
id: ID!
|
||||||
|
|
||||||
|
"""The name of the faction."""
|
||||||
|
name: String
|
||||||
|
|
||||||
|
"""The ships used by the faction."""
|
||||||
|
ships(before: String, after: String, first: Int, last: Int): ShipConnection
|
||||||
|
}
|
||||||
|
|
||||||
|
"""An object with an ID"""
|
||||||
|
interface Node {
|
||||||
|
"""The ID of the object"""
|
||||||
|
id: ID!
|
||||||
|
}
|
||||||
|
|
||||||
|
type ShipConnection {
|
||||||
|
"""Pagination data for this connection."""
|
||||||
|
pageInfo: PageInfo!
|
||||||
|
|
||||||
|
"""Contains the nodes in this connection."""
|
||||||
|
edges: [ShipEdge]!
|
||||||
|
}
|
||||||
|
|
||||||
|
"""
|
||||||
|
The Relay compliant `PageInfo` type, containing data necessary to paginate this connection.
|
||||||
|
"""
|
||||||
|
type PageInfo {
|
||||||
|
"""When paginating forwards, are there more items?"""
|
||||||
|
hasNextPage: Boolean!
|
||||||
|
|
||||||
|
"""When paginating backwards, are there more items?"""
|
||||||
|
hasPreviousPage: Boolean!
|
||||||
|
|
||||||
|
"""When paginating backwards, the cursor to continue."""
|
||||||
|
startCursor: String
|
||||||
|
|
||||||
|
"""When paginating forwards, the cursor to continue."""
|
||||||
|
endCursor: String
|
||||||
|
}
|
||||||
|
|
||||||
|
"""A Relay edge containing a `Ship` and its cursor."""
|
||||||
|
type ShipEdge {
|
||||||
|
"""The item at the end of the edge"""
|
||||||
|
node: Ship
|
||||||
|
|
||||||
|
"""A cursor for use in pagination"""
|
||||||
|
cursor: String!
|
||||||
|
}
|
||||||
|
|
||||||
|
"""A ship in the Star Wars saga"""
|
||||||
|
type Ship implements Node {
|
||||||
|
"""The ID of the object"""
|
||||||
|
id: ID!
|
||||||
|
|
||||||
|
"""The name of the ship."""
|
||||||
|
name: String
|
||||||
|
}
|
||||||
|
|
||||||
|
type Mutation {
|
||||||
|
introduceShip(input: IntroduceShipInput!): IntroduceShipPayload
|
||||||
|
}
|
||||||
|
|
||||||
|
type IntroduceShipPayload {
|
||||||
|
ship: Ship
|
||||||
|
faction: Faction
|
||||||
|
clientMutationId: String
|
||||||
|
}
|
||||||
|
|
||||||
|
input IntroduceShipInput {
|
||||||
|
shipName: String!
|
||||||
|
factionId: String!
|
||||||
|
clientMutationId: String
|
||||||
|
}'''
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def test_correctly_fetches_id_name_rebels(snapshot):
|
def test_correctly_fetches_id_name_rebels():
|
||||||
query = """
|
result = client.execute("""
|
||||||
query RebelsQuery {
|
query RebelsQuery {
|
||||||
rebels {
|
rebels {
|
||||||
id
|
id
|
||||||
name
|
name
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
"""
|
""")
|
||||||
snapshot.assert_match(client.execute(query))
|
assert result == {
|
||||||
|
"data": {
|
||||||
|
"rebels": {"id": "RmFjdGlvbjox", "name": "Alliance to Restore the Republic"}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
def test_correctly_refetches_rebels(snapshot):
|
def test_correctly_refetches_rebels():
|
||||||
query = """
|
result = client.execute("""
|
||||||
query RebelsRefetchQuery {
|
query RebelsRefetchQuery {
|
||||||
node(id: "RmFjdGlvbjox") {
|
node(id: "RmFjdGlvbjox") {
|
||||||
id
|
id
|
||||||
|
@ -34,24 +127,30 @@ def test_correctly_refetches_rebels(snapshot):
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
"""
|
""")
|
||||||
snapshot.assert_match(client.execute(query))
|
assert result == {
|
||||||
|
"data": {
|
||||||
|
"node": {"id": "RmFjdGlvbjox", "name": "Alliance to Restore the Republic"}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
def test_correctly_fetches_id_name_empire(snapshot):
|
def test_correctly_fetches_id_name_empire():
|
||||||
query = """
|
result = client.execute("""
|
||||||
query EmpireQuery {
|
query EmpireQuery {
|
||||||
empire {
|
empire {
|
||||||
id
|
id
|
||||||
name
|
name
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
"""
|
""")
|
||||||
snapshot.assert_match(client.execute(query))
|
assert result == {
|
||||||
|
"data": {"empire": {"id": "RmFjdGlvbjoy", "name": "Galactic Empire"}}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
def test_correctly_refetches_empire(snapshot):
|
def test_correctly_refetches_empire():
|
||||||
query = """
|
result = client.execute("""
|
||||||
query EmpireRefetchQuery {
|
query EmpireRefetchQuery {
|
||||||
node(id: "RmFjdGlvbjoy") {
|
node(id: "RmFjdGlvbjoy") {
|
||||||
id
|
id
|
||||||
|
@ -60,12 +159,14 @@ def test_correctly_refetches_empire(snapshot):
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
"""
|
""")
|
||||||
snapshot.assert_match(client.execute(query))
|
assert result == {
|
||||||
|
"data": {"node": {"id": "RmFjdGlvbjoy", "name": "Galactic Empire"}}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
def test_correctly_refetches_xwing(snapshot):
|
def test_correctly_refetches_xwing():
|
||||||
query = """
|
result = client.execute("""
|
||||||
query XWingRefetchQuery {
|
query XWingRefetchQuery {
|
||||||
node(id: "U2hpcDox") {
|
node(id: "U2hpcDox") {
|
||||||
id
|
id
|
||||||
|
@ -74,5 +175,5 @@ def test_correctly_refetches_xwing(snapshot):
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
"""
|
""")
|
||||||
snapshot.assert_match(client.execute(query))
|
assert result == {"data": {"node": {"id": "U2hpcDox", "name": "X-Wing"}}}
|
||||||
|
|
|
@ -1,88 +1,98 @@
|
||||||
from .pyutils.version import get_version
|
from .pyutils.version import get_version
|
||||||
|
|
||||||
from .types import (
|
|
||||||
AbstractType,
|
|
||||||
ObjectType,
|
|
||||||
InputObjectType,
|
|
||||||
Interface,
|
|
||||||
Mutation,
|
|
||||||
Field,
|
|
||||||
InputField,
|
|
||||||
Schema,
|
|
||||||
Scalar,
|
|
||||||
String,
|
|
||||||
ID,
|
|
||||||
Int,
|
|
||||||
Float,
|
|
||||||
Boolean,
|
|
||||||
Date,
|
|
||||||
DateTime,
|
|
||||||
Time,
|
|
||||||
JSONString,
|
|
||||||
UUID,
|
|
||||||
List,
|
|
||||||
NonNull,
|
|
||||||
Enum,
|
|
||||||
Argument,
|
|
||||||
Dynamic,
|
|
||||||
Union,
|
|
||||||
Context,
|
|
||||||
ResolveInfo,
|
|
||||||
)
|
|
||||||
from .relay import (
|
from .relay import (
|
||||||
Node,
|
BaseGlobalIDType,
|
||||||
is_node,
|
|
||||||
GlobalID,
|
|
||||||
ClientIDMutation,
|
ClientIDMutation,
|
||||||
Connection,
|
Connection,
|
||||||
ConnectionField,
|
ConnectionField,
|
||||||
|
DefaultGlobalIDType,
|
||||||
|
GlobalID,
|
||||||
|
Node,
|
||||||
PageInfo,
|
PageInfo,
|
||||||
|
SimpleGlobalIDType,
|
||||||
|
UUIDGlobalIDType,
|
||||||
|
is_node,
|
||||||
|
)
|
||||||
|
from .types import (
|
||||||
|
ID,
|
||||||
|
UUID,
|
||||||
|
Argument,
|
||||||
|
Base64,
|
||||||
|
BigInt,
|
||||||
|
Boolean,
|
||||||
|
Context,
|
||||||
|
Date,
|
||||||
|
DateTime,
|
||||||
|
Decimal,
|
||||||
|
Dynamic,
|
||||||
|
Enum,
|
||||||
|
Field,
|
||||||
|
Float,
|
||||||
|
InputField,
|
||||||
|
InputObjectType,
|
||||||
|
Int,
|
||||||
|
Interface,
|
||||||
|
JSONString,
|
||||||
|
List,
|
||||||
|
Mutation,
|
||||||
|
NonNull,
|
||||||
|
ObjectType,
|
||||||
|
ResolveInfo,
|
||||||
|
Scalar,
|
||||||
|
Schema,
|
||||||
|
String,
|
||||||
|
Time,
|
||||||
|
Union,
|
||||||
)
|
)
|
||||||
from .utils.resolve_only_args import resolve_only_args
|
|
||||||
from .utils.module_loading import lazy_import
|
from .utils.module_loading import lazy_import
|
||||||
|
from .utils.resolve_only_args import resolve_only_args
|
||||||
|
|
||||||
|
VERSION = (3, 4, 3, "final", 0)
|
||||||
|
|
||||||
VERSION = (2, 1, 3, "final", 0)
|
|
||||||
|
|
||||||
__version__ = get_version(VERSION)
|
__version__ = get_version(VERSION)
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
"__version__",
|
"__version__",
|
||||||
"ObjectType",
|
|
||||||
"InputObjectType",
|
|
||||||
"Interface",
|
|
||||||
"Mutation",
|
|
||||||
"Field",
|
|
||||||
"InputField",
|
|
||||||
"Schema",
|
|
||||||
"Scalar",
|
|
||||||
"String",
|
|
||||||
"ID",
|
|
||||||
"Int",
|
|
||||||
"Float",
|
|
||||||
"Enum",
|
|
||||||
"Boolean",
|
|
||||||
"Date",
|
|
||||||
"DateTime",
|
|
||||||
"Time",
|
|
||||||
"JSONString",
|
|
||||||
"UUID",
|
|
||||||
"List",
|
|
||||||
"NonNull",
|
|
||||||
"Argument",
|
"Argument",
|
||||||
"Dynamic",
|
"Base64",
|
||||||
"Union",
|
"BigInt",
|
||||||
"resolve_only_args",
|
"BaseGlobalIDType",
|
||||||
"Node",
|
"Boolean",
|
||||||
"is_node",
|
|
||||||
"GlobalID",
|
|
||||||
"ClientIDMutation",
|
"ClientIDMutation",
|
||||||
"Connection",
|
"Connection",
|
||||||
"ConnectionField",
|
"ConnectionField",
|
||||||
"PageInfo",
|
|
||||||
"lazy_import",
|
|
||||||
"Context",
|
"Context",
|
||||||
|
"Date",
|
||||||
|
"DateTime",
|
||||||
|
"Decimal",
|
||||||
|
"DefaultGlobalIDType",
|
||||||
|
"Dynamic",
|
||||||
|
"Enum",
|
||||||
|
"Field",
|
||||||
|
"Float",
|
||||||
|
"GlobalID",
|
||||||
|
"ID",
|
||||||
|
"InputField",
|
||||||
|
"InputObjectType",
|
||||||
|
"Int",
|
||||||
|
"Interface",
|
||||||
|
"JSONString",
|
||||||
|
"List",
|
||||||
|
"Mutation",
|
||||||
|
"Node",
|
||||||
|
"NonNull",
|
||||||
|
"ObjectType",
|
||||||
|
"PageInfo",
|
||||||
"ResolveInfo",
|
"ResolveInfo",
|
||||||
# Deprecated
|
"Scalar",
|
||||||
"AbstractType",
|
"Schema",
|
||||||
|
"SimpleGlobalIDType",
|
||||||
|
"String",
|
||||||
|
"Time",
|
||||||
|
"Union",
|
||||||
|
"UUID",
|
||||||
|
"UUIDGlobalIDType",
|
||||||
|
"is_node",
|
||||||
|
"lazy_import",
|
||||||
|
"resolve_only_args",
|
||||||
]
|
]
|
||||||
|
|
|
@@ -1,21 +0,0 @@
-from __future__ import absolute_import
-
-import six
-
-from graphql.pyutils.compat import Enum
-
-try:
-    from inspect import signature
-except ImportError:
-    from .signature import signature
-
-if six.PY2:
-
-    def func_name(func):
-        return func.func_name
-
-
-else:
-
-    def func_name(func):
-        return func.__name__
@@ -1,23 +0,0 @@
-is_init_subclass_available = hasattr(object, "__init_subclass__")
-
-if not is_init_subclass_available:
-
-    class InitSubclassMeta(type):
-        """Metaclass that implements PEP 487 protocol"""
-
-        def __new__(cls, name, bases, ns, **kwargs):
-            __init_subclass__ = ns.pop("__init_subclass__", None)
-            if __init_subclass__:
-                __init_subclass__ = classmethod(__init_subclass__)
-                ns["__init_subclass__"] = __init_subclass__
-            return super(InitSubclassMeta, cls).__new__(cls, name, bases, ns, **kwargs)
-
-        def __init__(cls, name, bases, ns, **kwargs):
-            super(InitSubclassMeta, cls).__init__(name, bases, ns)
-            super_class = super(cls, cls)
-            if hasattr(super_class, "__init_subclass__"):
-                super_class.__init_subclass__.__func__(cls, **kwargs)
-
-
-else:
-    InitSubclassMeta = type  # type: ignore
|
@ -1,847 +0,0 @@
|
||||||
# Copyright 2001-2013 Python Software Foundation; All Rights Reserved
|
|
||||||
"""Function signature objects for callables
|
|
||||||
Back port of Python 3.3's function signature tools from the inspect module,
|
|
||||||
modified to be compatible with Python 2.7 and 3.2+.
|
|
||||||
"""
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
import functools
|
|
||||||
import itertools
|
|
||||||
import re
|
|
||||||
import types
|
|
||||||
from collections import OrderedDict
|
|
||||||
|
|
||||||
__version__ = "0.4"
|
|
||||||
|
|
||||||
__all__ = ["BoundArguments", "Parameter", "Signature", "signature"]
|
|
||||||
|
|
||||||
|
|
||||||
_WrapperDescriptor = type(type.__call__)
|
|
||||||
_MethodWrapper = type(all.__call__)
|
|
||||||
|
|
||||||
_NonUserDefinedCallables = (
|
|
||||||
_WrapperDescriptor,
|
|
||||||
_MethodWrapper,
|
|
||||||
types.BuiltinFunctionType,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def formatannotation(annotation, base_module=None):
|
|
||||||
if isinstance(annotation, type):
|
|
||||||
if annotation.__module__ in ("builtins", "__builtin__", base_module):
|
|
||||||
return annotation.__name__
|
|
||||||
return annotation.__module__ + "." + annotation.__name__
|
|
||||||
return repr(annotation)
|
|
||||||
|
|
||||||
|
|
||||||
def _get_user_defined_method(cls, method_name, *nested):
|
|
||||||
try:
|
|
||||||
if cls is type:
|
|
||||||
return
|
|
||||||
meth = getattr(cls, method_name)
|
|
||||||
for name in nested:
|
|
||||||
meth = getattr(meth, name, meth)
|
|
||||||
except AttributeError:
|
|
||||||
return
|
|
||||||
else:
|
|
||||||
if not isinstance(meth, _NonUserDefinedCallables):
|
|
||||||
# Once '__signature__' will be added to 'C'-level
|
|
||||||
# callables, this check won't be necessary
|
|
||||||
return meth
|
|
||||||
|
|
||||||
|
|
||||||
def signature(obj):
|
|
||||||
"""Get a signature object for the passed callable."""
|
|
||||||
|
|
||||||
if not callable(obj):
|
|
||||||
raise TypeError("{!r} is not a callable object".format(obj))
|
|
||||||
|
|
||||||
if isinstance(obj, types.MethodType):
|
|
||||||
sig = signature(obj.__func__)
|
|
||||||
if obj.__self__ is None:
|
|
||||||
# Unbound method: the first parameter becomes positional-only
|
|
||||||
if sig.parameters:
|
|
||||||
first = sig.parameters.values()[0].replace(kind=_POSITIONAL_ONLY)
|
|
||||||
return sig.replace(
|
|
||||||
parameters=(first,) + tuple(sig.parameters.values())[1:]
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
return sig
|
|
||||||
else:
|
|
||||||
# In this case we skip the first parameter of the underlying
|
|
||||||
# function (usually `self` or `cls`).
|
|
||||||
return sig.replace(parameters=tuple(sig.parameters.values())[1:])
|
|
||||||
|
|
||||||
try:
|
|
||||||
sig = obj.__signature__
|
|
||||||
except AttributeError:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
if sig is not None:
|
|
||||||
return sig
|
|
||||||
|
|
||||||
try:
|
|
||||||
# Was this function wrapped by a decorator?
|
|
||||||
wrapped = obj.__wrapped__
|
|
||||||
except AttributeError:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
return signature(wrapped)
|
|
||||||
|
|
||||||
if isinstance(obj, types.FunctionType):
|
|
||||||
return Signature.from_function(obj)
|
|
||||||
|
|
||||||
if isinstance(obj, functools.partial):
|
|
||||||
sig = signature(obj.func)
|
|
||||||
|
|
||||||
new_params = OrderedDict(sig.parameters.items())
|
|
||||||
|
|
||||||
partial_args = obj.args or ()
|
|
||||||
partial_keywords = obj.keywords or {}
|
|
||||||
try:
|
|
||||||
ba = sig.bind_partial(*partial_args, **partial_keywords)
|
|
||||||
except TypeError as ex:
|
|
||||||
msg = "partial object {!r} has incorrect arguments".format(obj)
|
|
||||||
raise ValueError(msg)
|
|
||||||
|
|
||||||
for arg_name, arg_value in ba.arguments.items():
|
|
||||||
param = new_params[arg_name]
|
|
||||||
if arg_name in partial_keywords:
|
|
||||||
# We set a new default value, because the following code
|
|
||||||
# is correct:
|
|
||||||
#
|
|
||||||
# >>> def foo(a): print(a)
|
|
||||||
# >>> print(partial(partial(foo, a=10), a=20)())
|
|
||||||
# 20
|
|
||||||
# >>> print(partial(partial(foo, a=10), a=20)(a=30))
|
|
||||||
# 30
|
|
||||||
#
|
|
||||||
# So, with 'partial' objects, passing a keyword argument is
|
|
||||||
# like setting a new default value for the corresponding
|
|
||||||
# parameter
|
|
||||||
#
|
|
||||||
# We also mark this parameter with '_partial_kwarg'
|
|
||||||
# flag. Later, in '_bind', the 'default' value of this
|
|
||||||
# parameter will be added to 'kwargs', to simulate
|
|
||||||
# the 'functools.partial' real call.
|
|
||||||
new_params[arg_name] = param.replace(
|
|
||||||
default=arg_value, _partial_kwarg=True
|
|
||||||
)
|
|
||||||
|
|
||||||
elif (
|
|
||||||
param.kind not in (_VAR_KEYWORD, _VAR_POSITIONAL)
|
|
||||||
and not param._partial_kwarg
|
|
||||||
):
|
|
||||||
new_params.pop(arg_name)
|
|
||||||
|
|
||||||
return sig.replace(parameters=new_params.values())
|
|
||||||
|
|
||||||
sig = None
|
|
||||||
if isinstance(obj, type):
|
|
||||||
# obj is a class or a metaclass
|
|
||||||
|
|
||||||
# First, let's see if it has an overloaded __call__ defined
|
|
||||||
# in its metaclass
|
|
||||||
call = _get_user_defined_method(type(obj), "__call__")
|
|
||||||
if call is not None:
|
|
||||||
sig = signature(call)
|
|
||||||
else:
|
|
||||||
# Now we check if the 'obj' class has a '__new__' method
|
|
||||||
new = _get_user_defined_method(obj, "__new__")
|
|
||||||
if new is not None:
|
|
||||||
sig = signature(new)
|
|
||||||
else:
|
|
||||||
# Finally, we should have at least __init__ implemented
|
|
||||||
init = _get_user_defined_method(obj, "__init__")
|
|
||||||
if init is not None:
|
|
||||||
sig = signature(init)
|
|
||||||
elif not isinstance(obj, _NonUserDefinedCallables):
|
|
||||||
# An object with __call__
|
|
||||||
# We also check that the 'obj' is not an instance of
|
|
||||||
# _WrapperDescriptor or _MethodWrapper to avoid
|
|
||||||
# infinite recursion (and even potential segfault)
|
|
||||||
call = _get_user_defined_method(type(obj), "__call__", "im_func")
|
|
||||||
if call is not None:
|
|
||||||
sig = signature(call)
|
|
||||||
|
|
||||||
if sig is not None:
|
|
||||||
# For classes and objects we skip the first parameter of their
|
|
||||||
# __call__, __new__, or __init__ methods
|
|
||||||
return sig.replace(parameters=tuple(sig.parameters.values())[1:])
|
|
||||||
|
|
||||||
if isinstance(obj, types.BuiltinFunctionType):
|
|
||||||
# Raise a nicer error message for builtins
|
|
||||||
msg = "no signature found for builtin function {!r}".format(obj)
|
|
||||||
raise ValueError(msg)
|
|
||||||
|
|
||||||
raise ValueError("callable {!r} is not supported by signature".format(obj))
|
|
||||||
|
|
||||||
|
|
||||||
class _void(object):
|
|
||||||
"""A private marker - used in Parameter & Signature"""
|
|
||||||
|
|
||||||
|
|
||||||
class _empty(object):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class _ParameterKind(int):
|
|
||||||
def __new__(self, *args, **kwargs):
|
|
||||||
obj = int.__new__(self, *args)
|
|
||||||
obj._name = kwargs["name"]
|
|
||||||
return obj
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
return self._name
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return "<_ParameterKind: {!r}>".format(self._name)
|
|
||||||
|
|
||||||
|
|
||||||
_POSITIONAL_ONLY = _ParameterKind(0, name="POSITIONAL_ONLY")
|
|
||||||
_POSITIONAL_OR_KEYWORD = _ParameterKind(1, name="POSITIONAL_OR_KEYWORD")
|
|
||||||
_VAR_POSITIONAL = _ParameterKind(2, name="VAR_POSITIONAL")
|
|
||||||
_KEYWORD_ONLY = _ParameterKind(3, name="KEYWORD_ONLY")
|
|
||||||
_VAR_KEYWORD = _ParameterKind(4, name="VAR_KEYWORD")
|
|
||||||
|
|
||||||
|
|
||||||
class Parameter(object):
|
|
||||||
"""Represents a parameter in a function signature.
|
|
||||||
Has the following public attributes:
|
|
||||||
* name : str
|
|
||||||
The name of the parameter as a string.
|
|
||||||
* default : object
|
|
||||||
The default value for the parameter if specified. If the
|
|
||||||
parameter has no default value, this attribute is not set.
|
|
||||||
* annotation
|
|
||||||
The annotation for the parameter if specified. If the
|
|
||||||
parameter has no annotation, this attribute is not set.
|
|
||||||
* kind : str
|
|
||||||
Describes how argument values are bound to the parameter.
|
|
||||||
Possible values: `Parameter.POSITIONAL_ONLY`,
|
|
||||||
`Parameter.POSITIONAL_OR_KEYWORD`, `Parameter.VAR_POSITIONAL`,
|
|
||||||
`Parameter.KEYWORD_ONLY`, `Parameter.VAR_KEYWORD`.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__slots__ = ("_name", "_kind", "_default", "_annotation", "_partial_kwarg")
|
|
||||||
|
|
||||||
POSITIONAL_ONLY = _POSITIONAL_ONLY
|
|
||||||
POSITIONAL_OR_KEYWORD = _POSITIONAL_OR_KEYWORD
|
|
||||||
VAR_POSITIONAL = _VAR_POSITIONAL
|
|
||||||
KEYWORD_ONLY = _KEYWORD_ONLY
|
|
||||||
VAR_KEYWORD = _VAR_KEYWORD
|
|
||||||
|
|
||||||
empty = _empty
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self, name, kind, default=_empty, annotation=_empty, _partial_kwarg=False
|
|
||||||
):
|
|
||||||
|
|
||||||
if kind not in (
|
|
||||||
_POSITIONAL_ONLY,
|
|
||||||
_POSITIONAL_OR_KEYWORD,
|
|
||||||
_VAR_POSITIONAL,
|
|
||||||
_KEYWORD_ONLY,
|
|
||||||
_VAR_KEYWORD,
|
|
||||||
):
|
|
||||||
raise ValueError("invalid value for 'Parameter.kind' attribute")
|
|
||||||
self._kind = kind
|
|
||||||
|
|
||||||
if default is not _empty:
|
|
||||||
if kind in (_VAR_POSITIONAL, _VAR_KEYWORD):
|
|
||||||
msg = "{} parameters cannot have default values".format(kind)
|
|
||||||
raise ValueError(msg)
|
|
||||||
self._default = default
|
|
||||||
self._annotation = annotation
|
|
||||||
|
|
||||||
if name is None:
|
|
||||||
if kind != _POSITIONAL_ONLY:
|
|
||||||
raise ValueError(
|
|
||||||
"None is not a valid name for a " "non-positional-only parameter"
|
|
||||||
)
|
|
||||||
self._name = name
|
|
||||||
else:
|
|
||||||
name = str(name)
|
|
||||||
if kind != _POSITIONAL_ONLY and not re.match(r"[a-z_]\w*$", name, re.I):
|
|
||||||
msg = "{!r} is not a valid parameter name".format(name)
|
|
||||||
raise ValueError(msg)
|
|
||||||
self._name = name
|
|
||||||
|
|
||||||
self._partial_kwarg = _partial_kwarg
|
|
||||||
|
|
||||||
@property
|
|
||||||
def name(self):
|
|
||||||
return self._name
|
|
||||||
|
|
||||||
@property
|
|
||||||
def default(self):
|
|
||||||
return self._default
|
|
||||||
|
|
||||||
@property
|
|
||||||
def annotation(self):
|
|
||||||
return self._annotation
|
|
||||||
|
|
||||||
@property
|
|
||||||
def kind(self):
|
|
||||||
return self._kind
|
|
||||||
|
|
||||||
def replace(
|
|
||||||
self,
|
|
||||||
name=_void,
|
|
||||||
kind=_void,
|
|
||||||
annotation=_void,
|
|
||||||
default=_void,
|
|
||||||
_partial_kwarg=_void,
|
|
||||||
):
|
|
||||||
"""Creates a customized copy of the Parameter."""
|
|
||||||
|
|
||||||
if name is _void:
|
|
||||||
name = self._name
|
|
||||||
|
|
||||||
if kind is _void:
|
|
||||||
kind = self._kind
|
|
||||||
|
|
||||||
if annotation is _void:
|
|
||||||
annotation = self._annotation
|
|
||||||
|
|
||||||
if default is _void:
|
|
||||||
default = self._default
|
|
||||||
|
|
||||||
if _partial_kwarg is _void:
|
|
||||||
_partial_kwarg = self._partial_kwarg
|
|
||||||
|
|
||||||
return type(self)(
|
|
||||||
name,
|
|
||||||
kind,
|
|
||||||
default=default,
|
|
||||||
annotation=annotation,
|
|
||||||
_partial_kwarg=_partial_kwarg,
|
|
||||||
)
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
kind = self.kind
|
|
||||||
|
|
||||||
formatted = self._name
|
|
||||||
if kind == _POSITIONAL_ONLY:
|
|
||||||
if formatted is None:
|
|
||||||
formatted = ""
|
|
||||||
formatted = "<{}>".format(formatted)
|
|
||||||
|
|
||||||
# Add annotation and default value
|
|
||||||
if self._annotation is not _empty:
|
|
||||||
formatted = "{}:{}".format(formatted, formatannotation(self._annotation))
|
|
||||||
|
|
||||||
if self._default is not _empty:
|
|
||||||
formatted = "{}={}".format(formatted, repr(self._default))
|
|
||||||
|
|
||||||
if kind == _VAR_POSITIONAL:
|
|
||||||
formatted = "*" + formatted
|
|
||||||
elif kind == _VAR_KEYWORD:
|
|
||||||
formatted = "**" + formatted
|
|
||||||
|
|
||||||
return formatted
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return "<{} at {:#x} {!r}>".format(self.__class__.__name__, id(self), self.name)
|
|
||||||
|
|
||||||
def __hash__(self):
|
|
||||||
msg = "unhashable type: '{}'".format(self.__class__.__name__)
|
|
||||||
raise TypeError(msg)
|
|
||||||
|
|
||||||
def __eq__(self, other):
|
|
||||||
return (
|
|
||||||
issubclass(other.__class__, Parameter)
|
|
||||||
and self._name == other._name
|
|
||||||
and self._kind == other._kind
|
|
||||||
and self._default == other._default
|
|
||||||
and self._annotation == other._annotation
|
|
||||||
)
|
|
||||||
|
|
||||||
def __ne__(self, other):
|
|
||||||
return not self.__eq__(other)
|
|
||||||
|
|
||||||
|
|
||||||
class BoundArguments(object):
|
|
||||||
"""Result of `Signature.bind` call. Holds the mapping of arguments
|
|
||||||
to the function's parameters.
|
|
||||||
Has the following public attributes:
|
|
||||||
* arguments : OrderedDict
|
|
||||||
An ordered mutable mapping of parameters' names to arguments' values.
|
|
||||||
Does not contain arguments' default values.
|
|
||||||
* signature : Signature
|
|
||||||
The Signature object that created this instance.
|
|
||||||
* args : tuple
|
|
||||||
Tuple of positional arguments values.
|
|
||||||
* kwargs : dict
|
|
||||||
Dict of keyword arguments values.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, signature, arguments):
|
|
||||||
self.arguments = arguments
|
|
||||||
self._signature = signature
|
|
||||||
|
|
||||||
@property
|
|
||||||
def signature(self):
|
|
||||||
return self._signature
|
|
||||||
|
|
||||||
@property
|
|
||||||
def args(self):
|
|
||||||
args = []
|
|
||||||
for param_name, param in self._signature.parameters.items():
|
|
||||||
if param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or param._partial_kwarg:
|
|
||||||
# Keyword arguments mapped by 'functools.partial'
|
|
||||||
# (Parameter._partial_kwarg is True) are mapped
|
|
||||||
# in 'BoundArguments.kwargs', along with VAR_KEYWORD &
|
|
||||||
# KEYWORD_ONLY
|
|
||||||
break
|
|
||||||
|
|
||||||
try:
|
|
||||||
arg = self.arguments[param_name]
|
|
||||||
except KeyError:
|
|
||||||
# We're done here. Other arguments
|
|
||||||
# will be mapped in 'BoundArguments.kwargs'
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
if param.kind == _VAR_POSITIONAL:
|
|
||||||
# *args
|
|
||||||
args.extend(arg)
|
|
||||||
else:
|
|
||||||
# plain argument
|
|
||||||
args.append(arg)
|
|
||||||
|
|
||||||
return tuple(args)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def kwargs(self):
|
|
||||||
kwargs = {}
|
|
||||||
kwargs_started = False
|
|
||||||
for param_name, param in self._signature.parameters.items():
|
|
||||||
if not kwargs_started:
|
|
||||||
if param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or param._partial_kwarg:
|
|
||||||
kwargs_started = True
|
|
||||||
else:
|
|
||||||
if param_name not in self.arguments:
|
|
||||||
kwargs_started = True
|
|
||||||
continue
|
|
||||||
|
|
||||||
if not kwargs_started:
|
|
||||||
continue
|
|
||||||
|
|
||||||
try:
|
|
||||||
arg = self.arguments[param_name]
|
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
if param.kind == _VAR_KEYWORD:
|
|
||||||
# **kwargs
|
|
||||||
kwargs.update(arg)
|
|
||||||
else:
|
|
||||||
# plain keyword argument
|
|
||||||
kwargs[param_name] = arg
|
|
||||||
|
|
||||||
return kwargs
|
|
||||||
|
|
||||||
def __hash__(self):
|
|
||||||
msg = "unhashable type: '{}'".format(self.__class__.__name__)
|
|
||||||
raise TypeError(msg)
|
|
||||||
|
|
||||||
def __eq__(self, other):
|
|
||||||
return (
|
|
||||||
issubclass(other.__class__, BoundArguments)
|
|
||||||
and self.signature == other.signature
|
|
||||||
and self.arguments == other.arguments
|
|
||||||
)
|
|
||||||
|
|
||||||
def __ne__(self, other):
|
|
||||||
return not self.__eq__(other)
|
|
||||||
|
|
||||||
|
|
||||||
class Signature(object):
|
|
||||||
"""A Signature object represents the overall signature of a function.
|
|
||||||
It stores a Parameter object for each parameter accepted by the
|
|
||||||
function, as well as information specific to the function itself.
|
|
||||||
A Signature object has the following public attributes and methods:
|
|
||||||
* parameters : OrderedDict
|
|
||||||
An ordered mapping of parameters' names to the corresponding
|
|
||||||
Parameter objects (keyword-only arguments are in the same order
|
|
||||||
as listed in `code.co_varnames`).
|
|
||||||
* return_annotation : object
|
|
||||||
The annotation for the return type of the function if specified.
|
|
||||||
If the function has no annotation for its return type, this
|
|
||||||
attribute is not set.
|
|
||||||
* bind(*args, **kwargs) -> BoundArguments
|
|
||||||
Creates a mapping from positional and keyword arguments to
|
|
||||||
parameters.
|
|
||||||
* bind_partial(*args, **kwargs) -> BoundArguments
|
|
||||||
Creates a partial mapping from positional and keyword arguments
|
|
||||||
to parameters (simulating 'functools.partial' behavior.)
|
|
||||||
"""
|
|
||||||
|
|
||||||
__slots__ = ("_return_annotation", "_parameters")
|
|
||||||
|
|
||||||
_parameter_cls = Parameter
|
|
||||||
_bound_arguments_cls = BoundArguments
|
|
||||||
|
|
||||||
empty = _empty
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self, parameters=None, return_annotation=_empty, __validate_parameters__=True
|
|
||||||
):
|
|
||||||
"""Constructs Signature from the given list of Parameter
|
|
||||||
objects and 'return_annotation'. All arguments are optional.
|
|
||||||
"""
|
|
||||||
|
|
||||||
if parameters is None:
|
|
||||||
params = OrderedDict()
|
|
||||||
else:
|
|
||||||
if __validate_parameters__:
|
|
||||||
params = OrderedDict()
|
|
||||||
top_kind = _POSITIONAL_ONLY
|
|
||||||
|
|
||||||
for idx, param in enumerate(parameters):
|
|
||||||
kind = param.kind
|
|
||||||
if kind < top_kind:
|
|
||||||
msg = "wrong parameter order: {0} before {1}"
|
|
||||||
msg = msg.format(top_kind, param.kind)
|
|
||||||
raise ValueError(msg)
|
|
||||||
else:
|
|
||||||
top_kind = kind
|
|
||||||
|
|
||||||
name = param.name
|
|
||||||
if name is None:
|
|
||||||
name = str(idx)
|
|
||||||
param = param.replace(name=name)
|
|
||||||
|
|
||||||
if name in params:
|
|
||||||
msg = "duplicate parameter name: {!r}".format(name)
|
|
||||||
raise ValueError(msg)
|
|
||||||
params[name] = param
|
|
||||||
else:
|
|
||||||
params = OrderedDict(((param.name, param) for param in parameters))
|
|
||||||
|
|
||||||
self._parameters = params
|
|
||||||
self._return_annotation = return_annotation
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def from_function(cls, func):
|
|
||||||
"""Constructs Signature for the given python function"""
|
|
||||||
|
|
||||||
if not isinstance(func, types.FunctionType):
|
|
||||||
raise TypeError("{!r} is not a Python function".format(func))
|
|
||||||
|
|
||||||
Parameter = cls._parameter_cls
|
|
||||||
|
|
||||||
# Parameter information.
|
|
||||||
func_code = func.__code__
|
|
||||||
pos_count = func_code.co_argcount
|
|
||||||
arg_names = func_code.co_varnames
|
|
||||||
positional = tuple(arg_names[:pos_count])
|
|
||||||
keyword_only_count = getattr(func_code, "co_kwonlyargcount", 0)
|
|
||||||
keyword_only = arg_names[pos_count : (pos_count + keyword_only_count)]
|
|
||||||
annotations = getattr(func, "__annotations__", {})
|
|
||||||
defaults = func.__defaults__
|
|
||||||
kwdefaults = getattr(func, "__kwdefaults__", None)
|
|
||||||
|
|
||||||
if defaults:
|
|
||||||
pos_default_count = len(defaults)
|
|
||||||
else:
|
|
||||||
pos_default_count = 0
|
|
||||||
|
|
||||||
parameters = []
|
|
||||||
|
|
||||||
# Non-keyword-only parameters w/o defaults.
|
|
||||||
non_default_count = pos_count - pos_default_count
|
|
||||||
for name in positional[:non_default_count]:
|
|
||||||
annotation = annotations.get(name, _empty)
|
|
||||||
parameters.append(
|
|
||||||
Parameter(name, annotation=annotation, kind=_POSITIONAL_OR_KEYWORD)
|
|
||||||
)
|
|
||||||
|
|
||||||
# ... w/ defaults.
|
|
||||||
for offset, name in enumerate(positional[non_default_count:]):
|
|
||||||
annotation = annotations.get(name, _empty)
|
|
||||||
parameters.append(
|
|
||||||
Parameter(
|
|
||||||
name,
|
|
||||||
annotation=annotation,
|
|
||||||
kind=_POSITIONAL_OR_KEYWORD,
|
|
||||||
default=defaults[offset],
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
# *args
|
|
||||||
if func_code.co_flags & 0x04:
|
|
||||||
name = arg_names[pos_count + keyword_only_count]
|
|
||||||
annotation = annotations.get(name, _empty)
|
|
||||||
parameters.append(
|
|
||||||
Parameter(name, annotation=annotation, kind=_VAR_POSITIONAL)
|
|
||||||
)
|
|
||||||
|
|
||||||
# Keyword-only parameters.
|
|
||||||
for name in keyword_only:
|
|
||||||
default = _empty
|
|
||||||
if kwdefaults is not None:
|
|
||||||
default = kwdefaults.get(name, _empty)
|
|
||||||
|
|
||||||
annotation = annotations.get(name, _empty)
|
|
||||||
parameters.append(
|
|
||||||
Parameter(
|
|
||||||
name, annotation=annotation, kind=_KEYWORD_ONLY, default=default
|
|
||||||
)
|
|
||||||
)
|
|
||||||
# **kwargs
|
|
||||||
if func_code.co_flags & 0x08:
|
|
||||||
index = pos_count + keyword_only_count
|
|
||||||
if func_code.co_flags & 0x04:
|
|
||||||
index += 1
|
|
||||||
|
|
||||||
name = arg_names[index]
|
|
||||||
annotation = annotations.get(name, _empty)
|
|
||||||
parameters.append(Parameter(name, annotation=annotation, kind=_VAR_KEYWORD))
|
|
||||||
|
|
||||||
return cls(
|
|
||||||
parameters,
|
|
||||||
return_annotation=annotations.get("return", _empty),
|
|
||||||
__validate_parameters__=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def parameters(self):
|
|
||||||
try:
|
|
||||||
return types.MappingProxyType(self._parameters)
|
|
||||||
except AttributeError:
|
|
||||||
return OrderedDict(self._parameters.items())
|
|
||||||
|
|
||||||
@property
|
|
||||||
def return_annotation(self):
|
|
||||||
return self._return_annotation
|
|
||||||
|
|
||||||
def replace(self, parameters=_void, return_annotation=_void):
|
|
||||||
"""Creates a customized copy of the Signature.
|
|
||||||
Pass 'parameters' and/or 'return_annotation' arguments
|
|
||||||
to override them in the new copy.
|
|
||||||
"""
|
|
||||||
|
|
||||||
if parameters is _void:
|
|
||||||
parameters = self.parameters.values()
|
|
||||||
|
|
||||||
if return_annotation is _void:
|
|
||||||
return_annotation = self._return_annotation
|
|
||||||
|
|
||||||
return type(self)(parameters, return_annotation=return_annotation)
|
|
||||||
|
|
||||||
def __hash__(self):
|
|
||||||
msg = "unhashable type: '{}'".format(self.__class__.__name__)
|
|
||||||
raise TypeError(msg)
|
|
||||||
|
|
||||||
def __eq__(self, other):
|
|
||||||
if (
|
|
||||||
not issubclass(type(other), Signature)
|
|
||||||
or self.return_annotation != other.return_annotation
|
|
||||||
or len(self.parameters) != len(other.parameters)
|
|
||||||
):
|
|
||||||
return False
|
|
||||||
|
|
||||||
other_positions = {
|
|
||||||
param: idx for idx, param in enumerate(other.parameters.keys())
|
|
||||||
}
|
|
||||||
|
|
||||||
for idx, (param_name, param) in enumerate(self.parameters.items()):
|
|
||||||
if param.kind == _KEYWORD_ONLY:
|
|
||||||
try:
|
|
||||||
other_param = other.parameters[param_name]
|
|
||||||
except KeyError:
|
|
||||||
return False
|
|
||||||
else:
|
|
||||||
if param != other_param:
|
|
||||||
return False
|
|
||||||
else:
|
|
||||||
try:
|
|
||||||
other_idx = other_positions[param_name]
|
|
||||||
except KeyError:
|
|
||||||
return False
|
|
||||||
else:
|
|
||||||
if idx != other_idx or param != other.parameters[param_name]:
|
|
||||||
return False
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
def __ne__(self, other):
|
|
||||||
return not self.__eq__(other)
|
|
||||||
|
|
||||||
def _bind(self, args, kwargs, partial=False):
|
|
||||||
"""Private method. Don't use directly."""
|
|
||||||
|
|
||||||
arguments = OrderedDict()
|
|
||||||
|
|
||||||
parameters = iter(self.parameters.values())
|
|
||||||
parameters_ex = ()
|
|
||||||
arg_vals = iter(args)
|
|
||||||
|
|
||||||
if partial:
|
|
||||||
# Support for binding arguments to 'functools.partial' objects.
|
|
||||||
# See 'functools.partial' case in 'signature()' implementation
|
|
||||||
# for details.
|
|
||||||
for param_name, param in self.parameters.items():
|
|
||||||
if param._partial_kwarg and param_name not in kwargs:
|
|
||||||
# Simulating 'functools.partial' behavior
|
|
||||||
kwargs[param_name] = param.default
|
|
||||||
|
|
||||||
while True:
|
|
||||||
# Let's iterate through the positional arguments and corresponding
|
|
||||||
# parameters
|
|
||||||
try:
|
|
||||||
arg_val = next(arg_vals)
|
|
||||||
except StopIteration:
|
|
||||||
# No more positional arguments
|
|
||||||
try:
|
|
||||||
param = next(parameters)
|
|
||||||
except StopIteration:
|
|
||||||
# No more parameters. That's it. Just need to check that
|
|
||||||
# we have no `kwargs` after this while loop
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
if param.kind == _VAR_POSITIONAL:
|
|
||||||
# That's OK, just empty *args. Let's start parsing
|
|
||||||
# kwargs
|
|
||||||
break
|
|
||||||
elif param.name in kwargs:
|
|
||||||
if param.kind == _POSITIONAL_ONLY:
|
|
||||||
msg = "{arg!r} parameter is positional only, " "but was passed as a keyword"
|
|
||||||
msg = msg.format(arg=param.name)
|
|
||||||
raise TypeError(msg)
|
|
||||||
parameters_ex = (param,)
|
|
||||||
break
|
|
||||||
elif param.kind == _VAR_KEYWORD or param.default is not _empty:
|
|
||||||
# That's fine too - we have a default value for this
|
|
                        # parameter. So, lets start parsing `kwargs`, starting
                        # with the current parameter
                        parameters_ex = (param,)
                        break
                    else:
                        if partial:
                            parameters_ex = (param,)
                            break
                        else:
                            msg = "{arg!r} parameter lacking default value"
                            msg = msg.format(arg=param.name)
                            raise TypeError(msg)
            else:
                # We have a positional argument to process
                try:
                    param = next(parameters)
                except StopIteration:
                    raise TypeError("too many positional arguments")
                else:
                    if param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY):
                        # Looks like we have no parameter for this positional
                        # argument
                        raise TypeError("too many positional arguments")

                    if param.kind == _VAR_POSITIONAL:
                        # We have an '*args'-like argument, let's fill it with
                        # all positional arguments we have left and move on to
                        # the next phase
                        values = [arg_val]
                        values.extend(arg_vals)
                        arguments[param.name] = tuple(values)
                        break

                    if param.name in kwargs:
                        raise TypeError(
                            "multiple values for argument "
                            "{arg!r}".format(arg=param.name)
                        )

                    arguments[param.name] = arg_val

        # Now, we iterate through the remaining parameters to process
        # keyword arguments
        kwargs_param = None
        for param in itertools.chain(parameters_ex, parameters):
            if param.kind == _POSITIONAL_ONLY:
                # This should never happen in case of a properly built
                # Signature object (but let's have this check here
                # to ensure correct behaviour just in case)
                raise TypeError(
                    "{arg!r} parameter is positional only, "
                    "but was passed as a keyword".format(arg=param.name)
                )

            if param.kind == _VAR_KEYWORD:
                # Memorize that we have a '**kwargs'-like parameter
                kwargs_param = param
                continue

            param_name = param.name
            try:
                arg_val = kwargs.pop(param_name)
            except KeyError:
                # We have no value for this parameter. It's fine though,
                # if it has a default value, or it is an '*args'-like
                # parameter, left alone by the processing of positional
                # arguments.
                if (
                    not partial
                    and param.kind != _VAR_POSITIONAL
                    and param.default is _empty
                ):
                    raise TypeError(
                        "{arg!r} parameter lacking default value".format(arg=param_name)
                    )

            else:
                arguments[param_name] = arg_val

        if kwargs:
            if kwargs_param is not None:
                # Process our '**kwargs'-like parameter
                arguments[kwargs_param.name] = kwargs
            else:
                raise TypeError("too many keyword arguments")

        return self._bound_arguments_cls(self, arguments)

    def bind(self, *args, **kwargs):
        """Get a BoundArguments object, that maps the passed `args`
        and `kwargs` to the function's signature. Raises `TypeError`
        if the passed arguments can not be bound.
        """
        return self._bind(args, kwargs)

    def bind_partial(self, *args, **kwargs):
        """Get a BoundArguments object, that partially maps the
        passed `args` and `kwargs` to the function's signature.
        Raises `TypeError` if the passed arguments can not be bound.
        """
        return self._bind(args, kwargs, partial=True)

    def __str__(self):
        result = []
        render_kw_only_separator = True
        for idx, param in enumerate(self.parameters.values()):
            formatted = str(param)

            kind = param.kind
            if kind == _VAR_POSITIONAL:
                # OK, we have an '*args'-like parameter, so we won't need
                # a '*' to separate keyword-only arguments
                render_kw_only_separator = False
            elif kind == _KEYWORD_ONLY and render_kw_only_separator:
                # We have a keyword-only parameter to render and we haven't
                # rendered an '*args'-like parameter before, so add a '*'
                # separator to the parameters list ("foo(arg1, *, arg2)" case)
                result.append("*")
                # This condition should be only triggered once, so
                # reset the flag
                render_kw_only_separator = False

            result.append(formatted)

        rendered = "({})".format(", ".join(result))

        if self.return_annotation is not _empty:
            anno = formatannotation(self.return_annotation)
            rendered += " -> {}".format(anno)

        return rendered
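Note: the vendored binding logic above mirrors the standard library's inspect.Signature.bind / bind_partial. A minimal sketch of the behaviour it implements, using the stdlib inspect module for illustration (the greet function is made up; the vendored copy raises the same TypeError in these cases, with slightly different wording):

from inspect import signature  # stdlib equivalent of the vendored Signature class


def greet(name, *args, greeting="hello", **extra):
    return f"{greeting}, {name}"


sig = signature(greet)

# bind() requires every parameter without a default to be supplied...
bound = sig.bind("world", greeting="hi")
assert bound.arguments == {"name": "world", "greeting": "hi"}

# ...while bind_partial() tolerates missing ones, as in the `partial=True` branch above.
partial_bound = sig.bind_partial(greeting="hi")
assert partial_bound.arguments == {"greeting": "hi"}

# An unmatched keyword raises, matching the "too many keyword arguments" branch.
try:
    sig.bind("world", bogus=1)
except TypeError as exc:
    print(exc)  # stdlib wording: got an unexpected keyword argument 'bogus'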
@ -1,5 +1,3 @@
-from __future__ import unicode_literals
-
 import datetime
 import os
 import subprocess

@ -19,10 +17,7 @@ def get_version(version=None):
     sub = ""
     if version[3] == "alpha" and version[4] == 0:
         git_changeset = get_git_changeset()
-        if git_changeset:
-            sub = ".dev%s" % git_changeset
-        else:
-            sub = ".dev"
+        sub = ".dev%s" % git_changeset if git_changeset else ".dev"
     elif version[3] != "final":
         mapping = {"alpha": "a", "beta": "b", "rc": "rc"}
         sub = mapping[version[3]] + str(version[4])

@ -76,6 +71,6 @@ def get_git_changeset():
         )
         timestamp = git_log.communicate()[0]
         timestamp = datetime.datetime.utcfromtimestamp(int(timestamp))
-    except:
+    except Exception:
         return None
     return timestamp.strftime("%Y%m%d%H%M%S")
@ -1,13 +1,23 @@
 from .node import Node, is_node, GlobalID
 from .mutation import ClientIDMutation
 from .connection import Connection, ConnectionField, PageInfo
+from .id_type import (
+    BaseGlobalIDType,
+    DefaultGlobalIDType,
+    SimpleGlobalIDType,
+    UUIDGlobalIDType,
+)

 __all__ = [
-    "Node",
-    "is_node",
-    "GlobalID",
+    "BaseGlobalIDType",
     "ClientIDMutation",
     "Connection",
     "ConnectionField",
+    "DefaultGlobalIDType",
+    "GlobalID",
+    "Node",
     "PageInfo",
+    "SimpleGlobalIDType",
+    "UUIDGlobalIDType",
+    "is_node",
 ]
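A quick sanity check of the new re-exports (a sketch, assuming a graphene install built from this changeset):

from graphene import relay

# All of the global-ID helpers added above are exposed at the package level.
assert {
    "BaseGlobalIDType",
    "DefaultGlobalIDType",
    "SimpleGlobalIDType",
    "UUIDGlobalIDType",
} <= set(relay.__all__)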
@ -1,17 +1,51 @@
 import re
-from collections import Iterable, OrderedDict
+from collections.abc import Iterable
 from functools import partial
+from typing import Type

-from graphql_relay import connection_from_list
-from promise import Promise, is_thenable
+from graphql_relay import connection_from_array

 from ..types import Boolean, Enum, Int, Interface, List, NonNull, Scalar, String, Union
 from ..types.field import Field
 from ..types.objecttype import ObjectType, ObjectTypeOptions
-from .node import is_node
+from ..utils.thenables import maybe_thenable
+from .node import is_node, AbstractNode
+
+
+def get_edge_class(
+    connection_class: Type["Connection"],
+    _node: Type[AbstractNode],
+    base_name: str,
+    strict_types: bool = False,
+):
+    edge_class = getattr(connection_class, "Edge", None)
+
+    class EdgeBase:
+        node = Field(
+            NonNull(_node) if strict_types else _node,
+            description="The item at the end of the edge",
+        )
+        cursor = String(required=True, description="A cursor for use in pagination")
+
+    class EdgeMeta:
+        description = f"A Relay edge containing a `{base_name}` and its cursor."
+
+    edge_name = f"{base_name}Edge"
+
+    edge_bases = [edge_class, EdgeBase] if edge_class else [EdgeBase]
+    if not isinstance(edge_class, ObjectType):
+        edge_bases = [*edge_bases, ObjectType]
+
+    return type(edge_name, tuple(edge_bases), {"Meta": EdgeMeta})


 class PageInfo(ObjectType):
+    class Meta:
+        description = (
+            "The Relay compliant `PageInfo` type, containing data necessary to"
+            " paginate this connection."
+        )
+
     has_next_page = Boolean(
         required=True,
         name="hasNextPage",

@ -35,6 +69,17 @@ class PageInfo(ObjectType):
     )


+# noinspection PyPep8Naming
+def page_info_adapter(startCursor, endCursor, hasPreviousPage, hasNextPage):
+    """Adapter for creating PageInfo instances"""
+    return PageInfo(
+        start_cursor=startCursor,
+        end_cursor=endCursor,
+        has_previous_page=hasPreviousPage,
+        has_next_page=hasNextPage,
+    )
+
+
 class ConnectionOptions(ObjectTypeOptions):
     node = None
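For illustration, a minimal sketch of what get_edge_class produces. Ship and _DummyConnection are made-up names; in normal use the helper is called for you by Connection.__init_subclass_with_meta__ further down in this diff:

from graphene import NonNull, ObjectType, String
from graphene.relay.connection import get_edge_class


class Ship(ObjectType):  # hypothetical node type
    name = String()


class _DummyConnection:  # only consulted for an optional custom ``Edge`` attribute
    pass


Edge = get_edge_class(_DummyConnection, Ship, "Ship")
assert Edge._meta.name == "ShipEdge"
assert list(Edge._meta.fields) == ["node", "cursor"]  # the fields defined on EdgeBase

# With strict_types=True the node field is wrapped in NonNull.
StrictEdge = get_edge_class(_DummyConnection, Ship, "Ship", strict_types=True)
assert isinstance(StrictEdge._meta.fields["node"].type, NonNull)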
@ -44,73 +89,79 @@ class Connection(ObjectType):
         abstract = True

     @classmethod
-    def __init_subclass_with_meta__(cls, node=None, name=None, **options):
-        _meta = ConnectionOptions(cls)
-        assert node, "You have to provide a node in {}.Meta".format(cls.__name__)
-        assert issubclass(
+    def __init_subclass_with_meta__(
+        cls, node=None, name=None, strict_types=False, _meta=None, **options
+    ):
+        if not _meta:
+            _meta = ConnectionOptions(cls)
+        assert node, f"You have to provide a node in {cls.__name__}.Meta"
+        assert isinstance(node, NonNull) or issubclass(
             node, (Scalar, Enum, ObjectType, Interface, Union, NonNull)
-        ), ('Received incompatible node "{}" for Connection {}.').format(
-            node, cls.__name__
-        )
+        ), f'Received incompatible node "{node}" for Connection {cls.__name__}.'

         base_name = re.sub("Connection$", "", name or cls.__name__) or node._meta.name
         if not name:
-            name = "{}Connection".format(base_name)
+            name = f"{base_name}Connection"

-        edge_class = getattr(cls, "Edge", None)
-        _node = node
-
-        class EdgeBase(object):
-            node = Field(_node, description="The item at the end of the edge")
-            cursor = String(required=True, description="A cursor for use in pagination")
-
-        edge_name = "{}Edge".format(base_name)
-        if edge_class:
-            edge_bases = (edge_class, EdgeBase, ObjectType)
-        else:
-            edge_bases = (EdgeBase, ObjectType)
-
-        edge = type(edge_name, edge_bases, {})
-        cls.Edge = edge
-
         options["name"] = name

         _meta.node = node
-        _meta.fields = OrderedDict(
-            [
-                ("page_info", Field(PageInfo, name="pageInfo", required=True)),
-                ("edges", Field(NonNull(List(edge)))),
-            ]
-        )
+
+        if not _meta.fields:
+            _meta.fields = {}
+
+        if "page_info" not in _meta.fields:
+            _meta.fields["page_info"] = Field(
+                PageInfo,
+                name="pageInfo",
+                required=True,
+                description="Pagination data for this connection.",
+            )
+
+        if "edges" not in _meta.fields:
+            edge_class = get_edge_class(cls, node, base_name, strict_types)  # type: ignore
+            cls.Edge = edge_class
+            _meta.fields["edges"] = Field(
+                NonNull(List(NonNull(edge_class) if strict_types else edge_class)),
+                description="Contains the nodes in this connection.",
+            )
+
         return super(Connection, cls).__init_subclass_with_meta__(
             _meta=_meta, **options
         )


+# noinspection PyPep8Naming
+def connection_adapter(cls, edges, pageInfo):
+    """Adapter for creating Connection instances"""
+    return cls(edges=edges, page_info=pageInfo)
+
+
 class IterableConnectionField(Field):
-    def __init__(self, type, *args, **kwargs):
+    def __init__(self, type_, *args, **kwargs):
         kwargs.setdefault("before", String())
         kwargs.setdefault("after", String())
         kwargs.setdefault("first", Int())
         kwargs.setdefault("last", Int())
-        super(IterableConnectionField, self).__init__(type, *args, **kwargs)
+        super(IterableConnectionField, self).__init__(type_, *args, **kwargs)

     @property
     def type(self):
-        type = super(IterableConnectionField, self).type
-        connection_type = type
-        if isinstance(type, NonNull):
-            connection_type = type.of_type
+        type_ = super(IterableConnectionField, self).type
+        connection_type = type_
+        if isinstance(type_, NonNull):
+            connection_type = type_.of_type

         if is_node(connection_type):
             raise Exception(
-                "ConnectionField's now need a explicit ConnectionType for Nodes.\n"
+                "ConnectionFields now need a explicit ConnectionType for Nodes.\n"
                 "Read more: https://github.com/graphql-python/graphene/blob/v2.0.0/UPGRADE-v2.0.md#node-connections"
             )

-        assert issubclass(connection_type, Connection), (
-            '{} type have to be a subclass of Connection. Received "{}".'
-        ).format(self.__class__.__name__, connection_type)
-        return type
+        assert issubclass(
+            connection_type, Connection
+        ), f'{self.__class__.__name__} type has to be a subclass of Connection. Received "{connection_type}".'
+        return type_

     @classmethod
     def resolve_connection(cls, connection_type, args, resolved):

@ -118,15 +169,15 @@ class IterableConnectionField(Field):
             return resolved

         assert isinstance(resolved, Iterable), (
-            "Resolved value from the connection field have to be iterable or instance of {}. "
-            'Received "{}"'
-        ).format(connection_type, resolved)
-        connection = connection_from_list(
+            f"Resolved value from the connection field has to be an iterable or instance of {connection_type}. "
+            f'Received "{resolved}"'
+        )
+        connection = connection_from_array(
             resolved,
             args,
-            connection_type=connection_type,
+            connection_type=partial(connection_adapter, connection_type),
             edge_type=connection_type.Edge,
-            pageinfo_type=PageInfo,
+            page_info_type=page_info_adapter,
         )
         connection.iterable = resolved
         return connection

@ -139,13 +190,10 @@ class IterableConnectionField(Field):
             connection_type = connection_type.of_type

         on_resolve = partial(cls.resolve_connection, connection_type, args)
-        if is_thenable(resolved):
-            return Promise.resolve(resolved).then(on_resolve)
-
-        return on_resolve(resolved)
+        return maybe_thenable(resolved, on_resolve)

-    def get_resolver(self, parent_resolver):
-        resolver = super(IterableConnectionField, self).get_resolver(parent_resolver)
+    def wrap_resolve(self, parent_resolver):
+        resolver = super(IterableConnectionField, self).wrap_resolve(parent_resolver)
         return partial(self.connection_resolver, resolver, self.type)
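A minimal end-to-end sketch of the new strict_types option and the list-based resolver path (Ship, ShipConnection and Query are made-up names for illustration):

from graphene import ObjectType, Schema, String
from graphene.relay import Connection, ConnectionField


class Ship(ObjectType):  # hypothetical node type
    name = String()


class ShipConnection(Connection):
    class Meta:
        node = Ship
        strict_types = True  # edges and nodes become non-null in the schema


class Query(ObjectType):
    ships = ConnectionField(ShipConnection)

    def resolve_ships(root, info, **kwargs):
        # A plain iterable is enough: resolve_connection() slices it with
        # connection_from_array and wraps the result via connection_adapter
        # and page_info_adapter.
        return [Ship(name="Enterprise"), Ship(name="Voyager")]


schema = Schema(query=Query)
result = schema.execute("{ ships(first: 1) { edges { node { name } } } }")
assert result.data == {"ships": {"edges": [{"node": {"name": "Enterprise"}}]}}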
graphene/relay/id_type.py (new file)
@ -0,0 +1,87 @@
from graphql_relay import from_global_id, to_global_id

from ..types import ID, UUID
from ..types.base import BaseType

from typing import Type


class BaseGlobalIDType:
    """
    Base class that define the required attributes/method for a type.
    """

    graphene_type: Type[BaseType] = ID

    @classmethod
    def resolve_global_id(cls, info, global_id):
        # return _type, _id
        raise NotImplementedError

    @classmethod
    def to_global_id(cls, _type, _id):
        # return _id
        raise NotImplementedError


class DefaultGlobalIDType(BaseGlobalIDType):
    """
    Default global ID type: base64 encoded version of "<node type name>: <node id>".
    """

    graphene_type = ID

    @classmethod
    def resolve_global_id(cls, info, global_id):
        try:
            _type, _id = from_global_id(global_id)
            if not _type:
                raise ValueError("Invalid Global ID")
            return _type, _id
        except Exception as e:
            raise Exception(
                f'Unable to parse global ID "{global_id}". '
                'Make sure it is a base64 encoded string in the format: "TypeName:id". '
                f"Exception message: {e}"
            )

    @classmethod
    def to_global_id(cls, _type, _id):
        return to_global_id(_type, _id)


class SimpleGlobalIDType(BaseGlobalIDType):
    """
    Simple global ID type: simply the id of the object.
    To be used carefully as the user is responsible for ensuring that the IDs are indeed global
    (otherwise it could cause request caching issues).
    """

    graphene_type = ID

    @classmethod
    def resolve_global_id(cls, info, global_id):
        _type = info.return_type.graphene_type._meta.name
        return _type, global_id

    @classmethod
    def to_global_id(cls, _type, _id):
        return _id


class UUIDGlobalIDType(BaseGlobalIDType):
    """
    UUID global ID type.
    By definition UUID are global so they are used as they are.
    """

    graphene_type = UUID

    @classmethod
    def resolve_global_id(cls, info, global_id):
        _type = info.return_type.graphene_type._meta.name
        return _type, global_id

    @classmethod
    def to_global_id(cls, _type, _id):
        return _id
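A minimal usage sketch for the new global ID types (User and Query are made-up names; the tests added later in this changeset exercise the same pattern in more detail):

from graphene import ObjectType, Schema, String
from graphene.relay import Node, UUIDGlobalIDType


class CustomNode(Node):
    class Meta:
        global_id_type = UUIDGlobalIDType  # ids are exposed as UUID! instead of ID!


class User(ObjectType):
    class Meta:
        interfaces = [CustomNode]

    name = String()


class Query(ObjectType):
    user = CustomNode.Field(User)


schema = Schema(query=Query, types=[User])
# The printed SDL now declares `id: UUID!` on both CustomNode and User.
assert "id: UUID!" in str(schema)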
@ -1,10 +1,8 @@
 import re
-from collections import OrderedDict

-from promise import Promise, is_thenable

 from ..types import Field, InputObjectType, String
 from ..types.mutation import Mutation
+from ..utils.thenables import maybe_thenable


 class ClientIDMutation(Mutation):

@ -29,26 +27,24 @@ class ClientIDMutation(Mutation):
         input_fields = {}

         cls.Input = type(
-            "{}Input".format(base_name),
+            f"{base_name}Input",
             bases,
-            OrderedDict(
-                input_fields, client_mutation_id=String(name="clientMutationId")
-            ),
+            dict(input_fields, client_mutation_id=String(name="clientMutationId")),
         )

-        arguments = OrderedDict(
+        arguments = dict(
             input=cls.Input(required=True)
             # 'client_mutation_id': String(name='clientMutationId')
         )
         mutate_and_get_payload = getattr(cls, "mutate_and_get_payload", None)
         if cls.mutate and cls.mutate.__func__ == ClientIDMutation.mutate.__func__:
             assert mutate_and_get_payload, (
-                "{name}.mutate_and_get_payload method is required"
+                f"{name or cls.__name__}.mutate_and_get_payload method is required"
                 " in a ClientIDMutation."
-            ).format(name=name or cls.__name__)
+            )

         if not name:
-            name = "{}Payload".format(base_name)
+            name = f"{base_name}Payload"

         super(ClientIDMutation, cls).__init_subclass_with_meta__(
             output=None, arguments=arguments, name=name, **options

@ -62,14 +58,9 @@ class ClientIDMutation(Mutation):
                 payload.client_mutation_id = input.get("client_mutation_id")
             except Exception:
                 raise Exception(
-                    ("Cannot set client_mutation_id in the payload object {}").format(
-                        repr(payload)
-                    )
+                    f"Cannot set client_mutation_id in the payload object {repr(payload)}"
                 )
             return payload

         result = cls.mutate_and_get_payload(root, info, **input)
-        if is_thenable(result):
-            return Promise.resolve(result).then(on_resolve)
-
-        return on_resolve(result)
+        return maybe_thenable(result, on_resolve)
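For context, a minimal ClientIDMutation sketch showing where mutate_and_get_payload fits (Ship and IntroduceShip are made-up names; the method may also return an awaitable, which mutate() now passes through maybe_thenable before attaching client_mutation_id):

from graphene import Field, ObjectType, String
from graphene.relay import ClientIDMutation


class Ship(ObjectType):  # hypothetical payload type
    name = String()


class IntroduceShip(ClientIDMutation):
    class Input:
        ship_name = String(required=True)

    ship = Field(Ship)

    @classmethod
    def mutate_and_get_payload(cls, root, info, ship_name, client_mutation_id=None):
        # The generated IntroduceShipInput carries clientMutationId automatically.
        return IntroduceShip(ship=Ship(name=ship_name))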
@ -1,12 +1,10 @@
-from collections import OrderedDict
 from functools import partial
 from inspect import isclass

-from graphql_relay import from_global_id, to_global_id
-
-from ..types import ID, Field, Interface, ObjectType
+from ..types import Field, Interface, ObjectType
 from ..types.interface import InterfaceOptions
 from ..types.utils import get_type
+from .id_type import BaseGlobalIDType, DefaultGlobalIDType


 def is_node(objecttype):

@ -19,16 +17,22 @@ def is_node(objecttype):
     if not issubclass(objecttype, ObjectType):
         return False

-    for i in objecttype._meta.interfaces:
-        if issubclass(i, Node):
-            return True
-
-    return False
+    return any(issubclass(i, Node) for i in objecttype._meta.interfaces)


 class GlobalID(Field):
-    def __init__(self, node=None, parent_type=None, required=True, *args, **kwargs):
-        super(GlobalID, self).__init__(ID, required=required, *args, **kwargs)
+    def __init__(
+        self,
+        node=None,
+        parent_type=None,
+        required=True,
+        global_id_type=DefaultGlobalIDType,
+        *args,
+        **kwargs,
+    ):
+        super(GlobalID, self).__init__(
+            global_id_type.graphene_type, required=required, *args, **kwargs
+        )
         self.node = node or Node
         self.parent_type_name = parent_type._meta.name if parent_type else None

@ -38,7 +42,7 @@ class GlobalID(Field):
         parent_type_name = parent_type_name or info.parent_type.name
         return node.to_global_id(parent_type_name, type_id)  # root._meta.name

-    def get_resolver(self, parent_resolver):
+    def wrap_resolve(self, parent_resolver):
         return partial(
             self.id_resolver,
             parent_resolver,

@ -48,20 +52,22 @@ class GlobalID(Field):


 class NodeField(Field):
-    def __init__(self, node, type=False, deprecation_reason=None, name=None, **kwargs):
+    def __init__(self, node, type_=False, **kwargs):
         assert issubclass(node, Node), "NodeField can only operate in Nodes"
         self.node_type = node
-        self.field_type = type
+        self.field_type = type_
+        global_id_type = node._meta.global_id_type

         super(NodeField, self).__init__(
-            # If we don's specify a type, the field type will be the node
-            # interface
-            type or node,
-            description="The ID of the object",
-            id=ID(required=True),
+            # If we don't specify a type, the field type will be the node interface
+            type_ or node,
+            id=global_id_type.graphene_type(
+                required=True, description="The ID of the object"
+            ),
+            **kwargs,
         )

-    def get_resolver(self, parent_resolver):
+    def wrap_resolve(self, parent_resolver):
         return partial(self.node_type.node_resolver, get_type(self.field_type))

@ -70,13 +76,23 @@ class AbstractNode(Interface):
         abstract = True

     @classmethod
-    def __init_subclass_with_meta__(cls, **options):
+    def __init_subclass_with_meta__(cls, global_id_type=DefaultGlobalIDType, **options):
+        assert issubclass(
+            global_id_type, BaseGlobalIDType
+        ), "Custom ID type need to be implemented as a subclass of BaseGlobalIDType."
         _meta = InterfaceOptions(cls)
-        _meta.fields = OrderedDict(
-            id=GlobalID(cls, description="The ID of the object.")
-        )
+        _meta.global_id_type = global_id_type
+        _meta.fields = {
+            "id": GlobalID(
+                cls, global_id_type=global_id_type, description="The ID of the object"
+            )
+        }
         super(AbstractNode, cls).__init_subclass_with_meta__(_meta=_meta, **options)

+    @classmethod
+    def resolve_global_id(cls, info, global_id):
+        return cls._meta.global_id_type.resolve_global_id(info, global_id)
+

 class Node(AbstractNode):
     """An object with an ID"""

@ -91,29 +107,29 @@ class Node(AbstractNode):

     @classmethod
     def get_node_from_global_id(cls, info, global_id, only_type=None):
-        try:
-            _type, _id = cls.from_global_id(global_id)
-            graphene_type = info.schema.get_type(_type).graphene_type
-        except Exception:
-            return None
+        _type, _id = cls.resolve_global_id(info, global_id)
+
+        graphene_type = info.schema.get_type(_type)
+        if graphene_type is None:
+            raise Exception(f'Relay Node "{_type}" not found in schema')
+
+        graphene_type = graphene_type.graphene_type

         if only_type:
-            assert graphene_type == only_type, ("Must receive a {} id.").format(
-                only_type._meta.name
-            )
+            assert (
+                graphene_type == only_type
+            ), f"Must receive a {only_type._meta.name} id."

         # We make sure the ObjectType implements the "Node" interface
         if cls not in graphene_type._meta.interfaces:
-            return None
+            raise Exception(
+                f'ObjectType "{_type}" does not implement the "{cls}" interface.'
+            )

         get_node = getattr(graphene_type, "get_node", None)
         if get_node:
             return get_node(info, _id)

     @classmethod
-    def from_global_id(cls, global_id):
-        return from_global_id(global_id)
-
-    @classmethod
-    def to_global_id(cls, type, id):
-        return to_global_id(type, id)
+    def to_global_id(cls, type_, id):
+        return cls._meta.global_id_type.to_global_id(type_, id)
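A minimal sketch of the Node flow that the rewritten get_node_from_global_id serves (Ship, SHIPS and Query are made up for illustration; with the default ID type the global id is base64("Ship:1")):

from graphene import ObjectType, Schema, String
from graphene.relay import Node

SHIPS = {"1": {"name": "Enterprise"}}  # illustrative in-memory "database"


class Ship(ObjectType):
    class Meta:
        interfaces = (Node,)

    name = String()

    @classmethod
    def get_node(cls, info, id):
        # Called by Node.node_resolver with the decoded per-type id.
        return Ship(id=id, name=SHIPS[id]["name"])


class Query(ObjectType):
    node = Node.Field()


schema = Schema(query=Query, types=[Ship])

global_id = Node.to_global_id("Ship", "1")
result = schema.execute('{ node(id: "%s") { id ... on Ship { name } } }' % global_id)
assert result.data["node"]["name"] == "Enterprise"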
@ -1,7 +1,15 @@
-import pytest
+import re
+
+from pytest import raises

 from ...types import Argument, Field, Int, List, NonNull, ObjectType, Schema, String
-from ..connection import Connection, ConnectionField, PageInfo
+from ..connection import (
+    Connection,
+    ConnectionField,
+    PageInfo,
+    ConnectionOptions,
+    get_edge_class,
+)
 from ..node import Node

@ -24,7 +32,7 @@ def test_connection():

     assert MyObjectConnection._meta.name == "MyObjectConnection"
     fields = MyObjectConnection._meta.fields
-    assert list(fields.keys()) == ["page_info", "edges", "extra"]
+    assert list(fields) == ["page_info", "edges", "extra"]
     edge_field = fields["edges"]
     pageinfo_field = fields["page_info"]

@ -39,7 +47,7 @@ def test_connection():


 def test_connection_inherit_abstracttype():
-    class BaseConnection(object):
+    class BaseConnection:
         extra = String()

     class MyObjectConnection(BaseConnection, Connection):

@ -48,13 +56,118 @@ def test_connection_inherit_abstracttype():

     assert MyObjectConnection._meta.name == "MyObjectConnection"
     fields = MyObjectConnection._meta.fields
-    assert list(fields.keys()) == ["page_info", "edges", "extra"]
+    assert list(fields) == ["page_info", "edges", "extra"]
+
+
+def test_connection_extra_abstract_fields():
+    class ConnectionWithNodes(Connection):
+        class Meta:
+            abstract = True
+
+        @classmethod
+        def __init_subclass_with_meta__(cls, node=None, name=None, **options):
+            _meta = ConnectionOptions(cls)
+
+            _meta.fields = {
+                "nodes": Field(
+                    NonNull(List(node)),
+                    description="Contains all the nodes in this connection.",
+                ),
+            }
+
+            return super(ConnectionWithNodes, cls).__init_subclass_with_meta__(
+                node=node, name=name, _meta=_meta, **options
+            )
+
+    class MyObjectConnection(ConnectionWithNodes):
+        class Meta:
+            node = MyObject
+
+        class Edge:
+            other = String()
+
+    assert MyObjectConnection._meta.name == "MyObjectConnection"
+    fields = MyObjectConnection._meta.fields
+    assert list(fields) == ["nodes", "page_info", "edges"]
+    edge_field = fields["edges"]
+    pageinfo_field = fields["page_info"]
+    nodes_field = fields["nodes"]
+
+    assert isinstance(edge_field, Field)
+    assert isinstance(edge_field.type, NonNull)
+    assert isinstance(edge_field.type.of_type, List)
+    assert edge_field.type.of_type.of_type == MyObjectConnection.Edge
+
+    assert isinstance(pageinfo_field, Field)
+    assert isinstance(pageinfo_field.type, NonNull)
+    assert pageinfo_field.type.of_type == PageInfo
+
+    assert isinstance(nodes_field, Field)
+    assert isinstance(nodes_field.type, NonNull)
+    assert isinstance(nodes_field.type.of_type, List)
+    assert nodes_field.type.of_type.of_type == MyObject
+
+
+def test_connection_override_fields():
+    class ConnectionWithNodes(Connection):
+        class Meta:
+            abstract = True
+
+        @classmethod
+        def __init_subclass_with_meta__(cls, node=None, name=None, **options):
+            _meta = ConnectionOptions(cls)
+            base_name = (
+                re.sub("Connection$", "", name or cls.__name__) or node._meta.name
+            )
+
+            edge_class = get_edge_class(cls, node, base_name)
+
+            _meta.fields = {
+                "page_info": Field(
+                    NonNull(
+                        PageInfo,
+                        name="pageInfo",
+                        required=True,
+                        description="Pagination data for this connection.",
+                    )
+                ),
+                "edges": Field(
+                    NonNull(List(NonNull(edge_class))),
+                    description="Contains the nodes in this connection.",
+                ),
+            }
+
+            return super(ConnectionWithNodes, cls).__init_subclass_with_meta__(
+                node=node, name=name, _meta=_meta, **options
+            )
+
+    class MyObjectConnection(ConnectionWithNodes):
+        class Meta:
+            node = MyObject
+
+    assert MyObjectConnection._meta.name == "MyObjectConnection"
+    fields = MyObjectConnection._meta.fields
+    assert list(fields) == ["page_info", "edges"]
+    edge_field = fields["edges"]
+    pageinfo_field = fields["page_info"]
+
+    assert isinstance(edge_field, Field)
+    assert isinstance(edge_field.type, NonNull)
+    assert isinstance(edge_field.type.of_type, List)
+    assert isinstance(edge_field.type.of_type.of_type, NonNull)
+
+    assert edge_field.type.of_type.of_type.of_type.__name__ == "MyObjectEdge"
+
+    # This page info is NonNull
+    assert isinstance(pageinfo_field, Field)
+    assert isinstance(edge_field.type, NonNull)
+    assert pageinfo_field.type.of_type == PageInfo


 def test_connection_name():
     custom_name = "MyObjectCustomNameConnection"

-    class BaseConnection(object):
+    class BaseConnection:
         extra = String()

     class MyObjectConnection(BaseConnection, Connection):

@ -76,7 +189,7 @@ def test_edge():
     Edge = MyObjectConnection.Edge
     assert Edge._meta.name == "MyObjectEdge"
     edge_fields = Edge._meta.fields
-    assert list(edge_fields.keys()) == ["node", "cursor", "other"]
+    assert list(edge_fields) == ["node", "cursor", "other"]

     assert isinstance(edge_fields["node"], Field)
     assert edge_fields["node"].type == MyObject

@ -86,7 +199,7 @@ def test_edge():


 def test_edge_with_bases():
-    class BaseEdge(object):
+    class BaseEdge:
         extra = String()

     class MyObjectConnection(Connection):

@ -99,7 +212,7 @@ def test_edge_with_bases():
     Edge = MyObjectConnection.Edge
     assert Edge._meta.name == "MyObjectEdge"
     edge_fields = Edge._meta.fields
-    assert list(edge_fields.keys()) == ["node", "cursor", "extra", "other"]
+    assert list(edge_fields) == ["node", "cursor", "extra", "other"]

     assert isinstance(edge_fields["node"], Field)
     assert edge_fields["node"].type == MyObject

@ -108,10 +221,21 @@ def test_edge_with_bases():
     assert edge_fields["other"].type == String


+def test_edge_with_nonnull_node():
+    class MyObjectConnection(Connection):
+        class Meta:
+            node = NonNull(MyObject)
+
+    edge_fields = MyObjectConnection.Edge._meta.fields
+    assert isinstance(edge_fields["node"], Field)
+    assert isinstance(edge_fields["node"].type, NonNull)
+    assert edge_fields["node"].type.of_type == MyObject
+
+
 def test_pageinfo():
     assert PageInfo._meta.name == "PageInfo"
     fields = PageInfo._meta.fields
-    assert list(fields.keys()) == [
+    assert list(fields) == [
         "has_next_page",
         "has_previous_page",
         "start_cursor",

@ -135,10 +259,10 @@ def test_connectionfield():

 def test_connectionfield_node_deprecated():
     field = ConnectionField(MyObject)
-    with pytest.raises(Exception) as exc_info:
+    with raises(Exception) as exc_info:
         field.type

-    assert "ConnectionField's now need a explicit ConnectionType for Nodes." in str(
+    assert "ConnectionFields now need a explicit ConnectionType for Nodes." in str(
         exc_info.value
     )

@ -175,3 +299,20 @@ def test_connectionfield_required():
     executed = schema.execute("{ testConnection { edges { cursor } } }")
     assert not executed.errors
     assert executed.data == {"testConnection": {"edges": []}}
+
+
+def test_connectionfield_strict_types():
+    class MyObjectConnection(Connection):
+        class Meta:
+            node = MyObject
+            strict_types = True
+
+    connection_field = ConnectionField(MyObjectConnection)
+    edges_field_type = connection_field.type._meta.fields["edges"].type
+    assert isinstance(edges_field_type, NonNull)
+
+    edges_list_element_type = edges_field_type.of_type.of_type
+    assert isinstance(edges_list_element_type, NonNull)
+
+    node_field = edges_list_element_type.of_type._meta.fields["node"]
+    assert isinstance(node_field.type, NonNull)
graphene/relay/tests/test_connection_async.py (new file)
@ -0,0 +1,121 @@
from pytest import mark

from graphql_relay.utils import base64

from graphene.types import ObjectType, Schema, String
from graphene.relay.connection import Connection, ConnectionField, PageInfo
from graphene.relay.node import Node

letter_chars = ["A", "B", "C", "D", "E"]


class Letter(ObjectType):
    class Meta:
        interfaces = (Node,)

    letter = String()


class LetterConnection(Connection):
    class Meta:
        node = Letter


class Query(ObjectType):
    letters = ConnectionField(LetterConnection)
    connection_letters = ConnectionField(LetterConnection)
    async_letters = ConnectionField(LetterConnection)

    node = Node.Field()

    def resolve_letters(self, info, **args):
        return list(letters.values())

    async def resolve_async_letters(self, info, **args):
        return list(letters.values())

    def resolve_connection_letters(self, info, **args):
        return LetterConnection(
            page_info=PageInfo(has_next_page=True, has_previous_page=False),
            edges=[
                LetterConnection.Edge(node=Letter(id=0, letter="A"), cursor="a-cursor")
            ],
        )


schema = Schema(Query)

letters = {letter: Letter(id=i, letter=letter) for i, letter in enumerate(letter_chars)}


def edges(selected_letters):
    return [
        {
            "node": {"id": base64("Letter:%s" % letter.id), "letter": letter.letter},
            "cursor": base64("arrayconnection:%s" % letter.id),
        }
        for letter in [letters[i] for i in selected_letters]
    ]


def cursor_for(ltr):
    letter = letters[ltr]
    return base64("arrayconnection:%s" % letter.id)


def execute(args=""):
    if args:
        args = "(" + args + ")"

    return schema.execute(
        """
    {
        letters%s {
            edges {
                node {
                    id
                    letter
                }
                cursor
            }
            pageInfo {
                hasPreviousPage
                hasNextPage
                startCursor
                endCursor
            }
        }
    }
    """
        % args
    )


@mark.asyncio
async def test_connection_async():
    result = await schema.execute_async(
        """
    {
        asyncLetters(first:1) {
            edges {
                node {
                    id
                    letter
                }
            }
            pageInfo {
                hasPreviousPage
                hasNextPage
            }
        }
    }
    """
    )

    assert not result.errors
    assert result.data == {
        "asyncLetters": {
            "edges": [{"node": {"id": "TGV0dGVyOjA=", "letter": "A"}}],
            "pageInfo": {"hasPreviousPage": False, "hasNextPage": True},
        }
    }
@ -1,7 +1,6 @@
-from collections import OrderedDict
+from pytest import mark

 from graphql_relay.utils import base64
-from promise import Promise

 from ...types import ObjectType, Schema, String
 from ..connection import Connection, ConnectionField, PageInfo

@ -25,15 +24,15 @@ class LetterConnection(Connection):
 class Query(ObjectType):
     letters = ConnectionField(LetterConnection)
     connection_letters = ConnectionField(LetterConnection)
-    promise_letters = ConnectionField(LetterConnection)
+    async_letters = ConnectionField(LetterConnection)

     node = Node.Field()

     def resolve_letters(self, info, **args):
         return list(letters.values())

-    def resolve_promise_letters(self, info, **args):
-        return Promise.resolve(list(letters.values()))
+    async def resolve_async_letters(self, info, **args):
+        return list(letters.values())

     def resolve_connection_letters(self, info, **args):
         return LetterConnection(

@ -46,18 +45,16 @@ class Query(ObjectType):

 schema = Schema(Query)

-letters = OrderedDict()
-for i, letter in enumerate(letter_chars):
-    letters[letter] = Letter(id=i, letter=letter)
+letters = {letter: Letter(id=i, letter=letter) for i, letter in enumerate(letter_chars)}


 def edges(selected_letters):
     return [
         {
-            "node": {"id": base64("Letter:%s" % l.id), "letter": l.letter},
-            "cursor": base64("arrayconnection:%s" % l.id),
+            "node": {"id": base64("Letter:%s" % letter.id), "letter": letter.letter},
+            "cursor": base64("arrayconnection:%s" % letter.id),
         }
-        for l in [letters[i] for i in selected_letters]
+        for letter in [letters[i] for i in selected_letters]
     ]


@ -66,11 +63,10 @@ def cursor_for(ltr):
     return base64("arrayconnection:%s" % letter.id)


-def execute(args=""):
+async def execute(args=""):
     if args:
         args = "(" + args + ")"

-    return schema.execute(
+    return await schema.execute_async(
         """
    {
        letters%s {

@ -94,8 +90,8 @@ def execute(args=""):
     )


-def check(args, letters, has_previous_page=False, has_next_page=False):
-    result = execute(args)
+async def check(args, letters, has_previous_page=False, has_next_page=False):
+    result = await execute(args)
     expected_edges = edges(letters)
     expected_page_info = {
         "hasPreviousPage": has_previous_page,

@ -110,114 +106,126 @@ def check(args, letters, has_previous_page=False, has_next_page=False):
     }


-def test_returns_all_elements_without_filters():
-    check("", "ABCDE")
+@mark.asyncio
+async def test_returns_all_elements_without_filters():
+    await check("", "ABCDE")


-def test_respects_a_smaller_first():
-    check("first: 2", "AB", has_next_page=True)
+@mark.asyncio
+async def test_respects_a_smaller_first():
+    await check("first: 2", "AB", has_next_page=True)


-def test_respects_an_overly_large_first():
-    check("first: 10", "ABCDE")
+@mark.asyncio
+async def test_respects_an_overly_large_first():
+    await check("first: 10", "ABCDE")


-def test_respects_a_smaller_last():
-    check("last: 2", "DE", has_previous_page=True)
+@mark.asyncio
+async def test_respects_a_smaller_last():
+    await check("last: 2", "DE", has_previous_page=True)


-def test_respects_an_overly_large_last():
-    check("last: 10", "ABCDE")
+@mark.asyncio
+async def test_respects_an_overly_large_last():
+    await check("last: 10", "ABCDE")


-def test_respects_first_and_after():
-    check('first: 2, after: "{}"'.format(cursor_for("B")), "CD", has_next_page=True)
+@mark.asyncio
+async def test_respects_first_and_after():
+    await check(f'first: 2, after: "{cursor_for("B")}"', "CD", has_next_page=True)


-def test_respects_first_and_after_with_long_first():
-    check('first: 10, after: "{}"'.format(cursor_for("B")), "CDE")
+@mark.asyncio
+async def test_respects_first_and_after_with_long_first():
+    await check(f'first: 10, after: "{cursor_for("B")}"', "CDE")


-def test_respects_last_and_before():
-    check('last: 2, before: "{}"'.format(cursor_for("D")), "BC", has_previous_page=True)
+@mark.asyncio
+async def test_respects_last_and_before():
+    await check(f'last: 2, before: "{cursor_for("D")}"', "BC", has_previous_page=True)


-def test_respects_last_and_before_with_long_last():
-    check('last: 10, before: "{}"'.format(cursor_for("D")), "ABC")
+@mark.asyncio
+async def test_respects_last_and_before_with_long_last():
+    await check(f'last: 10, before: "{cursor_for("D")}"', "ABC")


-def test_respects_first_and_after_and_before_too_few():
-    check(
-        'first: 2, after: "{}", before: "{}"'.format(cursor_for("A"), cursor_for("E")),
+@mark.asyncio
+async def test_respects_first_and_after_and_before_too_few():
+    await check(
+        f'first: 2, after: "{cursor_for("A")}", before: "{cursor_for("E")}"',
         "BC",
         has_next_page=True,
     )


-def test_respects_first_and_after_and_before_too_many():
-    check(
-        'first: 4, after: "{}", before: "{}"'.format(cursor_for("A"), cursor_for("E")),
-        "BCD",
+@mark.asyncio
+async def test_respects_first_and_after_and_before_too_many():
+    await check(
+        f'first: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD"
     )


-def test_respects_first_and_after_and_before_exactly_right():
-    check(
-        'first: 3, after: "{}", before: "{}"'.format(cursor_for("A"), cursor_for("E")),
-        "BCD",
+@mark.asyncio
+async def test_respects_first_and_after_and_before_exactly_right():
+    await check(
+        f'first: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD"
    )


-def test_respects_last_and_after_and_before_too_few():
-    check(
-        'last: 2, after: "{}", before: "{}"'.format(cursor_for("A"), cursor_for("E")),
+@mark.asyncio
+async def test_respects_last_and_after_and_before_too_few():
+    await check(
+        f'last: 2, after: "{cursor_for("A")}", before: "{cursor_for("E")}"',
         "CD",
         has_previous_page=True,
     )


-def test_respects_last_and_after_and_before_too_many():
-    check(
-        'last: 4, after: "{}", before: "{}"'.format(cursor_for("A"), cursor_for("E")),
-        "BCD",
+@mark.asyncio
+async def test_respects_last_and_after_and_before_too_many():
+    await check(
+        f'last: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD"
     )


-def test_respects_last_and_after_and_before_exactly_right():
-    check(
-        'last: 3, after: "{}", before: "{}"'.format(cursor_for("A"), cursor_for("E")),
-        "BCD",
+@mark.asyncio
+async def test_respects_last_and_after_and_before_exactly_right():
+    await check(
+        f'last: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD"
     )


-def test_returns_no_elements_if_first_is_0():
-    check("first: 0", "", has_next_page=True)
+@mark.asyncio
+async def test_returns_no_elements_if_first_is_0():
+    await check("first: 0", "", has_next_page=True)


-def test_returns_all_elements_if_cursors_are_invalid():
-    check('before: "invalid" after: "invalid"', "ABCDE")
+@mark.asyncio
+async def test_returns_all_elements_if_cursors_are_invalid():
+    await check('before: "invalid" after: "invalid"', "ABCDE")


-def test_returns_all_elements_if_cursors_are_on_the_outside():
-    check(
-        'before: "{}" after: "{}"'.format(
-            base64("arrayconnection:%s" % 6), base64("arrayconnection:%s" % -1)
-        ),
+@mark.asyncio
+async def test_returns_all_elements_if_cursors_are_on_the_outside():
+    await check(
+        f'before: "{base64("arrayconnection:%s" % 6)}" after: "{base64("arrayconnection:%s" % -1)}"',
         "ABCDE",
     )


-def test_returns_no_elements_if_cursors_cross():
-    check(
-        'before: "{}" after: "{}"'.format(
-            base64("arrayconnection:%s" % 2), base64("arrayconnection:%s" % 4)
-        ),
+@mark.asyncio
+async def test_returns_no_elements_if_cursors_cross():
+    await check(
+        f'before: "{base64("arrayconnection:%s" % 2)}" after: "{base64("arrayconnection:%s" % 4)}"',
         "",
     )


-def test_connection_type_nodes():
-    result = schema.execute(
+@mark.asyncio
+async def test_connection_type_nodes():
+    result = await schema.execute_async(
         """
    {
        connectionLetters {

@ -248,11 +256,12 @@ def test_connection_type_nodes():
     }


-def test_connection_promise():
-    result = schema.execute(
+@mark.asyncio
+async def test_connection_async():
+    result = await schema.execute_async(
         """
    {
-        promiseLetters(first:1) {
+        asyncLetters(first:1) {
             edges {
                 node {
                     id

@ -270,7 +279,7 @@ def test_connection_promise():

     assert not result.errors
     assert result.data == {
-        "promiseLetters": {
+        "asyncLetters": {
             "edges": [{"node": {"id": "TGV0dGVyOjA=", "letter": "A"}}],
             "pageInfo": {"hasPreviousPage": False, "hasNextPage": True},
         }
graphene/relay/tests/test_custom_global_id.py (new file; the listing below is cut off mid-file, as in the source)
@ -0,0 +1,325 @@
import re
from uuid import uuid4

from graphql import graphql_sync

from ..id_type import BaseGlobalIDType, SimpleGlobalIDType, UUIDGlobalIDType
from ..node import Node
from ...types import Int, ObjectType, Schema, String


class TestUUIDGlobalID:
    def setup_method(self):
        self.user_list = [
            {"id": uuid4(), "name": "First"},
            {"id": uuid4(), "name": "Second"},
            {"id": uuid4(), "name": "Third"},
            {"id": uuid4(), "name": "Fourth"},
        ]
        self.users = {user["id"]: user for user in self.user_list}

        class CustomNode(Node):
            class Meta:
                global_id_type = UUIDGlobalIDType

        class User(ObjectType):
            class Meta:
                interfaces = [CustomNode]

            name = String()

            @classmethod
            def get_node(cls, _type, _id):
                return self.users[_id]

        class RootQuery(ObjectType):
            user = CustomNode.Field(User)

        self.schema = Schema(query=RootQuery, types=[User])
        self.graphql_schema = self.schema.graphql_schema

    def test_str_schema_correct(self):
        """
        Check that the schema has the expected and custom node interface and user type and that they both use UUIDs
        """
        parsed = re.findall(r"(.+) \{\n\s*([\w\W]*?)\n\}", str(self.schema))
        types = [t for t, f in parsed]
        fields = [f for t, f in parsed]
        custom_node_interface = "interface CustomNode"
        assert custom_node_interface in types
        assert (
            '"""The ID of the object"""\n  id: UUID!'
            == fields[types.index(custom_node_interface)]
        )
        user_type = "type User implements CustomNode"
        assert user_type in types
        assert (
            '"""The ID of the object"""\n  id: UUID!\n  name: String'
            == fields[types.index(user_type)]
        )

    def test_get_by_id(self):
        query = """query userById($id: UUID!) {
            user(id: $id) {
                id
                name
            }
        }"""
        # UUID need to be converted to string for serialization
        result = graphql_sync(
            self.graphql_schema,
            query,
            variable_values={"id": str(self.user_list[0]["id"])},
        )
        assert not result.errors
        assert result.data["user"]["id"] == str(self.user_list[0]["id"])
        assert result.data["user"]["name"] == self.user_list[0]["name"]


class TestSimpleGlobalID:
    def setup_method(self):
        self.user_list = [
            {"id": "my global primary key in clear 1", "name": "First"},
            {"id": "my global primary key in clear 2", "name": "Second"},
            {"id": "my global primary key in clear 3", "name": "Third"},
            {"id": "my global primary key in clear 4", "name": "Fourth"},
        ]
        self.users = {user["id"]: user for user in self.user_list}

        class CustomNode(Node):
            class Meta:
                global_id_type = SimpleGlobalIDType

        class User(ObjectType):
            class Meta:
                interfaces = [CustomNode]

            name = String()

            @classmethod
            def get_node(cls, _type, _id):
                return self.users[_id]

        class RootQuery(ObjectType):
            user = CustomNode.Field(User)

        self.schema = Schema(query=RootQuery, types=[User])
        self.graphql_schema = self.schema.graphql_schema

    def test_str_schema_correct(self):
        """
        Check that the schema has the expected and custom node interface and user type and that they both use UUIDs
        """
        parsed = re.findall(r"(.+) \{\n\s*([\w\W]*?)\n\}", str(self.schema))
        types = [t for t, f in parsed]
        fields = [f for t, f in parsed]
        custom_node_interface = "interface CustomNode"
        assert custom_node_interface in types
        assert (
            '"""The ID of the object"""\n  id: ID!'
            == fields[types.index(custom_node_interface)]
        )
        user_type = "type User implements CustomNode"
        assert user_type in types
        assert (
            '"""The ID of the object"""\n  id: ID!\n  name: String'
            == fields[types.index(user_type)]
        )

    def test_get_by_id(self):
        query = """query {
            user(id: "my global primary key in clear 3") {
                id
                name
            }
        }"""
        result = graphql_sync(self.graphql_schema, query)
        assert not result.errors
        assert result.data["user"]["id"] == self.user_list[2]["id"]
        assert result.data["user"]["name"] == self.user_list[2]["name"]


class TestCustomGlobalID:
    def setup_method(self):
        self.user_list = [
            {"id": 1, "name": "First"},
            {"id": 2, "name": "Second"},
            {"id": 3, "name": "Third"},
            {"id": 4, "name": "Fourth"},
        ]
        self.users = {user["id"]: user for user in self.user_list}

        class CustomGlobalIDType(BaseGlobalIDType):
            """
            Global id that is simply and integer in clear.
            """

            graphene_type = Int

            @classmethod
            def resolve_global_id(cls, info, global_id):
                _type = info.return_type.graphene_type._meta.name
                return _type, global_id

            @classmethod
            def to_global_id(cls, _type, _id):
                return _id

        class CustomNode(Node):
            class Meta:
                global_id_type = CustomGlobalIDType

        class User(ObjectType):
            class Meta:
                interfaces = [CustomNode]

            name = String()

            @classmethod
            def get_node(cls, _type, _id):
                return self.users[_id]

        class RootQuery(ObjectType):
            user = CustomNode.Field(User)

        self.schema = Schema(query=RootQuery, types=[User])
        self.graphql_schema = self.schema.graphql_schema

    def test_str_schema_correct(self):
        """
        Check that the schema has the expected and custom node interface and user type and that they both use UUIDs
        """
        parsed = re.findall(r"(.+) \{\n\s*([\w\W]*?)\n\}", str(self.schema))
        types = [t for t, f in parsed]
        fields = [f for t, f in parsed]
        custom_node_interface = "interface CustomNode"
        assert custom_node_interface in types
        assert (
            '"""The ID of the object"""\n  id: Int!'
|
||||||
|
== fields[types.index(custom_node_interface)]
|
||||||
|
)
|
||||||
|
user_type = "type User implements CustomNode"
|
||||||
|
assert user_type in types
|
||||||
|
assert (
|
||||||
|
'"""The ID of the object"""\n id: Int!\n name: String'
|
||||||
|
== fields[types.index(user_type)]
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_get_by_id(self):
|
||||||
|
query = """query {
|
||||||
|
user(id: 2) {
|
||||||
|
id
|
||||||
|
name
|
||||||
|
}
|
||||||
|
}"""
|
||||||
|
result = graphql_sync(self.graphql_schema, query)
|
||||||
|
assert not result.errors
|
||||||
|
assert result.data["user"]["id"] == self.user_list[1]["id"]
|
||||||
|
assert result.data["user"]["name"] == self.user_list[1]["name"]
|
||||||
|
|
||||||
|
|
||||||
|
class TestIncompleteCustomGlobalID:
|
||||||
|
def setup_method(self):
|
||||||
|
self.user_list = [
|
||||||
|
{"id": 1, "name": "First"},
|
||||||
|
{"id": 2, "name": "Second"},
|
||||||
|
{"id": 3, "name": "Third"},
|
||||||
|
{"id": 4, "name": "Fourth"},
|
||||||
|
]
|
||||||
|
self.users = {user["id"]: user for user in self.user_list}
|
||||||
|
|
||||||
|
def test_must_define_to_global_id(self):
|
||||||
|
"""
|
||||||
|
Test that if the `to_global_id` method is not defined, we can query the object, but we can't request its ID.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class CustomGlobalIDType(BaseGlobalIDType):
|
||||||
|
graphene_type = Int
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def resolve_global_id(cls, info, global_id):
|
||||||
|
_type = info.return_type.graphene_type._meta.name
|
||||||
|
return _type, global_id
|
||||||
|
|
||||||
|
class CustomNode(Node):
|
||||||
|
class Meta:
|
||||||
|
global_id_type = CustomGlobalIDType
|
||||||
|
|
||||||
|
class User(ObjectType):
|
||||||
|
class Meta:
|
||||||
|
interfaces = [CustomNode]
|
||||||
|
|
||||||
|
name = String()
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def get_node(cls, _type, _id):
|
||||||
|
return self.users[_id]
|
||||||
|
|
||||||
|
class RootQuery(ObjectType):
|
||||||
|
user = CustomNode.Field(User)
|
||||||
|
|
||||||
|
self.schema = Schema(query=RootQuery, types=[User])
|
||||||
|
self.graphql_schema = self.schema.graphql_schema
|
||||||
|
|
||||||
|
query = """query {
|
||||||
|
user(id: 2) {
|
||||||
|
name
|
||||||
|
}
|
||||||
|
}"""
|
||||||
|
result = graphql_sync(self.graphql_schema, query)
|
||||||
|
assert not result.errors
|
||||||
|
assert result.data["user"]["name"] == self.user_list[1]["name"]
|
||||||
|
|
||||||
|
query = """query {
|
||||||
|
user(id: 2) {
|
||||||
|
id
|
||||||
|
name
|
||||||
|
}
|
||||||
|
}"""
|
||||||
|
result = graphql_sync(self.graphql_schema, query)
|
||||||
|
assert result.errors is not None
|
||||||
|
assert len(result.errors) == 1
|
||||||
|
assert result.errors[0].path == ["user", "id"]
|
||||||
|
|
||||||
|
def test_must_define_resolve_global_id(self):
|
||||||
|
"""
|
||||||
|
Test that if the `resolve_global_id` method is not defined, we can't query the object by ID.
|
||||||
|
"""
|
||||||
|
|
||||||
|
class CustomGlobalIDType(BaseGlobalIDType):
|
||||||
|
graphene_type = Int
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def to_global_id(cls, _type, _id):
|
||||||
|
return _id
|
||||||
|
|
||||||
|
class CustomNode(Node):
|
||||||
|
class Meta:
|
||||||
|
global_id_type = CustomGlobalIDType
|
||||||
|
|
||||||
|
class User(ObjectType):
|
||||||
|
class Meta:
|
||||||
|
interfaces = [CustomNode]
|
||||||
|
|
||||||
|
name = String()
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def get_node(cls, _type, _id):
|
||||||
|
return self.users[_id]
|
||||||
|
|
||||||
|
class RootQuery(ObjectType):
|
||||||
|
user = CustomNode.Field(User)
|
||||||
|
|
||||||
|
self.schema = Schema(query=RootQuery, types=[User])
|
||||||
|
self.graphql_schema = self.schema.graphql_schema
|
||||||
|
|
||||||
|
query = """query {
|
||||||
|
user(id: 2) {
|
||||||
|
id
|
||||||
|
name
|
||||||
|
}
|
||||||
|
}"""
|
||||||
|
result = graphql_sync(self.graphql_schema, query)
|
||||||
|
assert result.errors is not None
|
||||||
|
assert len(result.errors) == 1
|
||||||
|
assert result.errors[0].path == ["user"]
|
|
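As a rough summary of what the tests above exercise (this sketch is not part of the diff, and the import path assumes a graphene release where `BaseGlobalIDType` is re-exported from `graphene.relay`): a complete custom global ID type declares the schema-level `graphene_type` and implements both conversion hooks, and is then attached to a `Node` subclass via `Meta.global_id_type`.

# Minimal sketch, assuming BaseGlobalIDType is importable from graphene.relay.
from graphene import Int
from graphene.relay import BaseGlobalIDType, Node


class IntGlobalIDType(BaseGlobalIDType):
    graphene_type = Int  # type used for every `id` field in the schema

    @classmethod
    def to_global_id(cls, _type, _id):
        return _id  # expose the raw id unchanged

    @classmethod
    def resolve_global_id(cls, info, global_id):
        # recover the concrete type name from the field being resolved
        _type = info.return_type.graphene_type._meta.name
        return _type, global_id


class IntNode(Node):
    class Meta:
        global_id_type = IntGlobalIDType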
@@ -17,7 +17,7 @@ class User(ObjectType):
     name = String()
 
 
-class Info(object):
+class Info:
     def __init__(self, parent_type):
         self.parent_type = GrapheneObjectType(
             graphene_type=parent_type,
@@ -45,7 +45,7 @@ def test_global_id_allows_overriding_of_node_and_required():
 def test_global_id_defaults_to_info_parent_type():
     my_id = "1"
     gid = GlobalID()
-    id_resolver = gid.get_resolver(lambda *_: my_id)
+    id_resolver = gid.wrap_resolve(lambda *_: my_id)
     my_global_id = id_resolver(None, Info(User))
     assert my_global_id == to_global_id(User._meta.name, my_id)
 
@@ -53,6 +53,6 @@ def test_global_id_defaults_to_info_parent_type():
 def test_global_id_allows_setting_customer_parent_type():
     my_id = "1"
     gid = GlobalID(parent_type=User)
-    id_resolver = gid.get_resolver(lambda *_: my_id)
+    id_resolver = gid.wrap_resolve(lambda *_: my_id)
     my_global_id = id_resolver(None, None)
     assert my_global_id == to_global_id(User._meta.name, my_id)

@@ -1,5 +1,4 @@
-import pytest
-from promise import Promise
+from pytest import mark, raises
 
 from ...types import (
     ID,
@@ -15,7 +14,7 @@ from ...types.scalars import String
 from ..mutation import ClientIDMutation
 
 
-class SharedFields(object):
+class SharedFields:
     shared = String()
 
 
@@ -37,7 +36,7 @@ class SaySomething(ClientIDMutation):
         return SaySomething(phrase=str(what))
 
 
-class FixedSaySomething(object):
+class FixedSaySomething:
     __slots__ = ("phrase",)
 
     def __init__(self, phrase):
@@ -55,15 +54,15 @@ class SaySomethingFixed(ClientIDMutation):
         return FixedSaySomething(phrase=str(what))
 
 
-class SaySomethingPromise(ClientIDMutation):
+class SaySomethingAsync(ClientIDMutation):
     class Input:
         what = String()
 
     phrase = String()
 
     @staticmethod
-    def mutate_and_get_payload(self, info, what, client_mutation_id=None):
-        return Promise.resolve(SaySomething(phrase=str(what)))
+    async def mutate_and_get_payload(self, info, what, client_mutation_id=None):
+        return SaySomething(phrase=str(what))
 
 
 # MyEdge = MyNode.Connection.Edge
@@ -97,7 +96,7 @@ class RootQuery(ObjectType):
 class Mutation(ObjectType):
     say = SaySomething.Field()
     say_fixed = SaySomethingFixed.Field()
-    say_promise = SaySomethingPromise.Field()
+    say_async = SaySomethingAsync.Field()
     other = OtherMutation.Field()
 
 
@@ -105,7 +104,7 @@ schema = Schema(query=RootQuery, mutation=Mutation)
 
 
 def test_no_mutate_and_get_payload():
-    with pytest.raises(AssertionError) as excinfo:
+    with raises(AssertionError) as excinfo:
 
         class MyMutation(ClientIDMutation):
             pass
@@ -118,12 +117,12 @@ def test_no_mutate_and_get_payload():
 
 def test_mutation():
     fields = SaySomething._meta.fields
-    assert list(fields.keys()) == ["phrase", "client_mutation_id"]
+    assert list(fields) == ["phrase", "client_mutation_id"]
     assert SaySomething._meta.name == "SaySomethingPayload"
     assert isinstance(fields["phrase"], Field)
     field = SaySomething.Field()
    assert field.type == SaySomething
-    assert list(field.args.keys()) == ["input"]
+    assert list(field.args) == ["input"]
     assert isinstance(field.args["input"], Argument)
     assert isinstance(field.args["input"].type, NonNull)
     assert field.args["input"].type.of_type == SaySomething.Input
@@ -136,7 +135,7 @@ def test_mutation_input():
     Input = SaySomething.Input
     assert issubclass(Input, InputObjectType)
     fields = Input._meta.fields
-    assert list(fields.keys()) == ["what", "client_mutation_id"]
+    assert list(fields) == ["what", "client_mutation_id"]
     assert isinstance(fields["what"], InputField)
     assert fields["what"].type == String
     assert isinstance(fields["client_mutation_id"], InputField)
@@ -145,11 +144,11 @@ def test_mutation_input():
 
 def test_subclassed_mutation():
     fields = OtherMutation._meta.fields
-    assert list(fields.keys()) == ["name", "my_node_edge", "client_mutation_id"]
+    assert list(fields) == ["name", "my_node_edge", "client_mutation_id"]
     assert isinstance(fields["name"], Field)
     field = OtherMutation.Field()
     assert field.type == OtherMutation
-    assert list(field.args.keys()) == ["input"]
+    assert list(field.args) == ["input"]
     assert isinstance(field.args["input"], Argument)
     assert isinstance(field.args["input"].type, NonNull)
     assert field.args["input"].type.of_type == OtherMutation.Input
@@ -159,7 +158,7 @@ def test_subclassed_mutation_input():
     Input = OtherMutation.Input
     assert issubclass(Input, InputObjectType)
     fields = Input._meta.fields
-    assert list(fields.keys()) == ["shared", "additional_field", "client_mutation_id"]
+    assert list(fields) == ["shared", "additional_field", "client_mutation_id"]
     assert isinstance(fields["shared"], InputField)
     assert fields["shared"].type == String
     assert isinstance(fields["additional_field"], InputField)
@@ -185,12 +184,13 @@ def test_node_query_fixed():
     )
 
 
-def test_node_query_promise():
-    executed = schema.execute(
-        'mutation a { sayPromise(input: {what:"hello", clientMutationId:"1"}) { phrase } }'
+@mark.asyncio
+async def test_node_query_async():
+    executed = await schema.execute_async(
+        'mutation a { sayAsync(input: {what:"hello", clientMutationId:"1"}) { phrase } }'
     )
     assert not executed.errors
-    assert executed.data == {"sayPromise": {"phrase": "hello"}}
+    assert executed.data == {"sayAsync": {"phrase": "hello"}}
 
 
 def test_edge_query():

graphene/relay/tests/test_mutation_async.py (new file, 90 lines)
@@ -0,0 +1,90 @@
from pytest import mark

from graphene.types import ID, Field, ObjectType, Schema
from graphene.types.scalars import String
from graphene.relay.mutation import ClientIDMutation
from graphene.test import Client


class SharedFields(object):
    shared = String()


class MyNode(ObjectType):
    # class Meta:
    #     interfaces = (Node, )
    id = ID()
    name = String()


class SaySomethingAsync(ClientIDMutation):
    class Input:
        what = String()

    phrase = String()

    @staticmethod
    async def mutate_and_get_payload(self, info, what, client_mutation_id=None):
        return SaySomethingAsync(phrase=str(what))


# MyEdge = MyNode.Connection.Edge
class MyEdge(ObjectType):
    node = Field(MyNode)
    cursor = String()


class OtherMutation(ClientIDMutation):
    class Input(SharedFields):
        additional_field = String()

    name = String()
    my_node_edge = Field(MyEdge)

    @staticmethod
    def mutate_and_get_payload(
        self, info, shared="", additional_field="", client_mutation_id=None
    ):
        edge_type = MyEdge
        return OtherMutation(
            name=shared + additional_field,
            my_node_edge=edge_type(cursor="1", node=MyNode(name="name")),
        )


class RootQuery(ObjectType):
    something = String()


class Mutation(ObjectType):
    say_promise = SaySomethingAsync.Field()
    other = OtherMutation.Field()


schema = Schema(query=RootQuery, mutation=Mutation)
client = Client(schema)


@mark.asyncio
async def test_node_query_promise():
    executed = await client.execute_async(
        'mutation a { sayPromise(input: {what:"hello", clientMutationId:"1"}) { phrase } }'
    )
    assert isinstance(executed, dict)
    assert "errors" not in executed
    assert executed["data"] == {"sayPromise": {"phrase": "hello"}}


@mark.asyncio
async def test_edge_query():
    executed = await client.execute_async(
        'mutation a { other(input: {clientMutationId:"1"}) { clientMutationId, myNodeEdge { cursor node { name }} } }'
    )
    assert isinstance(executed, dict)
    assert "errors" not in executed
    assert executed["data"] == {
        "other": {
            "clientMutationId": "1",
            "myNodeEdge": {"cursor": "1", "node": {"name": "name"}},
        }
    }

@@ -1,4 +1,5 @@
-from collections import OrderedDict
+import re
+from textwrap import dedent
 
 from graphql_relay import to_global_id
 
@@ -6,8 +7,7 @@ from ...types import ObjectType, Schema, String
 from ..node import Node, is_node
 
 
-class SharedNodeFields(object):
-
+class SharedNodeFields:
     shared = String()
     something_else = String()
 
@@ -54,6 +54,7 @@ def test_node_good():
     assert "id" in MyNode._meta.fields
     assert is_node(MyNode)
     assert not is_node(object)
+    assert not is_node("node")
 
 
 def test_node_query():
@@ -70,23 +71,33 @@ def test_subclassed_node_query():
         % to_global_id("MyOtherNode", 1)
     )
     assert not executed.errors
-    assert executed.data == OrderedDict(
-        {
-            "node": OrderedDict(
-                [
-                    ("shared", "1"),
-                    ("extraField", "extra field info."),
-                    ("somethingElse", "----"),
-                ]
-            )
-        }
-    )
+    assert executed.data == {
+        "node": {
+            "shared": "1",
+            "extraField": "extra field info.",
+            "somethingElse": "----",
+        }
+    }
 
 
 def test_node_requesting_non_node():
     executed = schema.execute(
         '{ node(id:"%s") { __typename } } ' % Node.to_global_id("RootQuery", 1)
     )
+    assert executed.errors
+    assert re.match(
+        r"ObjectType .* does not implement the .* interface.",
+        executed.errors[0].message,
+    )
+    assert executed.data == {"node": None}
+
+
+def test_node_requesting_unknown_type():
+    executed = schema.execute(
+        '{ node(id:"%s") { __typename } } ' % Node.to_global_id("UnknownType", 1)
+    )
+    assert executed.errors
+    assert re.match(r"Relay Node .* not found in schema", executed.errors[0].message)
     assert executed.data == {"node": None}
 
 
@@ -94,7 +105,8 @@ def test_node_query_incorrect_id():
     executed = schema.execute(
         '{ node(id:"%s") { ... on MyNode { name } } }' % "something:2"
     )
-    assert not executed.errors
+    assert executed.errors
+    assert re.match(r"Unable to parse global ID .*", executed.errors[0].message)
     assert executed.data == {"node": None}
 
 
@@ -110,6 +122,17 @@ def test_node_field_custom():
     assert node_field.node_type == Node
 
 
+def test_node_field_args():
+    field_args = {
+        "name": "my_custom_name",
+        "description": "my_custom_description",
+        "deprecation_reason": "my_custom_deprecation_reason",
+    }
+    node_field = Node.Field(**field_args)
+    for field_arg, value in field_args.items():
+        assert getattr(node_field, field_arg) == value
+
+
 def test_node_field_only_type():
     executed = schema.execute(
         '{ onlyNode(id:"%s") { __typename, name } } ' % Node.to_global_id("MyNode", 1)
@@ -124,7 +147,7 @@ def test_node_field_only_type_wrong():
         % Node.to_global_id("MyOtherNode", 1)
     )
     assert len(executed.errors) == 1
-    assert str(executed.errors[0]) == "Must receive a MyNode id."
+    assert str(executed.errors[0]).startswith("Must receive a MyNode id.")
     assert executed.data == {"onlyNode": None}
 
 
@@ -143,39 +166,54 @@ def test_node_field_only_lazy_type_wrong():
         % Node.to_global_id("MyOtherNode", 1)
     )
     assert len(executed.errors) == 1
-    assert str(executed.errors[0]) == "Must receive a MyNode id."
+    assert str(executed.errors[0]).startswith("Must receive a MyNode id.")
     assert executed.data == {"onlyNodeLazy": None}
 
 
 def test_str_schema():
     assert (
-        str(schema)
-        == """
-schema {
-  query: RootQuery
-}
-
-type MyNode implements Node {
-  id: ID!
-  name: String
-}
-
-type MyOtherNode implements Node {
-  id: ID!
-  shared: String
-  somethingElse: String
-  extraField: String
-}
-
-interface Node {
-  id: ID!
-}
-
-type RootQuery {
-  first: String
-  node(id: ID!): Node
-  onlyNode(id: ID!): MyNode
-  onlyNodeLazy(id: ID!): MyNode
-}
-""".lstrip()
+        str(schema).strip()
+        == dedent(
+            '''
+            schema {
+              query: RootQuery
+            }
+
+            type MyNode implements Node {
+              """The ID of the object"""
+              id: ID!
+              name: String
+            }
+
+            """An object with an ID"""
+            interface Node {
+              """The ID of the object"""
+              id: ID!
+            }
+
+            type MyOtherNode implements Node {
+              """The ID of the object"""
+              id: ID!
+              shared: String
+              somethingElse: String
+              extraField: String
+            }
+
+            type RootQuery {
+              first: String
+              node(
+                """The ID of the object"""
+                id: ID!
+              ): Node
+              onlyNode(
+                """The ID of the object"""
+                id: ID!
+              ): MyNode
+              onlyNodeLazy(
+                """The ID of the object"""
+                id: ID!
+              ): MyNode
+            }
+            '''
+        ).strip()
     )

@@ -1,4 +1,6 @@
-from graphql import graphql
+from textwrap import dedent
+
+from graphql import graphql_sync
 
 from ...types import Interface, ObjectType, Schema
 from ...types.scalars import Int, String
@@ -10,12 +12,12 @@ class CustomNode(Node):
         name = "Node"
 
     @staticmethod
-    def to_global_id(type, id):
+    def to_global_id(type_, id):
         return id
 
     @staticmethod
     def get_node_from_global_id(info, id, only_type=None):
-        assert info.schema == schema
+        assert info.schema is graphql_schema
         if id in user_data:
             return user_data.get(id)
         else:
@@ -23,14 +25,14 @@ class CustomNode(Node):
 
 
 class BasePhoto(Interface):
-    width = Int()
+    width = Int(description="The width of the photo in pixels")
 
 
 class User(ObjectType):
     class Meta:
         interfaces = [CustomNode]
 
-    name = String()
+    name = String(description="The full name of the user")
 
 
 class Photo(ObjectType):
@@ -48,37 +50,52 @@ class RootQuery(ObjectType):
 
 
 schema = Schema(query=RootQuery, types=[User, Photo])
+graphql_schema = schema.graphql_schema
 
 
 def test_str_schema_correct():
     assert (
-        str(schema)
-        == """schema {
-  query: RootQuery
-}
-
-interface BasePhoto {
-  width: Int
-}
-
-interface Node {
-  id: ID!
-}
-
-type Photo implements Node, BasePhoto {
-  id: ID!
-  width: Int
-}
-
-type RootQuery {
-  node(id: ID!): Node
-}
-
-type User implements Node {
-  id: ID!
-  name: String
-}
-"""
+        str(schema).strip()
+        == dedent(
+            '''
+            schema {
+              query: RootQuery
+            }
+
+            type User implements Node {
+              """The ID of the object"""
+              id: ID!
+
+              """The full name of the user"""
+              name: String
+            }
+
+            interface Node {
+              """The ID of the object"""
+              id: ID!
+            }
+
+            type Photo implements Node & BasePhoto {
+              """The ID of the object"""
+              id: ID!
+
+              """The width of the photo in pixels"""
+              width: Int
+            }
+
+            interface BasePhoto {
+              """The width of the photo in pixels"""
+              width: Int
+            }
+
+            type RootQuery {
+              node(
+                """The ID of the object"""
+                id: ID!
+              ): Node
+            }
+            '''
+        ).strip()
     )
@@ -91,7 +108,7 @@ def test_gets_the_correct_id_for_users():
     }
     """
     expected = {"node": {"id": "1"}}
-    result = graphql(schema, query)
+    result = graphql_sync(graphql_schema, query)
     assert not result.errors
     assert result.data == expected
 
@@ -105,7 +122,7 @@ def test_gets_the_correct_id_for_photos():
     }
     """
     expected = {"node": {"id": "4"}}
-    result = graphql(schema, query)
+    result = graphql_sync(graphql_schema, query)
     assert not result.errors
     assert result.data == expected
 
@@ -122,7 +139,7 @@ def test_gets_the_correct_name_for_users():
     }
     """
     expected = {"node": {"id": "1", "name": "John Doe"}}
-    result = graphql(schema, query)
+    result = graphql_sync(graphql_schema, query)
     assert not result.errors
     assert result.data == expected
 
@@ -139,7 +156,7 @@ def test_gets_the_correct_width_for_photos():
     }
     """
     expected = {"node": {"id": "4", "width": 400}}
-    result = graphql(schema, query)
+    result = graphql_sync(graphql_schema, query)
     assert not result.errors
     assert result.data == expected
 
@@ -154,7 +171,7 @@ def test_gets_the_correct_typename_for_users():
     }
     """
     expected = {"node": {"id": "1", "__typename": "User"}}
-    result = graphql(schema, query)
+    result = graphql_sync(graphql_schema, query)
     assert not result.errors
     assert result.data == expected
 
@@ -169,7 +186,7 @@ def test_gets_the_correct_typename_for_photos():
     }
     """
     expected = {"node": {"id": "4", "__typename": "Photo"}}
-    result = graphql(schema, query)
+    result = graphql_sync(graphql_schema, query)
     assert not result.errors
     assert result.data == expected
 
@@ -186,7 +203,7 @@ def test_ignores_photo_fragments_on_user():
     }
     """
     expected = {"node": {"id": "1"}}
-    result = graphql(schema, query)
+    result = graphql_sync(graphql_schema, query)
     assert not result.errors
     assert result.data == expected
 
@@ -200,7 +217,7 @@ def test_returns_null_for_bad_ids():
     }
     """
     expected = {"node": None}
-    result = graphql(schema, query)
+    result = graphql_sync(graphql_schema, query)
     assert not result.errors
     assert result.data == expected
 
@@ -239,7 +256,7 @@ def test_have_correct_node_interface():
             ],
         }
     }
-    result = graphql(schema, query)
+    result = graphql_sync(graphql_schema, query)
     assert not result.errors
     assert result.data == expected
 
@@ -291,6 +308,6 @@ def test_has_correct_node_root_field():
            }
        }
    }
-    result = graphql(schema, query)
+    result = graphql_sync(graphql_schema, query)
     assert not result.errors
     assert result.data == expected

@@ -1,6 +1,3 @@
-from promise import Promise, is_thenable
-import six
-from graphql.error import format_error as format_graphql_error
 from graphql.error import GraphQLError
 
 from graphene.types.schema import Schema
@@ -8,25 +5,20 @@ from graphene.types.schema import Schema
 
 def default_format_error(error):
     if isinstance(error, GraphQLError):
-        return format_graphql_error(error)
-
-    return {"message": six.text_type(error)}
+        return error.formatted
+    return {"message": str(error)}
 
 
 def format_execution_result(execution_result, format_error):
     if execution_result:
         response = {}
 
         if execution_result.errors:
             response["errors"] = [format_error(e) for e in execution_result.errors]
-
-        if not execution_result.invalid:
-            response["data"] = execution_result.data
+        response["data"] = execution_result.data
 
         return response
 
 
-class Client(object):
+class Client:
     def __init__(self, schema, format_error=None, **execute_options):
         assert isinstance(schema, Schema)
         self.schema = schema
@@ -38,7 +30,10 @@ class Client(object):
 
     def execute(self, *args, **kwargs):
         executed = self.schema.execute(*args, **dict(self.execute_options, **kwargs))
-        if is_thenable(executed):
-            return Promise.resolve(executed).then(self.format_result)
+        return self.format_result(executed)
+
+    async def execute_async(self, *args, **kwargs):
+        executed = await self.schema.execute_async(
+            *args, **dict(self.execute_options, **kwargs)
+        )
         return self.format_result(executed)

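With the Promise-based branch gone, asynchronous execution now goes through the new `execute_async` method. A minimal usage sketch (not part of the diff; it assumes pytest-asyncio is installed, mirroring how the async tests above are written):

# Hypothetical test module exercising Client.execute_async with an async resolver.
from pytest import mark

from graphene import ObjectType, Schema, String
from graphene.test import Client


class Query(ObjectType):
    hello = String()

    async def resolve_hello(root, info):
        return "world"  # an async resolver forces the async execution path


client = Client(Schema(query=Query))


@mark.asyncio
async def test_hello_async():
    executed = await client.execute_async("{ hello }")
    # format_execution_result always includes "data" and adds "errors" only on failure
    assert executed == {"data": {"hello": "world"}}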
graphene/tests/issues/test_1293.py (new file, 41 lines)
@@ -0,0 +1,41 @@
# https://github.com/graphql-python/graphene/issues/1293

from datetime import datetime, timezone

import graphene
from graphql.utilities import print_schema


class Filters(graphene.InputObjectType):
    datetime_after = graphene.DateTime(
        required=False,
        default_value=datetime.fromtimestamp(1434549820.776, timezone.utc),
    )
    datetime_before = graphene.DateTime(
        required=False,
        default_value=datetime.fromtimestamp(1444549820.776, timezone.utc),
    )


class SetDatetime(graphene.Mutation):
    class Arguments:
        filters = Filters(required=True)

    ok = graphene.Boolean()

    def mutate(root, info, filters):
        return SetDatetime(ok=True)


class Query(graphene.ObjectType):
    goodbye = graphene.String()


class Mutations(graphene.ObjectType):
    set_datetime = SetDatetime.Field()


def test_schema_printable_with_default_datetime_value():
    schema = graphene.Schema(query=Query, mutation=Mutations)
    schema_str = print_schema(schema.graphql_schema)
    assert schema_str, "empty schema printed"

graphene/tests/issues/test_1394.py (new file, 36 lines)
@@ -0,0 +1,36 @@
from ...types import ObjectType, Schema, String, NonNull


class Query(ObjectType):
    hello = String(input=NonNull(String))

    def resolve_hello(self, info, input):
        if input == "nothing":
            return None
        return f"Hello {input}!"


schema = Schema(query=Query)


def test_required_input_provided():
    """
    Test that a required argument works when provided.
    """
    input_value = "Potato"
    result = schema.execute('{ hello(input: "%s") }' % input_value)
    assert not result.errors
    assert result.data == {"hello": "Hello Potato!"}


def test_required_input_missing():
    """
    Test that a required argument raises an error if not provided.
    """
    result = schema.execute("{ hello }")
    assert result.errors
    assert len(result.errors) == 1
    assert (
        result.errors[0].message
        == "Field 'hello' argument 'input' of type 'String!' is required, but it was not provided."
    )

graphene/tests/issues/test_1419.py (new file, 53 lines)
@@ -0,0 +1,53 @@
import pytest

from ...types.base64 import Base64
from ...types.datetime import Date, DateTime
from ...types.decimal import Decimal
from ...types.generic import GenericScalar
from ...types.json import JSONString
from ...types.objecttype import ObjectType
from ...types.scalars import ID, BigInt, Boolean, Float, Int, String
from ...types.schema import Schema
from ...types.uuid import UUID


@pytest.mark.parametrize(
    "input_type,input_value",
    [
        (Date, '"2022-02-02"'),
        (GenericScalar, '"foo"'),
        (Int, "1"),
        (BigInt, "12345678901234567890"),
        (Float, "1.1"),
        (String, '"foo"'),
        (Boolean, "true"),
        (ID, "1"),
        (DateTime, '"2022-02-02T11:11:11"'),
        (UUID, '"cbebbc62-758e-4f75-a890-bc73b5017d81"'),
        (Decimal, '"1.1"'),
        (JSONString, '"{\\"key\\":\\"foo\\",\\"value\\":\\"bar\\"}"'),
        (Base64, '"Q2hlbG8gd29ycmxkCg=="'),
    ],
)
def test_parse_literal_with_variables(input_type, input_value):
    # input_b needs to be evaluated as a literal while the variable dict for
    # input_a is passed along.

    class Query(ObjectType):
        generic = GenericScalar(input_a=GenericScalar(), input_b=input_type())

        def resolve_generic(self, info, input_a=None, input_b=None):
            return input

    schema = Schema(query=Query)

    query = f"""
    query Test($a: GenericScalar){{
        generic(inputA: $a, inputB: {input_value})
    }}
    """
    result = schema.execute(
        query,
        variables={"a": "bar"},
    )
    assert not result.errors

@@ -21,7 +21,7 @@ class CreatePostResult(graphene.Union):
 
 
 class CreatePost(graphene.Mutation):
-    class Input:
+    class Arguments:
         text = graphene.String(required=True)
 
     result = graphene.Field(CreatePostResult)

@@ -1,6 +1,6 @@
 # https://github.com/graphql-python/graphene/issues/356
 
-import pytest
+from pytest import raises
 
 import graphene
 from graphene import relay
@@ -23,10 +23,11 @@ def test_issue():
     class Query(graphene.ObjectType):
         things = relay.ConnectionField(MyUnion)
 
-    with pytest.raises(Exception) as exc_info:
+    with raises(Exception) as exc_info:
         graphene.Schema(query=Query)
 
     assert str(exc_info.value) == (
-        "IterableConnectionField type have to be a subclass of Connection. "
-        'Received "MyUnion".'
+        "Query fields cannot be resolved."
+        " IterableConnectionField type has to be a subclass of Connection."
+        ' Received "MyUnion".'
     )

graphene/tests/issues/test_881.py (new file, 27 lines)
@@ -0,0 +1,27 @@
import pickle

from ...types.enum import Enum


class PickleEnum(Enum):
    # defined outside the test because pickle cannot dump a class defined inside a pytest function
    A = "a"
    B = 1


def test_enums_pickling():
    a = PickleEnum.A
    pickled = pickle.dumps(a)
    restored = pickle.loads(pickled)
    assert type(a) is type(restored)
    assert a == restored
    assert a.value == restored.value
    assert a.name == restored.name

    b = PickleEnum.B
    pickled = pickle.dumps(b)
    restored = pickle.loads(pickled)
    assert type(a) is type(restored)
    assert b == restored
    assert b.value == restored.value
    assert b.name == restored.name

graphene/tests/issues/test_956.py (new file, 8 lines)
@@ -0,0 +1,8 @@
import graphene


def test_issue():
    options = {"description": "This my enum", "deprecation_reason": "For the funs"}
    new_enum = graphene.Enum("MyEnum", [("some", "data")], **options)
    assert new_enum._meta.description == options["description"]
    assert new_enum._meta.deprecation_reason == options["deprecation_reason"]

@@ -1,55 +1,53 @@
-# flake8: noqa
-from graphql import ResolveInfo
-
-from .objecttype import ObjectType
-from .interface import Interface
-from .mutation import Mutation
-from .scalars import Scalar, String, ID, Int, Float, Boolean
-from .datetime import Date, DateTime, Time
-from .json import JSONString
-from .uuid import UUID
-from .schema import Schema
-from .structures import List, NonNull
-from .enum import Enum
-from .field import Field
-from .inputfield import InputField
-from .argument import Argument
-from .inputobjecttype import InputObjectType
-from .dynamic import Dynamic
-from .union import Union
-from .context import Context
-
-# Deprecated
-from .abstracttype import AbstractType
-
+from graphql import GraphQLResolveInfo as ResolveInfo
+
+from .argument import Argument
+from .base64 import Base64
+from .context import Context
+from .datetime import Date, DateTime, Time
+from .decimal import Decimal
+from .dynamic import Dynamic
+from .enum import Enum
+from .field import Field
+from .inputfield import InputField
+from .inputobjecttype import InputObjectType
+from .interface import Interface
+from .json import JSONString
+from .mutation import Mutation
+from .objecttype import ObjectType
+from .scalars import ID, BigInt, Boolean, Float, Int, Scalar, String
+from .schema import Schema
+from .structures import List, NonNull
+from .union import Union
+from .uuid import UUID
 
 __all__ = [
-    "ObjectType",
-    "InputObjectType",
-    "Interface",
-    "Mutation",
-    "Enum",
-    "Field",
-    "InputField",
-    "Schema",
-    "Scalar",
-    "String",
-    "ID",
-    "Int",
-    "Float",
-    "Date",
-    "DateTime",
-    "Time",
-    "JSONString",
-    "UUID",
-    "Boolean",
-    "List",
-    "NonNull",
-    "Argument",
-    "Dynamic",
-    "Union",
-    "Context",
+    "Argument",
+    "Base64",
+    "BigInt",
+    "Boolean",
+    "Context",
+    "Date",
+    "DateTime",
+    "Decimal",
+    "Dynamic",
+    "Enum",
+    "Field",
+    "Float",
+    "ID",
+    "InputField",
+    "InputObjectType",
+    "Int",
+    "Interface",
+    "JSONString",
+    "List",
+    "Mutation",
+    "NonNull",
+    "ObjectType",
     "ResolveInfo",
-    # Deprecated
-    "AbstractType",
+    "Scalar",
+    "Schema",
+    "String",
+    "Time",
+    "UUID",
+    "Union",
 ]

@@ -1,11 +0,0 @@
-from ..utils.deprecated import warn_deprecation
-from ..utils.subclass_with_meta import SubclassWithMeta
-
-
-class AbstractType(SubclassWithMeta):
-    def __init_subclass__(cls, *args, **kwargs):
-        warn_deprecation(
-            "Abstract type is deprecated, please use normal object inheritance instead.\n"
-            "See more: https://github.com/graphql-python/graphene/blob/master/UPGRADE-v2.0.md#deprecations"
-        )
-        super(AbstractType, cls).__init_subclass__(*args, **kwargs)

@@ -1,5 +1,5 @@
-from collections import OrderedDict
 from itertools import chain
 
+from graphql import Undefined
+
 from .dynamic import Dynamic
 from .mountedtype import MountedType
@@ -8,10 +8,45 @@ from .utils import get_type
 
 
 class Argument(MountedType):
+    """
+    Makes an Argument available on a Field in the GraphQL schema.
+
+    Arguments will be parsed and provided to resolver methods for fields as keyword arguments.
+
+    All ``arg`` and ``**extra_args`` for a ``graphene.Field`` are implicitly mounted as Argument
+    using the parameters below.
+
+    .. code:: python
+
+        from graphene import String, Boolean, Argument
+
+        age = String(
+            # Boolean implicitly mounted as Argument
+            dog_years=Boolean(description="convert to dog years"),
+            # Boolean explicitly mounted as Argument
+            decades=Argument(Boolean, default_value=False),
+        )
+
+    args:
+        type (class for a graphene.UnmountedType): must be a class (not an instance) of an
+            unmounted graphene type (ex. scalar or object) which is used as the type of this
+            argument in the GraphQL schema.
+        required (optional, bool): indicates this argument as not null in the graphql schema. Same behavior
+            as graphene.NonNull. Default False.
+        name (optional, str): the name of the GraphQL argument. Defaults to parameter name.
+        description (optional, str): the description of the GraphQL argument in the schema.
+        default_value (optional, Any): the value to be provided if the user does not set this argument in
+            the operation.
+        deprecation_reason (optional, str): setting this value indicates that the argument is
+            deprecated and may provide instructions or a reason for how clients should proceed. Cannot be
+            set if the argument is required (see spec).
+    """
+
     def __init__(
         self,
-        type,
-        default_value=None,
+        type_,
+        default_value=Undefined,
+        deprecation_reason=None,
         description=None,
         name=None,
         required=False,
@@ -20,12 +55,16 @@ class Argument(MountedType):
         super(Argument, self).__init__(_creation_counter=_creation_counter)
 
         if required:
-            type = NonNull(type)
+            assert (
+                deprecation_reason is None
+            ), f"Argument {name} is required, cannot deprecate it."
+            type_ = NonNull(type_)
 
         self.name = name
-        self._type = type
+        self._type = type_
         self.default_value = default_value
         self.description = description
+        self.deprecation_reason = deprecation_reason
 
     @property
     def type(self):
@@ -37,6 +76,7 @@ class Argument(MountedType):
             and self.type == other.type
             and self.default_value == other.default_value
             and self.description == other.description
+            and self.deprecation_reason == other.deprecation_reason
         )
 
 
@@ -50,7 +90,7 @@ def to_arguments(args, extra_args=None):
     else:
         extra_args = []
     iter_arguments = chain(args.items(), extra_args)
-    arguments = OrderedDict()
+    arguments = {}
     for default_name, arg in iter_arguments:
         if isinstance(arg, Dynamic):
             arg = arg.get_type()
@@ -64,20 +104,17 @@ def to_arguments(args, extra_args=None):
 
         if isinstance(arg, (InputField, Field)):
             raise ValueError(
-                "Expected {} to be Argument, but received {}. Try using Argument({}).".format(
-                    default_name, type(arg).__name__, arg.type
-                )
+                f"Expected {default_name} to be Argument, "
+                f"but received {type(arg).__name__}. Try using Argument({arg.type})."
             )
 
         if not isinstance(arg, Argument):
-            raise ValueError('Unknown argument "{}".'.format(default_name))
+            raise ValueError(f'Unknown argument "{default_name}".')
 
         arg_name = default_name or arg.name
         assert (
             arg_name not in arguments
-        ), 'More than one Argument have same name "{}".'.format(
-            arg_name
-        )
+        ), f'More than one Argument have same name "{arg_name}".'
         arguments[arg_name] = arg
 
     return arguments

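One consequence of the new `deprecation_reason` parameter, sketched here for illustration (`field` and `old_flag` are made-up names): it may only be combined with optional arguments, because the constructor asserts when `required=True`.

# Illustrative only; not part of the diff.
from graphene import Argument, Boolean, String

# Fine: an optional argument can carry a deprecation reason.
field = String(
    old_flag=Argument(Boolean, deprecation_reason="Use newFlag instead.")
)

# Raises AssertionError ("Argument ... is required, cannot deprecate it."):
# field = String(old_flag=Argument(Boolean, required=True, deprecation_reason="..."))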
@@ -1,15 +1,17 @@
-from ..utils.subclass_with_meta import SubclassWithMeta
+from typing import Type, Optional
+
+from ..utils.subclass_with_meta import SubclassWithMeta, SubclassWithMeta_Meta
 from ..utils.trim_docstring import trim_docstring
 
 
-class BaseOptions(object):
-    name = None  # type: str
-    description = None  # type: str
+class BaseOptions:
+    name: Optional[str] = None
+    description: Optional[str] = None
 
-    _frozen = False  # type: bool
+    _frozen: bool = False
 
-    def __init__(self, class_type):
-        self.class_type = class_type  # type: Type
+    def __init__(self, class_type: Type):
+        self.class_type: Type = class_type
 
     def freeze(self):
         self._frozen = True
@@ -18,10 +20,13 @@ class BaseOptions(object):
         if not self._frozen:
             super(BaseOptions, self).__setattr__(name, value)
         else:
-            raise Exception("Can't modify frozen Options {}".format(self))
+            raise Exception(f"Can't modify frozen Options {self}")
 
     def __repr__(self):
-        return "<{} name={}>".format(self.__class__.__name__, repr(self.name))
+        return f"<{self.__class__.__name__} name={repr(self.name)}>"
 
 
+BaseTypeMeta = SubclassWithMeta_Meta
+
+
 class BaseType(SubclassWithMeta):
@@ -30,8 +35,10 @@ class BaseType(SubclassWithMeta):
         return type(class_name, (cls,), {"Meta": options})
 
     @classmethod
-    def __init_subclass_with_meta__(cls, name=None, description=None, _meta=None):
-        assert "_meta" not in cls.__dict__, "Can't assign directly meta"
+    def __init_subclass_with_meta__(
+        cls, name=None, description=None, _meta=None, **_kwargs
+    ):
+        assert "_meta" not in cls.__dict__, "Can't assign meta directly"
         if not _meta:
             return
         _meta.name = name or cls.__name__

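The freeze behavior kept above is what protects options objects after type construction; a small sketch of the effect (illustrative, using the `BaseOptions` shown in this hunk):

# Illustrative only; not part of the diff.
from graphene.types.base import BaseOptions


class MyOptions(BaseOptions):
    pass


opts = MyOptions(class_type=object)
opts.name = "Example"  # allowed while the options are still unfrozen
opts.freeze()
# opts.name = "Other"  # would raise: Can't modify frozen Options <MyOptions name='Example'>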
graphene/types/base64.py (new file, 43 lines)

@@ -0,0 +1,43 @@
+from binascii import Error as _Error
+from base64 import b64decode, b64encode
+
+from graphql.error import GraphQLError
+from graphql.language import StringValueNode, print_ast
+
+from .scalars import Scalar
+
+
+class Base64(Scalar):
+    """
+    The `Base64` scalar type represents a base64-encoded String.
+    """
+
+    @staticmethod
+    def serialize(value):
+        if not isinstance(value, bytes):
+            if isinstance(value, str):
+                value = value.encode("utf-8")
+            else:
+                value = str(value).encode("utf-8")
+        return b64encode(value).decode("utf-8")
+
+    @classmethod
+    def parse_literal(cls, node, _variables=None):
+        if not isinstance(node, StringValueNode):
+            raise GraphQLError(
+                f"Base64 cannot represent non-string value: {print_ast(node)}"
+            )
+        return cls.parse_value(node.value)
+
+    @staticmethod
+    def parse_value(value):
+        if not isinstance(value, bytes):
+            if not isinstance(value, str):
+                raise GraphQLError(
+                    f"Base64 cannot represent non-string value: {repr(value)}"
+                )
+            value = value.encode("utf-8")
+        try:
+            return b64decode(value, validate=True).decode("utf-8")
+        except _Error:
+            raise GraphQLError(f"Base64 cannot decode value: {repr(value)}")
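A usage sketch for the new scalar (not part of the diff; the field name and resolver are made up):

import graphene
from graphene.types.base64 import Base64


class Query(graphene.ObjectType):
    encoded_greeting = Base64()

    def resolve_encoded_greeting(root, info):
        # Base64.serialize accepts bytes as well as str
        return b"hello world"


schema = graphene.Schema(query=Query)
result = schema.execute("{ encodedGreeting }")
print(result.data)  # {'encodedGreeting': 'aGVsbG8gd29ybGQ='}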
@@ -1,4 +1,25 @@
-class Context(object):
+class Context:
+    """
+    Context can be used to make a convenient container for attributes to provide
+    for execution for resolvers of a GraphQL operation like a query.
+
+    .. code:: python
+
+        from graphene import Context
+
+        context = Context(loaders=build_dataloaders(), request=my_web_request)
+        schema.execute('{ hello(name: "world") }', context=context)
+
+        def resolve_hello(parent, info, name):
+            info.context.request  # value set in Context
+            info.context.loaders  # value set in Context
+            # ...
+
+    args:
+        **params (Dict[str, Any]): values to make available on Context instance as attributes.
+
+    """
+
     def __init__(self, **params):
         for key, value in params.items():
             setattr(self, key, value)
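A self-contained variant of the docstring example above (not part of the diff; the schema and the client_name attribute are made up):

import graphene
from graphene import Context


class Query(graphene.ObjectType):
    hello = graphene.String(name=graphene.String(default_value="world"))

    def resolve_hello(parent, info, name):
        # attributes passed to Context are available on info.context
        return f"Hello {name} (client: {info.context.client_name})"


schema = graphene.Schema(query=Query)
result = schema.execute(
    '{ hello(name: "graphene") }',
    context=Context(client_name="example-client"),
)
print(result.data)  # {'hello': 'Hello graphene (client: example-client)'}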
@@ -1,9 +1,9 @@
-from __future__ import absolute_import
-
 import datetime
 
-from aniso8601 import parse_date, parse_datetime, parse_time
-from graphql.language import ast
+from dateutil.parser import isoparse
+
+from graphql.error import GraphQLError
+from graphql.language import StringValueNode, print_ast
 
 from .scalars import Scalar

@@ -19,22 +19,28 @@ class Date(Scalar):
     def serialize(date):
         if isinstance(date, datetime.datetime):
             date = date.date()
-        assert isinstance(
-            date, datetime.date
-        ), 'Received not compatible date "{}"'.format(repr(date))
+        if not isinstance(date, datetime.date):
+            raise GraphQLError(f"Date cannot represent value: {repr(date)}")
         return date.isoformat()
 
     @classmethod
-    def parse_literal(cls, node):
-        if isinstance(node, ast.StringValue):
-            return cls.parse_value(node.value)
+    def parse_literal(cls, node, _variables=None):
+        if not isinstance(node, StringValueNode):
+            raise GraphQLError(
+                f"Date cannot represent non-string value: {print_ast(node)}"
+            )
+        return cls.parse_value(node.value)
 
     @staticmethod
     def parse_value(value):
+        if isinstance(value, datetime.date):
+            return value
+        if not isinstance(value, str):
+            raise GraphQLError(f"Date cannot represent non-string value: {repr(value)}")
         try:
-            return parse_date(value)
+            return datetime.date.fromisoformat(value)
         except ValueError:
-            return None
+            raise GraphQLError(f"Date cannot represent value: {repr(value)}")
 
 
 class DateTime(Scalar):

@@ -46,22 +52,30 @@ class DateTime(Scalar):
     @staticmethod
     def serialize(dt):
-        assert isinstance(
-            dt, (datetime.datetime, datetime.date)
-        ), 'Received not compatible datetime "{}"'.format(repr(dt))
+        if not isinstance(dt, (datetime.datetime, datetime.date)):
+            raise GraphQLError(f"DateTime cannot represent value: {repr(dt)}")
         return dt.isoformat()
 
     @classmethod
-    def parse_literal(cls, node):
-        if isinstance(node, ast.StringValue):
-            return cls.parse_value(node.value)
+    def parse_literal(cls, node, _variables=None):
+        if not isinstance(node, StringValueNode):
+            raise GraphQLError(
+                f"DateTime cannot represent non-string value: {print_ast(node)}"
+            )
+        return cls.parse_value(node.value)
 
     @staticmethod
     def parse_value(value):
+        if isinstance(value, datetime.datetime):
+            return value
+        if not isinstance(value, str):
+            raise GraphQLError(
+                f"DateTime cannot represent non-string value: {repr(value)}"
+            )
         try:
-            return parse_datetime(value)
+            return isoparse(value)
         except ValueError:
-            return None
+            raise GraphQLError(f"DateTime cannot represent value: {repr(value)}")
 
 
 class Time(Scalar):

@@ -73,19 +87,25 @@ class Time(Scalar):
     @staticmethod
     def serialize(time):
-        assert isinstance(
-            time, datetime.time
-        ), 'Received not compatible time "{}"'.format(repr(time))
+        if not isinstance(time, datetime.time):
+            raise GraphQLError(f"Time cannot represent value: {repr(time)}")
         return time.isoformat()
 
     @classmethod
-    def parse_literal(cls, node):
-        if isinstance(node, ast.StringValue):
-            return cls.parse_value(node.value)
+    def parse_literal(cls, node, _variables=None):
+        if not isinstance(node, StringValueNode):
+            raise GraphQLError(
+                f"Time cannot represent non-string value: {print_ast(node)}"
+            )
+        return cls.parse_value(node.value)
 
     @classmethod
     def parse_value(cls, value):
+        if isinstance(value, datetime.time):
+            return value
+        if not isinstance(value, str):
+            raise GraphQLError(f"Time cannot represent non-string value: {repr(value)}")
         try:
-            return parse_time(value)
+            return datetime.time.fromisoformat(value)
         except ValueError:
-            return None
+            raise GraphQLError(f"Time cannot represent value: {repr(value)}")
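A behavioural sketch (not part of the diff; the field and argument names are made up): malformed input now surfaces as a GraphQLError instead of silently resolving to None.

import datetime

import graphene


class Query(graphene.ObjectType):
    next_day = graphene.Date(date_input=graphene.Date(required=True))

    def resolve_next_day(root, info, date_input):
        # date_input already arrives as a datetime.date via Date.parse_value
        return date_input + datetime.timedelta(days=1)


schema = graphene.Schema(query=Query)

ok = schema.execute('{ nextDay(dateInput: "2019-12-31") }')
print(ok.data)  # {'nextDay': '2020-01-01'}

bad = schema.execute('{ nextDay(dateInput: "not-a-date") }')
print(bad.errors[0].message)  # includes: Date cannot represent value: 'not-a-date'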
graphene/types/decimal.py (new file, 34 lines)

@@ -0,0 +1,34 @@
+from decimal import Decimal as _Decimal
+
+from graphql import Undefined
+from graphql.language.ast import StringValueNode, IntValueNode
+
+from .scalars import Scalar
+
+
+class Decimal(Scalar):
+    """
+    The `Decimal` scalar type represents a python Decimal.
+    """
+
+    @staticmethod
+    def serialize(dec):
+        if isinstance(dec, str):
+            dec = _Decimal(dec)
+        assert isinstance(
+            dec, _Decimal
+        ), f'Received not compatible Decimal "{repr(dec)}"'
+        return str(dec)
+
+    @classmethod
+    def parse_literal(cls, node, _variables=None):
+        if isinstance(node, (StringValueNode, IntValueNode)):
+            return cls.parse_value(node.value)
+        return Undefined
+
+    @staticmethod
+    def parse_value(value):
+        try:
+            return _Decimal(value)
+        except Exception:
+            return Undefined
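A usage sketch for the new scalar (not part of the diff; field and argument names are made up). Values travel as strings in GraphQL but reach resolvers as exact decimal.Decimal objects:

import graphene
from graphene.types.decimal import Decimal


class Query(graphene.ObjectType):
    total = Decimal(price=Decimal(required=True))

    def resolve_total(root, info, price):
        # price is a decimal.Decimal, so the arithmetic stays exact
        return price * 3


schema = graphene.Schema(query=Query)
result = schema.execute('{ total(price: "1.10") }')
print(result.data)  # {'total': '3.30'}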
@@ -1,3 +1,5 @@
+from enum import Enum as PyEnum
+
 from graphql import (
     GraphQLEnumType,
     GraphQLInputObjectType,

@@ -8,7 +10,7 @@ from graphql import (
 )
 
 
-class GrapheneGraphQLType(object):
+class GrapheneGraphQLType:
     """
     A class for extending the base GraphQLType with the related
     graphene_type

@@ -18,6 +20,11 @@ class GrapheneGraphQLType(object):
         self.graphene_type = kwargs.pop("graphene_type")
         super(GrapheneGraphQLType, self).__init__(*args, **kwargs)
 
+    def __copy__(self):
+        result = GrapheneGraphQLType(graphene_type=self.graphene_type)
+        result.__dict__.update(self.__dict__)
+        return result
+
 
 class GrapheneInterfaceType(GrapheneGraphQLType, GraphQLInterfaceType):
     pass

@@ -36,7 +43,19 @@ class GrapheneScalarType(GrapheneGraphQLType, GraphQLScalarType):
 
 
 class GrapheneEnumType(GrapheneGraphQLType, GraphQLEnumType):
-    pass
+    def serialize(self, value):
+        if not isinstance(value, PyEnum):
+            enum = self.graphene_type._meta.enum
+            try:
+                # Try and get enum by value
+                value = enum(value)
+            except ValueError:
+                # Try and get enum by name
+                try:
+                    value = enum[value]
+                except KeyError:
+                    pass
+        return super(GrapheneEnumType, self).serialize(value)
 
 
 class GrapheneInputObjectType(GrapheneGraphQLType, GraphQLInputObjectType):
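The practical effect of the new GrapheneEnumType.serialize, as a sketch (not part of the diff; Color and the query fields are made up): a resolver may return the Python enum member, its value, or its name, and all serialize to the member name.

import graphene


class Color(graphene.Enum):
    RED = 1
    GREEN = 2


class Query(graphene.ObjectType):
    by_member = graphene.Field(Color)
    by_value = graphene.Field(Color)
    by_name = graphene.Field(Color)

    def resolve_by_member(root, info):
        return Color.RED  # enum member

    def resolve_by_value(root, info):
        return 1  # looked up via enum(value)

    def resolve_by_name(root, info):
        return "RED"  # looked up via enum[value]


schema = graphene.Schema(query=Query)
result = schema.execute("{ byMember byValue byName }")
print(result.data)  # {'byMember': 'RED', 'byValue': 'RED', 'byName': 'RED'}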
@@ -10,10 +10,10 @@ class Dynamic(MountedType):
     the schema. So we can have lazy fields.
     """
 
-    def __init__(self, type, with_schema=False, _creation_counter=None):
+    def __init__(self, type_, with_schema=False, _creation_counter=None):
         super(Dynamic, self).__init__(_creation_counter=_creation_counter)
-        assert inspect.isfunction(type) or isinstance(type, partial)
-        self.type = type
+        assert inspect.isfunction(type_) or isinstance(type_, partial)
+        self.type = type_
         self.with_schema = with_schema
 
     def get_type(self, schema=None):
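For context, a usage sketch of Dynamic (not part of the diff; Pet, Person and Query are made up): the wrapped callable is only evaluated at schema-build time, which keeps the field type lazy.

import graphene
from graphene import Dynamic


class Pet(graphene.ObjectType):
    name = graphene.String()


class Person(graphene.ObjectType):
    name = graphene.String()
    # evaluated when the schema is built, so Pet could live in another module
    favourite_pet = Dynamic(lambda: graphene.Field(Pet))


class Query(graphene.ObjectType):
    me = graphene.Field(Person)

    def resolve_me(root, info):
        return Person(name="Ada", favourite_pet=Pet(name="Jet"))


schema = graphene.Schema(query=Query)
result = schema.execute("{ me { name favouritePet { name } } }")
print(result.data)  # expected: {'me': {'name': 'Ada', 'favouritePet': {'name': 'Jet'}}}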
@@ -1,10 +1,7 @@
-from collections import OrderedDict
-
-import six
+from enum import Enum as PyEnum
 
 from graphene.utils.subclass_with_meta import SubclassWithMeta_Meta
 
-from ..pyutils.compat import Enum as PyEnum
 from .base import BaseOptions, BaseType
 from .unmountedtype import UnmountedType

@@ -15,6 +12,10 @@ def eq_enum(self, other):
     return self.value is other
 
 
+def hash_enum(self):
+    return hash(self.name)
+
+
 EnumType = type(PyEnum)

@@ -24,15 +25,17 @@ class EnumOptions(BaseOptions):
 
 
 class EnumMeta(SubclassWithMeta_Meta):
-    def __new__(cls, name, bases, classdict, **options):
-        enum_members = OrderedDict(classdict, __eq__=eq_enum)
+    def __new__(cls, name_, bases, classdict, **options):
+        enum_members = dict(classdict, __eq__=eq_enum, __hash__=hash_enum)
         # We remove the Meta attribute from the class to not collide
         # with the enum values.
         enum_members.pop("Meta", None)
         enum = PyEnum(cls.__name__, enum_members)
-        return SubclassWithMeta_Meta.__new__(
-            cls, name, bases, OrderedDict(classdict, __enum__=enum), **options
+        obj = SubclassWithMeta_Meta.__new__(
+            cls, name_, bases, dict(classdict, __enum__=enum), **options
         )
+        globals()[name_] = obj.__enum__
+        return obj
 
     def get(cls, value):
         return cls._meta.enum(value)

@@ -41,27 +44,60 @@ class EnumMeta(SubclassWithMeta_Meta):
         return cls._meta.enum[value]
 
     def __prepare__(name, bases, **kwargs):  # noqa: N805
-        return OrderedDict()
+        return {}
 
     def __call__(cls, *args, **kwargs):  # noqa: N805
         if cls is Enum:
             description = kwargs.pop("description", None)
-            return cls.from_enum(PyEnum(*args, **kwargs), description=description)
+            deprecation_reason = kwargs.pop("deprecation_reason", None)
+            return cls.from_enum(
+                PyEnum(*args, **kwargs),
+                description=description,
+                deprecation_reason=deprecation_reason,
+            )
         return super(EnumMeta, cls).__call__(*args, **kwargs)
         # return cls._meta.enum(*args, **kwargs)
 
-    def from_enum(cls, enum, description=None, deprecation_reason=None):  # noqa: N805
-        description = description or enum.__doc__
+    def __iter__(cls):
+        return cls._meta.enum.__iter__()
+
+    def from_enum(cls, enum, name=None, description=None, deprecation_reason=None):  # noqa: N805
+        name = name or enum.__name__
+        description = description or enum.__doc__ or "An enumeration."
         meta_dict = {
             "enum": enum,
             "description": description,
             "deprecation_reason": deprecation_reason,
         }
         meta_class = type("Meta", (object,), meta_dict)
-        return type(meta_class.enum.__name__, (Enum,), {"Meta": meta_class})
+        return type(name, (Enum,), {"Meta": meta_class})
 
 
-class Enum(six.with_metaclass(EnumMeta, UnmountedType, BaseType)):
+class Enum(UnmountedType, BaseType, metaclass=EnumMeta):
+    """
+    Enum type definition
+
+    Defines a static set of values that can be provided as a Field, Argument or InputField.
+
+    .. code:: python
+
+        from graphene import Enum
+
+        class NameFormat(Enum):
+            FIRST_LAST = "first_last"
+            LAST_FIRST = "last_first"
+
+    Meta:
+        enum (optional, Enum): Python enum to use as a base for GraphQL Enum.
+
+        name (optional, str): Name of the GraphQL type (must be unique in schema). Defaults to class
+            name.
+        description (optional, str): Description of the GraphQL type in the schema. Defaults to class
+            docstring.
+        deprecation_reason (optional, str): Setting this value indicates that the enum is
+            depreciated and may provide instruction or reason on how for clients to proceed.
+    """
+
     @classmethod
     def __init_subclass_with_meta__(cls, enum=None, _meta=None, **options):
         if not _meta:
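A sketch of the new Enum features — wrapping an existing Python enum, the optional name override in from_enum, and class-level iteration (not part of the diff; Episode is made up):

from enum import Enum as PyEnum

import graphene


class Episode(PyEnum):
    """Star Wars trilogy episodes."""

    NEWHOPE = 4
    EMPIRE = 5
    JEDI = 6


# Wrap an existing Python enum; name overrides the GraphQL type name.
EpisodeEnum = graphene.Enum.from_enum(Episode, name="EpisodeKind")

print(EpisodeEnum._meta.name)                   # EpisodeKind
print([member.name for member in EpisodeEnum])  # ['NEWHOPE', 'EMPIRE', 'JEDI']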
@@ -1,27 +1,72 @@
 import inspect
-from collections import Mapping, OrderedDict
+from collections.abc import Mapping
 from functools import partial
 
 from .argument import Argument, to_arguments
 from .mountedtype import MountedType
+from .resolver import default_resolver
 from .structures import NonNull
 from .unmountedtype import UnmountedType
 from .utils import get_type
+from ..utils.deprecated import warn_deprecation
 
 base_type = type
 
 
 def source_resolver(source, root, info, **args):
-    resolved = getattr(root, source, None)
+    resolved = default_resolver(source, None, root, info, **args)
     if inspect.isfunction(resolved) or inspect.ismethod(resolved):
         return resolved()
     return resolved
 
 
 class Field(MountedType):
+    """
+    Makes a field available on an ObjectType in the GraphQL schema. Any type can be mounted as a
+    Field:
+
+    - Object Type
+    - Scalar Type
+    - Enum
+    - Interface
+    - Union
+
+    All class attributes of ``graphene.ObjectType`` are implicitly mounted as Field using the below
+    arguments.
+
+    .. code:: python
+
+        class Person(ObjectType):
+            first_name = graphene.String(required=True)                # implicitly mounted as Field
+            last_name = graphene.Field(String, description='Surname')  # explicitly mounted as Field
+
+    args:
+        type (class for a graphene.UnmountedType): Must be a class (not an instance) of an
+            unmounted graphene type (ex. scalar or object) which is used for the type of this
+            field in the GraphQL schema. You can provide a dotted module import path (string)
+            to the class instead of the class itself (e.g. to avoid circular import issues).
+        args (optional, Dict[str, graphene.Argument]): Arguments that can be input to the field.
+            Prefer to use ``**extra_args``, unless you use an argument name that clashes with one
+            of the Field arguments presented here (see :ref:`example<ResolverParamGraphQLArguments>`).
+        resolver (optional, Callable): A function to get the value for a Field from the parent
+            value object. If not set, the default resolver method for the schema is used.
+        source (optional, str): attribute name to resolve for this field from the parent value
+            object. Alternative to resolver (cannot set both source and resolver).
+        deprecation_reason (optional, str): Setting this value indicates that the field is
+            depreciated and may provide instruction or reason on how for clients to proceed.
+        required (optional, bool): indicates this field as not null in the graphql schema. Same behavior as
+            graphene.NonNull. Default False.
+        name (optional, str): the name of the GraphQL field (must be unique in a type). Defaults to attribute
+            name.
+        description (optional, str): the description of the GraphQL field in the schema.
+        default_value (optional, Any): Default value to resolve if none set from schema.
+        **extra_args (optional, Dict[str, Union[graphene.Argument, graphene.UnmountedType]): any
+            additional arguments to mount on the field.
+    """
+
     def __init__(
         self,
-        type,
+        type_,
         args=None,
         resolver=None,
         source=None,

@@ -31,21 +76,21 @@ class Field(MountedType):
         required=False,
         _creation_counter=None,
         default_value=None,
-        **extra_args
+        **extra_args,
     ):
         super(Field, self).__init__(_creation_counter=_creation_counter)
-        assert not args or isinstance(args, Mapping), (
-            'Arguments in a field have to be a mapping, received "{}".'
-        ).format(args)
+        assert not args or isinstance(
+            args, Mapping
+        ), f'Arguments in a field have to be a mapping, received "{args}".'
         assert not (
             source and resolver
         ), "A Field cannot have a source and a resolver in at the same time."
-        assert not callable(default_value), (
-            'The default value can not be a function but received "{}".'
-        ).format(base_type(default_value))
+        assert not callable(
+            default_value
+        ), f'The default value can not be a function but received "{base_type(default_value)}".'
 
         if required:
-            type = NonNull(type)
+            type_ = NonNull(type_)
 
         # Check if name is actually an argument of the field
         if isinstance(name, (Argument, UnmountedType)):

@@ -58,8 +103,8 @@ class Field(MountedType):
             source = None
 
         self.name = name
-        self._type = type
-        self.args = to_arguments(args or OrderedDict(), extra_args)
+        self._type = type_
+        self.args = to_arguments(args or {}, extra_args)
         if source:
             resolver = partial(source_resolver, source)
         self.resolver = resolver

@@ -71,5 +116,24 @@ class Field(MountedType):
     def type(self):
         return get_type(self._type)
 
-    def get_resolver(self, parent_resolver):
+    get_resolver = None
+
+    def wrap_resolve(self, parent_resolver):
+        """
+        Wraps a function resolver, using the ObjectType resolve_{FIELD_NAME}
+        (parent_resolver) if the Field definition has no resolver.
+        """
+        if self.get_resolver is not None:
+            warn_deprecation(
+                "The get_resolver method is being deprecated, please rename it to wrap_resolve."
+            )
+            return self.get_resolver(parent_resolver)
+
         return self.resolver or parent_resolver
+
+    def wrap_subscribe(self, parent_subscribe):
+        """
+        Wraps a function subscribe, using the ObjectType subscribe_{FIELD_NAME}
+        (parent_subscribe) if the Field definition has no subscribe.
+        """
+        return parent_subscribe
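A sketch of the renamed resolver-wrapping hook (not part of the diff; AuthRequiredField and the is_authenticated attribute are made up). Overriding wrap_resolve replaces the old get_resolver override, which now only triggers a deprecation warning:

from functools import wraps

import graphene


class AuthRequiredField(graphene.Field):
    def wrap_resolve(self, parent_resolver):
        resolver = super().wrap_resolve(parent_resolver)

        @wraps(resolver)
        def wrapped(root, info, **args):
            if not getattr(info.context, "is_authenticated", False):
                raise Exception("Not authorized")
            return resolver(root, info, **args)

        return wrapped


class Query(graphene.ObjectType):
    secret = AuthRequiredField(graphene.String)

    def resolve_secret(root, info):
        return "42"


schema = graphene.Schema(query=Query)
ok = schema.execute("{ secret }", context=graphene.Context(is_authenticated=True))
print(ok.data)  # {'secret': '42'}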
@@ -1,12 +1,10 @@
-from __future__ import unicode_literals
-
 from graphql.language.ast import (
-    BooleanValue,
-    FloatValue,
-    IntValue,
-    ListValue,
-    ObjectValue,
-    StringValue,
+    BooleanValueNode,
+    FloatValueNode,
+    IntValueNode,
+    ListValueNode,
+    ObjectValueNode,
+    StringValueNode,
 )
 
 from graphene.types.scalars import MAX_INT, MIN_INT

@@ -29,18 +27,18 @@ class GenericScalar(Scalar):
     parse_value = identity
 
     @staticmethod
-    def parse_literal(ast):
-        if isinstance(ast, (StringValue, BooleanValue)):
+    def parse_literal(ast, _variables=None):
+        if isinstance(ast, (StringValueNode, BooleanValueNode)):
             return ast.value
-        elif isinstance(ast, IntValue):
+        elif isinstance(ast, IntValueNode):
             num = int(ast.value)
             if MIN_INT <= num <= MAX_INT:
                 return num
-        elif isinstance(ast, FloatValue):
+        elif isinstance(ast, FloatValueNode):
             return float(ast.value)
-        elif isinstance(ast, ListValue):
+        elif isinstance(ast, ListValueNode):
             return [GenericScalar.parse_literal(value) for value in ast.values]
-        elif isinstance(ast, ObjectValue):
+        elif isinstance(ast, ObjectValueNode):
             return {
                 field.name.value: GenericScalar.parse_literal(field.value)
                 for field in ast.fields
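A usage sketch for GenericScalar with the renamed AST node classes in place (not part of the diff; the echo field is made up):

import graphene
from graphene.types.generic import GenericScalar


class Query(graphene.ObjectType):
    echo = GenericScalar(value=GenericScalar())

    def resolve_echo(root, info, value=None):
        # ints, floats, strings, booleans, lists and objects all pass through
        return value


schema = graphene.Schema(query=Query)
result = schema.execute('{ echo(value: {answer: 42, tags: ["a", "b"]}) }')
print(result.data)  # {'echo': {'answer': 42, 'tags': ['a', 'b']}}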
Some files were not shown because too many files have changed in this diff.