From 78973964b8bb98b13ffd787237aa932cc2cec8d2 Mon Sep 17 00:00:00 2001 From: Drew Hoover Date: Tue, 21 Sep 2021 13:00:19 -0400 Subject: [PATCH 01/39] fix: update ariadne url to the new docs --- docs/quickstart.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/quickstart.rst b/docs/quickstart.rst index 62d11949..23fd4444 100644 --- a/docs/quickstart.rst +++ b/docs/quickstart.rst @@ -28,7 +28,7 @@ Compare Graphene's *code-first* approach to building a GraphQL API with *schema- .. _Apollo Server: https://www.apollographql.com/docs/apollo-server/ -.. _Ariadne: https://ariadne.readthedocs.io +.. _Ariadne: https://ariadnegraphql.org/ Graphene is fully featured with integrations for the most popular web frameworks and ORMs. Graphene produces schemas that are fully compliant with the GraphQL spec and provides tools and patterns for building a Relay-Compliant API as well. From bf40e6c419c6b224ae482c470161fb76f9b3c4bb Mon Sep 17 00:00:00 2001 From: Ali McMaster Date: Mon, 14 Feb 2022 09:01:42 +0000 Subject: [PATCH 02/39] Update quickstart.rst --- docs/quickstart.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/quickstart.rst b/docs/quickstart.rst index 0b6c6993..b850d58d 100644 --- a/docs/quickstart.rst +++ b/docs/quickstart.rst @@ -59,7 +59,7 @@ When we send a **Query** requesting only one **Field**, ``hello``, and specify a Requirements ~~~~~~~~~~~~ -- Python (2.7, 3.4, 3.5, 3.6, pypy) +- Python (3.6, 3.7, 3.8, 3.9, 3.10, pypy) - Graphene (3.0) Project setup From 0ac4d9397e394b3dfcde981b6e5ec654caed6ab6 Mon Sep 17 00:00:00 2001 From: karming Date: Tue, 16 Aug 2022 19:21:29 -0400 Subject: [PATCH 03/39] fix: use install instead of instal for consistency --- README.md | 2 +- README.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index a7714e33..897ea529 100644 --- a/README.md +++ b/README.md @@ -28,7 +28,7 @@ Also, Graphene is fully compatible with the GraphQL spec, working 
seamlessly wit ## Installation -For instaling graphene, just run this command in your shell +To install `graphene`, just run this command in your shell ```bash pip install "graphene>=3.0" diff --git a/README.rst b/README.rst index 3fb51df2..a38b9376 100644 --- a/README.rst +++ b/README.rst @@ -49,7 +49,7 @@ seamlessly with all GraphQL clients, such as Installation ------------ -For instaling graphene, just run this command in your shell +To install `graphene`, just run this command in your shell .. code:: bash From e6429c3c5b64c6dd60f82fd118e3d3743b058d68 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?U=CC=88lgen=20Sar=C4=B1kavak?= Date: Fri, 19 Aug 2022 09:20:51 +0300 Subject: [PATCH 04/39] Update pre-commit hooks --- .pre-commit-config.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 87fa4872..2ad4e02f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,7 +3,7 @@ default_language_version: repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.2.0 + rev: v4.3.0 hooks: - id: check-merge-conflict - id: check-json @@ -17,14 +17,14 @@ repos: - id: trailing-whitespace exclude: README.md - repo: https://github.com/asottile/pyupgrade - rev: v2.32.1 + rev: v2.37.3 hooks: - id: pyupgrade -- repo: https://github.com/ambv/black - rev: 22.3.0 +- repo: https://github.com/psf/black + rev: 22.6.0 hooks: - id: black - repo: https://github.com/PyCQA/flake8 - rev: 4.0.1 + rev: 5.0.4 hooks: - id: flake8 From cbf59a88ad8acfe613c9cdcf8ae869a76eb541d4 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Sat, 27 Aug 2022 18:06:38 +0200 Subject: [PATCH 05/39] Add Python 3.11 release candidate 1 to the testing (#1450) * Add Python 3.11 release candidate 1 to the testing https://www.python.org/download/pre-releases * Update tests.yml --- .github/workflows/tests.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git 
a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 8a962ac6..51832084 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -25,22 +25,22 @@ jobs: fail-fast: false matrix: include: + - {name: '3.11', python: '3.11-dev', os: ubuntu-latest, tox: py311} - {name: '3.10', python: '3.10', os: ubuntu-latest, tox: py310} - {name: '3.9', python: '3.9', os: ubuntu-latest, tox: py39} - - { name: '3.8', python: '3.8', os: ubuntu-latest, tox: py38 } - - { name: '3.7', python: '3.7', os: ubuntu-latest, tox: py37 } - - { name: '3.6', python: '3.6', os: ubuntu-latest, tox: py36 } + - {name: '3.8', python: '3.8', os: ubuntu-latest, tox: py38} + - {name: '3.7', python: '3.7', os: ubuntu-latest, tox: py37} + - {name: '3.6', python: '3.6', os: ubuntu-latest, tox: py36} steps: - uses: actions/checkout@v3 - - uses: actions/setup-python@v3 + - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python }} - name: update pip run: | - pip install -U wheel - pip install -U setuptools - python -m pip install -U pip + python -m pip install --upgrade pip + pip install --upgrade setuptools wheel - name: get pip cache dir id: pip-cache From 355601bd5cce8c2ec2dacf128e6819af38a0c381 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=9Clgen=20Sar=C4=B1kavak?= Date: Sat, 27 Aug 2022 19:13:48 +0300 Subject: [PATCH 06/39] Remove duplicate flake8 call in tox, it's covered by pre-commit (#1448) --- tox.ini | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/tox.ini b/tox.ini index 07ddc767..d0be5a24 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py3{6,7,8,9,10}, flake8, mypy, pre-commit +envlist = py3{6,7,8,9,10}, mypy, pre-commit skipsdist = true [testenv] @@ -26,12 +26,4 @@ deps = commands = mypy graphene -[testenv:flake8] -basepython = python3.9 -deps = - flake8>=4,<5 -commands = - pip install --pre -e . 
- flake8 graphene - [pytest] From 35c281a3cd1fd83bc71b2d9c4b2160e118a7d484 Mon Sep 17 00:00:00 2001 From: Erik Wrede Date: Sun, 28 Aug 2022 17:30:26 +0200 Subject: [PATCH 07/39] Fix BigInt export (#1456) --- graphene/__init__.py | 2 ++ graphene/types/__init__.py | 3 ++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/graphene/__init__.py b/graphene/__init__.py index bf9831b5..52ed205a 100644 --- a/graphene/__init__.py +++ b/graphene/__init__.py @@ -13,6 +13,7 @@ from .types import ( UUID, Argument, Base64, + BigInt, Boolean, Context, Date, @@ -50,6 +51,7 @@ __all__ = [ "__version__", "Argument", "Base64", + "BigInt", "Boolean", "ClientIDMutation", "Connection", diff --git a/graphene/types/__init__.py b/graphene/types/__init__.py index 2641dd53..70478a05 100644 --- a/graphene/types/__init__.py +++ b/graphene/types/__init__.py @@ -15,7 +15,7 @@ from .interface import Interface from .json import JSONString from .mutation import Mutation from .objecttype import ObjectType -from .scalars import ID, Boolean, Float, Int, Scalar, String +from .scalars import ID, BigInt, Boolean, Float, Int, Scalar, String from .schema import Schema from .structures import List, NonNull from .union import Union @@ -24,6 +24,7 @@ from .uuid import UUID __all__ = [ "Argument", "Base64", + "BigInt", "Boolean", "Context", "Date", From c5ccc9502df2edd949af2f7733bfb659204d9744 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=9Clgen=20Sar=C4=B1kavak?= Date: Sun, 28 Aug 2022 18:33:35 +0300 Subject: [PATCH 08/39] Upgrade base Python version to 3.10 (#1449) --- .github/workflows/deploy.yml | 4 ++-- .github/workflows/lint.yml | 4 ++-- .pre-commit-config.yaml | 2 +- tox.ini | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 07c0766f..12140d88 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -11,10 +11,10 @@ jobs: steps: - uses: actions/checkout@v2 - - name: Set up Python 3.9 + - 
name: Set up Python 3.10 uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.10" - name: Build wheel and source tarball run: | pip install wheel diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index c9efc0cf..d8ebd2f6 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -8,10 +8,10 @@ jobs: steps: - uses: actions/checkout@v2 - - name: Set up Python 3.9 + - name: Set up Python 3.10 uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.10" - name: Install dependencies run: | python -m pip install --upgrade pip diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2ad4e02f..eece56e0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,5 +1,5 @@ default_language_version: - python: python3.9 + python: python3.10 repos: - repo: https://github.com/pre-commit/pre-commit-hooks diff --git a/tox.ini b/tox.ini index d0be5a24..65fceadd 100644 --- a/tox.ini +++ b/tox.ini @@ -11,7 +11,7 @@ commands = py{36,37,38,39,310}: pytest --cov=graphene graphene --cov-report=term --cov-report=xml examples {posargs} [testenv:pre-commit] -basepython = python3.9 +basepython = python3.10 deps = pre-commit>=2.16,<3 setenv = @@ -20,7 +20,7 @@ commands = pre-commit run --all-files --show-diff-on-failure [testenv:mypy] -basepython = python3.9 +basepython = python3.10 deps = mypy>=0.950,<1 commands = From 45986b18e7c4b05a1143c2e5da42224666a7cf07 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Sun, 28 Aug 2022 20:25:55 +0200 Subject: [PATCH 09/39] Upgrade GitHub Actions (#1457) --- .github/workflows/deploy.yml | 4 ++-- .github/workflows/lint.yml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 12140d88..6cce61d5 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -10,9 +10,9 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: 
actions/checkout@v3 - name: Set up Python 3.10 - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: "3.10" - name: Build wheel and source tarball diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index d8ebd2f6..ad5bea6a 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -7,9 +7,9 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python 3.10 - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install dependencies From 20219fdc1bc9ce9ff7213ab03b74607556526724 Mon Sep 17 00:00:00 2001 From: Erik Wrede Date: Tue, 6 Sep 2022 13:42:38 +0200 Subject: [PATCH 10/39] Update README.md Update --- README.md | 27 +++++++++++++++++---------- 1 file changed, 17 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index 897ea529..0456f888 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ [💬 Join the community on Slack](https://join.slack.com/t/graphenetools/shared_invite/enQtOTE2MDQ1NTg4MDM1LTA4Nzk0MGU0NGEwNzUxZGNjNDQ4ZjAwNDJjMjY0OGE1ZDgxZTg4YjM2ZTc4MjE2ZTAzZjE2ZThhZTQzZTkyMmM) -**We are looking for contributors**! Please check the [ROADMAP](https://github.com/graphql-python/graphene/blob/master/ROADMAP.md) to see how you can help ❤️ +**We are looking for contributors**! Please check the current issues to see how you can help ❤️ ## Introduction @@ -10,7 +10,7 @@ - **Easy to use:** Graphene helps you use GraphQL in Python without effort. - **Relay:** Graphene has builtin support for Relay. -- **Data agnostic:** Graphene supports any kind of data source: SQL (Django, SQLAlchemy), NoSQL, custom Python objects, etc. +- **Data agnostic:** Graphene supports any kind of data source: SQL (Django, SQLAlchemy), Mongo, custom Python objects, etc. We believe that by providing a complete API you could plug Graphene anywhere your data lives and make your data available through GraphQL. 
@@ -20,9 +20,10 @@ Graphene has multiple integrations with different frameworks: | integration | Package | | ----------------- | --------------------------------------------------------------------------------------- | -| Django | [graphene-django](https://github.com/graphql-python/graphene-django/) | | SQLAlchemy | [graphene-sqlalchemy](https://github.com/graphql-python/graphene-sqlalchemy/) | -| Google App Engine | [graphene-gae](https://github.com/graphql-python/graphene-gae/) | +| Mongo | [graphene-mongo](https://github.com/graphql-python/graphene-mongo/) | +| Apollo Federation | [graphene-federation](https://github.com/graphql-python/graphene-federation/) | +| Django | [graphene-django](https://github.com/graphql-python/graphene-django/) | Also, Graphene is fully compatible with the GraphQL spec, working seamlessly with all GraphQL clients, such as [Relay](https://github.com/facebook/relay), [Apollo](https://github.com/apollographql/apollo-client) and [gql](https://github.com/graphql-python/gql). @@ -31,7 +32,7 @@ Also, Graphene is fully compatible with the GraphQL spec, working seamlessly wit To install `graphene`, just run this command in your shell ```bash -pip install "graphene>=3.0" +pip install "graphene>=3.1" ``` ## Examples @@ -84,18 +85,24 @@ pip install -e ".[test]" Well-written tests and maintaining good test coverage is important to this project. While developing, run new and existing tests with: ```sh -py.test graphene/relay/tests/test_node.py # Single file -py.test graphene/relay # All tests in directory +pytest graphene/relay/tests/test_node.py # Single file +pytest graphene/relay # All tests in directory ``` Add the `-s` flag if you have introduced breakpoints into the code for debugging. Add the `-v` ("verbose") flag to get more detailed test output. For even more detailed output, use `-vv`. Check out the [pytest documentation](https://docs.pytest.org/en/latest/) for more options and test running controls. 
+Regularly ensure your `pre-commit` hooks are up to date and enabled: + +```sh +pre-commit install +``` + You can also run the benchmarks with: ```sh -py.test graphene --benchmark-only +pytest graphene --benchmark-only ``` Graphene supports several versions of Python. To make sure that changes do not break compatibility with any of those versions, we use `tox` to create virtualenvs for each Python version and run tests with that version. To run against all Python versions defined in the `tox.ini` config file, just run: @@ -107,10 +114,10 @@ tox If you wish to run against a specific version defined in the `tox.ini` file: ```sh -tox -e py36 +tox -e py39 ``` -Tox can only use whatever versions of Python are installed on your system. When you create a pull request, Travis will also be running the same tests and report the results, so there is no need for potential contributors to try to install every single version of Python on their own system ahead of time. We appreciate opening issues and pull requests to make graphene even more stable & useful! +Tox can only use whatever versions of Python are installed on your system. When you create a pull request, GitHub Actions pipelines will also be running the same tests and report the results, so there is no need for potential contributors to try to install every single version of Python on their own system ahead of time. We appreciate opening issues and pull requests to make graphene even more stable & useful! ### Building Documentation From 694c1db21ed73487484256e211fb56bd292eb0fb Mon Sep 17 00:00:00 2001 From: Cadu Date: Wed, 7 Sep 2022 15:32:53 -0300 Subject: [PATCH 11/39] Vendor `DataLoader` from `aiodataloader` and move `get_event_loop()` out of `__init__` function. 
(#1459) * Vendor DataLoader from aiodataloader and also move get_event_loop behavior from `__init__` to a property which only gets resolved when actually needed (this will solve PyTest-related to early get_event_loop() issues) * Added DataLoader's specific tests * plug `loop` parameter into `self._loop`, so that we still have the ability to pass in a custom event loop, if needed. Co-authored-by: Erik Wrede --- graphene/utils/dataloader.py | 281 +++++++++++++++ graphene/utils/tests/test_dataloader.py | 452 ++++++++++++++++++++++++ setup.cfg | 4 + setup.py | 1 - tests_asyncio/test_dataloader.py | 79 ----- 5 files changed, 737 insertions(+), 80 deletions(-) create mode 100644 graphene/utils/dataloader.py create mode 100644 graphene/utils/tests/test_dataloader.py delete mode 100644 tests_asyncio/test_dataloader.py diff --git a/graphene/utils/dataloader.py b/graphene/utils/dataloader.py new file mode 100644 index 00000000..143558aa --- /dev/null +++ b/graphene/utils/dataloader.py @@ -0,0 +1,281 @@ +from asyncio import ( + gather, + ensure_future, + get_event_loop, + iscoroutine, + iscoroutinefunction, +) +from collections import namedtuple +from collections.abc import Iterable +from functools import partial + +from typing import List # flake8: noqa + +Loader = namedtuple("Loader", "key,future") + + +def iscoroutinefunctionorpartial(fn): + return iscoroutinefunction(fn.func if isinstance(fn, partial) else fn) + + +class DataLoader(object): + batch = True + max_batch_size = None # type: int + cache = True + + def __init__( + self, + batch_load_fn=None, + batch=None, + max_batch_size=None, + cache=None, + get_cache_key=None, + cache_map=None, + loop=None, + ): + + self._loop = loop + + if batch_load_fn is not None: + self.batch_load_fn = batch_load_fn + + assert iscoroutinefunctionorpartial( + self.batch_load_fn + ), "batch_load_fn must be coroutine. 
Received: {}".format(self.batch_load_fn) + + if not callable(self.batch_load_fn): + raise TypeError( # pragma: no cover + ( + "DataLoader must be have a batch_load_fn which accepts " + "Iterable and returns Future>, but got: {}." + ).format(batch_load_fn) + ) + + if batch is not None: + self.batch = batch # pragma: no cover + + if max_batch_size is not None: + self.max_batch_size = max_batch_size + + if cache is not None: + self.cache = cache # pragma: no cover + + self.get_cache_key = get_cache_key or (lambda x: x) + + self._cache = cache_map if cache_map is not None else {} + self._queue = [] # type: List[Loader] + + @property + def loop(self): + if not self._loop: + self._loop = get_event_loop() + + return self._loop + + def load(self, key=None): + """ + Loads a key, returning a `Future` for the value represented by that key. + """ + if key is None: + raise TypeError( # pragma: no cover + ( + "The loader.load() function must be called with a value, " + "but got: {}." + ).format(key) + ) + + cache_key = self.get_cache_key(key) + + # If caching and there is a cache-hit, return cached Future. + if self.cache: + cached_result = self._cache.get(cache_key) + if cached_result: + return cached_result + + # Otherwise, produce a new Future for this value. + future = self.loop.create_future() + # If caching, cache this Future. + if self.cache: + self._cache[cache_key] = future + + self.do_resolve_reject(key, future) + return future + + def do_resolve_reject(self, key, future): + # Enqueue this Future to be dispatched. + self._queue.append(Loader(key=key, future=future)) + # Determine if a dispatch of this queue should be scheduled. + # A single dispatch should be scheduled per queue at the time when the + # queue changes from "empty" to "full". + if len(self._queue) == 1: + if self.batch: + # If batching, schedule a task to dispatch the queue. + enqueue_post_future_job(self.loop, self) + else: + # Otherwise dispatch the (queue of one) immediately. 
+ dispatch_queue(self) # pragma: no cover + + def load_many(self, keys): + """ + Loads multiple keys, returning a list of values + + >>> a, b = await my_loader.load_many([ 'a', 'b' ]) + + This is equivalent to the more verbose: + + >>> a, b = await gather( + >>> my_loader.load('a'), + >>> my_loader.load('b') + >>> ) + """ + if not isinstance(keys, Iterable): + raise TypeError( # pragma: no cover + ( + "The loader.load_many() function must be called with Iterable " + "but got: {}." + ).format(keys) + ) + + return gather(*[self.load(key) for key in keys]) + + def clear(self, key): + """ + Clears the value at `key` from the cache, if it exists. Returns itself for + method chaining. + """ + cache_key = self.get_cache_key(key) + self._cache.pop(cache_key, None) + return self + + def clear_all(self): + """ + Clears the entire cache. To be used when some event results in unknown + invalidations across this particular `DataLoader`. Returns itself for + method chaining. + """ + self._cache.clear() + return self + + def prime(self, key, value): + """ + Adds the provied key and value to the cache. If the key already exists, no + change is made. Returns itself for method chaining. + """ + cache_key = self.get_cache_key(key) + + # Only add the key if it does not already exist. + if cache_key not in self._cache: + # Cache a rejected future if the value is an Error, in order to match + # the behavior of load(key). 
+ future = self.loop.create_future() + if isinstance(value, Exception): + future.set_exception(value) + else: + future.set_result(value) + + self._cache[cache_key] = future + + return self + + +def enqueue_post_future_job(loop, loader): + async def dispatch(): + dispatch_queue(loader) + + loop.call_soon(ensure_future, dispatch()) + + +def get_chunks(iterable_obj, chunk_size=1): + chunk_size = max(1, chunk_size) + return ( + iterable_obj[i : i + chunk_size] + for i in range(0, len(iterable_obj), chunk_size) + ) + + +def dispatch_queue(loader): + """ + Given the current state of a Loader instance, perform a batch load + from its current queue. + """ + # Take the current loader queue, replacing it with an empty queue. + queue = loader._queue + loader._queue = [] + + # If a max_batch_size was provided and the queue is longer, then segment the + # queue into multiple batches, otherwise treat the queue as a single batch. + max_batch_size = loader.max_batch_size + + if max_batch_size and max_batch_size < len(queue): + chunks = get_chunks(queue, max_batch_size) + for chunk in chunks: + ensure_future(dispatch_queue_batch(loader, chunk)) + else: + ensure_future(dispatch_queue_batch(loader, queue)) + + +async def dispatch_queue_batch(loader, queue): + # Collect all keys to be loaded in this dispatch + keys = [loaded.key for loaded in queue] + + # Call the provided batch_load_fn for this loader with the loader queue's keys. + batch_future = loader.batch_load_fn(keys) + + # Assert the expected response from batch_load_fn + if not batch_future or not iscoroutine(batch_future): + return failed_dispatch( # pragma: no cover + loader, + queue, + TypeError( + ( + "DataLoader must be constructed with a function which accepts " + "Iterable and returns Future>, but the function did " + "not return a Coroutine: {}." 
+ ).format(batch_future) + ), + ) + + try: + values = await batch_future + if not isinstance(values, Iterable): + raise TypeError( # pragma: no cover + ( + "DataLoader must be constructed with a function which accepts " + "Iterable and returns Future>, but the function did " + "not return a Future of a Iterable: {}." + ).format(values) + ) + + values = list(values) + if len(values) != len(keys): + raise TypeError( # pragma: no cover + ( + "DataLoader must be constructed with a function which accepts " + "Iterable and returns Future>, but the function did " + "not return a Future of a Iterable with the same length as the Iterable " + "of keys." + "\n\nKeys:\n{}" + "\n\nValues:\n{}" + ).format(keys, values) + ) + + # Step through the values, resolving or rejecting each Future in the + # loaded queue. + for loaded, value in zip(queue, values): + if isinstance(value, Exception): + loaded.future.set_exception(value) + else: + loaded.future.set_result(value) + + except Exception as e: + return failed_dispatch(loader, queue, e) + + +def failed_dispatch(loader, queue, error): + """ + Do not cache individual loads if the entire batch dispatch fails, + but still reject each request so they do not hang. 
+ """ + for loaded in queue: + loader.clear(loaded.key) + loaded.future.set_exception(error) diff --git a/graphene/utils/tests/test_dataloader.py b/graphene/utils/tests/test_dataloader.py new file mode 100644 index 00000000..257f6b4d --- /dev/null +++ b/graphene/utils/tests/test_dataloader.py @@ -0,0 +1,452 @@ +from asyncio import gather +from collections import namedtuple +from functools import partial +from unittest.mock import Mock + +from graphene.utils.dataloader import DataLoader +from pytest import mark, raises + +from graphene import ObjectType, String, Schema, Field, List + +CHARACTERS = { + "1": {"name": "Luke Skywalker", "sibling": "3"}, + "2": {"name": "Darth Vader", "sibling": None}, + "3": {"name": "Leia Organa", "sibling": "1"}, +} + +get_character = Mock(side_effect=lambda character_id: CHARACTERS[character_id]) + + +class CharacterType(ObjectType): + name = String() + sibling = Field(lambda: CharacterType) + + async def resolve_sibling(character, info): + if character["sibling"]: + return await info.context.character_loader.load(character["sibling"]) + return None + + +class Query(ObjectType): + skywalker_family = List(CharacterType) + + async def resolve_skywalker_family(_, info): + return await info.context.character_loader.load_many(["1", "2", "3"]) + + +mock_batch_load_fn = Mock( + side_effect=lambda character_ids: [get_character(id) for id in character_ids] +) + + +class CharacterLoader(DataLoader): + async def batch_load_fn(self, character_ids): + return mock_batch_load_fn(character_ids) + + +Context = namedtuple("Context", "character_loader") + + +@mark.asyncio +async def test_basic_dataloader(): + schema = Schema(query=Query) + + character_loader = CharacterLoader() + context = Context(character_loader=character_loader) + + query = """ + { + skywalkerFamily { + name + sibling { + name + } + } + } + """ + + result = await schema.execute_async(query, context=context) + + assert not result.errors + assert result.data == { + "skywalkerFamily": 
[ + {"name": "Luke Skywalker", "sibling": {"name": "Leia Organa"}}, + {"name": "Darth Vader", "sibling": None}, + {"name": "Leia Organa", "sibling": {"name": "Luke Skywalker"}}, + ] + } + + assert mock_batch_load_fn.call_count == 1 + assert get_character.call_count == 3 + + +def id_loader(**options): + load_calls = [] + + async def default_resolve(x): + return x + + resolve = options.pop("resolve", default_resolve) + + async def fn(keys): + load_calls.append(keys) + return await resolve(keys) + # return keys + + identity_loader = DataLoader(fn, **options) + return identity_loader, load_calls + + +@mark.asyncio +async def test_build_a_simple_data_loader(): + async def call_fn(keys): + return keys + + identity_loader = DataLoader(call_fn) + + promise1 = identity_loader.load(1) + + value1 = await promise1 + assert value1 == 1 + + +@mark.asyncio +async def test_can_build_a_data_loader_from_a_partial(): + value_map = {1: "one"} + + async def call_fn(context, keys): + return [context.get(key) for key in keys] + + partial_fn = partial(call_fn, value_map) + identity_loader = DataLoader(partial_fn) + + promise1 = identity_loader.load(1) + + value1 = await promise1 + assert value1 == "one" + + +@mark.asyncio +async def test_supports_loading_multiple_keys_in_one_call(): + async def call_fn(keys): + return keys + + identity_loader = DataLoader(call_fn) + + promise_all = identity_loader.load_many([1, 2]) + + values = await promise_all + assert values == [1, 2] + + promise_all = identity_loader.load_many([]) + + values = await promise_all + assert values == [] + + +@mark.asyncio +async def test_batches_multiple_requests(): + identity_loader, load_calls = id_loader() + + promise1 = identity_loader.load(1) + promise2 = identity_loader.load(2) + + p = gather(promise1, promise2) + + value1, value2 = await p + + assert value1 == 1 + assert value2 == 2 + + assert load_calls == [[1, 2]] + + +@mark.asyncio +async def test_batches_multiple_requests_with_max_batch_sizes(): + 
identity_loader, load_calls = id_loader(max_batch_size=2) + + promise1 = identity_loader.load(1) + promise2 = identity_loader.load(2) + promise3 = identity_loader.load(3) + + p = gather(promise1, promise2, promise3) + + value1, value2, value3 = await p + + assert value1 == 1 + assert value2 == 2 + assert value3 == 3 + + assert load_calls == [[1, 2], [3]] + + +@mark.asyncio +async def test_coalesces_identical_requests(): + identity_loader, load_calls = id_loader() + + promise1 = identity_loader.load(1) + promise2 = identity_loader.load(1) + + assert promise1 == promise2 + p = gather(promise1, promise2) + + value1, value2 = await p + + assert value1 == 1 + assert value2 == 1 + + assert load_calls == [[1]] + + +@mark.asyncio +async def test_caches_repeated_requests(): + identity_loader, load_calls = id_loader() + + a, b = await gather(identity_loader.load("A"), identity_loader.load("B")) + + assert a == "A" + assert b == "B" + + assert load_calls == [["A", "B"]] + + a2, c = await gather(identity_loader.load("A"), identity_loader.load("C")) + + assert a2 == "A" + assert c == "C" + + assert load_calls == [["A", "B"], ["C"]] + + a3, b2, c2 = await gather( + identity_loader.load("A"), identity_loader.load("B"), identity_loader.load("C") + ) + + assert a3 == "A" + assert b2 == "B" + assert c2 == "C" + + assert load_calls == [["A", "B"], ["C"]] + + +@mark.asyncio +async def test_clears_single_value_in_loader(): + identity_loader, load_calls = id_loader() + + a, b = await gather(identity_loader.load("A"), identity_loader.load("B")) + + assert a == "A" + assert b == "B" + + assert load_calls == [["A", "B"]] + + identity_loader.clear("A") + + a2, b2 = await gather(identity_loader.load("A"), identity_loader.load("B")) + + assert a2 == "A" + assert b2 == "B" + + assert load_calls == [["A", "B"], ["A"]] + + +@mark.asyncio +async def test_clears_all_values_in_loader(): + identity_loader, load_calls = id_loader() + + a, b = await gather(identity_loader.load("A"), 
identity_loader.load("B")) + + assert a == "A" + assert b == "B" + + assert load_calls == [["A", "B"]] + + identity_loader.clear_all() + + a2, b2 = await gather(identity_loader.load("A"), identity_loader.load("B")) + + assert a2 == "A" + assert b2 == "B" + + assert load_calls == [["A", "B"], ["A", "B"]] + + +@mark.asyncio +async def test_allows_priming_the_cache(): + identity_loader, load_calls = id_loader() + + identity_loader.prime("A", "A") + + a, b = await gather(identity_loader.load("A"), identity_loader.load("B")) + + assert a == "A" + assert b == "B" + + assert load_calls == [["B"]] + + +@mark.asyncio +async def test_does_not_prime_keys_that_already_exist(): + identity_loader, load_calls = id_loader() + + identity_loader.prime("A", "X") + + a1 = await identity_loader.load("A") + b1 = await identity_loader.load("B") + + assert a1 == "X" + assert b1 == "B" + + identity_loader.prime("A", "Y") + identity_loader.prime("B", "Y") + + a2 = await identity_loader.load("A") + b2 = await identity_loader.load("B") + + assert a2 == "X" + assert b2 == "B" + + assert load_calls == [["B"]] + + +# # Represents Errors +@mark.asyncio +async def test_resolves_to_error_to_indicate_failure(): + async def resolve(keys): + mapped_keys = [ + key if key % 2 == 0 else Exception("Odd: {}".format(key)) for key in keys + ] + return mapped_keys + + even_loader, load_calls = id_loader(resolve=resolve) + + with raises(Exception) as exc_info: + await even_loader.load(1) + + assert str(exc_info.value) == "Odd: 1" + + value2 = await even_loader.load(2) + assert value2 == 2 + assert load_calls == [[1], [2]] + + +@mark.asyncio +async def test_can_represent_failures_and_successes_simultaneously(): + async def resolve(keys): + mapped_keys = [ + key if key % 2 == 0 else Exception("Odd: {}".format(key)) for key in keys + ] + return mapped_keys + + even_loader, load_calls = id_loader(resolve=resolve) + + promise1 = even_loader.load(1) + promise2 = even_loader.load(2) + + with raises(Exception) as 
exc_info: + await promise1 + + assert str(exc_info.value) == "Odd: 1" + value2 = await promise2 + assert value2 == 2 + assert load_calls == [[1, 2]] + + +@mark.asyncio +async def test_caches_failed_fetches(): + async def resolve(keys): + mapped_keys = [Exception("Error: {}".format(key)) for key in keys] + return mapped_keys + + error_loader, load_calls = id_loader(resolve=resolve) + + with raises(Exception) as exc_info: + await error_loader.load(1) + + assert str(exc_info.value) == "Error: 1" + + with raises(Exception) as exc_info: + await error_loader.load(1) + + assert str(exc_info.value) == "Error: 1" + + assert load_calls == [[1]] + + +@mark.asyncio +async def test_caches_failed_fetches_2(): + identity_loader, load_calls = id_loader() + + identity_loader.prime(1, Exception("Error: 1")) + + with raises(Exception) as _: + await identity_loader.load(1) + + assert load_calls == [] + + +# It is resilient to job queue ordering +@mark.asyncio +async def test_batches_loads_occuring_within_promises(): + identity_loader, load_calls = id_loader() + + async def load_b_1(): + return await load_b_2() + + async def load_b_2(): + return await identity_loader.load("B") + + values = await gather(identity_loader.load("A"), load_b_1()) + + assert values == ["A", "B"] + + assert load_calls == [["A", "B"]] + + +@mark.asyncio +async def test_catches_error_if_loader_resolver_fails(): + exc = Exception("AOH!") + + def do_resolve(x): + raise exc + + a_loader, a_load_calls = id_loader(resolve=do_resolve) + + with raises(Exception) as exc_info: + await a_loader.load("A1") + + assert exc_info.value == exc + + +@mark.asyncio +async def test_can_call_a_loader_from_a_loader(): + deep_loader, deep_load_calls = id_loader() + a_loader, a_load_calls = id_loader( + resolve=lambda keys: deep_loader.load(tuple(keys)) + ) + b_loader, b_load_calls = id_loader( + resolve=lambda keys: deep_loader.load(tuple(keys)) + ) + + a1, b1, a2, b2 = await gather( + a_loader.load("A1"), + b_loader.load("B1"), + 
a_loader.load("A2"), + b_loader.load("B2"), + ) + + assert a1 == "A1" + assert b1 == "B1" + assert a2 == "A2" + assert b2 == "B2" + + assert a_load_calls == [["A1", "A2"]] + assert b_load_calls == [["B1", "B2"]] + assert deep_load_calls == [[("A1", "A2"), ("B1", "B2")]] + + +@mark.asyncio +async def test_dataloader_clear_with_missing_key_works(): + async def do_resolve(x): + return x + + a_loader, a_load_calls = id_loader(resolve=do_resolve) + assert a_loader.clear("A1") == a_loader diff --git a/setup.cfg b/setup.cfg index 2037bc1b..db1ff134 100644 --- a/setup.cfg +++ b/setup.cfg @@ -2,6 +2,10 @@ exclude = setup.py,docs/*,*/examples/*,graphene/pyutils/*,tests max-line-length = 120 +# This is a specific ignore for Black+Flake8 +# source: https://black.readthedocs.io/en/stable/guides/using_black_with_other_tools.html#id1 +extend-ignore = E203 + [coverage:run] omit = graphene/pyutils/*,*/tests/*,graphene/types/scalars.py diff --git a/setup.py b/setup.py index b87f56cc..dce6aa6c 100644 --- a/setup.py +++ b/setup.py @@ -53,7 +53,6 @@ tests_require = [ "snapshottest>=0.6,<1", "coveralls>=3.3,<4", "promise>=2.3,<3", - "aiodataloader<1", "mock>=4,<5", "pytz==2022.1", "iso8601>=1,<2", diff --git a/tests_asyncio/test_dataloader.py b/tests_asyncio/test_dataloader.py deleted file mode 100644 index fb8d1630..00000000 --- a/tests_asyncio/test_dataloader.py +++ /dev/null @@ -1,79 +0,0 @@ -from collections import namedtuple -from unittest.mock import Mock -from pytest import mark -from aiodataloader import DataLoader - -from graphene import ObjectType, String, Schema, Field, List - - -CHARACTERS = { - "1": {"name": "Luke Skywalker", "sibling": "3"}, - "2": {"name": "Darth Vader", "sibling": None}, - "3": {"name": "Leia Organa", "sibling": "1"}, -} - - -get_character = Mock(side_effect=lambda character_id: CHARACTERS[character_id]) - - -class CharacterType(ObjectType): - name = String() - sibling = Field(lambda: CharacterType) - - async def resolve_sibling(character, info): - if 
character["sibling"]: - return await info.context.character_loader.load(character["sibling"]) - return None - - -class Query(ObjectType): - skywalker_family = List(CharacterType) - - async def resolve_skywalker_family(_, info): - return await info.context.character_loader.load_many(["1", "2", "3"]) - - -mock_batch_load_fn = Mock( - side_effect=lambda character_ids: [get_character(id) for id in character_ids] -) - - -class CharacterLoader(DataLoader): - async def batch_load_fn(self, character_ids): - return mock_batch_load_fn(character_ids) - - -Context = namedtuple("Context", "character_loader") - - -@mark.asyncio -async def test_basic_dataloader(): - schema = Schema(query=Query) - - character_loader = CharacterLoader() - context = Context(character_loader=character_loader) - - query = """ - { - skywalkerFamily { - name - sibling { - name - } - } - } - """ - - result = await schema.execute_async(query, context=context) - - assert not result.errors - assert result.data == { - "skywalkerFamily": [ - {"name": "Luke Skywalker", "sibling": {"name": "Leia Organa"}}, - {"name": "Darth Vader", "sibling": None}, - {"name": "Leia Organa", "sibling": {"name": "Luke Skywalker"}}, - ] - } - - assert mock_batch_load_fn.call_count == 1 - assert get_character.call_count == 3 From b20bbdcdf728eb1e009edb20da62df1f13fb165f Mon Sep 17 00:00:00 2001 From: Erik Wrede Date: Thu, 8 Sep 2022 10:55:05 +0200 Subject: [PATCH 12/39] v3.1.1 --- graphene/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/graphene/__init__.py b/graphene/__init__.py index 52ed205a..aeb6d6d2 100644 --- a/graphene/__init__.py +++ b/graphene/__init__.py @@ -42,7 +42,7 @@ from .types import ( from .utils.module_loading import lazy_import from .utils.resolve_only_args import resolve_only_args -VERSION = (3, 1, 0, "final", 0) +VERSION = (3, 1, 1, "final", 0) __version__ = get_version(VERSION) From ee1ff975d71f6590eb6933d76d12054c9839774a Mon Sep 17 00:00:00 2001 From: Thomas Leonard 
<64223923+tcleonard@users.noreply.github.com> Date: Mon, 19 Sep 2022 10:17:31 +0200 Subject: [PATCH 13/39] feat: Add support for custom global (Issue #1276) (#1428) Co-authored-by: Thomas Leonard --- .github/workflows/tests.yml | 2 +- Makefile | 1 + graphene/__init__.py | 10 +- graphene/relay/__init__.py | 16 +- graphene/relay/id_type.py | 87 +++++ graphene/relay/node.py | 60 ++-- graphene/relay/tests/test_custom_global_id.py | 325 ++++++++++++++++++ graphene/relay/tests/test_node.py | 1 + 8 files changed, 472 insertions(+), 30 deletions(-) create mode 100644 graphene/relay/id_type.py create mode 100644 graphene/relay/tests/test_custom_global_id.py diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 51832084..9df18f99 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -58,7 +58,7 @@ jobs: if: ${{ matrix.python == '3.10' }} uses: actions/upload-artifact@v3 with: - name: graphene-sqlalchemy-coverage + name: graphene-coverage path: coverage.xml if-no-files-found: error - name: Upload coverage.xml to codecov diff --git a/Makefile b/Makefile index c78e2b4f..08947707 100644 --- a/Makefile +++ b/Makefile @@ -7,6 +7,7 @@ help: install-dev: pip install -e ".[dev]" +.PHONY: test ## Run tests test: py.test graphene examples diff --git a/graphene/__init__.py b/graphene/__init__.py index aeb6d6d2..af83f059 100644 --- a/graphene/__init__.py +++ b/graphene/__init__.py @@ -1,11 +1,15 @@ from .pyutils.version import get_version from .relay import ( + BaseGlobalIDType, ClientIDMutation, Connection, ConnectionField, + DefaultGlobalIDType, GlobalID, Node, PageInfo, + SimpleGlobalIDType, + UUIDGlobalIDType, is_node, ) from .types import ( @@ -52,6 +56,7 @@ __all__ = [ "Argument", "Base64", "BigInt", + "BaseGlobalIDType", "Boolean", "ClientIDMutation", "Connection", @@ -60,6 +65,7 @@ __all__ = [ "Date", "DateTime", "Decimal", + "DefaultGlobalIDType", "Dynamic", "Enum", "Field", @@ -80,10 +86,12 @@ __all__ = [ "ResolveInfo", "Scalar", 
"Schema", + "SimpleGlobalIDType", "String", "Time", - "UUID", "Union", + "UUID", + "UUIDGlobalIDType", "is_node", "lazy_import", "resolve_only_args", diff --git a/graphene/relay/__init__.py b/graphene/relay/__init__.py index 7238fa72..3b842cf5 100644 --- a/graphene/relay/__init__.py +++ b/graphene/relay/__init__.py @@ -1,13 +1,23 @@ from .node import Node, is_node, GlobalID from .mutation import ClientIDMutation from .connection import Connection, ConnectionField, PageInfo +from .id_type import ( + BaseGlobalIDType, + DefaultGlobalIDType, + SimpleGlobalIDType, + UUIDGlobalIDType, +) __all__ = [ - "Node", - "is_node", - "GlobalID", + "BaseGlobalIDType", "ClientIDMutation", "Connection", "ConnectionField", + "DefaultGlobalIDType", + "GlobalID", + "Node", "PageInfo", + "SimpleGlobalIDType", + "UUIDGlobalIDType", + "is_node", ] diff --git a/graphene/relay/id_type.py b/graphene/relay/id_type.py new file mode 100644 index 00000000..fb5c30e7 --- /dev/null +++ b/graphene/relay/id_type.py @@ -0,0 +1,87 @@ +from graphql_relay import from_global_id, to_global_id + +from ..types import ID, UUID +from ..types.base import BaseType + +from typing import Type + + +class BaseGlobalIDType: + """ + Base class that define the required attributes/method for a type. + """ + + graphene_type = ID # type: Type[BaseType] + + @classmethod + def resolve_global_id(cls, info, global_id): + # return _type, _id + raise NotImplementedError + + @classmethod + def to_global_id(cls, _type, _id): + # return _id + raise NotImplementedError + + +class DefaultGlobalIDType(BaseGlobalIDType): + """ + Default global ID type: base64 encoded version of ": ". + """ + + graphene_type = ID + + @classmethod + def resolve_global_id(cls, info, global_id): + try: + _type, _id = from_global_id(global_id) + if not _type: + raise ValueError("Invalid Global ID") + return _type, _id + except Exception as e: + raise Exception( + f'Unable to parse global ID "{global_id}". 
' + 'Make sure it is a base64 encoded string in the format: "TypeName:id". ' + f"Exception message: {e}" + ) + + @classmethod + def to_global_id(cls, _type, _id): + return to_global_id(_type, _id) + + +class SimpleGlobalIDType(BaseGlobalIDType): + """ + Simple global ID type: simply the id of the object. + To be used carefully as the user is responsible for ensuring that the IDs are indeed global + (otherwise it could cause request caching issues). + """ + + graphene_type = ID + + @classmethod + def resolve_global_id(cls, info, global_id): + _type = info.return_type.graphene_type._meta.name + return _type, global_id + + @classmethod + def to_global_id(cls, _type, _id): + return _id + + +class UUIDGlobalIDType(BaseGlobalIDType): + """ + UUID global ID type. + By definition UUID are global so they are used as they are. + """ + + graphene_type = UUID + + @classmethod + def resolve_global_id(cls, info, global_id): + _type = info.return_type.graphene_type._meta.name + return _type, global_id + + @classmethod + def to_global_id(cls, _type, _id): + return _id diff --git a/graphene/relay/node.py b/graphene/relay/node.py index dabcff6c..54438281 100644 --- a/graphene/relay/node.py +++ b/graphene/relay/node.py @@ -1,11 +1,10 @@ from functools import partial from inspect import isclass -from graphql_relay import from_global_id, to_global_id - -from ..types import ID, Field, Interface, ObjectType +from ..types import Field, Interface, ObjectType from ..types.interface import InterfaceOptions from ..types.utils import get_type +from .id_type import BaseGlobalIDType, DefaultGlobalIDType def is_node(objecttype): @@ -22,8 +21,18 @@ def is_node(objecttype): class GlobalID(Field): - def __init__(self, node=None, parent_type=None, required=True, *args, **kwargs): - super(GlobalID, self).__init__(ID, required=required, *args, **kwargs) + def __init__( + self, + node=None, + parent_type=None, + required=True, + global_id_type=DefaultGlobalIDType, + *args, + **kwargs, + ): + 
super(GlobalID, self).__init__( + global_id_type.graphene_type, required=required, *args, **kwargs + ) self.node = node or Node self.parent_type_name = parent_type._meta.name if parent_type else None @@ -47,12 +56,14 @@ class NodeField(Field): assert issubclass(node, Node), "NodeField can only operate in Nodes" self.node_type = node self.field_type = type_ + global_id_type = node._meta.global_id_type super(NodeField, self).__init__( - # If we don's specify a type, the field type will be the node - # interface + # If we don't specify a type, the field type will be the node interface type_ or node, - id=ID(required=True, description="The ID of the object"), + id=global_id_type.graphene_type( + required=True, description="The ID of the object" + ), **kwargs, ) @@ -65,11 +76,23 @@ class AbstractNode(Interface): abstract = True @classmethod - def __init_subclass_with_meta__(cls, **options): + def __init_subclass_with_meta__(cls, global_id_type=DefaultGlobalIDType, **options): + assert issubclass( + global_id_type, BaseGlobalIDType + ), "Custom ID type need to be implemented as a subclass of BaseGlobalIDType." _meta = InterfaceOptions(cls) - _meta.fields = {"id": GlobalID(cls, description="The ID of the object")} + _meta.global_id_type = global_id_type + _meta.fields = { + "id": GlobalID( + cls, global_id_type=global_id_type, description="The ID of the object" + ) + } super(AbstractNode, cls).__init_subclass_with_meta__(_meta=_meta, **options) + @classmethod + def resolve_global_id(cls, info, global_id): + return cls._meta.global_id_type.resolve_global_id(info, global_id) + class Node(AbstractNode): """An object with an ID""" @@ -84,16 +107,7 @@ class Node(AbstractNode): @classmethod def get_node_from_global_id(cls, info, global_id, only_type=None): - try: - _type, _id = cls.from_global_id(global_id) - if not _type: - raise ValueError("Invalid Global ID") - except Exception as e: - raise Exception( - f'Unable to parse global ID "{global_id}". 
' - 'Make sure it is a base64 encoded string in the format: "TypeName:id". ' - f"Exception message: {e}" - ) + _type, _id = cls.resolve_global_id(info, global_id) graphene_type = info.schema.get_type(_type) if graphene_type is None: @@ -116,10 +130,6 @@ class Node(AbstractNode): if get_node: return get_node(info, _id) - @classmethod - def from_global_id(cls, global_id): - return from_global_id(global_id) - @classmethod def to_global_id(cls, type_, id): - return to_global_id(type_, id) + return cls._meta.global_id_type.to_global_id(type_, id) diff --git a/graphene/relay/tests/test_custom_global_id.py b/graphene/relay/tests/test_custom_global_id.py new file mode 100644 index 00000000..c1bf0fb4 --- /dev/null +++ b/graphene/relay/tests/test_custom_global_id.py @@ -0,0 +1,325 @@ +import re +from uuid import uuid4 + +from graphql import graphql_sync + +from ..id_type import BaseGlobalIDType, SimpleGlobalIDType, UUIDGlobalIDType +from ..node import Node +from ...types import Int, ObjectType, Schema, String + + +class TestUUIDGlobalID: + def setup(self): + self.user_list = [ + {"id": uuid4(), "name": "First"}, + {"id": uuid4(), "name": "Second"}, + {"id": uuid4(), "name": "Third"}, + {"id": uuid4(), "name": "Fourth"}, + ] + self.users = {user["id"]: user for user in self.user_list} + + class CustomNode(Node): + class Meta: + global_id_type = UUIDGlobalIDType + + class User(ObjectType): + class Meta: + interfaces = [CustomNode] + + name = String() + + @classmethod + def get_node(cls, _type, _id): + return self.users[_id] + + class RootQuery(ObjectType): + user = CustomNode.Field(User) + + self.schema = Schema(query=RootQuery, types=[User]) + self.graphql_schema = self.schema.graphql_schema + + def test_str_schema_correct(self): + """ + Check that the schema has the expected and custom node interface and user type and that they both use UUIDs + """ + parsed = re.findall(r"(.+) \{\n\s*([\w\W]*?)\n\}", str(self.schema)) + types = [t for t, f in parsed] + fields = [f for t, f 
in parsed] + custom_node_interface = "interface CustomNode" + assert custom_node_interface in types + assert ( + '"""The ID of the object"""\n id: UUID!' + == fields[types.index(custom_node_interface)] + ) + user_type = "type User implements CustomNode" + assert user_type in types + assert ( + '"""The ID of the object"""\n id: UUID!\n name: String' + == fields[types.index(user_type)] + ) + + def test_get_by_id(self): + query = """query userById($id: UUID!) { + user(id: $id) { + id + name + } + }""" + # UUID need to be converted to string for serialization + result = graphql_sync( + self.graphql_schema, + query, + variable_values={"id": str(self.user_list[0]["id"])}, + ) + assert not result.errors + assert result.data["user"]["id"] == str(self.user_list[0]["id"]) + assert result.data["user"]["name"] == self.user_list[0]["name"] + + +class TestSimpleGlobalID: + def setup(self): + self.user_list = [ + {"id": "my global primary key in clear 1", "name": "First"}, + {"id": "my global primary key in clear 2", "name": "Second"}, + {"id": "my global primary key in clear 3", "name": "Third"}, + {"id": "my global primary key in clear 4", "name": "Fourth"}, + ] + self.users = {user["id"]: user for user in self.user_list} + + class CustomNode(Node): + class Meta: + global_id_type = SimpleGlobalIDType + + class User(ObjectType): + class Meta: + interfaces = [CustomNode] + + name = String() + + @classmethod + def get_node(cls, _type, _id): + return self.users[_id] + + class RootQuery(ObjectType): + user = CustomNode.Field(User) + + self.schema = Schema(query=RootQuery, types=[User]) + self.graphql_schema = self.schema.graphql_schema + + def test_str_schema_correct(self): + """ + Check that the schema has the expected and custom node interface and user type and that they both use UUIDs + """ + parsed = re.findall(r"(.+) \{\n\s*([\w\W]*?)\n\}", str(self.schema)) + types = [t for t, f in parsed] + fields = [f for t, f in parsed] + custom_node_interface = "interface CustomNode" + 
assert custom_node_interface in types + assert ( + '"""The ID of the object"""\n id: ID!' + == fields[types.index(custom_node_interface)] + ) + user_type = "type User implements CustomNode" + assert user_type in types + assert ( + '"""The ID of the object"""\n id: ID!\n name: String' + == fields[types.index(user_type)] + ) + + def test_get_by_id(self): + query = """query { + user(id: "my global primary key in clear 3") { + id + name + } + }""" + result = graphql_sync(self.graphql_schema, query) + assert not result.errors + assert result.data["user"]["id"] == self.user_list[2]["id"] + assert result.data["user"]["name"] == self.user_list[2]["name"] + + +class TestCustomGlobalID: + def setup(self): + self.user_list = [ + {"id": 1, "name": "First"}, + {"id": 2, "name": "Second"}, + {"id": 3, "name": "Third"}, + {"id": 4, "name": "Fourth"}, + ] + self.users = {user["id"]: user for user in self.user_list} + + class CustomGlobalIDType(BaseGlobalIDType): + """ + Global id that is simply and integer in clear. 
+ """ + + graphene_type = Int + + @classmethod + def resolve_global_id(cls, info, global_id): + _type = info.return_type.graphene_type._meta.name + return _type, global_id + + @classmethod + def to_global_id(cls, _type, _id): + return _id + + class CustomNode(Node): + class Meta: + global_id_type = CustomGlobalIDType + + class User(ObjectType): + class Meta: + interfaces = [CustomNode] + + name = String() + + @classmethod + def get_node(cls, _type, _id): + return self.users[_id] + + class RootQuery(ObjectType): + user = CustomNode.Field(User) + + self.schema = Schema(query=RootQuery, types=[User]) + self.graphql_schema = self.schema.graphql_schema + + def test_str_schema_correct(self): + """ + Check that the schema has the expected and custom node interface and user type and that they both use UUIDs + """ + parsed = re.findall(r"(.+) \{\n\s*([\w\W]*?)\n\}", str(self.schema)) + types = [t for t, f in parsed] + fields = [f for t, f in parsed] + custom_node_interface = "interface CustomNode" + assert custom_node_interface in types + assert ( + '"""The ID of the object"""\n id: Int!' 
+ == fields[types.index(custom_node_interface)] + ) + user_type = "type User implements CustomNode" + assert user_type in types + assert ( + '"""The ID of the object"""\n id: Int!\n name: String' + == fields[types.index(user_type)] + ) + + def test_get_by_id(self): + query = """query { + user(id: 2) { + id + name + } + }""" + result = graphql_sync(self.graphql_schema, query) + assert not result.errors + assert result.data["user"]["id"] == self.user_list[1]["id"] + assert result.data["user"]["name"] == self.user_list[1]["name"] + + +class TestIncompleteCustomGlobalID: + def setup(self): + self.user_list = [ + {"id": 1, "name": "First"}, + {"id": 2, "name": "Second"}, + {"id": 3, "name": "Third"}, + {"id": 4, "name": "Fourth"}, + ] + self.users = {user["id"]: user for user in self.user_list} + + def test_must_define_to_global_id(self): + """ + Test that if the `to_global_id` method is not defined, we can query the object, but we can't request its ID. + """ + + class CustomGlobalIDType(BaseGlobalIDType): + graphene_type = Int + + @classmethod + def resolve_global_id(cls, info, global_id): + _type = info.return_type.graphene_type._meta.name + return _type, global_id + + class CustomNode(Node): + class Meta: + global_id_type = CustomGlobalIDType + + class User(ObjectType): + class Meta: + interfaces = [CustomNode] + + name = String() + + @classmethod + def get_node(cls, _type, _id): + return self.users[_id] + + class RootQuery(ObjectType): + user = CustomNode.Field(User) + + self.schema = Schema(query=RootQuery, types=[User]) + self.graphql_schema = self.schema.graphql_schema + + query = """query { + user(id: 2) { + name + } + }""" + result = graphql_sync(self.graphql_schema, query) + assert not result.errors + assert result.data["user"]["name"] == self.user_list[1]["name"] + + query = """query { + user(id: 2) { + id + name + } + }""" + result = graphql_sync(self.graphql_schema, query) + assert result.errors is not None + assert len(result.errors) == 1 + assert 
result.errors[0].path == ["user", "id"] + + def test_must_define_resolve_global_id(self): + """ + Test that if the `resolve_global_id` method is not defined, we can't query the object by ID. + """ + + class CustomGlobalIDType(BaseGlobalIDType): + graphene_type = Int + + @classmethod + def to_global_id(cls, _type, _id): + return _id + + class CustomNode(Node): + class Meta: + global_id_type = CustomGlobalIDType + + class User(ObjectType): + class Meta: + interfaces = [CustomNode] + + name = String() + + @classmethod + def get_node(cls, _type, _id): + return self.users[_id] + + class RootQuery(ObjectType): + user = CustomNode.Field(User) + + self.schema = Schema(query=RootQuery, types=[User]) + self.graphql_schema = self.schema.graphql_schema + + query = """query { + user(id: 2) { + id + name + } + }""" + result = graphql_sync(self.graphql_schema, query) + assert result.errors is not None + assert len(result.errors) == 1 + assert result.errors[0].path == ["user"] diff --git a/graphene/relay/tests/test_node.py b/graphene/relay/tests/test_node.py index 6b310fde..e7564566 100644 --- a/graphene/relay/tests/test_node.py +++ b/graphene/relay/tests/test_node.py @@ -55,6 +55,7 @@ def test_node_good(): assert "id" in MyNode._meta.fields assert is_node(MyNode) assert not is_node(object) + assert not is_node("node") def test_node_query(): From 6969023491793a7166ff0b4b62a35898b578196c Mon Sep 17 00:00:00 2001 From: Kristian Uzhca Date: Mon, 24 Oct 2022 14:06:24 -0400 Subject: [PATCH 14/39] Add copy function for GrapheneGraphQLType (#1463) --- graphene/types/definitions.py | 5 +++++ graphene/types/tests/test_definition.py | 16 ++++++++++++++++ 2 files changed, 21 insertions(+) diff --git a/graphene/types/definitions.py b/graphene/types/definitions.py index e5505fd3..ac574bed 100644 --- a/graphene/types/definitions.py +++ b/graphene/types/definitions.py @@ -20,6 +20,11 @@ class GrapheneGraphQLType: self.graphene_type = kwargs.pop("graphene_type") super(GrapheneGraphQLType, 
self).__init__(*args, **kwargs) + def __copy__(self): + result = GrapheneGraphQLType(graphene_type=self.graphene_type) + result.__dict__.update(self.__dict__) + return result + class GrapheneInterfaceType(GrapheneGraphQLType, GraphQLInterfaceType): pass diff --git a/graphene/types/tests/test_definition.py b/graphene/types/tests/test_definition.py index 0d8a95df..898fac71 100644 --- a/graphene/types/tests/test_definition.py +++ b/graphene/types/tests/test_definition.py @@ -1,4 +1,7 @@ +import copy + from ..argument import Argument +from ..definitions import GrapheneGraphQLType from ..enum import Enum from ..field import Field from ..inputfield import InputField @@ -312,3 +315,16 @@ def test_does_not_mutate_passed_field_definitions(): pass assert TestInputObject1._meta.fields == TestInputObject2._meta.fields + + +def test_graphene_graphql_type_can_be_copied(): + class Query(ObjectType): + field = String() + + def resolve_field(self, info): + return "" + + schema = Schema(query=Query) + query_type_copy = copy.copy(schema.graphql_schema.query_type) + assert query_type_copy.__dict__ == schema.graphql_schema.query_type.__dict__ + assert isinstance(schema.graphql_schema.query_type, GrapheneGraphQLType) From ccdd35b354007c3899a7b62ffe132414ee226fec Mon Sep 17 00:00:00 2001 From: Kevin Le Date: Thu, 27 Oct 2022 04:55:38 -0700 Subject: [PATCH 15/39] hashable Enum (#1461) --- graphene/types/enum.py | 6 +++++- graphene/types/tests/test_enum.py | 25 +++++++++++++++++++++++++ 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/graphene/types/enum.py b/graphene/types/enum.py index e5cc50ed..0f68236b 100644 --- a/graphene/types/enum.py +++ b/graphene/types/enum.py @@ -12,6 +12,10 @@ def eq_enum(self, other): return self.value is other +def hash_enum(self): + return hash(self.name) + + EnumType = type(PyEnum) @@ -22,7 +26,7 @@ class EnumOptions(BaseOptions): class EnumMeta(SubclassWithMeta_Meta): def __new__(cls, name_, bases, classdict, **options): - enum_members = 
dict(classdict, __eq__=eq_enum) + enum_members = dict(classdict, __eq__=eq_enum, __hash__=hash_enum) # We remove the Meta attribute from the class to not collide # with the enum values. enum_members.pop("Meta", None) diff --git a/graphene/types/tests/test_enum.py b/graphene/types/tests/test_enum.py index 679de16e..ab1e093e 100644 --- a/graphene/types/tests/test_enum.py +++ b/graphene/types/tests/test_enum.py @@ -518,3 +518,28 @@ def test_mutation_enum_input_type(): assert result.data == {"createPaint": {"color": "RED"}} assert color_input_value == RGB.RED + + +def test_hashable_enum(): + class RGB(Enum): + """Available colors""" + + RED = 1 + GREEN = 2 + BLUE = 3 + + color_map = {RGB.RED: "a", RGB.BLUE: "b", 1: "c"} + + assert color_map[RGB.RED] == "a" + assert color_map[RGB.BLUE] == "b" + assert color_map[1] == "c" + + +def test_hashable_instance_creation_enum(): + Episode = Enum("Episode", [("NEWHOPE", 4), ("EMPIRE", 5), ("JEDI", 6)]) + + trilogy_map = {Episode.NEWHOPE: "better", Episode.EMPIRE: "best", 5: "foo"} + + assert trilogy_map[Episode.NEWHOPE] == "better" + assert trilogy_map[Episode.EMPIRE] == "best" + assert trilogy_map[5] == "foo" From b349632a826a1054c17a3aa9ae2f9689776c6e9e Mon Sep 17 00:00:00 2001 From: Rens Groothuijsen Date: Tue, 15 Nov 2022 08:48:48 +0100 Subject: [PATCH 16/39] Clarify execution order in middleware docs (#1475) --- docs/execution/middleware.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/execution/middleware.rst b/docs/execution/middleware.rst index c0a8c792..3d086f3e 100644 --- a/docs/execution/middleware.rst +++ b/docs/execution/middleware.rst @@ -41,6 +41,8 @@ And then execute it with: result = schema.execute('THE QUERY', middleware=[AuthorizationMiddleware()]) +If the ``middleware`` argument includes multiple middlewares, +these middlewares will be executed bottom-up, i.e. from last to first. 
Functional example ------------------ From a2b63d8d84c7c3aefe528fa84963a4186439406c Mon Sep 17 00:00:00 2001 From: Erik Wrede Date: Wed, 16 Nov 2022 21:23:37 +0100 Subject: [PATCH 17/39] fix: MyPy findings due to a mypy version upgrade were corrected (#1477) --- graphene/types/inputobjecttype.py | 2 +- graphene/validation/depth_limit.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/graphene/types/inputobjecttype.py b/graphene/types/inputobjecttype.py index 98f0148d..5d278510 100644 --- a/graphene/types/inputobjecttype.py +++ b/graphene/types/inputobjecttype.py @@ -14,7 +14,7 @@ class InputObjectTypeOptions(BaseOptions): container = None # type: InputObjectTypeContainer -class InputObjectTypeContainer(dict, BaseType): +class InputObjectTypeContainer(dict, BaseType): # type: ignore class Meta: abstract = True diff --git a/graphene/validation/depth_limit.py b/graphene/validation/depth_limit.py index 5be852c7..b4599e66 100644 --- a/graphene/validation/depth_limit.py +++ b/graphene/validation/depth_limit.py @@ -53,7 +53,7 @@ IgnoreType = Union[Callable[[str], bool], Pattern, str] def depth_limit_validator( max_depth: int, ignore: Optional[List[IgnoreType]] = None, - callback: Callable[[Dict[str, int]], None] = None, + callback: Optional[Callable[[Dict[str, int]], None]] = None, ): class DepthLimitValidator(ValidationRule): def __init__(self, validation_context: ValidationContext): From f891a3683dbc1198d6b7dfad1835a16ca3562452 Mon Sep 17 00:00:00 2001 From: Rens Groothuijsen Date: Wed, 16 Nov 2022 21:27:34 +0100 Subject: [PATCH 18/39] docs: Disambiguate argument name in quickstart docs (#1474) --- docs/quickstart.rst | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/docs/quickstart.rst b/docs/quickstart.rst index cd090561..75f201c9 100644 --- a/docs/quickstart.rst +++ b/docs/quickstart.rst @@ -37,12 +37,12 @@ An example in Graphene Let’s build a basic GraphQL schema to say "hello" and "goodbye" in Graphene. 
-When we send a **Query** requesting only one **Field**, ``hello``, and specify a value for the ``name`` **Argument**... +When we send a **Query** requesting only one **Field**, ``hello``, and specify a value for the ``firstName`` **Argument**... .. code:: { - hello(name: "friend") + hello(firstName: "friend") } ...we would expect the following Response containing only the data requested (the ``goodbye`` field is not resolved). @@ -79,14 +79,15 @@ In Graphene, we can define a simple schema using the following code: from graphene import ObjectType, String, Schema class Query(ObjectType): - # this defines a Field `hello` in our Schema with a single Argument `name` - hello = String(name=String(default_value="stranger")) + # this defines a Field `hello` in our Schema with a single Argument `first_name` + # By default, the argument name will automatically be camel-based into firstName in the generated schema + hello = String(first_name=String(default_value="stranger")) goodbye = String() # our Resolver method takes the GraphQL context (root, info) as well as - # Argument (name) for the Field and returns data for the query Response - def resolve_hello(root, info, name): - return f'Hello {name}!' + # Argument (first_name) for the Field and returns data for the query Response + def resolve_hello(root, info, first_name): + return f'Hello {first_name}!' def resolve_goodbye(root, info): return 'See ya!' @@ -110,7 +111,7 @@ In the `GraphQL Schema Definition Language`_, we could describe the fields defin .. code:: type Query { - hello(name: String = "stranger"): String + hello(firstName: String = "stranger"): String goodbye: String } @@ -130,7 +131,7 @@ Then we can start querying our **Schema** by passing a GraphQL query string to ` # "Hello stranger!" 
# or passing the argument in the query - query_with_argument = '{ hello(name: "GraphQL") }' + query_with_argument = '{ hello(firstName: "GraphQL") }' result = schema.execute(query_with_argument) print(result.data['hello']) # "Hello GraphQL!" From 0b1bfbf65b5c47b69874612aec0328c3a724f0d7 Mon Sep 17 00:00:00 2001 From: Rens Groothuijsen Date: Wed, 16 Nov 2022 21:30:49 +0100 Subject: [PATCH 19/39] chore: Make Graphene enums iterable like Python enums (#1473) * Makes Graphene enums iterable like Python enums by implementing __iter__ --- graphene/types/enum.py | 3 +++ graphene/types/tests/test_enum.py | 22 ++++++++++++++++++++++ 2 files changed, 25 insertions(+) diff --git a/graphene/types/enum.py b/graphene/types/enum.py index 0f68236b..58e65c69 100644 --- a/graphene/types/enum.py +++ b/graphene/types/enum.py @@ -56,6 +56,9 @@ class EnumMeta(SubclassWithMeta_Meta): return super(EnumMeta, cls).__call__(*args, **kwargs) # return cls._meta.enum(*args, **kwargs) + def __iter__(cls): + return cls._meta.enum.__iter__() + def from_enum( cls, enum, name=None, description=None, deprecation_reason=None ): # noqa: N805 diff --git a/graphene/types/tests/test_enum.py b/graphene/types/tests/test_enum.py index ab1e093e..298cc233 100644 --- a/graphene/types/tests/test_enum.py +++ b/graphene/types/tests/test_enum.py @@ -543,3 +543,25 @@ def test_hashable_instance_creation_enum(): assert trilogy_map[Episode.NEWHOPE] == "better" assert trilogy_map[Episode.EMPIRE] == "best" assert trilogy_map[5] == "foo" + + +def test_enum_iteration(): + class TestEnum(Enum): + FIRST = 1 + SECOND = 2 + + result = [] + expected_values = ["FIRST", "SECOND"] + for c in TestEnum: + result.append(c.name) + assert result == expected_values + + +def test_iterable_instance_creation_enum(): + TestEnum = Enum("TestEnum", [("FIRST", 1), ("SECOND", 2)]) + + result = [] + expected_values = ["FIRST", "SECOND"] + for c in TestEnum: + result.append(c.name) + assert result == expected_values From 
7f6fa161948fd2c3312493309dbd590db7f95327 Mon Sep 17 00:00:00 2001 From: Mike Roberts <110839704+mike-roberts-healx@users.noreply.github.com> Date: Wed, 16 Nov 2022 20:38:15 +0000 Subject: [PATCH 20/39] feat_ (#1476) Previously, installing graphene and trying to do `from graphene.test import Client` as recommended in the docs caused an `ImportError`, as the 'promise' library is imported but only listed as a requirement in the 'test' section of the setup.py file. --- graphene/relay/tests/test_mutation_async.py | 16 ++++++++++------ graphene/test/__init__.py | 10 ++++++---- setup.py | 1 - 3 files changed, 16 insertions(+), 11 deletions(-) diff --git a/graphene/relay/tests/test_mutation_async.py b/graphene/relay/tests/test_mutation_async.py index 4308a614..bf61555d 100644 --- a/graphene/relay/tests/test_mutation_async.py +++ b/graphene/relay/tests/test_mutation_async.py @@ -3,6 +3,7 @@ from pytest import mark from graphene.types import ID, Field, ObjectType, Schema from graphene.types.scalars import String from graphene.relay.mutation import ClientIDMutation +from graphene.test import Client class SharedFields(object): @@ -61,24 +62,27 @@ class Mutation(ObjectType): schema = Schema(query=RootQuery, mutation=Mutation) +client = Client(schema) @mark.asyncio async def test_node_query_promise(): - executed = await schema.execute_async( + executed = await client.execute_async( 'mutation a { sayPromise(input: {what:"hello", clientMutationId:"1"}) { phrase } }' ) - assert not executed.errors - assert executed.data == {"sayPromise": {"phrase": "hello"}} + assert isinstance(executed, dict) + assert "errors" not in executed + assert executed["data"] == {"sayPromise": {"phrase": "hello"}} @mark.asyncio async def test_edge_query(): - executed = await schema.execute_async( + executed = await client.execute_async( 'mutation a { other(input: {clientMutationId:"1"}) { clientMutationId, myNodeEdge { cursor node { name }} } }' ) - assert not executed.errors - assert dict(executed.data) 
== { + assert isinstance(executed, dict) + assert "errors" not in executed + assert executed["data"] == { "other": { "clientMutationId": "1", "myNodeEdge": {"cursor": "1", "node": {"name": "name"}}, diff --git a/graphene/test/__init__.py b/graphene/test/__init__.py index 13b05dd3..1813d928 100644 --- a/graphene/test/__init__.py +++ b/graphene/test/__init__.py @@ -1,4 +1,3 @@ -from promise import Promise, is_thenable from graphql.error import GraphQLError from graphene.types.schema import Schema @@ -31,7 +30,10 @@ class Client: def execute(self, *args, **kwargs): executed = self.schema.execute(*args, **dict(self.execute_options, **kwargs)) - if is_thenable(executed): - return Promise.resolve(executed).then(self.format_result) - + return self.format_result(executed) + + async def execute_async(self, *args, **kwargs): + executed = await self.schema.execute_async( + *args, **dict(self.execute_options, **kwargs) + ) return self.format_result(executed) diff --git a/setup.py b/setup.py index dce6aa6c..6c1f29c9 100644 --- a/setup.py +++ b/setup.py @@ -52,7 +52,6 @@ tests_require = [ "pytest-asyncio>=0.16,<2", "snapshottest>=0.6,<1", "coveralls>=3.3,<4", - "promise>=2.3,<3", "mock>=4,<5", "pytz==2022.1", "iso8601>=1,<2", From f09b2e5a81ea3ca108ccc972b65966ec363a4e78 Mon Sep 17 00:00:00 2001 From: Erik Wrede Date: Mon, 21 Nov 2022 15:40:05 +0100 Subject: [PATCH 21/39] housekeeping: pin ubuntu to 20.04 for python 3.6 Ubuntu:latest doesn't include py36 anymore. Keep this until we add 3.11 and drop 3.6. 
See: https://github.com/actions/setup-python/issues/544 https://github.com/rwth-i6/returnn/issues/1226 --- .github/workflows/tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 9df18f99..6635a35b 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -30,7 +30,7 @@ jobs: - {name: '3.9', python: '3.9', os: ubuntu-latest, tox: py39} - {name: '3.8', python: '3.8', os: ubuntu-latest, tox: py38} - {name: '3.7', python: '3.7', os: ubuntu-latest, tox: py37} - - {name: '3.6', python: '3.6', os: ubuntu-latest, tox: py36} + - {name: '3.6', python: '3.6', os: ubuntu-20.04, tox: py36} steps: - uses: actions/checkout@v3 - uses: actions/setup-python@v4 From a141e848c3f3a70628272028fc63f7a4226029d2 Mon Sep 17 00:00:00 2001 From: Mike Roberts <110839704+mike-roberts-healx@users.noreply.github.com> Date: Thu, 1 Dec 2022 10:06:24 +0000 Subject: [PATCH 22/39] Do not interpret Enum members called 'description' as description properties (#1478) This is a workaround for `TypeError`s being raised when initialising schemas with Enum members named `description` or `deprecation_reason`. 
Fixes #1321 --- graphene/types/schema.py | 9 ++++++++- graphene/types/tests/test_enum.py | 33 +++++++++++++++++++++++++++++++ 2 files changed, 41 insertions(+), 1 deletion(-) diff --git a/graphene/types/schema.py b/graphene/types/schema.py index 1a33a93d..7fa046dd 100644 --- a/graphene/types/schema.py +++ b/graphene/types/schema.py @@ -1,3 +1,4 @@ +from enum import Enum as PyEnum import inspect from functools import partial @@ -169,10 +170,16 @@ class TypeMap(dict): values = {} for name, value in graphene_type._meta.enum.__members__.items(): description = getattr(value, "description", None) - deprecation_reason = getattr(value, "deprecation_reason", None) + # if the "description" attribute is an Enum, it is likely an enum member + # called description, not a description property + if isinstance(description, PyEnum): + description = None if not description and callable(graphene_type._meta.description): description = graphene_type._meta.description(value) + deprecation_reason = getattr(value, "deprecation_reason", None) + if isinstance(deprecation_reason, PyEnum): + deprecation_reason = None if not deprecation_reason and callable( graphene_type._meta.deprecation_reason ): diff --git a/graphene/types/tests/test_enum.py b/graphene/types/tests/test_enum.py index 298cc233..9b3082df 100644 --- a/graphene/types/tests/test_enum.py +++ b/graphene/types/tests/test_enum.py @@ -565,3 +565,36 @@ def test_iterable_instance_creation_enum(): for c in TestEnum: result.append(c.name) assert result == expected_values + + +# https://github.com/graphql-python/graphene/issues/1321 +def test_enum_description_member_not_interpreted_as_property(): + class RGB(Enum): + """Description""" + + red = "red" + green = "green" + blue = "blue" + description = "description" + deprecation_reason = "deprecation_reason" + + class Query(ObjectType): + color = RGB() + + def resolve_color(_, info): + return RGB.description + + values = RGB._meta.enum.__members__.values() + assert sorted(v.name for v in 
values) == [ + "blue", + "deprecation_reason", + "description", + "green", + "red", + ] + + schema = Schema(query=Query) + + results = schema.execute("query { color }") + assert not results.errors + assert results.data["color"] == RGB.description.name From 85963494052bbd19e16ac52d3c3d44341e4bfc9c Mon Sep 17 00:00:00 2001 From: Erik Wrede Date: Fri, 9 Dec 2022 10:46:24 +0100 Subject: [PATCH 23/39] release: 3.1.2 --- graphene/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/graphene/__init__.py b/graphene/__init__.py index af83f059..a878a4d3 100644 --- a/graphene/__init__.py +++ b/graphene/__init__.py @@ -46,7 +46,7 @@ from .types import ( from .utils.module_loading import lazy_import from .utils.resolve_only_args import resolve_only_args -VERSION = (3, 1, 1, "final", 0) +VERSION = (3, 1, 2, "final", 0) __version__ = get_version(VERSION) From d5dadb7b1ba5eb041cf5a0ec50f2cdb44164a507 Mon Sep 17 00:00:00 2001 From: Erik Wrede Date: Fri, 9 Dec 2022 10:53:50 +0100 Subject: [PATCH 24/39] release: 3.2.0 fixes previous release number 3.1.2 due to a pending feature release --- graphene/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/graphene/__init__.py b/graphene/__init__.py index a878a4d3..8aebbf1d 100644 --- a/graphene/__init__.py +++ b/graphene/__init__.py @@ -46,7 +46,7 @@ from .types import ( from .utils.module_loading import lazy_import from .utils.resolve_only_args import resolve_only_args -VERSION = (3, 1, 2, "final", 0) +VERSION = (3, 2, 0, "final", 0) __version__ = get_version(VERSION) From 19ea63b9c541d1b1f956455a88f40ff5c162d715 Mon Sep 17 00:00:00 2001 From: Vladyslav Hutov Date: Sat, 10 Dec 2022 11:25:07 +0000 Subject: [PATCH 25/39] fix: Input fields and Arguments can now be deprecated (#1472) Non-required InputFields and arguments now support deprecation via setting the `deprecation_reason` argument upon creation. 
--- graphene/types/argument.py | 17 ++++++++--- graphene/types/inputfield.py | 5 +++- graphene/types/schema.py | 2 ++ graphene/types/tests/test_argument.py | 40 +++++++++++++++++++++++-- graphene/types/tests/test_field.py | 9 +++++- graphene/types/tests/test_inputfield.py | 18 +++++++++++ 6 files changed, 83 insertions(+), 8 deletions(-) diff --git a/graphene/types/argument.py b/graphene/types/argument.py index f9dc843b..d9283c41 100644 --- a/graphene/types/argument.py +++ b/graphene/types/argument.py @@ -31,18 +31,22 @@ class Argument(MountedType): type (class for a graphene.UnmountedType): must be a class (not an instance) of an unmounted graphene type (ex. scalar or object) which is used for the type of this argument in the GraphQL schema. - required (bool): indicates this argument as not null in the graphql schema. Same behavior + required (optional, bool): indicates this argument as not null in the graphql schema. Same behavior as graphene.NonNull. Default False. - name (str): the name of the GraphQL argument. Defaults to parameter name. - description (str): the description of the GraphQL argument in the schema. - default_value (Any): The value to be provided if the user does not set this argument in + name (optional, str): the name of the GraphQL argument. Defaults to parameter name. + description (optional, str): the description of the GraphQL argument in the schema. + default_value (optional, Any): The value to be provided if the user does not set this argument in the operation. + deprecation_reason (optional, str): Setting this value indicates that the argument is + depreciated and may provide instruction or reason on how for clients to proceed. Cannot be + set if the argument is required (see spec). 
""" def __init__( self, type_, default_value=Undefined, + deprecation_reason=None, description=None, name=None, required=False, @@ -51,12 +55,16 @@ class Argument(MountedType): super(Argument, self).__init__(_creation_counter=_creation_counter) if required: + assert ( + deprecation_reason is None + ), f"Argument {name} is required, cannot deprecate it." type_ = NonNull(type_) self.name = name self._type = type_ self.default_value = default_value self.description = description + self.deprecation_reason = deprecation_reason @property def type(self): @@ -68,6 +76,7 @@ class Argument(MountedType): and self.type == other.type and self.default_value == other.default_value and self.description == other.description + and self.deprecation_reason == other.deprecation_reason ) diff --git a/graphene/types/inputfield.py b/graphene/types/inputfield.py index 791ca6a4..e7ededb0 100644 --- a/graphene/types/inputfield.py +++ b/graphene/types/inputfield.py @@ -55,11 +55,14 @@ class InputField(MountedType): description=None, required=False, _creation_counter=None, - **extra_args + **extra_args, ): super(InputField, self).__init__(_creation_counter=_creation_counter) self.name = name if required: + assert ( + deprecation_reason is None + ), f"InputField {name} is required, cannot deprecate it." 
type_ = NonNull(type_) self._type = type_ self.deprecation_reason = deprecation_reason diff --git a/graphene/types/schema.py b/graphene/types/schema.py index 7fa046dd..bceede6a 100644 --- a/graphene/types/schema.py +++ b/graphene/types/schema.py @@ -316,6 +316,7 @@ class TypeMap(dict): default_value=field.default_value, out_name=name, description=field.description, + deprecation_reason=field.deprecation_reason, ) else: args = {} @@ -327,6 +328,7 @@ class TypeMap(dict): out_name=arg_name, description=arg.description, default_value=arg.default_value, + deprecation_reason=arg.deprecation_reason, ) subscribe = field.wrap_subscribe( self.get_function_for_type( diff --git a/graphene/types/tests/test_argument.py b/graphene/types/tests/test_argument.py index db4d6c24..c5521b6c 100644 --- a/graphene/types/tests/test_argument.py +++ b/graphene/types/tests/test_argument.py @@ -18,8 +18,20 @@ def test_argument(): def test_argument_comparasion(): - arg1 = Argument(String, name="Hey", description="Desc", default_value="default") - arg2 = Argument(String, name="Hey", description="Desc", default_value="default") + arg1 = Argument( + String, + name="Hey", + description="Desc", + default_value="default", + deprecation_reason="deprecated", + ) + arg2 = Argument( + String, + name="Hey", + description="Desc", + default_value="default", + deprecation_reason="deprecated", + ) assert arg1 == arg2 assert arg1 != String() @@ -40,6 +52,30 @@ def test_to_arguments(): } +def test_to_arguments_deprecated(): + args = {"unmounted_arg": String(required=False, deprecation_reason="deprecated")} + + my_args = to_arguments(args) + assert my_args == { + "unmounted_arg": Argument( + String, required=False, deprecation_reason="deprecated" + ), + } + + +def test_to_arguments_required_deprecated(): + args = { + "unmounted_arg": String( + required=True, name="arg", deprecation_reason="deprecated" + ) + } + + with raises(AssertionError) as exc_info: + to_arguments(args) + + assert str(exc_info.value) == 
"Argument arg is required, cannot deprecate it." + + def test_to_arguments_raises_if_field(): args = {"arg_string": Field(String)} diff --git a/graphene/types/tests/test_field.py b/graphene/types/tests/test_field.py index 669ada4f..f0401bfa 100644 --- a/graphene/types/tests/test_field.py +++ b/graphene/types/tests/test_field.py @@ -128,13 +128,20 @@ def test_field_name_as_argument(): def test_field_source_argument_as_kw(): MyType = object() - field = Field(MyType, b=NonNull(True), c=Argument(None), a=NonNull(False)) + deprecation_reason = "deprecated" + field = Field( + MyType, + b=NonNull(True), + c=Argument(None, deprecation_reason=deprecation_reason), + a=NonNull(False), + ) assert list(field.args) == ["b", "c", "a"] assert isinstance(field.args["b"], Argument) assert isinstance(field.args["b"].type, NonNull) assert field.args["b"].type.of_type is True assert isinstance(field.args["c"], Argument) assert field.args["c"].type is None + assert field.args["c"].deprecation_reason == deprecation_reason assert isinstance(field.args["a"], Argument) assert isinstance(field.args["a"].type, NonNull) assert field.args["a"].type.of_type is False diff --git a/graphene/types/tests/test_inputfield.py b/graphene/types/tests/test_inputfield.py index bfedfb05..9b100128 100644 --- a/graphene/types/tests/test_inputfield.py +++ b/graphene/types/tests/test_inputfield.py @@ -1,5 +1,7 @@ from functools import partial +from pytest import raises + from ..inputfield import InputField from ..structures import NonNull from .utils import MyLazyType @@ -12,6 +14,22 @@ def test_inputfield_required(): assert field.type.of_type == MyType +def test_inputfield_deprecated(): + MyType = object() + deprecation_reason = "deprecated" + field = InputField(MyType, required=False, deprecation_reason=deprecation_reason) + assert isinstance(field.type, type(MyType)) + assert field.deprecation_reason == deprecation_reason + + +def test_inputfield_required_deprecated(): + MyType = object() + with 
raises(AssertionError) as exc_info: + InputField(MyType, name="input", required=True, deprecation_reason="deprecated") + + assert str(exc_info.value) == "InputField input is required, cannot deprecate it." + + def test_inputfield_with_lazy_type(): MyType = object() field = InputField(lambda: MyType) From 340d5ed12f7e736ca6ca6fd82c9ec4abdc635d4a Mon Sep 17 00:00:00 2001 From: Erik Wrede Date: Sun, 11 Dec 2022 21:05:25 +0100 Subject: [PATCH 26/39] release: 3.2.1 --- graphene/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/graphene/__init__.py b/graphene/__init__.py index 8aebbf1d..b901506e 100644 --- a/graphene/__init__.py +++ b/graphene/__init__.py @@ -46,7 +46,7 @@ from .types import ( from .utils.module_loading import lazy_import from .utils.resolve_only_args import resolve_only_args -VERSION = (3, 2, 0, "final", 0) +VERSION = (3, 2, 1, "final", 0) __version__ = get_version(VERSION) From 8eb2807ce570389b28aae8b713a0f3b1e97d96b0 Mon Sep 17 00:00:00 2001 From: Pei-Lun H Date: Fri, 23 Dec 2022 14:57:45 +0800 Subject: [PATCH 27/39] docs: Correct the module name of custom scalar example in documentation (#1486) --- docs/types/scalars.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/types/scalars.rst b/docs/types/scalars.rst index f47fffea..2a55245d 100644 --- a/docs/types/scalars.rst +++ b/docs/types/scalars.rst @@ -271,7 +271,7 @@ The following is an example for creating a DateTime scalar: @staticmethod def parse_literal(node, _variables=None): - if isinstance(node, ast.StringValue): + if isinstance(node, ast.StringValueNode): return datetime.datetime.strptime( node.value, "%Y-%m-%dT%H:%M:%S.%f") From 52143473efad141f6700237ecce79b22e8ff4e41 Mon Sep 17 00:00:00 2001 From: Peder Johnsen Date: Sun, 25 Dec 2022 21:59:05 +0000 Subject: [PATCH 28/39] docs: Remove prerelease notice (#1487) --- docs/index.rst | 6 ------ 1 file changed, 6 deletions(-) diff --git a/docs/index.rst b/docs/index.rst index 54f1f99c..85905788 
100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,12 +1,6 @@ Graphene ======== ------------- - -The documentation below is for the ``dev`` (prerelease) version of Graphene. To view the documentation for the latest stable Graphene version go to the `v2 docs `_. - ------------- - Contents: .. toctree:: From 8b89afeff136ef29300d38375aad838d1b94a4eb Mon Sep 17 00:00:00 2001 From: QuentinN42 <32516498+QuentinN42@users.noreply.github.com> Date: Tue, 28 Feb 2023 13:21:45 +0100 Subject: [PATCH 29/39] docs: update sphinx to the latest version (#1497) --- README.rst | 3 --- docs/api/index.rst | 2 +- docs/conf.py | 3 +-- docs/execution/queryvalidation.rst | 8 ++++---- docs/index.rst | 1 - docs/requirements.txt | 4 ++-- 6 files changed, 8 insertions(+), 13 deletions(-) diff --git a/README.rst b/README.rst index a38b9376..405a8f44 100644 --- a/README.rst +++ b/README.rst @@ -36,9 +36,6 @@ Graphene has multiple integrations with different frameworks: | SQLAlchemy | `graphene-sqlalchemy `__ | +-------------------+-------------------------------------------------+ -| Google App Engine | `graphene-gae `__ | -+-------------------+-------------------------------------------------+ Also, Graphene is fully compatible with the GraphQL spec, working seamlessly with all GraphQL clients, such as diff --git a/docs/api/index.rst b/docs/api/index.rst index c5e3b6e1..38b74909 100644 --- a/docs/api/index.rst +++ b/docs/api/index.rst @@ -92,7 +92,7 @@ Execution Metadata .. autoclass:: graphene.Context -.. autoclass:: graphql.execution.base.ExecutionResult +.. autoclass:: graphql.ExecutionResult .. Relay .. ----- diff --git a/docs/conf.py b/docs/conf.py index 0166d4c2..75f51541 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -82,7 +82,7 @@ release = "1.0" # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. 
-language = None +# language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: @@ -456,5 +456,4 @@ intersphinx_mapping = { "http://docs.graphene-python.org/projects/sqlalchemy/en/latest/", None, ), - "graphene_gae": ("http://docs.graphene-python.org/projects/gae/en/latest/", None), } diff --git a/docs/execution/queryvalidation.rst b/docs/execution/queryvalidation.rst index 9c24a2e3..02e29a35 100644 --- a/docs/execution/queryvalidation.rst +++ b/docs/execution/queryvalidation.rst @@ -1,5 +1,5 @@ Query Validation -========== +================ GraphQL uses query validators to check if Query AST is valid and can be executed. Every GraphQL server implements standard query validators. For example, there is an validator that tests if queried field exists on queried type, that makes query fail with "Cannot query field on type" error if it doesn't. @@ -8,7 +8,7 @@ To help with common use cases, graphene provides a few validation rules out of t Depth limit Validator ------------------ +--------------------- The depth limit validator helps to prevent execution of malicious queries. It takes in the following arguments. @@ -17,7 +17,7 @@ queries. It takes in the following arguments. - ``callback`` Called each time validation runs. Receives an Object which is a map of the depths for each operation. Usage -------- +----- Here is how you would implement depth-limiting on your schema. @@ -54,7 +54,7 @@ the disable introspection validation rule ensures that your schema cannot be int This is a useful security measure in production environments. Usage -------- +----- Here is how you would disable introspection for your schema. 
diff --git a/docs/index.rst b/docs/index.rst index 85905788..05b7fd87 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -21,7 +21,6 @@ Integrations * `Graphene-Django `_ (`source `_) * Flask-Graphql (`source `_) * `Graphene-SQLAlchemy `_ (`source `_) -* `Graphene-GAE `_ (`source `_) * `Graphene-Mongo `_ (`source `_) * `Starlette `_ (`source `_) * `FastAPI `_ (`source `_) diff --git a/docs/requirements.txt b/docs/requirements.txt index dcc40312..dee009c7 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,5 +1,5 @@ # Required library -Sphinx==1.5.3 -sphinx-autobuild==0.7.1 +Sphinx==6.1.3 +sphinx-autobuild==2021.3.14 # Docs template http://graphene-python.org/sphinx_graphene_theme.zip From 969a630541606eab947b4b842730dc02bd691349 Mon Sep 17 00:00:00 2001 From: Erik Wrede Date: Fri, 3 Mar 2023 17:35:05 +0100 Subject: [PATCH 30/39] Update README.md --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 0456f888..09fadbab 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -# ![Graphene Logo](http://graphene-python.org/favicon.png) [Graphene](http://graphene-python.org) [![Build Status](https://travis-ci.org/graphql-python/graphene.svg?branch=master)](https://travis-ci.org/graphql-python/graphene) [![PyPI version](https://badge.fury.io/py/graphene.svg)](https://badge.fury.io/py/graphene) [![Coverage Status](https://coveralls.io/repos/graphql-python/graphene/badge.svg?branch=master&service=github)](https://coveralls.io/github/graphql-python/graphene?branch=master) +# ![Graphene Logo](http://graphene-python.org/favicon.png) [Graphene](http://graphene-python.org) [![Build Status](https://travis-ci.org/graphql-python/graphene.svg?branch=master)](https://travis-ci.org/graphql-python/graphene) [![PyPI version](https://badge.fury.io/py/graphene.svg)](https://badge.fury.io/py/graphene) [![Coverage 
Status](https://coveralls.io/repos/graphql-python/graphene/badge.svg?branch=master&service=github)](https://coveralls.io/github/graphql-python/graphene?branch=master) [![](https://dcbadge.vercel.app/api/server/T6Gp6NFYHe)](https://discord.gg/T6Gp6NFYHe) -[💬 Join the community on Slack](https://join.slack.com/t/graphenetools/shared_invite/enQtOTE2MDQ1NTg4MDM1LTA4Nzk0MGU0NGEwNzUxZGNjNDQ4ZjAwNDJjMjY0OGE1ZDgxZTg4YjM2ZTc4MjE2ZTAzZjE2ZThhZTQzZTkyMmM) +[💬 Join the community on Discord](https://discord.gg/T6Gp6NFYHe) **We are looking for contributors**! Please check the current issues to see how you can help ❤️ From 81e7eee5da4411778200e7d6cb85af4502b29f25 Mon Sep 17 00:00:00 2001 From: Erik Wrede Date: Fri, 3 Mar 2023 17:35:46 +0100 Subject: [PATCH 31/39] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 09fadbab..3ba0737d 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# ![Graphene Logo](http://graphene-python.org/favicon.png) [Graphene](http://graphene-python.org) [![Build Status](https://travis-ci.org/graphql-python/graphene.svg?branch=master)](https://travis-ci.org/graphql-python/graphene) [![PyPI version](https://badge.fury.io/py/graphene.svg)](https://badge.fury.io/py/graphene) [![Coverage Status](https://coveralls.io/repos/graphql-python/graphene/badge.svg?branch=master&service=github)](https://coveralls.io/github/graphql-python/graphene?branch=master) [![](https://dcbadge.vercel.app/api/server/T6Gp6NFYHe)](https://discord.gg/T6Gp6NFYHe) +# ![Graphene Logo](http://graphene-python.org/favicon.png) [Graphene](http://graphene-python.org) [![Build Status](https://travis-ci.org/graphql-python/graphene.svg?branch=master)](https://travis-ci.org/graphql-python/graphene) [![PyPI version](https://badge.fury.io/py/graphene.svg)](https://badge.fury.io/py/graphene) [![Coverage 
Status](https://coveralls.io/repos/graphql-python/graphene/badge.svg?branch=master&service=github)](https://coveralls.io/github/graphql-python/graphene?branch=master) [![](https://dcbadge.vercel.app/api/server/T6Gp6NFYHe?style=flat)](https://discord.gg/T6Gp6NFYHe) [💬 Join the community on Discord](https://discord.gg/T6Gp6NFYHe) From b76e89c0c2a0e21f69cf6348ecfe8507907b52dc Mon Sep 17 00:00:00 2001 From: Roman Solomatin <36135455+Samoed@users.noreply.github.com> Date: Thu, 9 Mar 2023 14:09:15 +0500 Subject: [PATCH 32/39] docs: remove unpair bracket (#1500) --- docs/execution/dataloader.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/execution/dataloader.rst b/docs/execution/dataloader.rst index 61890951..557db2c1 100644 --- a/docs/execution/dataloader.rst +++ b/docs/execution/dataloader.rst @@ -36,10 +36,10 @@ and then call your batch function with all requested keys. user_loader = UserLoader() user1 = await user_loader.load(1) - user1_best_friend = await user_loader.load(user1.best_friend_id)) + user1_best_friend = await user_loader.load(user1.best_friend_id) user2 = await user_loader.load(2) - user2_best_friend = await user_loader.load(user2.best_friend_id)) + user2_best_friend = await user_loader.load(user2.best_friend_id) A naive application may have issued *four* round-trips to a backend for the From d33e38a391ee99ae48a1f13d26915634a79b3447 Mon Sep 17 00:00:00 2001 From: Erik Wrede Date: Mon, 13 Mar 2023 21:23:28 +0100 Subject: [PATCH 33/39] chore: make relay type fields extendable (#1499) --- graphene/relay/connection.py | 71 +++++++++------ graphene/relay/tests/test_connection.py | 115 +++++++++++++++++++++++- 2 files changed, 156 insertions(+), 30 deletions(-) diff --git a/graphene/relay/connection.py b/graphene/relay/connection.py index 1a4684e5..ea497367 100644 --- a/graphene/relay/connection.py +++ b/graphene/relay/connection.py @@ -1,6 +1,7 @@ import re from collections.abc import Iterable from functools import partial 
+from typing import Type from graphql_relay import connection_from_array @@ -8,7 +9,28 @@ from ..types import Boolean, Enum, Int, Interface, List, NonNull, Scalar, String from ..types.field import Field from ..types.objecttype import ObjectType, ObjectTypeOptions from ..utils.thenables import maybe_thenable -from .node import is_node +from .node import is_node, AbstractNode + + +def get_edge_class( + connection_class: Type["Connection"], _node: Type[AbstractNode], base_name: str +): + edge_class = getattr(connection_class, "Edge", None) + + class EdgeBase: + node = Field(_node, description="The item at the end of the edge") + cursor = String(required=True, description="A cursor for use in pagination") + + class EdgeMeta: + description = f"A Relay edge containing a `{base_name}` and its cursor." + + edge_name = f"{base_name}Edge" + + edge_bases = [edge_class, EdgeBase] if edge_class else [EdgeBase] + if not isinstance(edge_class, ObjectType): + edge_bases = [*edge_bases, ObjectType] + + return type(edge_name, tuple(edge_bases), {"Meta": EdgeMeta}) class PageInfo(ObjectType): @@ -61,8 +83,9 @@ class Connection(ObjectType): abstract = True @classmethod - def __init_subclass_with_meta__(cls, node=None, name=None, **options): - _meta = ConnectionOptions(cls) + def __init_subclass_with_meta__(cls, node=None, name=None, _meta=None, **options): + if not _meta: + _meta = ConnectionOptions(cls) assert node, f"You have to provide a node in {cls.__name__}.Meta" assert isinstance(node, NonNull) or issubclass( node, (Scalar, Enum, ObjectType, Interface, Union, NonNull) @@ -72,39 +95,29 @@ class Connection(ObjectType): if not name: name = f"{base_name}Connection" - edge_class = getattr(cls, "Edge", None) - _node = node - - class EdgeBase: - node = Field(_node, description="The item at the end of the edge") - cursor = String(required=True, description="A cursor for use in pagination") - - class EdgeMeta: - description = f"A Relay edge containing a `{base_name}` and its cursor." 
- - edge_name = f"{base_name}Edge" - if edge_class: - edge_bases = (edge_class, EdgeBase, ObjectType) - else: - edge_bases = (EdgeBase, ObjectType) - - edge = type(edge_name, edge_bases, {"Meta": EdgeMeta}) - cls.Edge = edge - options["name"] = name + _meta.node = node - _meta.fields = { - "page_info": Field( + + if not _meta.fields: + _meta.fields = {} + + if "page_info" not in _meta.fields: + _meta.fields["page_info"] = Field( PageInfo, name="pageInfo", required=True, description="Pagination data for this connection.", - ), - "edges": Field( - NonNull(List(edge)), + ) + + if "edges" not in _meta.fields: + edge_class = get_edge_class(cls, node, base_name) # type: ignore + cls.Edge = edge_class + _meta.fields["edges"] = Field( + NonNull(List(edge_class)), description="Contains the nodes in this connection.", - ), - } + ) + return super(Connection, cls).__init_subclass_with_meta__( _meta=_meta, **options ) diff --git a/graphene/relay/tests/test_connection.py b/graphene/relay/tests/test_connection.py index 4015f4b4..d45eea96 100644 --- a/graphene/relay/tests/test_connection.py +++ b/graphene/relay/tests/test_connection.py @@ -1,7 +1,15 @@ +import re + from pytest import raises from ...types import Argument, Field, Int, List, NonNull, ObjectType, Schema, String -from ..connection import Connection, ConnectionField, PageInfo +from ..connection import ( + Connection, + ConnectionField, + PageInfo, + ConnectionOptions, + get_edge_class, +) from ..node import Node @@ -51,6 +59,111 @@ def test_connection_inherit_abstracttype(): assert list(fields) == ["page_info", "edges", "extra"] +def test_connection_extra_abstract_fields(): + class ConnectionWithNodes(Connection): + class Meta: + abstract = True + + @classmethod + def __init_subclass_with_meta__(cls, node=None, name=None, **options): + _meta = ConnectionOptions(cls) + + _meta.fields = { + "nodes": Field( + NonNull(List(node)), + description="Contains all the nodes in this connection.", + ), + } + + return 
super(ConnectionWithNodes, cls).__init_subclass_with_meta__( + node=node, name=name, _meta=_meta, **options + ) + + class MyObjectConnection(ConnectionWithNodes): + class Meta: + node = MyObject + + class Edge: + other = String() + + assert MyObjectConnection._meta.name == "MyObjectConnection" + fields = MyObjectConnection._meta.fields + assert list(fields) == ["nodes", "page_info", "edges"] + edge_field = fields["edges"] + pageinfo_field = fields["page_info"] + nodes_field = fields["nodes"] + + assert isinstance(edge_field, Field) + assert isinstance(edge_field.type, NonNull) + assert isinstance(edge_field.type.of_type, List) + assert edge_field.type.of_type.of_type == MyObjectConnection.Edge + + assert isinstance(pageinfo_field, Field) + assert isinstance(pageinfo_field.type, NonNull) + assert pageinfo_field.type.of_type == PageInfo + + assert isinstance(nodes_field, Field) + assert isinstance(nodes_field.type, NonNull) + assert isinstance(nodes_field.type.of_type, List) + assert nodes_field.type.of_type.of_type == MyObject + + +def test_connection_override_fields(): + class ConnectionWithNodes(Connection): + class Meta: + abstract = True + + @classmethod + def __init_subclass_with_meta__(cls, node=None, name=None, **options): + _meta = ConnectionOptions(cls) + base_name = ( + re.sub("Connection$", "", name or cls.__name__) or node._meta.name + ) + + edge_class = get_edge_class(cls, node, base_name) + + _meta.fields = { + "page_info": Field( + NonNull( + PageInfo, + name="pageInfo", + required=True, + description="Pagination data for this connection.", + ) + ), + "edges": Field( + NonNull(List(NonNull(edge_class))), + description="Contains the nodes in this connection.", + ), + } + + return super(ConnectionWithNodes, cls).__init_subclass_with_meta__( + node=node, name=name, _meta=_meta, **options + ) + + class MyObjectConnection(ConnectionWithNodes): + class Meta: + node = MyObject + + assert MyObjectConnection._meta.name == "MyObjectConnection" + fields = 
MyObjectConnection._meta.fields + assert list(fields) == ["page_info", "edges"] + edge_field = fields["edges"] + pageinfo_field = fields["page_info"] + + assert isinstance(edge_field, Field) + assert isinstance(edge_field.type, NonNull) + assert isinstance(edge_field.type.of_type, List) + assert isinstance(edge_field.type.of_type.of_type, NonNull) + + assert edge_field.type.of_type.of_type.of_type.__name__ == "MyObjectEdge" + + # This page info is NonNull + assert isinstance(pageinfo_field, Field) + assert isinstance(edge_field.type, NonNull) + assert pageinfo_field.type.of_type == PageInfo + + def test_connection_name(): custom_name = "MyObjectCustomNameConnection" From 57cbef6666e2e466808cce21b8a1769ecd3fd118 Mon Sep 17 00:00:00 2001 From: Erik Wrede Date: Mon, 13 Mar 2023 21:24:16 +0100 Subject: [PATCH 34/39] release: 3.2.2 --- graphene/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/graphene/__init__.py b/graphene/__init__.py index b901506e..73e13a36 100644 --- a/graphene/__init__.py +++ b/graphene/__init__.py @@ -46,7 +46,7 @@ from .types import ( from .utils.module_loading import lazy_import from .utils.resolve_only_args import resolve_only_args -VERSION = (3, 2, 1, "final", 0) +VERSION = (3, 2, 2, "final", 0) __version__ = get_version(VERSION) From 8ede21e06381c096589c424960a6cfaca304badb Mon Sep 17 00:00:00 2001 From: Firas Kafri <3097061+firaskafri@users.noreply.github.com> Date: Thu, 25 May 2023 13:21:55 +0300 Subject: [PATCH 35/39] chore: default enum description to "An enumeration." (#1502) * Default enum description to "An enumeration." 
Not defaulting to this string, which is used in many tests, is causing test failures * Use the docstring descriptions of enums when they are present * Added tests * chore: add missing newline * Fix new line --------- Co-authored-by: Erik Wrede --- graphene/types/enum.py | 2 +- graphene/types/tests/test_enum.py | 15 +++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/graphene/types/enum.py b/graphene/types/enum.py index 58e65c69..7d68ccd4 100644 --- a/graphene/types/enum.py +++ b/graphene/types/enum.py @@ -63,7 +63,7 @@ class EnumMeta(SubclassWithMeta_Meta): cls, enum, name=None, description=None, deprecation_reason=None ): # noqa: N805 name = name or enum.__name__ - description = description or enum.__doc__ + description = description or enum.__doc__ or "An enumeration." meta_dict = { "enum": enum, "description": description, diff --git a/graphene/types/tests/test_enum.py b/graphene/types/tests/test_enum.py index 9b3082df..e6fce66c 100644 --- a/graphene/types/tests/test_enum.py +++ b/graphene/types/tests/test_enum.py @@ -65,6 +65,21 @@ def test_enum_from_builtin_enum(): assert RGB.BLUE +def test_enum_custom_description_in_constructor(): + description = "An enumeration, but with a custom description" + RGB = Enum( + "RGB", + "RED,GREEN,BLUE", + description=description, + ) + assert RGB._meta.description == description + + +def test_enum_from_python3_enum_uses_default_builtin_doc(): + RGB = Enum("RGB", "RED,GREEN,BLUE") + assert RGB._meta.description == "An enumeration." + + def test_enum_from_builtin_enum_accepts_lambda_description(): def custom_description(value): if not value: From 2da8e9db5cd6527ca740914ce0095e5004054dfd Mon Sep 17 00:00:00 2001 From: Cadu Date: Sun, 4 Jun 2023 18:01:05 -0300 Subject: [PATCH 36/39] feat: Enable use of Undefined in InputObjectTypes (#1506) * Changed InputObjectType's default builder-from-dict argument to be `Undefined` instead of `None`, removing ambiguity of undefined optional inputs using dot notation access syntax.
* Move `set_default_input_object_type_to_undefined()` fixture into conftest.py for sharing it between multiple test files. --- graphene/types/inputobjecttype.py | 27 ++++++++++++++++- graphene/types/tests/conftest.py | 12 ++++++++ graphene/types/tests/test_inputobjecttype.py | 31 ++++++++++++++++++++ graphene/types/tests/test_type_map.py | 14 ++++++++- graphene/validation/depth_limit.py | 6 ++-- 5 files changed, 85 insertions(+), 5 deletions(-) create mode 100644 graphene/types/tests/conftest.py diff --git a/graphene/types/inputobjecttype.py b/graphene/types/inputobjecttype.py index 5d278510..fdf38ba0 100644 --- a/graphene/types/inputobjecttype.py +++ b/graphene/types/inputobjecttype.py @@ -14,6 +14,31 @@ class InputObjectTypeOptions(BaseOptions): container = None # type: InputObjectTypeContainer +# Currently in Graphene, we get a `None` whenever we access an (optional) field that was not set in an InputObjectType +# using the InputObjectType. dot access syntax. This is ambiguous, because in this current (Graphene +# historical) arrangement, we cannot distinguish between a field not being set and a field being set to None. +# At the same time, we shouldn't break existing code that expects a `None` when accessing a field that was not set. +_INPUT_OBJECT_TYPE_DEFAULT_VALUE = None + +# To mitigate this, we provide the function `set_input_object_type_default_value` to allow users to change the default +# value returned in non-specified fields in InputObjectType to another meaningful sentinel value (e.g. Undefined) +# if they want to. This way, we can keep code that expects a `None` working while we figure out a better solution (or +# a well-documented breaking change) for this issue. + + +def set_input_object_type_default_value(default_value): + """ + Change the sentinel value returned by non-specified fields in an InputObjectType + Useful to differentiate between a field not being set and a field being set to None by using a sentinel value + (e.g. 
Undefined is a good sentinel value for this purpose) + + This function should be called at the beginning of the app or in some other place where it is guaranteed to + be called before any InputObjectType is defined. + """ + global _INPUT_OBJECT_TYPE_DEFAULT_VALUE + _INPUT_OBJECT_TYPE_DEFAULT_VALUE = default_value + + class InputObjectTypeContainer(dict, BaseType): # type: ignore class Meta: abstract = True @@ -21,7 +46,7 @@ class InputObjectTypeContainer(dict, BaseType): # type: ignore def __init__(self, *args, **kwargs): dict.__init__(self, *args, **kwargs) for key in self._meta.fields: - setattr(self, key, self.get(key, None)) + setattr(self, key, self.get(key, _INPUT_OBJECT_TYPE_DEFAULT_VALUE)) def __init_subclass__(cls, *args, **kwargs): pass diff --git a/graphene/types/tests/conftest.py b/graphene/types/tests/conftest.py new file mode 100644 index 00000000..43f7d726 --- /dev/null +++ b/graphene/types/tests/conftest.py @@ -0,0 +1,12 @@ +import pytest +from graphql import Undefined + +from graphene.types.inputobjecttype import set_input_object_type_default_value + + +@pytest.fixture() +def set_default_input_object_type_to_undefined(): + """This fixture is used to change the default value of optional inputs in InputObjectTypes for specific tests""" + set_input_object_type_default_value(Undefined) + yield + set_input_object_type_default_value(None) diff --git a/graphene/types/tests/test_inputobjecttype.py b/graphene/types/tests/test_inputobjecttype.py index 0fb7e394..0d7bcf80 100644 --- a/graphene/types/tests/test_inputobjecttype.py +++ b/graphene/types/tests/test_inputobjecttype.py @@ -1,3 +1,5 @@ +from graphql import Undefined + from ..argument import Argument from ..field import Field from ..inputfield import InputField @@ -6,6 +8,7 @@ from ..objecttype import ObjectType from ..scalars import Boolean, String from ..schema import Schema from ..unmountedtype import UnmountedType +from ... 
import NonNull class MyType: @@ -136,3 +139,31 @@ def test_inputobjecttype_of_input(): assert not result.errors assert result.data == {"isChild": True} + + +def test_inputobjecttype_default_input_as_undefined( + set_default_input_object_type_to_undefined, +): + class TestUndefinedInput(InputObjectType): + required_field = String(required=True) + optional_field = String() + + class Query(ObjectType): + undefined_optionals_work = Field(NonNull(Boolean), input=TestUndefinedInput()) + + def resolve_undefined_optionals_work(self, info, input: TestUndefinedInput): + # Confirm that optional_field comes as Undefined + return ( + input.required_field == "required" and input.optional_field is Undefined + ) + + schema = Schema(query=Query) + result = schema.execute( + """query basequery { + undefinedOptionalsWork(input: {requiredField: "required"}) + } + """ + ) + + assert not result.errors + assert result.data == {"undefinedOptionalsWork": True} diff --git a/graphene/types/tests/test_type_map.py b/graphene/types/tests/test_type_map.py index 55b1706e..55665b6b 100644 --- a/graphene/types/tests/test_type_map.py +++ b/graphene/types/tests/test_type_map.py @@ -20,8 +20,8 @@ from ..inputobjecttype import InputObjectType from ..interface import Interface from ..objecttype import ObjectType from ..scalars import Int, String -from ..structures import List, NonNull from ..schema import Schema +from ..structures import List, NonNull def create_type_map(types, auto_camelcase=True): @@ -227,6 +227,18 @@ def test_inputobject(): assert foo_field.description == "Field description" +def test_inputobject_undefined(set_default_input_object_type_to_undefined): + class OtherObjectType(InputObjectType): + optional_field = String() + + type_map = create_type_map([OtherObjectType]) + assert "OtherObjectType" in type_map + graphql_type = type_map["OtherObjectType"] + + container = graphql_type.out_type({}) + assert container.optional_field is Undefined + + def test_objecttype_camelcase(): class 
MyObjectType(ObjectType): """Description""" diff --git a/graphene/validation/depth_limit.py b/graphene/validation/depth_limit.py index b4599e66..e0f28663 100644 --- a/graphene/validation/depth_limit.py +++ b/graphene/validation/depth_limit.py @@ -30,7 +30,7 @@ try: except ImportError: # backwards compatibility for v3.6 from typing import Pattern -from typing import Callable, Dict, List, Optional, Union +from typing import Callable, Dict, List, Optional, Union, Tuple from graphql import GraphQLError from graphql.validation import ValidationContext, ValidationRule @@ -82,7 +82,7 @@ def depth_limit_validator( def get_fragments( - definitions: List[DefinitionNode], + definitions: Tuple[DefinitionNode, ...], ) -> Dict[str, FragmentDefinitionNode]: fragments = {} for definition in definitions: @@ -94,7 +94,7 @@ def get_fragments( # This will actually get both queries and mutations. # We can basically treat those the same def get_queries_and_mutations( - definitions: List[DefinitionNode], + definitions: Tuple[DefinitionNode, ...], ) -> Dict[str, OperationDefinitionNode]: operations = {} From c636d984c646cf303303f4c5bdb35e5d27846436 Mon Sep 17 00:00:00 2001 From: senseysensor Date: Mon, 5 Jun 2023 00:10:05 +0300 Subject: [PATCH 37/39] fix: Corrected enum metaclass to fix pickle.dumps() (#1495) * Corrected enum metaclass to fix pickle.dumps() * considered case with colliding class names (try to distinguish by file name) * reverted simple solution back (without attempt to support duplicate Enum class names) --------- Co-authored-by: sgrekov Co-authored-by: Erik Wrede --- graphene/tests/issues/test_881.py | 27 +++++++++++++++++++++++++++ graphene/types/enum.py | 4 +++- 2 files changed, 30 insertions(+), 1 deletion(-) create mode 100644 graphene/tests/issues/test_881.py diff --git a/graphene/tests/issues/test_881.py b/graphene/tests/issues/test_881.py new file mode 100644 index 00000000..f97b5917 --- /dev/null +++ b/graphene/tests/issues/test_881.py @@ -0,0 +1,27 @@ +import 
pickle + +from ...types.enum import Enum + + +class PickleEnum(Enum): + # is defined outside of test because pickle is unable to dump class inside of pytest function + A = "a" + B = 1 + + +def test_enums_pickling(): + a = PickleEnum.A + pickled = pickle.dumps(a) + restored = pickle.loads(pickled) + assert type(a) is type(restored) + assert a == restored + assert a.value == restored.value + assert a.name == restored.name + + b = PickleEnum.B + pickled = pickle.dumps(b) + restored = pickle.loads(pickled) + assert type(a) is type(restored) + assert b == restored + assert b.value == restored.value + assert b.name == restored.name diff --git a/graphene/types/enum.py b/graphene/types/enum.py index 7d68ccd4..d3469a15 100644 --- a/graphene/types/enum.py +++ b/graphene/types/enum.py @@ -31,9 +31,11 @@ class EnumMeta(SubclassWithMeta_Meta): # with the enum values. enum_members.pop("Meta", None) enum = PyEnum(cls.__name__, enum_members) - return SubclassWithMeta_Meta.__new__( + obj = SubclassWithMeta_Meta.__new__( cls, name_, bases, dict(classdict, __enum__=enum), **options ) + globals()[name_] = obj.__enum__ + return obj def get(cls, value): return cls._meta.enum(value) From d77d0b057137452d6d93067002fd7a2c56164e75 Mon Sep 17 00:00:00 2001 From: Jeongseok Kang Date: Mon, 5 Jun 2023 06:49:26 +0900 Subject: [PATCH 38/39] chore: Use `typing.TYPE_CHECKING` instead of MYPY (#1503) Co-authored-by: Erik Wrede --- graphene/types/inputobjecttype.py | 7 ++++--- graphene/types/interface.py | 7 ++++--- graphene/types/mutation.py | 7 ++++--- graphene/types/objecttype.py | 7 ++++--- graphene/types/union.py | 7 ++++--- 5 files changed, 20 insertions(+), 15 deletions(-) diff --git a/graphene/types/inputobjecttype.py b/graphene/types/inputobjecttype.py index fdf38ba0..257f48be 100644 --- a/graphene/types/inputobjecttype.py +++ b/graphene/types/inputobjecttype.py @@ -1,11 +1,12 @@ +from typing import TYPE_CHECKING + from .base import BaseOptions, BaseType from .inputfield import InputField from
.unmountedtype import UnmountedType from .utils import yank_fields_from_attrs -# For static type checking with Mypy -MYPY = False -if MYPY: +# For static type checking with type checker +if TYPE_CHECKING: from typing import Dict, Callable # NOQA diff --git a/graphene/types/interface.py b/graphene/types/interface.py index 6503b78b..31bcc7f9 100644 --- a/graphene/types/interface.py +++ b/graphene/types/interface.py @@ -1,10 +1,11 @@ +from typing import TYPE_CHECKING + from .base import BaseOptions, BaseType from .field import Field from .utils import yank_fields_from_attrs -# For static type checking with Mypy -MYPY = False -if MYPY: +# For static type checking with type checker +if TYPE_CHECKING: from typing import Dict, Iterable, Type # NOQA diff --git a/graphene/types/mutation.py b/graphene/types/mutation.py index ad47c62a..2de21b36 100644 --- a/graphene/types/mutation.py +++ b/graphene/types/mutation.py @@ -1,3 +1,5 @@ +from typing import TYPE_CHECKING + from ..utils.deprecated import warn_deprecation from ..utils.get_unbound_function import get_unbound_function from ..utils.props import props @@ -6,9 +8,8 @@ from .objecttype import ObjectType, ObjectTypeOptions from .utils import yank_fields_from_attrs from .interface import Interface -# For static type checking with Mypy -MYPY = False -if MYPY: +# For static type checking with type checker +if TYPE_CHECKING: from .argument import Argument # NOQA from typing import Dict, Type, Callable, Iterable # NOQA diff --git a/graphene/types/objecttype.py b/graphene/types/objecttype.py index 1ff29a2e..b3b829fe 100644 --- a/graphene/types/objecttype.py +++ b/graphene/types/objecttype.py @@ -1,3 +1,5 @@ +from typing import TYPE_CHECKING + from .base import BaseOptions, BaseType, BaseTypeMeta from .field import Field from .interface import Interface @@ -7,9 +9,8 @@ try: from dataclasses import make_dataclass, field except ImportError: from ..pyutils.dataclasses import make_dataclass, field # type: ignore -# For static type 
checking with Mypy -MYPY = False -if MYPY: +# For static type checking with type checker +if TYPE_CHECKING: from typing import Dict, Iterable, Type # NOQA diff --git a/graphene/types/union.py b/graphene/types/union.py index f77e833a..b7c5dc62 100644 --- a/graphene/types/union.py +++ b/graphene/types/union.py @@ -1,9 +1,10 @@ +from typing import TYPE_CHECKING + from .base import BaseOptions, BaseType from .unmountedtype import UnmountedType -# For static type checking with Mypy -MYPY = False -if MYPY: +# For static type checking with type checker +if TYPE_CHECKING: from .objecttype import ObjectType # NOQA from typing import Iterable, Type # NOQA From 03cf2e131e655402ccc0a9e2d9897c39d7f7f86a Mon Sep 17 00:00:00 2001 From: Erik Wrede Date: Tue, 6 Jun 2023 20:45:01 +0200 Subject: [PATCH 39/39] chore: remove travis ci link --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 3ba0737d..7beb975c 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# ![Graphene Logo](http://graphene-python.org/favicon.png) [Graphene](http://graphene-python.org) [![Build Status](https://travis-ci.org/graphql-python/graphene.svg?branch=master)](https://travis-ci.org/graphql-python/graphene) [![PyPI version](https://badge.fury.io/py/graphene.svg)](https://badge.fury.io/py/graphene) [![Coverage Status](https://coveralls.io/repos/graphql-python/graphene/badge.svg?branch=master&service=github)](https://coveralls.io/github/graphql-python/graphene?branch=master) [![](https://dcbadge.vercel.app/api/server/T6Gp6NFYHe?style=flat)](https://discord.gg/T6Gp6NFYHe) +# ![Graphene Logo](http://graphene-python.org/favicon.png) [Graphene](http://graphene-python.org) [![PyPI version](https://badge.fury.io/py/graphene.svg)](https://badge.fury.io/py/graphene) [![Coverage Status](https://coveralls.io/repos/graphql-python/graphene/badge.svg?branch=master&service=github)](https://coveralls.io/github/graphql-python/graphene?branch=master) 
[![](https://dcbadge.vercel.app/api/server/T6Gp6NFYHe?style=flat)](https://discord.gg/T6Gp6NFYHe) [💬 Join the community on Discord](https://discord.gg/T6Gp6NFYHe)