From 082186c16969b098fadf42645df35dde3d93f674 Mon Sep 17 00:00:00 2001 From: Syrus Akbary Date: Thu, 20 Apr 2017 00:53:17 -0700 Subject: [PATCH] Added dataloader docs --- docs/execution/dataloader.rst | 106 ++++++++++++++++++++++++++++++++++ docs/execution/execute.rst | 32 ++++++++++ docs/execution/index.rst | 36 +----------- 3 files changed, 141 insertions(+), 33 deletions(-) create mode 100644 docs/execution/dataloader.rst create mode 100644 docs/execution/execute.rst diff --git a/docs/execution/dataloader.rst b/docs/execution/dataloader.rst new file mode 100644 index 00000000..4e6e60dc --- /dev/null +++ b/docs/execution/dataloader.rst @@ -0,0 +1,106 @@ +Dataloader +========== + +DataLoader is a generic utility to be used as part of your application's +data fetching layer to provide a simplified and consistent API over +various remote data sources such as databases or web services via batching +and caching. + + +Batching +-------- + +Batching is not an advanced feature, it's DataLoader's primary feature. +Create loaders by providing a batch loading function. + +.. code:: python + + from promise import Promise + from promise.dataloader import DataLoader + + class UserLoader(DataLoader): + def batch_load_fn(self, keys): + # Here we return a promise that will result in the + # corresponding user for each key in keys + return Promise.resolve([get_user(id=key) for key in keys]) + + +A batch loading function accepts a list of keys, and returns a ``Promise`` +which resolves to a list of ``values``. + +Then load individual values from the loader. ``DataLoader`` will coalesce all +individual loads which occur within a single frame of execution (executed once +the wrapping promise is resolved) and then call your batch function with all +requested keys. + + + +.. 
code:: python + + user_loader = UserLoader() + + user_loader.load(1).then(lambda user: user_loader.load(user.best_friend_id)) + + user_loader.load(2).then(lambda user: user_loader.load(user.best_friend_id)) + + +A naive application may have issued *four* round-trips to a backend for the +required information, but with ``DataLoader`` this application will make at most *two*. + +``DataLoader`` allows you to decouple unrelated parts of your application without +sacrificing the performance of batch data-loading. While the loader presents +an API that loads individual values, all concurrent requests will be coalesced +and presented to your batch loading function. This allows your application to +safely distribute data fetching requirements throughout your application and +maintain minimal outgoing data requests. + + + +Using with Graphene +------------------- + +DataLoader pairs nicely with Graphene/GraphQL. GraphQL fields are designed +to be stand-alone functions. Without a caching or batching mechanism, it's easy +for a naive GraphQL server to issue new database requests each time a field is resolved. + +Consider the following GraphQL request: + + +.. code:: + + { + me { + name + bestFriend { + name + } + friends(first: 5) { + name + bestFriend { + name + } + } + } + } + + +Naively, if ``me``, ``bestFriend`` and ``friends`` each need to request the backend, +there could be at most 13 database requests! + + +When using DataLoader, we could define the User type using our previous example with +clearer code and at most 4 database requests, and possibly fewer if there are cache hits. + + +.. 
code:: python + + class User(graphene.ObjectType): + name = graphene.String() + best_friend = graphene.Field(lambda: User) + friends = graphene.List(lambda: User) + + def resolve_best_friend(self, args, context, info): + return user_loader.load(self.best_friend_id) + + def resolve_friends(self, args, context, info): + return user_loader.load_many(self.friend_ids) diff --git a/docs/execution/execute.rst b/docs/execution/execute.rst new file mode 100644 index 00000000..0e4de5ac --- /dev/null +++ b/docs/execution/execute.rst @@ -0,0 +1,32 @@ +Executing a query +================= + + +For executing a query against a schema, you can directly call the ``execute`` method on it. + + +.. code:: python + + schema = graphene.Schema(...) + result = schema.execute('{ name }') + +``result`` represents the result of execution. ``result.data`` is the result of executing the query, ``result.errors`` is ``None`` if no errors occurred, and is a non-empty list if an error occurred. + + +Context +_______ + +You can pass context to a query via ``context_value``. + + +.. code:: python + + class Query(graphene.ObjectType): + name = graphene.String() + + def resolve_name(self, args, context, info): + return context.get('name') + + schema = graphene.Schema(Query) + result = schema.execute('{ name }', context_value={'name': 'Syrus'}) + diff --git a/docs/execution/index.rst b/docs/execution/index.rst index 849832d4..00d98ffb 100644 --- a/docs/execution/index.rst +++ b/docs/execution/index.rst @@ -2,39 +2,9 @@ Execution ========= -For executing a query a schema, you can directly call the ``execute`` method on it. - - -.. code:: python - - schema = graphene.Schema(...) - result = schema.execute('{ name }') - -``result`` represents he result of execution. ``result.data`` is the result of executing the query, ``result.errors`` is ``None`` if no errors occurred, and is a non-empty list if an error occurred. - - -Context -_______ - -You can pass context to a query via ``context_value``. - - -.. 
code:: python - - class Query(graphene.ObjectType): - name = graphene.String() - - def resolve_name(self, args, context, info): - return context.get('name') - - schema = graphene.Schema(Query) - result = schema.execute('{ name }', context_value={'name': 'Syrus'}) - - -Middleware -__________ - .. toctree:: - :maxdepth: 1 + :maxdepth: 2 + execute middleware + dataloader