diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6834a43d..11e05745 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -17,7 +17,7 @@ jobs: strategy: matrix: os: [ubuntu-latest, macos-latest, windows-latest] - python: ["3.7", "3.8", "3.9", "3.10", "3.11"] + python: ["3.8", "3.9", "3.10", "3.11", "3.12"] runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v3 @@ -25,9 +25,9 @@ jobs: uses: actions/setup-python@v3 with: python-version: ${{ matrix.python }} - - name: Lint with flake8 + - name: Lint with ruff run: | - python -m pip install flake8 + python -m pip install ruff make lint - name: Install dependencies run: | diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index eb4dd686..3e1267ba 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -11,7 +11,7 @@ jobs: build: strategy: matrix: - python: ["3.11"] + python: ["3.12"] runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 @@ -19,9 +19,7 @@ jobs: uses: actions/setup-python@v3 with: python-version: ${{ matrix.python }} - - name: Install dependencies + - name: Lint with ruff run: | - python -m pip install --upgrade pip - pip install flake8 - - name: Lint with flake8 - run: make lint + python -m pip install ruff + make lint \ No newline at end of file diff --git a/Makefile b/Makefile index b983aa0d..07c3c5d3 100644 --- a/Makefile +++ b/Makefile @@ -11,7 +11,8 @@ clean: @find . -name "__pycache__" -delete lint: - @flake8 graphene_mongo --count --show-source --statistics + @ruff check graphene_mongo + @ruff format . --check test: clean pytest graphene_mongo/tests --cov=graphene_mongo --cov-report=html --cov-report=term diff --git a/README.md b/README.md index 126a3bf8..abc2f42f 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,6 @@ A [Mongoengine](https://mongoengine-odm.readthedocs.io/) integration for [Graphene](http://graphene-python.org/). 
- ## Installation For installing graphene-mongo, just run this command in your shell @@ -23,13 +22,14 @@ Here is a simple Mongoengine model as `models.py`: from mongoengine import Document from mongoengine.fields import StringField + class User(Document): meta = {'collection': 'user'} first_name = StringField(required=True) last_name = StringField(required=True) ``` -To create a GraphQL schema for it you simply have to write the following: +To create a GraphQL schema with a synchronous executor, you simply have to write the following: ```python import graphene @@ -38,15 +38,60 @@ from graphene_mongo import MongoengineObjectType from .models import User as UserModel + class User(MongoengineObjectType): class Meta: model = UserModel + class Query(graphene.ObjectType): users = graphene.List(User) - + def resolve_users(self, info): - return list(UserModel.objects.all()) + return list(UserModel.objects.all()) + + +schema = graphene.Schema(query=Query) +``` + +Then you can simply query the schema: + +```python +query = ''' + query { + users { + firstName, + lastName + } + } +''' +result = schema.execute(query) +``` + +To create a GraphQL schema with an asynchronous executor, you simply have to write the following: + +```python +import graphene + +from graphene_mongo import AsyncMongoengineObjectType +from asgiref.sync import sync_to_async +from concurrent.futures import ThreadPoolExecutor + +from .models import User as UserModel + + +class User(AsyncMongoengineObjectType): + class Meta: + model = UserModel + + +class Query(graphene.ObjectType): + users = graphene.List(User) + + async def resolve_users(self, info): + return await sync_to_async(list, thread_sensitive=False, + executor=ThreadPoolExecutor())(UserModel.objects.all()) + schema = graphene.Schema(query=Query) ``` @@ -71,7 +116,6 @@ To learn more check out the following [examples](examples/): * [Django MongoEngine example](examples/django_mongoengine) * [Falcon MongoEngine example](examples/falcon_mongoengine) - ## Contributing After cloning this repo, ensure dependencies are installed by running: diff --git a/docs/conf.py b/docs/conf.py index dba8cd82..deb0f8af 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -60,18 +60,18 @@ master_doc = "index" # General information about the project. -project = u"Graphene Mongo" -copyright = u"Graphene 2018" -author = u"Abaw Chen" +project = "Graphene Mongo" +copyright = "Graphene 2018" +author = "Abaw Chen" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. -version = u"0.1" +version = "0.1" # The full version, including alpha/beta/rc tags. -release = u"0.1.2" +release = "0.1.2" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -275,9 +275,7 @@ # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). -latex_documents = [ - (master_doc, "Graphene.tex", u"Graphene Documentation", u"Syrus Akbary", "manual") -] +latex_documents = [(master_doc, "Graphene.tex", "Graphene Documentation", "Syrus Akbary", "manual")] # The name of an image file (relative to this directory) to place at the top of # the title page. @@ -316,9 +314,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
-man_pages = [ - (master_doc, "graphene_django", u"Graphene Django Documentation", [author], 1) -] +man_pages = [(master_doc, "graphene_django", "Graphene Django Documentation", [author], 1)] # If true, show URL addresses after external links. # @@ -334,7 +330,7 @@ ( master_doc, "Graphene-Django", - u"Graphene Django Documentation", + "Graphene Django Documentation", author, "Graphene Django", "One line description of project.", diff --git a/examples/django_mongoengine/bike/urls.py b/examples/django_mongoengine/bike/urls.py index 55cfd515..b18e9883 100644 --- a/examples/django_mongoengine/bike/urls.py +++ b/examples/django_mongoengine/bike/urls.py @@ -3,7 +3,5 @@ from graphene_django.views import GraphQLView urlpatterns = [ - path( - "graphql", csrf_exempt(GraphQLView.as_view(graphiql=True)), name="graphql-query" - ) + path("graphql", csrf_exempt(GraphQLView.as_view(graphiql=True)), name="graphql-query") ] diff --git a/examples/django_mongoengine/bike_catalog/settings.py b/examples/django_mongoengine/bike_catalog/settings.py index f17c7f24..be86032f 100644 --- a/examples/django_mongoengine/bike_catalog/settings.py +++ b/examples/django_mongoengine/bike_catalog/settings.py @@ -85,9 +85,7 @@ # https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ - { - "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator" - }, + {"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"}, {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"}, {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"}, {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"}, diff --git a/examples/django_mongoengine/bike_catalog/settings_test.py b/examples/django_mongoengine/bike_catalog/settings_test.py index ddd3467c..0c608b02 100644 --- a/examples/django_mongoengine/bike_catalog/settings_test.py +++ b/examples/django_mongoengine/bike_catalog/settings_test.py @@ -1,5 +1,3 @@ from .settings import * # flake8: noqa -mongoengine.connect( - "graphene-mongo-test", host="mongomock://localhost", alias="default" -) +mongoengine.connect("graphene-mongo-test", host="mongomock://localhost", alias="default") diff --git a/examples/falcon_mongoengine/api.py b/examples/falcon_mongoengine/api.py index 59f3781a..c7f1810e 100644 --- a/examples/falcon_mongoengine/api.py +++ b/examples/falcon_mongoengine/api.py @@ -3,9 +3,7 @@ from .schema import schema -def set_graphql_allow_header( - req: falcon.Request, resp: falcon.Response, resource: object -): +def set_graphql_allow_header(req: falcon.Request, resp: falcon.Response, resource: object): resp.set_header("Allow", "GET, POST, OPTIONS") diff --git a/examples/falcon_mongoengine/tests/tests.py b/examples/falcon_mongoengine/tests/tests.py index 182f749c..80e0def7 100644 --- a/examples/falcon_mongoengine/tests/tests.py +++ b/examples/falcon_mongoengine/tests/tests.py @@ -1,11 +1,10 @@ import mongoengine from graphene.test import Client + from examples.falcon_mongoengine.schema import schema from .fixtures import fixtures_data -mongoengine.connect( - "graphene-mongo-test", host="mongomock://localhost", alias="default" -) +mongoengine.connect("graphene-mongo-test", host="mongomock://localhost", alias="default") def test_category_last_1_item_query(fixtures_data): @@ -23,7 +22,16 @@ def test_category_last_1_item_query(fixtures_data): expected = { "data": { - "categories": {"edges": [{"node": {"name": "Work", "color": "#1769ff"}}]} + 
"categories": { + "edges": [ + { + "node": { + "name": "Work", + "color": "#1769ff", + } + } + ] + } } } @@ -45,11 +53,7 @@ def test_category_filter_item_query(fixtures_data): } }""" - expected = { - "data": { - "categories": {"edges": [{"node": {"name": "Work", "color": "#1769ff"}}]} - } - } + expected = {"data": {"categories": {"edges": [{"node": {"name": "Work", "color": "#1769ff"}}]}}} client = Client(schema) result = client.execute(query) diff --git a/examples/falcon_mongoengine/types.py b/examples/falcon_mongoengine/types.py index 2e824cb9..2c42c173 100644 --- a/examples/falcon_mongoengine/types.py +++ b/examples/falcon_mongoengine/types.py @@ -1,8 +1,7 @@ -import graphene from graphene import relay -from graphene_mongo import MongoengineObjectType -from .models import Category, Bookmark +from graphene_mongo import MongoengineObjectType +from .models import Bookmark, Category class CategoryType(MongoengineObjectType): diff --git a/examples/flask_mongoengine/app.py b/examples/flask_mongoengine/app.py index 055cd319..d62f9b7d 100644 --- a/examples/flask_mongoengine/app.py +++ b/examples/flask_mongoengine/app.py @@ -42,9 +42,7 @@ } }""".strip() -app.add_url_rule( - "/graphql", view_func=GraphQLView.as_view("graphql", schema=schema, graphiql=True) -) +app.add_url_rule("/graphql", view_func=GraphQLView.as_view("graphql", schema=schema, graphiql=True)) if __name__ == "__main__": init_db() diff --git a/examples/flask_mongoengine/models.py b/examples/flask_mongoengine/models.py index 556282fa..734fc625 100644 --- a/examples/flask_mongoengine/models.py +++ b/examples/flask_mongoengine/models.py @@ -10,25 +10,21 @@ class Department(Document): - meta = {"collection": "department"} name = StringField() class Role(Document): - meta = {"collection": "role"} name = StringField() class Task(EmbeddedDocument): - name = StringField() deadline = DateTimeField(default=datetime.now) class Employee(Document): - meta = {"collection": "employee"} name = StringField() hired_on = DateTimeField(default=datetime.now) diff --git a/examples/flask_mongoengine/schema.py b/examples/flask_mongoengine/schema.py index 2205c67b..228eaca5 100644 --- a/examples/flask_mongoengine/schema.py +++ b/examples/flask_mongoengine/schema.py @@ -1,6 +1,5 @@ import graphene from graphene.relay import Node -from graphene_mongo.tests.nodes import PlayerNode, ReporterNode from graphene_mongo import MongoengineConnectionField, MongoengineObjectType from .models import Department as DepartmentModel @@ -20,7 +19,11 @@ class Meta: model = RoleModel interfaces = (Node,) filter_fields = { - 'name': ['exact', 'icontains', 'istartswith'] + "name": [ + "exact", + "icontains", + "istartswith", + ] } @@ -35,7 +38,11 @@ class Meta: model = EmployeeModel interfaces = (Node,) filter_fields = { - 'name': ['exact', 'icontains', 'istartswith'] + "name": [ + "exact", + "icontains", + "istartswith", + ] } diff --git a/graphene_mongo/__init__.py b/graphene_mongo/__init__.py index 316998ed..e1e64801 100644 --- a/graphene_mongo/__init__.py +++ b/graphene_mongo/__init__.py @@ -1,13 +1,17 @@ from .fields import MongoengineConnectionField +from .fields_async import AsyncMongoengineConnectionField from .types import MongoengineObjectType, MongoengineInputType, MongoengineInterfaceType +from .types_async import AsyncMongoengineObjectType __version__ = "0.1.1" __all__ = [ "__version__", "MongoengineObjectType", + "AsyncMongoengineObjectType", "MongoengineInputType", "MongoengineInterfaceType", - "MongoengineConnectionField" - ] + "MongoengineConnectionField", + 
"AsyncMongoengineConnectionField", +] diff --git a/graphene_mongo/advanced_types.py b/graphene_mongo/advanced_types.py index 10e2c11e..cedec69e 100644 --- a/graphene_mongo/advanced_types.py +++ b/graphene_mongo/advanced_types.py @@ -1,4 +1,5 @@ import base64 + import graphene @@ -65,5 +66,9 @@ class PolygonFieldType(_CoordinatesTypeField): class MultiPolygonFieldType(_CoordinatesTypeField): coordinates = graphene.List( - graphene.List(graphene.List(graphene.List(graphene.Float))) + graphene.List( + graphene.List( + graphene.List(graphene.Float), + ) + ) ) diff --git a/graphene_mongo/converter.py b/graphene_mongo/converter.py index d9a56245..f4cce42c 100644 --- a/graphene_mongo/converter.py +++ b/graphene_mongo/converter.py @@ -1,3 +1,4 @@ +import asyncio import sys import graphene @@ -7,9 +8,14 @@ from graphene.utils.str_converters import to_snake_case, to_camel_case from mongoengine.base import get_document, LazyReference from . import advanced_types -from .utils import import_single_dispatch, get_field_description, get_query_fields +from .utils import ( + import_single_dispatch, + get_field_description, + get_query_fields, + ExecutorEnum, + sync_to_async, +) from concurrent.futures import ThreadPoolExecutor, as_completed -from asgiref.sync import sync_to_async singledispatch = import_single_dispatch() @@ -19,17 +25,16 @@ class MongoEngineConversionError(Exception): @singledispatch -def convert_mongoengine_field(field, registry=None): +def convert_mongoengine_field(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC): raise MongoEngineConversionError( - "Don't know how to convert the MongoEngine field %s (%s)" - % (field, field.__class__) + "Don't know how to convert the MongoEngine field %s (%s)" % (field, field.__class__) ) @convert_mongoengine_field.register(mongoengine.EmailField) @convert_mongoengine_field.register(mongoengine.StringField) @convert_mongoengine_field.register(mongoengine.URLField) -def convert_field_to_string(field, registry=None): +def convert_field_to_string(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC): return graphene.String( description=get_field_description(field, registry), required=field.required ) @@ -37,89 +42,103 @@ def convert_field_to_string(field, registry=None): @convert_mongoengine_field.register(mongoengine.UUIDField) @convert_mongoengine_field.register(mongoengine.ObjectIdField) -def convert_field_to_id(field, registry=None): - return graphene.ID( - description=get_field_description(field, registry), required=field.required - ) - - -@convert_mongoengine_field.register(mongoengine.Decimal128Field) -@convert_mongoengine_field.register(mongoengine.DecimalField) -def convert_field_to_decimal(field, registry=None): - return graphene.Decimal( - description=get_field_description(field, registry), required=field.required - ) +def convert_field_to_id(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC): + return graphene.ID(description=get_field_description(field, registry), required=field.required) @convert_mongoengine_field.register(mongoengine.IntField) @convert_mongoengine_field.register(mongoengine.LongField) @convert_mongoengine_field.register(mongoengine.SequenceField) -def convert_field_to_int(field, registry=None): - return graphene.Int( - description=get_field_description(field, registry), required=field.required - ) +def convert_field_to_int(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC): + return graphene.Int(description=get_field_description(field, registry), 
required=field.required) @convert_mongoengine_field.register(mongoengine.BooleanField) -def convert_field_to_boolean(field, registry=None): +def convert_field_to_boolean(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC): return graphene.Boolean( description=get_field_description(field, registry), required=field.required ) @convert_mongoengine_field.register(mongoengine.FloatField) -def convert_field_to_float(field, registry=None): +def convert_field_to_float(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC): return graphene.Float( description=get_field_description(field, registry), required=field.required ) +@convert_mongoengine_field.register(mongoengine.Decimal128Field) +@convert_mongoengine_field.register(mongoengine.DecimalField) +def convert_field_to_decimal(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC): + return graphene.Decimal( + description=get_field_description(field, registry), required=field.required + ) + + @convert_mongoengine_field.register(mongoengine.DateTimeField) -def convert_field_to_datetime(field, registry=None): +def convert_field_to_datetime(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC): return graphene.DateTime( description=get_field_description(field, registry), required=field.required ) -@convert_mongoengine_field.register(mongoengine.DictField) -@convert_mongoengine_field.register(mongoengine.MapField) -def convert_field_to_jsonstring(field, registry=None): - return JSONString( +@convert_mongoengine_field.register(mongoengine.DateField) +def convert_field_to_date(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC): + return graphene.Date( description=get_field_description(field, registry), required=field.required ) +@convert_mongoengine_field.register(mongoengine.DictField) +@convert_mongoengine_field.register(mongoengine.MapField) +def convert_field_to_jsonstring(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC): + return JSONString(description=get_field_description(field, registry), required=field.required) + + @convert_mongoengine_field.register(mongoengine.PointField) -def convert_point_to_field(field, registry=None): - return graphene.Field(advanced_types.PointFieldType, description=get_field_description(field, registry), - required=field.required) +def convert_point_to_field(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC): + return graphene.Field( + advanced_types.PointFieldType, + description=get_field_description(field, registry), + required=field.required, + ) @convert_mongoengine_field.register(mongoengine.PolygonField) -def convert_polygon_to_field(field, registry=None): - return graphene.Field(advanced_types.PolygonFieldType, description=get_field_description(field, registry), - required=field.required) +def convert_polygon_to_field(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC): + return graphene.Field( + advanced_types.PolygonFieldType, + description=get_field_description(field, registry), + required=field.required, + ) @convert_mongoengine_field.register(mongoengine.MultiPolygonField) -def convert_multipolygon_to_field(field, registry=None): - return graphene.Field(advanced_types.MultiPolygonFieldType, description=get_field_description(field, registry), - required=field.required) +def convert_multipolygon_to_field(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC): + return graphene.Field( + advanced_types.MultiPolygonFieldType, + description=get_field_description(field, registry), + 
required=field.required, + ) @convert_mongoengine_field.register(mongoengine.FileField) -def convert_file_to_field(field, registry=None): - return graphene.Field(advanced_types.FileFieldType, description=get_field_description(field, registry), - required=field.required) +def convert_file_to_field(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC): + return graphene.Field( + advanced_types.FileFieldType, + description=get_field_description(field, registry), + required=field.required, + ) @convert_mongoengine_field.register(mongoengine.ListField) @convert_mongoengine_field.register(mongoengine.EmbeddedDocumentListField) @convert_mongoengine_field.register(mongoengine.GeoPointField) -def convert_field_to_list(field, registry=None): - base_type = convert_mongoengine_field(field.field, registry=registry) +def convert_field_to_list(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC): + base_type = convert_mongoengine_field(field.field, registry=registry, executor=executor) if isinstance(base_type, graphene.Field): if isinstance(field.field, mongoengine.GenericReferenceField): + def get_reference_objects(*args, **kwargs): document = get_document(args[0][0]) document_field = mongoengine.ReferenceField(document) @@ -135,8 +154,12 @@ def get_reference_objects(*args, **kwargs): item = to_snake_case(each) if item in document._fields_ordered + tuple(filter_args): queried_fields.append(item) - return document.objects().no_dereference().only( - *set(list(document_field_type._meta.required_fields) + queried_fields)).filter(pk__in=args[0][1]) + return ( + document.objects() + .no_dereference() + .only(*set(list(document_field_type._meta.required_fields) + queried_fields)) + .filter(pk__in=args[0][1]) + ) def get_non_querying_object(*args, **kwargs): model = get_document(args[0][0]) @@ -147,11 +170,14 @@ def reference_resolver(root, *args, **kwargs): if to_resolve: choice_to_resolve = dict() querying_union_types = list(get_query_fields(args[0]).keys()) - if '__typename' in querying_union_types: - querying_union_types.remove('__typename') + if "__typename" in querying_union_types: + querying_union_types.remove("__typename") to_resolve_models = list() for each in querying_union_types: - to_resolve_models.append(registry._registry_string_map[each]) + if executor == ExecutorEnum.SYNC: + to_resolve_models.append(registry._registry_string_map[each]) + else: + to_resolve_models.append(registry._registry_async_string_map[each]) to_resolve_object_ids = list() for each in to_resolve: if isinstance(each, LazyReference): @@ -162,17 +188,24 @@ def reference_resolver(root, *args, **kwargs): choice_to_resolve[model].append(each.pk) else: to_resolve_object_ids.append(each["_ref"].id) - if each['_cls'] not in choice_to_resolve: - choice_to_resolve[each['_cls']] = list() - choice_to_resolve[each['_cls']].append(each["_ref"].id) + if each["_cls"] not in choice_to_resolve: + choice_to_resolve[each["_cls"]] = list() + choice_to_resolve[each["_cls"]].append(each["_ref"].id) pool = ThreadPoolExecutor(5) futures = list() for model, object_id_list in choice_to_resolve.items(): if model in to_resolve_models: - futures.append(pool.submit(get_reference_objects, (model, object_id_list, registry, args))) + futures.append( + pool.submit( + get_reference_objects, (model, object_id_list, registry, args) + ) + ) else: futures.append( - pool.submit(get_non_querying_object, (model, object_id_list, registry, args))) + pool.submit( + get_non_querying_object, (model, object_id_list, registry, args) + ) + ) result = 
list() for x in as_completed(futures): result += x.result() @@ -185,16 +218,95 @@ def reference_resolver(root, *args, **kwargs): return ordered_result return None + async def get_reference_objects_async(*args, **kwargs): + document = get_document(args[0]) + document_field = mongoengine.ReferenceField(document) + document_field = convert_mongoengine_field( + document_field, registry, executor=ExecutorEnum.ASYNC + ) + document_field_type = document_field.get_type().type + queried_fields = list() + filter_args = list() + if document_field_type._meta.filter_fields: + for key, values in document_field_type._meta.filter_fields.items(): + for each in values: + filter_args.append(key + "__" + each) + for each in get_query_fields(args[3][0])[document_field_type._meta.name].keys(): + item = to_snake_case(each) + if item in document._fields_ordered + tuple(filter_args): + queried_fields.append(item) + return await sync_to_async(list)( + document.objects() + .no_dereference() + .only(*set(list(document_field_type._meta.required_fields) + queried_fields)) + .filter(pk__in=args[1]) + ) + + async def get_non_querying_object_async(*args, **kwargs): + model = get_document(args[0]) + return [model(pk=each) for each in args[1]] + + async def reference_resolver_async(root, *args, **kwargs): + to_resolve = getattr(root, field.name or field.db_name) + if to_resolve: + choice_to_resolve = dict() + querying_union_types = list(get_query_fields(args[0]).keys()) + if "__typename" in querying_union_types: + querying_union_types.remove("__typename") + to_resolve_models = list() + for each in querying_union_types: + if executor == ExecutorEnum.SYNC: + to_resolve_models.append(registry._registry_string_map[each]) + else: + to_resolve_models.append(registry._registry_async_string_map[each]) + to_resolve_object_ids = list() + for each in to_resolve: + if isinstance(each, LazyReference): + to_resolve_object_ids.append(each.pk) + model = each.document_type._class_name + if model not in choice_to_resolve: + choice_to_resolve[model] = list() + choice_to_resolve[model].append(each.pk) + else: + to_resolve_object_ids.append(each["_ref"].id) + if each["_cls"] not in choice_to_resolve: + choice_to_resolve[each["_cls"]] = list() + choice_to_resolve[each["_cls"]].append(each["_ref"].id) + loop = asyncio.get_event_loop() + tasks = [] + for model, object_id_list in choice_to_resolve.items(): + if model in to_resolve_models: + task = loop.create_task( + get_reference_objects_async(model, object_id_list, registry, args) + ) + else: + task = loop.create_task( + get_non_querying_object_async(model, object_id_list, registry, args) + ) + tasks.append(task) + result = await asyncio.gather(*tasks) + result = [each[0] for each in result] + result_object_ids = list() + for each in result: + result_object_ids.append(each.id) + ordered_result = list() + for each in to_resolve_object_ids: + ordered_result.append(result[result_object_ids.index(each)]) + return ordered_result + return None + return graphene.List( base_type._type, description=get_field_description(field, registry), required=field.required, resolver=reference_resolver + if executor == ExecutorEnum.SYNC + else reference_resolver_async, ) return graphene.List( base_type._type, description=get_field_description(field, registry), - required=field.required + required=field.required, ) if isinstance(base_type, (graphene.Dynamic)): base_type = base_type.get_type() @@ -208,20 +320,18 @@ def reference_resolver(root, *args, **kwargs): # Non-relationship field relations = 
(mongoengine.ReferenceField, mongoengine.EmbeddedDocumentField) if not isinstance(base_type, (graphene.List, graphene.NonNull)) and not isinstance( - field.field, relations + field.field, relations ): base_type = type(base_type) return graphene.List( - base_type, - description=get_field_description(field, registry), - required=field.required, + base_type, description=get_field_description(field, registry), required=field.required ) @convert_mongoengine_field.register(mongoengine.GenericEmbeddedDocumentField) @convert_mongoengine_field.register(mongoengine.GenericReferenceField) -def convert_field_to_union(field, registry=None): +def convert_field_to_union(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC): _types = [] for choice in field.choices: if isinstance(field, mongoengine.GenericReferenceField): @@ -229,7 +339,7 @@ def convert_field_to_union(field, registry=None): _field = mongoengine.EmbeddedDocumentField(choice) - _field = convert_mongoengine_field(_field, registry) + _field = convert_mongoengine_field(_field, registry, executor=executor) _type = _field.get_type() if _type: _types.append(_type.type) @@ -240,19 +350,20 @@ def convert_field_to_union(field, registry=None): if len(_types) == 0: return None - name = to_camel_case("{}_{}".format( - field._owner_document.__name__, - field.db_field - )) + "UnionType" + name = to_camel_case("{}_{}".format(field._owner_document.__name__, field.db_field)) + ( + "UnionType" if executor == ExecutorEnum.SYNC else "AsyncUnionType" + ) Meta = type("Meta", (object,), {"types": tuple(_types)}) _union = type(name, (graphene.Union,), {"Meta": Meta}) - async def reference_resolver(root, *args, **kwargs): + def reference_resolver(root, *args, **kwargs): de_referenced = getattr(root, field.name or field.db_name) if de_referenced: document = get_document(de_referenced["_cls"]) document_field = mongoengine.ReferenceField(document) - document_field = convert_mongoengine_field(document_field, registry) + document_field = convert_mongoengine_field(document_field, registry, executor=executor) _type = document_field.get_type().type filter_args = list() if _type._meta.filter_fields: @@ -266,18 +377,24 @@ async def reference_resolver(root, *args, **kwargs): item = to_snake_case(each) if item in document._fields_ordered + tuple(filter_args): queried_fields.append(item) - return await sync_to_async(document.objects().no_dereference().only(*list( - set(list(_type._meta.required_fields) + queried_fields))).get, thread_sensitive=False, - executor=ThreadPoolExecutor())(pk=de_referenced["_ref"].id) - return await sync_to_async(document, thread_sensitive=False, - executor=ThreadPoolExecutor())() + return ( + document.objects() + .no_dereference() + .only(*list(set(list(_type._meta.required_fields) + queried_fields))) + .get(pk=de_referenced["_ref"].id) + ) + return document() return None - async def lazy_reference_resolver(root, *args, **kwargs): + def lazy_reference_resolver(root, *args, **kwargs): document = getattr(root, field.name or field.db_name) if document: + if document._cached_doc: + return document._cached_doc queried_fields = list() - document_field_type = registry.get_type_for_model(document.document_type) + document_field_type = registry.get_type_for_model( + document.document_type, executor=executor + ) querying_types = list(get_query_fields(args[0]).keys()) filter_args = list() if document_field_type._meta.filter_fields: @@ -289,12 +406,74 @@ async def lazy_reference_resolver(root, 
*args, **kwargs): item = to_snake_case(each) if item in document.document_type._fields_ordered + tuple(filter_args): queried_fields.append(item) - _type = registry.get_type_for_model(document.document_type) - return await sync_to_async(document.document_type.objects().no_dereference().only( - *(set((list(_type._meta.required_fields) + queried_fields)))).get, thread_sensitive=False, - executor=ThreadPoolExecutor())(pk=document.pk) - return await sync_to_async(document.document_type, thread_sensitive=False, - executor=ThreadPoolExecutor())() + _type = registry.get_type_for_model(document.document_type, executor=executor) + return ( + document.document_type.objects() + .no_dereference() + .only(*(set((list(_type._meta.required_fields) + queried_fields)))) + .get(pk=document.pk) + ) + return document.document_type() + return None + + async def reference_resolver_async(root, *args, **kwargs): + de_referenced = getattr(root, field.name or field.db_name) + if de_referenced: + document = get_document(de_referenced["_cls"]) + document_field = mongoengine.ReferenceField(document) + document_field = convert_mongoengine_field( + document_field, registry, executor=ExecutorEnum.ASYNC + ) + _type = document_field.get_type().type + filter_args = list() + if _type._meta.filter_fields: + for key, values in _type._meta.filter_fields.items(): + for each in values: + filter_args.append(key + "__" + each) + querying_types = list(get_query_fields(args[0]).keys()) + if _type.__name__ in querying_types: + queried_fields = list() + for each in get_query_fields(args[0])[_type._meta.name].keys(): + item = to_snake_case(each) + if item in document._fields_ordered + tuple(filter_args): + queried_fields.append(item) + return await sync_to_async( + document.objects() + .no_dereference() + .only(*list(set(list(_type._meta.required_fields) + queried_fields))) + .get + )(pk=de_referenced["_ref"].id) + return await sync_to_async(document)() + return None + + async def lazy_reference_resolver_async(root, *args, **kwargs): + document = getattr(root, field.name or field.db_name) + if document: + if document._cached_doc: + return document._cached_doc + queried_fields = list() + document_field_type = registry.get_type_for_model( + document.document_type, executor=executor + ) + querying_types = list(get_query_fields(args[0]).keys()) + filter_args = list() + if document_field_type._meta.filter_fields: + for key, values in document_field_type._meta.filter_fields.items(): + for each in values: + filter_args.append(key + "__" + each) + if document_field_type._meta.name in querying_types: + for each in get_query_fields(args[0])[document_field_type._meta.name].keys(): + item = to_snake_case(each) + if item in document.document_type._fields_ordered + tuple(filter_args): + queried_fields.append(item) + _type = registry.get_type_for_model(document.document_type, executor=executor) + return await sync_to_async( + document.document_type.objects() + .no_dereference() + .only(*(set((list(_type._meta.required_fields) + queried_fields)))) + .get + )(pk=document.pk) + return await sync_to_async(document.document_type)() return None if isinstance(field, mongoengine.GenericLazyReferenceField): @@ -302,24 +481,48 @@ async def lazy_reference_resolver(root, *args, **kwargs): required = False if field.db_field is not None: required = field.required - resolver_function = getattr(registry.get_type_for_model(field.owner_document), "resolve_" + field.db_field, - None) + resolver_function = getattr( + registry.get_type_for_model(field.owner_document, 
executor=executor), + "resolve_" + field.db_field, + None, + ) if resolver_function and callable(resolver_function): field_resolver = resolver_function - return graphene.Field(_union, resolver=field_resolver if field_resolver else lazy_reference_resolver, - description=get_field_description(field, registry), required=required) + return graphene.Field( + _union, + resolver=field_resolver + if field_resolver + else ( + lazy_reference_resolver + if executor == ExecutorEnum.SYNC + else lazy_reference_resolver_async + ), + description=get_field_description(field, registry), + required=required, + ) elif isinstance(field, mongoengine.GenericReferenceField): field_resolver = None required = False if field.db_field is not None: required = field.required - resolver_function = getattr(registry.get_type_for_model(field.owner_document), "resolve_" + field.db_field, - None) + resolver_function = getattr( + registry.get_type_for_model(field.owner_document, executor=executor), + "resolve_" + field.db_field, + None, + ) if resolver_function and callable(resolver_function): field_resolver = resolver_function - return graphene.Field(_union, resolver=field_resolver if field_resolver else reference_resolver, - description=get_field_description(field, registry), required=required) + return graphene.Field( + _union, + resolver=field_resolver + if field_resolver + else ( + reference_resolver if executor == ExecutorEnum.SYNC else reference_resolver_async + ), + description=get_field_description(field, registry), + required=required, + ) return graphene.Field(_union) @@ -327,14 +530,14 @@ async def lazy_reference_resolver(root, *args, **kwargs): @convert_mongoengine_field.register(mongoengine.EmbeddedDocumentField) @convert_mongoengine_field.register(mongoengine.ReferenceField) @convert_mongoengine_field.register(mongoengine.CachedReferenceField) -def convert_field_to_dynamic(field, registry=None): +def convert_field_to_dynamic(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC): model = field.document_type - async def reference_resolver(root, *args, **kwargs): + def reference_resolver(root, *args, **kwargs): document = getattr(root, field.name or field.db_name) if document: queried_fields = list() - _type = registry.get_type_for_model(field.document_type) + _type = registry.get_type_for_model(field.document_type, executor=executor) filter_args = list() if _type._meta.filter_fields: for key, values in _type._meta.filter_fields.items(): @@ -344,15 +547,18 @@ async def reference_resolver(root, *args, **kwargs): item = to_snake_case(each) if item in field.document_type._fields_ordered + tuple(filter_args): queried_fields.append(item) - return await sync_to_async(field.document_type.objects().no_dereference().only( - *(set(list(_type._meta.required_fields) + queried_fields))).get, thread_sensitive=False, - executor=ThreadPoolExecutor())(pk=document.id) + return ( + field.document_type.objects() + .no_dereference() + .only(*(set(list(_type._meta.required_fields) + queried_fields))) + .get(pk=document.id) + ) return None - async def cached_reference_resolver(root, *args, **kwargs): + def cached_reference_resolver(root, *args, **kwargs): if field: queried_fields = list() - _type = registry.get_type_for_model(field.document_type) + _type = registry.get_type_for_model(field.document_type, executor=executor) filter_args = list() if _type._meta.filter_fields: for key, values in _type._meta.filter_fields.items(): @@ -362,47 +568,141 @@ async def cached_reference_resolver(root, *args, **kwargs): item = 
to_snake_case(each) if item in field.document_type._fields_ordered + tuple(filter_args): queried_fields.append(item) - return await sync_to_async(field.document_type.objects().no_dereference().only( - *(set( - list(_type._meta.required_fields) + queried_fields))).get, thread_sensitive=False, - executor=ThreadPoolExecutor())( - pk=getattr(root, field.name or field.db_name)) + return ( + field.document_type.objects() + .no_dereference() + .only(*(set(list(_type._meta.required_fields) + queried_fields))) + .get(pk=getattr(root, field.name or field.db_name)) + ) + return None + + async def reference_resolver_async(root, *args, **kwargs): + document = getattr(root, field.name or field.db_name) + if document: + queried_fields = list() + _type = registry.get_type_for_model(field.document_type, executor=executor) + filter_args = list() + if _type._meta.filter_fields: + for key, values in _type._meta.filter_fields.items(): + for each in values: + filter_args.append(key + "__" + each) + for each in get_query_fields(args[0]).keys(): + item = to_snake_case(each) + if item in field.document_type._fields_ordered + tuple(filter_args): + queried_fields.append(item) + return await sync_to_async( + field.document_type.objects() + .no_dereference() + .only(*(set(list(_type._meta.required_fields) + queried_fields))) + .get + )(pk=document.id) + return None + + async def cached_reference_resolver_async(root, *args, **kwargs): + if field: + queried_fields = list() + _type = registry.get_type_for_model(field.document_type, executor=executor) + filter_args = list() + if _type._meta.filter_fields: + for key, values in _type._meta.filter_fields.items(): + for each in values: + filter_args.append(key + "__" + each) + for each in get_query_fields(args[0]).keys(): + item = to_snake_case(each) + if item in field.document_type._fields_ordered + tuple(filter_args): + queried_fields.append(item) + return await sync_to_async( + field.document_type.objects() + .no_dereference() + .only(*(set(list(_type._meta.required_fields) + queried_fields))) + .get + )(pk=getattr(root, field.name or field.db_name)) return None def dynamic_type(): - _type = registry.get_type_for_model(model) + _type = registry.get_type_for_model(model, executor=executor) if not _type: return None if isinstance(field, mongoengine.EmbeddedDocumentField): - return graphene.Field(_type, - description=get_field_description(field, registry), required=field.required) + return graphene.Field( + _type, description=get_field_description(field, registry), required=field.required + ) field_resolver = None required = False if field.db_field is not None: required = field.required - resolver_function = getattr(registry.get_type_for_model(field.owner_document), "resolve_" + field.db_field, - None) + resolver_function = getattr( + registry.get_type_for_model(field.owner_document, executor=executor), + "resolve_" + field.db_field, + None, + ) if resolver_function and callable(resolver_function): field_resolver = resolver_function if isinstance(field, mongoengine.ReferenceField): - return graphene.Field(_type, resolver=field_resolver if field_resolver else reference_resolver, - description=get_field_description(field, registry), required=required) + return graphene.Field( + _type, + resolver=field_resolver + if field_resolver + else ( + reference_resolver + if executor == ExecutorEnum.SYNC + else reference_resolver_async + ), + description=get_field_description(field, registry), + required=required, + ) else: - return graphene.Field(_type, resolver=field_resolver if 
field_resolver else cached_reference_resolver, - description=get_field_description(field, registry), required=required) + return graphene.Field( + _type, + resolver=field_resolver + if field_resolver + else ( + cached_reference_resolver + if executor == ExecutorEnum.SYNC + else cached_reference_resolver_async + ), + description=get_field_description(field, registry), + required=required, + ) return graphene.Dynamic(dynamic_type) @convert_mongoengine_field.register(mongoengine.LazyReferenceField) -def convert_lazy_field_to_dynamic(field, registry=None): +def convert_lazy_field_to_dynamic(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC): model = field.document_type - async def lazy_resolver(root, *args, **kwargs): + def lazy_resolver(root, *args, **kwargs): + document = getattr(root, field.name or field.db_name) + if document: + if document._cached_doc: + return document._cached_doc + queried_fields = list() + _type = registry.get_type_for_model(document.document_type, executor=executor) + filter_args = list() + if _type._meta.filter_fields: + for key, values in _type._meta.filter_fields.items(): + for each in values: + filter_args.append(key + "__" + each) + for each in get_query_fields(args[0]).keys(): + item = to_snake_case(each) + if item in document.document_type._fields_ordered + tuple(filter_args): + queried_fields.append(item) + return ( + document.document_type.objects() + .no_dereference() + .only(*(set((list(_type._meta.required_fields) + queried_fields)))) + .get(pk=document.pk) + ) + return None + + async def lazy_resolver_async(root, *args, **kwargs): document = getattr(root, field.name or field.db_name) if document: + if document._cached_doc: + return document._cached_doc queried_fields = list() - _type = registry.get_type_for_model(document.document_type) + _type = registry.get_type_for_model(document.document_type, executor=executor) filter_args = list() if _type._meta.filter_fields: for key, values in _type._meta.filter_fields.items(): @@ -412,37 +712,48 @@ async def lazy_resolver(root, *args, **kwargs): item = to_snake_case(each) if item in document.document_type._fields_ordered + tuple(filter_args): queried_fields.append(item) - return await sync_to_async(document.document_type.objects().no_dereference().only( - *(set((list(_type._meta.required_fields) + queried_fields)))).get, thread_sensitive=False, - executor=ThreadPoolExecutor())(pk=document.pk) + return await sync_to_async( + document.document_type.objects() + .no_dereference() + .only(*(set((list(_type._meta.required_fields) + queried_fields)))) + .get + )(pk=document.pk) return None def dynamic_type(): - _type = registry.get_type_for_model(model) + _type = registry.get_type_for_model(model, executor=executor) if not _type: return None field_resolver = None required = False if field.db_field is not None: required = field.required - resolver_function = getattr(registry.get_type_for_model(field.owner_document), "resolve_" + field.db_field, - None) + resolver_function = getattr( + registry.get_type_for_model(field.owner_document, executor=executor), + "resolve_" + field.db_field, + None, + ) if resolver_function and callable(resolver_function): field_resolver = resolver_function return graphene.Field( _type, - resolver=field_resolver if field_resolver else lazy_resolver, - description=get_field_description(field, registry), required=required, + resolver=field_resolver + if field_resolver + else (lazy_resolver if executor == ExecutorEnum.SYNC else lazy_resolver_async), + 
description=get_field_description(field, registry), + required=required, ) return graphene.Dynamic(dynamic_type) if sys.version_info >= (3, 6): + @convert_mongoengine_field.register(mongoengine.EnumField) - def convert_field_to_enum(field, registry=None): + def convert_field_to_enum(field, registry=None, executor: ExecutorEnum = ExecutorEnum.SYNC): if not registry.check_enum_already_exist(field._enum_cls): registry.register_enum(field._enum_cls) _type = registry.get_type_for_enum(field._enum_cls) - return graphene.Field(_type, - description=get_field_description(field, registry), required=field.required) + return graphene.Field( + _type, description=get_field_description(field, registry), required=field.required + ) diff --git a/graphene_mongo/fields.py b/graphene_mongo/fields.py index 3778fa72..1e6bdf0f 100644 --- a/graphene_mongo/fields.py +++ b/graphene_mongo/fields.py @@ -1,11 +1,14 @@ from __future__ import absolute_import +import logging from collections import OrderedDict from functools import partial, reduce +from itertools import filterfalse import bson import graphene import mongoengine +import pymongo from bson import DBRef, ObjectId from graphene import Context from graphene.relay import ConnectionField @@ -15,26 +18,28 @@ from graphene.types.utils import get_type from graphene.utils.str_converters import to_snake_case from graphql import GraphQLResolveInfo -from graphql_relay import from_global_id -from graphql_relay.connection.arrayconnection import cursor_to_offset +from graphql_relay import cursor_to_offset, from_global_id from mongoengine import QuerySet from mongoengine.base import get_document from promise import Promise from pymongo.errors import OperationFailure -from asgiref.sync import sync_to_async -from concurrent.futures import ThreadPoolExecutor from .advanced_types import ( FileFieldType, - PointFieldType, MultiPolygonFieldType, - PolygonFieldType, PointFieldInputType, + PointFieldInputType, + PointFieldType, + PolygonFieldType, ) -from .converter import convert_mongoengine_field, MongoEngineConversionError +from .converter import MongoEngineConversionError, convert_mongoengine_field from .registry import get_global_registry -from .utils import get_model_reference_fields, get_query_fields, find_skip_and_limit, \ - connection_from_iterables -import pymongo +from .utils import ( + ExecutorEnum, + connection_from_iterables, + find_skip_and_limit, + get_model_reference_fields, + get_query_fields, +) PYMONGO_VERSION = tuple(pymongo.version_tuple[:2]) @@ -49,6 +54,10 @@ def __init__(self, type, *args, **kwargs): self._get_queryset = get_queryset super(MongoengineConnectionField, self).__init__(type, *args, **kwargs) + @property + def executor(self) -> ExecutorEnum: + return ExecutorEnum.SYNC + @property def type(self): from .types import MongoengineObjectType @@ -98,14 +107,13 @@ def args(self): _filter_args.pop(_field) if _field in _extended_args: _filter_args.pop(_field) - extra_args = dict(dict(dict(_field_args, **_advance_args), **_filter_args), **_extended_args) + extra_args = dict( + dict(dict(_field_args, **_advance_args), **_filter_args), **_extended_args + ) for key in list(self._base_args.keys()): extra_args.pop(key, None) - return to_arguments( - self._base_args or OrderedDict(), - extra_args - ) + return to_arguments(self._base_args or OrderedDict(), extra_args) @args.setter def args(self, args): @@ -121,7 +129,7 @@ def is_filterable(k): Returns: bool """ - if hasattr(self.fields[k].type, '_sdl'): + if hasattr(self.fields[k].type, "_sdl"): return False if 
not hasattr(self.model, k): return False @@ -137,32 +145,39 @@ def is_filterable(k): return False try: converted = convert_mongoengine_field( - getattr(self.model, k), self.registry + getattr(self.model, k), self.registry, self.executor ) except MongoEngineConversionError: return False if isinstance(converted, (ConnectionField, Dynamic)): return False if callable(getattr(converted, "type", None)) and isinstance( - converted.type(), - ( - FileFieldType, - PointFieldType, - MultiPolygonFieldType, - graphene.Union, - PolygonFieldType, - ), + converted.type(), + ( + FileFieldType, + PointFieldType, + MultiPolygonFieldType, + graphene.Union, + PolygonFieldType, + ), ): return False - if getattr(converted, "type", None) and getattr(converted.type, "_of_type", None) and issubclass( - (get_type(converted.type.of_type)), graphene.Union): + if ( + getattr(converted, "type", None) + and getattr(converted.type, "_of_type", None) + and issubclass((get_type(converted.type.of_type)), graphene.Union) + ): return False if isinstance(converted, (graphene.List)) and issubclass( - getattr(converted, "_of_type", None), graphene.Union + getattr(converted, "_of_type", None), graphene.Union ): return False # below if condition: workaround for DB filterable field redefined as custom graphene type - if hasattr(field_, 'type') and hasattr(converted, 'type') and converted.type != field_.type: + if ( + hasattr(field_, "type") + and hasattr(converted, "type") + and converted.type != field_.type + ): return False return True @@ -186,8 +201,11 @@ def filter_args(self): if self._type._meta.filter_fields: for field, filter_collection in self._type._meta.filter_fields.items(): for each in filter_collection: - if str(self._type._meta.fields[field].type) in ('PointFieldType', 'PointFieldType!'): - if each == 'max_distance': + if str(self._type._meta.fields[field].type) in ( + "PointFieldType", + "PointFieldType!", + ): + if each == "max_distance": filter_type = graphene.Int else: filter_type = PointFieldInputType @@ -203,9 +221,7 @@ def filter_args(self): "all": graphene.List(filter_type), } filter_type = advanced_filter_types.get(each, filter_type) - filter_args[field + "__" + each] = graphene.Argument( - type_=filter_type - ) + filter_args[field + "__" + each] = graphene.Argument(type_=filter_type) return filter_args @property @@ -217,8 +233,12 @@ def get_advance_field(r, kv): r.update({kv[0]: graphene.Argument(PointFieldInputType)}) return r if isinstance( - mongo_field, - (mongoengine.LazyReferenceField, mongoengine.ReferenceField, mongoengine.GenericReferenceField), + mongo_field, + ( + mongoengine.LazyReferenceField, + mongoengine.ReferenceField, + mongoengine.GenericReferenceField, + ), ): r.update({kv[0]: graphene.ID()}) return r @@ -228,9 +248,13 @@ def get_advance_field(r, kv): if callable(getattr(field, "get_type", None)): _type = field.get_type() if _type: - node = _type.type._meta if hasattr(_type.type, "_meta") else _type.type._of_type._meta + node = ( + _type.type._meta + if hasattr(_type.type, "_meta") + else _type.type._of_type._meta + ) if "id" in node.fields and not issubclass( - node.model, (mongoengine.EmbeddedDocument,) + node.model, (mongoengine.EmbeddedDocument,) ): r.update({kv[0]: node.fields["id"]._type.of_type()}) @@ -242,7 +266,7 @@ def get_advance_field(r, kv): def extended_args(self): args = OrderedDict() for k, each in self.fields.items(): - if hasattr(each.type, '_sdl'): + if hasattr(each.type, "_sdl"): args.update({k: graphene.ID()}) return args @@ -251,7 +275,9 @@ def fields(self): 
self._type = get_type(self._type) return self._type._meta.fields - def get_queryset(self, model, info, required_fields=None, skip=None, limit=None, reversed=False, **args): + def get_queryset( + self, model, info, required_fields=None, skip=None, limit=None, reversed=False, **args + ) -> QuerySet: if required_fields is None: required_fields = list() @@ -259,28 +285,33 @@ def get_queryset(self, model, info, required_fields=None, skip=None, limit=None, reference_fields = get_model_reference_fields(self.model) hydrated_references = {} for arg_name, arg in args.copy().items(): - if arg_name in reference_fields and not isinstance(arg, - mongoengine.base.metaclasses.TopLevelDocumentMetaclass): + if arg_name in reference_fields and not isinstance( + arg, mongoengine.base.metaclasses.TopLevelDocumentMetaclass + ): try: - reference_obj = reference_fields[arg_name].document_type(pk=from_global_id(arg)[1]) + reference_obj = reference_fields[arg_name].document_type( + pk=from_global_id(arg)[1] + ) except TypeError: reference_obj = reference_fields[arg_name].document_type(pk=arg) hydrated_references[arg_name] = reference_obj - elif arg_name in self.model._fields_ordered and isinstance(getattr(self.model, arg_name), - mongoengine.fields.GenericReferenceField): + elif arg_name in self.model._fields_ordered and isinstance( + getattr(self.model, arg_name), mongoengine.fields.GenericReferenceField + ): try: - reference_obj = get_document(self.registry._registry_string_map[from_global_id(arg)[0]])( - pk=from_global_id(arg)[1]) + reference_obj = get_document( + self.registry._registry_string_map[from_global_id(arg)[0]] + )(pk=from_global_id(arg)[1]) except TypeError: - reference_obj = get_document(arg["_cls"])( - pk=arg["_ref"].id) + reference_obj = get_document(arg["_cls"])(pk=arg["_ref"].id) hydrated_references[arg_name] = reference_obj - elif '__near' in arg_name and isinstance(getattr(self.model, arg_name.split('__')[0]), - mongoengine.fields.PointField): + elif "__near" in arg_name and isinstance( + getattr(self.model, arg_name.split("__")[0]), mongoengine.fields.PointField + ): location = args.pop(arg_name, None) hydrated_references[arg_name] = location["coordinates"] - if (arg_name.split('__')[0] + "__max_distance") not in args: - hydrated_references[arg_name.split('__')[0] + "__max_distance"] = 10000 + if (arg_name.split("__")[0] + "__max_distance") not in args: + hydrated_references[arg_name.split("__")[0] + "__max_distance"] = 10000 elif arg_name == "id": hydrated_references["id"] = from_global_id(args.pop("id", None))[1] args.update(hydrated_references) @@ -297,25 +328,47 @@ def get_queryset(self, model, info, required_fields=None, skip=None, limit=None, order_by = self.order_by + ",-pk" else: order_by = "-pk" - return model.objects(**args).no_dereference().only(*required_fields).order_by(order_by).skip( - skip if skip else 0).limit(limit) + return ( + model.objects(**args) + .no_dereference() + .only(*required_fields) + .order_by(order_by) + .skip(skip if skip else 0) + .limit(limit) + ) else: - return model.objects(**args).no_dereference().only(*required_fields).order_by(self.order_by).skip( - skip if skip else 0).limit(limit) + return ( + model.objects(**args) + .no_dereference() + .only(*required_fields) + .order_by(self.order_by) + .skip(skip if skip else 0) + .limit(limit) + ) elif skip is not None: if reversed: if self.order_by: order_by = self.order_by + ",-pk" else: order_by = "-pk" - return model.objects(**args).no_dereference().only(*required_fields).order_by(order_by).skip( - skip) + 
return ( + model.objects(**args) + .no_dereference() + .only(*required_fields) + .order_by(order_by) + .skip(skip) + ) else: - return model.objects(**args).no_dereference().only(*required_fields).order_by(self.order_by).skip( - skip) + return ( + model.objects(**args) + .no_dereference() + .only(*required_fields) + .order_by(self.order_by) + .skip(skip) + ) return model.objects(**args).no_dereference().only(*required_fields).order_by(self.order_by) - async def default_resolver(self, _root, info, required_fields=None, resolved=None, **args): + def default_resolver(self, _root, info, required_fields=None, resolved=None, **args): if required_fields is None: required_fields = list() args = args or {} @@ -327,17 +380,19 @@ async def default_resolver(self, _root, info, required_fields=None, resolved=Non if not hasattr(_root, "_fields_ordered"): if isinstance(getattr(_root, field_name, []), list): args["pk__in"] = [r.id for r in getattr(_root, field_name, [])] - elif field_name in _root._fields_ordered and not (isinstance(_root._fields[field_name].field, - mongoengine.EmbeddedDocumentField) or - isinstance(_root._fields[field_name].field, - mongoengine.GenericEmbeddedDocumentField)): + elif field_name in _root._fields_ordered and not ( + isinstance(_root._fields[field_name].field, mongoengine.EmbeddedDocumentField) + or isinstance( + _root._fields[field_name].field, mongoengine.GenericEmbeddedDocumentField + ) + ): if getattr(_root, field_name, []) is not None: args["pk__in"] = [r.id for r in getattr(_root, field_name, [])] - _id = args.pop('id', None) + _id = args.pop("id", None) if _id is not None: - args['pk'] = from_global_id(_id)[-1] + args["pk"] = from_global_id(_id)[-1] iterables = [] list_length = 0 skip = 0 @@ -352,82 +407,120 @@ async def default_resolver(self, _root, info, required_fields=None, resolved=Non before = args.pop("before", None) if before: before = cursor_to_offset(before) + has_next_page = False if resolved is not None: items = resolved if isinstance(items, QuerySet): try: - count = await sync_to_async(items.count, thread_sensitive=False, - executor=ThreadPoolExecutor())(with_limit_and_skip=True) + if last is not None and after is not None: + count = items.count(with_limit_and_skip=False) + else: + count = None except OperationFailure: count = len(items) else: count = len(items) - skip, limit, reverse = find_skip_and_limit(first=first, last=last, after=after, before=before, - count=count) + skip, limit, reverse = find_skip_and_limit( + first=first, last=last, after=after, before=before, count=count + ) - if limit: - if reverse: - items = items[::-1][skip:skip + limit] - else: - items = items[skip:skip + limit] - elif skip: - items = items[skip:] - iterables = items + if isinstance(items, QuerySet): + if limit: + _base_query: QuerySet = ( + items.order_by("-pk").skip(skip) if reverse else items.skip(skip) + ) + items = _base_query.limit(limit) + has_next_page = len(_base_query.skip(limit).only("id").limit(1)) != 0 + elif skip: + items = items.skip(skip) + else: + if limit: + if reverse: + _base_query = items[::-1] + items = _base_query[skip : skip + limit] + has_next_page = (skip + limit) < len(_base_query) + else: + _base_query = items + items = items[skip : skip + limit] + has_next_page = (skip + limit) < len(_base_query) + elif skip: + items = items[skip:] + iterables = list(items) list_length = len(iterables) elif callable(getattr(self.model, "objects", None)): - if _root is None or args or isinstance(getattr(_root, field_name, []), MongoengineConnectionField): + if ( + 
_root is None + or args + or isinstance(getattr(_root, field_name, []), MongoengineConnectionField) + ): args_copy = args.copy() for key in args.copy(): if key not in self.model._fields_ordered: args_copy.pop(key) - elif isinstance(getattr(self.model, key), - mongoengine.fields.ReferenceField) or isinstance(getattr(self.model, key), - mongoengine.fields.GenericReferenceField) or isinstance( - getattr(self.model, key), - mongoengine.fields.LazyReferenceField) or isinstance(getattr(self.model, key), - mongoengine.fields.CachedReferenceField): + elif ( + isinstance(getattr(self.model, key), mongoengine.fields.ReferenceField) + or isinstance( + getattr(self.model, key), mongoengine.fields.GenericReferenceField + ) + or isinstance( + getattr(self.model, key), mongoengine.fields.LazyReferenceField + ) + or isinstance( + getattr(self.model, key), mongoengine.fields.CachedReferenceField + ) + ): if not isinstance(args_copy[key], ObjectId): _from_global_id = from_global_id(args_copy[key])[1] if bson.objectid.ObjectId.is_valid(_from_global_id): args_copy[key] = ObjectId(_from_global_id) else: args_copy[key] = _from_global_id - elif isinstance(getattr(self.model, key), - mongoengine.fields.EnumField): + elif isinstance(getattr(self.model, key), mongoengine.fields.EnumField): if getattr(args_copy[key], "value", None): args_copy[key] = args_copy[key].value if PYMONGO_VERSION >= (3, 7): - count = await sync_to_async( - (mongoengine.get_db()[self.model._get_collection_name()]).count_documents, - thread_sensitive=False, - executor=ThreadPoolExecutor())(args_copy) + if hasattr(self.model, "_meta") and "db_alias" in self.model._meta: + count = ( + mongoengine.get_db(self.model._meta["db_alias"])[ + self.model._get_collection_name() + ] + ).count_documents(args_copy) + else: + count = ( + mongoengine.get_db()[self.model._get_collection_name()] + ).count_documents(args_copy) else: - count = await sync_to_async(self.model.objects(args_copy).count, thread_sensitive=False, - executor=ThreadPoolExecutor())() + count = self.model.objects(args_copy).count() if count != 0: - skip, limit, reverse = find_skip_and_limit(first=first, after=after, last=last, before=before, - count=count) - iterables = self.get_queryset(self.model, info, required_fields, skip, limit, reverse, **args) + skip, limit, reverse = find_skip_and_limit( + first=first, after=after, last=last, before=before, count=count + ) + iterables = self.get_queryset( + self.model, info, required_fields, skip, limit, reverse, **args + ) list_length = len(iterables) if isinstance(info, GraphQLResolveInfo): if not info.context: info = info._replace(context=Context()) - info.context.queryset = self.get_queryset(self.model, info, required_fields, **args) + info.context.queryset = self.get_queryset( + self.model, info, required_fields, **args + ) elif "pk__in" in args and args["pk__in"]: count = len(args["pk__in"]) - skip, limit, reverse = find_skip_and_limit(first=first, last=last, after=after, before=before, - count=count) + skip, limit, reverse = find_skip_and_limit( + first=first, last=last, after=after, before=before, count=count + ) if limit: if reverse: - args["pk__in"] = args["pk__in"][::-1][skip:skip + limit] + args["pk__in"] = args["pk__in"][::-1][skip : skip + limit] else: - args["pk__in"] = args["pk__in"][skip:skip + limit] + args["pk__in"] = args["pk__in"][skip : skip + limit] elif skip: args["pk__in"] = args["pk__in"][skip:] iterables = self.get_queryset(self.model, info, required_fields, **args) @@ -435,43 +528,58 @@ async def default_resolver(self, 
_root, info, required_fields=None, resolved=Non if isinstance(info, GraphQLResolveInfo): if not info.context: info = info._replace(context=Context()) - info.context.queryset = self.get_queryset(self.model, info, required_fields, **args) + info.context.queryset = self.get_queryset( + self.model, info, required_fields, **args + ) elif _root is not None: field_name = to_snake_case(info.field_name) items = getattr(_root, field_name, []) count = len(items) - skip, limit, reverse = find_skip_and_limit(first=first, last=last, after=after, before=before, - count=count) + skip, limit, reverse = find_skip_and_limit( + first=first, last=last, after=after, before=before, count=count + ) if limit: if reverse: - items = items[::-1][skip:skip + limit] + _base_query = items[::-1] + items = _base_query[skip : skip + limit] + has_next_page = (skip + limit) < len(_base_query) else: - items = items[skip:skip + limit] + _base_query = items + items = items[skip : skip + limit] + has_next_page = (skip + limit) < len(_base_query) elif skip: items = items[skip:] iterables = items list_length = len(iterables) - has_next_page = True if (0 if limit is None else limit) + (0 if skip is None else skip) < count else False + if count: + has_next_page = ( + True + if (0 if limit is None else limit) + (0 if skip is None else skip) < count + else False + ) has_previous_page = True if skip else False + if reverse: iterables = list(iterables) iterables.reverse() skip = limit - connection = connection_from_iterables(edges=iterables, start_offset=skip, - has_previous_page=has_previous_page, - has_next_page=has_next_page, - connection_type=self.type, - edge_type=self.type.Edge, - pageinfo_type=graphene.PageInfo) + connection = connection_from_iterables( + edges=iterables, + start_offset=skip, + has_previous_page=has_previous_page, + has_next_page=has_next_page, + connection_type=self.type, + edge_type=self.type.Edge, + pageinfo_type=graphene.PageInfo, + ) connection.iterable = iterables connection.list_length = list_length return connection - async def chained_resolver(self, resolver, is_partial, root, info, **args): - + def chained_resolver(self, resolver, is_partial, root, info, **args): for key, value in dict(args).items(): if value is None: del args[key] @@ -489,20 +597,35 @@ async def chained_resolver(self, resolver, is_partial, root, info, **args): args_copy = args.copy() if not bool(args) or not is_partial: - if isinstance(self.model, mongoengine.Document) or isinstance(self.model, - mongoengine.base.metaclasses.TopLevelDocumentMetaclass): + if isinstance(self.model, mongoengine.Document) or isinstance( + self.model, mongoengine.base.metaclasses.TopLevelDocumentMetaclass + ): + connection_fields = [ + field + for field in self.fields + if type(self.fields[field]) == MongoengineConnectionField + ] + + def filter_connection(x): + return any( + [ + connection_fields.__contains__(x), + self._type._meta.non_filter_fields.__contains__(x), + ] + ) - from itertools import filterfalse - connection_fields = [field for field in self.fields if - type(self.fields[field]) == MongoengineConnectionField] - filterable_args = tuple(filterfalse(connection_fields.__contains__, list(self.model._fields_ordered))) + filterable_args = tuple( + filterfalse(filter_connection, list(self.model._fields_ordered)) + ) for arg_name, arg in args.copy().items(): if arg_name not in filterable_args + tuple(self.filter_args.keys()): args_copy.pop(arg_name) if isinstance(info, GraphQLResolveInfo): if not info.context: info = info._replace(context=Context()) - 
info.context.queryset = self.get_queryset(self.model, info, required_fields, **args_copy) + info.context.queryset = self.get_queryset( + self.model, info, required_fields, **args_copy + ) # XXX: Filter nested args resolved = resolver(root, info, **args) @@ -514,61 +637,71 @@ async def chained_resolver(self, resolver, is_partial, root, info, **args): elif not isinstance(resolved[0], DBRef): return resolved else: - return await self.default_resolver(root, info, required_fields, **args_copy) + return self.default_resolver(root, info, required_fields, **args_copy) elif isinstance(resolved, QuerySet): args.update(resolved._query) args_copy = args.copy() for arg_name, arg in args.copy().items(): if "." in arg_name or arg_name not in self.model._fields_ordered + ( - 'first', 'last', 'before', 'after') + tuple(self.filter_args.keys()): + "first", + "last", + "before", + "after", + ) + tuple(self.filter_args.keys()): args_copy.pop(arg_name) - if arg_name == '_id' and isinstance(arg, dict): + if arg_name == "_id" and isinstance(arg, dict): operation = list(arg.keys())[0] - args_copy['pk' + operation.replace('$', '__')] = arg[operation] - if not isinstance(arg, ObjectId) and '.' in arg_name: - if type(arg) == dict: + args_copy["pk" + operation.replace("$", "__")] = arg[operation] + if not isinstance(arg, ObjectId) and "." in arg_name: + if isinstance(arg, dict): operation = list(arg.keys())[0] - args_copy[arg_name.replace('.', '__') + operation.replace('$', '__')] = arg[ - operation] + args_copy[ + arg_name.replace(".", "__") + operation.replace("$", "__") + ] = arg[operation] else: - args_copy[arg_name.replace('.', '__')] = arg - elif '.' in arg_name and isinstance(arg, ObjectId): - args_copy[arg_name.replace('.', '__')] = arg + args_copy[arg_name.replace(".", "__")] = arg + elif "." 
in arg_name and isinstance(arg, ObjectId): + args_copy[arg_name.replace(".", "__")] = arg else: operations = ["$lte", "$gte", "$ne", "$in"] if isinstance(arg, dict) and any(op in arg for op in operations): operation = list(arg.keys())[0] - args_copy[arg_name + operation.replace('$', '__')] = arg[operation] + args_copy[arg_name + operation.replace("$", "__")] = arg[operation] del args_copy[arg_name] - - return await self.default_resolver(root, info, required_fields, resolved=resolved, **args_copy) + return self.default_resolver( + root, info, required_fields, resolved=resolved, **args_copy + ) elif isinstance(resolved, Promise): return resolved.value else: - return await resolved + return resolved - return await self.default_resolver(root, info, required_fields, **args) + return self.default_resolver(root, info, required_fields, **args) @classmethod - async def connection_resolver(cls, resolver, connection_type, root, info, **args): + def connection_resolver(cls, resolver, connection_type, root, info, **args): if root: for key, value in root.__dict__.items(): if value: try: setattr(root, key, from_global_id(value)[1]) - except Exception: - pass - iterable = await resolver(root, info, **args) + except Exception as error: + logging.debug("Exception Occurred: ", exc_info=error) + iterable = resolver(root, info, **args) + if isinstance(connection_type, graphene.NonNull): connection_type = connection_type.of_type - return await sync_to_async(cls.resolve_connection, thread_sensitive=False, - executor=ThreadPoolExecutor())(connection_type, args, iterable) + on_resolve = partial(cls.resolve_connection, connection_type, args) - def get_resolver(self, parent_resolver): + if Promise.is_thenable(iterable): + return Promise.resolve(iterable).then(on_resolve) + + return on_resolve(iterable) + + def wrap_resolve(self, parent_resolver): super_resolver = self.resolver or parent_resolver resolver = partial( self.chained_resolver, super_resolver, isinstance(super_resolver, partial) ) - return partial(self.connection_resolver, resolver, self.type) diff --git a/graphene_mongo/fields_async.py b/graphene_mongo/fields_async.py new file mode 100644 index 00000000..c85c0d62 --- /dev/null +++ b/graphene_mongo/fields_async.py @@ -0,0 +1,394 @@ +from __future__ import absolute_import + +from functools import partial +from itertools import filterfalse +from typing import Coroutine + +import bson +import graphene +import mongoengine +import pymongo +from bson import DBRef, ObjectId +from graphene import Context +from graphene.relay import ConnectionField +from graphene.utils.str_converters import to_snake_case +from graphql import GraphQLResolveInfo +from graphql_relay import cursor_to_offset, from_global_id +from mongoengine import QuerySet +from promise import Promise +from pymongo.errors import OperationFailure + +from . 
import MongoengineConnectionField +from .registry import get_global_async_registry +from .utils import ( + ExecutorEnum, + connection_from_iterables, + find_skip_and_limit, + get_query_fields, + sync_to_async, +) + +PYMONGO_VERSION = tuple(pymongo.version_tuple[:2]) + + +class AsyncMongoengineConnectionField(MongoengineConnectionField): + def __init__(self, type, *args, **kwargs): + super(AsyncMongoengineConnectionField, self).__init__(type, *args, **kwargs) + + @property + def executor(self): + return ExecutorEnum.ASYNC + + @property + def type(self): + from .types_async import AsyncMongoengineObjectType + + _type = super(ConnectionField, self).type + assert issubclass( + _type, AsyncMongoengineObjectType + ), "AsyncMongoengineConnectionField only accepts AsyncMongoengineObjectType types" + assert _type._meta.connection, "The type {} doesn't have a connection".format( + _type.__name__ + ) + return _type._meta.connection + + @property + def fields(self): + return super(AsyncMongoengineConnectionField, self).fields + + @property + def registry(self): + return getattr(self.node_type._meta, "registry", get_global_async_registry()) + + async def default_resolver(self, _root, info, required_fields=None, resolved=None, **args): + if required_fields is None: + required_fields = list() + args = args or {} + for key, value in dict(args).items(): + if value is None: + del args[key] + if _root is not None and not resolved: + field_name = to_snake_case(info.field_name) + if not hasattr(_root, "_fields_ordered"): + if isinstance(getattr(_root, field_name, []), list): + args["pk__in"] = [r.id for r in getattr(_root, field_name, [])] + elif field_name in _root._fields_ordered and not ( + isinstance(_root._fields[field_name].field, mongoengine.EmbeddedDocumentField) + or isinstance( + _root._fields[field_name].field, mongoengine.GenericEmbeddedDocumentField + ) + ): + if getattr(_root, field_name, []) is not None: + args["pk__in"] = [r.id for r in getattr(_root, field_name, [])] + + _id = args.pop("id", None) + + if _id is not None: + args["pk"] = from_global_id(_id)[-1] + iterables = [] + list_length = 0 + skip = 0 + count = 0 + limit = None + reverse = False + first = args.pop("first", None) + after = args.pop("after", None) + if after: + after = cursor_to_offset(after) + last = args.pop("last", None) + before = args.pop("before", None) + if before: + before = cursor_to_offset(before) + has_next_page = False + + if resolved is not None: + items = resolved + + if isinstance(items, QuerySet): + try: + if last is not None and after is not None: + count = await sync_to_async(items.count)(with_limit_and_skip=False) + else: + count = None + except OperationFailure: + count = len(items) + else: + count = len(items) + + skip, limit, reverse = find_skip_and_limit( + first=first, last=last, after=after, before=before, count=count + ) + + if isinstance(items, QuerySet): + if limit: + _base_query: QuerySet = ( + await sync_to_async(items.order_by("-pk").skip)(skip) + if reverse + else await sync_to_async(items.skip)(skip) + ) + items = await sync_to_async(_base_query.limit)(limit) + has_next_page = ( + len(await sync_to_async(_base_query.skip(limit).only("id").limit)(1)) != 0 + ) + elif skip: + items = await sync_to_async(items.skip)(skip) + else: + if limit: + if reverse: + _base_query = items[::-1] + items = _base_query[skip : skip + limit] + has_next_page = (skip + limit) < len(_base_query) + else: + _base_query = items + items = items[skip : skip + limit] + has_next_page = (skip + limit) < len(_base_query) + 
elif skip: + items = items[skip:] + iterables = await sync_to_async(list)(items) + list_length = len(iterables) + + elif callable(getattr(self.model, "objects", None)): + if ( + _root is None + or args + or isinstance(getattr(_root, field_name, []), AsyncMongoengineConnectionField) + ): + args_copy = args.copy() + for key in args.copy(): + if key not in self.model._fields_ordered: + args_copy.pop(key) + elif ( + isinstance(getattr(self.model, key), mongoengine.fields.ReferenceField) + or isinstance( + getattr(self.model, key), mongoengine.fields.GenericReferenceField + ) + or isinstance( + getattr(self.model, key), mongoengine.fields.LazyReferenceField + ) + or isinstance( + getattr(self.model, key), mongoengine.fields.CachedReferenceField + ) + ): + if not isinstance(args_copy[key], ObjectId): + _from_global_id = from_global_id(args_copy[key])[1] + if bson.objectid.ObjectId.is_valid(_from_global_id): + args_copy[key] = ObjectId(_from_global_id) + else: + args_copy[key] = _from_global_id + elif isinstance(getattr(self.model, key), mongoengine.fields.EnumField): + if getattr(args_copy[key], "value", None): + args_copy[key] = args_copy[key].value + + if PYMONGO_VERSION >= (3, 7): + count = await sync_to_async( + (mongoengine.get_db()[self.model._get_collection_name()]).count_documents + )(args_copy) + else: + count = await sync_to_async(self.model.objects(args_copy).count)() + if count != 0: + skip, limit, reverse = find_skip_and_limit( + first=first, after=after, last=last, before=before, count=count + ) + iterables = self.get_queryset( + self.model, info, required_fields, skip, limit, reverse, **args + ) + iterables = await sync_to_async(list)(iterables) + list_length = len(iterables) + if isinstance(info, GraphQLResolveInfo): + if not info.context: + info = info._replace(context=Context()) + info.context.queryset = self.get_queryset( + self.model, info, required_fields, **args + ) + + elif "pk__in" in args and args["pk__in"]: + count = len(args["pk__in"]) + skip, limit, reverse = find_skip_and_limit( + first=first, last=last, after=after, before=before, count=count + ) + if limit: + if reverse: + args["pk__in"] = args["pk__in"][::-1][skip : skip + limit] + else: + args["pk__in"] = args["pk__in"][skip : skip + limit] + elif skip: + args["pk__in"] = args["pk__in"][skip:] + iterables = self.get_queryset(self.model, info, required_fields, **args) + iterables = await sync_to_async(list)(iterables) + list_length = len(iterables) + if isinstance(info, GraphQLResolveInfo): + if not info.context: + info = info._replace(context=Context()) + info.context.queryset = self.get_queryset( + self.model, info, required_fields, **args + ) + + elif _root is not None: + field_name = to_snake_case(info.field_name) + items = getattr(_root, field_name, []) + count = len(items) + skip, limit, reverse = find_skip_and_limit( + first=first, last=last, after=after, before=before, count=count + ) + if limit: + if reverse: + _base_query = items[::-1] + items = _base_query[skip : skip + limit] + has_next_page = (skip + limit) < len(_base_query) + else: + _base_query = items + items = items[skip : skip + limit] + has_next_page = (skip + limit) < len(_base_query) + elif skip: + items = items[skip:] + iterables = items + iterables = await sync_to_async(list)(iterables) + list_length = len(iterables) + + if count: + has_next_page = ( + True + if (0 if limit is None else limit) + (0 if skip is None else skip) < count + else False + ) + has_previous_page = True if skip else False + + if reverse: + iterables = await 
sync_to_async(list)(iterables) + iterables.reverse() + skip = limit + + connection = connection_from_iterables( + edges=iterables, + start_offset=skip, + has_previous_page=has_previous_page, + has_next_page=has_next_page, + connection_type=self.type, + edge_type=self.type.Edge, + pageinfo_type=graphene.PageInfo, + ) + connection.iterable = iterables + connection.list_length = list_length + return connection + + async def chained_resolver(self, resolver, is_partial, root, info, **args): + for key, value in dict(args).items(): + if value is None: + del args[key] + + required_fields = list() + + for field in self.required_fields: + if field in self.model._fields_ordered: + required_fields.append(field) + + for field in get_query_fields(info): + if to_snake_case(field) in self.model._fields_ordered: + required_fields.append(to_snake_case(field)) + + args_copy = args.copy() + + if not bool(args) or not is_partial: + if isinstance(self.model, mongoengine.Document) or isinstance( + self.model, mongoengine.base.metaclasses.TopLevelDocumentMetaclass + ): + connection_fields = [ + field + for field in self.fields + if type(self.fields[field]) == AsyncMongoengineConnectionField + ] + + def filter_connection(x): + return any( + [ + connection_fields.__contains__(x), + self._type._meta.non_filter_fields.__contains__(x), + ] + ) + + filterable_args = tuple( + filterfalse(filter_connection, list(self.model._fields_ordered)) + ) + for arg_name, arg in args.copy().items(): + if arg_name not in filterable_args + tuple(self.filter_args.keys()): + args_copy.pop(arg_name) + if isinstance(info, GraphQLResolveInfo): + if not info.context: + info = info._replace(context=Context()) + info.context.queryset = self.get_queryset( + self.model, info, required_fields, **args_copy + ) + + # XXX: Filter nested args + resolved = resolver(root, info, **args) + if isinstance(resolved, Coroutine): + resolved = await resolved + if resolved is not None: + # if isinstance(resolved, Coroutine): + # resolved = await resolved + if isinstance(resolved, list): + if resolved == list(): + return resolved + elif not isinstance(resolved[0], DBRef): + return resolved + else: + return await self.default_resolver(root, info, required_fields, **args_copy) + elif isinstance(resolved, QuerySet): + args.update(resolved._query) + args_copy = args.copy() + for arg_name, arg in args.copy().items(): + if "." in arg_name or arg_name not in self.model._fields_ordered + ( + "first", + "last", + "before", + "after", + ) + tuple(self.filter_args.keys()): + args_copy.pop(arg_name) + if arg_name == "_id" and isinstance(arg, dict): + operation = list(arg.keys())[0] + args_copy["pk" + operation.replace("$", "__")] = arg[operation] + if not isinstance(arg, ObjectId) and "." in arg_name: + if isinstance(arg, dict): + operation = list(arg.keys())[0] + args_copy[ + arg_name.replace(".", "__") + operation.replace("$", "__") + ] = arg[operation] + else: + args_copy[arg_name.replace(".", "__")] = arg + elif "." 
in arg_name and isinstance(arg, ObjectId): + args_copy[arg_name.replace(".", "__")] = arg + else: + operations = ["$lte", "$gte", "$ne", "$in"] + if isinstance(arg, dict) and any(op in arg for op in operations): + operation = list(arg.keys())[0] + args_copy[arg_name + operation.replace("$", "__")] = arg[operation] + del args_copy[arg_name] + + return await self.default_resolver( + root, info, required_fields, resolved=resolved, **args_copy + ) + elif isinstance(resolved, Promise): + return resolved.value + else: + return await resolved + + return await self.default_resolver(root, info, required_fields, **args) + + @classmethod + async def connection_resolver(cls, resolver, connection_type, root, info, **args): + if root: + for key, value in root.__dict__.items(): + if value: + try: + setattr(root, key, from_global_id(value)[1]) + except Exception: + pass + + iterable = await resolver(root=root, info=info, **args) + + if isinstance(connection_type, graphene.NonNull): + connection_type = connection_type.of_type + on_resolve = partial(cls.resolve_connection, connection_type, args) + if Promise.is_thenable(iterable): + iterable = Promise.resolve(iterable).then(on_resolve).value + return on_resolve(iterable) diff --git a/graphene_mongo/registry.py b/graphene_mongo/registry.py index 8d57713e..70e88480 100644 --- a/graphene_mongo/registry.py +++ b/graphene_mongo/registry.py @@ -1,24 +1,33 @@ from graphene import Enum +from graphene_mongo.utils import ExecutorEnum + class Registry(object): def __init__(self): self._registry = {} + self._registry_async = {} self._registry_string_map = {} + self._registry_async_string_map = {} self._registry_enum = {} def register(self, cls): from .types import GrapheneMongoengineObjectTypes + from .types_async import AsyncGrapheneMongoengineObjectTypes - assert issubclass( - cls, - GrapheneMongoengineObjectTypes - ), 'Only Mongoengine object types can be registered, received "{}"'.format( + assert ( + issubclass(cls, GrapheneMongoengineObjectTypes) + or issubclass(cls, AsyncGrapheneMongoengineObjectTypes) + ), 'Only Mongoengine/Async Mongoengine object types can be registered, received "{}"'.format( cls.__name__ ) assert cls._meta.registry == self, "Registry for a Model have to match." 
- self._registry[cls._meta.model] = cls - self._registry_string_map[cls.__name__] = cls._meta.model.__name__ + if issubclass(cls, GrapheneMongoengineObjectTypes): + self._registry[cls._meta.model] = cls + self._registry_string_map[cls.__name__] = cls._meta.model.__name__ + else: + self._registry_async[cls._meta.model] = cls + self._registry_async_string_map[cls.__name__] = cls._meta.model.__name__ # Rescan all fields for model, cls in self._registry.items(): @@ -26,18 +35,22 @@ def register(self, cls): def register_enum(self, cls): from enum import EnumMeta + assert isinstance( cls, EnumMeta ), f'Only EnumMeta can be registered, received "{cls.__name__}"' - if not cls.__name__.endswith('Enum'): - name = cls.__name__ + 'Enum' + if not cls.__name__.endswith("Enum"): + name = cls.__name__ + "Enum" else: name = cls.__name__ cls.__name__ = name self._registry_enum[cls] = Enum.from_enum(cls) - def get_type_for_model(self, model): - return self._registry.get(model) + def get_type_for_model(self, model, executor: ExecutorEnum = ExecutorEnum.SYNC): + if executor == ExecutorEnum.SYNC: + return self._registry.get(model) + else: + return self._registry_async.get(model) def check_enum_already_exist(self, cls): return cls in self._registry_enum @@ -47,7 +60,9 @@ def get_type_for_enum(self, cls): registry = None +async_registry = None inputs_registry = None +async_inputs_registry = None def get_inputs_registry(): @@ -57,6 +72,13 @@ def get_inputs_registry(): return inputs_registry +def get_inputs_async_registry(): + global async_inputs_registry + if not async_inputs_registry: + async_inputs_registry = Registry() + return async_inputs_registry + + def get_global_registry(): global registry if not registry: @@ -64,8 +86,22 @@ def get_global_registry(): return registry +def get_global_async_registry(): + global async_registry + if not async_registry: + async_registry = Registry() + return async_registry + + def reset_global_registry(): global registry global inputs_registry registry = None inputs_registry = None + + +def reset_global_async_registry(): + global async_registry + global async_inputs_registry + async_registry = None + async_inputs_registry = None diff --git a/graphene_mongo/tests/conftest.py b/graphene_mongo/tests/conftest.py index 9d197285..479f29f7 100644 --- a/graphene_mongo/tests/conftest.py +++ b/graphene_mongo/tests/conftest.py @@ -1,22 +1,23 @@ import os +from datetime import datetime + import pytest -from datetime import datetime from .models import ( + AnotherChild, Article, + CellTower, + Child, + ChildRegisteredAfter, + ChildRegisteredBefore, Editor, EmbeddedArticle, + ParentWithRelationship, Player, - Reporter, - Child, - AnotherChild, ProfessorMetadata, ProfessorVector, - ChildRegisteredBefore, - ChildRegisteredAfter, - ParentWithRelationship, - CellTower, Publisher, + Reporter, ) current_dirname = os.path.dirname(os.path.abspath(__file__)) @@ -68,7 +69,7 @@ def fixtures(): last_name="Iverson", email="ai@gmail.com", awards=["2010-mvp"], - generic_references=[article1] + generic_references=[article1], ) reporter1.articles = [article1, article2] embedded_article1 = EmbeddedArticle(headline="Real", editor=editor1) @@ -82,13 +83,15 @@ def fixtures(): player1 = Player( first_name="Michael", last_name="Jordan", - articles=[article1, article2]) + articles=[article1, article2], + ) player1.save() player2 = Player( first_name="Magic", last_name="Johnson", opponent=player1, - articles=[article3]) + articles=[article3], + ) player2.save() player3 = Player(first_name="Larry", last_name="Bird", 
players=[player1, player2]) player3.save() @@ -165,7 +168,9 @@ def fixtures(): child4.save() parent = ParentWithRelationship( - name="Yui", before_child=[child3], after_child=[child4] + name="Yui", + before_child=[child3], + after_child=[child4], ) parent.save() diff --git a/graphene_mongo/tests/models.py b/graphene_mongo/tests/models.py index f9c2cae6..871b5c9e 100644 --- a/graphene_mongo/tests/models.py +++ b/graphene_mongo/tests/models.py @@ -1,5 +1,6 @@ -import mongoengine from datetime import datetime + +import mongoengine import mongomock from mongomock import gridfs @@ -80,7 +81,7 @@ class Reporter(mongoengine.Document): awards = mongoengine.ListField(mongoengine.StringField()) articles = mongoengine.ListField(mongoengine.ReferenceField(Article)) embedded_articles = mongoengine.ListField( - mongoengine.EmbeddedDocumentField(EmbeddedArticle) + mongoengine.EmbeddedDocumentField(EmbeddedArticle), ) embedded_list_articles = mongoengine.EmbeddedDocumentListField(EmbeddedArticle) generic_reference = mongoengine.GenericReferenceField(choices=[Article, Editor], required=True) @@ -116,14 +117,12 @@ class CellTower(mongoengine.Document): class Child(Parent): - meta = {"collection": "test_parent"} baz = mongoengine.StringField() loc = mongoengine.PointField() class AnotherChild(Parent): - meta = {"collection": "test_parent"} qux = mongoengine.StringField() loc = mongoengine.PointField() @@ -146,10 +145,10 @@ class ProfessorVector(mongoengine.Document): class ParentWithRelationship(mongoengine.Document): meta = {"collection": "test_parent_reference"} before_child = mongoengine.ListField( - mongoengine.ReferenceField("ChildRegisteredBefore") + mongoengine.ReferenceField("ChildRegisteredBefore"), ) after_child = mongoengine.ListField( - mongoengine.ReferenceField("ChildRegisteredAfter") + mongoengine.ReferenceField("ChildRegisteredAfter"), ) name = mongoengine.StringField() diff --git a/graphene_mongo/tests/nodes.py b/graphene_mongo/tests/nodes.py index 3be5759a..32f10a8c 100644 --- a/graphene_mongo/tests/nodes.py +++ b/graphene_mongo/tests/nodes.py @@ -3,6 +3,7 @@ from . import models from . import types # noqa: F401 +from .models import ProfessorMetadata from ..types import MongoengineObjectType @@ -65,16 +66,22 @@ class Meta: interfaces = (Node,) +class ChildRegisteredAfterNode(MongoengineObjectType): + class Meta: + model = models.ChildRegisteredAfter + interfaces = (Node,) + + class ParentWithRelationshipNode(MongoengineObjectType): class Meta: model = models.ParentWithRelationship interfaces = (Node,) -class ChildRegisteredAfterNode(MongoengineObjectType): +class ProfessorMetadataNode(MongoengineObjectType): class Meta: - model = models.ChildRegisteredAfter - interfaces = (Node,) + model = ProfessorMetadata + interfaces = (graphene.Node,) class ProfessorVectorNode(MongoengineObjectType): diff --git a/graphene_mongo/tests/nodes_async.py b/graphene_mongo/tests/nodes_async.py new file mode 100644 index 00000000..a38635ce --- /dev/null +++ b/graphene_mongo/tests/nodes_async.py @@ -0,0 +1,108 @@ +import graphene +from graphene.relay import Node + +from . import models +from . 
import types # noqa: F401 +from .models import ProfessorMetadata +from ..types_async import AsyncMongoengineObjectType + + +class PublisherAsyncNode(AsyncMongoengineObjectType): + legal_name = graphene.String() + bad_field = graphene.String() + + class Meta: + model = models.Publisher + only_fields = ("id", "name") + interfaces = (Node,) + + +class ArticleAsyncNode(AsyncMongoengineObjectType): + class Meta: + model = models.Article + interfaces = (Node,) + + +class EditorAsyncNode(AsyncMongoengineObjectType): + class Meta: + model = models.Editor + interfaces = (Node,) + + +class EmbeddedArticleAsyncNode(AsyncMongoengineObjectType): + class Meta: + model = models.EmbeddedArticle + interfaces = (Node,) + + +class PlayerAsyncNode(AsyncMongoengineObjectType): + class Meta: + model = models.Player + interfaces = (Node,) + filter_fields = {"first_name": ["istartswith", "in"]} + + +class ReporterAsyncNode(AsyncMongoengineObjectType): + class Meta: + model = models.Reporter + interfaces = (Node,) + + +class ParentAsyncNode(AsyncMongoengineObjectType): + class Meta: + model = models.Parent + interfaces = (Node,) + + +class ChildAsyncNode(AsyncMongoengineObjectType): + class Meta: + model = models.Child + interfaces = (Node,) + + +class ChildRegisteredBeforeAsyncNode(AsyncMongoengineObjectType): + class Meta: + model = models.ChildRegisteredBefore + interfaces = (Node,) + + +class ChildRegisteredAfterAsyncNode(AsyncMongoengineObjectType): + class Meta: + model = models.ChildRegisteredAfter + interfaces = (Node,) + + +class ParentWithRelationshipAsyncNode(AsyncMongoengineObjectType): + class Meta: + model = models.ParentWithRelationship + interfaces = (Node,) + + +class ProfessorMetadataAsyncNode(AsyncMongoengineObjectType): + class Meta: + model = ProfessorMetadata + interfaces = (graphene.Node,) + + +class ProfessorVectorAsyncNode(AsyncMongoengineObjectType): + class Meta: + model = models.ProfessorVector + interfaces = (Node,) + + +class ErroneousModelAsyncNode(AsyncMongoengineObjectType): + class Meta: + model = models.ErroneousModel + interfaces = (Node,) + + +class BarAsyncNode(AsyncMongoengineObjectType): + class Meta: + model = models.Bar + interfaces = (Node,) + + +class FooAsyncNode(AsyncMongoengineObjectType): + class Meta: + model = models.Foo + interfaces = (Node,) diff --git a/graphene_mongo/tests/test_converter.py b/graphene_mongo/tests/test_converter.py index 7109bb8f..b111e4a6 100644 --- a/graphene_mongo/tests/test_converter.py +++ b/graphene_mongo/tests/test_converter.py @@ -91,9 +91,7 @@ def test_should_dict_convert_json(): def test_should_map_convert_json(): - assert_conversion( - mongoengine.MapField, graphene.JSONString, field=mongoengine.StringField() - ) + assert_conversion(mongoengine.MapField, graphene.JSONString, field=mongoengine.StringField()) def test_should_point_convert_field(): @@ -127,15 +125,11 @@ def test_should_file_convert_field(): def test_should_field_convert_list(): - assert_conversion( - mongoengine.ListField, graphene.List, field=mongoengine.StringField() - ) + assert_conversion(mongoengine.ListField, graphene.List, field=mongoengine.StringField()) def test_should_geo_convert_list(): - assert_conversion( - mongoengine.GeoPointField, graphene.List, field=mongoengine.FloatField() - ) + assert_conversion(mongoengine.GeoPointField, graphene.List, field=mongoengine.FloatField()) def test_should_reference_convert_dynamic(): @@ -144,9 +138,7 @@ class Meta: model = Editor interfaces = (graphene.Node,) - dynamic_field = convert_mongoengine_field( - 
EmbeddedArticle._fields["editor"], E._meta.registry - ) + dynamic_field = convert_mongoengine_field(EmbeddedArticle._fields["editor"], E._meta.registry) assert isinstance(dynamic_field, graphene.Dynamic) graphene_type = dynamic_field.get_type() assert isinstance(graphene_type, graphene.Field) @@ -159,9 +151,7 @@ class Meta: model = Publisher interfaces = (graphene.Node,) - dynamic_field = convert_mongoengine_field( - Editor._fields["company"], P._meta.registry - ) + dynamic_field = convert_mongoengine_field(Editor._fields["company"], P._meta.registry) assert isinstance(dynamic_field, graphene.Dynamic) graphene_type = dynamic_field.get_type() @@ -209,9 +199,7 @@ class A(MongoengineObjectType): class Meta: model = Article - graphene_field = convert_mongoengine_field( - Reporter._fields["articles"], A._meta.registry - ) + graphene_field = convert_mongoengine_field(Reporter._fields["articles"], A._meta.registry) assert isinstance(graphene_field, graphene.List) dynamic_field = graphene_field.get_type() assert dynamic_field._of_type == A @@ -270,17 +258,13 @@ class Meta: model = Player interfaces = (graphene.Node,) - dynamic_field = convert_mongoengine_field( - Player._fields["opponent"], P._meta.registry - ) + dynamic_field = convert_mongoengine_field(Player._fields["opponent"], P._meta.registry) assert isinstance(dynamic_field, graphene.Dynamic) graphene_type = dynamic_field.get_type() assert isinstance(graphene_type, graphene.Field) assert graphene_type.type == P - graphene_field = convert_mongoengine_field( - Player._fields["players"], P._meta.registry - ) + graphene_field = convert_mongoengine_field(Player._fields["players"], P._meta.registry) assert isinstance(graphene_field, MongoengineConnectionField) @@ -293,9 +277,7 @@ class P(MongoengineObjectType): class Meta: model = Player - graphene_field = convert_mongoengine_field( - Player._fields["players"], P._meta.registry - ) + graphene_field = convert_mongoengine_field(Player._fields["players"], P._meta.registry) assert isinstance(graphene_field, graphene.List) dynamic_field = graphene_field.get_type() assert dynamic_field._of_type == P @@ -306,27 +288,16 @@ class A(MongoengineObjectType): class Meta: model = Article - headline_field = convert_mongoengine_field( - Article._fields["headline"], A._meta.registry - ) + headline_field = convert_mongoengine_field(Article._fields["headline"], A._meta.registry) assert headline_field.kwargs["description"] == "The article headline." - pubDate_field = convert_mongoengine_field( - Article._fields["pub_date"], A._meta.registry - ) - assert ( - pubDate_field.kwargs["description"] - == "Publication Date\nThe date of first press." - ) + pubDate_field = convert_mongoengine_field(Article._fields["pub_date"], A._meta.registry) + assert pubDate_field.kwargs["description"] == "Publication Date\nThe date of first press." - firstName_field = convert_mongoengine_field( - Editor._fields["first_name"], A._meta.registry - ) + firstName_field = convert_mongoengine_field(Editor._fields["first_name"], A._meta.registry) assert firstName_field.kwargs["description"] == "Editor's first name.\n(fname)" - metadata_field = convert_mongoengine_field( - Editor._fields["metadata"], A._meta.registry - ) + metadata_field = convert_mongoengine_field(Editor._fields["metadata"], A._meta.registry) assert metadata_field.kwargs["description"] == "Arbitrary metadata." 
@@ -339,9 +310,7 @@ class E(MongoengineObjectType): class Meta: model = Editor - editor_field = convert_mongoengine_field( - Article._fields["editor"], A._meta.registry - ).get_type() + editor_field = convert_mongoengine_field(Article._fields["editor"], A._meta.registry).get_type() assert editor_field.description == "An Editor of a publication." diff --git a/graphene_mongo/tests/test_fields.py b/graphene_mongo/tests/test_fields.py index 50792bfe..157e1c10 100644 --- a/graphene_mongo/tests/test_fields.py +++ b/graphene_mongo/tests/test_fields.py @@ -1,4 +1,7 @@ -from . import nodes +import pytest + +from . import nodes, nodes_async +from .. import AsyncMongoengineConnectionField from ..fields import MongoengineConnectionField @@ -44,15 +47,17 @@ def test_field_args_with_unconverted_field(): assert set(field.field_args.keys()) == set(field_args) -async def test_default_resolver_with_colliding_objects_field(): - field = MongoengineConnectionField(nodes.ErroneousModelNode) +@pytest.mark.asyncio +async def test_default_resolver_with_colliding_objects_field_async(): + field = AsyncMongoengineConnectionField(nodes_async.ErroneousModelAsyncNode) connection = await field.default_resolver(None, {}) assert 0 == len(connection.iterable) -async def test_default_resolver_connection_list_length(fixtures): - field = MongoengineConnectionField(nodes.ArticleNode) +@pytest.mark.asyncio +async def test_default_resolver_connection_list_length_async(fixtures): + field = AsyncMongoengineConnectionField(nodes_async.ArticleAsyncNode) connection = await field.default_resolver(None, {}, **{"first": 1}) assert hasattr(connection, "list_length") diff --git a/graphene_mongo/tests/test_inputs.py b/graphene_mongo/tests/test_inputs.py index 93655e60..9f792ef6 100644 --- a/graphene_mongo/tests/test_inputs.py +++ b/graphene_mongo/tests/test_inputs.py @@ -1,4 +1,5 @@ import graphene +import pytest from graphene.relay import Node @@ -7,6 +8,7 @@ from .types import ArticleInput, EditorInput +@pytest.mark.asyncio async def test_should_create(fixtures): class CreateArticle(graphene.Mutation): class Arguments: @@ -44,6 +46,7 @@ class Mutation(graphene.ObjectType): assert result.data == expected +@pytest.mark.asyncio async def test_should_update(fixtures): class UpdateEditor(graphene.Mutation): class Arguments: @@ -61,11 +64,9 @@ async def mutate(self, info, id, editor): return UpdateEditor(editor=editor_to_update) class Query(graphene.ObjectType): - node = Node.Field() class Mutation(graphene.ObjectType): - update_editor = UpdateEditor.Field() query = """ diff --git a/graphene_mongo/tests/test_mutation.py b/graphene_mongo/tests/test_mutation.py index 695ea32a..fa7bfcba 100644 --- a/graphene_mongo/tests/test_mutation.py +++ b/graphene_mongo/tests/test_mutation.py @@ -1,4 +1,5 @@ import graphene +import pytest from graphene.relay import Node @@ -6,10 +7,10 @@ from .nodes import ArticleNode, EditorNode +@pytest.mark.asyncio async def test_should_create(fixtures): class CreateArticle(graphene.Mutation): class Arguments: - headline = graphene.String() article = graphene.Field(ArticleNode) @@ -21,11 +22,9 @@ async def mutate(self, info, headline): return CreateArticle(article=article) class Query(graphene.ObjectType): - node = Node.Field() class Mutation(graphene.ObjectType): - create_article = CreateArticle.Field() query = """ @@ -46,6 +45,7 @@ class Mutation(graphene.ObjectType): assert result.data == expected +@pytest.mark.asyncio async def test_should_update(fixtures): class UpdateEditor(graphene.Mutation): class Arguments: @@ 
-61,11 +61,9 @@ async def mutate(self, info, id, first_name): return UpdateEditor(editor=editor) class Query(graphene.ObjectType): - node = Node.Field() class Mutation(graphene.ObjectType): - update_editor = UpdateEditor.Field() query = """ diff --git a/graphene_mongo/tests/test_query.py b/graphene_mongo/tests/test_query.py index a1facbea..85a15056 100644 --- a/graphene_mongo/tests/test_query.py +++ b/graphene_mongo/tests/test_query.py @@ -2,11 +2,13 @@ import os import json import graphene +import pytest from . import models from . import types +@pytest.mark.asyncio async def test_should_query_editor(fixtures, fixtures_dirname): class Query(graphene.ObjectType): editor = graphene.Field(types.EditorType) @@ -71,6 +73,7 @@ async def resolve_editors(self, *args, **kwargs): assert result.data == expected +@pytest.mark.asyncio async def test_should_query_reporter(fixtures): class Query(graphene.ObjectType): reporter = graphene.Field(types.ReporterType) @@ -115,6 +118,7 @@ async def resolve_reporter(self, *args, **kwargs): assert result.data == expected +@pytest.mark.asyncio async def test_should_custom_kwargs(fixtures): class Query(graphene.ObjectType): editors = graphene.List(types.EditorType, first=graphene.Int()) @@ -145,6 +149,7 @@ async def resolve_editors(self, *args, **kwargs): assert result.data == expected +@pytest.mark.asyncio async def test_should_self_reference(fixtures): class Query(graphene.ObjectType): all_players = graphene.List(types.PlayerType) @@ -191,11 +196,10 @@ async def resolve_all_players(self, *args, **kwargs): assert result.data == expected +@pytest.mark.asyncio async def test_should_query_with_embedded_document(fixtures): class Query(graphene.ObjectType): - professor_vector = graphene.Field( - types.ProfessorVectorType, id=graphene.String() - ) + professor_vector = graphene.Field(types.ProfessorVectorType, id=graphene.String()) async def resolve_professor_vector(self, info, id): return models.ProfessorVector.objects(metadata__id=id).first() @@ -211,15 +215,14 @@ async def resolve_professor_vector(self, info, id): } """ - expected = { - "professorVector": {"vec": [1.0, 2.3], "metadata": {"firstName": "Steven"}} - } + expected = {"professorVector": {"vec": [1.0, 2.3], "metadata": {"firstName": "Steven"}}} schema = graphene.Schema(query=Query, types=[types.ProfessorVectorType]) result = await schema.execute_async(query) assert not result.errors assert result.data == expected +@pytest.mark.asyncio async def test_should_query_child(fixtures): class Query(graphene.ObjectType): children = graphene.List(types.ChildType) @@ -256,6 +259,7 @@ async def resolve_children(self, *args, **kwargs): assert result.data == expected +@pytest.mark.asyncio async def test_should_query_other_childs(fixtures): class Query(graphene.ObjectType): children = graphene.List(types.AnotherChildType) @@ -292,6 +296,7 @@ async def resolve_children(self, *args, **kwargs): assert result.data == expected +@pytest.mark.asyncio async def test_should_query_all_childs(fixtures): class Query(graphene.ObjectType): children = graphene.List(types.ChildUnionType) @@ -345,6 +350,7 @@ async def resolve_children(self, *args, **kwargs): assert result.data == expected +@pytest.mark.asyncio async def test_should_query_cell_tower(fixtures): class Query(graphene.ObjectType): cell_towers = graphene.List(types.CellTowerType) diff --git a/graphene_mongo/tests/test_relay_query.py b/graphene_mongo/tests/test_relay_query.py index 010c2b0f..dc09b4e3 100644 --- a/graphene_mongo/tests/test_relay_query.py +++ 
b/graphene_mongo/tests/test_relay_query.py @@ -1,8 +1,9 @@ -import os -import json import base64 -import graphene +import json +import os +import graphene +import pytest from graphene.relay import Node from graphql_relay.node.node import to_global_id @@ -12,6 +13,7 @@ from ..types import MongoengineObjectType +@pytest.mark.asyncio async def test_should_query_reporter(fixtures): class Query(graphene.ObjectType): reporter = graphene.Field(nodes.ReporterNode) @@ -90,6 +92,7 @@ async def resolve_reporter(self, *args, **kwargs): assert result.data == expected +@pytest.mark.asyncio async def test_should_query_reporters_with_nested_document(fixtures): class Query(graphene.ObjectType): reporters = MongoengineConnectionField(nodes.ReporterNode) @@ -135,6 +138,7 @@ class Query(graphene.ObjectType): assert result.data == expected +@pytest.mark.asyncio async def test_should_query_all_editors(fixtures, fixtures_dirname): class Query(graphene.ObjectType): editors = MongoengineConnectionField(nodes.EditorNode) @@ -202,18 +206,16 @@ class Query(graphene.ObjectType): assert result.data == expected +@pytest.mark.asyncio async def test_should_query_editors_with_dataloader(fixtures): from promise import Promise from promise.dataloader import DataLoader class ArticleLoader(DataLoader): - async def batch_load_fn(self, instances): + def batch_load_fn(self, instances): queryset = models.Article.objects(editor__in=instances) return Promise.resolve( - [ - [a for a in queryset if a.editor.id == instance.id] - for instance in instances - ] + [[a for a in queryset if a.editor.id == instance.id] for instance in instances] ) article_loader = ArticleLoader() @@ -268,6 +270,7 @@ class Query(graphene.ObjectType): assert result.data == expected +@pytest.mark.asyncio async def test_should_filter_editors_by_id(fixtures): class Query(graphene.ObjectType): editors = MongoengineConnectionField(nodes.EditorNode) @@ -304,6 +307,7 @@ class Query(graphene.ObjectType): assert result.data == expected +@pytest.mark.asyncio async def test_should_filter(fixtures): class Query(graphene.ObjectType): articles = MongoengineConnectionField(nodes.ArticleNode) @@ -342,6 +346,7 @@ class Query(graphene.ObjectType): assert result.data == expected +@pytest.mark.asyncio async def test_should_filter_by_reference_field(fixtures): class Query(graphene.ObjectType): articles = MongoengineConnectionField(nodes.ArticleNode) @@ -361,9 +366,7 @@ class Query(graphene.ObjectType): } """ expected = { - "articles": { - "edges": [{"node": {"headline": "Hello", "editor": {"firstName": "Penny"}}}] - } + "articles": {"edges": [{"node": {"headline": "Hello", "editor": {"firstName": "Penny"}}}]} } schema = graphene.Schema(query=Query) result = await schema.execute_async(query) @@ -371,6 +374,7 @@ class Query(graphene.ObjectType): assert result.data == expected +@pytest.mark.asyncio async def test_should_filter_through_inheritance(fixtures): class Query(graphene.ObjectType): node = Node.Field() @@ -411,6 +415,7 @@ class Query(graphene.ObjectType): assert result.data == expected +@pytest.mark.asyncio async def test_should_filter_by_list_contains(fixtures): # Notes: https://goo.gl/hMNRgs class Query(graphene.ObjectType): @@ -446,9 +451,9 @@ class Query(graphene.ObjectType): "genericReferences": [ { "__typename": "ArticleNode", - "headline": "Hello" + "headline": "Hello", } - ] + ], } } ] @@ -460,6 +465,7 @@ class Query(graphene.ObjectType): assert result.data == expected +@pytest.mark.asyncio async def test_should_filter_by_id(fixtures): # Notes: https://goo.gl/hMNRgs 
class Query(graphene.ObjectType): @@ -487,6 +493,7 @@ class Query(graphene.ObjectType): assert result.data == expected +@pytest.mark.asyncio async def test_should_first_n(fixtures): class Query(graphene.ObjectType): editors = MongoengineConnectionField(nodes.EditorNode) @@ -530,6 +537,7 @@ class Query(graphene.ObjectType): assert result.data == expected +@pytest.mark.asyncio async def test_should_after(fixtures): class Query(graphene.ObjectType): players = MongoengineConnectionField(nodes.PlayerNode) @@ -562,6 +570,7 @@ class Query(graphene.ObjectType): assert result.data == expected +@pytest.mark.asyncio async def test_should_before(fixtures): class Query(graphene.ObjectType): players = MongoengineConnectionField(nodes.PlayerNode) @@ -596,6 +605,7 @@ class Query(graphene.ObjectType): assert result.data == expected +@pytest.mark.asyncio async def test_should_last_n(fixtures): class Query(graphene.ObjectType): players = MongoengineConnectionField(nodes.PlayerNode) @@ -627,6 +637,7 @@ class Query(graphene.ObjectType): assert result.data == expected +@pytest.mark.asyncio async def test_should_self_reference(fixtures): class Query(graphene.ObjectType): players = MongoengineConnectionField(nodes.PlayerNode) @@ -701,6 +712,7 @@ class Query(graphene.ObjectType): assert result.data == expected +@pytest.mark.asyncio async def test_should_lazy_reference(fixtures): class Query(graphene.ObjectType): node = Node.Field() @@ -741,14 +753,10 @@ class Query(graphene.ObjectType): { "node": { "beforeChild": { - "edges": [ - {"node": {"name": "Akari", "parent": {"name": "Yui"}}} - ] + "edges": [{"node": {"name": "Akari", "parent": {"name": "Yui"}}}] }, "afterChild": { - "edges": [ - {"node": {"name": "Kyouko", "parent": {"name": "Yui"}}} - ] + "edges": [{"node": {"name": "Kyouko", "parent": {"name": "Yui"}}}] }, } } @@ -761,6 +769,7 @@ class Query(graphene.ObjectType): assert result.data == expected +@pytest.mark.asyncio async def test_should_query_with_embedded_document(fixtures): class Query(graphene.ObjectType): professors = MongoengineConnectionField(nodes.ProfessorVectorNode) @@ -781,9 +790,7 @@ class Query(graphene.ObjectType): """ expected = { "professors": { - "edges": [ - {"node": {"vec": [1.0, 2.3], "metadata": {"firstName": "Steven"}}} - ] + "edges": [{"node": {"vec": [1.0, 2.3], "metadata": {"firstName": "Steven"}}}] } } schema = graphene.Schema(query=Query) @@ -792,6 +799,7 @@ class Query(graphene.ObjectType): assert result.data == expected +@pytest.mark.asyncio async def test_should_get_queryset_returns_dict_filters(fixtures): class Query(graphene.ObjectType): node = Node.Field() @@ -833,15 +841,14 @@ class Query(graphene.ObjectType): assert result.data == expected +@pytest.mark.asyncio async def test_should_get_queryset_returns_qs_filters(fixtures): - async def get_queryset(model, info, **args): + def get_queryset(model, info, **args): return model.objects(headline="World") class Query(graphene.ObjectType): node = Node.Field() - articles = MongoengineConnectionField( - nodes.ArticleNode, get_queryset=get_queryset - ) + articles = MongoengineConnectionField(nodes.ArticleNode, get_queryset=get_queryset) query = """ query ArticlesQuery { @@ -877,6 +884,7 @@ class Query(graphene.ObjectType): assert result.data == expected +@pytest.mark.asyncio async def test_should_filter_mongoengine_queryset(fixtures): class Query(graphene.ObjectType): players = MongoengineConnectionField(nodes.PlayerNode) @@ -904,11 +912,10 @@ class Query(graphene.ObjectType): result = await schema.execute_async(query) assert not 
result.errors - assert json.dumps(result.data, sort_keys=True) == json.dumps( - expected, sort_keys=True - ) + assert json.dumps(result.data, sort_keys=True) == json.dumps(expected, sort_keys=True) +@pytest.mark.asyncio async def test_should_query_document_with_embedded(fixtures): class Query(graphene.ObjectType): foos = MongoengineConnectionField(nodes.FooNode) @@ -939,6 +946,7 @@ async def resolve_multiple_foos(self, *args, **kwargs): assert not result.errors +@pytest.mark.asyncio async def test_should_filter_mongoengine_queryset_with_list(fixtures): class Query(graphene.ObjectType): players = MongoengineConnectionField(nodes.PlayerNode) @@ -966,11 +974,10 @@ class Query(graphene.ObjectType): result = await schema.execute_async(query) assert not result.errors - assert json.dumps(result.data, sort_keys=True) == json.dumps( - expected, sort_keys=True - ) + assert json.dumps(result.data, sort_keys=True) == json.dumps(expected, sort_keys=True) +@pytest.mark.asyncio async def test_should_get_correct_list_of_documents(fixtures): class Query(graphene.ObjectType): players = MongoengineConnectionField(nodes.PlayerNode) @@ -995,22 +1002,27 @@ class Query(graphene.ObjectType): """ expected = { "players": { - "edges": [{ - "node": { - "firstName": "Michael", - "articles": { - "edges": [{ - "node": { - "headline": "Hello" - } - }, { - "node": { - "headline": "World" - } - }] + "edges": [ + { + "node": { + "firstName": "Michael", + "articles": { + "edges": [ + { + "node": { + "headline": "Hello", + } + }, + { + "node": { + "headline": "World", + } + }, + ] + }, } } - }] + ] } } schema = graphene.Schema(query=Query) @@ -1020,6 +1032,7 @@ class Query(graphene.ObjectType): assert result.data == expected +@pytest.mark.asyncio async def test_should_filter_mongoengine_queryset_by_id_and_other_fields(fixtures): class Query(graphene.ObjectType): players = MongoengineConnectionField(nodes.PlayerNode) @@ -1045,8 +1058,8 @@ class Query(graphene.ObjectType): """.format(larry_relay_id=larry_relay_id) expected = { - 'players': { - 'edges': [] + "players": { + "edges": [], } } schema = graphene.Schema(query=Query) diff --git a/graphene_mongo/tests/test_relay_query_async.py b/graphene_mongo/tests/test_relay_query_async.py new file mode 100644 index 00000000..3b9a542a --- /dev/null +++ b/graphene_mongo/tests/test_relay_query_async.py @@ -0,0 +1,1064 @@ +import base64 +import json +import os + +import graphene +import pytest +from graphene.relay import Node +from graphql_relay.node.node import to_global_id + +from . import models +from . import nodes_async +from .. import AsyncMongoengineConnectionField, AsyncMongoengineObjectType + + +@pytest.mark.asyncio +async def test_should_query_reporter_async(fixtures): + class Query(graphene.ObjectType): + reporter = graphene.Field(nodes_async.ReporterAsyncNode) + + async def resolve_reporter(self, *args, **kwargs): + return models.Reporter.objects.no_dereference().first() + + query = """ + query ReporterQuery { + reporter { + firstName, + lastName, + email, + awards, + articles { + edges { + node { + headline + } + } + }, + embeddedArticles { + edges { + node { + headline + } + } + }, + embeddedListArticles { + edges { + node { + headline + } + } + }, + genericReference { + __typename + ... 
on ArticleAsyncNode { + headline + } + } + } + } + """ + expected = { + "reporter": { + "firstName": "Allen", + "lastName": "Iverson", + "email": "ai@gmail.com", + "awards": ["2010-mvp"], + "articles": { + "edges": [ + {"node": {"headline": "Hello"}}, + {"node": {"headline": "World"}}, + ] + }, + "embeddedArticles": { + "edges": [ + {"node": {"headline": "Real"}}, + {"node": {"headline": "World"}}, + ] + }, + "embeddedListArticles": { + "edges": [ + {"node": {"headline": "World"}}, + {"node": {"headline": "Real"}}, + ] + }, + "genericReference": {"__typename": "ArticleAsyncNode", "headline": "Hello"}, + } + } + + schema = graphene.Schema(query=Query) + result = await schema.execute_async(query) + assert not result.errors + assert result.data == expected + + +@pytest.mark.asyncio +async def test_should_query_reporters_with_nested_document_async(fixtures): + class Query(graphene.ObjectType): + reporters = AsyncMongoengineConnectionField(nodes_async.ReporterAsyncNode) + + query = """ + query ReporterQuery { + reporters(firstName: "Allen") { + edges { + node { + firstName, + lastName, + email, + articles(headline: "Hello") { + edges { + node { + headline + } + } + } + } + } + } + } + """ + expected = { + "reporters": { + "edges": [ + { + "node": { + "firstName": "Allen", + "lastName": "Iverson", + "email": "ai@gmail.com", + "articles": {"edges": [{"node": {"headline": "Hello"}}]}, + } + } + ] + } + } + + schema = graphene.Schema(query=Query) + result = await schema.execute_async(query) + assert not result.errors + assert result.data == expected + + +@pytest.mark.asyncio +async def test_should_query_all_editors_async(fixtures, fixtures_dirname): + class Query(graphene.ObjectType): + editors = AsyncMongoengineConnectionField(nodes_async.EditorAsyncNode) + + query = """ + query EditorQuery { + editors { + edges { + node { + id, + firstName, + lastName, + avatar { + contentType, + length, + data + } + } + } + } + } + """ + + avator_filename = os.path.join(fixtures_dirname, "image.jpg") + with open(avator_filename, "rb") as f: + data = base64.b64encode(f.read()) + + expected = { + "editors": { + "edges": [ + { + "node": { + "id": "RWRpdG9yQXN5bmNOb2RlOjE=", + "firstName": "Penny", + "lastName": "Hardaway", + "avatar": { + "contentType": "image/jpeg", + "length": 46928, + "data": data.decode("utf-8"), + }, + } + }, + { + "node": { + "id": "RWRpdG9yQXN5bmNOb2RlOjI=", + "firstName": "Grant", + "lastName": "Hill", + "avatar": {"contentType": None, "length": 0, "data": None}, + } + }, + { + "node": { + "id": "RWRpdG9yQXN5bmNOb2RlOjM=", + "firstName": "Dennis", + "lastName": "Rodman", + "avatar": {"contentType": None, "length": 0, "data": None}, + } + }, + ] + } + } + schema = graphene.Schema(query=Query) + result = await schema.execute_async(query) + assert not result.errors + assert result.data == expected + + +@pytest.mark.asyncio +async def test_should_query_editors_with_dataloader_async(fixtures): + from promise import Promise + from promise.dataloader import DataLoader + + class ArticleLoader(DataLoader): + def batch_load_fn(self, instances): + queryset = models.Article.objects(editor__in=instances) + return Promise.resolve( + [[a for a in queryset if a.editor.id == instance.id] for instance in instances] + ) + + article_loader = ArticleLoader() + + class _EditorNode(AsyncMongoengineObjectType): + class Meta: + model = models.Editor + interfaces = (graphene.Node,) + + articles = AsyncMongoengineConnectionField(nodes_async.ArticleAsyncNode) + + async def resolve_articles(self, *args, **kwargs): + 
return article_loader.load(self) + + class Query(graphene.ObjectType): + editors = AsyncMongoengineConnectionField(_EditorNode) + + query = """ + query EditorPromiseConnectionQuery { + editors(first: 1) { + edges { + node { + firstName, + articles(first: 1) { + edges { + node { + headline + } + } + } + } + } + } + } + """ + + expected = { + "editors": { + "edges": [ + { + "node": { + "firstName": "Penny", + "articles": {"edges": [{"node": {"headline": "Hello"}}]}, + } + } + ] + } + } + schema = graphene.Schema(query=Query) + result = await schema.execute_async(query) + assert not result.errors + assert result.data == expected + + +@pytest.mark.asyncio +async def test_should_filter_editors_by_id_async(fixtures): + class Query(graphene.ObjectType): + editors = AsyncMongoengineConnectionField(nodes_async.EditorAsyncNode) + + query = """ + query EditorQuery { + editors(id: "RWRpdG9yQXN5bmNOb2RlOjI=") { + edges { + node { + id, + firstName, + lastName + } + } + } + } + """ + expected = { + "editors": { + "edges": [ + { + "node": { + "id": "RWRpdG9yQXN5bmNOb2RlOjI=", + "firstName": "Grant", + "lastName": "Hill", + } + } + ] + } + } + schema = graphene.Schema(query=Query) + result = await schema.execute_async(query) + assert not result.errors + assert result.data == expected + + +@pytest.mark.asyncio +async def test_should_filter_async(fixtures): + class Query(graphene.ObjectType): + articles = AsyncMongoengineConnectionField(nodes_async.ArticleAsyncNode) + + query = """ + query ArticlesQuery { + articles(headline: "World") { + edges { + node { + headline, + pubDate, + editor { + firstName + } + } + } + } + } + """ + expected = { + "articles": { + "edges": [ + { + "node": { + "headline": "World", + "editor": {"firstName": "Grant"}, + "pubDate": "2020-01-01T00:00:00", + } + } + ] + } + } + schema = graphene.Schema(query=Query) + result = await schema.execute_async(query) + assert not result.errors + assert result.data == expected + + +@pytest.mark.asyncio +async def test_should_filter_by_reference_field_async(fixtures): + class Query(graphene.ObjectType): + articles = AsyncMongoengineConnectionField(nodes_async.ArticleAsyncNode) + + query = """ + query ArticlesQuery { + articles(editor: "RWRpdG9yTm9kZTox") { + edges { + node { + headline, + editor { + firstName + } + } + } + } + } + """ + expected = { + "articles": {"edges": [{"node": {"headline": "Hello", "editor": {"firstName": "Penny"}}}]} + } + schema = graphene.Schema(query=Query) + result = await schema.execute_async(query) + assert not result.errors + assert result.data == expected + + +@pytest.mark.asyncio +async def test_should_filter_through_inheritance_async(fixtures): + class Query(graphene.ObjectType): + node = Node.Field() + children = AsyncMongoengineConnectionField(nodes_async.ChildAsyncNode) + + query = """ + query ChildrenQuery { + children(bar: "bar") { + edges { + node { + bar, + baz, + loc { + type, + coordinates + } + } + } + } + } + """ + expected = { + "children": { + "edges": [ + { + "node": { + "bar": "bar", + "baz": "baz", + "loc": {"type": "Point", "coordinates": [10.0, 20.0]}, + } + } + ] + } + } + schema = graphene.Schema(query=Query) + result = await schema.execute_async(query) + assert not result.errors + assert result.data == expected + + +@pytest.mark.asyncio +async def test_should_filter_by_list_contains_async(fixtures): + # Notes: https://goo.gl/hMNRgs + class Query(graphene.ObjectType): + reporters = AsyncMongoengineConnectionField(nodes_async.ReporterAsyncNode) + + query = """ + query ReportersQuery { + 
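+            # "awards" is a ListField, so a scalar argument filters by list membership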
reporters (awards: "2010-mvp") { + edges { + node { + id, + firstName, + awards, + genericReferences { + __typename + ... on ArticleAsyncNode { + headline + } + } + } + } + } + } + """ + expected = { + "reporters": { + "edges": [ + { + "node": { + "id": "UmVwb3J0ZXJBc3luY05vZGU6MQ==", + "firstName": "Allen", + "awards": ["2010-mvp"], + "genericReferences": [ + {"__typename": "ArticleAsyncNode", "headline": "Hello"} + ], + } + } + ] + } + } + schema = graphene.Schema(query=Query) + result = await schema.execute_async(query) + assert not result.errors + assert result.data == expected + + +@pytest.mark.asyncio +async def test_should_filter_by_id_async(fixtures): + # Notes: https://goo.gl/hMNRgs + class Query(graphene.ObjectType): + reporter = Node.Field(nodes_async.ReporterAsyncNode) + + query = """ + query ReporterQuery { + reporter (id: "UmVwb3J0ZXJBc3luY05vZGU6MQ==") { + id, + firstName, + awards + } + } + """ + expected = { + "reporter": { + "id": "UmVwb3J0ZXJBc3luY05vZGU6MQ==", + "firstName": "Allen", + "awards": ["2010-mvp"], + } + } + schema = graphene.Schema(query=Query) + result = await schema.execute_async(query) + assert not result.errors + assert result.data == expected + + +@pytest.mark.asyncio +async def test_should_first_n_async(fixtures): + class Query(graphene.ObjectType): + editors = AsyncMongoengineConnectionField(nodes_async.EditorAsyncNode) + + query = """ + query EditorQuery { + editors(first: 2) { + edges { + cursor, + node { + firstName + } + } + pageInfo { + hasNextPage + hasPreviousPage + startCursor + endCursor + } + } + } + """ + expected = { + "editors": { + "edges": [ + {"cursor": "YXJyYXljb25uZWN0aW9uOjA=", "node": {"firstName": "Penny"}}, + {"cursor": "YXJyYXljb25uZWN0aW9uOjE=", "node": {"firstName": "Grant"}}, + ], + "pageInfo": { + "hasNextPage": True, + "hasPreviousPage": False, + "startCursor": "YXJyYXljb25uZWN0aW9uOjA=", + "endCursor": "YXJyYXljb25uZWN0aW9uOjE=", + }, + } + } + schema = graphene.Schema(query=Query) + result = await schema.execute_async(query) + + assert not result.errors + assert result.data == expected + + +@pytest.mark.asyncio +async def test_should_after_async(fixtures): + class Query(graphene.ObjectType): + players = AsyncMongoengineConnectionField(nodes_async.PlayerAsyncNode) + + query = """ + query EditorQuery { + players(after: "YXJyYXljb25uZWN0aW9uOjA=") { + edges { + cursor, + node { + firstName + } + } + } + } + """ + expected = { + "players": { + "edges": [ + {"cursor": "YXJyYXljb25uZWN0aW9uOjE=", "node": {"firstName": "Magic"}}, + {"cursor": "YXJyYXljb25uZWN0aW9uOjI=", "node": {"firstName": "Larry"}}, + {"cursor": "YXJyYXljb25uZWN0aW9uOjM=", "node": {"firstName": "Chris"}}, + ] + } + } + schema = graphene.Schema(query=Query) + result = await schema.execute_async(query) + + assert not result.errors + assert result.data == expected + + +@pytest.mark.asyncio +async def test_should_before_async(fixtures): + class Query(graphene.ObjectType): + players = AsyncMongoengineConnectionField(nodes_async.PlayerAsyncNode) + + query = """ + query EditorQuery { + players(before: "YXJyYXljb25uZWN0aW9uOjI=") { + edges { + cursor, + node { + firstName + } + } + } + } + """ + expected = { + "players": { + "edges": [ + { + "cursor": "YXJyYXljb25uZWN0aW9uOjA=", + "node": {"firstName": "Michael"}, + }, + {"cursor": "YXJyYXljb25uZWN0aW9uOjE=", "node": {"firstName": "Magic"}}, + ] + } + } + schema = graphene.Schema(query=Query) + result = await schema.execute_async(query) + + assert not result.errors + assert result.data == expected + + 
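+# Note on cursors: the relay cursors asserted in these tests are opaque base64 strings
+# produced by graphql_relay.offset_to_cursor, i.e. base64("arrayconnection:<offset>");
+# e.g. "YXJyYXljb25uZWN0aW9uOjI=" decodes to "arrayconnection:2", a zero-based offset,
+# which is why the after/before arguments behave like offsets into the result set.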
+@pytest.mark.asyncio +async def test_should_last_n_async(fixtures): + class Query(graphene.ObjectType): + players = AsyncMongoengineConnectionField(nodes_async.PlayerAsyncNode) + + query = """ + query PlayerQuery { + players(last: 2) { + edges { + cursor, + node { + firstName + } + } + } + } + """ + expected = { + "players": { + "edges": [ + {"cursor": "YXJyYXljb25uZWN0aW9uOjI=", "node": {"firstName": "Larry"}}, + {"cursor": "YXJyYXljb25uZWN0aW9uOjM=", "node": {"firstName": "Chris"}}, + ] + } + } + schema = graphene.Schema(query=Query) + result = await schema.execute_async(query) + + assert not result.errors + assert result.data == expected + + +@pytest.mark.asyncio +async def test_should_self_reference_async(fixtures): + class Query(graphene.ObjectType): + players = AsyncMongoengineConnectionField(nodes_async.PlayerAsyncNode) + + query = """ + query PlayersQuery { + players { + edges { + node { + firstName, + players { + edges { + node { + firstName + } + } + }, + embeddedListArticles { + edges { + node { + headline + } + } + } + } + } + } + } + """ + expected = { + "players": { + "edges": [ + { + "node": { + "firstName": "Michael", + "players": {"edges": [{"node": {"firstName": "Magic"}}]}, + "embeddedListArticles": {"edges": []}, + } + }, + { + "node": { + "firstName": "Magic", + "players": {"edges": [{"node": {"firstName": "Michael"}}]}, + "embeddedListArticles": {"edges": []}, + } + }, + { + "node": { + "firstName": "Larry", + "players": { + "edges": [ + {"node": {"firstName": "Michael"}}, + {"node": {"firstName": "Magic"}}, + ] + }, + "embeddedListArticles": {"edges": []}, + } + }, + { + "node": { + "firstName": "Chris", + "players": {"edges": []}, + "embeddedListArticles": {"edges": []}, + } + }, + ] + } + } + schema = graphene.Schema(query=Query) + result = await schema.execute_async(query) + assert not result.errors + assert result.data == expected + + +@pytest.mark.asyncio +async def test_should_lazy_reference_async(fixtures): + class Query(graphene.ObjectType): + node = Node.Field() + parents = AsyncMongoengineConnectionField(nodes_async.ParentWithRelationshipAsyncNode) + + schema = graphene.Schema(query=Query) + print(schema) + + query = """ + query { + parents { + edges { + node { + beforeChild { + edges { + node { + name, + parent { name } + } + } + }, + afterChild { + edges { + node { + name, + parent { name } + } + } + } + } + } + } + } + """ + + expected = { + "parents": { + "edges": [ + { + "node": { + "beforeChild": { + "edges": [{"node": {"name": "Akari", "parent": {"name": "Yui"}}}] + }, + "afterChild": { + "edges": [{"node": {"name": "Kyouko", "parent": {"name": "Yui"}}}] + }, + } + } + ] + } + } + + result = await schema.execute_async(query) + assert not result.errors + assert result.data == expected + + +@pytest.mark.asyncio +async def test_should_query_with_embedded_document_async(fixtures): + class Query(graphene.ObjectType): + professors = AsyncMongoengineConnectionField(nodes_async.ProfessorVectorAsyncNode) + + query = """ + query { + professors { + edges { + node { + vec, + metadata { + firstName + } + } + } + } + } + """ + expected = { + "professors": { + "edges": [{"node": {"vec": [1.0, 2.3], "metadata": {"firstName": "Steven"}}}] + } + } + schema = graphene.Schema(query=Query) + result = await schema.execute_async(query) + assert not result.errors + assert result.data == expected + + +@pytest.mark.asyncio +async def test_should_get_queryset_returns_dict_filters_async(fixtures): + class Query(graphene.ObjectType): + node = Node.Field() + articles = 
AsyncMongoengineConnectionField( + nodes_async.ArticleAsyncNode, get_queryset=lambda *_, **__: {"headline": "World"} + ) + + query = """ + query ArticlesQuery { + articles { + edges { + node { + headline, + pubDate, + editor { + firstName + } + } + } + } + } + """ + expected = { + "articles": { + "edges": [ + { + "node": { + "headline": "World", + "editor": {"firstName": "Grant"}, + "pubDate": "2020-01-01T00:00:00", + } + } + ] + } + } + schema = graphene.Schema(query=Query) + result = await schema.execute_async(query) + assert not result.errors + assert result.data == expected + + +@pytest.mark.asyncio +async def test_should_get_queryset_returns_qs_filters_async(fixtures): + def get_queryset(model, info, **args): + return model.objects(headline="World") + + class Query(graphene.ObjectType): + node = Node.Field() + articles = AsyncMongoengineConnectionField( + nodes_async.ArticleAsyncNode, get_queryset=get_queryset + ) + + query = """ + query ArticlesQuery { + articles { + edges { + node { + headline, + pubDate, + editor { + firstName + } + } + } + } + } + """ + expected = { + "articles": { + "edges": [ + { + "node": { + "headline": "World", + "editor": {"firstName": "Grant"}, + "pubDate": "2020-01-01T00:00:00", + } + } + ] + } + } + schema = graphene.Schema(query=Query) + result = await schema.execute_async(query) + assert not result.errors + assert result.data == expected + + +@pytest.mark.asyncio +async def test_should_filter_mongoengine_queryset_async(fixtures): + class Query(graphene.ObjectType): + players = AsyncMongoengineConnectionField(nodes_async.PlayerAsyncNode) + + query = """ + query players { + players(firstName_Istartswith: "M") { + edges { + node { + firstName + } + } + } + } + """ + expected = { + "players": { + "edges": [ + {"node": {"firstName": "Michael"}}, + {"node": {"firstName": "Magic"}}, + ] + } + } + schema = graphene.Schema(query=Query) + result = await schema.execute_async(query) + + assert not result.errors + assert json.dumps(result.data, sort_keys=True) == json.dumps(expected, sort_keys=True) + + +@pytest.mark.asyncio +async def test_should_query_document_with_embedded_async(fixtures): + class Query(graphene.ObjectType): + foos = AsyncMongoengineConnectionField(nodes_async.FooAsyncNode) + + async def resolve_multiple_foos(self, *args, **kwargs): + return list(models.Foo.objects.all()) + + query = """ + query { + foos { + edges { + node { + bars { + edges { + node { + someListField + } + } + } + } + } + } + } + """ + + schema = graphene.Schema(query=Query) + result = await schema.execute_async(query) + assert not result.errors + + +@pytest.mark.asyncio +async def test_should_filter_mongoengine_queryset_with_list_async(fixtures): + class Query(graphene.ObjectType): + players = AsyncMongoengineConnectionField(nodes_async.PlayerAsyncNode) + + query = """ + query players { + players(firstName_In: ["Michael", "Magic"]) { + edges { + node { + firstName + } + } + } + } + """ + expected = { + "players": { + "edges": [ + {"node": {"firstName": "Michael"}}, + {"node": {"firstName": "Magic"}}, + ] + } + } + schema = graphene.Schema(query=Query) + result = await schema.execute_async(query) + + assert not result.errors + assert json.dumps(result.data, sort_keys=True) == json.dumps(expected, sort_keys=True) + + +@pytest.mark.asyncio +async def test_should_get_correct_list_of_documents_async(fixtures): + class Query(graphene.ObjectType): + players = AsyncMongoengineConnectionField(nodes_async.PlayerAsyncNode) + + query = """ + query players { + players(firstName: "Michael") 
{ + edges { + node { + firstName, + articles(first: 3) { + edges { + node { + headline + } + } + } + } + } + } + } + """ + expected = { + "players": { + "edges": [ + { + "node": { + "firstName": "Michael", + "articles": { + "edges": [ + { + "node": { + "headline": "Hello", + } + }, + { + "node": { + "headline": "World", + } + }, + ] + }, + } + } + ] + } + } + schema = graphene.Schema(query=Query) + result = await schema.execute_async(query) + + assert not result.errors + assert result.data == expected + + +@pytest.mark.asyncio +async def test_should_filter_mongoengine_queryset_by_id_and_other_fields_async(fixtures): + class Query(graphene.ObjectType): + players = AsyncMongoengineConnectionField(nodes_async.PlayerAsyncNode) + + larry = models.Player.objects.get(first_name="Larry") + larry_relay_id = to_global_id("PlayerAsyncNode", larry.id) + + # "Larry" id && firstName == "Michael" should return nothing + query = """ + query players {{ + players( + id: "{larry_relay_id}", + firstName: "Michael" + ) {{ + edges {{ + node {{ + id + firstName + }} + }} + }} + }} + """.format(larry_relay_id=larry_relay_id) + + expected = {"players": {"edges": []}} + schema = graphene.Schema(query=Query) + result = await schema.execute_async(query) + + assert not result.errors + assert json.dumps(result.data, sort_keys=True) == json.dumps(expected, sort_keys=True) diff --git a/graphene_mongo/tests/test_types.py b/graphene_mongo/tests/test_types.py index 3e9ecbd1..c1217723 100644 --- a/graphene_mongo/tests/test_types.py +++ b/graphene_mongo/tests/test_types.py @@ -13,7 +13,6 @@ class Human(MongoengineObjectType): - pub_date = Int() class Meta: @@ -135,6 +134,7 @@ class A(MongoengineObjectType): class Meta: model = Article order_by = "some_order_by_statement" + assert "some_order_by_statement" not in list(A._meta.fields.keys()) diff --git a/graphene_mongo/tests/test_utils.py b/graphene_mongo/tests/test_utils.py index 3aef4848..e8b6c5bf 100644 --- a/graphene_mongo/tests/test_utils.py +++ b/graphene_mongo/tests/test_utils.py @@ -1,8 +1,9 @@ -from ..utils import get_model_fields, is_valid_mongoengine_model, get_query_fields -from .models import Article, Reporter, Child -from . import types import graphene +from . 
import types +from .models import Article, Child, Reporter +from ..utils import get_model_fields, get_query_fields, is_valid_mongoengine_model + def test_get_model_fields_no_duplication(): reporter_fields = get_model_fields(Reporter) @@ -82,22 +83,22 @@ def resolve_children(self, info, *args, **kwargs): schema.execute(query) assert get_query_fields(test_get_query_fields.child_info) == { - 'bar': {}, - 'loc': { - 'type': {}, - 'coordinates': {} - } + "bar": {}, + "loc": { + "type": {}, + "coordinates": {}, + }, } assert get_query_fields(test_get_query_fields.children_info) == { - 'ChildType': { - 'baz': {}, - 'loc': { - 'type': {}, - 'coordinates': {} - } + "ChildType": { + "baz": {}, + "loc": { + "type": {}, + "coordinates": {}, + }, + }, + "AnotherChildType": { + "qux": {}, }, - 'AnotherChildType': { - 'qux': {} - } } diff --git a/graphene_mongo/types.py b/graphene_mongo/types.py index 71a34ab2..4eeda046 100644 --- a/graphene_mongo/types.py +++ b/graphene_mongo/types.py @@ -1,7 +1,9 @@ from collections import OrderedDict +from concurrent.futures import ThreadPoolExecutor import graphene import mongoengine +from asgiref.sync import sync_to_async from graphene.relay import Connection, Node from graphene.types.objecttype import ObjectType, ObjectTypeOptions from graphene.types.inputobjecttype import InputObjectType, InputObjectTypeOptions @@ -12,16 +14,24 @@ from .converter import convert_mongoengine_field from .registry import Registry, get_global_registry, get_inputs_registry -from .utils import get_model_fields, is_valid_mongoengine_model, get_query_fields +from .utils import get_model_fields, is_valid_mongoengine_model, get_query_fields, ExecutorEnum -def construct_fields(model, registry, only_fields, exclude_fields, non_required_fields): +def construct_fields( + model, + registry, + only_fields, + exclude_fields, + non_required_fields, + executor: ExecutorEnum = ExecutorEnum.SYNC, +): """ Args: model (mongoengine.Document): registry (.registry.Registry): only_fields ([str]): exclude_fields ([str]): + executor : ExecutorEnum Returns: (OrderedDict, OrderedDict): converted fields and self reference fields. @@ -43,27 +53,27 @@ def construct_fields(model, registry, only_fields, exclude_fields, non_required_ # Take care of list of self-reference. 
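+        # Self-referencing list fields cannot be converted while the type is still
+        # being built, so they are collected into self_referenced here and converted
+        # later by construct_self_referenced_fields.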
document_type_obj = field.field.__dict__.get("document_type_obj", None) if ( - document_type_obj == model._class_name - or isinstance(document_type_obj, model) - or document_type_obj == model + document_type_obj == model._class_name + or isinstance(document_type_obj, model) + or document_type_obj == model ): self_referenced[name] = field continue - converted = convert_mongoengine_field(field, registry) + converted = convert_mongoengine_field(field, registry, executor) if not converted: continue else: - if name in non_required_fields and 'required' in converted.kwargs: - converted.kwargs['required'] = False + if name in non_required_fields and "required" in converted.kwargs: + converted.kwargs["required"] = False fields[name] = converted return fields, self_referenced -def construct_self_referenced_fields(self_referenced, registry): +def construct_self_referenced_fields(self_referenced, registry, executor=ExecutorEnum.SYNC): fields = OrderedDict() for name, field in self_referenced.items(): - converted = convert_mongoengine_field(field, registry) + converted = convert_mongoengine_field(field, registry, executor) if not converted: continue fields[name] = converted @@ -73,7 +83,6 @@ def construct_self_referenced_fields(self_referenced, registry): def create_graphene_generic_class(object_type, option_type): class MongoengineGenericObjectTypeOptions(option_type): - model = None registry = None # type: Registry connection = None @@ -84,26 +93,25 @@ class MongoengineGenericObjectTypeOptions(option_type): class GrapheneMongoengineGenericType(object_type): @classmethod def __init_subclass_with_meta__( - cls, - model=None, - registry=None, - skip_registry=False, - only_fields=(), - required_fields=(), - exclude_fields=(), - non_required_fields=(), - filter_fields=None, - non_filter_fields=(), - connection=None, - connection_class=None, - use_connection=None, - connection_field_class=None, - interfaces=(), - _meta=None, - order_by=None, - **options + cls, + model=None, + registry=None, + skip_registry=False, + only_fields=(), + required_fields=(), + exclude_fields=(), + non_required_fields=(), + filter_fields=None, + non_filter_fields=(), + connection=None, + connection_class=None, + use_connection=None, + connection_field_class=None, + interfaces=(), + _meta=None, + order_by=None, + **options, ): - assert is_valid_mongoengine_model(model), ( "The attribute model in {}.Meta must be a valid Mongoengine Model. " 'Received "{}" instead.' @@ -123,13 +131,9 @@ def __init_subclass_with_meta__( converted_fields, self_referenced = construct_fields( model, registry, only_fields, exclude_fields, non_required_fields ) - mongoengine_fields = yank_fields_from_attrs( - converted_fields, _as=graphene.Field - ) + mongoengine_fields = yank_fields_from_attrs(converted_fields, _as=graphene.Field) if use_connection is None and interfaces: - use_connection = any( - (issubclass(interface, Node) for interface in interfaces) - ) + use_connection = any((issubclass(interface, Node) for interface in interfaces)) if use_connection and not connection: # We create the connection automatically @@ -137,7 +141,7 @@ def __init_subclass_with_meta__( connection_class = Connection connection = connection_class.create_type( - "{}Connection".format(options.get('name') or cls.__name__), node=cls + "{}Connection".format(options.get("name") or cls.__name__), node=cls ) if connection is not None: @@ -183,9 +187,7 @@ def __init_subclass_with_meta__( if not skip_registry: registry.register(cls) # Notes: Take care list of self-reference fields. 
- converted_fields = construct_self_referenced_fields( - self_referenced, registry - ) + converted_fields = construct_self_referenced_fields(self_referenced, registry) if converted_fields: mongoengine_fields = yank_fields_from_attrs( converted_fields, _as=graphene.Field @@ -202,12 +204,10 @@ def rescan_fields(cls): cls._meta.registry, cls._meta.only_fields, cls._meta.exclude_fields, - cls._meta.non_required_fields + cls._meta.non_required_fields, ) - mongoengine_fields = yank_fields_from_attrs( - converted_fields, _as=graphene.Field - ) + mongoengine_fields = yank_fields_from_attrs(converted_fields, _as=graphene.Field) # The initial scan should take precedence for field in mongoengine_fields: @@ -227,7 +227,7 @@ def is_type_of(cls, root, info): return isinstance(root, cls._meta.model) @classmethod - def get_node(cls, info, id): + async def get_node(cls, info, id): required_fields = list() for field in cls._meta.required_fields: if field in cls._meta.model._fields_ordered: @@ -239,7 +239,11 @@ def get_node(cls, info, id): if to_snake_case(field) in cls._meta.model._fields_ordered: required_fields.append(to_snake_case(field)) required_fields = list(set(required_fields)) - return cls._meta.model.objects.no_dereference().only(*required_fields).get(pk=id) + return await sync_to_async( + cls._meta.model.objects.no_dereference().only(*required_fields).get, + thread_sensitive=False, + executor=ThreadPoolExecutor(), + )(pk=id) def resolve_id(self, info): return str(self.id) @@ -247,9 +251,18 @@ def resolve_id(self, info): return GrapheneMongoengineGenericType, MongoengineGenericObjectTypeOptions -MongoengineObjectType, MongoengineObjectTypeOptions = create_graphene_generic_class(ObjectType, ObjectTypeOptions) -MongoengineInterfaceType, MongoengineInterfaceTypeOptions = create_graphene_generic_class(Interface, InterfaceOptions) -MongoengineInputType, MongoengineInputTypeOptions = create_graphene_generic_class(InputObjectType, - InputObjectTypeOptions) - -GrapheneMongoengineObjectTypes = (MongoengineObjectType, MongoengineInputType, MongoengineInterfaceType) +MongoengineObjectType, MongoengineObjectTypeOptions = create_graphene_generic_class( + ObjectType, ObjectTypeOptions +) +MongoengineInterfaceType, MongoengineInterfaceTypeOptions = create_graphene_generic_class( + Interface, InterfaceOptions +) +MongoengineInputType, MongoengineInputTypeOptions = create_graphene_generic_class( + InputObjectType, InputObjectTypeOptions +) + +GrapheneMongoengineObjectTypes = ( + MongoengineObjectType, + MongoengineInputType, + MongoengineInterfaceType, +) diff --git a/graphene_mongo/types_async.py b/graphene_mongo/types_async.py new file mode 100644 index 00000000..033cd9e1 --- /dev/null +++ b/graphene_mongo/types_async.py @@ -0,0 +1,202 @@ +import graphene +import mongoengine +from asgiref.sync import sync_to_async +from graphene import InputObjectType +from graphene.relay import Connection, Node +from graphene.types.interface import Interface, InterfaceOptions +from graphene.types.objecttype import ObjectType, ObjectTypeOptions +from graphene.types.utils import yank_fields_from_attrs +from graphene.utils.str_converters import to_snake_case + +from graphene_mongo import AsyncMongoengineConnectionField +from .registry import Registry, get_global_async_registry, get_inputs_async_registry +from .types import construct_fields, construct_self_referenced_fields +from .utils import ExecutorEnum, get_query_fields, is_valid_mongoengine_model + + +def create_graphene_generic_class_async(object_type, option_type): + class 
AsyncMongoengineGenericObjectTypeOptions(option_type): + model = None + registry = None # type: Registry + connection = None + filter_fields = () + non_required_fields = () + order_by = None + + class AsyncGrapheneMongoengineGenericType(object_type): + @classmethod + def __init_subclass_with_meta__( + cls, + model=None, + registry=None, + skip_registry=False, + only_fields=(), + required_fields=(), + exclude_fields=(), + non_required_fields=(), + filter_fields=None, + non_filter_fields=(), + connection=None, + connection_class=None, + use_connection=None, + connection_field_class=None, + interfaces=(), + _meta=None, + order_by=None, + **options, + ): + assert is_valid_mongoengine_model(model), ( + "The attribute model in {}.Meta must be a valid Mongoengine Model. " + 'Received "{}" instead.' + ).format(cls.__name__, type(model)) + + if not registry: + # input objects are registered in a separate registry + if issubclass(cls, InputObjectType): + registry = get_inputs_async_registry() + else: + registry = get_global_async_registry() + + assert isinstance(registry, Registry), ( + "The attribute registry in {}.Meta needs to be an instance of " + 'Registry({}), received "{}".' + ).format(object_type, cls.__name__, registry) + converted_fields, self_referenced = construct_fields( + model, + registry, + only_fields, + exclude_fields, + non_required_fields, + ExecutorEnum.ASYNC, + ) + mongoengine_fields = yank_fields_from_attrs(converted_fields, _as=graphene.Field) + if use_connection is None and interfaces: + use_connection = any((issubclass(interface, Node) for interface in interfaces)) + + if use_connection and not connection: + # We create the connection automatically + if not connection_class: + connection_class = Connection + + connection = connection_class.create_type( + "{}Connection".format(options.get("name") or cls.__name__), node=cls + ) + + if connection is not None: + assert issubclass(connection, Connection), ( + "The attribute connection in {}.Meta must be of type Connection. " + 'Received "{}" instead.' + ).format(cls.__name__, type(connection)) + + if connection_field_class is not None: + assert issubclass(connection_field_class, graphene.ConnectionField), ( + "The attribute connection_field_class in {}.Meta must be of type graphene.ConnectionField. " + 'Received "{}" instead.' + ).format(cls.__name__, type(connection_field_class)) + else: + connection_field_class = AsyncMongoengineConnectionField + + if _meta: + assert isinstance(_meta, AsyncMongoengineGenericObjectTypeOptions), ( + "_meta must be an instance of AsyncMongoengineGenericObjectTypeOptions, " + "received {}" + ).format(_meta.__class__) + else: + _meta = AsyncMongoengineGenericObjectTypeOptions(option_type) + + _meta.model = model + _meta.registry = registry + _meta.fields = mongoengine_fields + _meta.filter_fields = filter_fields + _meta.non_filter_fields = non_filter_fields + _meta.connection = connection + _meta.connection_field_class = connection_field_class + # Save them for later + _meta.only_fields = only_fields + _meta.required_fields = required_fields + _meta.exclude_fields = exclude_fields + _meta.non_required_fields = non_required_fields + _meta.order_by = order_by + + super(AsyncGrapheneMongoengineGenericType, cls).__init_subclass_with_meta__( + _meta=_meta, interfaces=interfaces, **options + ) + + if not skip_registry: + registry.register(cls) + # Notes: Take care of the list of self-referenced fields. 
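+                # cls is registered now, so the deferred self-referencing fields can be
+                # converted; attach them and register the class again with the full field set.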
+ converted_fields = construct_self_referenced_fields( + self_referenced, registry, ExecutorEnum.ASYNC + ) + if converted_fields: + mongoengine_fields = yank_fields_from_attrs( + converted_fields, _as=graphene.Field + ) + cls._meta.fields.update(mongoengine_fields) + registry.register(cls) + + @classmethod + def rescan_fields(cls): + """Attempts to rescan fields and will insert any not converted initially""" + + converted_fields, self_referenced = construct_fields( + cls._meta.model, + cls._meta.registry, + cls._meta.only_fields, + cls._meta.exclude_fields, + cls._meta.non_required_fields, + ExecutorEnum.ASYNC, + ) + + mongoengine_fields = yank_fields_from_attrs(converted_fields, _as=graphene.Field) + + # The initial scan should take precedence + for field in mongoengine_fields: + if field not in cls._meta.fields: + cls._meta.fields.update({field: mongoengine_fields[field]}) + # Self-referenced fields can't change between scans! + + @classmethod + def is_type_of(cls, root, info): + if isinstance(root, cls): + return True + # XXX: Take care FileField + if isinstance(root, mongoengine.GridFSProxy): + return True + if not is_valid_mongoengine_model(type(root)): + raise Exception(('Received incompatible instance "{}".').format(root)) + return isinstance(root, cls._meta.model) + + @classmethod + async def get_node(cls, info, id): + required_fields = list() + for field in cls._meta.required_fields: + if field in cls._meta.model._fields_ordered: + required_fields.append(field) + queried_fields = get_query_fields(info) + if cls._meta.name in queried_fields: + queried_fields = queried_fields[cls._meta.name] + for field in queried_fields: + if to_snake_case(field) in cls._meta.model._fields_ordered: + required_fields.append(to_snake_case(field)) + required_fields = list(set(required_fields)) + return await sync_to_async( + cls._meta.model.objects.no_dereference().only(*required_fields).get + )(pk=id) + + def resolve_id(self, info): + return str(self.id) + + return AsyncGrapheneMongoengineGenericType, AsyncMongoengineGenericObjectTypeOptions + + +AsyncMongoengineObjectType, AsyncMongoengineObjectTypeOptions = create_graphene_generic_class_async( + ObjectType, ObjectTypeOptions +) + +( + AsyncMongoengineInterfaceType, + MongoengineInterfaceTypeOptions, +) = create_graphene_generic_class_async(Interface, InterfaceOptions) + +AsyncGrapheneMongoengineObjectTypes = (AsyncMongoengineObjectType, AsyncMongoengineInterfaceType) diff --git a/graphene_mongo/utils.py b/graphene_mongo/utils.py index b80b9178..1c4e31a7 100644 --- a/graphene_mongo/utils.py +++ b/graphene_mongo/utils.py @@ -1,16 +1,24 @@ from __future__ import unicode_literals +import enum import inspect from collections import OrderedDict +from concurrent.futures import ThreadPoolExecutor +from typing import Any, Callable, Union import mongoengine +from asgiref.sync import SyncToAsync, sync_to_async as asgiref_sync_to_async from graphene import Node from graphene.utils.trim_docstring import trim_docstring -# from graphql.utils.ast_to_dict import ast_to_dict from graphql import FieldNode from graphql_relay.connection.array_connection import offset_to_cursor +class ExecutorEnum(enum.Enum): + ASYNC = enum.auto() + SYNC = enum.auto() + + def get_model_fields(model, excluding=None): excluding = excluding or [] attributes = dict() @@ -26,8 +34,8 @@ def get_model_reference_fields(model, excluding=None): attributes = dict() for attr_name, attr in model._fields.items(): if attr_name in excluding or not isinstance( - attr, - 
(mongoengine.fields.ReferenceField, mongoengine.fields.LazyReferenceField), + attr, + (mongoengine.fields.ReferenceField, mongoengine.fields.LazyReferenceField), ): continue attributes[attr_name] = attr @@ -36,8 +44,7 @@ def get_model_reference_fields(model, excluding=None): def is_valid_mongoengine_model(model): return inspect.isclass(model) and ( - issubclass(model, mongoengine.Document) - or issubclass(model, mongoengine.EmbeddedDocument) + issubclass(model, mongoengine.Document) or issubclass(model, mongoengine.EmbeddedDocument) ) @@ -68,9 +75,7 @@ def import_single_dispatch(): def get_type_for_document(schema, document): types = schema.types.values() for _type in types: - type_document = hasattr(_type, "_meta") and getattr( - _type._meta, "document", None - ) + type_document = hasattr(_type, "_meta") and getattr(_type._meta, "document", None) if document == type_document: return _type @@ -128,23 +133,20 @@ def collect_query_fields(node, fragments): field = {} selection_set = None - if type(node) == dict: - selection_set = node.get('selection_set') + if isinstance(node, dict): + selection_set = node.get("selection_set") else: selection_set = node.selection_set if selection_set: for leaf in selection_set.selections: - if leaf.kind == 'field': - field.update({ - leaf.name.value: collect_query_fields(leaf, fragments) - }) - elif leaf.kind == 'fragment_spread': - field.update(collect_query_fields(fragments[leaf.name.value], - fragments)) - elif leaf.kind == 'inline_fragment': - field.update({ - leaf.type_condition.name.value: collect_query_fields(leaf, fragments) - }) + if leaf.kind == "field": + field.update({leaf.name.value: collect_query_fields(leaf, fragments)}) + elif leaf.kind == "fragment_spread": + field.update(collect_query_fields(fragments[leaf.name.value], fragments)) + elif leaf.kind == "inline_fragment": + field.update( + {leaf.type_condition.name.value: collect_query_fields(leaf, fragments)} + ) return field @@ -189,7 +191,7 @@ def ast_to_dict(node, include_loc=False): return node -def find_skip_and_limit(first, last, after, before, count): +def find_skip_and_limit(first, last, after, before, count=None): reverse = False skip = 0 limit = None @@ -212,6 +214,8 @@ def find_skip_and_limit(first, last, after, before, count): limit = last skip = before - last elif last is not None and after is not None: + if not count: + raise ValueError("Count Missing") reverse = True if last + after < count: limit = last @@ -228,13 +232,12 @@ def find_skip_and_limit(first, last, after, before, count): return skip, limit, reverse -def connection_from_iterables(edges, start_offset, has_previous_page, has_next_page, connection_type, - edge_type, - pageinfo_type): +def connection_from_iterables( + edges, start_offset, has_previous_page, has_next_page, connection_type, edge_type, pageinfo_type +): edges_items = [ edge_type( - node=node, - cursor=offset_to_cursor((0 if start_offset is None else start_offset) + i) + node=node, cursor=offset_to_cursor((0 if start_offset is None else start_offset) + i) ) for i, node in enumerate(edges) ] @@ -248,6 +251,30 @@ def connection_from_iterables(edges, start_offset, has_previous_page, has_next_p start_cursor=first_edge_cursor, end_cursor=last_edge_cursor, has_previous_page=has_previous_page, - has_next_page=has_next_page - ) + has_next_page=has_next_page, + ), ) + + +def sync_to_async( + func: Callable = None, + thread_sensitive: bool = False, + executor: Any = None, # noqa +) -> Union[SyncToAsync, Callable[[Callable[..., Any]], SyncToAsync]]: + """ + Wrapper 
over sync_to_async from asgiref.sync + Defaults to thread insensitive with ThreadPoolExecutor of n workers + Args: + func: + Function to be converted to coroutine + thread_sensitive: + If the operation is thread sensitive and should run in synchronous thread + executor: + Threadpool executor, if thread_sensitive=False + + Returns: + coroutine version of func + """ + if executor is None: + executor = ThreadPoolExecutor() + return asgiref_sync_to_async(func=func, thread_sensitive=thread_sensitive, executor=executor) diff --git a/poetry.lock b/poetry.lock index 58da7e7a..59034374 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "aniso8601" version = "9.0.1" description = "A library for parsing ISO 8601 strings." -category = "main" optional = false python-versions = "*" files = [ @@ -17,46 +16,25 @@ dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] [[package]] name = "asgiref" -version = "3.6.0" +version = "3.7.2" description = "ASGI specs, helper code, and adapters" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "asgiref-3.6.0-py3-none-any.whl", hash = "sha256:71e68008da809b957b7ee4b43dbccff33d1b23519fb8344e33f049897077afac"}, - {file = "asgiref-3.6.0.tar.gz", hash = "sha256:9567dfe7bd8d3c8c892227827c41cce860b368104c3431da67a0c5a65a949506"}, + {file = "asgiref-3.7.2-py3-none-any.whl", hash = "sha256:89b2ef2247e3b562a16eef663bc0e2e703ec6468e2fa8a5cd61cd449786d4f6e"}, + {file = "asgiref-3.7.2.tar.gz", hash = "sha256:9e0ce3aa93a819ba5b45120216b23878cf6e8525eb3848653452b4192b92afed"}, ] [package.dependencies] -typing-extensions = {version = "*", markers = "python_version < \"3.8\""} +typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} [package.extras] tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] -[[package]] -name = "attrs" -version = "22.2.0" -description = "Classes Without Boilerplate" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, - {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, -] - -[package.extras] -cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] -tests = ["attrs[tests-no-zope]", "zope.interface"] -tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] - [[package]] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
-category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -66,63 +44,63 @@ files = [ [[package]] name = "coverage" -version = "7.2.2" +version = "7.3.2" description = "Code coverage measurement for Python" -category = "dev" optional = false -python-versions = ">=3.7" -files = [ - {file = "coverage-7.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c90e73bdecb7b0d1cea65a08cb41e9d672ac6d7995603d6465ed4914b98b9ad7"}, - {file = "coverage-7.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e2926b8abedf750c2ecf5035c07515770944acf02e1c46ab08f6348d24c5f94d"}, - {file = "coverage-7.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57b77b9099f172804e695a40ebaa374f79e4fb8b92f3e167f66facbf92e8e7f5"}, - {file = "coverage-7.2.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:efe1c0adad110bf0ad7fb59f833880e489a61e39d699d37249bdf42f80590169"}, - {file = "coverage-7.2.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2199988e0bc8325d941b209f4fd1c6fa007024b1442c5576f1a32ca2e48941e6"}, - {file = "coverage-7.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:81f63e0fb74effd5be736cfe07d710307cc0a3ccb8f4741f7f053c057615a137"}, - {file = "coverage-7.2.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:186e0fc9cf497365036d51d4d2ab76113fb74f729bd25da0975daab2e107fd90"}, - {file = "coverage-7.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:420f94a35e3e00a2b43ad5740f935358e24478354ce41c99407cddd283be00d2"}, - {file = "coverage-7.2.2-cp310-cp310-win32.whl", hash = "sha256:38004671848b5745bb05d4d621526fca30cee164db42a1f185615f39dc997292"}, - {file = "coverage-7.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:0ce383d5f56d0729d2dd40e53fe3afeb8f2237244b0975e1427bfb2cf0d32bab"}, - {file = "coverage-7.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3eb55b7b26389dd4f8ae911ba9bc8c027411163839dea4c8b8be54c4ee9ae10b"}, - {file = "coverage-7.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d2b96123a453a2d7f3995ddb9f28d01fd112319a7a4d5ca99796a7ff43f02af5"}, - {file = "coverage-7.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:299bc75cb2a41e6741b5e470b8c9fb78d931edbd0cd009c58e5c84de57c06731"}, - {file = "coverage-7.2.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e1df45c23d4230e3d56d04414f9057eba501f78db60d4eeecfcb940501b08fd"}, - {file = "coverage-7.2.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:006ed5582e9cbc8115d2e22d6d2144a0725db542f654d9d4fda86793832f873d"}, - {file = "coverage-7.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d683d230b5774816e7d784d7ed8444f2a40e7a450e5720d58af593cb0b94a212"}, - {file = "coverage-7.2.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8efb48fa743d1c1a65ee8787b5b552681610f06c40a40b7ef94a5b517d885c54"}, - {file = "coverage-7.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c752d5264053a7cf2fe81c9e14f8a4fb261370a7bb344c2a011836a96fb3f57"}, - {file = "coverage-7.2.2-cp311-cp311-win32.whl", hash = "sha256:55272f33da9a5d7cccd3774aeca7a01e500a614eaea2a77091e9be000ecd401d"}, - {file = "coverage-7.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:92ebc1619650409da324d001b3a36f14f63644c7f0a588e331f3b0f67491f512"}, - {file = 
"coverage-7.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5afdad4cc4cc199fdf3e18088812edcf8f4c5a3c8e6cb69127513ad4cb7471a9"}, - {file = "coverage-7.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0484d9dd1e6f481b24070c87561c8d7151bdd8b044c93ac99faafd01f695c78e"}, - {file = "coverage-7.2.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d530191aa9c66ab4f190be8ac8cc7cfd8f4f3217da379606f3dd4e3d83feba69"}, - {file = "coverage-7.2.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ac0f522c3b6109c4b764ffec71bf04ebc0523e926ca7cbe6c5ac88f84faced0"}, - {file = "coverage-7.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ba279aae162b20444881fc3ed4e4f934c1cf8620f3dab3b531480cf602c76b7f"}, - {file = "coverage-7.2.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:53d0fd4c17175aded9c633e319360d41a1f3c6e352ba94edcb0fa5167e2bad67"}, - {file = "coverage-7.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c99cb7c26a3039a8a4ee3ca1efdde471e61b4837108847fb7d5be7789ed8fd9"}, - {file = "coverage-7.2.2-cp37-cp37m-win32.whl", hash = "sha256:5cc0783844c84af2522e3a99b9b761a979a3ef10fb87fc4048d1ee174e18a7d8"}, - {file = "coverage-7.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:817295f06eacdc8623dc4df7d8b49cea65925030d4e1e2a7c7218380c0072c25"}, - {file = "coverage-7.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6146910231ece63facfc5984234ad1b06a36cecc9fd0c028e59ac7c9b18c38c6"}, - {file = "coverage-7.2.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:387fb46cb8e53ba7304d80aadca5dca84a2fbf6fe3faf6951d8cf2d46485d1e5"}, - {file = "coverage-7.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:046936ab032a2810dcaafd39cc4ef6dd295df1a7cbead08fe996d4765fca9fe4"}, - {file = "coverage-7.2.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e627dee428a176ffb13697a2c4318d3f60b2ccdde3acdc9b3f304206ec130ccd"}, - {file = "coverage-7.2.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fa54fb483decc45f94011898727802309a109d89446a3c76387d016057d2c84"}, - {file = "coverage-7.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3668291b50b69a0c1ef9f462c7df2c235da3c4073f49543b01e7eb1dee7dd540"}, - {file = "coverage-7.2.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7c20b731211261dc9739bbe080c579a1835b0c2d9b274e5fcd903c3a7821cf88"}, - {file = "coverage-7.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5764e1f7471cb8f64b8cda0554f3d4c4085ae4b417bfeab236799863703e5de2"}, - {file = "coverage-7.2.2-cp38-cp38-win32.whl", hash = "sha256:4f01911c010122f49a3e9bdc730eccc66f9b72bd410a3a9d3cb8448bb50d65d3"}, - {file = "coverage-7.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:c448b5c9e3df5448a362208b8d4b9ed85305528313fca1b479f14f9fe0d873b8"}, - {file = "coverage-7.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bfe7085783cda55e53510482fa7b5efc761fad1abe4d653b32710eb548ebdd2d"}, - {file = "coverage-7.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9d22e94e6dc86de981b1b684b342bec5e331401599ce652900ec59db52940005"}, - {file = "coverage-7.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:507e4720791977934bba016101579b8c500fb21c5fa3cd4cf256477331ddd988"}, - {file = 
"coverage-7.2.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc4803779f0e4b06a2361f666e76f5c2e3715e8e379889d02251ec911befd149"}, - {file = "coverage-7.2.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db8c2c5ace167fd25ab5dd732714c51d4633f58bac21fb0ff63b0349f62755a8"}, - {file = "coverage-7.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4f68ee32d7c4164f1e2c8797535a6d0a3733355f5861e0f667e37df2d4b07140"}, - {file = "coverage-7.2.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d52f0a114b6a58305b11a5cdecd42b2e7f1ec77eb20e2b33969d702feafdd016"}, - {file = "coverage-7.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:797aad79e7b6182cb49c08cc5d2f7aa7b2128133b0926060d0a8889ac43843be"}, - {file = "coverage-7.2.2-cp39-cp39-win32.whl", hash = "sha256:db45eec1dfccdadb179b0f9ca616872c6f700d23945ecc8f21bb105d74b1c5fc"}, - {file = "coverage-7.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:8dbe2647bf58d2c5a6c5bcc685f23b5f371909a5624e9f5cd51436d6a9f6c6ef"}, - {file = "coverage-7.2.2-pp37.pp38.pp39-none-any.whl", hash = "sha256:872d6ce1f5be73f05bea4df498c140b9e7ee5418bfa2cc8204e7f9b817caa968"}, - {file = "coverage-7.2.2.tar.gz", hash = "sha256:36dd42da34fe94ed98c39887b86db9d06777b1c8f860520e21126a75507024f2"}, +python-versions = ">=3.8" +files = [ + {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, + {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, + {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, + {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, + {file = 
"coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, + {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, + {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, + {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, + {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, + {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, + {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, + {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, + {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, + {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, + {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, ] [package.dependencies] @@ -133,20 +111,18 @@ toml = ["tomli"] [[package]] name = "dnspython" -version = "2.3.0" +version = "2.4.2" description = "DNS toolkit" -category = "main" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8,<4.0" files = [ - {file = "dnspython-2.3.0-py3-none-any.whl", hash = "sha256:89141536394f909066cabd112e3e1a37e4e654db00a25308b0f130bc3152eb46"}, - {file = "dnspython-2.3.0.tar.gz", hash = "sha256:224e32b03eb46be70e12ef6d64e0be123a64e621ab4c0822ff6d450d52a540b9"}, + {file = "dnspython-2.4.2-py3-none-any.whl", hash = "sha256:57c6fbaaeaaf39c891292012060beb141791735dbb4004798328fc2c467402d8"}, + {file = "dnspython-2.4.2.tar.gz", hash = "sha256:8dcfae8c7460a2f84b4072e26f1c9f4101ca20c071649cb7c34e8b6a93d58984"}, ] [package.extras] -curio = ["curio (>=1.2,<2.0)", "sniffio (>=1.1,<2.0)"] -dnssec = ["cryptography (>=2.6,<40.0)"] -doh = ["h2 (>=4.1.0)", "httpx (>=0.21.1)", "requests 
(>=2.23.0,<3.0.0)", "requests-toolbelt (>=0.9.1,<0.11.0)"] +dnssec = ["cryptography (>=2.6,<42.0)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=0.17.3)", "httpx (>=0.24.1)"] doq = ["aioquic (>=0.9.20)"] idna = ["idna (>=2.1,<4.0)"] trio = ["trio (>=0.14,<0.23)"] @@ -154,47 +130,27 @@ wmi = ["wmi (>=1.5.1,<2.0.0)"] [[package]] name = "exceptiongroup" -version = "1.1.1" +version = "1.2.0" description = "Backport of PEP 654 (exception groups)" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, - {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, ] [package.extras] test = ["pytest (>=6)"] -[[package]] -name = "flake8" -version = "3.9.2" -description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -files = [ - {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, - {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, -] - -[package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} -mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.7.0,<2.8.0" -pyflakes = ">=2.3.0,<2.4.0" - [[package]] name = "graphene" -version = "3.2.2" +version = "3.3" description = "GraphQL Framework for Python" -category = "main" optional = false python-versions = "*" files = [ - {file = "graphene-3.2.2-py2.py3-none-any.whl", hash = "sha256:753de13948cbf42e32cc87fb533167c88907066eb984251fdbb006c0aab8da00"}, - {file = "graphene-3.2.2.tar.gz", hash = "sha256:5b03e72770dc901f40be55784058d6bb1d952a49eb819a4a085962d5e1cf5fcf"}, + {file = "graphene-3.3-py2.py3-none-any.whl", hash = "sha256:bb3810be33b54cb3e6969506671eb72319e8d7ba0d5ca9c8066472f75bf35a38"}, + {file = "graphene-3.3.tar.gz", hash = "sha256:529bf40c2a698954217d3713c6041d69d3f719ad0080857d7ee31327112446b0"}, ] [package.dependencies] @@ -210,7 +166,6 @@ test = ["coveralls (>=3.3,<4)", "iso8601 (>=1,<2)", "mock (>=4,<5)", "pytest (>= name = "graphql-core" version = "3.2.3" description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." 
-category = "main" optional = false python-versions = ">=3.6,<4" files = [ @@ -218,14 +173,10 @@ files = [ {file = "graphql_core-3.2.3-py3-none-any.whl", hash = "sha256:5766780452bd5ec8ba133f8bf287dc92713e3868ddd83aee4faab9fc3e303dc3"}, ] -[package.dependencies] -typing-extensions = {version = ">=4.2,<5", markers = "python_version < \"3.8\""} - [[package]] name = "graphql-relay" version = "3.2.0" description = "Relay library for graphql-core" -category = "main" optional = false python-versions = ">=3.6,<4" files = [ @@ -235,34 +186,11 @@ files = [ [package.dependencies] graphql-core = ">=3.2,<3.3" -typing-extensions = {version = ">=4.1,<5", markers = "python_version < \"3.8\""} - -[[package]] -name = "importlib-metadata" -version = "6.1.0" -description = "Read metadata from Python packages" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "importlib_metadata-6.1.0-py3-none-any.whl", hash = "sha256:ff80f3b5394912eb1b108fcfd444dc78b7f1f3e16b16188054bd01cb9cb86f09"}, - {file = "importlib_metadata-6.1.0.tar.gz", hash = "sha256:43ce9281e097583d758c2c708c4376371261a02c34682491a8e98352365aad20"}, -] - -[package.dependencies] -typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} -zipp = ">=0.5" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] [[package]] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -270,40 +198,15 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] -[[package]] -name = "iso8601" -version = "1.1.0" -description = "Simple module to parse ISO 8601 dates" -category = "main" -optional = false -python-versions = ">=3.6.2,<4.0" -files = [ - {file = "iso8601-1.1.0-py3-none-any.whl", hash = "sha256:8400e90141bf792bce2634df533dc57e3bee19ea120a87bebcd3da89a58ad73f"}, - {file = "iso8601-1.1.0.tar.gz", hash = "sha256:32811e7b81deee2063ea6d2e94f8819a86d1f3811e49d23623a41fa832bef03f"}, -] - -[[package]] -name = "mccabe" -version = "0.6.1" -description = "McCabe checker, plugin for flake8" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, - {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, -] - [[package]] name = "mock" -version = "5.0.1" +version = "5.1.0" description = "Rolling backport of unittest.mock for all Pythons" -category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "mock-5.0.1-py3-none-any.whl", hash = "sha256:c41cfb1e99ba5d341fbcc5308836e7d7c9786d302f995b2c271ce2144dece9eb"}, - {file = "mock-5.0.1.tar.gz", hash = "sha256:e3ea505c03babf7977fd21674a69ad328053d414f05e6433c30d8fa14a534a6b"}, + {file = "mock-5.1.0-py3-none-any.whl", hash = "sha256:18c694e5ae8a208cdb3d2c20a993ca1a7b0efa258c247a1e565150f477f83744"}, + {file = "mock-5.1.0.tar.gz", hash = 
"sha256:5e96aad5ccda4718e0a229ed94b2024df75cc2d55575ba5762d31f5767b8767d"}, ] [package.extras] @@ -315,7 +218,6 @@ test = ["pytest", "pytest-cov"] name = "mongoengine" version = "0.27.0" description = "MongoEngine is a Python Object-Document Mapper for working with MongoDB." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -330,7 +232,6 @@ pymongo = ">=3.4,<5.0" name = "mongomock" version = "4.1.2" description = "Fake pymongo stub for testing simple MongoDB-dependent code" -category = "dev" optional = false python-versions = "*" files = [ @@ -344,31 +245,26 @@ sentinels = "*" [[package]] name = "packaging" -version = "23.0" +version = "23.2" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, - {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] [[package]] name = "pluggy" -version = "1.0.0" +version = "1.3.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, ] -[package.dependencies] -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} - [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] @@ -377,7 +273,6 @@ testing = ["pytest", "pytest-benchmark"] name = "promise" version = "2.3" description = "Promises/A+ implementation for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -390,112 +285,94 @@ six = "*" [package.extras] test = ["coveralls", "futures", "mock", "pytest (>=2.7.3)", "pytest-benchmark", "pytest-cov"] -[[package]] -name = "pycodestyle" -version = "2.7.0" -description = "Python style guide checker" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, - {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, -] - -[[package]] -name = "pyflakes" -version = "2.3.1" -description = "passive checker of Python programs" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, - {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, -] - [[package]] name = "pymongo" -version = "4.3.3" 
+version = "4.6.0" description = "Python driver for MongoDB " -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "pymongo-4.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:74731c9e423c93cbe791f60c27030b6af6a948cef67deca079da6cd1bb583a8e"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux1_i686.whl", hash = "sha256:66413c50d510e5bcb0afc79880d1693a2185bcea003600ed898ada31338c004e"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:9b87b23570565a6ddaa9244d87811c2ee9cffb02a753c8a2da9c077283d85845"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:695939036a320f4329ccf1627edefbbb67cc7892b8222d297b0dd2313742bfee"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:ffcc8394123ea8d43fff8e5d000095fe7741ce3f8988366c5c919c4f5eb179d3"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:943f208840777f34312c103a2d1caab02d780c4e9be26b3714acf6c4715ba7e1"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:01f7cbe88d22440b6594c955e37312d932fd632ffed1a86d0c361503ca82cc9d"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdb87309de97c63cb9a69132e1cb16be470e58cffdfbad68fdd1dc292b22a840"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d86c35d94b5499689354ccbc48438a79f449481ee6300f3e905748edceed78e7"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a966d5304b7d90c45c404914e06bbf02c5bf7e99685c6c12f0047ef2aa837142"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be1d2ce7e269215c3ee9a215e296b7a744aff4f39233486d2c4d77f5f0c561a6"}, - {file = "pymongo-4.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55b6163dac53ef1e5d834297810c178050bd0548a4136cd4e0f56402185916ca"}, - {file = "pymongo-4.3.3-cp310-cp310-win32.whl", hash = "sha256:dc0cff74cd36d7e1edba91baa09622c35a8a57025f2f2b7a41e3f83b1db73186"}, - {file = "pymongo-4.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:cafa52873ae12baa512a8721afc20de67a36886baae6a5f394ddef0ce9391f91"}, - {file = "pymongo-4.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:599d3f6fbef31933b96e2d906b0f169b3371ff79ea6aaf6ecd76c947a3508a3d"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0640b4e9d008e13956b004d1971a23377b3d45491f87082161c92efb1e6c0d6"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:341221e2f2866a5960e6f8610f4cbac0bb13097f3b1a289aa55aba984fc0d969"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7fac06a539daef4fcf5d8288d0d21b412f9b750454cd5a3cf90484665db442a"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a51901066696c4af38c6c63a1f0aeffd5e282367ff475de8c191ec9609b56d"}, - {file = "pymongo-4.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3055510fdfdb1775bc8baa359783022f70bb553f2d46e153c094dfcb08578ff"}, - {file = "pymongo-4.3.3-cp311-cp311-win32.whl", hash = "sha256:524d78673518dcd352a91541ecd2839c65af92dc883321c2109ef6e5cd22ef23"}, - {file = "pymongo-4.3.3-cp311-cp311-win_amd64.whl", hash = 
"sha256:b8a03af1ce79b902a43f5f694c4ca8d92c2a4195db0966f08f266549e2fc49bc"}, - {file = "pymongo-4.3.3-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:39b03045c71f761aee96a12ebfbc2f4be89e724ff6f5e31c2574c1a0e2add8bd"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6fcfbf435eebf8a1765c6d1f46821740ebe9f54f815a05c8fc30d789ef43cb12"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:7d43ac9c7eeda5100fb0a7152fab7099c9cf9e5abd3bb36928eb98c7d7a339c6"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3b93043b14ba7eb08c57afca19751658ece1cfa2f0b7b1fb5c7a41452fbb8482"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:c09956606c08c4a7c6178a04ba2dd9388fcc5db32002ade9c9bc865ab156ab6d"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:b0cfe925610f2fd59555bb7fc37bd739e4b197d33f2a8b2fae7b9c0c6640318c"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:4d00b91c77ceb064c9b0459f0d6ea5bfdbc53ea9e17cf75731e151ef25a830c7"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:c6258a3663780ae47ba73d43eb63c79c40ffddfb764e09b56df33be2f9479837"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29e758f0e734e1e90357ae01ec9c6daf19ff60a051192fe110d8fb25c62600e"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12f3621a46cdc7a9ba8080422262398a91762a581d27e0647746588d3f995c88"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:47f7aa217b25833cd6f0e72b0d224be55393c2692b4f5e0561cb3beeb10296e9"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2fdc855149efe7cdcc2a01ca02bfa24761c640203ea94df467f3baf19078be"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5effd87c7d363890259eac16c56a4e8da307286012c076223997f8cc4a8c435b"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6dd1cf2995fdbd64fc0802313e8323f5fa18994d51af059b5b8862b73b5e53f0"}, - {file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bb869707d8e30645ed6766e44098600ca6cdf7989c22a3ea2b7966bb1d98d4b2"}, - {file = "pymongo-4.3.3-cp37-cp37m-win32.whl", hash = "sha256:49210feb0be8051a64d71691f0acbfbedc33e149f0a5d6e271fddf6a12493fed"}, - {file = "pymongo-4.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:54c377893f2cbbffe39abcff5ff2e917b082c364521fa079305f6f064e1a24a9"}, - {file = "pymongo-4.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c184ec5be465c0319440734491e1aa4709b5f3ba75fdfc9dbbc2ae715a7f6829"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:dca34367a4e77fcab0693e603a959878eaf2351585e7d752cac544bc6b2dee46"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cd6a4afb20fb3c26a7bfd4611a0bbb24d93cbd746f5eb881f114b5e38fd55501"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:0c466710871d0026c190fc4141e810cf9d9affbf4935e1d273fbdc7d7cda6143"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:d07d06dba5b5f7d80f9cc45501456e440f759fe79f9895922ed486237ac378a8"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:711bc52cb98e7892c03e9b669bebd89c0a890a90dbc6d5bb2c47f30239bac6e9"}, 
- {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:34b040e095e1671df0c095ec0b04fc4ebb19c4c160f87c2b55c079b16b1a6b00"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:4ed00f96e147f40b565fe7530d1da0b0f3ab803d5dd5b683834500fa5d195ec4"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef888f48eb9203ee1e04b9fb27429017b290fb916f1e7826c2f7808c88798394"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:316498b642c00401370b2156b5233b256f9b33799e0a8d9d0b8a7da217a20fca"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa7e202feb683dad74f00dea066690448d0cfa310f8a277db06ec8eb466601b5"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52896e22115c97f1c829db32aa2760b0d61839cfe08b168c2b1d82f31dbc5f55"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c051fe37c96b9878f37fa58906cb53ecd13dcb7341d3a85f1e2e2f6b10782d9"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5134d33286c045393c7beb51be29754647cec5ebc051cf82799c5ce9820a2ca2"}, - {file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a9c2885b4a8e6e39db5662d8b02ca6dcec796a45e48c2de12552841f061692ba"}, - {file = "pymongo-4.3.3-cp38-cp38-win32.whl", hash = "sha256:a6cd6f1db75eb07332bd3710f58f5fce4967eadbf751bad653842750a61bda62"}, - {file = "pymongo-4.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:d5571b6978750601f783cea07fb6b666837010ca57e5cefa389c1d456f6222e2"}, - {file = "pymongo-4.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:81d1a7303bd02ca1c5be4aacd4db73593f573ba8e0c543c04c6da6275fd7a47e"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:016c412118e1c23fef3a1eada4f83ae6e8844fd91986b2e066fc1b0013cdd9ae"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:8fd6e191b92a10310f5a6cfe10d6f839d79d192fb02480bda325286bd1c7b385"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:e2961b05f9c04a53da8bfc72f1910b6aec7205fcf3ac9c036d24619979bbee4b"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:b38a96b3eed8edc515b38257f03216f382c4389d022a8834667e2bc63c0c0c31"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:c1a70c51da9fa95bd75c167edb2eb3f3c4d27bc4ddd29e588f21649d014ec0b7"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:8a06a0c02f5606330e8f2e2f3b7949877ca7e4024fa2bff5a4506bec66c49ec7"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:6c2216d8b6a6d019c6f4b1ad55f890e5e77eb089309ffc05b6911c09349e7474"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eac0a143ef4f28f49670bf89cb15847eb80b375d55eba401ca2f777cd425f338"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08fc250b5552ee97ceeae0f52d8b04f360291285fc7437f13daa516ce38fdbc6"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704d939656e21b073bfcddd7228b29e0e8a93dd27b54240eaafc0b9a631629a6"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1074f1a6f23e28b983c96142f2d45be03ec55d93035b471c26889a7ad2365db3"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b16250238de8dafca225647608dddc7bbb5dce3dd53b4d8e63c1cc287394c2f"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7761cacb8745093062695b11574effea69db636c2fd0a9269a1f0183712927b4"}, - {file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fd7bb378d82b88387dc10227cfd964f6273eb083e05299e9b97cbe075da12d11"}, - {file = "pymongo-4.3.3-cp39-cp39-win32.whl", hash = "sha256:dc24d245026a72d9b4953729d31813edd4bd4e5c13622d96e27c284942d33f24"}, - {file = "pymongo-4.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:fc28e8d85d392a06434e9a934908d97e2cf453d69488d2bcd0bfb881497fd975"}, - {file = "pymongo-4.3.3.tar.gz", hash = "sha256:34e95ffb0a68bffbc3b437f2d1f25fc916fef3df5cdeed0992da5f42fae9b807"}, + {file = "pymongo-4.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c011bd5ad03cc096f99ffcfdd18a1817354132c1331bed7a837a25226659845f"}, + {file = "pymongo-4.6.0-cp310-cp310-manylinux1_i686.whl", hash = "sha256:5e63146dbdb1eac207464f6e0cfcdb640c9c5ff0f57b754fa96fe252314a1dc6"}, + {file = "pymongo-4.6.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:2972dd1f1285866aba027eff2f4a2bbf8aa98563c2ced14cb34ee5602b36afdf"}, + {file = "pymongo-4.6.0-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:a0be99b599da95b7a90a918dd927b20c434bea5e1c9b3efc6a3c6cd67c23f813"}, + {file = "pymongo-4.6.0-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:9b0f98481ad5dc4cb430a60bbb8869f05505283b9ae1c62bdb65eb5e020ee8e3"}, + {file = "pymongo-4.6.0-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:256c503a75bd71cf7fb9ebf889e7e222d49c6036a48aad5a619f98a0adf0e0d7"}, + {file = "pymongo-4.6.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:b4ad70d7cac4ca0c7b31444a0148bd3af01a2662fa12b1ad6f57cd4a04e21766"}, + {file = "pymongo-4.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5717a308a703dda2886a5796a07489c698b442f5e409cf7dc2ac93de8d61d764"}, + {file = "pymongo-4.6.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8f7f9feecae53fa18d6a3ea7c75f9e9a1d4d20e5c3f9ce3fba83f07bcc4eee2"}, + {file = "pymongo-4.6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:128b1485753106c54af481789cdfea12b90a228afca0b11fb3828309a907e10e"}, + {file = "pymongo-4.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3077a31633beef77d057c6523f5de7271ddef7bde5e019285b00c0cc9cac1e3"}, + {file = "pymongo-4.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ebf02c32afa6b67e5861a27183dd98ed88419a94a2ab843cc145fb0bafcc5b28"}, + {file = "pymongo-4.6.0-cp310-cp310-win32.whl", hash = "sha256:b14dd73f595199f4275bed4fb509277470d9b9059310537e3b3daba12b30c157"}, + {file = "pymongo-4.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:8adf014f2779992eba3b513e060d06f075f0ab2fb3ad956f413a102312f65cdf"}, + {file = "pymongo-4.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ba51129fcc510824b6ca6e2ce1c27e3e4d048b6e35d3ae6f7e517bed1b8b25ce"}, + {file = "pymongo-4.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2973f113e079fb98515722cd728e1820282721ec9fd52830e4b73cabdbf1eb28"}, + {file = 
"pymongo-4.6.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:af425f323fce1b07755edd783581e7283557296946212f5b1a934441718e7528"}, + {file = "pymongo-4.6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ec71ac633b126c0775ed4604ca8f56c3540f5c21a1220639f299e7a544b55f9"}, + {file = "pymongo-4.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ec6c20385c5a58e16b1ea60c5e4993ea060540671d7d12664f385f2fb32fe79"}, + {file = "pymongo-4.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:85f2cdc400ee87f5952ebf2a117488f2525a3fb2e23863a8efe3e4ee9e54e4d1"}, + {file = "pymongo-4.6.0-cp311-cp311-win32.whl", hash = "sha256:7fc2bb8a74dcfcdd32f89528e38dcbf70a3a6594963d60dc9595e3b35b66e414"}, + {file = "pymongo-4.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:6695d7136a435c1305b261a9ddb9b3ecec9863e05aab3935b96038145fd3a977"}, + {file = "pymongo-4.6.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d603edea1ff7408638b2504905c032193b7dcee7af269802dbb35bc8c3310ed5"}, + {file = "pymongo-4.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79f41576b3022c2fe9780ae3e44202b2438128a25284a8ddfa038f0785d87019"}, + {file = "pymongo-4.6.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:49f2af6cf82509b15093ce3569229e0d53c90ad8ae2eef940652d4cf1f81e045"}, + {file = "pymongo-4.6.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ecd9e1fa97aa11bf67472220285775fa15e896da108f425e55d23d7540a712ce"}, + {file = "pymongo-4.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d2be5c9c3488fa8a70f83ed925940f488eac2837a996708d98a0e54a861f212"}, + {file = "pymongo-4.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ab6bcc8e424e07c1d4ba6df96f7fb963bcb48f590b9456de9ebd03b88084fe8"}, + {file = "pymongo-4.6.0-cp312-cp312-win32.whl", hash = "sha256:47aa128be2e66abd9d1a9b0437c62499d812d291f17b55185cb4aa33a5f710a4"}, + {file = "pymongo-4.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:014e7049dd019a6663747ca7dae328943e14f7261f7c1381045dfc26a04fa330"}, + {file = "pymongo-4.6.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:288c21ab9531b037f7efa4e467b33176bc73a0c27223c141b822ab4a0e66ff2a"}, + {file = "pymongo-4.6.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:747c84f4e690fbe6999c90ac97246c95d31460d890510e4a3fa61b7d2b87aa34"}, + {file = "pymongo-4.6.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:055f5c266e2767a88bb585d01137d9c7f778b0195d3dbf4a487ef0638be9b651"}, + {file = "pymongo-4.6.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:82e620842e12e8cb4050d2643a81c8149361cd82c0a920fa5a15dc4ca8a4000f"}, + {file = "pymongo-4.6.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:6b18276f14b4b6d92e707ab6db19b938e112bd2f1dc3f9f1a628df58e4fd3f0d"}, + {file = "pymongo-4.6.0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:680fa0fc719e1a3dcb81130858368f51d83667d431924d0bcf249644bce8f303"}, + {file = "pymongo-4.6.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:3919708594b86d0f5cdc713eb6fccd3f9b9532af09ea7a5d843c933825ef56c4"}, + {file = "pymongo-4.6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db082f728160369d9a6ed2e722438291558fc15ce06d0a7d696a8dad735c236b"}, + {file = "pymongo-4.6.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:1e4ed21029d80c4f62605ab16398fe1ce093fff4b5f22d114055e7d9fbc4adb0"}, + {file = "pymongo-4.6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bea9138b0fc6e2218147e9c6ce1ff76ff8e29dc00bb1b64842bd1ca107aee9f"}, + {file = "pymongo-4.6.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a0269811661ba93c472c8a60ea82640e838c2eb148d252720a09b5123f2c2fe"}, + {file = "pymongo-4.6.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d6a1b1361f118e7fefa17ae3114e77f10ee1b228b20d50c47c9f351346180c8"}, + {file = "pymongo-4.6.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e3b0127b260d4abae7b62203c4c7ef0874c901b55155692353db19de4b18bc4"}, + {file = "pymongo-4.6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a49aca4d961823b2846b739380c847e8964ff7ae0f0a683992b9d926054f0d6d"}, + {file = "pymongo-4.6.0-cp37-cp37m-win32.whl", hash = "sha256:09c7de516b08c57647176b9fc21d929d628e35bcebc7422220c89ae40b62126a"}, + {file = "pymongo-4.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:81dd1308bd5630d2bb5980f00aa163b986b133f1e9ed66c66ce2a5bc3572e891"}, + {file = "pymongo-4.6.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:2f8c04277d879146eacda920476e93d520eff8bec6c022ac108cfa6280d84348"}, + {file = "pymongo-4.6.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:5802acc012bbb4bce4dff92973dff76482f30ef35dd4cb8ab5b0e06aa8f08c80"}, + {file = "pymongo-4.6.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:ccd785fafa1c931deff6a7116e9a0d402d59fabe51644b0d0c268295ff847b25"}, + {file = "pymongo-4.6.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fe03bf25fae4b95d8afe40004a321df644400fdcba4c8e5e1a19c1085b740888"}, + {file = "pymongo-4.6.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:2ca0ba501898b2ec31e6c3acf90c31910944f01d454ad8e489213a156ccf1bda"}, + {file = "pymongo-4.6.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:10a379fb60f1b2406ae57b8899bacfe20567918c8e9d2d545e1b93628fcf2050"}, + {file = "pymongo-4.6.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:a4dc1319d0c162919ee7f4ee6face076becae2abbd351cc14f1fe70af5fb20d9"}, + {file = "pymongo-4.6.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ddef295aaf80cefb0c1606f1995899efcb17edc6b327eb6589e234e614b87756"}, + {file = "pymongo-4.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:518c90bdd6e842c446d01a766b9136fec5ec6cc94f3b8c3f8b4a332786ee6b64"}, + {file = "pymongo-4.6.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b80a4ee19b3442c57c38afa978adca546521a8822d663310b63ae2a7d7b13f3a"}, + {file = "pymongo-4.6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb438a8bf6b695bf50d57e6a059ff09652a07968b2041178b3744ea785fcef9b"}, + {file = "pymongo-4.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3db7d833a7c38c317dc95b54e27f1d27012e031b45a7c24e360b53197d5f6e7"}, + {file = "pymongo-4.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3729b8db02063da50eeb3db88a27670d85953afb9a7f14c213ac9e3dca93034b"}, + {file = "pymongo-4.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:39a1cd5d383b37285641d5a7a86be85274466ae336a61b51117155936529f9b3"}, + {file = "pymongo-4.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7b0e6361754ac596cd16bfc6ed49f69ffcd9b60b7bc4bcd3ea65c6a83475e4ff"}, + {file = 
"pymongo-4.6.0-cp38-cp38-win32.whl", hash = "sha256:806e094e9e85d8badc978af8c95b69c556077f11844655cb8cd2d1758769e521"}, + {file = "pymongo-4.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1394c4737b325166a65ae7c145af1ebdb9fb153ebedd37cf91d676313e4a67b8"}, + {file = "pymongo-4.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a8273e1abbcff1d7d29cbbb1ea7e57d38be72f1af3c597c854168508b91516c2"}, + {file = "pymongo-4.6.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:e16ade71c93f6814d095d25cd6d28a90d63511ea396bd96e9ffcb886b278baaa"}, + {file = "pymongo-4.6.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:325701ae7b56daa5b0692305b7cb505ca50f80a1288abb32ff420a8a209b01ca"}, + {file = "pymongo-4.6.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:cc94f9fea17a5af8cf1a343597711a26b0117c0b812550d99934acb89d526ed2"}, + {file = "pymongo-4.6.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:21812453354b151200034750cd30b0140e82ec2a01fd4357390f67714a1bfbde"}, + {file = "pymongo-4.6.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:0634994b026336195778e5693583c060418d4ab453eff21530422690a97e1ee8"}, + {file = "pymongo-4.6.0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:ad4f66fbb893b55f96f03020e67dcab49ffde0177c6565ccf9dec4fdf974eb61"}, + {file = "pymongo-4.6.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:2703a9f8f5767986b4f51c259ff452cc837c5a83c8ed5f5361f6e49933743b2f"}, + {file = "pymongo-4.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bafea6061d63059d8bc2ffc545e2f049221c8a4457d236c5cd6a66678673eab"}, + {file = "pymongo-4.6.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f28ae33dc5a0b9cee06e95fd420e42155d83271ab75964baf747ce959cac5f52"}, + {file = "pymongo-4.6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16a534da0e39785687b7295e2fcf9a339f4a20689024983d11afaa4657f8507"}, + {file = "pymongo-4.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef67fedd863ffffd4adfd46d9d992b0f929c7f61a8307366d664d93517f2c78e"}, + {file = "pymongo-4.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05c30fd35cc97f14f354916b45feea535d59060ef867446b5c3c7f9b609dd5dc"}, + {file = "pymongo-4.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1c63e3a2e8fb815c4b1f738c284a4579897e37c3cfd95fdb199229a1ccfb638a"}, + {file = "pymongo-4.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e5e193f89f4f8c1fe273f9a6e6df915092c9f2af6db2d1afb8bd53855025c11f"}, + {file = "pymongo-4.6.0-cp39-cp39-win32.whl", hash = "sha256:a09bfb51953930e7e838972ddf646c5d5f984992a66d79da6ba7f6a8d8a890cd"}, + {file = "pymongo-4.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:107a234dc55affc5802acb3b6d83cbb8c87355b38a9457fcd8806bdeb8bce161"}, + {file = "pymongo-4.6.0.tar.gz", hash = "sha256:fb1c56d891f9e34303c451998ef62ba52659648bb0d75b03c5e4ac223a3342c2"}, ] [package.dependencies] @@ -503,47 +380,62 @@ dnspython = ">=1.16.0,<3.0.0" [package.extras] aws = ["pymongo-auth-aws (<2.0.0)"] -encryption = ["pymongo-auth-aws (<2.0.0)", "pymongocrypt (>=1.3.0,<2.0.0)"] -gssapi = ["pykerberos"] -ocsp = ["certifi", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"] +encryption = ["certifi", "pymongo[aws]", "pymongocrypt (>=1.6.0,<2.0.0)"] +gssapi = ["pykerberos", "winkerberos (>=0.5.0)"] +ocsp = ["certifi", "cryptography (>=2.5)", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"] 
snappy = ["python-snappy"] +test = ["pytest (>=7)"] zstd = ["zstandard"] [[package]] name = "pytest" -version = "7.2.2" +version = "7.4.3" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.2.2-py3-none-any.whl", hash = "sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e"}, - {file = "pytest-7.2.2.tar.gz", hash = "sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4"}, + {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, + {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, ] [package.dependencies] -attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.21.1" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"}, + {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] [[package]] name = "pytest-cov" -version = "4.0.0" +version = "4.1.0" description = "Pytest plugin for measuring coverage." -category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, - {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, ] [package.dependencies] @@ -553,38 +445,77 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] +[[package]] +name = "ruff" +version = "0.1.6" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.1.6-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:88b8cdf6abf98130991cbc9f6438f35f6e8d41a02622cc5ee130a02a0ed28703"}, + {file = "ruff-0.1.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5c549ed437680b6105a1299d2cd30e4964211606eeb48a0ff7a93ef70b902248"}, + {file = "ruff-0.1.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cf5f701062e294f2167e66d11b092bba7af6a057668ed618a9253e1e90cfd76"}, + {file = "ruff-0.1.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:05991ee20d4ac4bb78385360c684e4b417edd971030ab12a4fbd075ff535050e"}, + {file = "ruff-0.1.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87455a0c1f739b3c069e2f4c43b66479a54dea0276dd5d4d67b091265f6fd1dc"}, + {file = "ruff-0.1.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:683aa5bdda5a48cb8266fcde8eea2a6af4e5700a392c56ea5fb5f0d4bfdc0240"}, + {file = "ruff-0.1.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:137852105586dcbf80c1717facb6781555c4e99f520c9c827bd414fac67ddfb6"}, + {file = "ruff-0.1.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd98138a98d48a1c36c394fd6b84cd943ac92a08278aa8ac8c0fdefcf7138f35"}, + {file = "ruff-0.1.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a0cd909d25f227ac5c36d4e7e681577275fb74ba3b11d288aff7ec47e3ae745"}, + {file = "ruff-0.1.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e8fd1c62a47aa88a02707b5dd20c5ff20d035d634aa74826b42a1da77861b5ff"}, + {file = "ruff-0.1.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fd89b45d374935829134a082617954120d7a1470a9f0ec0e7f3ead983edc48cc"}, + {file = "ruff-0.1.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:491262006e92f825b145cd1e52948073c56560243b55fb3b4ecb142f6f0e9543"}, + {file = "ruff-0.1.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ea284789861b8b5ca9d5443591a92a397ac183d4351882ab52f6296b4fdd5462"}, + {file = "ruff-0.1.6-py3-none-win32.whl", hash = "sha256:1610e14750826dfc207ccbcdd7331b6bd285607d4181df9c1c6ae26646d6848a"}, + {file = "ruff-0.1.6-py3-none-win_amd64.whl", hash = "sha256:4558b3e178145491e9bc3b2ee3c4b42f19d19384eaa5c59d10acf6e8f8b57e33"}, + {file = "ruff-0.1.6-py3-none-win_arm64.whl", hash = "sha256:03910e81df0d8db0e30050725a5802441c2022ea3ae4fe0609b76081731accbc"}, + {file = "ruff-0.1.6.tar.gz", hash = "sha256:1b09f29b16c6ead5ea6b097ef2764b42372aebe363722f1605ecbcd2b9207184"}, +] + [[package]] name = "sentinels" version = "1.0.0" description = "Various objects to denote special meanings in python" -category = "dev" optional = false python-versions = "*" files = [ {file = "sentinels-1.0.0.tar.gz", hash = "sha256:7be0704d7fe1925e397e92d18669ace2f619c92b5d4eb21a89f31e026f9ff4b1"}, ] +[[package]] +name = "setuptools" +version = "69.0.2" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.0.2-py3-none-any.whl", hash = "sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2"}, + {file = "setuptools-69.0.2.tar.gz", hash = "sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", 
"sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "singledispatch" -version = "4.0.0" +version = "4.1.0" description = "Backport functools.singledispatch to older Pythons." -category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "singledispatch-4.0.0-py2.py3-none-any.whl", hash = "sha256:b8f69397a454b45b91e2f949fcc87896c53718ca59aab6367966e8b3f010ec77"}, - {file = "singledispatch-4.0.0.tar.gz", hash = "sha256:f3c327a968651a7f4b03586eab7d90a07b05ff3ef7942d1967036eb9f75ab8fc"}, + {file = "singledispatch-4.1.0-py2.py3-none-any.whl", hash = "sha256:6061bd291204beaeac90cdbc342b68d213b7a6efb44ae6c5e6422a78be351c8a"}, + {file = "singledispatch-4.1.0.tar.gz", hash = "sha256:f3430b886d5b4213d07d715096a75da5e4a8105284c497b9aee6d6d48bfe90cb"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [[package]] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -596,7 +527,6 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -606,33 +536,16 @@ files = [ [[package]] name = "typing-extensions" -version = "4.5.0" -description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" +version = "4.8.0" +description = "Backported and Experimental Type Hints for Python 3.8+" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, - {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, ] -[[package]] -name = "zipp" -version = "3.15.0" -description = "Backport of pathlib-compatible object wrapper for zip files" 
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"},
-    {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"},
-]
-
-[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
-
 [metadata]
 lock-version = "2.0"
-python-versions = ">=3.7,<4"
-content-hash = "51ad1be0be6ab1c6aff81bbedfa5bf8e986a1bd5e8e6471234d2adcc002bc30a"
+python-versions = ">=3.8,<4"
+content-hash = "3abff70f60eafa1b72442b3a5cb3dce4e313ed9029de12d87ea8e29060a44952"
diff --git a/pyproject.toml b/pyproject.toml
index 1c208d3c..0bc3d0a1 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -14,11 +14,11 @@ classifiers = [
     "Development Status :: 4 - Beta",
     "Intended Audience :: Developers",
     "Topic :: Software Development :: Libraries",
-    "Programming Language :: Python :: 3.7",
     "Programming Language :: Python :: 3.8",
     "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
     "Programming Language :: Python :: Implementation :: PyPy",
     "License :: OSI Approved :: MIT License",
 ]
@@ -27,22 +27,26 @@ keywords = [
 ]
 
 [tool.poetry.dependencies]
-python = ">=3.7,<4"
+python = ">=3.8,<4"
 graphene = ">=3.1.1"
 promise = ">=2.3"
 mongoengine = ">=0.27"
-singledispatch = ">=3.7.0"
-iso8601 = "*"
-asgiref = "^3.6.0"
+singledispatch = ">=4.1.0"
+asgiref = "^3.7.2"
 
 [tool.poetry.group.dev.dependencies]
 pytest = "*"
 mongomock = ">=4.1.2"
 mock = ">=5.0.1"
-flake8 = "*"
 pytest-cov = "*"
+pytest-asyncio = "^0.21.0"
+ruff = "^0.1.6"
+setuptools = "^69.0.2"
 
 [build-system]
 requires = ["poetry-core"]
 build-backend = "poetry.core.masonry.api"
+
+[tool.ruff]
+line-length = 100
\ No newline at end of file
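
The pyproject.toml changes above swap flake8 for ruff and add pytest-asyncio to the dev group, the pytest plugin that lets the suite collect and run `async def` tests directly. The snippet below is a minimal sketch of such a test, not code from this repository: the coroutine `fetch_users` and its return values are hypothetical stand-ins for an async resolver, and it assumes the plugin's default "strict" mode, in which the explicit marker is required.

```python
import pytest


# Hypothetical coroutine standing in for an async resolver under test.
async def fetch_users():
    return ["alice", "bob"]


# pytest-asyncio (pinned above as ^0.21.0) runs this coroutine test
# natively; the marker opts it in under the default strict mode.
@pytest.mark.asyncio
async def test_fetch_users():
    assert await fetch_users() == ["alice", "bob"]
```

Pinning `pytest-asyncio = "^0.21.0"` and `ruff = "^0.1.6"` in the dev group, together with the regenerated lock (note the new `content-hash` above), keeps the async test suite and the lint toolchain reproducible across the supported 3.8–3.12 interpreters.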