Skip to content

Support graphene v3 #172

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 18 commits into from
Jul 5, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
18 commits
Select commit Hold shift + click to select a range
409460f
Graphene Core 3> : Passed 93 Tests
arunsureshkumar Mar 29, 2021
76027ba
Graphene Core 3>= : Passed all tests
arunsureshkumar Mar 29, 2021
1d14e7e
support added for graphene==3.0b7
arunsureshkumar Mar 29, 2021
c95b914
requirements.txt updated
arunsureshkumar Mar 29, 2021
d430b7b
setup.py updated
arunsureshkumar Mar 29, 2021
79508d1
import condition for Enum Field updated
arunsureshkumar Mar 29, 2021
5d0bef0
Merge remote-tracking branch 'origin/support-monogengine-field-enum' …
arunsureshkumar Mar 29, 2021
0dab9bb
setup.cfg updated
arunsureshkumar Mar 29, 2021
f44dd7f
setup.cfg updated
arunsureshkumar Mar 29, 2021
cee1d30
Fixed a bug when using the same enum field multiple times
arunsureshkumar Apr 17, 2021
0579c4c
Merge remote-tracking branch 'original/master' into support-graphene-v3
arunsureshkumar Apr 17, 2021
54f545e
Merge branch 'support-monogengine-field-enum' into support-graphene-v3
arunsureshkumar Apr 17, 2021
df0d6fe
Bug fix for the Enum field Python import version check
arunsureshkumar Apr 21, 2021
5a1859a
Merge branch 'support-monogengine-field-enum' into support-graphene-v3
arunsureshkumar Apr 21, 2021
ecb2012
Fixed a slow query when computing document counts
arunsureshkumar Jun 24, 2021
abb7bf5
Fix: While querying embedded document with key value as ObjectID
arunsureshkumar Jun 30, 2021
dfd25f5
Fix: While querying embedded document with key value as ObjectID
arunsureshkumar Jun 30, 2021
de158a3
Fix: While decoding ID from base64 to ObjectID
arunsureshkumar Jun 30, 2021
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion graphene_mongo/advanced_types.py
Original file line number Diff line number Diff line change
def resolve_data(self, info):
    """Return the stored file's content as a base64-encoded string.

    Reads the file-like value found at ``self.key`` on the wrapped
    instance; returns None when the file has no data.
    """
    v = getattr(self.instance, self.key)
    data = v.read()
    if data is not None:
        # .decode("utf-8") so GraphQL serialises a str, not the Python
        # bytes repr (b'...') that graphene v3 would otherwise emit.
        return base64.b64encode(data).decode("utf-8")
    return None


Expand Down
10 changes: 6 additions & 4 deletions graphene_mongo/converter.py
Original file line number Diff line number Diff line change
Expand Up @@ -429,9 +429,11 @@ def dynamic_type():
return graphene.Dynamic(dynamic_type)


if sys.version_info >= (3, 6):
    @convert_mongoengine_field.register(mongoengine.EnumField)
    def convert_field_to_enum(field, registry=None):
        """Convert a mongoengine ``EnumField`` into a graphene Field.

        The underlying Enum class is converted to a graphene Enum at most
        once per registry, so the same Enum can back several document
        fields without creating duplicate GraphQL type names.
        """
        if not registry.check_enum_already_exist(field._enum_cls):
            registry.register_enum(field._enum_cls)
        _type = registry.get_type_for_enum(field._enum_cls)
        return graphene.Field(_type,
                              description=get_field_description(field, registry),
                              required=field.required)
63 changes: 47 additions & 16 deletions graphene_mongo/fields.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,11 @@

import graphene
import mongoengine
from bson import DBRef
from bson import DBRef, ObjectId
from graphene import Context
from graphene.types.utils import get_type
from graphene.utils.str_converters import to_snake_case
from graphql import ResolveInfo
from graphql import GraphQLResolveInfo
from mongoengine.base import get_document
from promise import Promise
from graphql_relay import from_global_id
Expand Down Expand Up @@ -168,7 +168,7 @@ def filter_args(self):
}
filter_type = advanced_filter_types.get(each, filter_type)
filter_args[field + "__" + each] = graphene.Argument(
type=filter_type
type_=filter_type
)
return filter_args

Expand Down Expand Up @@ -215,7 +215,10 @@ def fields(self):
self._type = get_type(self._type)
return self._type._meta.fields

def get_queryset(self, model, info, required_fields=list(), skip=None, limit=None, reversed=False, **args):
def get_queryset(self, model, info, required_fields=None, skip=None, limit=None, reversed=False, **args):
if required_fields is None:
required_fields = list()

if args:
reference_fields = get_model_reference_fields(self.model)
hydrated_references = {}
Expand Down Expand Up @@ -276,8 +279,13 @@ def get_queryset(self, model, info, required_fields=list(), skip=None, limit=Non
skip)
return model.objects(**args).no_dereference().only(*required_fields).order_by(self.order_by)

def default_resolver(self, _root, info, required_fields=list(), **args):
def default_resolver(self, _root, info, required_fields=None, **args):
if required_fields is None:
required_fields = list()
args = args or {}
for key, value in dict(args).items():
if value is None:
del args[key]
if _root is not None:
field_name = to_snake_case(info.field_name)
if not hasattr(_root, "_fields_ordered"):
Expand All @@ -301,9 +309,13 @@ def default_resolver(self, _root, info, required_fields=list(), **args):
limit = None
reverse = False
first = args.pop("first", None)
after = cursor_to_offset(args.pop("after", None))
after = args.pop("after", None)
if after:
after = cursor_to_offset(after)
last = args.pop("last", None)
before = cursor_to_offset(args.pop("before", None))
before = args.pop("before", None)
if before:
before = cursor_to_offset(before)
if callable(getattr(self.model, "objects", None)):
if "pk__in" in args and args["pk__in"]:
count = len(args["pk__in"])
Expand All @@ -318,20 +330,32 @@ def default_resolver(self, _root, info, required_fields=list(), **args):
args["pk__in"] = args["pk__in"][skip:]
iterables = self.get_queryset(self.model, info, required_fields, **args)
list_length = len(iterables)
if isinstance(info, ResolveInfo):
if isinstance(info, GraphQLResolveInfo):
if not info.context:
info.context = Context()
info = info._replace(context=Context())
info.context.queryset = self.get_queryset(self.model, info, required_fields, **args)
elif _root is None or args:
count = self.get_queryset(self.model, info, required_fields, **args).count()
args_copy = args.copy()
for key in args.copy():
if key not in self.model._fields_ordered:
args_copy.pop(key)
elif isinstance(getattr(self.model, key),
mongoengine.fields.ReferenceField) or isinstance(getattr(self.model, key),
mongoengine.fields.GenericReferenceField) or isinstance(
getattr(self.model, key),
mongoengine.fields.LazyReferenceField) or isinstance(getattr(self.model, key),
mongoengine.fields.CachedReferenceField):
if not isinstance(args_copy[key], ObjectId):
args_copy[key] = from_global_id(args_copy[key])[1]
count = mongoengine.get_db()[self.model._get_collection_name()].find(args_copy).count()
if count != 0:
skip, limit, reverse = find_skip_and_limit(first=first, after=after, last=last, before=before,
count=count)
iterables = self.get_queryset(self.model, info, required_fields, skip, limit, reverse, **args)
list_length = len(iterables)
if isinstance(info, ResolveInfo):
if isinstance(info, GraphQLResolveInfo):
if not info.context:
info.context = Context()
info = info._replace(context=Context())
info.context.queryset = self.get_queryset(self.model, info, required_fields, **args)

elif _root is not None:
Expand Down Expand Up @@ -367,6 +391,9 @@ def default_resolver(self, _root, info, required_fields=list(), **args):
return connection

def chained_resolver(self, resolver, is_partial, root, info, **args):
for key, value in dict(args).items():
if value is None:
del args[key]
required_fields = list()
for field in self.required_fields:
if field in self.model._fields_ordered:
Expand All @@ -378,13 +405,15 @@ def chained_resolver(self, resolver, is_partial, root, info, **args):
if not bool(args) or not is_partial:
if isinstance(self.model, mongoengine.Document) or isinstance(self.model,
mongoengine.base.metaclasses.TopLevelDocumentMetaclass):

for arg_name, arg in args.copy().items():
if arg_name not in self.model._fields_ordered + tuple(self.filter_args.keys()):
args_copy.pop(arg_name)
if isinstance(info, ResolveInfo):
if isinstance(info, GraphQLResolveInfo):
if not info.context:
info.context = Context()
info.context.queryset = self.get_queryset(self.model, info, required_fields, **args_copy)
info = info._replace(context=Context())
info.context.queryset = self.get_queryset(self.model, info, required_fields, **args)

# XXX: Filter nested args
resolved = resolver(root, info, **args)
if resolved is not None:
Expand All @@ -405,7 +434,7 @@ def chained_resolver(self, resolver, is_partial, root, info, **args):
if arg_name == '_id' and isinstance(arg, dict):
operation = list(arg.keys())[0]
args_copy['pk' + operation.replace('$', '__')] = arg[operation]
if '.' in arg_name:
if not isinstance(arg, ObjectId) and '.' in arg_name:
operation = list(arg.keys())[0]
args_copy[arg_name.replace('.', '__') + operation.replace('$', '__')] = arg[operation]
else:
Expand All @@ -415,6 +444,8 @@ def chained_resolver(self, resolver, is_partial, root, info, **args):
args_copy[arg_name + operation.replace('$', '__')] = arg[operation]
del args_copy[arg_name]
return self.default_resolver(root, info, required_fields, **args_copy)
elif isinstance(resolved, Promise):
return resolved.value
else:
return resolved
return self.default_resolver(root, info, required_fields, **args)
Expand Down
19 changes: 18 additions & 1 deletion graphene_mongo/registry.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,19 @@
from graphene import Enum


class Registry(object):
def __init__(self):
    """Create an empty registry with all lookup tables blank."""
    self._registry = dict()             # model class -> registered graphene type
    self._registry_string_map = dict()  # name-string lookup table
    self._registry_enum = dict()        # stdlib Enum class -> converted graphene Enum

def register(self, cls):
from .types import GrapheneMongoengineObjectTypes

assert issubclass(
cls,
GrapheneMongoengineObjectTypes
), 'Only Mongoengine object types can be registered, received "{}"'.format(
), 'Only Mongoengine object types can be registered, received "{}"'.format(
cls.__name__
)
assert cls._meta.registry == self, "Registry for a Model have to match."
Expand All @@ -20,9 +24,22 @@ def register(self, cls):
for model, cls in self._registry.items():
cls.rescan_fields()

def register_enum(self, cls):
    """Convert a stdlib Enum class to a graphene Enum and cache it.

    Caching the converted type lets the same Enum be reused by multiple
    fields instead of re-converting (and re-registering) it each time.
    """
    from enum import EnumMeta
    # isinstance rather than `type(cls) == EnumMeta` so custom subclasses
    # of EnumMeta (alternate enum metaclasses) are accepted as well.
    assert isinstance(cls, EnumMeta), 'Only EnumMeta can be registered, received "{}"'.format(
        cls.__name__
    )
    self._registry_enum[cls] = Enum.from_enum(cls)

def get_type_for_model(self, model):
    """Return the graphene type registered for ``model``, or None if absent."""
    return self._registry.get(model, None)

def check_enum_already_exist(self, cls):
    """Report whether ``cls`` was already handed to ``register_enum``."""
    known_enums = self._registry_enum
    return cls in known_enums

def get_type_for_enum(self, cls):
    """Return the cached graphene Enum for ``cls``, or None if unregistered."""
    return self._registry_enum.get(cls, None)


registry = None
inputs_registry = None
Expand Down
5 changes: 2 additions & 3 deletions graphene_mongo/tests/test_inputs.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,9 @@ def mutate(self, info, article):
return CreateArticle(article=article)

class Query(graphene.ObjectType):

node = Node.Field()

class Mutation(graphene.ObjectType):

create_article = CreateArticle.Field()

query = """
Expand Down Expand Up @@ -57,7 +55,8 @@ class Arguments:
def mutate(self, info, id, editor):
    """Apply the supplied editor fields to an existing Editor document.

    Only keys whose value is not None are copied, so optional arguments
    omitted from the mutation do not wipe existing data.
    """
    editor_to_update = Editor.objects.get(id=id)
    for key, value in editor.items():
        # `is not None` (not plain truthiness) so legitimate falsy values
        # such as 0, False or "" are still written to the document.
        if value is not None:
            setattr(editor_to_update, key, value)
    editor_to_update.save()
    return UpdateEditor(editor=editor_to_update)

Expand Down
2 changes: 1 addition & 1 deletion graphene_mongo/tests/test_query.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ def resolve_editors(self, *args, **kwargs):
"chunkSize": 261120,
"length": 46928,
"md5": "f3c657fd472fdc4bc2ca9056a1ae6106",
"data": str(data),
"data": data.decode("utf-8"),
},
},
"editors": [
Expand Down
8 changes: 1 addition & 7 deletions graphene_mongo/tests/test_relay_query.py
Original file line number Diff line number Diff line change
Expand Up @@ -173,7 +173,7 @@ class Query(graphene.ObjectType):
"avatar": {
"contentType": "image/jpeg",
"length": 46928,
"data": str(data),
"data": data.decode("utf-8"),
},
}
},
Expand Down Expand Up @@ -489,7 +489,6 @@ class Query(graphene.ObjectType):

def test_should_first_n(fixtures):
class Query(graphene.ObjectType):

editors = MongoengineConnectionField(nodes.EditorNode)

query = """
Expand Down Expand Up @@ -533,7 +532,6 @@ class Query(graphene.ObjectType):

def test_should_after(fixtures):
class Query(graphene.ObjectType):

players = MongoengineConnectionField(nodes.PlayerNode)

query = """
Expand Down Expand Up @@ -566,7 +564,6 @@ class Query(graphene.ObjectType):

def test_should_before(fixtures):
class Query(graphene.ObjectType):

players = MongoengineConnectionField(nodes.PlayerNode)

query = """
Expand Down Expand Up @@ -632,7 +629,6 @@ class Query(graphene.ObjectType):

def test_should_self_reference(fixtures):
class Query(graphene.ObjectType):

players = MongoengineConnectionField(nodes.PlayerNode)

query = """
Expand Down Expand Up @@ -767,7 +763,6 @@ class Query(graphene.ObjectType):

def test_should_query_with_embedded_document(fixtures):
class Query(graphene.ObjectType):

professors = MongoengineConnectionField(nodes.ProfessorVectorNode)

query = """
Expand Down Expand Up @@ -1026,7 +1021,6 @@ class Query(graphene.ObjectType):


def test_should_filter_mongoengine_queryset_by_id_and_other_fields(fixtures):

class Query(graphene.ObjectType):
players = MongoengineConnectionField(nodes.PlayerNode)

Expand Down
44 changes: 33 additions & 11 deletions graphene_mongo/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,8 @@
import mongoengine
from graphene import Node
from graphene.utils.trim_docstring import trim_docstring
from graphql.utils.ast_to_dict import ast_to_dict
# from graphql.utils.ast_to_dict import ast_to_dict
from graphql import FieldNode
from graphql_relay.connection.arrayconnection import offset_to_cursor


Expand Down Expand Up @@ -126,21 +127,24 @@ def collect_query_fields(node, fragments):
"""

field = {}

if node.get('selection_set'):
for leaf in node['selection_set']['selections']:
if leaf['kind'] == 'Field':
selection_set = None
if type(node) == dict:
selection_set = node.get('selection_set')
else:
selection_set = node.selection_set
if selection_set:
for leaf in selection_set.selections:
if leaf.kind == 'field':
field.update({
leaf['name']['value']: collect_query_fields(leaf, fragments)
leaf.name.value: collect_query_fields(leaf, fragments)
})
elif leaf['kind'] == 'FragmentSpread':
elif leaf.kind == 'fragment_spread':
field.update(collect_query_fields(fragments[leaf['name']['value']],
fragments))
elif leaf['kind'] == 'InlineFragment':
elif leaf.kind == 'inline_fragment':
field.update({
leaf["type_condition"]["name"]['value']: collect_query_fields(leaf, fragments)
leaf.type_condition.name.value: collect_query_fields(leaf, fragments)
})
pass

return field

Expand All @@ -156,7 +160,7 @@ def get_query_fields(info):
"""

fragments = {}
node = ast_to_dict(info.field_asts[0])
node = ast_to_dict(info.field_nodes[0])

for name, value in info.fragments.items():
fragments[name] = ast_to_dict(value)
Expand All @@ -167,6 +171,24 @@ def get_query_fields(info):
return query


def ast_to_dict(node, include_loc=False):
    """Convert a graphql-core ``FieldNode`` (and lists of nodes) to plain dicts.

    Replacement for the ``graphql.utils.ast_to_dict`` helper that existed in
    graphql-core 2 but was removed in graphql-core 3.  Values that are
    neither a ``FieldNode`` nor a list are returned unchanged.
    """
    if isinstance(node, list):
        # Recurse element-wise; selection lists are the common case here.
        return [ast_to_dict(entry, include_loc) for entry in node]

    if not isinstance(node, FieldNode):
        # Scalars / other AST nodes pass through untouched.
        return node

    converted = {"kind": node.__class__.__name__}
    if hasattr(node, "keys"):
        # graphql-core AST nodes expose their child attribute names via ``keys``.
        for attr_name in node.keys:
            converted[attr_name] = ast_to_dict(getattr(node, attr_name), include_loc)

    if include_loc and hasattr(node, "loc") and node.loc:
        converted["loc"] = {"start": node.loc.start, "end": node.loc.end}

    return converted


def find_skip_and_limit(first, last, after, before, count):
reverse = False
skip = 0
Expand Down
Loading