Thursday, November 29, 2018

graphene-sqlalchemy appending null filled stream of rows after valid table content

I am using graphene-sqlalchemy to reflectively generate a complete GraphQL interface to a Postgres database.

With my code I am able to explore the schema successfully in GraphiQL. When I query the contents of a table I get the correct rows, but they are followed by a stream of thousands of null-filled rows with freshly generated ascending primary keys that do not exist in the database. What could be causing this?

The code is included below. Both query1 and query2 display the above behaviour:

import re
import sys
import inflect
import warnings
import graphene
from graphene import relay
from graphene_sqlalchemy import SQLAlchemyObjectType, SQLAlchemyConnectionField

def camelize_classname(base, tablename, table):
    """Produce a 'camelized' class name,
    e.g. 'words_and_underscores' -> 'WordsAndUnderscores'."""
    return tablename[0].upper() + re.sub(
        r'_([a-z])', lambda m: m.group(1).upper(), tablename[1:])

_pluralizer = inflect.engine()

def pluralize_collection(base, local_cls, referred_cls, constraint):
    """Produce an 'uncamelized', 'pluralized' collection name,
    e.g. 'SomeTerm' -> 'some_terms'."""
    referred_name = referred_cls.__name__
    uncamelized = re.sub(r'[A-Z]',
                         lambda m: "_%s" % m.group(0).lower(),
                         referred_name)[1:]
    return _pluralizer.plural(uncamelized)

def name_for_scalar_relationship(base, local_cls, referred_cls, constraint):
    name = referred_cls.__name__.lower()
    local_table = local_cls.__table__
    if name in local_table.columns:
        newname = name + "_"
        warnings.warn(
            "Name %s already present; using %s instead" % (name, newname))
        return newname
    return name
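# For illustration (not part of the original post), the three hooks above map:
#   table 'user_account'                    -> class 'UserAccount'
#   referred class 'SomeTerm' (collection)  -> attribute 'some_terms'
#   referred class 'Topic' (scalar)         -> attribute 'topic' (or 'topic_'
#                                              on a column-name collision)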


from sqlalchemy import create_engine
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import scoped_session, sessionmaker

Base = automap_base()

engine = create_engine(MyURL)  # MyURL: placeholder for the Postgres connection URL
db_session = scoped_session(sessionmaker(autocommit=False,
                                         autoflush=False,
                                         bind=engine))

Base.prepare(
    engine, reflect=True,
    classname_for_table=camelize_classname,
    name_for_collection_relationship=pluralize_collection,
    name_for_scalar_relationship=name_for_scalar_relationship
)

def meta(field, cls):
    return {'Meta': type('Meta', (), {field: cls})}
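# For reference: meta('model', SomeModel) builds the same namespace dict as
# writing
#     class Meta:
#         model = SomeModel
# in a class body, so type(name, (SQLAlchemyObjectType,), meta('model', model))
# below is equivalent to a hand-written SQLAlchemyObjectType subclass.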


roots = {'User', 'Topic', 'Resource'}

nodes = {
    name: type(name, (SQLAlchemyObjectType,), meta('model', model))
    for name, model in Base.classes.items()
}

conns = {
    name: type(name + 'Connection', (relay.Connection,), meta('node', node))
    for name, node in nodes.items()
    if name in roots
}

fields = {
    'all_' + name.lower() + 's': SQLAlchemyConnectionField(conn)
    for name, conn in conns.items()
}

lists = {
    name.lower() + 's': graphene.List(node)
    for name, node in nodes.items()
    if name in roots
}

def resolver(node):
    # Factory so that each resolver closes over its own node class.
    def resolve(self, info):
        return node.get_query(info).all()
    return resolve

resolvers = {
    'resolve_' + name.lower() + 's': resolver(node)
    for name, node in nodes.items()
    if name in roots
}


Query = type('Query', (graphene.ObjectType,),
             dict(node=relay.Node.Field(), **fields, **lists, **resolvers))
schema = graphene.Schema(query=Query)

query1 = """
{
  allUsers(sort: username_asc) {
    edges {
      node {
        id
        username
      }
    }
  }
}
"""
result1 = schema.execute(query1, context_value={'session': db_session})
print(result1.data['allUsers']['edges'])


query2 = '''
{
  users {
    id
    username
  }
}
'''
result2 = schema.execute(query2, context_value={'session': db_session})
print(result2.data['users'])
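
To double-check that the spurious rows really are absent from the table itself, the row count reported by SQLAlchemy can be compared with the number of edges returned through GraphQL. This is a minimal sketch, assuming the reflected class for the users table is named User (consistent with the roots set above):

# Sanity check (sketch): compare the row count SQLAlchemy reports with the
# number of edges the GraphQL query returned.
User = Base.classes.User                            # class produced by camelize_classname
db_count = db_session.query(User).count()           # rows actually in the table
gql_count = len(result1.data['allUsers']['edges'])  # edges returned by query1
print('rows in DB: %d, edges via GraphQL: %d' % (db_count, gql_count))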




