[英]How to add filtering by id to my user resolver?
我有这样的 schema.py:
import graphene
from app import db
from graphene import relay
from app.models import User as UserModel, Event as EventModel
from graphene_sqlalchemy import SQLAlchemyConnectionField, SQLAlchemyObjectType
class User(SQLAlchemyObjectType):
    """GraphQL object type auto-generated from the SQLAlchemy ``User`` model."""

    class Meta:
        model = UserModel            # columns are mapped to GraphQL fields automatically
        interfaces = (relay.Node, )  # adds the relay-style opaque global ``id`` field
class UserConnection(relay.Connection):
    """Relay connection (edges/pageInfo pagination wrapper) over ``User`` nodes."""

    class Meta:
        node = User
class Event(SQLAlchemyObjectType):
    """GraphQL object type auto-generated from the SQLAlchemy ``Event`` model."""

    class Meta:
        model = EventModel           # columns are mapped to GraphQL fields automatically
        interfaces = (relay.Node, )  # adds the relay-style opaque global ``id`` field
class EventConnections(relay.Connection):
    """Relay connection (edges/pageInfo pagination wrapper) over ``Event`` nodes."""

    class Meta:
        node = Event
class CreateUser(graphene.Mutation):
    """Mutation that registers a new user unless the username is already taken."""

    class Arguments:
        username = graphene.String(required=True)
        fname = graphene.String(required=True)
        surname = graphene.String(required=True)
        email = graphene.String(required=True)
        password = graphene.String(required=True)

    user = graphene.Field(lambda: User)

    def mutate(self, info, username, fname, surname, email, password):
        # Guard clause: a taken username aborts the mutation and the client
        # receives a null payload (no row is written in that case).
        if UserModel.query.filter_by(username=username).first() is not None:
            return None
        # NOTE(review): password appears to be persisted as-is — confirm that
        # hashing happens elsewhere (e.g. in the model) before relying on this.
        new_user = UserModel(
            username=username,
            fname=fname,
            surname=surname,
            email=email,
            password=password,
        )
        db.session.add(new_user)
        db.session.commit()
        return CreateUser(user=new_user)
class Query(graphene.ObjectType):
    """Root query type: single-record lookups plus relay connection fields."""

    node = relay.Node.Field()

    # Queries that return individual models. The optional ``id`` argument
    # accepts the relay *global* id (e.g. "VXNlcjoy") that this schema itself
    # hands out, so clients can round-trip ids without ever seeing the
    # integer database primary key. Passing ``username``/``title`` still
    # works exactly as before (backward compatible).
    user = graphene.Field(lambda: User, username=graphene.String(), id=graphene.ID())
    event = graphene.Field(lambda: Event, title=graphene.String(), id=graphene.ID())

    # Queries that return all models of a given type.
    all_users = SQLAlchemyConnectionField(UserConnection)
    all_events = SQLAlchemyConnectionField(EventConnections)

    def resolve_user(self, info, **kwargs):
        # Prefer the relay global id when supplied: graphene decodes it back
        # to (type, database pk) and fetches the row — no manual base64 work.
        global_id = kwargs.get('id')
        if global_id is not None:
            return relay.Node.get_node_from_global_id(info, global_id, only_type=User)
        username = kwargs.get('username')
        query = User.get_query(info)
        return query.filter(UserModel.username == username).first()

    def resolve_event(self, info, **kwargs):
        global_id = kwargs.get('id')
        if global_id is not None:
            return relay.Node.get_node_from_global_id(info, global_id, only_type=Event)
        title = kwargs.get('title')
        query = Event.get_query(info)
        return query.filter(EventModel.title == title).first()
class Mutation(graphene.ObjectType):
    """Root mutation type exposing the schema's write operations."""

    create_user = CreateUser.Field()
schema = graphene.Schema(query=Query, mutation=Mutation, types=[User, Event])
我的模型是这样的:
from app import db
from datetime import datetime
class User(db.Model):
    """Application user. ``id`` is the integer primary key; graphene-sqlalchemy
    exposes it through the relay ``Node`` interface as an opaque global id."""

    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(30), unique=True, nullable=False)
    fname = db.Column(db.String(20), nullable=False)
    surname = db.Column(db.String(35), nullable=False)
    is_verified = db.Column(db.Boolean(), default=False)
    profile_pic = db.Column(db.Text(), default="default.jpg")
    email = db.Column(db.String(120), nullable=False)
    password = db.Column(db.String(50), nullable=False)
    events = db.relationship("Event", backref="organizer", lazy=True)

    def __repr__(self):
        # Bug fix: __repr__ must *return* a string — the original printed and
        # implicitly returned None, making repr()/f-string interpolation raise
        # TypeError. It also referenced a nonexistent ``self.name`` (the
        # column is ``fname``) and left the closing paren out of the message.
        return f"User({self.id}, {self.fname}, {self.surname}, {self.email})"
class Event(db.Model):
    """Event created by a user (reachable as ``organizer`` via User.events)."""

    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(120), nullable=False)
    # Bug fix: the original ``default=datetime.now().isoformat()`` was
    # evaluated ONCE at import time, stamping every row with the server start
    # time. A callable default is invoked per INSERT instead.
    date_posted = db.Column(db.String(50), nullable=False,
                            default=lambda: datetime.now().isoformat())
    description = db.Column(db.Text)
    user_id = db.Column(db.Integer, db.ForeignKey("user.id"), nullable=False)
因此,如果我想获取所有用户,请输入以下内容:
query{
allUsers{
edges{
node{
id
username
email
}
}
}
}
当我获取 id 时,得到的是类似 "id": "VXNlcjoy" 这样的字符串
但在我的模型中,id 是一个整数。 如果我确实更改了我的解析器以按前面提到的 id 过滤,sqlalchemy 会抛出一个错误,因为数据库中的 id 是整数。 然后我更改我的解析器以接受作为整数的 id,并按真实的 id 进行过滤。 更改后使用解析器进行过滤工作,但会发生这种情况:
{
user(id: 1){
id
}
}
回复:
{
"data": {
"user": {
"id": "VXNlcjoy"
}
}
}
我想了一下,我认为有两种方法:
- 让 graphql 将数据库中的整数 id 返回给我
- 以某种方式通过 graphql 给出的字符串 id 使解析器查询用户和事件(“VXNlcjoy”)
我怎样才能做到这一点?
我找到了原因:我的列名覆盖了名为 id 的 GraphQL 保留字段(relay 用它作为缓存和分页的全局标识)。于是我在 models.py 中把列 id 重命名为 uuid,并把表 Event 中的
user_id = db.Column(db.Integer, db.ForeignKey("user.id"), nullable=False)
到
user_uuid = db.Column(db.Integer, db.ForeignKey("user.uuid"), nullable=False)
得到了这个:
mutation{
createUser(email:"saki1@saki.com", fname:"saki",surname:"sakal",username:"saki709", password:"sakisaki"){
user{
uuid
id
}
}
}
回复:
{
"data": {
"createUser": {
"user": {
"uuid": "1",
"id": "VXNlcjox"
}
}
}
}
TL;DR - 将 id 重命名为 uuid 因为 graphql 有它自己的变量 id
声明:本站的技术帖子网页,遵循CC BY-SA 4.0协议,如果您需要转载,请注明本站网址或者原文地址。任何问题请咨询:yoyou2525@163.com.