[英]How to add filtering by id to my user resolver?
我有這樣的 schema.py:
import graphene
from app import db
from graphene import relay
from app.models import User as UserModel, Event as EventModel
from graphene_sqlalchemy import SQLAlchemyConnectionField, SQLAlchemyObjectType
class User(SQLAlchemyObjectType):
    """GraphQL object type mapped onto the SQLAlchemy ``User`` model.

    Because it implements the Relay ``Node`` interface, the GraphQL ``id``
    field is a base64-encoded global ID (e.g. "VXNlcjoy"), not the raw
    integer primary key stored in the database.
    """
    class Meta:
        model = UserModel
        interfaces = (relay.Node, )
class UserConnection(relay.Connection):
    """Relay connection (edges/pageInfo pagination wrapper) over ``User``."""
    class Meta:
        node = User
class Event(SQLAlchemyObjectType):
    """GraphQL object type mapped onto the SQLAlchemy ``Event`` model.

    Implements the Relay ``Node`` interface, so its ``id`` field is the
    opaque global ID, not the integer database key.
    """
    class Meta:
        model = EventModel
        interfaces = (relay.Node, )
class EventConnections(relay.Connection):
    """Relay connection (edges/pageInfo pagination wrapper) over ``Event``."""
    class Meta:
        node = Event
class CreateUser(graphene.Mutation):
    """Mutation that inserts a new ``User`` row.

    Payload field ``user`` carries the created user. When a user with the
    given username already exists the resolver returns ``None``, so the whole
    ``createUser`` result is null — clients should treat that as
    "username already taken".
    """

    class Arguments:
        username = graphene.String(required=True)
        fname = graphene.String(required=True)
        surname = graphene.String(required=True)
        email = graphene.String(required=True)
        password = graphene.String(required=True)

    user = graphene.Field(lambda: User)

    def mutate(self, info, username, fname, surname, email, password):
        # Guard clause: the username column is unique — refuse duplicates
        # up front instead of threading the create path through an else.
        if UserModel.query.filter_by(username=username).first() is not None:
            return None
        # SECURITY: the password is persisted in plain text here. It should
        # be hashed (e.g. werkzeug.security.generate_password_hash) before
        # the commit — flagged, not changed, to preserve current behavior.
        user = UserModel(username=username, fname=fname, surname=surname,
                         email=email, password=password)
        db.session.add(user)
        db.session.commit()
        return CreateUser(user=user)
class Query(graphene.ObjectType):
    """Root query type.

    Single-object fields accept either a model attribute (``username`` /
    ``title``) or the Relay global ``id`` (the opaque string such as
    "VXNlcjoy" that the schema itself hands out) — so there is no need to
    rename the model's integer primary-key column.
    """
    node = relay.Node.Field()

    # Queries that return individual models. ``id`` is optional and
    # backward-compatible: existing username/title lookups still work.
    user = graphene.Field(lambda: User,
                          username=graphene.String(),
                          id=graphene.ID())
    event = graphene.Field(lambda: Event,
                           title=graphene.String(),
                           id=graphene.ID())

    # Queries that return all models of a given type (Relay-paginated).
    all_users = SQLAlchemyConnectionField(UserConnection)
    all_events = SQLAlchemyConnectionField(EventConnections)

    def resolve_user(self, info, **kwargs):
        # Prefer the Relay global ID when supplied: Node decodes
        # "VXNlcjoy" back to (type name, database pk) and fetches the row.
        global_id = kwargs.get("id")
        if global_id is not None:
            return relay.Node.get_node_from_global_id(info, global_id,
                                                      only_type=User)
        query = User.get_query(info)
        return query.filter(UserModel.username == kwargs.get("username")).first()

    def resolve_event(self, info, **kwargs):
        global_id = kwargs.get("id")
        if global_id is not None:
            return relay.Node.get_node_from_global_id(info, global_id,
                                                      only_type=Event)
        query = Event.get_query(info)
        return query.filter(EventModel.title == kwargs.get("title")).first()
class Mutation(graphene.ObjectType):
    """Root mutation type; currently exposes only user creation."""
    create_user = CreateUser.Field()
# Listing User/Event in ``types`` lets Relay's ``node`` field resolve them.
schema = graphene.Schema(query=Query, mutation=Mutation, types=[User, Event])
我的模型是這樣的:
from app import db
from datetime import datetime
class User(db.Model):
    """Application user account.

    Note: graphene-sqlalchemy exposes a *separate* Relay global ID under the
    GraphQL ``id`` field; this integer column does not conflict with it and
    does not need to be renamed.
    """
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(30), unique=True, nullable=False)
    fname = db.Column(db.String(20), nullable=False)
    surname = db.Column(db.String(35), nullable=False)
    is_verified = db.Column(db.Boolean(), default=False)
    profile_pic = db.Column(db.Text(), default="default.jpg")
    email = db.Column(db.String(120), nullable=False)
    # SECURITY: stores the raw password; 50 chars is also too short for most
    # password hashes — hash before storing and widen this column.
    password = db.Column(db.String(50), nullable=False)
    events = db.relationship("Event", backref="organizer", lazy=True)

    def __repr__(self):
        # Fixed: the original printed (so __repr__ returned None), referenced
        # the nonexistent ``self.name`` attribute, and the f-string literal
        # was missing its closing ")".
        return f"User({self.id}, {self.fname}, {self.surname}, {self.email})"
class Event(db.Model):
    """Event created (organized) by a user."""
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(120), nullable=False)
    # Fixed: the original passed ``datetime.now().isoformat()`` — evaluated
    # once at import time, so every row received the server start-up
    # timestamp. A callable default is re-evaluated on each INSERT.
    date_posted = db.Column(db.String(50), nullable=False,
                            default=lambda: datetime.now().isoformat())
    description = db.Column(db.Text)
    user_id = db.Column(db.Integer, db.ForeignKey("user.id"), nullable=False)
因此,如果我想獲取所有用戶,請輸入以下內容:
query{
allUsers{
edges{
node{
id
username
email
}
}
}
}
當我得到 id 時,我會得到類似"id": "VXNlcjoy"
但在我的模型中,id 是一個整數。如果我直接讓解析器按前面那個字串 id 過濾,sqlalchemy 會拋出錯誤,因為數據庫中的 id 是整數。然後我把解析器改成接受整數 id,按真實的數據庫 id 過濾。更改後過濾確實能運作,但查詢回傳的 id 仍然是編碼後的字串:
{
user(id: 1){
id
}
}
回復:
{
"data": {
"user": {
"id": "VXNlcjoy"
}
}
}
我想了一下,我認為有兩種方法:
- 讓 graphql 將數據庫中的整數 id 返回給我
- 以某種方式通過 graphql 給出的字符串 id 使解析器查詢用戶和事件(“VXNlcjoy”)
我怎樣才能做到這一點?
我自己找到了原因:我的模型欄位 id 與 Relay Node 介面暴露的 GraphQL ``id`` 欄位同名——後者是 graphene 為快取與分頁產生的 base64 全域識別碼,會遮蔽原始整數值。
於是我在 models.py 中把列 id 重命名為 uuid,
並把 Event 表中的外鍵從
user_id = db.Column(db.Integer, db.ForeignKey("user.id"), nullable=False)
到
user_uuid = db.Column(db.Integer, db.ForeignKey("user.uuid"), nullable=False)
得到了這個:
mutation{
createUser(email:"saki1@saki.com", fname:"saki",surname:"sakal",username:"saki709", password:"sakisaki"){
user{
uuid
id
}
}
}
回復:
{
"data": {
"createUser": {
"user": {
"uuid": "1",
"id": "VXNlcjox"
}
}
}
}
TL;DR - 將 id 重命名為 uuid,因為 Relay 的 Node 介面已把 GraphQL 的 ``id`` 欄位保留給全域 ID(另一種不改模型的做法:讓解析器接受全域 ID,並用 relay.Node.get_node_from_global_id 解碼查詢)。
聲明:本站的技術帖子網頁,遵循CC BY-SA 4.0協議,如果您需要轉載,請注明本站網址或者原文地址。任何問題請咨詢:yoyou2525@163.com.