![](/img/trans.png)
[英]How do you save multitype/form data to a hard file in Python with FastAPI UploadFile?
[英]How to save UploadFile in FastAPI
我通过 POST 接收文件。保存到本地时,我可以用 file.read() 读取内容,但是通过 file.name 得到的文件名不正确(例如 16)。当我试图用这个名字查找文件时,就会报错。可能是什么问题?
我的代码:
@router.post(
    path="/upload",
    response_model=schema.ContentUploadedResponse,
)
async def upload_file(
        background_tasks: BackgroundTasks,
        uploaded_file: UploadFile = File(...)):
    """Accept an uploaded file and schedule an S3 upload in the background.

    Responds immediately with ContentUploadedResponse; the actual S3
    transfer runs after the response is sent.
    """
    # Force the SpooledTemporaryFile onto disk and flush pending writes
    # so the background task sees the complete content.
    uploaded_file.file.rollover()
    uploaded_file.file.flush()
    # BUG FIX: the original passed an undefined name `fp`; the task must
    # receive the UploadFile that was just flushed.
    background_tasks.add_task(s3_upload, uploaded_file=uploaded_file)
    return schema.ContentUploadedResponse()
UploadFile
只是SpooledTemporaryFile
的包装,可以作为UploadFile.file
访问。
SpooledTemporaryFile() [...] 函数的运行方式与 TemporaryFile() 完全相同
关于TemporaryFile
的文档说:
返回一个可以用作临时存储区域的类文件对象。[...] 它一关闭就会被销毁(包括对象被垃圾回收时的隐式关闭)。在 Unix 下,该文件的目录条目要么根本不会创建,要么在文件创建后立即被删除。其他平台不支持这一点;您的代码不应依赖使用此函数创建的临时文件在文件系统中具有(或不具有)可见的名称。
async def
端点您应该使用UploadFile
的以下异步方法: write
、 read
、 seek
和close
。 它们在线程池中执行并异步等待。
对于将文件异步写入磁盘,您可以使用aiofiles
。 例子:
@app.post("/")
async def post_endpoint(in_file: UploadFile = File(...)):
    """Save the uploaded file to `out_file_path` using async file I/O.

    NOTE(review): `out_file_path` must be defined elsewhere in the module.
    """
    # ...
    async with aiofiles.open(out_file_path, 'wb') as out_file:
        content = await in_file.read()  # async read (whole body in memory)
        await out_file.write(content)  # async write
    return {"Result": "OK"}
或者以分块的方式,以免将整个文件加载到内存中:
@app.post("/")
async def post_endpoint(in_file: UploadFile = File(...)):
    """Save the upload in fixed-size chunks so the whole file is never in RAM.

    NOTE(review): `out_file_path` must be defined elsewhere in the module.
    """
    # ...
    async with aiofiles.open(out_file_path, 'wb') as out_file:
        while content := await in_file.read(1024):  # async read chunk
            await out_file.write(content)  # async write chunk
    return {"Result": "OK"}
def
端点。另外,我想引用本主题中的几个有用的实用工具函数(全部归功于 @dmontagu),它们使用shutil.copyfileobj
和内部UploadFile.file
:
import shutil
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import Callable
from fastapi import UploadFile
def save_upload_file(upload_file: UploadFile, destination: Path) -> None:
    """Copy `upload_file`'s content to `destination`.

    The upload's underlying file object is always closed, even if the
    copy fails, so the endpoint does not leak the spooled temp file.
    """
    try:
        with destination.open("wb") as buffer:
            shutil.copyfileobj(upload_file.file, buffer)
    finally:
        upload_file.file.close()
def save_upload_file_tmp(upload_file: UploadFile) -> Path:
    """Persist `upload_file` to a named temporary file and return its path.

    The temp file is created with delete=False, so the caller owns it and
    must unlink it when done. The original extension is preserved via the
    suffix. The upload's file object is always closed.
    """
    try:
        suffix = Path(upload_file.filename).suffix
        with NamedTemporaryFile(delete=False, suffix=suffix) as tmp:
            shutil.copyfileobj(upload_file.file, tmp)
        tmp_path = Path(tmp.name)
    finally:
        upload_file.file.close()
    return tmp_path
def handle_upload_file(
    upload_file: UploadFile, handler: Callable[[Path], None]
) -> None:
    """Save the upload to a temp file, run `handler` on it, then delete it.

    The temp file is removed even if `handler` raises.
    """
    tmp_path = save_upload_file_tmp(upload_file)
    try:
        handler(tmp_path)  # Do something with the saved temp file
    finally:
        tmp_path.unlink()  # Delete the temp file
注意:您希望在
def
端点中使用上述函数,而不是async def
,因为它们使用了阻塞 API。
您可以通过这种方式保存上传的文件,
from fastapi import FastAPI, File, UploadFile

app = FastAPI()


@app.post("/upload-file/")
async def create_upload_file(uploaded_file: UploadFile = File(...)):
    """Write the upload synchronously to files/<original filename>.

    NOTE(review): the `files/` directory must already exist, and the
    whole body is read into memory by `.read()`.
    """
    # FIX: the original snippet had three statements fused onto one line.
    file_location = f"files/{uploaded_file.filename}"
    with open(file_location, "wb+") as file_object:
        file_object.write(uploaded_file.file.read())
    return {"info": f"file '{uploaded_file.filename}' saved at '{file_location}'"}
这与shutil.copyfileobj(...)
方法的用法几乎相同。
所以,上面的函数可以重写为,
import shutil

from fastapi import FastAPI, File, UploadFile

app = FastAPI()


@app.post("/upload-file/")
async def create_upload_file(uploaded_file: UploadFile = File(...)):
    """Save the upload to files/<original filename> via a chunked copy.

    shutil.copyfileobj streams in fixed-size chunks, so the whole body
    is not held in memory at once.
    """
    file_location = f"files/{uploaded_file.filename}"
    with open(file_location, "wb+") as file_object:
        shutil.copyfileobj(uploaded_file.file, file_object)
    return {"info": f"file '{uploaded_file.filename}' saved at '{file_location}'"}
就我而言,我需要处理大文件,所以我必须避免将它们全部读入内存。 我想要的是将它们以块的形式异步保存到磁盘。
我正在对此进行试验,它似乎可以完成这项工作(CHUNK_SIZE 的选择非常随意,需要进一步测试才能找到最佳尺寸):
import os
import logging
from fastapi import FastAPI, BackgroundTasks, File, UploadFile
# Module-level logger used by chunked_copy for progress messages.
log = logging.getLogger(__name__)
app = FastAPI()
# Directory uploads are written to; "/" is a placeholder for the example.
DESTINATION = "/"
# Bytes read per iteration; 1 MiB chosen arbitrarily, tune as needed.
CHUNK_SIZE = 2 ** 20  # 1 MiB
async def chunked_copy(src, dst):
    """Copy the async-readable `src` (e.g. an UploadFile) to path `dst` in chunks.

    Reads CHUNK_SIZE bytes per iteration so the full upload never sits in
    memory. NOTE(review): the destination is opened with blocking I/O, so
    writes run on the event loop thread.
    """
    await src.seek(0)  # rewind in case the upload was already read
    with open(dst, "wb") as buffer:
        while True:
            contents = await src.read(CHUNK_SIZE)
            if not contents:
                log.info(f"Src completely consumed\n")
                break
            log.info(f"Consumed {len(contents)} bytes from Src file\n")
            buffer.write(contents)
@app.post("/uploadfile/")
async def create_upload_file(file: UploadFile = File(...)):
    """Stream the upload to DESTINATION/<filename> and report the final path."""
    fullpath = os.path.join(DESTINATION, file.filename)
    await chunked_copy(file, fullpath)
    return {"File saved to disk at": fullpath}
但是,我很快意识到create_upload_file
在文件被完全接收之前不会被调用。 所以,如果这个代码片段是正确的,它可能对性能有好处,但不会启用任何东西,比如向客户端提供关于上传进度的反馈,它会在服务器中执行完整的数据复制。 不能只访问原始 UploadFile 临时文件,刷新它并将其移动到其他地方,从而避免复制,这似乎很愚蠢。
您可以通过复制和粘贴以下代码来保存文件。
# FIX: the original snippet was missing `from` and a comma after FastAPI.
from fastapi import (
    FastAPI,
    UploadFile,
    File,
    status,
)
from fastapi.responses import JSONResponse
import aiofiles

app = FastAPI(debug=True)


@app.post("/upload_file/", response_description="", response_model="")
async def result(file: UploadFile = File(...)):
    """Save one upload to the working directory; report success or the error."""
    try:
        async with aiofiles.open(file.filename, 'wb') as out_file:
            content = await file.read()  # async read
            await out_file.write(content)  # async write
    except Exception as e:
        return JSONResponse(
            status_code=status.HTTP_400_BAD_REQUEST,
            content={'message': str(e)},
        )
    else:
        return JSONResponse(
            status_code=status.HTTP_200_OK,
            content={"result": 'success'},
        )
如果您想上传多个文件,请复制粘贴以下代码
# FIX: the original snippet was missing `from`, a comma after FastAPI,
# and the imports for List and Depends; it also decorated with an
# undefined `router` instead of the `app` defined below.
from typing import List

from fastapi import (
    FastAPI,
    UploadFile,
    File,
    status,
    Depends,
)
from fastapi.responses import JSONResponse
import aiofiles

app = FastAPI(debug=True)


@app.post("/upload_multiple_file/", response_description="", response_model="")
async def result(files: List[UploadFile] = File(...),
                 secret_key: str = Depends(secretkey_middleware)):
    """Save every uploaded file; any failure returns 400 with the message.

    NOTE(review): `eventid` and `secretkey_middleware` must be defined
    elsewhere; `eventid` prefixes each saved filename.
    """
    try:
        for file in files:
            async with aiofiles.open(eventid + file.filename, 'wb') as out_file:
                content = await file.read()
                await out_file.write(content)
    except Exception as e:
        return JSONResponse(
            status_code=status.HTTP_400_BAD_REQUEST,
            content={'message': str(e)},
        )
    else:
        return JSONResponse(
            status_code=status.HTTP_200_OK,
            content={"result": 'result'},
        )
使用这个助手 function 来保存文件
from fastapi import UploadFile
import shutil
from pathlib import Path
def save_upload_file(upload_file: UploadFile, destination: Path) -> str:
    """Copy the upload to `destination` and return the saved file's name.

    The upload's file object is always closed. Note: `buffer.name` is
    whatever was passed to `Path.open`, so it may be a Path rather than
    a str despite the annotation. (Removed a leftover debug print.)
    """
    try:
        with destination.open("wb") as buffer:
            shutil.copyfileobj(upload_file.file, buffer)
            file_name = buffer.name
    finally:
        upload_file.file.close()
    return file_name
使用这个 function 为每个保存文件指定一个唯一的名称,假设您将保存多个文件
def unique_id() -> str:
    """Return a fresh UUID4 string, used to give each saved file a unique name."""
    return str(uuid.uuid4())


def delete_file(filename):
    """Remove `filename` from disk (raises FileNotFoundError if absent)."""
    os.remove(filename)
在您的端点中
@router.post("/use_upload_file", response_model=dict)
async def use_uploaded_file(
    file_one: UploadFile = File(),
    file_two: UploadFile = File()
):
    """Save both uploads under unique names, process them, then clean up.

    BUG FIX: the original body referenced undefined `audio_one`/`audio_two`;
    the parameters are named `file_one`/`file_two`. Cleanup now runs even
    if the processing function raises.
    """
    file_one_path = save_upload_file(file_one, Path(f"{unique_id()}"))
    file_two_path = save_upload_file(file_two, Path(f"{unique_id()}"))
    try:
        result = YourFunctionThatUsestheSaveFile(file_one_path, file_two_path)
    finally:
        delete_file(file_one_path)
        delete_file(file_two_path)
    return result
通过 Endpoints(发布请求)在 fast-API 中上传文件的代码:
@router.post(path="/test", tags=['File Upload'])
def color_classification_predict(uploadFile: UploadFile):
    """Save the uploaded file into `saved_dir` (sync `def` endpoint).

    NOTE(review): `saved_dir` must be defined at module level.
    """
    try:
        if uploadFile.filename:
            # saved_dir - directory path where we'll save the uploaded file
            test_filename = os.path.join(saved_dir, uploadFile.filename)
            with open(test_filename, "wb+") as file_object:
                shutil.copyfileobj(uploadFile.file, file_object)
    except Exception:
        # Bare raise preserves the original traceback (vs `raise e`).
        raise
    print('[INFO] Uploaded file saved.')
只是这样做是为了上传文件并且工作正常。
from fastapi import APIRouter, File, status, Depends, HTTPException, UploadFile
import shutil
from pathlib import Path
from database.user_functions import *
from database.auth_functions import *
from database.form_functions import *
from model import *
from model_form import *
file_routes = APIRouter()


@file_routes.post("/upload-file/")
async def create_upload_file(uploaded_file: UploadFile = File(...)):
    """Stream the upload into the current working directory via shutil.

    (Removed two commented-out example endpoints that were dead code.)
    """
    file_location = f"./{uploaded_file.filename}"
    with open(file_location, "wb+") as file_object:
        shutil.copyfileobj(uploaded_file.file, file_object)
    return {"info": f"file '{uploaded_file.filename}' saved at '{file_location}'"}
说实话,我是在 Medium 上的一篇文章里找到它的。
声明:本站的技术帖子网页,遵循CC BY-SA 4.0协议,如果您需要转载,请注明本站网址或者原文地址。任何问题请咨询:yoyou2525@163.com.