I've been scratching my head on this for days now. I still couldn't solve the problem. Basically, I just wanted to put a CSV file in a LocalStack S3, and I can't get it working.
Here's the snippet of my code:
api.py
from files import s3, AWS_S3_BUCKET_NAME, upload_file_to_bucket
@router.post('/api/customer/generate/upload',
             name='Upload CSV to AWS S3 Bucket',
             status_code=201)
async def post_upload_user_csv(file_obj: UploadFile = File(...)):
    """Accept an uploaded CSV and forward it to the S3 bucket."""
    uploaded = upload_file_to_bucket(s3_client=s3(),
                                     file_obj=file_obj.file,
                                     bucket=AWS_S3_BUCKET_NAME,
                                     folder='CSV',  # To Be updated
                                     object_name=file_obj.filename)
    # Guard clause: fail fast when the upload helper reports an error.
    if not uploaded:
        raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                            detail="File could not be uploaded")
    return JSONResponse(content="Object has been uploaded to bucket successfully",
                        status_code=status.HTTP_201_CREATED)
files.py
import os
import boto3
import logging
from botocore.client import BaseClient
from botocore.exceptions import ClientError
# AWS credentials and bucket configuration, read from the environment.
# Bug fix: the access key was previously read from 'POSTGRES_HOST'
# (copy-paste error), so the S3 client always got a wrong/empty key.
AWS_ACCESS_KEY_ID = os.getenv('AWS_ACCESS_KEY_ID')
AWS_SECRET_KEY = os.getenv('AWS_SECRET_KEY')
AWS_S3_BUCKET_NAME = os.getenv('AWS_S3_BUCKET_NAME')
def s3() -> BaseClient:
    """Return a boto3 S3 client pointed at the LocalStack endpoint."""
    return boto3.client(service_name='s3',
                        aws_access_key_id=AWS_ACCESS_KEY_ID,
                        aws_secret_access_key=AWS_SECRET_KEY,
                        endpoint_url='http://localhost:4566/')  # Use LocalStack Endpoint
def upload_file_to_bucket(s3_client, file_obj, bucket, folder, object_name=None):
    """Upload a binary file-like object to an S3 bucket.

    :param s3_client: S3 Client
    :param file_obj: binary file-like object to upload
    :param bucket: Bucket to upload to
    :param folder: key prefix ("folder") inside the bucket
    :param object_name: S3 object name. If not specified, it is derived
        from the file object's ``name`` attribute when present.
    :return: True if file was uploaded, else False
    """
    if object_name is None:
        # Bug fix: the original assigned the file OBJECT itself as the key
        # (object_name = file_obj), producing a repr-string key. Use the
        # file's name attribute (basename only) when available instead.
        object_name = os.path.basename(getattr(file_obj, 'name', str(file_obj)))
    try:
        s3_client.upload_fileobj(file_obj, bucket, f"{folder}/{object_name}")
    except ClientError as e:
        logging.error(e)
        return False
    return True
The problem is that s3_client needs to open the file in binary mode first before I can upload it to s3 bucket. However, this can't be done directly and the file needs to be saved temporarily on the FastAPI server but I really don't want to do that for obvious reasons.
Any help will be much appreciated. Thank you in advance!
Can you show the error first? Basically, I have done this before: uploading a file directly from the front-end to AWS S3. Can you try adding ContentType
to upload_fileobj. Here is my code
# Guess the MIME type from the file path, then upload with explicit
# ContentType and a public-read ACL.
content_type = mimetypes.guess_type(fpath)[0]
s3.Bucket(bucket_name).upload_fileobj(
    Fileobj=file,
    Key=file_path,
    ExtraArgs={"ACL": "public-read", "ContentType": content_type},
)
Another approach: you could try converting the file to an io.BytesIO buffer first.
def s3_upload(self, file, file_path, bucket_name, width=None, height=None, make_thumb=False, make_cover=False):
    """Optimize *file* in memory, upload it to S3 with a public ACL, and return its URL."""
    client = boto3.resource(service_name='s3')
    # Run the image through the in-memory optimizer before uploading.
    optimized = self.image_optimize_from_buffer(file, width, height, make_thumb, make_cover)
    buffer = BytesIO(optimized)
    client.Bucket(bucket_name).upload_fileobj(
        Fileobj=buffer,
        Key=file_path,
        ExtraArgs={"ACL": "public-read", "ContentType": file.content_type},
    )
    return f'https://{bucket_name}.s3.amazonaws.com/{file_path}'
The technical post webpages of this site follow the CC BY-SA 4.0 protocol. If you need to reprint, please indicate the site URL or the original address.Any question please contact:yoyou2525@163.com.