I can successfully create files in a bucket of Linode Object Storage, but deleting all of the files in that bucket raises an error.
import boto3

# Linode Object Storage is S3-compatible: the stock boto3 S3 client works
# as long as the custom endpoint_url is supplied.
cfg = {
    "aws_access_key_id": "XXXXXXXXXXXXXXXXXX",
    "aws_secret_access_key": "XXXXXXXXXXXXXXXXXXXXXXXX",
    "endpoint_url": "*********************",
}

S3_BUCKET = "test"


def empty_s3_bucket():
    """Delete every object in S3_BUCKET, following pagination.

    list_objects_v2 returns at most 1000 keys per call; further pages are
    fetched with NextContinuationToken until IsTruncated is False.
    """
    client = boto3.client(
        's3',
        **cfg,
    )
    # BUG FIX: the original paginated with
    #   StartAfter=response['Contents'][0]['Key']
    # i.e. the FIRST key of the current page, which makes the service
    # return the same page over and over. Pagination must resume after the
    # LAST key — or, more robustly, via the continuation token used here.
    response = client.list_objects_v2(Bucket=S3_BUCKET)
    while True:
        # An empty bucket has no 'Contents' key; default to [].
        for item in response.get('Contents', []):
            print('deleting file', item['Key'])
            client.delete_object(Bucket=S3_BUCKET, Key=item['Key'])
        if not response.get('IsTruncated'):
            break
        response = client.list_objects_v2(
            Bucket=S3_BUCKET,
            ContinuationToken=response['NextContinuationToken'],
        )


empty_s3_bucket()
The code above cannot delete all of the files in the object storage, although I can delete a single file using different logic. The code above produces the following error:
Traceback (most recent call last):
File "c:/********/linode_empty.py", line 30, in <module>
empty_s3_bucket()
File "c:/*********/linode_empty.py", line 16, in empty_s3_bucket
response = client.list_objects_v2(Bucket=S3_BUCKET)
File "C:\********\venv\lib\site-packages\botocore\client.py", line 357, in _api_call
return self._make_api_call(operation_name, kwargs)
File "C:\*******\venv\lib\site-packages\botocore\client.py", line 676, in _make_api_call
raise error_class(parsed_response, operation_name)
botocore.errorfactory.NoSuchKey: An error occurred (NoSuchKey) when calling the ListObjectsV2 operation: Unknown
I tried the different approaches suggested in an old Stack Overflow post about this (Amazon S3 boto - how to delete folder?) but got the same error.
Try this: it will collect all the keys and batch-delete them 1000 at a time.
import math
# Module-level S3 handles shared by the code below.
# NOTE(review): these rely on default AWS credentials/endpoint resolution —
# for Linode, pass the endpoint_url/keys as in the question's config.
s3sr = boto3.resource('s3')  # resource API; its meta.client supplies the paginator
s3sc = boto3.client('s3')    # plain client; used for batch delete_objects
def get_list_of_keys_from_prefix(bucket, prefix):
    """Return every object key under *prefix* in *bucket*.

    Uses the list_objects_v2 paginator, so buckets holding more than 1000
    objects are handled transparently.

    :param bucket: bucket name
    :param prefix: key prefix to search under ('' for the whole bucket)
    :return: list of key strings
    """
    keys_list = []
    paginator = s3sr.meta.client.get_paginator('list_objects_v2')
    # use Delimiter to limit search to that level of hierarchy
    for page in paginator.paginate(Bucket=bucket, Prefix=prefix, Delimiter='/'):
        # BUG FIX: a page with no objects has no 'Contents' key, so
        # page.get('Contents') is None and the comprehension raised
        # TypeError. Defaulting to [] makes empty pages a no-op.
        keys = [content['Key'] for content in page.get('Contents', [])]
        keys_list.extend(keys)
    print('total keys in bucket: ', len(keys_list))
    return keys_list
bucket = 'test'
prefix = ''  # if you have 'subfolders' enter the prefix, otherwise use ''

keys_list = get_list_of_keys_from_prefix(bucket, prefix)
total_keys = len(keys_list)

# delete_objects accepts at most 1000 keys per request, so delete in chunks.
chunk_size = 1000
num_batches = math.ceil(total_keys / chunk_size)

for b in range(num_batches):
    batch_to_delete = [
        {'Key': k} for k in keys_list[chunk_size * b:chunk_size * (b + 1)]
    ]
    # BUG FIX: the original wrote
    #   Delete={'Objects': batch_to_delete,},'Quiet': True)
    # which is a SyntaxError — 'Quiet' must live INSIDE the Delete dict.
    s3sc.delete_objects(
        Bucket=bucket,
        Delete={'Objects': batch_to_delete, 'Quiet': True},
    )
Since my bucket holds fewer than 1000 objects, I'm using the following simplified variant of the previous code:
import boto3

# Linode Object Storage connection settings — fill these in before running.
linode_obj_config = {
    "aws_access_key_id": "",
    "aws_secret_access_key": "",
    "endpoint_url": "",
}

# Shared S3 client used by the listing helper and the delete loop below.
client = boto3.client("s3", **linode_obj_config)
def get_list_of_keys_from_prefix(bucket, prefix):
    """Return the keys of all objects under *prefix* in *bucket*.

    NOTE: list_objects returns at most 1000 keys per call and this helper
    does not paginate, so it is only suitable for buckets holding fewer
    than 1000 objects.

    :param bucket: bucket name
    :param prefix: key prefix to search under ('' for the whole bucket)
    :return: list of key strings
    """
    keys_list = []
    response = client.list_objects(Bucket=bucket, Prefix=prefix)
    # BUG FIX: an empty bucket/prefix yields no 'Contents' key, so the
    # original response['Contents'] raised KeyError; default to [] and
    # return an empty list instead.
    for obj in response.get('Contents', []):
        keys_list.append(obj["Key"])
    return keys_list
bucket = ''
prefix = ''

# Collect every key under the prefix, then remove the objects one by one.
for key in get_list_of_keys_from_prefix(bucket, prefix):
    client.delete_object(Bucket=bucket, Key=key)
The technical post webpages of this site follow the CC BY-SA 4.0 protocol. If you need to reprint, please indicate the site URL or the original address.Any question please contact:yoyou2525@163.com.