-
Notifications
You must be signed in to change notification settings - Fork 0
/
s3_buckets_remover.py
80 lines (63 loc) · 2.9 KB
/
s3_buckets_remover.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
import boto3
import concurrent.futures
import datetime
# Module-level clients built from the default credential/region chain.
# `s3` is used in the __main__ block below for get_bucket_location calls.
# NOTE(review): `s3_resource` appears unused in this file — delete_bucket_new
# builds its own region-scoped resource; confirm before removing.
s3_resource = boto3.resource('s3')
s3 = boto3.client('s3')
def read_bucket_names_from_config(file_path):
    """Load bucket names from a text config file, one name per line.

    Leading/trailing whitespace is stripped and blank lines are skipped.
    Returns a list of non-empty bucket-name strings in file order.
    """
    names = []
    with open(file_path, 'r') as config:
        for raw_line in config:
            name = raw_line.strip()
            if name:
                names.append(name)
    return names
def delete_bucket_new(each_bucket, region_info):
    """Empty and delete a single S3 bucket, logging progress with timestamps.

    Args:
        each_bucket: Name of the bucket to remove.
        region_info: AWS region the bucket lives in (e.g. 'us-east-1').

    All exceptions are caught and logged rather than propagated, so this is
    safe to run as a fire-and-forget worker in a thread pool. A bucket that
    no longer exists is reported as already deleted, not as a failure.
    """
    def _now():
        # Fresh timestamp for each log line (the original captured one
        # timestamp at start and reused it on the success message).
        return datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')

    try:
        print(
            f"[{_now()}] - Starting removal of bucket {each_bucket} in region {region_info}")
        s3_resource_new = boto3.resource('s3', region_name=region_info)
        bucket_new = s3_resource_new.Bucket(each_bucket)
        # Batch-delete all current objects: the collection's .delete() issues
        # DeleteObjects requests of up to 1000 keys each, instead of one
        # DeleteObject API call per key as the original loop did.
        bucket_new.objects.all().delete()
        print(f"Thread: {each_bucket} - all objects deleted")
        # Remove remaining versions and delete markers (versioned buckets
        # must be fully emptied before the bucket itself can be deleted).
        bucket_new.object_versions.all().delete()
        print(f"Thread: {each_bucket} - all versions deleted")
        # Delete the (now empty) bucket via the resource — no need for a
        # separate region-scoped client.
        bucket_new.delete()
        print(f"[{_now()}] - Bucket {each_bucket} deleted successfully")
    except Exception as e:
        # String matching on the error text mirrors the original behavior;
        # a ClientError code check would be stricter but requires botocore.
        if 'NoSuchBucket' in str(e):
            print(
                f"[{_now()}] - Bucket {each_bucket} no longer exists, already deleted")
        else:
            print(
                f"[{_now()}] - Failed to delete bucket {each_bucket}. Error: {str(e)}")
if __name__ == '__main__':
    buckets = read_bucket_names_from_config('./remove-bucket-list.txt')
    # Resolve each bucket's region up front (sequentially, outside the pool):
    # buckets whose region cannot be determined are skipped with a message.
    params = []
    for each_bucket in buckets:
        try:
            location = s3.get_bucket_location(Bucket=each_bucket)[
                'LocationConstraint']
            # get_bucket_location returns None for us-east-1 buckets.
            params.append((each_bucket, location or 'us-east-1'))
        except Exception as err:
            print(
                f'Failed to get region info for bucket {each_bucket}. Error: {err}')
    print('Bucket collection complete!')
    # Delete the buckets in parallel. submit() takes the callable's arguments
    # directly — no lambda-unpacking wrapper needed.
    with concurrent.futures.ThreadPoolExecutor() as executor:
        futures = [executor.submit(delete_bucket_new, name, region)
                   for name, region in params]
        # Wait for each worker and surface any exception it raised, instead
        # of discarding the futures as the original did.
        for future in concurrent.futures.as_completed(futures):
            future.result()