This is code I used to move files between sub-directories of an S3 bucket.
# =============================================================================
# CODE TO MOVE FILES within subfolders in S3 BUCKET
# =============================================================================
from boto3.session import Session

# --- AWS connection setup ---------------------------------------------------
# NOTE(review): credentials are hard-coded placeholders; prefer the standard
# AWS credential chain (env vars / ~/.aws/credentials) in real use.
ACCESS_KEY = 'a_key'
SECRET_KEY = 's_key'

session = Session(
    aws_access_key_id=ACCESS_KEY,
    aws_secret_access_key=SECRET_KEY,
)
s3 = session.resource('s3')      # resource API — used below for copy/delete
s3client = session.client('s3')  # client API — used below for listing

# Initial listing of keys directly under the source prefix.
# list_objects returns at most 1000 keys per call; the [1:] slice drops the
# first entry — presumably the prefix placeholder object itself (TODO confirm).
resp_dw = s3client.list_objects(Bucket='main_bucket', Prefix='sub_folder/', Delimiter="/")
forms2_dw = [entry['Key'] for entry in resp_dw['Contents'][1:]]
# ---------------------------------------------------------------------------
# Move loop: repeatedly list up to 1000 keys under the source prefix, copy
# each *.zip into new_sub_folder/<YYYYMMDD>/ (date taken from the object's
# LastModified), delete the original, then re-list — list_objects caps each
# response at 1000 keys, so multiple passes may be needed.
# ---------------------------------------------------------------------------
reload_no = 0
while len(forms2_dw) != 0:
    contents = resp_dw['Contents'][1:]  # hoisted; skip the prefix placeholder entry
    total_files = len(forms2_dw)
    moved = 0
    for i in range(total_files):
        # Destination folder name = object's last-modified date (adjust as needed).
        foldername = contents[i]['LastModified'].strftime('%Y%m%d')
        my_bcket = 'main_bucket'
        my_file_old = contents[i]['Key']  # source key to be copied
        zip_filename = my_file_old.split('/')[-1]
        my_file_new = 'new_sub_folder/' + foldername + "/" + zip_filename  # destination key
        # FIX: original printed the undefined name `s3_archive_subpath_nw`,
        # raising NameError on every iteration.
        print(str(reload_no) + '::: copying from====:' + my_file_old + ' to :=====' + my_file_new)
        if zip_filename[-4:] == '.zip':
            s3.Object(my_bcket, my_file_new).copy_from(CopySource=my_bcket + '/' + my_file_old)
            s3.Object(my_bcket, my_file_old).delete()
            moved += 1
        print(str(i) + ' files moved of ' + str(total_files))
    if moved == 0:
        # Only non-.zip keys remain; nothing was deleted, so re-listing would
        # return the same set forever. Without this guard the original looped
        # infinitely in that case.
        break
    # FIX: original re-listed with Prefix='sub-folder/' (hyphen) instead of the
    # source prefix 'sub_folder/' (underscore), so later passes never saw the
    # remaining backlog. Also guard against a response with no 'Contents' key.
    resp_dw = s3client.list_objects(Bucket='main_bucket', Prefix='sub_folder/', Delimiter="/")
    forms2_dw = [x['Key'] for x in resp_dw.get('Contents', [])[1:]]
    reload_no += 1