# Command-line interface. Credentials may be given directly (-k/-s), via a
# key file (-i), or fall back to the default key file read later in the
# script. None (not a list) is the idiomatic "not supplied" sentinel for
# string options; the truthiness checks below work unchanged.
parser = ArgumentParser()
parser.add_argument("-k", "--key", dest="accesskey", default=None,
                    help="AWS Access Key")
parser.add_argument("-s", "--secretkey", dest="secretkey", default=None,
                    help="AWS Secret Access Key")
parser.add_argument("-d", "--directory", dest="base_dir", default=None,
                    help="Directory to upload")
parser.add_argument("-b", "--bucket", dest="bucketname", default=None,
                    help="Name of S3 bucket")
# NOTE(review): original help text was a copy-paste of -k ("AWS Access Key")
parser.add_argument("-i", "--keyfile", dest="keyfile", default=None,
                    help="Path to a file containing the AWS access keys")
args = parser.parse_args()
1821def getListOfFiles (dirName ):
@@ -55,13 +58,21 @@ def metadataType(file):
# If the access keys were not passed as inputs, load them from a key file:
# either the path given with -i/--keyfile or the default file in the
# working directory. AccessKeys ends up as [access_key, secret_key].
if not args.accesskey or not args.secretkey:

    # Fall back to the default key file when no path was supplied
    keypath = args.keyfile if args.keyfile else 'aws_access_keys.txt'

    # Load and read the secure access keys to connect to the AWS S3 client
    if not os.path.isfile(keypath):
        print("AWS access keys not found. You may not have rights to request this action.")
        exit()
    with open(keypath, 'r') as keyfile:
        # strip() (not rstrip('\n')) also removes the \r left behind by
        # key files saved with Windows line endings
        AccessKeys = [line.strip() for line in keyfile]
else:
    AccessKeys = [args.accesskey, args.secretkey]
@@ -78,24 +89,27 @@ def metadataType(file):
7889 aws_access_key_id = AccessKeys [0 ],
7990 aws_secret_access_key = AccessKeys [1 ],
8091 region_name = 'eu-west-1' )
# Target bucket and local source directory come from the command line
bucketname = str(args.bucketname)
bucket = s3.Bucket(bucketname)

# Normalise the directory: drop any trailing '/' before scanning, then
# append exactly one so that base_dir + <bucket key> forms a local path
base_dir = str(args.base_dir).rstrip('/')
# Get full list of local files
localFiles = getListOfFiles(base_dir)
base_dir += '/'
# First pass: walk every object already in the bucket. Anything that still
# exists locally is ticked off localFiles; anything gone locally is deleted
# from the bucket.
for key in bucket.objects.all():

    # Full local path of the current bucket object. base_dir already ends
    # with '/', so plain concatenation is correct here — the previous
    # '/'.join([base_dir, key.key]) produced 'base//key', which never
    # matched the entries in localFiles and forced a re-upload of every file.
    file = base_dir + key.key

    # Remove bucket file from list of local files
    if file in localFiles:
        localFiles.remove(file)

    # If file has been removed from the local source, remove it from the bucket
    if not os.path.isfile(file):
        print("Removing ", file, " from bucket, not found in local source...")
        s3_client.delete_object(Bucket=bucketname, Key=key.key)
        continue
101115 #Get last modified date of local files
@@ -107,13 +121,14 @@ def metadataType(file):
107121
108122 #Update the file if the local source file is newer than the version in the S3 bucket
109123 if modifyDate > key .last_modified :
110- print ("Updating" , file , "in web bucket... " )
111- s3 .meta .client .upload_file (file , 'deeranalysis.org' , key .key , ExtraArgs = {'ContentType' : metadataType (file )} )
124+ print ("Updating" , file , "in " , bucketname , " bucket... " )
125+ s3 .meta .client .upload_file (file , bucketname , key .key , ExtraArgs = {'ContentType' : metadataType (file )} )
112126
# Second pass: upload the remaining local files which are still not in the
# bucket (everything left in localFiles after the first pass).
for local_file in localFiles:
    # Bucket key is the path relative to the base directory
    key = local_file.replace(base_dir, "")
    print("Adding", key, "to", bucketname, "bucket... ")
    s3.meta.client.upload_file(local_file, bucketname, key,
                               ExtraArgs={'ContentType': metadataType(key)})
# Sync complete: bucket now mirrors the local source directory
print("Finished: AWS S3 ", bucketname, " bucket is up to date.")