@@ -33,8 +33,16 @@ def lambda_handler(event, context):
        s3.download_file(os.environ['BUCKET_NAME'], s3_repo_dir + '/repodata/' + f, repo.repodir + 'repodata/' + f)
    repo.read()
    print('Creating Metadata files')
-    repo, cache = check_changed_files(repo, s3_repo_dir)
-    #Check if the object was removed
+    if event['Records'][0]['eventName'].startswith('ObjectCreated'):
+        print('Handling ObjectCreated event')
+        repo, cache = check_changed_files(repo, s3_repo_dir, newfile=event['Records'][0]['s3']['object']['key'])
+    else:
+        repo, cache = check_changed_files(repo, s3_repo_dir)
+    #save the cache to the bucket
+    s3 = boto3.resource('s3')
+    f_index_obj = s3.Object(bucket_name=os.environ['BUCKET_NAME'], key=s3_repo_dir + '/repo_cache')
+    print("Writing file: %s" % (str(f_index_obj)))
+    f_index_obj.put(Body=str(json.dumps(cache)))


    repo.save()
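For reference: the dispatch added above relies on the standard S3 event notification layout (Records[0].eventName and Records[0].s3.object.key). A minimal illustrative payload, with a hypothetical bucket and key:

    sample_event = {
        "Records": [{
            "eventName": "ObjectCreated:Put",
            "s3": {
                "bucket": {"name": "my-repo-bucket"},           # hypothetical
                "object": {"key": "repo/x86_64/foo-1.0.rpm"}    # hypothetical
            }
        }]
    }

    # With this payload the branch above takes the ObjectCreated path and
    # passes the object key through to check_changed_files as newfile.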
@@ -43,16 +51,12 @@ def lambda_handler(event, context):
    sign_md_file(repo, s3_repo_dir)

    #save files to bucket
-    s3 = boto3.resource('s3')
    for f in files:
        with open(repo.repodir + 'repodata/' + f, 'rb') as g:
            f_index_obj = s3.Object(bucket_name=os.environ['BUCKET_NAME'], key=s3_repo_dir + '/repodata/' + f)
            print("Writing file: %s" % (str(f_index_obj)))
            f_index_obj.put(Body=g.read(-1), ACL=get_public())
-    f_index_obj = s3.Object(bucket_name=os.environ['BUCKET_NAME'], key=s3_repo_dir + '/repo_cache')
-    print("Writing file: %s" % (str(f_index_obj)))
-    f_index_obj.put(Body=str(json.dumps(cache)))
-
+
    #Let us clean up
    shutil.rmtree(repo.repodir)
    if os.path.exists('/tmp/gpgdocs'):
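The repo_cache object is plain JSON keyed by file name. get_cache is not shown in this hunk, so as a rough sketch of what the matching read side could look like (an assumption, not the actual implementation):

    import json
    import os

    import boto3

    def load_repo_cache(s3_repo_dir):
        # Hypothetical reader for the repo_cache object written above.
        s3 = boto3.resource('s3')
        obj = s3.Object(bucket_name=os.environ['BUCKET_NAME'],
                        key=s3_repo_dir + '/repo_cache')
        try:
            return json.loads(obj.get()['Body'].read())
        except s3.meta.client.exceptions.NoSuchKey:
            return {}  # no cache yet: treat every package as new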
@@ -114,14 +118,35 @@ def get_cache(repo, s3_repo_dir):
        cache = {}
    return cache

-def check_changed_files(repo, s3_repo_dir):
+def remove_overwritten_file_from_cache(cache, newfile, s3_repo_dir, repo):
+    """
+    Remove an overwritten package from the metadata and the repo.
+    """
+    fname = newfile[len(s3_repo_dir):]  # '/filename.rpm' - without the path
+    print('file %s has been overwritten and will be removed from md and repo' % (fname))
+    pkg_id = cache[fname]
+    del cache[fname]
+
+    # save the cache in case a new event occurs
+    s3 = boto3.resource('s3')
+    f_index_obj = s3.Object(bucket_name=os.environ['BUCKET_NAME'], key=s3_repo_dir + '/repo_cache')
+    f_index_obj.put(Body=str(json.dumps(cache)))
+
+    repo.remove_package(pkg_id)
+    return cache
+
+
+def check_changed_files(repo, s3_repo_dir, newfile=None):
    """
    check if there are any new files in the bucket or any deleted files
    """
    print("Checking for changes : %s" % (s3_repo_dir))
    cache = get_cache(repo, s3_repo_dir)
    s3 = boto3.resource('s3')
    files = []
+    #if the file was overwritten and is already in the repo cache, drop it from the cache so the loop below adds the new version back
+    if newfile is not None and newfile[len(s3_repo_dir):] in cache:
+        cache = remove_overwritten_file_from_cache(cache, newfile, s3_repo_dir, repo)
    #cycle through all objects ending with .rpm in REPO_DIR and check if they are already in repodata, if not add them
    for obj in s3.Bucket(os.environ['BUCKET_NAME']).objects.filter(Prefix=s3_repo_dir):
        files.append(obj.key)
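The overwrite check hinges on newfile[len(s3_repo_dir):] yielding the same '/filename.rpm' form the cache uses as a key. A tiny worked example with hypothetical values:

    s3_repo_dir = 'repo/x86_64'
    newfile = 'repo/x86_64/foo-1.0.rpm'

    fname = newfile[len(s3_repo_dir):]
    assert fname == '/foo-1.0.rpm'  # leading slash kept, matching the cache keys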