
Commit 8e60ae9

Author: Janez Justin (committed)
FIX overwriting packages in bucket: now re-adds the package to the repo if it is overwritten
1 parent d463e7e commit 8e60ae9

2 files changed: +35 -10 lines changed
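As the commit message and the diffs below suggest, overwriting an RPM that is already tracked in the repo cache previously left the stale metadata entry in place, so the fresh upload was never picked up; the new code drops the stale entry first so the normal scan re-adds it. A minimal standalone sketch of that idea follows. FakeRepo, handle_object_created, and the in-memory cache dict are purely illustrative stand-ins for the YumRepository wrapper and the S3-backed repo_cache file used in the actual module.

# Illustrative sketch only, not the committed code.
import json

class FakeRepo:
    """Stand-in for the YUM repository metadata wrapper."""
    def __init__(self):
        self.packages = {}                     # pkg_id -> filename

    def add_package(self, pkg_id, fname):
        self.packages[pkg_id] = fname

    def remove_package(self, pkg_id):
        self.packages.pop(pkg_id, None)

def handle_object_created(cache, repo, s3_repo_dir, key):
    """If an uploaded key overwrites a cached package, drop the stale entry
    so the usual 'add anything not in the cache' pass re-adds the new file."""
    fname = key[len(s3_repo_dir):]             # e.g. '/pkgname-0.3.8-x86_64.rpm'
    if fname in cache:
        stale_id = cache.pop(fname)            # forget the old package id
        repo.remove_package(stale_id)          # and remove it from the metadata
    return cache

if __name__ == '__main__':
    repo = FakeRepo()
    repo.add_package('ID', '/pkgname-0.3.8-x86_64.rpm')
    cache = {'/pkgname-0.3.8-x86_64.rpm': 'ID'}
    cache = handle_object_created(cache, repo, 'test_s3rpm/repo',
                                  'test_s3rpm/repo/pkgname-0.3.8-x86_64.rpm')
    print(json.dumps(cache), repo.packages)    # {} {} -- ready to be re-added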

rpm/s3rpm.py

Lines changed: 33 additions & 8 deletions
@@ -33,8 +33,16 @@ def lambda_handler(event, context):
         s3.download_file(os.environ['BUCKET_NAME'], s3_repo_dir+'/repodata/'+f, repo.repodir+'repodata/'+f)
     repo.read()
     print('Creating Metadata files')
-    repo, cache = check_changed_files(repo, s3_repo_dir)
-    #Check if object was removed
+    if event['Records'][0]['eventName'].startswith('ObjectCreated'):
+        print('helpme')
+        repo, cache = check_changed_files(repo, s3_repo_dir, newfile=event['Records'][0]['s3']['object']['key'])
+    else:
+        repo, cache = check_changed_files(repo, s3_repo_dir)
+    #save cache to bucket
+    s3 = boto3.resource('s3')
+    f_index_obj = s3.Object(bucket_name=os.environ['BUCKET_NAME'], key=s3_repo_dir+'/repo_cache')
+    print("Writing file: %s" % (str(f_index_obj)))
+    f_index_obj.put(Body=str(json.dumps(cache)))

     repo.save()

@@ -43,16 +51,12 @@ def lambda_handler(event, context):
     sign_md_file(repo, s3_repo_dir)

     #save files to bucket
-    s3 = boto3.resource('s3')
     for f in files:
         with open(repo.repodir+'repodata/'+f, 'rb') as g:
             f_index_obj = s3.Object(bucket_name=os.environ['BUCKET_NAME'], key=s3_repo_dir+'/repodata/'+f)
             print("Writing file: %s" % (str(f_index_obj)))
             f_index_obj.put(Body=g.read(-1), ACL=get_public())
-    f_index_obj = s3.Object(bucket_name=os.environ['BUCKET_NAME'], key=s3_repo_dir+'/repo_cache')
-    print("Writing file: %s" % (str(f_index_obj)))
-    f_index_obj.put(Body=str(json.dumps(cache)))
-
+

     #Let us clean up
     shutil.rmtree(repo.repodir)
     if os.path.exists('/tmp/gpgdocs'):
@@ -114,14 +118,35 @@ def get_cache(repo, s3_repo_dir):
         cache = {}
     return cache

-def check_changed_files(repo, s3_repo_dir):
+def remove_overwritten_file_from_cache(cache, newfile, s3_repo_dir, repo):
+    """
+    remove pkg from metadata and repo
+    """
+    fname = newfile[len(s3_repo_dir):] # '/filename.rpm' - without path
+    print('file %s has been overwritten and will be removed from md and repo' % (fname))
+    pkg_id = cache[fname]
+    del cache[fname]
+
+    # save cache in case new event occurs
+    s3 = boto3.resource('s3')
+    f_index_obj = s3.Object(bucket_name=os.environ['BUCKET_NAME'], key=s3_repo_dir+'/repo_cache')
+    f_index_obj.put(Body=str(json.dumps(cache)))
+
+    repo.remove_package(pkg_id)
+    return cache
+
+
+def check_changed_files(repo, s3_repo_dir, newfile=None):
     """
     check if there are any new files in bucket or any deleted files
     """
     print("Checking for changes : %s" % (s3_repo_dir))
     cache = get_cache(repo, s3_repo_dir)
     s3 = boto3.resource('s3')
     files = []
+    #if file was overwriten and is in repocache then remove it from cache, so next for loop will add back the new
+    if newfile != None and newfile[len(s3_repo_dir):] in cache:
+        cache = remove_overwritten_file_from_cache(cache, newfile, s3_repo_dir, repo)
     #cycle through all objects ending with .rpm in REPO_DIR and check if they are already in repodata, if not add them
     for obj in s3.Bucket(os.environ['BUCKET_NAME']).objects.filter(Prefix=s3_repo_dir):
         files.append(obj.key)
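For orientation, the event routing added at the top of lambda_handler reduces to something like the following. This is a standalone sketch: route and the stubbed check_changed_files are hypothetical helpers, and the event dictionaries carry only the fields the branch actually reads.

# Sketch of the new branch: ObjectCreated events pass the uploaded key
# through as `newfile`; everything else takes the old path.
def check_changed_files(repo, s3_repo_dir, newfile=None):   # stub for illustration
    return repo, {'newfile': newfile}

def route(event, repo='repo', s3_repo_dir='test_s3rpm/repo'):
    record = event['Records'][0]
    if record['eventName'].startswith('ObjectCreated'):
        return check_changed_files(repo, s3_repo_dir,
                                   newfile=record['s3']['object']['key'])
    return check_changed_files(repo, s3_repo_dir)

created = {"Records": [{"eventName": "ObjectCreated:Put",
                        "s3": {"object": {"key": "test_s3rpm/repo/pkg.rpm"}}}]}
removed = {"Records": [{"eventName": "ObjectRemoved:Delete",
                        "s3": {"object": {"key": "test_s3rpm/repo/pkg.rpm"}}}]}
print(route(created))   # ('repo', {'newfile': 'test_s3rpm/repo/pkg.rpm'})
print(route(removed))   # ('repo', {'newfile': None})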

rpm/s3rpm_test.py

Lines changed: 2 additions & 2 deletions
@@ -145,7 +145,7 @@ def tearDown(self):
     @patch('s3rpm.get_cache')
     @patch('s3rpm.YumRepository')
     @patch('s3rpm.boto3')
-    def test_defined_repodir(self, s3_mock, yum_mock, cache_mock, ):
+    def test_defined_repodir(self, s3_mock, yum_mock, cache_mock):
         cache_mock.return_value = {"pkgname":"ID"}

         yum_mock.return_value = MagicMock(repodir='test_s3rpm/')
@@ -178,6 +178,6 @@ def test_bad_bucket_name(self, s3_mock):
         s3rpm.lambda_handler(S3_EVENT, {})
         s3_mock.client.assert_called_with('s3')
         self.assertEqual(len(s3_mock.resource().Object().put.mock_calls), 0)
-S3_EVENT = {"Records":[{"s3": {"object": {"key": "test_s3rpm/repo/pkgname-0.3.8-x86_64.rpm",},"bucket": {"name": "bucket",},},"eventName": "ObjectCreated:Put"}]}
+S3_EVENT = {"Records":[{"s3": {"object": {"key": "test_s3rpm/repo/pkgname-0.3.8-x86_64.rpm",},"bucket": {"name": "bucket",},},"eventName": "ObjectCreated:*"}]}
 if __name__ == '__main__':
     unittest.main()
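One note on the test change: the wildcard-style "ObjectCreated:*" event name still exercises the new branch, because the handler only checks the event-name prefix. The tiny snippet below (plain Python, independent of the test suite) confirms that.

# Prefix check used by the handler: both real notification names and the
# test's wildcard-style value hit the ObjectCreated path.
for name in ("ObjectCreated:Put", "ObjectCreated:*", "ObjectRemoved:Delete"):
    print(name, name.startswith("ObjectCreated"))
# ObjectCreated:Put True
# ObjectCreated:* True
# ObjectRemoved:Delete False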
