Skip to content

Commit ea289c8

Browse files
committed
Merge branch 'morosi-asset' into 'master'
Add an AWS CDK Asset equivalent. See merge request it/e3-aws!62
2 parents 9c9fae7 + 23d314c commit ea289c8

26 files changed

+591
-223
lines changed

src/e3/aws/cfn/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -482,7 +482,7 @@ def __contains__(self, key: str) -> bool:
482482
return key in self.resources
483483

484484
def create_data_dir(self, root_dir: str) -> None:
485-
"""Populate directory that will be exported into a S3 bucket for the stack.
485+
"""Populate data directory that will be exported into a S3 bucket for the stack.
486486
487487
:param root_dir: temporary local directory
488488
"""

src/e3/aws/cfn/main.py

Lines changed: 161 additions & 63 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
from e3.os.process import PIPE
1414
from e3.vcs.git import GitRepository
1515
from e3.aws import AWSEnv, Session
16+
from e3.aws.s3 import bucket
1617
from e3.aws.cfn import Stack
1718
from e3.env import Env
1819
from e3.fs import find, sync_tree
@@ -26,6 +27,7 @@ def __init__(
2627
self,
2728
regions: list[str],
2829
default_profile: str | None = None,
30+
assets_dir: str | None = None,
2931
data_dir: str | None = None,
3032
s3_bucket: str | None = None,
3133
s3_key: str = "",
@@ -37,6 +39,7 @@ def __init__(
3739
3840
:param regions: list of regions on which we can operate
3941
:param default_profile: default AWS profile to use to create the stack
42+
:param assets_dir: directory containing assets of the stack
4043
:param data_dir: directory containing files used by cfn-init
4144
:param s3_bucket: if defined S3 will be used as a proxy for resources.
4245
Template body will be uploaded to S3 before calling operation on
@@ -140,8 +143,11 @@ def __init__(
140143

141144
self.regions = regions
142145

146+
self.assets_dir = assets_dir
143147
self.data_dir = data_dir
144148
self.s3_bucket = s3_bucket
149+
self.s3_assets_key = None
150+
self.s3_assets_url = None
145151
self.s3_data_key = None
146152
self.s3_data_url = None
147153
self.s3_template_key = None
@@ -150,23 +156,28 @@ def __init__(
150156
self.assume_role = assume_role
151157
self.aws_env: Session | AWSEnv | None = None
152158
self.deploy_branch = deploy_branch
159+
# A temporary dir will be assigned when generating assets
160+
self.gen_assets_dir: str | None = None
153161

154162
self.timestamp = datetime.utcnow().strftime("%Y-%m-%d/%H:%M:%S.%f")
155163

156164
if s3_bucket is not None:
157-
s3_root_key = (
158-
"/".join([s3_key.rstrip("/"), self.timestamp]).strip("/") + "/"
159-
)
160-
self.s3_data_key = s3_root_key + "data/"
161-
self.s3_data_url = "https://%s.s3.amazonaws.com/%s" % (
162-
self.s3_bucket,
163-
self.s3_data_key,
164-
)
165-
self.s3_template_key = s3_root_key + "template"
166-
self.s3_template_url = "https://%s.s3.amazonaws.com/%s" % (
167-
self.s3_bucket,
168-
self.s3_template_key,
165+
s3_root_key = f"{s3_key.strip('/')}/"
166+
s3_root_url = f"https://{self.s3_bucket}.s3.amazonaws.com/"
167+
168+
# Assets use a static key
169+
self.s3_assets_key = f"{s3_root_key}assets/"
170+
self.s3_assets_url = f"{s3_root_url}{self.s3_assets_key}"
171+
172+
# Data and template use a dynamic key based on the timestamp
173+
s3_timestamp_key = (
174+
"/".join([s3_root_key.rstrip("/"), self.timestamp]).strip("/") + "/"
169175
)
176+
self.s3_data_key = f"{s3_timestamp_key}data/"
177+
self.s3_data_url = f"{s3_root_url}{self.s3_data_key}"
178+
179+
self.s3_template_key = f"{s3_timestamp_key}template"
180+
self.s3_template_url = f"{s3_root_url}{self.s3_template_key}"
170181

171182
@property
172183
def dry_run(self) -> bool:
@@ -200,6 +211,116 @@ def _prompt_yes(self, msg: str) -> bool:
200211
ask = input(f"{msg} (y/N): ")
201212
return ask[0] in "Yy"
202213

214+
def _upload_dir(
    self,
    root_dir: str,
    s3_bucket: str,
    s3_key: str,
    s3_client: botocore.client.S3 | None = None,
    check_exists: bool = False,
) -> None:
    """Upload the content of a directory to an S3 bucket.

    Each file under root_dir is uploaded to ``s3_key`` + its relative
    path (with forward slashes). When s3_client is None only the
    would-be uploads are logged, so the operation degrades to a report
    when no AWS session is available.

    :param root_dir: directory whose files are uploaded
    :param s3_bucket: bucket where to upload files
    :param s3_key: key prefix for uploaded files
    :param s3_client: a client for the S3 API
    :param check_exists: check if an S3 object exists before uploading it
    """
    assert self.args is not None

    # No AWS session: only log what would be uploaded
    if s3_client is None:
        for f in find(root_dir):
            subkey = os.path.relpath(f, root_dir).replace("\\", "/")
            logging.info(
                "Upload %s to %s:%s%s",
                subkey,
                s3_bucket,
                s3_key,
                subkey,
            )
        return

    # Enter the bucket context once: it is loop-invariant, so there is no
    # need to re-create it for every uploaded file
    with bucket(s3_bucket, client=s3_client, auto_create=False) as upload_bucket:
        for f in find(root_dir):
            subkey = os.path.relpath(f, root_dir).replace("\\", "/")

            logging.info(
                "Upload %s to %s:%s%s",
                subkey,
                s3_bucket,
                s3_key,
                subkey,
            )

            # Check already existing S3 objects.
            # Ignore the potential 403 error as CFN roles often only have the
            # s3:GetObject permission on the bucket
            s3_object_key = f"{s3_key}{subkey}"
            if check_exists and upload_bucket.object_exists(
                s3_object_key, ignore_error_403=True
            ):
                logging.info(
                    "Skip already existing %s",
                    subkey,
                )
                continue

            if not self.args.dry_run:
                with open(f, "rb") as fd:
                    upload_bucket.push(key=s3_object_key, content=fd, exist_ok=True)
def _upload_stack(self, stack: Stack) -> None:
    """Upload stack assets, data, and template to S3.

    :param stack: the stack to upload
    """
    # Nothing to upload if there is no S3 bucket set
    if self.s3_bucket is None:
        return

    assert self.args is not None

    s3 = self.aws_env.client("s3") if self.aws_env else None
    if s3 is None:
        logging.warning(
            "no aws session, won't be able to check if assets exist in the bucket"
        )

    # Synchronize assets to the bucket before creating the stack
    if self.gen_assets_dir is not None and self.s3_assets_key is not None:
        self._upload_dir(
            root_dir=self.gen_assets_dir,
            s3_bucket=self.s3_bucket,
            s3_key=self.s3_assets_key,
            s3_client=s3,
            check_exists=True,
        )

    with tempfile.TemporaryDirectory() as data_dir:
        # Push data associated with CFNMain and then all data related
        # to the stack
        self.create_data_dir(root_dir=data_dir)
        stack.create_data_dir(root_dir=data_dir)

        if self.s3_data_key is not None:
            # Synchronize data to the bucket before creating the stack
            self._upload_dir(
                root_dir=data_dir,
                s3_bucket=self.s3_bucket,
                s3_key=self.s3_data_key,
                s3_client=s3,
            )

    if self.s3_template_key is not None:
        logging.info(
            "Upload template to %s:%s",
            self.s3_bucket,
            self.s3_template_key,
        )
        # The template upload is skipped on dry runs or when no AWS
        # session is available
        if s3 is not None and not self.args.dry_run:
            s3.put_object(
                Bucket=self.s3_bucket,
                Body=stack.body.encode("utf-8"),
                ServerSideEncryption="AES256",
                Key=self.s3_template_key,
            )
203324
def _push_stack_changeset(self, stack: Stack, s3_template_url: str | None) -> int:
204325
"""Push the changeset of a stack from an already uploaded S3 template.
205326
@@ -280,47 +401,7 @@ def execute_for_stack(self, stack: Stack, aws_env: Session | None = None) -> int
280401

281402
if self.args.command in ("push", "update"):
282403
# Synchronize resources to the S3 bucket
283-
if not self.args.dry_run:
284-
assert self.aws_env
285-
s3 = self.aws_env.client("s3")
286-
287-
with tempfile.TemporaryDirectory() as tempd:
288-
# Push data associated with CFNMain and then all data
289-
# related to the stack
290-
self.create_data_dir(root_dir=tempd)
291-
stack.create_data_dir(root_dir=tempd)
292-
293-
if self.s3_data_key is not None:
294-
# synchronize data to the bucket before creating the stack
295-
for f in find(tempd):
296-
with open(f, "rb") as fd:
297-
subkey = os.path.relpath(f, tempd).replace("\\", "/")
298-
logging.info(
299-
"Upload %s to %s:%s%s",
300-
subkey,
301-
self.s3_bucket,
302-
self.s3_data_key,
303-
subkey,
304-
)
305-
if not self.args.dry_run:
306-
s3.put_object(
307-
Bucket=self.s3_bucket,
308-
Body=fd,
309-
ServerSideEncryption="AES256",
310-
Key=self.s3_data_key + subkey,
311-
)
312-
313-
if self.s3_template_key is not None:
314-
logging.info(
315-
"Upload template to %s:%s", self.s3_bucket, self.s3_template_key
316-
)
317-
if not self.args.dry_run:
318-
s3.put_object(
319-
Bucket=self.s3_bucket,
320-
Body=stack.body.encode("utf-8"),
321-
ServerSideEncryption="AES256",
322-
Key=self.s3_template_key,
323-
)
404+
self._upload_stack(stack)
324405

325406
logging.info("Validate template for stack %s" % stack.name)
326407
if not self.args.dry_run:
@@ -334,6 +415,7 @@ def execute_for_stack(self, stack: Stack, aws_env: Session | None = None) -> int
334415
return self._push_stack_changeset(
335416
stack=stack, s3_template_url=self.s3_template_url
336417
)
418+
337419
elif self.args.command == "show":
338420
print(stack.body)
339421
elif self.args.command == "protect":
@@ -428,19 +510,31 @@ def execute(
428510
return 1
429511

430512
return_val = 0
431-
stacks = self.create_stack()
432-
433-
if isinstance(stacks, list):
434-
for stack in stacks:
435-
return_val = self.execute_for_stack(stack, aws_env=aws_env)
436-
# Stop at first failure
437-
if return_val:
438-
return return_val
439-
else:
440-
return_val = self.execute_for_stack(stacks, aws_env=aws_env)
441513

514+
# Create a temporary assets dir here as assets need to be generated at the
515+
# time of create_stack
516+
with tempfile.TemporaryDirectory() as temp_assets_dir:
517+
self.gen_assets_dir = temp_assets_dir
518+
self.pre_create_stack()
519+
stacks = self.create_stack()
520+
self.post_create_stack()
521+
522+
if isinstance(stacks, list):
523+
for stack in stacks:
524+
return_val = self.execute_for_stack(stack, aws_env=aws_env)
525+
# Stop at first failure
526+
if return_val:
527+
return return_val
528+
else:
529+
return_val = self.execute_for_stack(stacks, aws_env=aws_env)
530+
531+
self.gen_assets_dir = None
442532
return return_val
443533

534+
def pre_create_stack(self) -> None:
    """Hook run just before create_stack; the default does nothing."""
537+
444538
@abc.abstractmethod
445539
def create_stack(self) -> Stack | list[Stack]:
446540
"""Create a stack.
@@ -449,6 +543,10 @@ def create_stack(self) -> Stack | list[Stack]:
449543
"""
450544
pass
451545

546+
def post_create_stack(self) -> None:
    """Hook run just after create_stack; the default does nothing."""
549+
452550
@property
453551
def stack_policy_body(self) -> str | None:
454552
"""Stack Policy that can be set by calling the command ``protect``.

src/e3/aws/mock/__init__.py

Whitespace-only changes.

src/e3/aws/mock/troposphere/__init__.py

Whitespace-only changes.
Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
from __future__ import annotations
from typing import TYPE_CHECKING
from unittest.mock import patch
from contextlib import contextmanager

if TYPE_CHECKING:
    from typing import Any
    from collections.abc import Iterator

    from e3.aws.troposphere.awslambda import PyFunctionAsset


@contextmanager
def mock_pyfunctionasset() -> Iterator[None]:
    """Disable asset generation in PyFunctionAsset.

    PyFunctionAsset runs a pip install and packages source files, which
    some tests need to avoid. While this context manager is active,
    create_assets_dir is replaced by a stub that only assigns the
    checksum "dummychecksum" to the asset.
    """

    def stub_create_assets_dir(
        self: PyFunctionAsset, *args: Any, **kwargs: Any
    ) -> Any:
        """Skip asset generation and assign a dummy checksum."""
        self.checksum = "dummychecksum"

    with patch(
        "e3.aws.troposphere.awslambda.PyFunctionAsset.create_assets_dir",
        stub_create_assets_dir,
    ):
        yield

src/e3/aws/s3/__init__.py

Lines changed: 26 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
import boto3
99

1010
if TYPE_CHECKING:
11-
from typing import Any
11+
from typing import Any, BinaryIO
1212
from collections.abc import Iterable, Iterator
1313

1414
logger = logging.getLogger("e3.aws.s3")
@@ -99,7 +99,9 @@ def delete_bucket(self) -> None:
9999
self.clear_bucket()
100100
self.client.delete_bucket(Bucket=self.bucket)
101101

102-
def push(self, key: str, content: bytes, exist_ok: bool | None = None) -> None:
102+
def push(
103+
self, key: str, content: bytes | BinaryIO, exist_ok: bool | None = None
104+
) -> None:
103105
"""Push content to S3.
104106
105107
You can set exist_ok to false to prevent the object from being
@@ -179,6 +181,28 @@ def bucket_exists(self) -> bool:
179181
return False
180182
raise
181183

184+
def object_exists(self, key: str, /, ignore_error_403: bool = False) -> bool:
    """Check if an object exists.

    :param key: object key
    :param ignore_error_403: boto3.head_object returns a 403 error when the
        object doesn't exist and the IAM role doesn't have the s3:ListBucket
        permission. Setting ignore_error_403=True makes the function return
        False instead of raising a ClientError
    :return: if the object exists
    :raises ClientError: in case of error, or in case of permission issue
        when the object doesn't exist, the IAM role doesn't have the
        s3:ListBucket permission, and ignore_error_403 is False
    """
    try:
        self.client.head_object(Bucket=self.bucket, Key=key)
    except ClientError as exc:
        code = exc.response["Error"]["Code"]
        # 404: the object is definitely absent. 403: it may be absent but
        # hidden by a missing s3:ListBucket permission; treat that as
        # absent only when the caller opted in.
        if code == "404" or (ignore_error_403 and code == "403"):
            return False
        raise
    return True
205+
182206
@property
183207
def key_count(self) -> int:
184208
"""Return the number of keys from S3."""

0 commit comments

Comments
 (0)