
changed Lambda demo to support AWS Academy and to create an individual S3 bucket per group with a globally unique name

Sebastian Rieger, 2 years ago
commit f081a50a4a (branch pull/3/head)
  1. example-projects/counter-demo/aws-lambda/invoke-function.py (4 changes)
  2. example-projects/counter-demo/aws-lambda/lambda_function.py (1 change)
  3. example-projects/counter-demo/aws-lambda/start.py (3 changes)
  4. example-projects/counter-demo/aws-lambda/stop.py (31 changes)

example-projects/counter-demo/aws-lambda/invoke-function.py (4 changes)

@@ -36,8 +36,10 @@ except lClient.exceptions.ResourceNotFoundException:
 streamingBody = response['Payload']
 result = streamingBody.read()
+jsonResult = json.loads(result)
 print(json.dumps(response, indent=4, sort_keys=True, default=str))
-print('Payload:\n' + str(result))
+print('Payload:\n' + str(result) + "\n")
+print("Counter is now at: " + jsonResult['headers']['x-hsfd-counter'])

example-projects/counter-demo/aws-lambda/lambda_function.py (1 change)

@@ -46,6 +46,7 @@ def lambda_handler(event, context):
         'statusCode': 200,
         'headers': {
             'Content-Type': 'text/html',
+            'x-hsfd-counter': str(counter)
         },
         'body': output
     }
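The new x-hsfd-counter header is what invoke-function.py reads above. A minimal sketch of the surrounding handler, using a hard-coded counter instead of the S3-backed value the demo actually maintains:

def lambda_handler(event, context):
    # in the demo the counter is read from and written back to an S3 object;
    # a fixed value is used here only to illustrate the response shape
    counter = 1
    output = '<html><body>Counter: ' + str(counter) + '</body></html>'
    return {
        'statusCode': 200,
        'headers': {
            'Content-Type': 'text/html',
            'x-hsfd-counter': str(counter)
        },
        'body': output
    }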

example-projects/counter-demo/aws-lambda/start.py (3 changes)

@@ -35,6 +35,7 @@ roleName = 'arn:aws:iam::488766701848:role/LabRole'
 #
 ################################################################################################
 def cleanup_s3_bucket(s3_bucket):
     # Deleting objects
     for s3_object in s3_bucket.objects.all():
@@ -140,4 +141,4 @@ print("Lambda Function and S3 Bucket to store the counter are available. Sadly,
 #
 # #response = lClient.create_event_source_mapping(
 # # EventSourceArn=apiArn,
-# #)
+# #)
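start.py now uses the pre-created LabRole of AWS Academy accounts (visible in the hunk context above) and, per the commit message, creates the per-group bucket itself. The creation call is not part of the shown hunks; a sketch of how it could look with the name scheme introduced in stop.py below, assuming the demo's default region us-east-1 (where create_bucket must be called without a LocationConstraint):

from datetime import date
import boto3

groupNr = 22
currentYear = date.today().year
globallyUniqueS3GroupBucketName = "cloudcomp-counter-" + str(currentYear) + "-group" + str(groupNr)

s3Client = boto3.client('s3', region_name='us-east-1')
# bucket names are global across all AWS accounts, so year and group number keep them unique
s3Client.create_bucket(Bucket=globallyUniqueS3GroupBucketName)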

example-projects/counter-demo/aws-lambda/stop.py (31 changes)

@@ -1,3 +1,4 @@
+from datetime import date
 import boto3
 ################################################################################################
################################################################################################
@@ -6,6 +7,15 @@ import boto3
 #
 ################################################################################################
+# you need to create a bucket in S3, here in this demo it is called "cloudcomp-counter", but
+# bucket names need to be world wide unique ;) The demo looks for a file that is named
+# "us-east-1" (same as our default region) in the bucket and expects a number in it to increase
+groupNr = 22
+currentYear = date.today().year
+globallyUniqueS3GroupBucketName = "cloudcomp-counter-" + str(currentYear) + "-group" + str(groupNr)
 # region = 'eu-central-1'
 region = 'us-east-1'
 functionName = 'cloudcomp-counter-lambda-demo'
@@ -19,7 +29,18 @@ roleName = 'arn:aws:iam::919927306708:role/cloudcomp-s3-access'
 ################################################################################################
+def cleanup_s3_bucket(s3_bucket):
+    # Deleting objects
+    for s3_object in s3_bucket.objects.all():
+        s3_object.delete()
+    # Deleting objects versions if S3 versioning enabled
+    for s3_object_ver in s3_bucket.object_versions.all():
+        s3_object_ver.delete()
 client = boto3.setup_default_session(region_name=region)
 s3Client = boto3.client('s3')
 s3Resource = boto3.resource('s3')
 lClient = boto3.client('lambda')
@@ -31,3 +52,13 @@ try:
     )
 except lClient.exceptions.ResourceNotFoundException:
     print('Function not available. No need to delete it.')
+print("Deleting old bucket...")
+print("------------------------------------")
+try:
+    currentBucket = s3Resource.Bucket(globallyUniqueS3GroupBucketName)
+    cleanup_s3_bucket(currentBucket)
+    currentBucket.delete()
+except ClientError as e:
+    print(e)
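The teardown added to stop.py empties the bucket (including object versions) before deleting it. Note that the except ClientError clause relies on ClientError being imported from botocore.exceptions, which is not visible in the shown hunks. A self-contained sketch of the same flow, using a hypothetical bucket name:

import boto3
from botocore.exceptions import ClientError  # required for the except ClientError clause

s3Resource = boto3.resource('s3')

def cleanup_s3_bucket(s3_bucket):
    # delete all objects and, if versioning is enabled, all object versions
    for s3_object in s3_bucket.objects.all():
        s3_object.delete()
    for s3_object_ver in s3_bucket.object_versions.all():
        s3_object_ver.delete()

try:
    currentBucket = s3Resource.Bucket("cloudcomp-counter-2023-group22")  # hypothetical example name
    cleanup_s3_bucket(currentBucket)
    currentBucket.delete()
except ClientError as e:
    print(e)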