Sebastian Rieger
2 years ago
6 changed files with 314 additions and 0 deletions
- example-projects/counter-demo/aws-lambda-localstack/aws-mockup-credentials/config (+2)
- example-projects/counter-demo/aws-lambda-localstack/aws-mockup-credentials/credentials (+3)
- example-projects/counter-demo/aws-lambda-localstack/invoke-function.py (+47)
- example-projects/counter-demo/aws-lambda-localstack/lambda_function.py (+52)
- example-projects/counter-demo/aws-lambda-localstack/start.py (+146)
- example-projects/counter-demo/aws-lambda-localstack/stop.py (+64)
example-projects/counter-demo/aws-lambda-localstack/aws-mockup-credentials/config
@@ -0,0 +1,2 @@
[default]
region = us-east-1
example-projects/counter-demo/aws-lambda-localstack/aws-mockup-credentials/credentials
@@ -0,0 +1,3 @@
[default]
aws_access_key_id=test
aws_secret_access_key=test
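These two files form the dummy AWS profile that LocalStack accepts. A minimal sketch (not part of this commit) of how a script could point boto3 at them explicitly; AWS_CONFIG_FILE and AWS_SHARED_CREDENTIALS_FILE are standard botocore settings, and the relative paths assume the script is started from the aws-lambda-localstack directory:

import os
import boto3

# assumption: working directory is example-projects/counter-demo/aws-lambda-localstack
os.environ["AWS_CONFIG_FILE"] = "aws-mockup-credentials/config"
os.environ["AWS_SHARED_CREDENTIALS_FILE"] = "aws-mockup-credentials/credentials"

# clients created afterwards use region us-east-1 and the dummy "test" keys,
# which is all LocalStack needs to accept the request
s3 = boto3.client("s3", endpoint_url="http://localhost.localstack.cloud:4566")
print(s3.list_buckets())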
example-projects/counter-demo/aws-lambda-localstack/invoke-function.py
@@ -0,0 +1,47 @@
from datetime import date
import json
import boto3

################################################################################################
#
# Configuration Parameters
#
################################################################################################

endpoint_url = "http://localhost.localstack.cloud:4566"

# region = 'eu-central-1'
region = 'us-east-1'
functionName = 'cloudcomp-counter-lambda-demo'


################################################################################################
#
# boto3 code
#
################################################################################################


boto3.setup_default_session(region_name=region)
lClient = boto3.client('lambda', endpoint_url=endpoint_url)


print("Invoking function...")
print("------------------------------------")
try:
    response = lClient.invoke(
        FunctionName=functionName,
        Payload='{ "input": "1" }'
    )
except lClient.exceptions.ResourceNotFoundException:
    print('Function not available.')
    raise SystemExit(1)

streamingBody = response['Payload']
result = streamingBody.read()
jsonResult = json.loads(result)

print(json.dumps(response, indent=4, sort_keys=True, default=str))

print('Payload:\n' + str(result) + "\n")

print("Counter is now at: " + jsonResult['headers']['x-hsfd-counter'])
example-projects/counter-demo/aws-lambda-localstack/lambda_function.py
@@ -0,0 +1,52 @@
# import json
import base64
import os
import boto3


def lambda_handler(event, context):
    print('## ENVIRONMENT VARIABLES')
    print(os.environ)
    print('## EVENT')
    print(event)

    globally_unique_s3_group_bucket_name = os.environ.get("bucketName")
    print('Trying to access bucket: ' + globally_unique_s3_group_bucket_name)

    # read the current counter value from the S3 object named after the region
    s3_client = boto3.client('s3')
    response = s3_client.get_object(Bucket=globally_unique_s3_group_bucket_name, Key='us-east-1')

    counter = int(response['Body'].read().decode('utf-8'))

    # the increment comes either from an HTML form POST (base64-encoded body)
    # or from a direct invocation payload such as { "input": "1" }
    debug = ""
    incr = 0
    if 'body' in event:
        body = str(base64.b64decode(event['body']).decode("utf-8"))
        if body.startswith('input'):
            incr = int(body.rsplit('=')[1])
    elif 'input' in event:
        incr = int(event['input'])

    if incr != 0:
        counter = counter + incr
        response = s3_client.put_object(Bucket=globally_unique_s3_group_bucket_name, Key='us-east-1', Body=str(counter))

    output = ('<html><head><title>Counter Demo</title>\n'
              # '<meta http-equiv="refresh" content="5"/></head><body>\n'
              '<h2>HS Fulda Cloud Computing - Counter Demo</h2>\n'
              '<p><b>HTML-Output:</b> ' + str(counter) + '</p>\n'
              '<form method=POST action="">\n'
              '<input type="hidden" name="input" value="1">\n'
              '<input type="submit" value="Increment"></form>\n'
              # '<hr><b>Lambda Event:</b><br>' + repr(event) + '\n'
              # '<hr><b>Lambda Context:</b><br>' + repr(context) + '\n'
              '</body></html>\n')

    return {
        'statusCode': 200,
        'headers': {
            'Content-Type': 'text/html',
            'x-hsfd-counter': str(counter)
        },
        'body': output
    }
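Because the handler only needs boto3 and the two environment variables, it can also be smoke-tested locally without deploying it. A sketch that assumes LocalStack is running, start.py has already created the bucket, and the installed boto3 release honours the AWS_ENDPOINT_URL environment variable:

from datetime import date
import os

# assumptions: LocalStack is up, start.py (groupNr = 22) has created the bucket,
# and boto3 picks up AWS_ENDPOINT_URL so that boto3.client('s3') talks to LocalStack
os.environ["AWS_ENDPOINT_URL"] = "http://localhost.localstack.cloud:4566"
os.environ["bucketName"] = "cloudcomp-counter-" + str(date.today().year) + "-group22"

import lambda_function

result = lambda_function.lambda_handler({"input": "1"}, None)
print(result["statusCode"], result["headers"]["x-hsfd-counter"])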
example-projects/counter-demo/aws-lambda-localstack/start.py
@@ -0,0 +1,146 @@
from datetime import date
import zipfile
import boto3
from botocore.exceptions import ClientError

################################################################################################
#
# Configuration Parameters
#
################################################################################################

endpoint_url = "http://localhost.localstack.cloud:4566"

# you need to create a bucket in S3, here in this demo it is called "cloudcomp-counter", but
# bucket names need to be worldwide unique ;) The demo looks for a file that is named
# "us-east-1" (same as our default region) in the bucket and expects a number in it to increase

groupNr = 22
currentYear = date.today().year

globallyUniqueS3GroupBucketName = "cloudcomp-counter-" + str(currentYear) + "-group" + str(groupNr)

# region = 'eu-central-1'
region = 'us-east-1'
functionName = 'cloudcomp-counter-lambda-demo'

# see ARN for the AWS Academy LabRole here:
# https://us-east-1.console.aws.amazon.com/iamv2/home?region=us-east-1#/roles/details/LabRole?section=permissions

# roleName = 'arn:aws:iam::309000625112:role/service-role/cloudcomp-counter-demo-role-6rs7pah3'
# roleName = 'arn:aws:iam::919927306708:role/cloudcomp-s3-access'
roleName = 'arn:aws:iam::488766701848:role/LabRole'

################################################################################################
#
# boto3 code
#
################################################################################################


def cleanup_s3_bucket(s3_bucket):
    # Deleting objects
    for s3_object in s3_bucket.objects.all():
        s3_object.delete()
    # Deleting object versions if S3 versioning is enabled
    for s3_object_ver in s3_bucket.object_versions.all():
        s3_object_ver.delete()


boto3.setup_default_session(region_name=region)
s3Client = boto3.client('s3', endpoint_url=endpoint_url)
s3Resource = boto3.resource('s3', endpoint_url=endpoint_url)
lClient = boto3.client('lambda', endpoint_url=endpoint_url)
apiClient = boto3.client("apigatewayv2", endpoint_url=endpoint_url)

print("Deleting old function...")
print("------------------------------------")
try:
    response = lClient.delete_function(
        FunctionName=functionName,
    )
except lClient.exceptions.ResourceNotFoundException:
    print('Function not available. No need to delete it.')

print("Deleting old bucket...")
print("------------------------------------")

try:
    currentBucket = s3Resource.Bucket(globallyUniqueS3GroupBucketName)
    cleanup_s3_bucket(currentBucket)
    currentBucket.delete()
except ClientError as e:
    print(e)

print("creating S3 bucket (must be globally unique)...")
print("------------------------------------")

try:
    response = s3Client.create_bucket(Bucket=globallyUniqueS3GroupBucketName)
    response = s3Client.put_object(Bucket=globallyUniqueS3GroupBucketName, Key='us-east-1', Body=str(0))
except ClientError as e:
    print(e)

print("creating new function...")
print("------------------------------------")

zf = zipfile.ZipFile('lambda-deployment-archive.zip', 'w', zipfile.ZIP_DEFLATED)
zf.write('lambda_function.py')
zf.close()

lambdaFunctionARN = ""
with open('lambda-deployment-archive.zip', mode='rb') as file:
    zipfileContent = file.read()

    response = lClient.create_function(
        FunctionName=functionName,
        Runtime='python3.9',
        Role=roleName,
        Code={
            'ZipFile': zipfileContent
        },
        Handler='lambda_function.lambda_handler',
        Publish=True,
        Environment={
            'Variables': {
                'bucketName': globallyUniqueS3GroupBucketName
            }
        }
    )
    lambdaFunctionARN = response['FunctionArn']

print("Lambda Function and S3 Bucket to store the counter are available. Sadly, AWS Academy labs do not allow\n"
      "creating an API gateway to be able to access the Lambda function directly via HTTP from the browser, as\n"
      "shown in https://348yxdily0.execute-api.eu-central-1.amazonaws.com/default/cloudcomp-counter-demo.\n"
      "\n"
      "However, you can now run invoke-function.py to view and increment the counter. You can also use\n"
      "the test button in the Lambda AWS console. In this case you need to send the content\n"
      "\n"
      "{\n"
      "  \"input\": \"1\"\n"
      "}\n"
      "\n"
      "to increment the counter by 1.\n"
      "Try to understand how Lambda can be used to cut costs regarding cloud services and what its pros\n"
      "and cons are.\n")

# sadly, AWS Academy Labs don't allow API gateways
# an API gateway would provide an HTTP endpoint that we could access directly in the browser
# and that would call our function, as in the provided demo:
#
# https://348yxdily0.execute-api.eu-central-1.amazonaws.com/default/cloudcomp-counter-demo

# print("creating API gateway...")
# print("------------------------------------")
#
# #apiArn = ""
# response = apiClient.create_api(
#     Name=functionName + '-api',
#     ProtocolType='HTTP',
#     Target=lambdaFunctionARN
# )
# #apiArn=response['']
#
# #response = lClient.create_event_source_mapping(
# #    EventSourceArn=apiArn,
# #)
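Against LocalStack (unlike the AWS Academy labs) the commented-out API gateway part could actually be completed. A sketch of what the missing piece might look like, reusing apiClient, lClient, functionName and lambdaFunctionARN from above; the StatementId is an arbitrary example value:

# sketch: only works where API gateway creation is allowed, e.g. on LocalStack
response = apiClient.create_api(
    Name=functionName + '-api',
    ProtocolType='HTTP',
    Target=lambdaFunctionARN
)

# allow API Gateway to invoke the Lambda function
lClient.add_permission(
    FunctionName=functionName,
    StatementId='apigateway-invoke',
    Action='lambda:InvokeFunction',
    Principal='apigateway.amazonaws.com'
)

print("Counter demo should be reachable at: " + response['ApiEndpoint'])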
example-projects/counter-demo/aws-lambda-localstack/stop.py
@@ -0,0 +1,64 @@
from datetime import date
import boto3
from botocore.exceptions import ClientError

################################################################################################
#
# Configuration Parameters
#
################################################################################################

# you need to create a bucket in S3, here in this demo it is called "cloudcomp-counter", but
# bucket names need to be worldwide unique ;) The demo looks for a file that is named
# "us-east-1" (same as our default region) in the bucket and expects a number in it to increase

groupNr = 22
currentYear = date.today().year

globallyUniqueS3GroupBucketName = "cloudcomp-counter-" + str(currentYear) + "-group" + str(groupNr)

# region = 'eu-central-1'
region = 'us-east-1'
functionName = 'cloudcomp-counter-lambda-demo'
# roleName = 'arn:aws:iam::309000625112:role/service-role/cloudcomp-counter-demo-role-6rs7pah3'
roleName = 'arn:aws:iam::919927306708:role/cloudcomp-s3-access'

# LocalStack endpoint, as used by the other scripts in this directory
endpoint_url = "http://localhost.localstack.cloud:4566"

################################################################################################
#
# boto3 code
#
################################################################################################


def cleanup_s3_bucket(s3_bucket):
    # Deleting objects
    for s3_object in s3_bucket.objects.all():
        s3_object.delete()
    # Deleting object versions if S3 versioning is enabled
    for s3_object_ver in s3_bucket.object_versions.all():
        s3_object_ver.delete()


boto3.setup_default_session(region_name=region)
s3Client = boto3.client('s3', endpoint_url=endpoint_url)
s3Resource = boto3.resource('s3', endpoint_url=endpoint_url)
lClient = boto3.client('lambda', endpoint_url=endpoint_url)


print("Deleting old function...")
print("------------------------------------")
try:
    response = lClient.delete_function(
        FunctionName=functionName,
    )
except lClient.exceptions.ResourceNotFoundException:
    print('Function not available. No need to delete it.')

print("Deleting old bucket...")
print("------------------------------------")

try:
    currentBucket = s3Resource.Bucket(globallyUniqueS3GroupBucketName)
    cleanup_s3_bucket(currentBucket)
    currentBucket.delete()
except ClientError as e:
    print(e)
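To verify that the teardown worked, the remaining resources can be listed with the clients defined above, for example:

print("Remaining buckets: " + str([b['Name'] for b in s3Client.list_buckets()['Buckets']]))
print("Remaining functions: " + str([f['FunctionName'] for f in lClient.list_functions()['Functions']]))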