-
Notifications
You must be signed in to change notification settings - Fork 0
/
lambda.py
112 lines (82 loc) · 2.64 KB
/
lambda.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
"""
serialize Image Data
"""
import boto3
import base64
s3 = boto3.client('s3')
def lambda_handler(event, context):
    """Serialize the image at event["s3_bucket"]/event["s3_key"].

    Downloads the object from S3 and returns it base64-encoded so the
    image can travel through the Step Function as part of the JSON
    payload.

    Args:
        event: dict with keys "s3_key" and "s3_bucket" naming the object.
        context: Lambda context object (unused).

    Returns:
        dict with "statusCode", the base64 "image_data" (str), the echoed
        "s3_bucket"/"s3_key", and an empty "inferences" list for the next
        state to fill in.
    """
    # Get the s3 address from the Step Function event input
    key = event["s3_key"]
    bucket = event["s3_bucket"]

    # /tmp is the only writable filesystem path inside Lambda
    s3.download_file(bucket, key, "/tmp/image.png")

    # Read and base64-encode the image. Decode to str: bytes are not
    # JSON-serializable, so returning them raw would make the Lambda
    # response (and the Step Function payload) fail to serialize.
    with open("/tmp/image.png", "rb") as f:
        image_data = base64.b64encode(f.read()).decode("utf-8")

    # Pass the data back to the Step Function
    print("Event:", event.keys())
    return {
        'statusCode': 200,
        "image_data": image_data,
        "s3_bucket": bucket,
        "s3_key": key,
        "inferences": []
    }
"""
Image Classifier
"""
import os
import io
import boto3
import json
import base64
# Set the environment variable
ENDPOINT_NAME = "image-classification-2023-10-20-20-46-51-552"
# Initialize the SageMaker runtime client
runtime = boto3.client('runtime.sagemaker')
def lambda_handler(event, context):
    """Classify the base64-encoded image in event["image_data"].

    Sends the decoded PNG bytes to the SageMaker endpoint and attaches
    the returned list of per-class confidences to the event.

    Args:
        event: dict with "image_data" (base64 str), "s3_bucket", "s3_key".
        context: Lambda context object (unused).

    Returns:
        dict echoing the input fields plus "inferences" from the model.
    """
    print(event.keys())

    # Decode the image data (b64decode accepts str or bytes input)
    image = base64.b64decode(event["image_data"])

    # Invoke the deployed endpoint directly through the runtime client;
    # the model expects raw PNG bytes, hence ContentType image/png.
    response = runtime.invoke_endpoint(
        EndpointName=ENDPOINT_NAME,
        ContentType='image/png',
        Body=image,
    )

    # The endpoint body is a JSON array of confidences
    inferences = json.loads(response['Body'].read().decode())

    # We return the data back to the Step Function
    event["inferences"] = inferences
    return {
        'statusCode': 200,
        "image_data": event["image_data"],
        "s3_bucket": event["s3_bucket"],
        "s3_key": event["s3_key"],
        "inferences": event["inferences"]
    }
"""
Inference Confidence Filter
"""
import json
# Minimum confidence any single inference must exceed for the workflow
# to continue.
THRESHOLD = .93


def lambda_handler(event, context):
    """Fail the Step Function when no inference exceeds THRESHOLD.

    Args:
        event: dict containing "inferences" (list of confidence scores)
            plus "image_data", "s3_bucket", "s3_key" to echo through.
        context: Lambda context object (unused).

    Returns:
        dict echoing the input fields, when the threshold is met.

    Raises:
        Exception: "THRESHOLD_CONFIDENCE_NOT_MET" when no score exceeds
            THRESHOLD (including an empty inferences list), which ends
            the Step Function execution with an error.
    """
    # Get the inferences from the event
    inferences = event["inferences"]

    # any(...) also handles an empty list (treated as "not met"),
    # where max([]) would crash with an unrelated ValueError.
    meets_threshold = any(score > THRESHOLD for score in inferences)

    # If our threshold is not met, end the Step Function with an error.
    # BUG FIX: the original `raise("...")` raised a plain string, which
    # is a TypeError ("exceptions must derive from BaseException") and
    # masked the intended error message.
    if not meets_threshold:
        raise Exception("THRESHOLD_CONFIDENCE_NOT_MET")

    return {
        'statusCode': 200,
        "image_data": event["image_data"],
        "s3_bucket": event["s3_bucket"],
        "s3_key": event["s3_key"],
        "inferences": event["inferences"]
    }