AWS Lambda function with DynamoDB giving schema Error - python

I have a below aws lambda code which is basically for ONTAP FileSystem monitoring and works if I do not integrate that to Dynamodb, while using this for now its giving me an error element does not match the schema.
Being a First time user of DynamoDB, i would love you seek some guidance on this.
Code:
import json
import os
import boto3
from datetime import datetime, timedelta
from boto3.dynamodb.conditions import Key
from botocore.exceptions import ClientError
def lambda_handler(event, context):
fsx = boto3.client('fsx')
cloudwatch = boto3.client('cloudwatch')
ses = boto3.client('ses')
region_name = os.environ['AWS_REGION']
dynamodb = boto3.resource('dynamodb', region_name=region_name)
dbtable = dynamodb.Table('FsxNMonitorFsx')
now = datetime.utcnow()
start_time = (now - timedelta(minutes=5)).strftime('%Y-%m-%dT%H:%M:%SZ')
end_time = now.strftime('%Y-%m-%dT%H:%M:%SZ')
table = []
result = []
next_token = None
while True:
if next_token:
response = fsx.describe_file_systems(NextToken=next_token)
else:
response = fsx.describe_file_systems()
for filesystem in response.get('FileSystems'):
filesystem_id = filesystem.get('FileSystemId')
table.append(filesystem_id)
next_token = response.get('NextToken')
if not next_token:
break
try:
# Create the DynamoDB table if it does not exist
dbtable = dynamodb.create_table(
TableName='FsxNMonitorFsx',
KeySchema=[
{
'AttributeName': filesystem_id,
'KeyType': 'HASH'
},
{
'AttributeName': 'alert_sent',
'KeyType': 'RANGE'
}
],
AttributeDefinitions=[
{
'AttributeName': filesystem_id,
'AttributeType': 'S'
},
{
'AttributeName': 'alert_sent',
'AttributeType': 'B'
}
],
ProvisionedThroughput={
'ReadCapacityUnits': 10,
'WriteCapacityUnits': 10
}
)
# Wait for the table to be created
dbtable.meta.client.get_waiter('table_exists').wait(TableName='FsxNMonitorFsx')
except ClientError as e:
if e.response['Error']['Code'] != 'ResourceInUseException':
raise
# Code to retrieve metric data and check if alert needs to be sent
for filesystem_id in table:
response = cloudwatch.get_metric_data(
MetricDataQueries=[
{
'Id': 'm1',
'MetricStat': {
'Metric': {
'Namespace': 'AWS/FSx',
'MetricName': 'StorageCapacity',
'Dimensions': [
{
'Name': 'FileSystemId',
'Value': filesystem_id
},
{
'Name': 'StorageTier',
'Value': 'SSD'
},
{
'Name': 'DataType',
'Value': 'All'
}
]
},
'Period': 60,
'Stat': 'Sum'
},
'ReturnData': True
},
{
'Id': 'm2',
'MetricStat': {
'Metric': {
'Namespace': 'AWS/FSx',
'MetricName': 'StorageUsed',
'Dimensions': [
{
'Name': 'FileSystemId',
'Value': filesystem_id
},
{
'Name': 'StorageTier',
'Value': 'SSD'
},
{
'Name': 'DataType',
'Value': 'All'
}
]
},
'Period': 60,
'Stat': 'Sum'
},
'ReturnData': True
}
],
StartTime=start_time,
EndTime=end_time
)
storage_capacity = response['MetricDataResults'][0]['Values']
storage_used = response['MetricDataResults'][1]['Values']
if storage_capacity:
storage_capacity = storage_capacity[0]
else:
storage_capacity = None
if storage_used:
storage_used = storage_used[0]
else:
storage_used = None
if storage_capacity and storage_used:
percent_used = (storage_used / storage_capacity) * 100
else:
percent_used = None
######################################################################
### Check if an alert has already been sent for this filesystem_id ###
######################################################################
response = dbtable.get_item(
Key={'filesystem_id': filesystem_id}
)
if 'Item' in response:
alert_sent = response['Item']['alert_sent']
else:
alert_sent = False
# Send alert if storage usage exceeds threshold and no alert has been sent yet
if percent_used > 80 and not alert_sent:
email_body = "Dear Team,<br><br> Please Find the FSx ONTAP FileSystem Alert Report Below for the {} region.".format(region)
email_body += "<br></br>"
email_body += "<table>"
email_body += "<tr>"
email_body += "<th style='text-align: left'>FileSystemId</th>"
email_body += "<th style='text-align: right'>Used %</th>"
email_body += "</tr>"
for fs in result:
if fs['percent_used'] > 80:
email_body += "<tr>"
email_body += "<td style='text-align: left'>" + fs['filesystem_id'] + "</td>"
email_body += "<td style='text-align: right; color:red;'>" + str(round(fs['percent_used'], 2)) + "%</td>"
email_body += "</tr>"
email_body += "</table>"
email_body += "<br></br>"
email_body += "Sincerely,<br>AWS FSx Alert Team"
email_subject = "FSx ONTAP FileSystem Alert Report - {}".format(region)
ses.send_email(
Source='test#example.com',
Destination={
'ToAddresses': ['test#example.com'],
},
Message={
'Subject': {
'Data': email_subject
},
'Body': {
'Html': {
'Data': email_body
}
}
}
)
dbtable.update_item(
TableName='FsxNMonitorFsx',
Key={'filesystem_id': {'S': filesystem_id}},
UpdateExpression='SET alert_sent = :val',
ExpressionAttributeValues={':val': {'BOOL': True}}
)
return {
'statusCode': 200,
'body': json.dumps('Email sent!')
}
Result without using DB:
FileSystemId Used %
fs-0c700005a823f755c 87.95%
fs-074999ef7111b8315 84.51%
Execution Error:
[ERROR] ClientError: An error occurred (ValidationException) when calling the GetItem operation: The provided key element does not match the schema
Code edit based on the feedback:
import os
import boto3, json
from datetime import datetime, timedelta
from boto3.dynamodb.conditions import Key
from botocore.exceptions import ClientError
fsx = boto3.client('fsx')
cloudwatch = boto3.client('cloudwatch')
ses = boto3.client('ses')
region_name = os.environ['AWS_REGION']
dynamodb = boto3.resource('dynamodb', region_name=region_name)
def lambda_handler(event, context):
now = datetime.utcnow()
start_time = (now - timedelta(minutes=5)).strftime('%Y-%m-%dT%H:%M:%SZ')
end_time = now.strftime('%Y-%m-%dT%H:%M:%SZ')
table = []
result = []
next_token = None
while True:
if next_token:
response = fsx.describe_file_systems(NextToken=next_token)
else:
response = fsx.describe_file_systems()
for filesystem in response.get('FileSystems'):
filesystem_id = filesystem.get('FileSystemId')
table.append(filesystem_id)
next_token = response.get('NextToken')
if not next_token:
break
try:
# Create the DynamoDB table if it does not exist
dbtable = dynamodb.Table('FsxNMonitorFsx')
dbtable = dynamodb.create_table(
TableName='FsxNMonitorFsx',
KeySchema=[
{
'AttributeName': 'filesystem_id',
'KeyType': 'HASH'
}
],
AttributeDefinitions=[
{
'AttributeName': 'filesystem_id',
'AttributeType': 'S'
}
],
ProvisionedThroughput={
'ReadCapacityUnits': 10,
'WriteCapacityUnits': 10
}
)
# Wait for the table to be created
dbtable.meta.client.get_waiter(
'table_exists').wait(TableName='FsxNMonitorFsx')
except ClientError as e:
if e.response['Error']['Code'] != 'ResourceInUseException':
raise
# Code to retrieve metric data and check if alert needs to be sent
for filesystem_id in table:
response = cloudwatch.get_metric_data(
MetricDataQueries=[
{
'Id': 'm1',
'MetricStat': {
'Metric': {
'Namespace': 'AWS/FSx',
'MetricName': 'StorageCapacity',
'Dimensions': [
{
'Name': 'FileSystemId',
'Value': filesystem_id
},
{
'Name': 'StorageTier',
'Value': 'SSD'
},
{
'Name': 'DataType',
'Value': 'All'
}
]
},
'Period': 60,
'Stat': 'Sum'
},
'ReturnData': True
},
{
'Id': 'm2',
'MetricStat': {
'Metric': {
'Namespace': 'AWS/FSx',
'MetricName': 'StorageUsed',
'Dimensions': [
{
'Name': 'FileSystemId',
'Value': filesystem_id
},
{
'Name': 'StorageTier',
'Value': 'SSD'
},
{
'Name': 'DataType',
'Value': 'All'
}
]
},
'Period': 60,
'Stat': 'Sum'
},
'ReturnData': True
}
],
StartTime=start_time,
EndTime=end_time
)
storage_capacity = response['MetricDataResults'][0]['Values']
storage_used = response['MetricDataResults'][1]['Values']
if storage_capacity:
storage_capacity = storage_capacity[0]
else:
storage_capacity = None
if storage_used:
storage_used = storage_used[0]
else:
storage_used = None
if storage_capacity and storage_used:
percent_used = (storage_used / storage_capacity) * 100
else:
percent_used = None
######################################################################
### Check if an alert has already been sent for this filesystem_id ###
######################################################################
response = dbtable.get_item(
Key={'filesystem_id': filesystem_id}
)
if 'Item' in response:
alert_sent = response['Item']['alert_sent']
else:
alert_sent = False
# Send alert if storage usage exceeds threshold and no alert has been sent yet
if percent_used > 80 and not alert_sent:
email_body = "Dear Team,<br><br> Please Find the FSx ONTAP FileSystem Alert Report Below for the {} region.".format(
region_name)
email_body += "<br></br>"
email_body += "<table>"
email_body += "<tr>"
email_body += "<th style='text-align: left'>FileSystemId</th>"
email_body += "<th style='text-align: right'>Used %</th>"
email_body += "</tr>"
for fs in result:
if fs['percent_used'] > 80:
email_body += "<tr>"
email_body += "<td style='text-align: left'>" + \
fs['filesystem_id'] + "</td>"
email_body += "<td style='text-align: right; color:red;'>" + \
str(round(fs['percent_used'], 2)) + "%</td>"
email_body += "</tr>"
email_body += "</table>"
email_body += "<br></br>"
email_body += "Sincerely,<br>AWS FSx Alert Team"
email_subject = "FSx ONTAP FileSystem Alert Report - {}".format(
region_name)
ses.send_email(
Source='test#example.com',
Destination={
'ToAddresses': ['test#example.com'],
},
Message={
'Subject': {
'Data': email_subject
},
'Body': {
'Html': {
'Data': email_body
}
}
}
)
dbtable.put_item(
Item={
'filesystem_id': filesystem_id,
'alert_sent': now.strftime('%Y-%m-%d %H:%M:%S')
}
)
return {
'statusCode': 200,
'body': json.dumps('Email sent!')
}
Above doesnt through any error but send empty e-mail and keep Db also empty, i'm lost a bit

You have another problem on your lambda function as well.
You are creating table with variable of filesystem_id. I think you want to create table partition key as filesystem_id not with variable value of filesystem_id
dbtable = dynamodb.create_table(
TableName='FsxNMonitorFsx',
KeySchema=[
{
'AttributeName': 'filesystem_id',
'KeyType': 'HASH'
},
{
'AttributeName': 'alert_sent',
'KeyType': 'RANGE'
}
],
AttributeDefinitions=[
{
'AttributeName': 'filesystem_id',
'AttributeType': 'S'
},
{
'AttributeName': 'alert_sent',
'AttributeType': 'B'
}
],
ProvisionedThroughput={
'ReadCapacityUnits': 10,
'WriteCapacityUnits': 10
}
)
And you can not use get_item with only Hash_key you need you use query if you want to fetch data only with filesystem_id.
UPDATE LAMBDA CODE
import os
import boto3
import json
from datetime import datetime, timedelta
from boto3.dynamodb.conditions import Key
from botocore.exceptions import ClientError
fsx = boto3.client('fsx')
cloudwatch = boto3.client('cloudwatch')
ses = boto3.client('ses')
region_name = os.environ['AWS_REGION']
dynamodb = boto3.resource('dynamodb', region_name=region_name)
dbtable = dynamodb.Table('FsxNMonitorFsx')
def lambda_handler(event, context):
now = datetime.utcnow()
start_time = (now - timedelta(minutes=5)).strftime('%Y-%m-%dT%H:%M:%SZ')
end_time = now.strftime('%Y-%m-%dT%H:%M:%SZ')
filesystem_ids = []
result = []
next_token = None
# get all filesystem_ids
while True:
if next_token:
response = fsx.describe_file_systems(NextToken=next_token)
else:
response = fsx.describe_file_systems()
for filesystem in response.get('FileSystems'):
filesystem_id = filesystem.get('FileSystemId')
filesystem_ids.append(filesystem_id)
next_token = response.get('NextToken')
if not next_token:
break
# create table if not exist
# I think here is not good point to create table. (I prefer you create table outside of this lambda)
try:
# Create the DynamoDB table if it does not exist
dbtable = dynamodb.create_table(
TableName='FsxNMonitorFsx',
KeySchema=[
{
'AttributeName': 'filesystem_id',
'KeyType': 'HASH'
}
],
AttributeDefinitions=[
{
'AttributeName': 'filesystem_id',
'AttributeType': 'S'
}
],
ProvisionedThroughput={
'ReadCapacityUnits': 10,
'WriteCapacityUnits': 10
}
)
# Wait for the table to be created
dbtable.meta.client.get_waiter(
'table_exists').wait(TableName='FsxNMonitorFsx')
except ClientError as e:
if e.response['Error']['Code'] != 'ResourceInUseException':
raise
# Code to retrieve metric data and check if alert needs to be sent
for filesystem_id in filesystem_ids:
response = cloudwatch.get_metric_data(
MetricDataQueries=[
{
'Id': 'm1',
'MetricStat': {
'Metric': {
'Namespace': 'AWS/FSx',
'MetricName': 'StorageCapacity',
'Dimensions': [
{
'Name': 'FileSystemId',
'Value': filesystem_id
},
{
'Name': 'StorageTier',
'Value': 'SSD'
},
{
'Name': 'DataType',
'Value': 'All'
}
]
},
'Period': 60,
'Stat': 'Sum'
},
'ReturnData': True
},
{
'Id': 'm2',
'MetricStat': {
'Metric': {
'Namespace': 'AWS/FSx',
'MetricName': 'StorageUsed',
'Dimensions': [
{
'Name': 'FileSystemId',
'Value': filesystem_id
},
{
'Name': 'StorageTier',
'Value': 'SSD'
},
{
'Name': 'DataType',
'Value': 'All'
}
]
},
'Period': 60,
'Stat': 'Sum'
},
'ReturnData': True
}
],
StartTime=start_time,
EndTime=end_time
)
storage_capacity = response['MetricDataResults'][0]['Values']
storage_used = response['MetricDataResults'][1]['Values']
if storage_capacity:
storage_capacity = storage_capacity[0]
else:
storage_capacity = None
if storage_used:
storage_used = storage_used[0]
else:
storage_used = None
if storage_capacity and storage_used:
percent_used = (storage_used / storage_capacity) * 100
else:
percent_used = None
######################################################################
### Check if an alert has already been sent for this filesystem_id ###
######################################################################
response = dbtable.get_item(Key={'filesystem_id': filesystem_id})
if 'Item' in response:
alert_sent = response['Item']['alert_sent']
else:
alert_sent = False
# Send alert if storage usage exceeds threshold and no alert has been sent yet
if percent_used > 80 and not alert_sent:
result.append({'filesystem_id': filesystem_id, 'percent_used': percent_used})
header = f"""
Dear Team,<br><br> Please Find the FSx ONTAP FileSystem Alert Report Below for the {region_name} region.
<br></br>
<table>
<tr>
<th style='text-align: left'>FileSystemId</th>
<th style='text-align: right'>Used %</th>
</tr>
"""
body = ""
for fs in result:
body += f"""
<tr>
<td style='text-align: left'>{fs['filesystem_id']}</td>
<td style='text-align: right; color:red;'>{str(round(fs['percent_used'], 2))}%</td>
</tr>
"""
footer = f"""</table>
<br></br>
Sincerely,<br>AWS FSx Alert Team
FSx ONTAP FileSystem Alert Report - {region_name}
"""
email_body = header + body + footer
ses.send_email(
Source='test#example.com',
Destination={
'ToAddresses': ['test#example.com'],
},
Message={
'Subject': {
'Data': "Emai Subject"
},
'Body': {
'Html': {
'Data': email_body
}
}
}
)
for fs in result:
filesystem_id = fs['filesystem_id']
dbtable.put_item(
Item = {
'filesystem_id': filesystem_id,
'alert_sent': True
}
)
return {
'statusCode': 200,
'body': json.dumps('Email sent!')
}

You are setting your table with a Partition Key and Sort Key, but your GetItem only indicates the Partition Key. You can do one of two things:
Supply Sort Key also
response = dbtable.get_item(
Key={
'filesystem_id': filesystem_id,
'alert_sent': alert_value
}
)
Use Query
Note: This option will return multiple items, if multiple items should exist for a given filesystem_id
response = dbtable.query(
KeyConditionExpression='#id=:id',
ExpressionAttributeValues={':id':filesystem_id},
ExpressionAttributeNames={'#id':'filesystem_id'}
)
Table Creation
filesystem_id should be a string not your variables value.
KeySchema=[
{
'AttributeName': 'filesystem_id',
'KeyType': 'HASH'
},
{
'AttributeName': 'alert_sent',
'KeyType': 'RANGE'
}
],
Lambda clients
Clients should be created outside of the request handler
import json
import os
import boto3
from datetime import datetime, timedelta
from boto3.dynamodb.conditions import Key
from botocore.exceptions import ClientError
fsx = boto3.client('fsx')
cloudwatch = boto3.client('cloudwatch')
ses = boto3.client('ses')
region_name = os.environ['AWS_REGION']
dynamodb = boto3.resource('dynamodb', region_name=region_name)
dbtable = dynamodb.Table('FsxNMonitorFsx')
def lambda_handler(event, context):
Code
import os
import boto3, json
from datetime import datetime, timedelta
from boto3.dynamodb.conditions import Key
from botocore.exceptions import ClientError
fsx = boto3.client('fsx')
cloudwatch = boto3.client('cloudwatch')
ses = boto3.client('ses')
region_name = os.environ['AWS_REGION']
dynamodb = boto3.resource('dynamodb', region_name=region_name)
dbtable = dynamodb.Table('FsxNMonitorFsx')
def lambda_handler(event, context):
now = datetime.utcnow()
start_time = (now - timedelta(minutes=5)).strftime('%Y-%m-%dT%H:%M:%SZ')
end_time = now.strftime('%Y-%m-%dT%H:%M:%SZ')
table = []
result = []
next_token = None
while True:
if next_token:
response = fsx.describe_file_systems(NextToken=next_token)
else:
response = fsx.describe_file_systems()
for filesystem in response.get('FileSystems'):
filesystem_id = filesystem.get('FileSystemId')
table.append(filesystem_id)
next_token = response.get('NextToken')
if not next_token:
break
try:
# Create the DynamoDB table if it does not exist
dbtable = dynamodb.create_table(
TableName='FsxNMonitorFsx',
KeySchema=[
{
'AttributeName': 'filesystem_id',
'KeyType': 'HASH'
}
],
AttributeDefinitions=[
{
'AttributeName': 'filesystem_id',
'AttributeType': 'S'
}
],
ProvisionedThroughput={
'ReadCapacityUnits': 10,
'WriteCapacityUnits': 10
}
)
# Wait for the table to be created
dbtable.meta.client.get_waiter(
'table_exists').wait(TableName='FsxNMonitorFsx')
except ClientError as e:
if e.response['Error']['Code'] != 'ResourceInUseException':
raise
# Code to retrieve metric data and check if alert needs to be sent
for filesystem_id in table:
response = cloudwatch.get_metric_data(
MetricDataQueries=[
{
'Id': 'm1',
'MetricStat': {
'Metric': {
'Namespace': 'AWS/FSx',
'MetricName': 'StorageCapacity',
'Dimensions': [
{
'Name': 'FileSystemId',
'Value': filesystem_id
},
{
'Name': 'StorageTier',
'Value': 'SSD'
},
{
'Name': 'DataType',
'Value': 'All'
}
]
},
'Period': 60,
'Stat': 'Sum'
},
'ReturnData': True
},
{
'Id': 'm2',
'MetricStat': {
'Metric': {
'Namespace': 'AWS/FSx',
'MetricName': 'StorageUsed',
'Dimensions': [
{
'Name': 'FileSystemId',
'Value': filesystem_id
},
{
'Name': 'StorageTier',
'Value': 'SSD'
},
{
'Name': 'DataType',
'Value': 'All'
}
]
},
'Period': 60,
'Stat': 'Sum'
},
'ReturnData': True
}
],
StartTime=start_time,
EndTime=end_time
)
storage_capacity = response['MetricDataResults'][0]['Values']
storage_used = response['MetricDataResults'][1]['Values']
if storage_capacity:
storage_capacity = storage_capacity[0]
else:
storage_capacity = None
if storage_used:
storage_used = storage_used[0]
else:
storage_used = None
if storage_capacity and storage_used:
percent_used = (storage_used / storage_capacity) * 100
else:
percent_used = None
######################################################################
### Check if an alert has already been sent for this filesystem_id ###
######################################################################
response = dbtable.get_item(
Key={'filesystem_id': filesystem_id}
)
if 'Item' in response:
alert_sent = response['Item']['alert_sent']
else:
alert_sent = False
# Send alert if storage usage exceeds threshold and no alert has been sent yet
if percent_used > 80 and not alert_sent:
email_body = "Dear Team,<br><br> Please Find the FSx ONTAP FileSystem Alert Report Below for the {} region.".format(
region_name)
email_body += "<br></br>"
email_body += "<table>"
email_body += "<tr>"
email_body += "<th style='text-align: left'>FileSystemId</th>"
email_body += "<th style='text-align: right'>Used %</th>"
email_body += "</tr>"
for fs in result:
if fs['percent_used'] > 80:
email_body += "<tr>"
email_body += "<td style='text-align: left'>" + \
fs['filesystem_id'] + "</td>"
email_body += "<td style='text-align: right; color:red;'>" + \
str(round(fs['percent_used'], 2)) + "%</td>"
email_body += "</tr>"
email_body += "</table>"
email_body += "<br></br>"
email_body += "Sincerely,<br>AWS FSx Alert Team"
email_subject = "FSx ONTAP FileSystem Alert Report - {}".format(
region_name)
ses.send_email(
Source='test#example.com',
Destination={
'ToAddresses': ['test#example.com'],
},
Message={
'Subject': {
'Data': email_subject
},
'Body': {
'Html': {
'Data': email_body
}
}
}
)
dbtable.update_item(
TableName='FsxNMonitorFsx',
Key={'filesystem_id': {'S': filesystem_id}},
UpdateExpression='SET alert_sent = :val',
ExpressionAttributeValues={':val': {'BOOL': True}}
)
return {
'statusCode': 200,
'body': json.dumps('Email sent!')
}

Related

How to use Zeep to skip first elements and only pass second elements in Python

I have SOAP request example below. Would like to skip "Request_References" elements and only pass "Response_Filter" using Zeep
<?xml version="1.0" encoding="UTF-8"?>
<wd:Get_Workers_Request xmlns:wd="urn:com.workday/bsvc" wd:version="v38.0">
<wd:Request_References
wd:Skip_Non_Existing_Instances="true"
wd:Ignore_Invalid_References="true">
<wd:Worker_Reference>
<wd:ID wd:type="Employee_ID">abcdef</wd:ID>
</wd:Worker_Reference>
</wd:Request_References>
<wd:Response_Filter>
<wd:As_Of_Effective_Date>2022-09-03</wd:As_Of_Effective_Date>
<wd:As_Of_Entry_DateTime>2022-09-03T14:26:34</wd:As_Of_Entry_DateTime>
<wd:Page>1</wd:Page>
<wd:Count>100</wd:Count>
</wd:Response_Filter>
</wd:Get_Workers_Request>
If I pass request_dict as below, it works
request_dict = {
'Worker_Reference': {
'ID': {
'type': 'Employee_ID',
'_value_1': employee_id
},
'Descriptor': None
},
'Skip_Non_Existing_Instances': None,
'Ignore_Invalid_References': None
}
If I want to skip "Request_References" elements using code below, then it doesn't work.
client = Client(url, wsse=UsernameToken(user, password), plugins=[history])
request_dict = {
'Request_References': xsd.SkipValue,
'Response_Filter': {
'Page': '1',
'Count': '1'
}
}
client.service.Get_Workers(request_dict)
Reall
Figured it out. Here is the answer
request_dict = {
'Worker_Reference': {
'ID': {
'type': 'Employee_ID',
'_value_1': employee_id
},
'Descriptor': None
},
'Skip_Non_Existing_Instances': None,
'Ignore_Invalid_References': None
}
filter_dict = {
'Page':1,
'Count': 1
}
try :
print (client.service.Get_Workers(Request_References=request_dict,Response_Filter=filter_dict ))
except Exception:
pass

CloudTrail logs in DyanmoDB using Boto

I am working on a boto script that will create an IAM Policy and store it's attributes in a Dynamodb table. I have a python function which calls from another file for attributes like region, instance_type, ebs_volume_size, meta_template_name, start_time, end_time. While writing the code for Cloudtrail I am getting an error for putItem saying
"An error occurred (ValidationException) when calling the CreateTable operation: Invalid KeySchema: Some index key attribute have no definition".
This is my code I am not sure what is wrong.
import jmespath
import boto3
import sys
import json
import time
import meta_templates
from jinja2 import Template
iam = boto3.client('iam')
sts = boto3.client('sts')
ec2 = boto3.resource('ec2')
cloudtrail = boto3.client('cloudtrail')
s3 = boto3.client('s3')
sqs = boto3.client('sqs')
lambd = boto3.client('lambda')
dynamodb = boto3.resource('dynamodb')
###########################
##### Global variables ####
###########################
region="us-east-2"
instance_type="t2.micro"
ebs_volume_size="20"
meta_template_name="ec2_policy_meta_template"
###############################
start_time_1 = input("What's the start time")
end_time1 = input("What's the end time")
def create_aws_iam_policy_template(**kwargs):
template_data = {}
template_data["region"] = kwargs.get('region')
template_data["start_time"] = kwargs.get('end_time')
template_data["end_time"] = kwargs.get('start_time')
template_data["instance_types"] = kwargs.get('instance_type')
template_data["ebs_volume_size"] = kwargs.get('ebs_volume_size')
template_data["meta_template_name"] = kwargs.get('meta_template_name')
meta_template_dict = getattr(meta_templates, template_data["meta_template_name"])
meta_template_json = json.dumps(meta_template_dict)
template_json = Template(meta_template_json).render(template_data)
return template_json
template_json = create_aws_iam_policy_template(
region=region,
instance_type=instance_type,
ebs_volume_size=ebs_volume_size,
meta_template_name=meta_template_name,
start_time = start_time_1,
end_time = end_time1
)
print(template_json)
#Create S3 Bucket for CloudTrail
# Create a bucket policy
bucket_name = 'goodbucket3'
bucket_policy = {
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Principal": {"Service": "cloudtrail.amazonaws.com"},
"Action": "s3:GetBucketAcl",
"Resource": f"arn:aws:s3:::{bucket_name}"
},
{
"Effect": "Allow",
"Principal": {"Service": "cloudtrail.amazonaws.com"},
"Action": "s3:PutObject",
"Resource": f"arn:aws:s3:::{bucket_name}/AWSLogs/562922379100/*",
"Condition": {"StringEquals": {"s3:x-amz-acl": "bucket-owner-full-control"}}
}
]
}
# Convert the policy from JSON dict to string
bucket_policy = json.dumps(bucket_policy)
# Set the new policy
s3.put_bucket_policy(Bucket='goodbucket3', Policy=bucket_policy)
result = s3.get_bucket_policy(Bucket='goodbucket3')
logs = cloudtrail.create_trail(
Name='GoodTrail',
S3BucketName='goodbucket3',
)
response = cloudtrail.start_logging(
Name= f"arn:aws:cloudtrail:us-east-1:562922379100:trail/GoodTrail"
)
table =dynamodb.create_table(
TableName='GoodTable',
KeySchema=[
{
'AttributeName': 'Content',
'KeyType': 'HASH' # Partition key
},
{
'AttributeName': 'Details',
'KeyType': 'HASH' # Sort key
}
],
AttributeDefinitions=[
{
"AttributeName": "Content",
"AttributeType": "S"
}
],
ProvisionedThroughput={
"ReadCapacityUnits": 1,
"WriteCapacityUnits": 1
}
)
time.sleep(20)
table = dynamodb.Table('GoodTable')
response = table.put_item(
Item= {
'Content': 'Volume Size',
'Details': f'{ebs_volume_size}',
}
)
response = table.put_item(
Item= {
'Content': 'Instance Type',
'Details': f'{instance_type}',
}
)
response = table.put_item(
Item= {
'Content': 'Region',
'Details': f'{region}',
}
)
from the documentation PutItem
When you add an item, the primary key attributes are the only required attributes. Attribute values cannot be null.
You declared your Schema with S3BucketName as the Partition key.
],
KeySchema=[
{
"AttributeName": "S3BucketName",
"KeyType": "HASH"
}
],
So when you add an item you have to provide that too.
For example:
KeySchema=[
{
'AttributeName': 'year',
'KeyType': 'HASH' # Partition key
},
{
'AttributeName': 'title',
'KeyType': 'RANGE' # Sort key
}
]
response = table.put_item(
Item={
'year': year,
'title': title,
'info': {
'plot': plot,
'rating': rating
}
}
)
Getting Started Developing with Python and DynamoDB

How do I append a list to my DynamoDB table using Python?

I have an existing DynamoDB table, and I want to write some Python code to append an attribute (of type List) to the table. Here is what I tried:
users.put_item(
Item={
"new_attribute": []
}
)
But this didn't work. I looked everywhere online but couldn't find anything, I know I must be missing something basic. Any help?
Here is a full example which works
### Simulating an Insert and Update to a List
#Create Table
import boto3
dynamodb = boto3.resource('dynamodb')
try:
table = dynamodb.create_table(
TableName='Test_list',
KeySchema=[
{
'AttributeName': '_id',
'KeyType': 'HASH' # Partition key
}
],
AttributeDefinitions=[
{
'AttributeName': '_id',
'AttributeType': 'N'
}
],
ProvisionedThroughput={
'ReadCapacityUnits': 5,
'WriteCapacityUnits': 5
}
)
except ClientError as e:
if e.response['Error']['Code']:
print(e.response['Error']['Message'])
print( e.response)
## Add a record with a list
table= dynamodb.Table('Test_list')
ll=['one','two']
resp=table.put_item(
Item={
'_id': 1,
'mylist': ll
}
)
#Update the list
new_ll=['three','four']
response = table.update_item(
Key={
'_id': 1
},
UpdateExpression="SET #l = list_append(#l, :vals)",
ExpressionAttributeNames={
"#l": 'mylist'
},
ExpressionAttributeValues={
":vals": new_ll
}
)
# fetch the record to verify
resp=table.get_item(Key={'_id':1})
resp['Item']
You will see the output :
{'_id': Decimal('1'), 'mylist': ['one', 'two', 'three', 'four']}
import boto3
dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table('<your-ddb-table-name>')
table.update_item(
Key={
'PK': '<pk>',
'SK': '<sk>'
},
UpdateExpression='SET new_attribute = :list',
ExpressionAttributeValues={
':list': []
}
)

Access the user utterance(first) which triggers the bot(lambda function) in Python aws lex?

I am using python 2.7 trying to achieve the user utterance so after I can prompt it back to user.
I tried
mess = intent_request['currentIntent']['slots']['drink']['inputTranscript']
I want to access the user utterance(current utterance), example "can i order pizza".
I am using python 2.7 aws lambda.
How can i get this utterance?
My code:
import os
import logging
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
def lambda_handler(event,context):
logger.debug('event.bot.name={}'.format(event['bot']['name']))
return dispatch(event)
def dispatch(intent_request):
intent_name = intent_request['currentIntent']['name']
#prompt = intent_request['currentIntent']['inputTranscript']
if intent_name == 'BurgerType':
return get_burger(intent_request)
if intent_name == 'drinks':
return get_drink(intent_request)
def get_drink(intent_request):
slot = intent_request['currentIntent']['slots']
value2 = intent_request['currentIntent']['slots']['drink']
mess = intent_request['currentIntent']['slots']['drink']['inputTranscript']
session_attributes = {}
return close(
session_attributes,
'Fulfilled',{
'contentType':'PlainText',
'content':'Your order of 000'+mess
})
def get_burger(intent_request):
slots = intent_request['currentIntent']['slots']
value = intent_request['currentIntent']['slots']['Burgers']
#value2 = intent_request['currentIntent']['slots']['cheese']
session_attributes = {}
return close(
session_attributes,
'Fulfilled',{
'contentType': 'PlainText',
'content':'Your order of '+value+' burger '#+value2+'has been placed'
})
def close(session_attributes, fulfillment_state, message):
response = {
'sessionAttributes': session_attributes,
'dialogAction':{
'type':'Close',
'fulfillmentState': fulfillment_state,
'message': message
}
}
return response
As you can see the below example request, the utterance by the user i.e the user input is stored in 'inputTranscript' in the 'intent_request' object.
{
'messageVersion': '1.0',
'invocationSource': 'DialogCodeHook',
'userId': 'some_user_id',
'sessionAttributes': {},
'requestAttributes': None,
'bot':
{'name': 'bot_name',
'alias': '$LATEST', 'version': '$LATEST'
},
'outputDialogMode': 'Text',
'currentIntent':
{
'name': 'intent_name',
'slots':
{
'slot1': 'value1'
},
'slotDetails':
{
'slot1':
{
'resolutions': [],
'originalValue': 'value1'
}
},
'confirmationStatus': 'None'
},
'inputTranscript': 'what am i saying to this chatbot'
}
So you can access it using below code:
mess = intent_request['inputTranscript']
Hope it helps.

Paypal Transactionsearch using Python

I have the necessary authentication details and I'm trying to do a TransactionSearch. I keep getting an error:
TIMESTAMP=2013%2d09%2d07T19%3a06%3a35Z&CORRELATIONID=b7af040415e92&ACK=Failure&VERSION=0%2e000000&BUILD=7507921&L_ERRORCODE0=10002&L_SHORTMESSAGE0=Authentication%2fAuthorization%20Failed&L_LONGMESSAGE0=You%20do%20not%20have%20permissions%20to%20make%20this%20API%20call&L_SEVERITYCODE0=Error
Here is my code:
(timestamp, signature) = signaturegen.getAuthHeader(apiUser=settings.USERNAME, apiPass=settings.PASSWORD, accessTok=res2["token"], secTok=res2["tokenSecret"], httpMethod="POST", scriptURI="https://api-3t.sandbox.paypal.com/nvp")
#the above operation is used to generate the timestamp and signature
headers = {"X-PAYPAL-AUTHORIZATION": "timestamp="+<timestamp>+",token="+<token>+",signature="+<signature>, "SUBJECT": settings.<API_USERNAME>}
data = {
"METHOD": "TransactionSearch",
"STARTDATE": "2012-01-01T05:38:48Z",
}
req= urllib2.Request("https://api-3t.sandbox.paypal.com/nvp", simplejson.dumps(data), headers)
res = urllib2.urlopen(req).read()
I finally managed to fix the code, here is the complete version:
import ast
import signaturegen
headers = {
"X-PAYPAL-SECURITY-USERID": "xxxxxxxxx.xxxx.com",
"X-PAYPAL-SECURITY-PASSWORD": "xxxxxxxx",
"X-PAYPAL-SECURITY-SIGNATURE": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
"X-PAYPAL-REQUEST-DATA-FORMAT": "JSON",
"X-PAYPAL-RESPONSE-DATA-FORMAT": "JSON",
"X-PAYPAL-APPLICATION-ID": "APP-80W284485P519543T", #APP ID for sandbox
}
headers = {
"X-PAYPAL-SECURITY-USERID": settings.USERNAME,
"X-PAYPAL-SECURITY-PASSWORD": settings.PASSWORD,
"X-PAYPAL-SECURITY-SIGNATURE": settings.SIGNATURE,
"X-PAYPAL-REQUEST-DATA-FORMAT": "JSON",
"X-PAYPAL-RESPONSE-DATA-FORMAT": "JSON",
"X-PAYPAL-APPLICATION-ID": "APP-80W284485P519543T"
}
data = {"scope":"TRANSACTION_SEARCH", "callback":"http://www.example.com/success.html", "requestEnvelope": {"errorLanguage":"en_US"}}
req = urllib2.Request("https://svcs.sandbox.paypal.com/Permissions/RequestPermissions/", simplejson.dumps(data), headers)
res = ast.literal_eval(urllib2.urlopen(req).read())
token = res['token']
red_url = "https://www.sandbox.paypal.com/cgi-bin/webscr?cmd=_grant-permission&request_token=%s" % token
if red_url:
return HttpResponseRedirect(red_url)
token = "xxxxxxxxxxxxxxxxxx"
verification = "xxxxxxxxxxxxxxxxxx"
headers2 = {
"X-PAYPAL-SECURITY-USERID": "xxxxxxxxxxxx",
"X-PAYPAL-SECURITY-PASSWORD": "xxxxxxxxxxxxxxxx",
"X-PAYPAL-SECURITY-SIGNATURE": "xxxxxxxxxxxxx",
"X-PAYPAL-REQUEST-DATA-FORMAT": "JSON",
"X-PAYPAL-RESPONSE-DATA-FORMAT": "JSON",
"X-PAYPAL-APPLICATION-ID": "APP-80W284485P519543T",
}
url = "https://svcs.sandbox.paypal.com/Permissions/GetAccessToken/"
dat2 = {
"requestEnvelope": {"errorLanguage":"en_US"},
"token": "AAAAAAAYcambja9iJfUw",
"verifier": "iVUJ6c-6ZNk8M6Q9hkC12A"}
req2 = urllib2.Request("https://svcs.sandbox.paypal.com/Permissions/GetAccessToken/", simplejson.dumps(dat2), headers2)
res2 = ast.literal_eval(urllib2.urlopen(req2).read())
(timestamp, signature) = signaturegen.getAuthHeader(apiUser="xxxxxxxxxxxx", apiPass="xxxxxxxxxxxxxxxx", accessTok=res2["token"], secTok=res2["tokenSecret"], httpMethod="POST", scriptURI="https://api-3t.sandbox.paypal.com/nvp")
ultimate = {"X-PAYPAL-AUTHORIZATION": "timestamp="+timestamp+",token="+res2["token"]+",signature="+signature, "SUBJECT": settings.USERNAME}
da = {
"METHOD": "TransactionSearch",
"STARTDATE": "2012-01-01T05:38:48Z",
}
req3 = urllib2.Request("https://api-3t.sandbox.paypal.com/nvp", urllib.urlencode(da), ultimate)
res3 = urllib2.urlopen(req3).read()

Categories

Resources