Retrieve secrets from AWS Secrets Manager in a Lambda function - python

I have been trying to use Secret Manager in the lambda function in AWS.
I am using Secret Manager to store my Redshift credentials and want to use the sample code given by the AWS Secret manager to retrieve the secret via the lambda function.
I have set up a Secret in secret manager which contains my redshift credentials (username, password)
I am trying to set up a lambda function which would get the secrets from Secret Manger: below is the sample code:
import boto3
import base64
from botocore.exceptions import ClientError
def lambda_handler(event, context):
def get_secret():
secret_name = "test/MySecret"
region_name = "eu-west-2"
# Create a Secrets Manager client
session = boto3.session.Session()
client = session.client(
service_name='secretsmanager',
region_name=region_name
)
try:
get_secret_value_response = client.get_secret_value(
SecretId = secret_name
)
except ClientError as e:
if e.response['Error']['Code'] == 'DecryptionFailureException':
# Secrets Manager can't decrypt the protected secret text using the provided KMS key.
# Deal with the exception here, and/or rethrow at your discretion.
raise e
elif e.response['Error']['Code'] == 'InternalServiceErrorException':
# An error occurred on the server side.
# Deal with the exception here, and/or rethrow at your discretion.
raise e
elif e.response['Error']['Code'] == 'InvalidParameterException':
# You provided an invalid value for a parameter.
# Deal with the exception here, and/or rethrow at your discretion.
raise e
elif e.response['Error']['Code'] == 'InvalidRequestException':
# You provided a parameter value that is not valid for the current state of the resource.
# Deal with the exception here, and/or rethrow at your discretion.
raise e
elif e.response['Error']['Code'] == 'ResourceNotFoundException':
# We can't find the resource that you asked for.
# Deal with the exception here, and/or rethrow at your discretion.
raise e
else:
# Decrypts secret using the associated KMS CMK.
# Depending on whether the secret is a string or binary, one of these fields will be populated.
if 'SecretString' in get_secret_value_response:
secret = get_secret_value_response['SecretString']
else:
decoded_binary_secret = base64.b64decode(get_secret_value_response['SecretBinary'])
I am getting the following errors whilst running the lambda function:
{
"errorMessage": "name 'secret_name' is not defined",
"errorType": "NameError",
"stackTrace": [
" File \"/var/task/lambda_function.py\", line 22, in lambda_handler\n SecretId = secret_name\n"
]
}
I have defined the secret_name at the start of the lambda function but I am getting the secret_name' is not defined error. Any suggestions how i can fix the issue

So the thing is python cannot get the value of secret_name variable, the reason is it is under a function
def get_secret():
secret_name = "test/MySecret"
region_name = "eu-west-2"
So instead if you just use
secret_name = "test/MySecret" without the function part, the sample code should work

Add VersionStage Parameter in your code!
get_secret_value_response = client.get_secret_value(
SecretId = secret_name,VersionStage='AWSCURRENT')

Related

Mock secret manager with pytest

I'm using the default Lambda function to rotate our Aurora password in AWS Code here: https://github.com/aws-samples/aws-secrets-manager-rotation-lambdas/blob/master/SecretsManagerRDSMariaDBRotationSingleUser/lambda_function.py
I have to test this code before it's deployed however I'm not sure how to do it. Can anybody help? I know the code is probably completely wrong, but just need some guidance.
I want to test the following function with Pytest.
def test_secret(service_client, arn, token):
"""Args:
service_client (client): The secrets manager service client
arn (string): The secret ARN or other identifier
token (string): The ClientRequestToken associated with the secret version
Raises:
ResourceNotFoundException: If the secret with the specified arn and stage does not exist
ValueError: If the secret is not valid JSON or valid credentials are found to login to the database
KeyError: If the secret json does not contain the expected keys
"""
# Try to login with the pending secret, if it succeeds, return
conn = get_connection(get_secret_dict(service_client, arn, "AWSPENDING", token))
if conn:
# This is where the lambda will validate the user's permissions. Uncomment/modify the below lines to
# tailor these validations to your needs
try:
with conn.cursor() as cur:
cur.execute("SELECT NOW()")
conn.commit()
finally:
conn.close()
logger.info("testSecret: Successfully signed into MariaDB DB with AWSPENDING secret in %s." % arn)
return
else:
logger.error("testSecret: Unable to log into database with pending secret of secret ARN %s" % arn)
raise ValueError("Unable to log into database with pending secret of secret ARN %s" % arn)
import lambda_function.py as testpass
import boto3
import moto import mock_secretsmanager
#Not sure where to get these values from to mock"
token = "akd93939-383838-999388"
arn = "secret-arn"
token = "9393939302931883487"
#mock_secretsmanager
def test_testsecret(mock_secret_manager):
conn = boto3.client("secretsmanager", region_name="us-east-1")
test = testpass.test_secret("secretsmanager", arn, token)
assert test
You can mock nested functions using mocking functionality. I renamed the function from test_secret to secret_test because pytest is not happy with that name:
import uuid
from unittest.mock import patch, call, MagicMock
import boto3
import pytest
from moto import mock_secretsmanager
from lambda_function import secret_test
class TestSecret:
TEST_SECRET_DICT = {'engine': 'mariadb', 'username': 'user123', 'password': 'test_pass', 'host': 'localhost'}
TEST_ARN = "secret-arn"
#pytest.fixture
def mock_get_secret_dict(self):
with patch('lambda_function.get_secret_dict') as mock:
mock.return_value = self.TEST_SECRET_DICT
yield mock
#pytest.fixture
def mock_get_connection(self):
with patch('lambda_function.get_connection') as mock:
yield mock
#mock_secretsmanager
def test_secret_test(self, mock_get_secret_dict, mock_get_connection):
mock_cursor = MagicMock()
mock_get_connection.return_value.cursor.return_value.__enter__.return_value = mock_cursor
request_token = str(uuid.uuid4())
sm_client = boto3.client("secretsmanager", region_name="us-east-1")
result = secret_test(sm_client, self.TEST_ARN, request_token)
assert result is None
assert mock_get_secret_dict.call_args == call(
sm_client, self.TEST_ARN, 'AWSPENDING', request_token
)
assert mock_get_connection.call_args == call(self.TEST_SECRET_DICT)
assert mock_get_connection.return_value.method_calls == [
call.cursor(),
call.commit(),
call.close()
]
assert mock_cursor.method_calls == [call.execute('SELECT NOW()')]

how to use python try except output (AWS SDK)

Requirement: Find out unencrypted s3 buckets from AWS account and add tag to it.
Implemented so far
import boto3
from botocore.exceptions import ClientError
# Retrieve the list of existing buckets
s3 = boto3.client('s3')
response = s3.list_buckets()
# Find out unencrypted bucket list
for bucket in response['Buckets']:
try:
enc = s3.get_bucket_encryption(Bucket=bucket["Name"])
except ClientError as e:
if e.response['Error']['Code'] == 'ServerSideEncryptionConfigurationNotFoundError':
print('Bucket with no server-side encryption: %s' % (bucket['Name']))
else:
print("Bucket with unexpected error: %s, unexpected error: %s" % (bucket['Name'], e))
Following line gives me the unencrypted bucketslist:
print('Bucket with no server-side encryption: %s' % (bucket['Name']))
Result:
Bucket with no server-side encryption: xyz1
Bucket with no server-side encryption: xyz2
Need support for following
I can get the list of unencrypted s3 buckets but not sure how to use the output from except python code and utilize unencrypted bucket names to add tag later.
If you declare a list outside of your try-catch, you can access it later on
E.g.
import boto3
from botocore.exceptions import ClientError
#this is our new list
buckets = []
# Retrieve the list of existing buckets
s3 = boto3.client('s3')
response = s3.list_buckets()
# Find out unencrypted bucket list
for bucket in response['Buckets']:
try:
enc = s3.get_bucket_encryption(Bucket=bucket["Name"])
except ClientError as e:
if e.response['Error']['Code'] == 'ServerSideEncryptionConfigurationNotFoundError':
#add the bucket name to our new list
buckets.append(bucket['Name'])
print('Bucket with no server-side encryption: %s' % (bucket['Name']))
else:
print("Bucket with unexpected error: %s, unexpected error: %s" % (bucket['Name'], e))
#now you can use the "buckets" variable and it will contain all the unencrypted buckets
for bucket in buckets:
print(bucket)

How to add Azure Python SDK exceptions to try/except statements?

I'm new to Python. I have a working, monolithic Python program that I'm breaking into individual Python functions. I'd like to use the try: - except: pattern to catch specific exceptions for each function.
Example: Create a Key Vault client and retrieve a secret from Key Vault
import logging
from azure.identity import DefaultAzureCredential
from azure.keyvault.secrets import SecretClient
credentials = DefaultAzureCredential()
def create_kv_client(kv_name, credentials):
kv_uri = 'https://' + kv_name + '.vault.azure.net'
kv_client = SecretClient(vault_url=kv_uri, credential=credentials)
return kv_client
kv_client = create_kv_client('mykeyvaultname', credentials)
def retrieve_secret(table_stg_acct_key, kv_client):
retrieved_account_key = kv_client.get_secret(table_stg_acct_key)
return retrieved_account_key
try:
retrieved_account_key = retrieve_secret('mykeyvaultsecretname', kv_client)
print(retrieved_account_key)
except:
logging.error('####### Failed to retrieve key from Key Vault #######')
raise BaseException
Rather than raise BaseException here, I'd like to use the Azure Core exceptions module and log the actual message in the exception.
How is the except: statement handled in the case where two exceptions could be raised?
Example: There could be two exceptions raised by the get_secret method.
If the Key Vault URL is incorrect, a ServiceRequestError is raised:
ServiceRequestError: <urllib3.connection.HTTPSConnection object at 0x000001BFA2299640>: Failed to establish a new connection: [Errno 11001] getaddrinfo failed
If the Key Vault secret name is incorrect, a ResourceNotFoundError is raised:
ResourceNotFoundError: (SecretNotFound) A secret with (name/id) notmykeyvaultsecretname was not found in this key vault. If you recently deleted this secret you may be able to recover it using the correct recovery command. For help resolving this issue, please see https://go.microsoft.com/fwlink/?linkid=2125182
How is this accomplished?
Do I have to import the azure core exception module?
An example of this pattern would be very helpful.
The exception will be caught in order of "except" clauses, but beware of the subclass tree, since a except will catch all subclasses as well. For instance, this one leads to unreachable code.
try:
# do something
except BaseException:
# do something with
except DerivedException:
# assuming DerivedException is an extension of BaseException, you can't reach that code
So put them in most specific first.
In your Azure situation, this brings to something like:
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ServiceRequestError,
ResourceNotFoundError,
AzureError
)
try:
# do KV stuff
except ClientAuthenticationError as e:
# Can occur if either tenant_id, client_id or client_secret is incorrect
logger.critical("Azure SDK was not able to connect to Key Vault", e)
except HttpResponseError as e:
# One reason is when Key Vault Name is incorrect
logger.critical("Possible wrong Vault name given", e)
except ServiceRequestError:
# Network error, I will let it raise to higher level
raise
except ResourceNotFoundError:
# Let's assume it's not big deal here, just let it go
pass
except AzureError as e:
# Will catch everything that is from Azure SDK, but not the two previous
logger.critical("Azure SDK was not able to deal with my query", e)
raise
except Exception as e:
# Anything else that is not Azure related (network, stdlib, etc.)
logger.critical("Unknown error I can't blame Azure for", e)
raise

Boto3 upload_file is silently failing

I am trying to upload a file in s3 bucket and the following code i have used to achieve the same.
Code
accessKey = ''
secretKey = ''
session = boto3.Session(aws_access_key_id = accessKey, aws_secret_access_key = secretKey,)
s3 = session.resource('s3')
try:
response =s3.Object(bucket_name,'sample.docx').upload_file(Filename='C:/Users/Anushka/Desktop/sample.docx')
except Exception as e:
return e
The code does not do anything not even raising any error and if I print "response", "None" gets printed on the shell. I am not able to understand what is the problem with the code.

S3 Default server side encryption on large number of buckets using Python boto3

Hi Iam trying to turn on default s3 encryption on all my buckets in an account using python boto3 script see below.
import boto3
from botocore.exceptions import ClientError
s3 = boto3.client('s3')
response = s3.list_buckets()
for bucket in response['Buckets']:
enc = s3.get_bucket_encryption(Bucket=bucket['Name'])
s3.put_bucket_encryption(
Bucket=bucket['Name'],
ServerSideEncryptionConfiguration={
'Rules': [
{
'ApplyServerSideEncryptionByDefault': {
'SSEAlgorithm': 'AES256'
}
},
]
}
)
But i am struggling with my code which is not working
gives error
File "apply.py", line 10, in <module>
enc = s3.get_bucket_encryption(Bucket=bucket['Name'])
File "/Users/hhaqqani/Library/Python/2.7/lib/python/site-packages/botocore/client.py", line 272, in _api_call
return self._make_api_call(operation_name, kwargs)
File "/Users/hhaqqani/Library/Python/2.7/lib/python/site-packages/botocore/client.py", line 576, in _make_api_call
raise error_class(parsed_response, operation_name)
botocore.exceptions.ClientError: An error occurred (ServerSideEncryptionConfigurationNotFoundError) when calling the GetBucketEncryption operation: The server side encryption configuration was not found
You are passing the wrong bucket name. Change Bucket=enc to Bucket=bucket['Name'] in your call to put_bucket_encryption.
Note also that the call to get_bucket_encryption will throw an exception if the bucket does not actually have encryption configured. While that might seem odd, that's the way it works (see boto3/issues/1899 for more details). So, to handle this potential exception:
SSECNF = 'ServerSideEncryptionConfigurationNotFoundError'
try:
bucket = client.get_bucket_encryption(Bucket=bucket['Name'])
# check current encryption here, if it's not what you want then update it
# check bucket['ServerSideEncryptionConfiguration']['Rules']
except client.exceptions.ClientError as e:
if e.response['Error']['Code'] == SSECNF:
s3.put_bucket_encryption(...)
else:
print("Unexpected error: %s" % e)
Please see below fixed code thanks #jarmod for quick help.
import boto3
from botocore.exceptions import ClientError
s3 = boto3.client('s3')
response = s3.list_buckets()
client = boto3.client('s3')
SSECNF = 'ServerSideEncryptionConfigurationNotFoundError'
for bucket in response['Buckets']:
try:
bucket = client.get_bucket_encryption(Bucket=bucket['Name'])
# check current encryption here, if it's not what you want then update it
# check bucket['ServerSideEncryptionConfiguration']['Rules']
except client.exceptions.ClientError as e:
if e.response['Error']['Code'] == SSECNF:
s3.put_bucket_encryption(Bucket=bucket['Name'],
ServerSideEncryptionConfiguration={
'Rules': [
{
'ApplyServerSideEncryptionByDefault': {
'SSEAlgorithm': 'AES256'
}
},
]
})
else:
print("Unexpected error: %s" % e)
```

Categories

Resources