BadDigest when calling the PutObject operation (S3, boto3, Python)

This code works fine:
import uuid
import boto3

def upload(request):
    try:
        get_file = request.POST['file'].file
        d_filename = 'foo-' + uuid.uuid4().hex
        s3 = boto3.resource('s3')
        s3.Bucket('Bucket_name').put_object(Key=d_filename, Body=get_file, ContentType=ContentType)
        return d_filename
    except Exception as e:
        log.error(str(e))
        return 'error'
but when I want to put the MD5 hash of the file in the filename, it throws this error: "An error occurred (BadDigest) when calling the PutObject operation (reached max retries: 4): The Content-MD5 you specified did not match what we received."
import hashlib
import boto3

def upload(request):
    try:
        get_file = request.POST['file'].file
        d_filename = 'foo-' + hashlib.md5(get_file.read()).hexdigest()
        s3 = boto3.resource('s3')
        s3.Bucket('Bucket_name').put_object(Key=d_filename, Body=get_file, ContentType=ContentType)
        return d_filename
    except Exception as e:
        log.error(str(e))
        return 'error'
I am not trying to calculate the MD5 of a file that is already uploaded, nor trying to set a Content-MD5 header on the upload; I just want the MD5 in the name of the file.
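For context on why this fails: hashlib.md5(get_file.read()) consumes the upload stream, so the file position sits at EOF when put_object sends the body. The body S3 receives then no longer matches the digest the SDK declared, which is exactly the BadDigest complaint. A minimal sketch of the usual fix, assuming the file object supports seek() (framework upload objects generally do); ContentType and log come from the question's surrounding code:
import hashlib
import boto3

def upload(request):
    try:
        get_file = request.POST['file'].file
        # Hash the full stream, then rewind so put_object re-reads it
        # from the start instead of uploading from EOF.
        d_filename = 'foo-' + hashlib.md5(get_file.read()).hexdigest()
        get_file.seek(0)
        s3 = boto3.resource('s3')
        s3.Bucket('Bucket_name').put_object(Key=d_filename, Body=get_file, ContentType=ContentType)
        return d_filename
    except Exception as e:
        log.error(str(e))
        return 'error'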

Related

ExpiredToken error when using python moto lib to mock s3

I am new to Python. Does anyone know how moto mock tests work? I am going to test code that fetches data from S3, converts it, and uploads the result back to S3. I used the moto lib to mock S3, but it shows "An error occurred (ExpiredToken) when calling the ListObjects operation" when my code calls S3.
Do I need to mock the AWS credentials? How should I mock them? (I checked the moto lib and there is no such thing as mock_arn/mock_credential.)
Thank you in advance.
Here is my code: inside the parse_requests method, it fetches data from S3, converts it, then uploads to S3.
class Test_processing_data(TestCase):
    def setUp(self):
        self.mock_s3 = mock_s3()
        self.mock_s3.start()
        self.mock_logs = mock_logs()
        self.mock_logs.start()
        self.bucket_region = "us-east-1"
        self.bucket_name = "test-bucket"
        self.s3_client = boto3.client("s3", region_name=self.bucket_region)
        self.s3_client.create_bucket(Bucket=self.bucket_name)

    # mock_s3
    def test_parse_requests(self):
        bucket_name = "test-bucket"
        prefix = "test/model/main/"
        execution_date = "2022/09/13/12"
        parse_requests(execution_date, bucket_name, prefix)
Here is the error message:
self = <botocore.client.S3 object at 0x10abf18e0>
operation_name = 'ListObjects'
api_params = {'Bucket': 'campaign-performance-forecasting-offline', 'EncodingType': 'url', 'Prefix': 'test/model/main/2022/09/13/12'}
...
        if http.status_code >= 300:
            error_code = parsed_response.get("Error", {}).get("Code")
            error_class = self.exceptions.from_code(error_code)
>           raise error_class(parsed_response, operation_name)
E           botocore.exceptions.ClientError: An error occurred (ExpiredToken) when calling the ListObjects operation: The provided token has expired.

../../venv/lib/python3.8/site-packages/botocore/client.py:914: ClientError
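An ExpiredToken from inside a moto test usually means the request was signed with real (and stale) credentials from your environment instead of being handled by the mock: either parse_requests creates its boto3 client before mock_s3.start() runs, or expired session credentials are being picked up from the environment. A sketch of the commonly recommended guard, setting dummy credentials before the mocks start (the env var names are the standard AWS ones; parse_requests is assumed to create its client only after being called):
import os
from unittest import TestCase

import boto3
from moto import mock_s3

class Test_processing_data(TestCase):
    def setUp(self):
        # Dummy credentials: nothing real (expired or otherwise) can leak
        # into clients created while the mock is active.
        os.environ["AWS_ACCESS_KEY_ID"] = "testing"
        os.environ["AWS_SECRET_ACCESS_KEY"] = "testing"
        os.environ["AWS_SECURITY_TOKEN"] = "testing"
        os.environ["AWS_SESSION_TOKEN"] = "testing"
        os.environ["AWS_DEFAULT_REGION"] = "us-east-1"

        self.mock_s3 = mock_s3()
        self.mock_s3.start()  # must run before any boto3 client is built
        self.s3_client = boto3.client("s3", region_name="us-east-1")
        self.s3_client.create_bucket(Bucket="test-bucket")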

Getting AccessDenied when trying to retrieve a file from S3

I am trying to retrieve a file from an S3 bucket (using boto3), but I keep getting "An error occurred (AccessDenied) when calling the GetObject operation: Access Denied". Below is how I created the S3 bucket:
# S3
bucket = s3.Bucket(
    self,
    "testS3Bucket",
    bucket_name=f"test_s3_bucket",
    versioned=True,
)
bucket.grant_read_write(service_lambda)
and the method I used to get the file from S3:
def download_file(self, file_name, s3_bucket):
    try:
        file = self.s3.Bucket(s3_bucket).Object(file_name)
        file_content = file.get()["Body"].read()
    except ClientError as e:
        if e.response["Error"]["Code"] == "404":
            log.error("File does not exist for partner")
            return {}
        else:
            raise e
    except Exception as e:
        raise e
    return file_content.decode()
I even went as far as adding the "s3:*" action to the IAM policy statement, but I still got the same error. I was able to use the command below to successfully retrieve the file's metadata, so I don't think it is a permissions error.
aws s3api head-object --bucket test_s3_bucket --key configuration.txt
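Two things worth checking. First, S3 bucket names cannot contain underscores, so bucket_name=f"test_s3_bucket" would be rejected at deploy time; the bucket that actually exists may be named differently. Second, the successful head-object call above ran under your local CLI credentials, not under the Lambda's execution role, so it does not prove the role can call GetObject. A sketch of making the object-level grant explicit in CDK (service_lambda and bucket are the constructs from the question):
from aws_cdk import aws_iam as iam

# GetObject is authorized against the object ARN (arn:...:bucket/*),
# not the bucket ARN itself, so grant on arn_for_objects.
service_lambda.add_to_role_policy(
    iam.PolicyStatement(
        actions=["s3:GetObject"],
        resources=[bucket.arn_for_objects("*")],
    )
)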

To upload a file to amazon s3 using lambda

I used this Python code to upload a file to an S3 bucket using Lambda, and I am getting an error:
{
  "errorMessage": "Syntax error in module 'lambda_function': (unicode error) 'unicodeescape' codec can't decode bytes in position 2-3: truncated \\UXXXXXXXX escape (lambda_function.py, line 10)",
  "errorType": "Runtime.UserCodeSyntaxError",
  "stackTrace": [
    "  File \"/var/task/lambda_function.py\" Line 10\n    file_name= \"C:\\Users\\smanthriprag\\Pictures\\Screenshots\\s.jpeg\"\n"
  ]
}
import json
import boto3
from botocore.exceptions import ClientError

def lambda_handler(file_name, bucket, object_name=None):
    file_name = "C:\Users\smanthriprag\Pictures\Screenshots\s.jpeg"
    bucket = "serverlesswebapp0406"

    # If S3 object_name was not specified, use file_name
    if object_name is None:
        object_name = file_name

    # Upload the file
    s3_client = boto3.client('s3')
    try:
        response = s3_client.upload_file(file_name, bucket, object_name)
        print('Step 3: upload done')
    except ClientError as e:
        logging.error(e)
        return False
    return True['response']
An AWS Lambda function written in Python should use the following handler:
def lambda_handler(event, context):
If information is being passed into the Lambda function, it will be made available via the event. The contents of event depend upon how the Lambda function is triggered (e.g. triggered by S3, triggered by SQS, or run via an Invoke() command).
Your program has an incorrect definition for the handler function.
See: Lambda function handler in Python - AWS Lambda
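Beyond the handler signature, the reported syntax error itself comes from the Windows path: in a regular Python string, \U begins an eight-character unicode escape, so "C:\Users\..." fails to parse (a raw string r"C:\Users\..." would parse, but Lambda has no access to a local C: drive anyway). A sketch of a corrected handler, assuming the caller passes the bucket and key in the event and the file to upload is staged under /tmp, the only writable path in Lambda:
import logging

import boto3
from botocore.exceptions import ClientError

def lambda_handler(event, context):
    # Event keys below are assumptions about how the function is invoked.
    file_name = event.get("file_name", "/tmp/s.jpeg")
    bucket = event.get("bucket", "serverlesswebapp0406")
    object_name = event.get("object_name") or "s.jpeg"

    s3_client = boto3.client("s3")
    try:
        s3_client.upload_file(file_name, bucket, object_name)
    except ClientError as e:
        logging.error(e)
        return {"uploaded": False}
    return {"uploaded": True, "key": object_name}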

Boto3 upload_file is silently failing

I am trying to upload a file to an S3 bucket, and I have used the following code to achieve that.
Code
import boto3

accessKey = ''
secretKey = ''
session = boto3.Session(aws_access_key_id=accessKey, aws_secret_access_key=secretKey)
s3 = session.resource('s3')
try:
    response = s3.Object(bucket_name, 'sample.docx').upload_file(Filename='C:/Users/Anushka/Desktop/sample.docx')
except Exception as e:
    return e
The code does not do anything, not even raise an error, and if I print "response", "None" gets printed on the shell. I am not able to understand what the problem with the code is.
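A None response here does not by itself indicate failure: upload_file returns None on success and reports problems by raising. Note also that return e hands the exception back instead of surfacing it, which can hide a real error. A quick sketch to confirm whether the object actually landed (accessKey, secretKey, and bucket_name are from the question):
import boto3
from botocore.exceptions import ClientError

session = boto3.Session(aws_access_key_id=accessKey, aws_secret_access_key=secretKey)
s3 = session.resource('s3')

# upload_file returns None on success; let exceptions propagate so
# failures are visible instead of being returned from an except block.
s3.Object(bucket_name, 'sample.docx').upload_file(
    Filename='C:/Users/Anushka/Desktop/sample.docx')

# head_object raises a ClientError (404) if the object is not there.
try:
    s3.meta.client.head_object(Bucket=bucket_name, Key='sample.docx')
    print("upload confirmed")
except ClientError as e:
    print("upload failed:", e)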

S3 Default server side encryption on large number of buckets using Python boto3

Hi, I am trying to turn on S3 default encryption on all the buckets in an account using the Python boto3 script below.
import boto3
from botocore.exceptions import ClientError

s3 = boto3.client('s3')
response = s3.list_buckets()

for bucket in response['Buckets']:
    enc = s3.get_bucket_encryption(Bucket=bucket['Name'])
    s3.put_bucket_encryption(
        Bucket=bucket['Name'],
        ServerSideEncryptionConfiguration={
            'Rules': [
                {
                    'ApplyServerSideEncryptionByDefault': {
                        'SSEAlgorithm': 'AES256'
                    }
                },
            ]
        }
    )
But I am struggling with my code, which is not working and gives this error:
File "apply.py", line 10, in <module>
enc = s3.get_bucket_encryption(Bucket=bucket['Name'])
File "/Users/hhaqqani/Library/Python/2.7/lib/python/site-packages/botocore/client.py", line 272, in _api_call
return self._make_api_call(operation_name, kwargs)
File "/Users/hhaqqani/Library/Python/2.7/lib/python/site-packages/botocore/client.py", line 576, in _make_api_call
raise error_class(parsed_response, operation_name)
botocore.exceptions.ClientError: An error occurred (ServerSideEncryptionConfigurationNotFoundError) when calling the GetBucketEncryption operation: The server side encryption configuration was not found
You are passing the wrong bucket name. Change Bucket=enc to Bucket=bucket['Name'] in your call to put_bucket_encryption.
Note also that the call to get_bucket_encryption will throw an exception if the bucket does not actually have encryption configured. While that might seem odd, that's the way it works (see boto3/issues/1899 for more details). So, to handle this potential exception:
SSECNF = 'ServerSideEncryptionConfigurationNotFoundError'

try:
    bucket = client.get_bucket_encryption(Bucket=bucket['Name'])
    # check current encryption here, if it's not what you want then update it
    # check bucket['ServerSideEncryptionConfiguration']['Rules']
except client.exceptions.ClientError as e:
    if e.response['Error']['Code'] == SSECNF:
        s3.put_bucket_encryption(...)
    else:
        print("Unexpected error: %s" % e)
Please see the fixed code below; thanks @jarmod for the quick help.
import boto3
from botocore.exceptions import ClientError

s3 = boto3.client('s3')
response = s3.list_buckets()
client = boto3.client('s3')
SSECNF = 'ServerSideEncryptionConfigurationNotFoundError'

for bucket in response['Buckets']:
    try:
        bucket = client.get_bucket_encryption(Bucket=bucket['Name'])
        # check current encryption here, if it's not what you want then update it
        # check bucket['ServerSideEncryptionConfiguration']['Rules']
    except client.exceptions.ClientError as e:
        if e.response['Error']['Code'] == SSECNF:
            s3.put_bucket_encryption(
                Bucket=bucket['Name'],
                ServerSideEncryptionConfiguration={
                    'Rules': [
                        {
                            'ApplyServerSideEncryptionByDefault': {
                                'SSEAlgorithm': 'AES256'
                            }
                        },
                    ]
                })
        else:
            print("Unexpected error: %s" % e)
