FastAPI Upload to S3 - python

I've been scratching my head on this for days now. I still couldn't solve the problem. Basically, I just wanted to put a CSV file in a LocalStack S3, and I can't get it working.
Here's the snippet of my code:
api.py
from files import s3, AWS_S3_BUCKET_NAME, upload_file_to_bucket
#router.post('/api/customer/generate/upload',
name='Upload CSV to AWS S3 Bucket',
status_code=201)
async def post_upload_user_csv(file_obj: UploadFile = File(...)):
upload_obj = upload_file_to_bucket(s3_client=s3(),
file_obj=file_obj.file,
bucket=AWS_S3_BUCKET_NAME,
folder='CSV', # To Be updated
object_name=file_obj.filename)
if upload_obj:
return JSONResponse(content="Object has been uploaded to bucket successfully",
status_code=status.HTTP_201_CREATED)
else:
raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="File could not be uploaded")
files.py
import os
import boto3
import logging
from botocore.client import BaseClient
from botocore.exceptions import ClientError
AWS_ACCESS_KEY_ID = os.getenv('POSTGRES_HOST')
AWS_SECRET_KEY = os.getenv('AWS_SECRET_KEY')
AWS_S3_BUCKET_NAME = os.getenv('AWS_S3_BUCKET_NAME')
def s3() -> BaseClient:
client = boto3.client(service_name='s3',
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_KEY,
endpoint_url='http://localhost:4566/') # Use LocalStack Endpoint
return client
def upload_file_to_bucket(s3_client, file_obj, bucket, folder, object_name=None):
"""Upload a file to an S3 bucket
:param s3_client: S3 Client
:param file_obj: File to upload
:param bucket: Bucket to upload to
:param folder: Folder to upload to
:param object_name: S3 object name. If not specified then file_name is used
:return: True if file was uploaded, else False
"""
# If S3 object_name was not specified, use file_name
if object_name is None:
object_name = file_obj
# Upload the file
try:
# with open("files", "rb") as f:
s3_client.upload_fileobj(file_obj, bucket, f"{folder}/{object_name}")
except ClientError as e:
logging.error(e)
return False
return True
The problem is that s3_client needs to open the file in binary mode first before I can upload it to s3 bucket. However, this can't be done directly and the file needs to be saved temporarily on the FastAPI server but I really don't want to do that for obvious reasons.
Any help will be much appreciated. Thank you in advance!

Can you show the error first?
Bassicly. I have done it before. Upload directly a file from the front-end to aws-s3. Can you try to add ContentType to upload_fileobj. Here is my code
content_type = mimetypes.guess_type(fpath)[0]
s3.Bucket(bucket_name).upload_fileobj(Fileobj=file, Key=file_path,
ExtraArgs={"ACL": "public-read",
"ContentType": content_type})
another way. You should try to convert files to io.BytesIO
def s3_upload(self, file, file_path, bucket_name, width=None, height=None, make_thumb=False, make_cover=False):
s3 = boto3.resource(service_name='s3')
obj = BytesIO(self.image_optimize_from_buffer(file, width, height, make_thumb, make_cover))
s3.Bucket(bucket_name).upload_fileobj(Fileobj=obj, Key=file_path,
ExtraArgs={"ACL": "public-read", "ContentType": file.content_type})
return f'https://{bucket_name}.s3.amazonaws.com/{file_path}'

Related

Trying to read ssm parameter and working fine but write as text and uploading inside my bucket its not happening please find below code

import boto3
import os
client = boto3.client('ssm')
s3 = boto3.client("s3")
def lambda_handler(event, context):
parameter = client.get_parameter(Name='otherparam', WithDecryption=True)
#print(parameter)
return parameter ['Parameter']['Value']
#file = open("/sample.txt", "w")
#file.write(parameter)
#file.close
with open("/tmp/log.txt", "w") as f:
file.write(parameter)
s3.upload_file("/tmp/log.txt", "copys3toecsbucket-117", "logs.txt")
#bucket = "copys3toecsbucket-117"
#file = "/sample.txt"
#response = s3_client.put_object(Body=file,Bucket='bucket',key='file')
print(response)
trying in aws lambda only.
how to convert ssm parameter into text file which will be trigger file for next step and upload in s3 bucket?
Uploading to bucket is not happening because you are returning a value before the upload happens. When you return a value in the handler, the Lambda function completes.
Removing return will fix it.
import boto3
import os
client = boto3.client('ssm')
s3 = boto3.client("s3")
def lambda_handler(event, context):
parameter = client.get_parameter(Name='otherparam', WithDecryption=True)
print(parameter)
with open("/tmp/log.txt", "w") as f:
file.write(parameter)
s3.upload_file("/tmp/log.txt", "copys3toecsbucket-117", "logs.txt")
return True

Uploading image on AWS S3 using boto is having the size: 0 byte, Python, Flask

I am trying to upload the image to my bucket on AWS S3. Earlier it was working fine. But now the uploaded image is having the size 0 byte. I have tried rolling back to previous versions of my project on GitHub. But nothing seems to work now. I am stuck on this issue for 2 days now.
def upload_to_aws(local_file, bucket_name, s3_file):
s3 = boto3.client('s3', aws_access_key_id=BaseConfig.AWS_ACCESS_KEY_ID,
aws_secret_access_key=BaseConfig.AWS_SECRET_ACCESS_KEY)
s3.upload_fileobj(local_file, bucket_name, s3_file)
file_url = '%s/%s/%s' % (s3.meta.endpoint_url, bucket_name, s3_file)
return file_url
from werkzeug.datastructures import FileStorage
parser = reqparse.RequestParser()
parser.add_argument('image',
type=FileStorage,
required=True,
help='image is required',
location='files'
)
class Classifier(Resource):
def post(self):
data = Classifier.parser.parse_args()
image = data["image"]
key_name = "some-key-name"
upload_to_aws(image, BaseConfig.BUCKET_NAME, key_name)
return {message: "uploaded successfully"}, 200
The upload_fileobj() function will upload a file-like object to S3. You would pass it a file object returned from an open() command.
If the image variable contains a filename, you should be using upload_file() instead.

How to write parquet file to ECS in Flask python using boto or boto3

I have flask python rest api which is called by another flask rest api.
the input for my api is one parquet file (FileStorage object) and ECS connection and bucket details.
I want to save parquet file to ECS in a specific folder using boto or boto3
the code I have tried
def uploadFileToGivenBucket(self,inputData,file):
BucketName = inputData.ecsbucketname
calling_format = OrdinaryCallingFormat()
client = S3Connection(inputData.access_key_id, inputData.secret_key, port=inputData.ecsport,
host=inputData.ecsEndpoint, debug=2,
calling_format=calling_format)
#client.upload_file(BucketName, inputData.filename, inputData.folderpath)
bucket = client.get_bucket(BucketName,validate=False)
key = boto.s3.key.Key(bucket, inputData.filename)
fileName = NamedTemporaryFile(delete=False,suffix=".parquet")
file.save(fileName)
with open(fileName.name) as f:
key.send_file(f)
but it is not working and giving me error like...
signature_host = '%s:%d' % (self.host, port)
TypeError: %d format: a number is required, not str
I tried google but no luck Can anyone help me with this or any sample code for the same.
After a lot of hit and tried and time, I finally got the solution. I posting it for everyone else who are facing the same issue.
You need to use Boto3 and here is the code...
def uploadFileToGivenBucket(self,inputData,file):
BucketName = inputData.ecsbucketname
#bucket = client.get_bucket(BucketName,validate=False)
f = NamedTemporaryFile(delete=False,suffix=".parquet")
file.save(f)
endpointurl = "<your endpoints>"
s3_client = boto3.client('s3',endpoint_url=endpointurl, aws_access_key_id=inputData.access_key_id,aws_secret_access_key=inputData.secret_key)
try:
newkey = 'yourfolderpath/anotherfolder'+inputData.filename
response = s3_client.upload_file(f.name, BucketName,newkey)
except ClientError as e:
logging.error(e)
return False
return True

Get a specific file from s3 bucket (boto3)

So I have a file.csv on my bucket 'test', I'm creating a new session and I wanna download the contents of this file:
session = boto3.Session(
aws_access_key_id=KEY,
aws_secret_access_key=SECRET_KEY
)
s3 = session.resource('s3')
obj = s3.Bucket('test').objects.filter(Prefix='file.csv')
This returns me a collection but is there a way to fetch the file directly? Without any loops, I wanna do something like:
s3.Bucket('test').objects.get(key='file.csv')
I could achieve the same result without passing credentials like this:
s3 = boto3.client('s3')
obj = s3.get_object(Bucket='test', Key='file.csv')
If you take a look at the client method:
import boto3
s3_client = boto3.client('s3')
s3_client.download_file('mybucket', 'hello.txt', '/tmp/hello.txt')
and the resource method:
import boto3
s3 = boto3.resource('s3')
s3.meta.client.download_file('mybucket', 'hello.txt', '/tmp/hello.txt')
you'll notice that you can convert from the resource to the client with meta.client.
So, combine it with your code to get:
session = boto3.Session(aws_access_key_id=KEY, aws_secret_access_key=SECRET_KEY)
s3 = session.resource('s3')
obj = s3.meta.client.download_file('mybucket', 'hello.txt', '/tmp/hello.txt')
I like mpu.aws.s3_download, but I'm biased ;-)
It does it like that:
import os
import boto3
def s3_download(bucket_name, key, profile_name, exists_strategy='raise'):
session = boto3.Session(profile_name=profile_name)
s3 = session.resource('s3')
if os.path.isfile(destination):
if exists_strategy == 'raise':
raise RuntimeError('File \'{}\' already exists.'
.format(destination))
elif exists_strategy == 'abort':
return
s3.Bucket(bucket_name).download_file(key, destination)
For authentication, I recommend using environment variables. See boto3: Configuring Credentials for details.
you can use the following boto3 method.
download_file(Bucket, Key, Filename, ExtraArgs=None, Callback=None,
Config=None)
s3 = boto3.resource('s3')
s3.meta.client.download_file('mybucket', 'hello.txt', '/tmp/hello.txt')
find more details here - download_file()

Packing a binary into AWS Lambda for Python

I want to convert m4a files uploaded to S3 to mp3. The files would only be 15 seconds max, so using Elastic Transcoder would be overkill. I downloaded a binary from https://www.johnvansickle.com/ffmpeg/. But I am very new to AWS and im still not sure how uploading binaries works. How would I would include it so that I could convert a file?
import boto3
import urllib
print('Loading function')
s3 = boto3.client('s3')
def lambda_handler(event, context):
bucket = event['Records'][0]['s3']['bucket']['name']
key = urllib.unquote_plus(event['Records'][0]['s3']['object']['key'].encode('utf8'))
try:
#convert to mp3
#upload to bucket
except Exception as e:
print(e)
print('Error getting object {} from bucket {}. Make sure they exist and your bucket is in the same region as this function.'.format(key, bucket))
raise e

Categories

Resources