Amazon S3 boto3: how to iterate through objects in a bucket? - python

In a Flask app, I am trying to iterate through the objects in an S3 bucket and print each key/filename, but my_bucket.objects.all() appears to return only the first object in the bucket rather than all of them. The output is [001.pdf] instead of [001, 002, 003, 004, 005].
from flask import Flask, jsonify, Response, request
from flask_cors import CORS, cross_origin
from config import S3_BUCKET, S3_ACCESS_KEY, S3_SECRET_ACCESS_KEY
import boto3
import csv
import re

s3 = boto3.client(
    's3',
    aws_access_key_id=S3_ACCESS_KEY,
    aws_secret_access_key=S3_SECRET_ACCESS_KEY
)

app = Flask(__name__)
CORS(app, supports_credentials=True)

@app.route('/')
def health():
    return jsonify({"message": "app is working"})

@app.route('/files')
def list_of_files():
    s3_resource = boto3.resource('s3')
    my_bucket = s3_resource.Bucket(S3_BUCKET)
    summaries = my_bucket.objects.all()
    files = []
    for file in summaries:
        # this prints the bucket object
        print("Object: {}".format(summaries))
        files.append(file.key)
        # file.key is supposed to return the names of the list of objects
        # print(file.key)
        return jsonify({"files": "{}".format(file.key)})

if __name__ == "__main__":
    app.run()

You are exiting the loop by returning too early: the return statement sits inside the for loop, so the function returns after processing the first object. Move the return outside the loop and return the accumulated files list instead of a single key:
def list_of_files():
    s3_resource = boto3.resource('s3')
    my_bucket = s3_resource.Bucket(S3_BUCKET)
    summaries = my_bucket.objects.all()
    files = []
    for file in summaries:
        files.append(file.key)
    return jsonify({"files": files})
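
If you prefer the low-level client you already created at the top of the file, a paginator gives the same result. This is a minimal sketch, assuming the same s3 client and S3_BUCKET constant:

def list_of_files_with_client():
    files = []
    paginator = s3.get_paginator('list_objects_v2')
    for page in paginator.paginate(Bucket=S3_BUCKET):
        # 'Contents' is absent for an empty bucket, so default to an empty list
        for obj in page.get('Contents', []):
            files.append(obj['Key'])
    return jsonify({"files": files})

Either way, the route now returns every key in the bucket in one JSON response.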

Related

Reading an SSM parameter works fine, but writing it to a text file and uploading that file to my bucket is not happening. Please see the code below.

import boto3
import os

client = boto3.client('ssm')
s3 = boto3.client("s3")

def lambda_handler(event, context):
    parameter = client.get_parameter(Name='otherparam', WithDecryption=True)
    #print(parameter)
    return parameter['Parameter']['Value']
    #file = open("/sample.txt", "w")
    #file.write(parameter)
    #file.close
    with open("/tmp/log.txt", "w") as f:
        file.write(parameter)
    s3.upload_file("/tmp/log.txt", "copys3toecsbucket-117", "logs.txt")
    #bucket = "copys3toecsbucket-117"
    #file = "/sample.txt"
    #response = s3_client.put_object(Body=file, Bucket='bucket', key='file')
    print(response)
I am trying this in AWS Lambda only.
How do I turn the SSM parameter into a text file (which will be the trigger file for the next step) and upload it to the S3 bucket?
Uploading to the bucket is not happening because you return a value before the upload runs. When the handler returns, the Lambda function completes immediately. Removing that early return (and writing through the file handle f rather than file) fixes it:
import boto3
import os

client = boto3.client('ssm')
s3 = boto3.client("s3")

def lambda_handler(event, context):
    parameter = client.get_parameter(Name='otherparam', WithDecryption=True)
    print(parameter)
    # write the parameter value (a string) to a temp file, then upload it
    with open("/tmp/log.txt", "w") as f:
        f.write(parameter['Parameter']['Value'])
    s3.upload_file("/tmp/log.txt", "copys3toecsbucket-117", "logs.txt")
    return True
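
If you do not actually need the file on disk, the value can also be sent straight to S3 with put_object. A minimal sketch, assuming the same parameter name and bucket as above:

import boto3

client = boto3.client('ssm')
s3 = boto3.client('s3')

def lambda_handler(event, context):
    parameter = client.get_parameter(Name='otherparam', WithDecryption=True)
    # upload the parameter value directly as the object body, no temp file
    s3.put_object(
        Bucket='copys3toecsbucket-117',
        Key='logs.txt',
        Body=parameter['Parameter']['Value'].encode('utf-8')
    )
    return True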

S3 folder to folder file copy using Lambda [duplicate]

I am trying to copy multiple files from one S3 bucket to another using a Lambda function, but it only copies 2 files into the destination bucket.
Here is my code:
# using python and boto3
import json
import boto3

s3_client = boto3.client('s3')

def lambda_handler(event, context):
    source_bucket_name = event['Records'][0]['s3']['bucket']['name']
    file_name = event['Records'][0]['s3']['object']['key']
    destination_bucket_name = 'nishantnkd'
    copy_object = {'Bucket': source_bucket_name, 'Key': file_name}
    s3_client.copy_object(CopySource=copy_object,
                          Bucket=destination_bucket_name, Key=file_name)
    return {'statusCode': 3000,
            'body': json.dumps('File has been Successfully Copied')}
I presume that the Amazon S3 bucket is configured to trigger the AWS Lambda function when a new object is created.
When the Lambda function is triggered, it is possible that multiple event records are sent to the function. Therefore, it should loop through the event records like this:
# using python and boto3
import json
import urllib.parse  # needed for unquote_plus below
import boto3

s3_client = boto3.client('s3')

def lambda_handler(event, context):
    for record in event['Records']:  # This loop added
        source_bucket_name = record['s3']['bucket']['name']
        file_name = urllib.parse.unquote_plus(record['s3']['object']['key'])  # Note this change too
        destination_bucket_name = 'nishantnkd'
        copy_object = {'Bucket': source_bucket_name, 'Key': file_name}
        s3_client.copy_object(CopySource=copy_object, Bucket=destination_bucket_name, Key=file_name)
    return {'statusCode': 200,
            'body': json.dumps('File has been Successfully Copied')}
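
For reference, this is roughly the shape of the event the function receives (a trimmed sketch with a hypothetical bucket name; real notifications carry many more fields). It shows why the loop is needed and why keys must be URL-decoded:

sample_event = {
    "Records": [
        {
            "s3": {
                "bucket": {"name": "my-source-bucket"},
                # spaces and special characters arrive URL-encoded in the key
                "object": {"key": "reports/monthly+report+01.pdf"}
            }
        }
        # ...one entry per object included in this notification
    ]
}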

how to access app object in other files of flask app

# flask packages
import jwt
from flask import Flask
from flask_restful import Api
from flask_mongoengine import MongoEngine
from flask_jwt_extended import JWTManager
import logging

# local packages
import models
from api.routes import create_routes

# external packages
import os

# default mongodb configuration
default_config = {'MONGODB_SETTINGS': {
    'db': 'blog_db',
    'host': 'localhost',
    'port': 27017}
}

def get_flask_app(config: dict = None):
    """
    Initializes Flask app with given configuration.
    Main entry point for wsgi (gunicorn) server.
    :param config: Configuration dictionary
    :return: app
    """
    # init flask
    app = Flask(__name__)

    # # configure app
    # config = default_config if config is None else config
    # app.config.update(config)

    # load config variables
    if 'MONGODB_URI' in os.environ:
        app.config['MONGODB_SETTINGS'] = {'host': os.environ['MONGODB_URI'], 'retryWrites': False}
    if 'JWT_SECRET_KEY' in os.environ:
        app.config['JWT_SECRET_KEY'] = os.environ['JWT_SECRET_KEY']
    app.config['JWT_BLACKLIST_ENABLED'] = True
    app.config['JWT_BLACKLIST_TOKEN_CHECKS'] = ['access', 'refresh']

    # File upload configs
    app.config['PROFILE_FOLDER'] = '/images/profiles'
    app.config['PROFILE_FOLDER'] = '/images/posts'
    app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024
    app.config['ALLOWED_EXTENSIONS'] = set(['png', 'jpg', 'jpeg', 'gif'])

    # init api and routes
    api = Api(app=app)
    create_routes(api=api)

    # init mongoengine
    db = MongoEngine(app=app)

    # init jwt manager
    jwt = JWTManager(app=app)

    # @jwt.token_in_blocklist_loader
    # def check_if_token_in_blacklist(decrypted_token):
    #     jti = decrypted_token['jti']
    #     return models.RevokedTokens.is_jti_blacklisted(jti)

    return app

if __name__ == '__main__':
    # Main entry point when run in stand-alone mode.
    app = get_flask_app()
    app.run(debug=True)
I am trying to use the app object in utilities.py, which lives in a different folder:
import os
import sys
from werkzeug.utils import secure_filename
from flask import jsonify
from run import app

class Utilities:
    FILE_ALLOWED_EXTENSIONS = set(['png', 'jpg', 'jpeg', 'gif'])

    @staticmethod
    def allowed_file(filename):
        return '.' in filename and filename.rsplit('.', 1)[1].lower() in Utilities.FILE_ALLOWED_EXTENSIONS

    @staticmethod
    def upload_files(file, filename, foldername):
        try:
            print(">>>>>>>>>" + foldername + filename, file=sys.stdout)
            filename = secure_filename(filename)
            file.save(os.path.join(app.root_path, foldername, filename))
            resp = jsonify({'message': 'File successfully uploaded'})
            resp.status_code = 201
            return resp
        except Exception as e:
            print(">>>>>>>>>" + str(e), file=sys.stdout)
            resp = jsonify({'message': 'File upload failed'})
            resp.status_code = 400
            return resp
but I am getting the error below:
ImportError: cannot import name 'create_routes' from partially initialized module 'api.routes' (most likely due to a circular import) (D:\Python\Projects\XXXX\api\routes.py)
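
The error points at a circular import: run.py imports create_routes from api.routes, and somewhere along that chain utilities.py does from run import app, which re-enters run.py while api.routes is still only partially initialized. One common way to break such a cycle is to avoid importing app at all and use Flask's current_app proxy inside request-handling code; a minimal sketch (the upload_path helper name is only for illustration):

import os
from flask import current_app

def upload_path(foldername, filename):
    # current_app resolves to the running Flask app, but only inside an
    # application or request context (e.g. while handling a request)
    return os.path.join(current_app.root_path, foldername, filename)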

"errorMessage": "Handler 'lambda_handler' missing on module 'lambda_function'",

I was creating a bucket with the code below. Is it compulsory to define lambda_handler(event, context) whenever a Lambda function does anything?
Code is below:
import json
import boto3

BUCKET_NAME = 'ly_2020_s3'

def s3_client():
    s3 = boto3.client('s3')
    return s3

def create_bucket(bucket_name):
    return s3_client().create_bucket(
        Bucket=bucket_name, CreateBucketConfiguration={'LocationConstraint': 'eu-central-1'})

if __name__ == '__main__':
    create_bucket(BUCKET_NAME)
A lambda handler function is required:
AWS Lambda function handler in Python
For example:
import json
import boto3

BUCKET_NAME = 'lilly_2020_s3'

def s3_client():
    s3 = boto3.client('s3')
    return s3

def create_bucket(bucket_name):
    return s3_client().create_bucket(
        Bucket=bucket_name,
        CreateBucketConfiguration={'LocationConstraint': 'eu-central-1'})

def lambda_handler(event, context):
    bucket_name = 'some-bucket-name'
    create_bucket(bucket_name)
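
The error message means Lambda looked for a function named lambda_handler in the module lambda_function (the default "lambda_function.lambda_handler" handler setting) and did not find one, so both the module name and the function name have to match that setting. If you also want to run the same file locally, a small hedged addition like this works; Lambda itself ignores the __main__ block:

if __name__ == '__main__':
    # simulate an invocation locally; Lambda calls lambda_handler(event, context) itself
    lambda_handler({}, None)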

How to store an uploaded file in Heroku using Flask in Python?

I have made an app where an image can be uploaded. Everything works fine on Flask's local server, but after deploying the app to Heroku, an uploaded image is not stored in the specified directory. Any help would be appreciated.
from flask import Flask, redirect, request, url_for
from flask import render_template as ren
import os
from werkzeug.utils import secure_filename
import uuid

app = Flask(__name__)

# FILE_PATH = os.environ.get("FILE_PATH")
FILE_PATH = "templates/uploads/"

@app.route("/")
def home():
    return ren("index.html")

@app.route("/img-upload", methods=['GET', 'POST'])
def upload():
    if request.method == 'POST':
        if request.files:
            image = request.files['image']
            id = uuid.uuid1()
            if secure_filename(image.filename):
                filename = image.filename
                ext = filename.rsplit(".", 1)[1]
                filename = id.hex + "." + ext  ######### FileName of uploaded file ############
                file_path = os.path.join(str(FILE_PATH), secure_filename(filename))
                print(file_path)
                image.save(file_path)
            return redirect(request.url)
    return ren("index.html")

if __name__ == '__main__':
    app.run(debug=True)
Heroku has an ephemeral filesystem: files written while a dyno is running are deleted when the dyno restarts or cycles, so locally saved uploads do not persist.
The option I would use is AWS S3, which is where I store images.
Here's a good link to get you started with setting up and using AWS S3: https://www.youtube.com/watch?v=kt3ZtW9MXhw
After you have set up your AWS S3 bucket:
import boto3
BUCKET = 'my-bucket-name'
s3 = boto3.client("s3", aws_access_key_id=os.environ.get('AWS_ACCESS_KEY_ID'), aws_secret_access_key=os.environ.get('AWS_SECRET_ACCESS_KEY'))
bucket_resource = s3
bucket_resource.upload_file(Bucket = BUCKET, Filename=picture_fn, Key=picture_fn) # uploading
# retrieving
image_file = s3.generate_presigned_url('get_object',
Params={
'Bucket': BUCKET,
'Key': picture_fn,
},
ExpiresIn=3600)
# deleting
s3.delete_object(Bucket=BUCKET, Key=picture_fn)
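
To tie this into the upload route above, the image.save(file_path) call can be replaced with a direct upload of the incoming file object to S3. A minimal sketch, assuming the same BUCKET and s3 client; the upload_to_s3 helper name is only for illustration:

def upload_to_s3(image, filename):
    # stream the uploaded FileStorage object straight to S3, no local file needed
    s3.upload_fileobj(image, BUCKET, filename)
    # return a temporary URL so the app can display the stored image
    return s3.generate_presigned_url(
        'get_object',
        Params={'Bucket': BUCKET, 'Key': filename},
        ExpiresIn=3600
    )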
