I currently have a small api written in Flask made for interacting with a CNN, I'm setting up the configuracion for running it in Docker and everything runs fine, this is my actual configuration:
Dockerfile:
FROM python:2.7.15-jessie
RUN mkdir -p usr/src/app
COPY . usr/src/app
WORKDIR usr/src/app
RUN which python
RUN apt-get update && apt-get install -y
RUN pip install flask flask_uploads Werkzeug opencv-python numpy tensorflow
ENV PORT 5000
EXPOSE 5000
CMD ["flask", "run"]
Docker-compose:
version: "2.2"
services:
api:
container_name: "pyserver"
build: .
volumes:
- ".:/usr/src/app"
environment:
FLASK_APP: server.py
ports:
- "5000:5000"
command: ["flask", "run"]
server.py
import os
from base64 import b64encode, b64decode
from flask import Flask, redirect, request, url_for, json, send_file
from flask_uploads import UploadSet, configure_uploads
from werkzeug.utils import secure_filename
from GW_predict import predict
UPLOAD_FOLDER = 'CNN/Files'
ALLOWED_EXTENSIONS = set(['txt', 'csv', 'png', 'jpg', 'jpeg'])
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
app.config["DEBUG"] = True
def allowed_file(filename):
ext = '.' in filename and \
filename.rsplit('.', 1)[1].lower()
return ext, ext in ALLOWED_EXTENSIONS
#app.route('/status', methods=['GET'])
def status():
return create_response({ 'online': True, 'message': 'UP AND RUNNING # 5000' }, 200)
#app.route('/uploadFile', methods=['POST'])
def upload_file():
if 'h1' not in request.files or 'l1' not in request.files:
return create_response({'result': False, 'message': 'File missing'}, 422)
h1_file = request.files['h1']
l1_file = request.files['l1']
if h1_file.filename == '' or l1_file.filename == '':
return create_response({'result': False, 'message': 'File missing'}, 422)
h1_ext, h1res = allowed_file(h1_file.filename)
l1_ext, l1res = allowed_file(l1_file.filename)
if h1_file and l1_file and h1res and l1res:
h1_filename = secure_filename(h1_file.filename)
l1_filename = secure_filename(l1_file.filename)
h1 = os.path.join(app.config['UPLOAD_FOLDER'], h1_filename)
l1 = os.path.join(app.config['UPLOAD_FOLDER'], l1_filename)
h1_file.save(h1)
l1_file.save(l1)
# img = b64encode((open(img, "rb").read()))
if h1_ext == 'png' and l1_ext == 'png':
result = predict(h1, l1)
return create_response({'result': True, 'prediction': result}, 200)
return create_response({'result': False, 'message': 'Images format must be png'}, 422)
return create_response({'result': False, 'message': 'No allowed file'}, 422)
def create_response(message, status):
response = app.response_class(
response= json.dumps(message),
status= status,
mimetype='application/json'
)
return response
if __name__ == '__main__':
app.run()
The problem is that in my docker-compose file I have configured the "ports" instruction in which I define to expose de port 5000 in my host and to forward to the same port in the container.
This does not work.
Inside the container I can make the request via CURL to the endpoint, but outside of the container I am unable to do so. What could be wrong?
maybe you need to specify the --host=0.0.0.0 flag? (from here).
Can you try to override your command in docker-compose?
version: "2.2"
services:
api:
container_name: "pyserver"
build: .
command: flask run --host=0.0.0.0
volumes:
- ".:/usr/src/app"
environment:
FLASK_APP: server.py
ports:
- "5000:5000"
command: ["flask", "run"]
By the way, not sure that the EXPOSE in dockerfile is necessary if you are communicate only from your host machine (rather than other container).
Related
I have a flask app that I am loading through Docker, and when I try and access the application on localhost:8000 I get the error message in the subject line. I believe the issue is that the flask application is not recognizing my application's SECRET_KEY, but I'm not sure how to fix it.
Here is my app structure (condensed for clarity):
config/
-- settings.py
instance/
-- settings.py
myapp/
-- app.py
blueprints/
user/
-- models.py
.env
docker-compose
Dockerfile
My app-factory function looks like this in app.py:
def create_app(settings_override=None):
"""
Create a Flask application using the app factory pattern.
:param settings_override: Override settings
:return: Flask app
"""
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config.settings')
app.config.from_pyfile('settings.py', silent=True)
if settings_override:
app.config.update(settings_override)
app.register_blueprint(admin)
app.register_blueprint(page)
app.register_blueprint(contact)
app.register_blueprint(user)
extensions(app)
authentication(app, User)
return app
The error is being triggered in the function called authentication:
def authentication(app, user_model):
"""
Initialize the Flask-Login extension (mutates the app passed in).
:param app: Flask application instance
:param user_model: Model that contains the authentication information
:type user_model: SQLAlchemy model
:return: None
"""
login_manager.login_view = 'user.login'
#login_manager.user_loader
def load_user(uid):
return user_model.query.get(uid)
#login_manager.token_loader
def load_token(token):
duration = app.config['REMEMBER_COOKIE_DURATION'].total_seconds()
serializer = URLSafeTimedSerializer(app.secret_key)
data = serializer.loads(token, max_age=duration)
user_uid = data[0]
return user_model.query.get(user_uid)
It's the line where it says data = serializer.loads(token, max_age=duration)
The token is usually generated from the secret_key of the application.
Here are some examples from my User class where a token is generated:
def serialize_token(self, expiration=3600):
"""
Sign and create a token that can be used for things such as resetting
a password or other tasks that involve a one off token.
:param expiration: Seconds until it expires, defaults to 1 hour
:type expiration: int
:return: JSON
"""
private_key = current_app.config['SECRET_KEY']
serializer = TimedJSONWebSignatureSerializer(private_key, expiration)
return serializer.dumps({'user_email': self.email}).decode('utf-8')
The SECRET_KEY variable is being set from my settings.py file from my config folder. Here is its contents:
from datetime import timedelta
DEBUG = True
SERVER_NAME = 'localhost:8000'
SECRET_KEY = 'insecurekeyfordev'
# Flask-Mail.
MAIL_DEFAULT_SENDER = 'contact#local.host'
MAIL_SERVER = 'smtp.gmail.com'
MAIL_PORT = 587
MAIL_USE_TLS = True
MAIL_USE_SSL = False
MAIL_USERNAME = 'you#gmail.com'
MAIL_PASSWORD = 'awesomepassword'
# Celery.
CELERY_BROKER_URL = 'redis://:devpassword#redis:6379/0'
CELERY_RESULT_BACKEND = 'redis://:devpassword#redis:6379/0'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_REDIS_MAX_CONNECTIONS = 5
# SQLAlchemy.
db_uri = 'postgresql://snakeeyes:devpassword#postgres:5432/snakeeyes'
SQLALCHEMY_DATABASE_URI = db_uri
SQLALCHEMY_TRACK_MODIFICATIONS = False
# User.
SEED_ADMIN_EMAIL = 'dev#local.host'
SEED_ADMIN_PASSWORD = 'devpassword'
REMEMBER_COOKIE_DURATION = timedelta(days=90)
I don't know why this information isn't loading correctly in the app, but when I run docker-compose up --build I get the error message in the title.
If it's at all useful here are the contents of my docker files.
docker-compose.yml
version: '2'
services:
postgres:
image: 'postgres:9.5'
env_file:
- '.env'
volumes:
- 'postgres:/var/lib/postgresql/data'
ports:
- '5432:5432'
redis:
image: 'redis:3.0-alpine'
command: redis-server --requirepass devpassword
volumes:
- 'redis:/var/lib/redis/data'
ports:
- '6379:6379'
website:
build: .
command: >
gunicorn -b 0.0.0.0:8000
--access-logfile -
--reload
"snakeeyes.app:create_app()"
env_file:
- '.env'
volumes:
- '.:/snakeeyes'
ports:
- '8000:8000'
celery:
build: .
command: celery worker -l info -A snakeeyes.blueprints.contact.tasks
env_file:
- '.env'
volumes:
- '.:/snakeeyes'
volumes:
postgres:
redis:
And my DOCKERFILE:
FROM python:3.7.5-slim-buster
MAINTAINER My Name <myname#gmail.com>
RUN apt-get update && apt-get install -qq -y \
build-essential libpq-dev --no-install-recommends
ENV INSTALL_PATH /myapp
RUN mkdir -p $INSTALL_PATH
WORKDIR $INSTALL_PATH
COPY requirements.txt requirements.txt
RUN pip install -r requirements.txt
COPY . .
RUN pip install --editable .
CMD gunicorn -b 0.0.0.0:8000 --access-logfile - "myapp.app:create_app()"
I am trying to run a docker-compose app that has two services. One to build a web server and the other to run the tests on it.
docker-compose.yml
version: "3.7"
services:
web:
build: .
ports:
- "127.0.0.1:5000:5000"
expose:
- 5000
test:
# expose:
# - 5000
depends_on:
- web
build: test_python/.
./Dockerfile
FROM python:buster
RUN curl https://sh.rustup.rs -sSf | sh -s -- -y
# Add .cargo/bin to PATH
ENV PATH="/root/.cargo/bin:${PATH}"
# Check cargo is visible
RUN cargo --help
WORKDIR /code
COPY requirements.txt requirements.txt
RUN pip3 install -r requirements.txt
EXPOSE 5000
COPY test_python .
CMD [ "python3", "base_routes.py" ]
test_python/Dockerfile
FROM python:buster
RUN pip3 install pytest requests
COPY . .
base_routes.py
from robyn import Robyn, static_file, jsonify
import asyncio
app = Robyn(__file__)
callCount = 0
#app.get("/")
async def h(request):
print(request)
global callCount
callCount += 1
message = "Called " + str(callCount) + " times"
return message
#app.get("/test")
async def test(request):
import os
path = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), "test_python/index.html"))
return static_file(path)
#app.get("/jsonify")
async def json_get(request):
return jsonify({"hello": "world"})
#app.post("/jsonify")
async def json(request):
print(request)
return jsonify({"hello": "world"})
#app.post("/post")
async def postreq(request):
return bytearray(request["body"]).decode("utf-8")
#app.put("/put")
async def putreq(request):
return bytearray(request["body"]).decode("utf-8")
#app.delete("/delete")
async def deletereq(request):
return bytearray(request["body"]).decode("utf-8")
#app.patch("/patch")
async def patchreq(request):
return bytearray(request["body"]).decode("utf-8")
#app.get("/sleep")
async def sleeper():
await asyncio.sleep(5)
return "sleep function"
#app.get("/blocker")
def blocker():
import time
time.sleep(10)
return "blocker function"
if __name__ == "__main__":
app.add_header("server", "robyn")
app.add_directory(route="/test_dir",directory_path="./test_dir/build", index_file="index.html")
app.start(port=5000)
These are the files that I have used in my project. When I try to open 127.0.0.1:5000 from my machine, it shows nothing. However, when I log in the web container and do curl http://localhost:5000/, it gives the right response.
I am unable to figure out how to access it on the host machine?
I had to make the python server listen at '0.0.0.0'.
I added the following line in my codebase
app.start(port=5000, url='0.0.0.0')
Firstly thank you for your time.
I can't get any date from an API in my docker container. Neverveless I can if I'm in the container with a curl command but not from postman or a curl command if I 'm not in the container I don't know why, somebody have a solution for me ? :(
I mapped the port, docker-compose ps :
python /bin/sh -c ./src/server.py - ... Up 0.0.0.0:9090->9090/tcp
Above my docker-compose file :
'''
# Docker Compose
version: "3"
# Docker Containers
services:
# python
python:
build: ./docker/python
working_dir: /root/app
command: ./src/server.py
environment:
- SERVER_PORT=9090
ports:
- 9090:9090
volumes:
- .:/root/app
- ./.bash-history/python:/root/.bash_history
'''
My python server :
'''
#!/usr/local/bin/python
import sys
sys.path.append('/root/app/python_modules')
from flask import Flask, json
companies = [{"id": 1, "name": "Company One"}, {"id": 2, "name": "Company Two"}]
api = Flask(__name__)
#api.route('/companies', methods=['GET'])
def get_companies():
return json.dumps(companies)
if __name__ == '__main__':
api.run(host='', port=9090, debug=True)
'''
I have two little
Python Flask
apps
Appone --> Producer
Apptwo --> Consumer
Both are in different docker-container and orchestrated by docker-compose
I dont get the Data from the Producer to the Consumer...Even when I start in apptwo the start.consuming() the Producer cant send any Data to the RabbitMQ Broker
Maybe someone can help me. Thank you very much
docker-compose:
version: '3'
services:
appone:
container_name: appone
restart: always
build:
context: ./appone
dockerfile: Dockerfile
environment:
FLASK_APP: ./app.py
volumes:
- './appone:/code/:cached'
ports:
- "5001:5001"
apptwo:
container_name: apptwo
restart: always
build:
context: ./apptwo
dockerfile: Dockerfile
environment:
FLASK_DEBUG: 1
FLASK_APP: ./app.py
volumes:
- ./apptwo:/code:cached
ports:
- "5002:5002"
rabbitmq:
image: "rabbitmq:3-management"
hostname: "rabbit"
ports:
- "15672:15672"
- "5672:5672"
labels:
NAME: "rabbitmq"
volumes:
- ./rabbitmq/rabbitmq-isolated.conf:/etc/rabbitmq/rabbitmq.config
appone (Producer)
from flask import Flask
from flask_restful import Resource, Api
import pika
app = Flask(__name__)
api = Api(app)
app.config['DEBUG'] = True
message = "Hello World, its me appone"
class HelloWorld(Resource):
def get(self):
connection = pika.BlockingConnection(
pika.ConnectionParameters(host='rabbitmq'))
channel = connection.channel()
channel.queue_declare(queue='hello', durable=True)
channel.basic_publish(exchange='', routing_key='hello', body='Hello World!', properties=pika.BasicProperties(delivery_mode=2))
connection.close()
return {'message': message}
api.add_resource(HelloWorld, '/api/appone/post')
if __name__ == '__main__':
# Development
app.run(host="0.0.0.0", port=5001)
apptwo (Consumer)
from flask import Flask
from flask_restful import Resource, Api
import pika
from threading import Thread
app = Flask(__name__)
api = Api(app)
app.config['DEBUG'] = True
data = []
connection = pika.BlockingConnection(
pika.ConnectionParameters(host='rabbitmq'))
channel = connection.channel()
channel.queue_declare(queue='hello', durable=True)
def callback(ch, method, properties, body):
data.append(body)
ch.basic_ack(delivery_tag = method.delivery_tag)
channel.basic_consume(queue='hello', on_message_callback=callback)
thread = Thread(channel.start_consuming())
thread.start()
class HelloWorld(Resource):
def get(self):
return {'message': data}
api.add_resource(HelloWorld, '/api/apptwo/get')
if __name__ == '__main__':
app.run(debug=True, host="0.0.0.0", port=5002)
Goal
In this easy example I just want to receice the data in apptwo and store it in the data list...
Thanks again!!
In apptwo (Consumer):
thread = Thread(channel.start_consuming())
thread.start()
Here the constructor call of Thread is never called, since channel.start_consuming is called before, which is blocking. Changing your code to the following might help.
thread = Thread(target = channel.start_consuming)
thread.start()
My app.yaml file is as follows:
runtime: python
env: flex
entrypoint: gunicorn -b :8443 main:app
threadsafe: true
runtime_config:
python_version: 2
So, when I run this python script in GAE (of course, having deleted the previous webhook), the webhook doesn't get set up. I couldn't figure out what did I do wrong.
import sys
import os
import time
from flask import Flask, request
import telegram
# CONFIG
TOKEN = '<token>'
HOST = 'example.appspot.com' # Same FQDN used when generating SSL Cert
PORT = 8443
CERT = "certificate.pem"
CERT_KEY = "key.pem"
bot = telegram.Bot(TOKEN)
app = Flask(__name__)
#app.route('/')
def hello():
return 'Hello World!'
#app.route('/' + TOKEN, methods=['POST','GET'])
def webhook():
update = telegram.Update.de_json( request.get_json(force = True), bot )
chat_id = update.message.chat.id
bot.sendMessage(chat_id = chat_id, text = 'Hello, there')
return 'OK'
def setwebhook():
bot.setWebhook(url = "https://%s:%s/%s" % (HOST, PORT, TOKEN), certificate = open(CERT, 'rb'))
if __name__ == '__main__':
context = (CERT, CERT_KEY)
setwebhook()
time.sleep(5)
app.run(host = '0.0.0.0', port = PORT, ssl_context = context, debug = True)
I thought there might be an issue with SSL certificates, but if I just do this without running the python code, everything works out fine:
curl -F "url=https://example.appspot.com:8443/<token>" -F "certificate=#certificate.pem"
https://api.telegram.org/bot<token>/setWebhook