I can't connect my Python app to Postgres; everything runs in Docker. This is my Dockerfile:
FROM python:3.8
RUN mkdir /app
WORKDIR /app
ADD . /app/
ADD requirements.txt requirements.txt
RUN apt update -y
RUN pip install --upgrade pip
RUN pip install -r requirements.txt
My docker-compose.yml:
version: '3'
services:
  db:
    image: postgres:13.4-alpine
    environment:
      POSTGRES_PASSWORD: secret
      POSTGRES_HOST_AUTH_METHOD: trust
    env_file:
      - .env
    ports:
      - "5432:5432"
    volumes:
      - ./database/init.sql:/docker-entrypoint-initdb.d/init.sql
  app:
    build: .
    restart: always
    depends_on:
      - db
    stdin_open: true
    tty: true
    env_file:
      - .env
And my .env file:
DB_NAME=database_dev
DB_USER=postgres
DB_PASSWORD=secret
DB_HOST=localhost
DB_PORT=5432
I'm trying to connect with SQLAlchemy, and this is the error:
sqlalchemy.exc.OperationalError: (psycopg2.OperationalError) could not translate host name "{hostname}" to address: Name or service not known
Edit:
Adding my Python code for the connection; the environment variables I use come from the .env file:
class DatabaseManager:
    def __init__(self):
        self.db_url = 'postgresql+psycopg2://{user}:{password}@{hostname}/{database_name}'
        self.db_url.format(
            user=os.environ['DB_USER'],
            password=os.environ['DB_PASSWORD'],
            hostname=os.environ['DB_HOST'],
            database_name=os.environ['DB_NAME']
        )
        self.engine = create_engine(self.db_url)

    def make_session(self):
        self.Session = sessionmaker(bind=self.engine)
        self.session = self.Session()

    def add_data(self, data):
        self.session.add(data)
        self.session.commit()

    def close(self):
        self.session.close()
According to your edit, your DB_HOST variable is not correct: here localhost means localhost inside the Python container, not the database container. Your Python instance (app) should point to the hostname of your db service.
Because docker-compose allows referring to services by their name, you can simply put this in your env file:
DB_HOST=db
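Note also that str.format returns a new string rather than modifying the template in place, which is why the error message still shows the literal {hostname} placeholder. A minimal sketch of the corrected constructor (assuming the same environment variables, with DB_HOST=db):

import os
from sqlalchemy import create_engine

class DatabaseManager:
    def __init__(self):
        # Assign the formatted result; format() does not mutate the template.
        self.db_url = 'postgresql+psycopg2://{user}:{password}@{hostname}/{database_name}'.format(
            user=os.environ['DB_USER'],
            password=os.environ['DB_PASSWORD'],
            hostname=os.environ['DB_HOST'],  # "db", the Compose service name
            database_name=os.environ['DB_NAME'],
        )
        self.engine = create_engine(self.db_url)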
I'm building a Flask RESTful API with a PostgreSQL database, and I'm trying to dockerize the app using docker-compose and a Dockerfile, but when I run the containers I get this error:
Traceback (most recent call last):
  File "/app/try_flask.py", line 12, in <module>
    conn=psycopg.connect("dbname=testing user=postgres port=5432 password=postgres")
  File "/usr/local/lib/python3.9/site-packages/psycopg/connection.py", line 728, in connect
    raise ex.with_traceback(None)
psycopg.OperationalError: connection is bad: No such file or directory
        Is the server running locally and accepting connections on Unix domain socket "/var/run/postgresql/.s.PGSQL.5432"?
The app file, try_flask.py:
from flask import Flask, request, json, jsonify
import psycopg

app = Flask(__name__)

conn=psycopg.connect("dbname=testing user=postgres port=5432 password=postgres")
cur=conn.cursor()

@app.route('/business_table/',methods=['POST','GET'])
def insert_locations():
    if request.method=="POST":
        business_name=request.form["name"]
        category=request.form["category"]
        cur.execute("select exists(select name from public.business where name=%s)", (business_name,))
        row = cur.fetchone()[0]
        if row ==False:
            cur.execute("INSERT INTO business(name,category) VALUES(%s, %s) RETURNING business_id ",
                        (business_name,category))
            row = cur.fetchone()[0]
        else:
            return "already exists on the database"
        conn.commit()
        return f"business with {business_name} is added with id {row}"
    elif request.method=="GET":
        cur.execute("SELECT * FROM business")
        business=[
            dict(business_id=row[0],name=row[1],category=row[2])
            for row in cur.fetchall()
        ]
        conn.commit()
        return jsonify(business)

if __name__ == "__main__":
    app.run(debug=True,host="0.0.0.0", port=5000)
docker-compose.yaml file
version: '3'
services:
  postgres:
    restart: always
    image: postgis/postgis:15-3.3-alpine
    environment:
      - POSTGRES_USER=postgres
      - POSTGRES_PASSWORD=postgres
      - POSTGRES_DB=testing
    volumes:
      - ./postgres-data/postgres:/var/lib/postgresql/data
    ports:
      - "5432:5432"
  app:
    restart: always
    build: .
    ports:
      - 5000:5000
    volumes:
      - .:/app
    depends_on:
      - postgres
    entrypoint: ["python", "try_flask.py","runserver"]
Dockerfile
FROM python:3.9.5-slim-buster
RUN apt-get update && apt-get -y install libpq-dev gcc && pip install psycopg
RUN mkdir /app
WORKDIR /app
COPY . /app
RUN pip install --no-cache-dir -r requirements.txt
EXPOSE 5000
The PostgreSQL container is running fine; the problem is in the Python container. I have tried with these files but always get the same error. It seems to be a connection problem. Any solution, please?
You need to specify the hostname of the Postgres container when you make the database connection. Otherwise it defaults to localhost, and psycopg then tries the local Unix socket inside the app container, which is exactly what the error above shows.
Since you're using Docker Compose, the hostname in the network that Docker creates is the name of the service, i.e. postgres in your case.
See https://docs.docker.com/compose/networking/.
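As a minimal sketch (credentials taken from the compose file above), only the host needs to be added to the connection string:

import psycopg

# "postgres" is the Compose service name, resolvable on the default network.
conn = psycopg.connect(
    "dbname=testing user=postgres password=postgres host=postgres port=5432"
)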
I'm building a practice application to learn Docker. The app is written in Python with FastAPI, following its tutorial, and everything runs from a Dockerfile and docker compose, including a connection to a PostgreSQL database through the SQLAlchemy ORM. Running the project 'normally' from the command line works without any problem, but running it with docker compose generates several errors like the following:
ModuleNotFoundError: No module named 'routers'
When I 'solve' it, another error appears which I have not been able to solve and do not understand:
ModuleNotFoundError: No module named 'sqlalchemy'
But I do have SQLAlchemy installed, in requirements.txt:
SQLAlchemy==1.4.39
This is my Python code; this is the main module:
from fastapi import FastAPI
from .routers import roles
app = FastAPI()
app.include_router(roles.router)
And this is the code where the error is generated:
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session
from db.postgres_connection import SessionLocal, engine
from models import roles
from schemas import roles as schemas

roles.Base.metadata.create_all(bind=engine)

router = APIRouter()

def get_db():
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()

@router.get('/api/v1/roles/', response_model=list[schemas.RoleBase])
async def get_roles(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
    roles = get_roles(db, skip=skip, limit=limit)
    return roles
This is my Dockerfile:
FROM python:3.10.5-slim-buster
WORKDIR /code
COPY ./app ./code/app
COPY ./requirements.txt /code/
RUN pip install -r requirements.txt
EXPOSE 8000
CMD [ "uvicorn", "app.main:app", "--reload" ]
This is my docker compose file:
version: '3.9'
services:
  web:
    build: .
    ports:
      - '8000:8000'
    volumes:
      - .:/app
  db:
    image: postgres
    restart: always
    environment:
      POSTGRES_USER: hamel
      POSTGRES_PASSWORD: contrasena
      POSTGRES_DB: bankmel
    volumes:
      - /home/isla/storage:/var/lib/postgresql/data
    ports:
      - '5432:5432'
Folder structure:
You've got a couple of problems.
In the Dockerfile, change your COPY path: the current one creates one more directory under /code, so the code ends up at /code/code/app instead of /code/app. Use:
COPY ./app ./app
Then you will have a couple of other problems in your code.
Modify your Dockerfile as follows; I'm sure this error will be fixed:
FROM python:3.10.5-slim-buster
WORKDIR /code
COPY ./app ./app
COPY ./requirements.txt ./requirements.txt
RUN pip install -r requirements.txt
EXPOSE 8000
CMD [ "uvicorn", "app.main:app", "--reload" ]
Also, in docker-compose.yml you should change the volume in the web service as follows, so the bind mount lands on the same /code directory the image uses:
volumes:
  - .:/code
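To double-check the layout from inside the container, here is a quick sketch (the script name check_layout.py is hypothetical; run it with docker compose run web python check_layout.py):

import importlib.util
import pathlib

# With the corrected COPY and volume, /code should contain app/ and
# requirements.txt, and sqlalchemy should be importable.
print(sorted(p.name for p in pathlib.Path("/code").iterdir()))
print("sqlalchemy found:", importlib.util.find_spec("sqlalchemy") is not None)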
I know this question has been asked a million times, and I've read as many of the answers as I can find. They all seem to come to one conclusion (the db hostname is the container's service name).
I got it to work in my actual code base, but it started failing when I added an ffmpeg install to the Dockerfile. Nothing else changed: just installing FFmpeg via apt-get install -y ffmpeg would cause my Python code to get the connection-refused message. If I removed the ffmpeg install line, my code would connect to the db just fine, although re-running the container would trigger the dreaded connection-refused error.
So I created a quick sample app so I could post here and try to get some thoughts on what's going on. But now this sample code won't connect to the db no matter what I do.
So here goes - And thanks in advance for any help:
myapp.py
# import ffmpeg
import psycopg2

if __name__ == "__main__":
    print("Starting app...")

    # probe = ffmpeg.probe("131698249.mp4")
    # print(probe)

    try:
        connection = psycopg2.connect(
            user="docker", password="docker", host="db", port="5432", database="docker")
        cursor = connection.cursor()
        postgreSQL_select_Query = "select * from test_table"

        cursor.execute(postgreSQL_select_Query)
        print("Selecting rows from table using cursor.fetchall")
        records = cursor.fetchall()

        print("Print each row and its column values")
        for row in records:
            print(row)

        cursor.close()
        connection.close()
    except (Exception, psycopg2.Error) as error:
        print("Error while fetching data from PostgreSQL", error)
Dockerfile
WORKDIR /usr/src/app
COPY requirements.txt .
RUN python -m pip install -r requirements.txt
COPY . .
CMD ["python", "myapp.py"]
docker-compose.yml
version: '3.8'
services:
db:
container_name: pg_container
image: postgres:14.1
restart: always
environment:
POSTGRES_USER: docker
POSTGRES_PASSWORD: docker
POSTGRES_DB: docker
ports:
- "8000:5432"
expose:
- "5432"
volumes:
- ./init.sql:/docker-entrypoint-initdb.d/init.sql
- pg_data:/var/lib/postgresql/data
myapp:
container_name: myapp
build:
context: .
dockerfile: ./Dockerfile
restart: "no"
depends_on:
- db
volumes:
pg_data:
If I build and run the code: docker compose up --detach
Everything gets built and started. The Database starts up and gets populated with table/data from the init.sql (not included here)
The app container starts and the code executes, but immediately fails with the Connection refused error.
However, if from my computer I run: psql -U docker -h localhost -p 8000 -d docker
it connects without any error and I can query the database as expected.
But the app in the container won't connect, and if I run the container with docker run -it myapp /bin/bash and then from inside the container run psql -U docker -h db -p 5432 -d docker, I get the Connection refused error.
If anyone has any thoughts or ideas I would be so grateful. I've been wrestling with this for three days now.
Looks like I've resolved it. I was sure I'd tried this before, but regardless, adding a networks section to the docker-compose.yml seemed to fix the issue.
I also had to do the second docker-compose up -d as suggested by David Maze's comment. The combination of the two seems to have fixed my issue.
Here's my updated docker-compose.yml for complete clarity:
version: '3.8'
services:
  postgres-db:
    container_name: pg_container
    image: postgres:14.1
    restart: always
    environment:
      POSTGRES_USER: docker
      POSTGRES_PASSWORD: docker
      POSTGRES_DB: docker
    ports:
      - "5500:5432"
    volumes:
      - ./init.sql:/docker-entrypoint-initdb.d/init.sql
    networks:
      - dock-db-test
  myapp:
    container_name: myapp
    build:
      context: .
      dockerfile: ./Dockerfile
    restart: "no"
    depends_on:
      - postgres-db  # the service was renamed from "db" above
    networks:
      - dock-db-test
networks:
  dock-db-test:
    external: false
    name: dock-db-test
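For anyone debugging a similar setup, a small sketch (service name postgres-db taken from the compose file above) that can be run inside the app container to confirm DNS resolution and TCP reachability before involving psycopg2:

import socket

# Resolves the Compose service name and opens a raw TCP connection;
# raises OSError if the database container is unreachable on the network.
with socket.create_connection(("postgres-db", 5432), timeout=5):
    print("TCP connection to postgres-db:5432 succeeded")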
I have a Waitress API that I'm trying to make public in a Docker container. However, I am receiving the following error:
return self.socket.bind(addr)
fleettracker_1 | OSError: [Errno 99] Cannot assign requested address
Here is my docker-compose.yml:
version: '3'
services:
  fleettracker:
    build: ./fleettracker
    ports:
      - "5000:5017"
    links:
      - db
    networks:
      - fullstack
  db:
    image: mysql
    ports:
      - "3306:3306"
networks:
  fullstack:
    driver: bridge
Here is my Dockerfile:
FROM python:3
WORKDIR /usr/src/app
COPY requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
CMD [ "python", "waitress API.py" ]
And here is my waitress API main:
if __name__ == "__main__":
    waitress.serve(app=app, host="SOme IP", port=Someport)
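For reference: [Errno 99] on socket.bind usually means the process tried to bind an IP address that does not exist inside the container. A hedged sketch of the usual fix, binding all interfaces and listening on the container-side port of the "5000:5017" mapping (the stand-in WSGI app below is just for illustration; use your real application object):

import waitress

def app(environ, start_response):
    # Stand-in WSGI app; replace with your actual application.
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"ok"]

# 0.0.0.0 binds every interface inside the container; the host machine's
# IP is not assignable from within the container's network namespace.
# Port 5017 matches the container side of the "5000:5017" ports mapping.
waitress.serve(app=app, host="0.0.0.0", port=5017)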
I have a Flask app that initializes successfully and connects to the PostgreSQL database when run directly. However, when I try to dockerize this app, I get the error message below. SQLALCHEMY_DATABASE_URI is correct and I can connect with it, so I can't figure out where I have gone wrong.
docker-compose logs
app_1 |   File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/engine/url.py", line 60, in __init__
app_1 |     self.port = int(port)
app_1 | ValueError: invalid literal for int() with base 10: 'None'
The Postgres database starts successfully in its Docker container:
postgres_1 | LOG: database system is ready to accept connections
config.py
from os import environ
import os

RDS_USERNAME = environ.get('RDS_USERNAME')
RDS_PASSWORD = environ.get('RDS_PASSWORD')
RDS_HOSTNAME = environ.get('RDS_HOSTNAME')
RDS_PORT = environ.get('RDS_PORT')
RDS_DB_NAME = environ.get('RDS_DB_NAME')

SQLALCHEMY_DATABASE_URI = "postgresql+psycopg2://{username}:{password}@{hostname}:{port}/{dbname}"\
    .format(username = RDS_USERNAME, password = RDS_PASSWORD, \
            hostname = RDS_HOSTNAME, port = RDS_PORT, dbname = RDS_DB_NAME)
flask_app.py (entry point)
def create_app():
    app = Flask(__name__, static_folder="./static", template_folder="./static")
    app.config.from_pyfile('./app/config.py', silent=True)
    register_blueprint(app)
    register_extension(app)
    with app.app_context():
        print(db)  # -> This prints the correct path for SQLALCHEMY_DATABASE_URI
        db.create_all()
        db.session.commit()
    return app

def register_blueprint(app):
    app.register_blueprint(view_blueprint)
    app.register_blueprint(race_blueprint)

def register_extension(app):
    db.init_app(app)
    migrate.init_app(app)

app = create_app()

if __name__ == '__main__':
    app.run(host='0.0.0.0', port=8080, debug=True)
Dockerfile
FROM ubuntu
RUN apt-get update && apt-get -y upgrade
RUN apt-get install -y python-pip && pip install --upgrade pip
RUN mkdir /home/ubuntu
WORKDIR /home/ubuntu/celery-scheduler
ADD requirements.txt /home/ubuntu/celery-scheduler/
RUN pip install -r requirements.txt
COPY . /home/ubuntu/celery-scheduler
EXPOSE 5000
CMD ["python", "flask_app.py", "--host", "0.0.0.0"]
docker-compose.yml
version: '2'
services:
app:
restart: always
build:
context: .
dockerfile: Dockerfile
volumes:
- .:/app
depends_on:
- postgres
postgres:
restart: always
image: postgres:9.6
environment:
- POSTGRES_USER=${RDS_USERNAME}
- POSTGRES_PASSWORD=${RDS_PASSWORD}
- POSTGRES_HOSTNAME=${RDS_HOSTNAME}
- POSTGRES_DB=${RDS_DB_NAME}
ports:
- "5432:5432"
You need to set the environment variables RDS_USERNAME, RDS_PASSWORD, RDS_HOSTNAME, RDS_PORT, and RDS_DB_NAME in the Dockerfile with ENV key value, for example:
ENV RDS_PORT 5432
Answer:
1) Create a .env file with the variable definitions. (I assumed that env variables would be 'pulled' from .bash_profile, but this is not the case... Remember to add .env to .gitignore for privacy.)
RDS_USERNAME=xxx
RDS_PASSWORD=xxx
2) Specify the environment variables in docker-compose under the app service.
docker-compose.yml
services:
  app:
    restart: always
    build:
      context: .
      dockerfile: Dockerfile
    environment:
      - RDS_USERNAME=${RDS_USERNAME}
      - RDS_PASSWORD=${RDS_PASSWORD}
      - RDS_HOSTNAME=${RDS_HOSTNAME}
      - RDS_DB_NAME=${RDS_DB_NAME}
      - RDS_PORT=${RDS_PORT}  # without this, port stays None and int('None') fails
    volumes:
      - .:/app
    depends_on:
      - postgres
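A defensive variant of config.py (a sketch, not part of the accepted answer) that avoids the original int('None') crash even when the variable is missing, by giving the port a default:

from os import environ

# Fall back to Postgres's default port when RDS_PORT is unset, so the
# connection URL never contains the literal string 'None'.
RDS_PORT = environ.get('RDS_PORT', '5432')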