I am trying to prepare a docker-compose file that stands up two containers: postgres, and a Python app inside an Alpine image. Please keep in mind before reading: I need to use Python inside Alpine.
My Dockerfile is:
FROM python:3
WORKDIR /usr/src/app
COPY requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
CMD [ "python", "./app.py" ]
My app.py file:
import psycopg2
from config import config

def connect():
    """ Connect to the PostgreSQL database server """
    conn = None
    try:
        # read connection parameters
        params = config()

        # connect to the PostgreSQL server
        print('Connecting to the PostgreSQL database...')
        conn = psycopg2.connect(**params)

        # create a cursor
        cur = conn.cursor()

        # execute a statement
        print('First row of my_table:')
        cur.execute("SELECT * FROM my_table;")

        # display the first row of the result
        first_row = cur.fetchone()
        print(first_row)

        # close the communication with the PostgreSQL server
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
            print('Database connection closed.')

if __name__ == '__main__':
    connect()
I started the Python container with this command:
docker run -it my_image app.py
I separately started the two containers (postgres and python) and got them working. However, my container runs only once; its job is to run a SELECT against the PostgreSQL database.
That was the first part; my main goal is below. For simplicity I prepared a docker-compose.yml file:
version: '3'
services:
  python:
    image: python
    build:
      context: .
      dockerfile: Dockerfile
  postgres:
    image: postgres:${TAG:-latest}
    build:
      context: .
    environment:
      POSTGRES_PASSWORD: example
    ports:
      - "5435:5432"
    networks:
      - postgres
networks:
  postgres:
My Dockerfile is the one shown above.
When I type docker-compose up, postgres starts but python exits with code 0:
my_file_python_1 exited with code 0
What should I do to get a standalone, continuously running container for Python apps with docker-compose? It always runs only once. I can make it run constantly with
docker run -d -it my_image app.py
But my goal is to achieve this with docker-compose.
version: '3'
services:
  python:
    image: python
    build:
      context: .
      dockerfile: Dockerfile
    tty: true
  postgres:
    image: postgres:${TAG:-latest}
    build:
      context: .
    environment:
      POSTGRES_PASSWORD: example
    ports:
      - "5435:5432"
    networks:
      - postgres
networks:
  postgres:
If the exit code is 0, the container exited after finishing its work, which means you have to run a process in the foreground to keep the container running. If the exit code is anything other than 0, the container is exiting because of a code issue. So try running some foreground process.
Could you check whether the container keeps running if you enable the tty option (see reference) in your docker-compose.yml file?
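If you would rather keep the container alive without a TTY, you can make the Python process itself run in the foreground indefinitely. Here is a minimal sketch, assuming the config() helper from the question; the polling loop and its interval are hypothetical additions, not part of the original app:

import time

import psycopg2
from config import config

def poll_forever(interval_seconds=60):
    """Re-run the SELECT periodically so the process never exits."""
    while True:
        conn = None
        try:
            conn = psycopg2.connect(**config())
            cur = conn.cursor()
            cur.execute("SELECT * FROM my_table;")
            print(cur.fetchone())
            cur.close()
        except (Exception, psycopg2.DatabaseError) as error:
            print(error)
        finally:
            if conn is not None:
                conn.close()
        time.sleep(interval_seconds)

if __name__ == '__main__':
    poll_forever()

With this, the container stays up on its own and docker-compose up treats it like any other long-running service.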
Related
Can't connect to mssql in docker container
Code piece for connecting to mssql in Python:
import pandas as pd
import sqlalchemy

def connectsql():
    engine = sqlalchemy.create_engine(
        "mssql+pymssql://service name")
    ms_sql_conn = engine.connect()
    df = pd.read_sql('select * from table name',
                     ms_sql_conn,
                     parse_dates=["rest_date"])
    ms_sql_conn.close()
    return df
When I run the script locally, the connection is successful, but when I put this code in Docker, there is no connection. As I understand it, I need to write something in the environment section of the docker-compose file, but I don't understand what exactly, and do I need to change the Python code for this? Dockerfile contents:
FROM python:3
RUN pip install --upgrade pip --default-timeout=100 future
WORKDIR /check
COPY . /check
RUN pip install -r requirements.txt
CMD [ "python", "/check/bot2.py" ]
docker-compose content:
version: '3.1'
services:
  bot2:
    image: first
    build: ./
    restart: always
I tried to set up the connection via the environment, but it didn't work out, and I also don't know whether I need to change the Python code for that.
In your docker-compose file you need to add two lines:
extra_hosts:
  - "host.docker.internal:host-gateway"
version: '3.1'
services:
  bot2:
    image: first
    build: ./
    restart: always
    extra_hosts:
      - "host.docker.internal:host-gateway"
I'm building a RESTful API with Flask and a PostgreSQL database, and I'm trying to dockerize the app using docker-compose and a Dockerfile, but when I try to run the containers I get this error:
Traceback (most recent call last):
  File "/app/try_flask.py", line 12, in <module>
    conn=psycopg.connect("dbname=testing user=postgres port=5432 password=postgres")
  File "/usr/local/lib/python3.9/site-packages/psycopg/connection.py", line 728, in connect
    raise ex.with_traceback(None)
psycopg.OperationalError: connection is bad: No such file or directory
    Is the server running locally and accepting connections on Unix domain socket "/var/run/postgresql/.s.PGSQL.5432"?
the app file
try_flask.py
from flask import Flask, request, json, jsonify
import psycopg

app = Flask(__name__)

conn = psycopg.connect("dbname=testing user=postgres port=5432 password=postgres")
cur = conn.cursor()

@app.route('/business_table/', methods=['POST', 'GET'])
def insert_locations():
    if request.method == "POST":
        business_name = request.form["name"]
        category = request.form["category"]
        cur.execute("select exists(select name from public.business where name=%s)", (business_name,))
        row = cur.fetchone()[0]
        if row == False:
            cur.execute("INSERT INTO business(name,category) VALUES(%s, %s) RETURNING business_id ",
                        (business_name, category))
            row = cur.fetchone()[0]
        else:
            return "already exists on the database"
        conn.commit()
        return f"business with {business_name} is added with id {row}"
    elif request.method == "GET":
        cur.execute("SELECT * FROM business")
        business = [
            dict(business_id=row[0], name=row[1], category=row[2])
            for row in cur.fetchall()
        ]
        conn.commit()
        return jsonify(business)

if __name__ == "__main__":
    app.run(debug=True, host="0.0.0.0", port=5000)
docker-compose.yaml file
version: '3'
services:
  postgres:
    restart: always
    image: postgis/postgis:15-3.3-alpine
    environment:
      - POSTGRES_USER=postgres
      - POSTGRES_PASSWORD=postgres
      - POSTGRES_DB=testing
    volumes:
      - ./postgres-data/postgres:/var/lib/postgresql/data
    ports:
      - "5432:5432"
  app:
    restart: always
    build: .
    ports:
      - 5000:5000
    volumes:
      - .:/app
    depends_on:
      - postgres
    entrypoint: ["python", "try_flask.py", "runserver"]
Dockerfile
FROM python:3.9.5-slim-buster
RUN apt-get update && apt-get -y install libpq-dev gcc && pip install psycopg
RUN mkdir /app
WORKDIR /app
COPY . /app
RUN pip install --no-cache-dir -r requirements.txt
EXPOSE 5000
The PostgreSQL container runs fine; the problem is the Python container. I have tried with these files but always get the same error. It seems to be a connection problem. Any solution, please?
You need to specify the hostname for the Postgres container when you make the database connection. Otherwise, it defaults to localhost.
Since you're using Docker Compose, the hostname in the network that Docker creates is the name of the service, i.e. postgres in your case.
See https://docs.docker.com/compose/networking/.
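Applied to the connection call from the question, that would look roughly like this (only the host parameter is new; everything else is unchanged):

import psycopg

# "postgres" is the Compose service name, which Docker's internal DNS
# resolves to the database container's address.
conn = psycopg.connect(
    "dbname=testing user=postgres password=postgres host=postgres port=5432")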
I'm running an app inside a Docker container. That app uses the Docker postgres image to save data in a database. I need to keep a local copy of this database's data to avoid losing it if the container is removed or purged somehow, so I am using volumes inside my docker-compose.yaml file, but the local DB folder is always empty, and whenever I move or purge the container the data is lost.
docker-compose.yaml
version: "2"
services:
db:
image: postgres
volumes:
- ./data/db:/var/lib/postgresql/data
ports:
- '5433:5432'
restart: always
command: -p 5433
environment:
- POSTGRES_DB=mydata
- POSTGRES_USER=mydata
- POSTGRES_PASSWORD=mydata#
- PGDATA=/tmp
django-apache2:
build: .
container_name: rolla_django
restart: always
environment:
- POSTGRES_DB=mydata
- POSTGRES_USER=mydata
- POSTGRES_PASSWORD=mydata#
- PGDATA=/tmp
ports:
- '4002:80'
- '4003:443'
volumes:
- ./www/:/var/www/html
- ./www/demo_app/static_files:/var/www/html/demo_app/static_files
- ./www/demo_app/media:/var/www/html/demo_app/media
# command: sh -c 'python manage.py migrate && python manage.py loaddata db_backkup.json && apache2ctl -D FOREGROUND'
command: sh -c 'wait-for-it db:5433 -- python manage.py migrate && apache2ctl -D FOREGROUND'
depends_on:
- db
As you can see, I used ./data/db:/var/lib/postgresql/data, but locally the ./data/db directory is always empty!
NOTE: when I use docker volume list, it shows no volumes at all.
According to your setup, the data is in /tmp because of PGDATA=/tmp. Remove this and your volume mapping should work.
Also, your command: -p 5433 makes postgres listen on port 5433, but you still map port 5432. So if you can't reach the database, it might be because of that.
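Inside the Compose network the Django container therefore has to target port 5433 as well, which the wait-for-it db:5433 command already does. A hypothetical connection check reflecting this, with credentials taken from the compose file above:

import psycopg2

# Connect to the "db" service on 5433, the port set by `command: -p 5433`.
conn = psycopg2.connect(
    dbname="mydata", user="mydata", password="mydata#",
    host="db", port=5433)
print("connected to db:5433")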
I know this question has been asked a million times, and I've read as many of the answers as I can find. They all seem to come to one conclusion (db hostname is the container service name).
I got it to work in my actual code base, but it started failing when I added the ffmpeg install to the Dockerfile. Nothing else had to be done; just installing FFmpeg via apt-get install -y ffmpeg would cause my Python code to get the connection refused message. If I removed the ffmpeg install line, my code would connect to the db just fine, although re-running the container would trigger the dreaded connection refused error.
So I created a quick sample app so I could post here and try to get some thoughts on what's going on. But now this sample code won't connect to the db no matter what I do.
So here goes - And thanks in advance for any help:
myapp.py
# import ffmpeg
import psycopg2

if __name__ == "__main__":
    print("Starting app...")

    # probe = ffmpeg.probe("131698249.mp4")
    # print(probe)

    try:
        connection = psycopg2.connect(
            user="docker", password="docker", host="db", port="5432", database="docker")
        cursor = connection.cursor()
        postgreSQL_select_Query = "select * from test_table"

        cursor.execute(postgreSQL_select_Query)
        print("Selecting rows from table using cursor.fetchall")
        records = cursor.fetchall()

        print("Print each row and its column values")
        for row in records:
            print(row)

        cursor.close()
        connection.close()
    except (Exception, psycopg2.Error) as error:
        print("Error while fetching data from PostgreSQL", error)
Dockerfile
# NOTE: the original post omits the FROM line; a Python base image is assumed here.
FROM python:3

WORKDIR /usr/src/app
COPY requirements.txt .
RUN python -m pip install -r requirements.txt
COPY . .
CMD ["python", "myapp.py"]
docker-compose.yml
version: '3.8'
services:
  db:
    container_name: pg_container
    image: postgres:14.1
    restart: always
    environment:
      POSTGRES_USER: docker
      POSTGRES_PASSWORD: docker
      POSTGRES_DB: docker
    ports:
      - "8000:5432"
    expose:
      - "5432"
    volumes:
      - ./init.sql:/docker-entrypoint-initdb.d/init.sql
      - pg_data:/var/lib/postgresql/data
  myapp:
    container_name: myapp
    build:
      context: .
      dockerfile: ./Dockerfile
    restart: "no"
    depends_on:
      - db
volumes:
  pg_data:
If I build and run the code with docker compose up --detach, everything gets built and started. The database starts up and gets populated with the table/data from init.sql (not included here). The app container starts and the code executes, but it immediately fails with the connection refused error.
However, if I run psql -U docker -h localhost -p 8000 -d docker from my computer, it connects without any error and I can query the database as expected. But the app in the container won't connect, and if I start the container with docker run -it myapp /bin/bash and then, from inside the container, run psql -U docker -h db -p 5432 -d docker, I get the connection refused error.
If anyone has any thoughts or ideas I would be so grateful. I've been wrestling with this for three days now.
Looks like I've resolved it. I was sure I'd tried this before, but regardless, adding a networks section to the docker-compose.yml fixed the issue.
I also had to do the second docker-compose up -d as suggested by David Maze's comment. But the combination of the two seem to have fixed my issue.
Here's my updated docker-compose.yml for complete clarity:
version: '3.8'
services:
  postgres-db:
    container_name: pg_container
    image: postgres:14.1
    restart: always
    environment:
      POSTGRES_USER: docker
      POSTGRES_PASSWORD: docker
      POSTGRES_DB: docker
    ports:
      - "5500:5432"
    volumes:
      - ./init.sql:/docker-entrypoint-initdb.d/init.sql
    networks:
      - dock-db-test
  myapp:
    container_name: myapp
    build:
      context: .
      dockerfile: ./Dockerfile
    restart: "no"
    depends_on:
      - postgres-db
    networks:
      - dock-db-test
networks:
  dock-db-test:
    external: false
    name: dock-db-test
I would like to have a Python Flask application that runs with a PostgreSQL database (psycopg2). So I made this docker-compose file:
version: "3"
services:
web:
depends_on:
- database
container_name: web
build:
context: "."
dockerfile: "docker/Dockerfile.web"
ports:
- 5000:5000
volumes:
- database:/var/run/postgresql
database:
container_name: database
environment:
POSTGRES_PASSWORD: "password"
POSTGRES_USER: "user"
POSTGRES_DB: "products"
image: postgres
expose:
- 5432
volumes:
- database:/var/run/postgresql
volumes:
database:
In my app.py I try to connect to postgres like this:
conn = psycopg2.connect(database="products", user="user", password="password", host="database", port="5432")
When I run docker-compose up I get the following error:
"Is the server running on host "database" (172.21.0.2) and accepting TCP/IP connections on port 5432?"
I don't know what I've done wrong here.
The container "database" exposes its port 5432.
Both containers are on the same network which is "web_app_default".
The socket file exists in the /var/run/postgresql directory on the "web" container.
Any ideas?
Thanks for the replies and have a nice day.
I think what happened is that even though you have depends_on set to database, that only means the web container starts after the database container starts. However, on the first run the database generally takes quite some time to initialize, so by the time your web server is up, the database is still not ready to accept connections.
There are two ways to work around the problem here:
The easy way, with no code changes: run docker-compose up -d (detached mode) and wait for the database to finish initializing. Then run docker-compose up -d again, and your web container will now be able to connect to the database.
The second way is to update the web container with restart: always, so docker-compose keeps restarting your web container until it runs successfully, i.e. until the database is ready to accept connections (a Python-level alternative is sketched after the snippet below).
version: "3"
services:
web:
depends_on:
- database
...
restart: always
...
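A third, application-level variant of the same idea is to retry the connection from Python until the database accepts it. A minimal sketch, with connection parameters assumed to match the compose file from the question:

import time

import psycopg2

def connect_with_retry(retries=10, delay_seconds=3):
    """Keep retrying until PostgreSQL is ready to accept connections."""
    for attempt in range(1, retries + 1):
        try:
            return psycopg2.connect(
                database="products", user="user", password="password",
                host="database", port=5432)
        except psycopg2.OperationalError as error:
            print(f"Attempt {attempt} failed: {error}")
            time.sleep(delay_seconds)
    raise RuntimeError("Database never became ready")

conn = connect_with_retry()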