
I have this docker-compose file:

version: '3.7'
services:
  # MongoDB service: Used for storing and managing MongoDB databases.
  database:
    image: mongo:5.0.17
    container_name: db_cont
    environment:
      - MONGO_INITDB_ROOT_USERNAME=${MONGO_INITDB_ROOT_USERNAME}  # MongoDB root user username
      - MONGO_INITDB_ROOT_PASSWORD=${MONGO_INITDB_ROOT_PASSWORD}  # MongoDB root user password
      - MONGO_INITDB_ROOT_DATABASE=${MONGO_INITDB_ROOT_DATABASE}  # MongoDB root database name
    ports:
      - '27017:27017'  # Port mapping: Host port -> Container port
    volumes:
      - db_vols:/data/db  # Persistent volume for MongoDB data
    networks:
      - compose_network

  # PostgreSQL service: Used for storing and managing PostgreSQL databases.
  datawarehouse:
    image: postgres:15.3
    restart: always
    container_name: dw_cont
    environment:
      - POSTGRES_USER=${POSTGRES_USER}  # PostgreSQL superuser username
      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}  # PostgreSQL superuser password
      - POSTGRES_DB=${POSTGRES_DB}  # Default PostgreSQL database name
    ports:
      - '5432:5432'  # Port mapping: Host port -> Container port
    volumes:
      - dw_vols:/var/lib/postgresql/data  # Persistent volume for PostgreSQL data
    networks:
      - compose_network

  # Prefect service: Runs the Prefect server for workflow management.
  prefect:
    image: prefecthq/prefect:2-python3.10
    container_name: prefect_cont
    entrypoint: ["prefect", "server", "start"]
    environment:
      - PREFECT_API_URL=http://127.0.0.1:4300/api/
      - PREFECT_SERVER_API_HOST=0.0.0.0
      - PREFECT_ORION_DATABASE_CONNECTION_URL=postgresql+asyncpg://${POSTGRES_USER}:${POSTGRES_PASSWORD}@datawarehouse:5432/${POSTGRES_DB}  # Connection URL for the Prefect server to connect to PostgreSQL
    ports:
      - '4300:4200'  # Port mapping: Host port -> Container port
    volumes:
      - prefect_vols:/data/prefect  # Persistent volume for Prefect server data
    depends_on:
      - datawarehouse  # Ensure PostgreSQL is running before starting Prefect
    networks:
      - compose_network
      
  # PgAdmin service: Web-based administration tool for PostgreSQL.
  pgadmin:
    image: dpage/pgadmin4:7.4
    container_name: pgadmin
    environment:
      - PGADMIN_DEFAULT_EMAIL=${PGADMIN_DEFAULT_EMAIL} #pgadmin email
      - PGADMIN_DEFAULT_PASSWORD=${PGADMIN_DEFAULT_PASSWORD} #pgadmin password
    ports:
      - '5050:80' # Port mapping: Host port -> Container port
    depends_on:
      - datawarehouse # Ensure PostgreSQL is running before starting PgAdmin
    networks:
      - compose_network
  #Service to manage requirements.txt
  requirements:
    build:
      dockerfile: Dockerfile
    container_name: requirements_cont
    ports: 
      - '8000:8000' #Port mapping: host port --> container port
    networks:
      - compose_network
    command: sh -c "tail -f /dev/null && pre-commit install"
volumes:
  db_vols:  # Volume for MongoDB data
  dw_vols:  # Volume for PostgreSQL data
  prefect_vols:  # Volume for Prefect server data

networks:
  compose_network:
    driver: bridge

I am trying to connect to PostgreSQL after the container is running, using SQLAlchemy (I used test as my user, password, and database name):

from sqlalchemy import Column, Integer, Text, Numeric, Date, TIMESTAMP
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base


engine = create_engine("postgresql://test:test@datawarehouse/test")

Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)

BaseModel = declarative_base()

But I keep getting this error:

sqlalchemy.exc.OperationalError: (psycopg2.OperationalError) could not translate host name "datawarehouse" to address: nodename nor servname provided, or not known

Is anyone able to help? Is it because I am running this Python file outside of the Docker container?

  • try postgresql://test:test@localhost/test -- if you are running the Python code from the host machine (not from within a container), you will need to use the exposed port (5432) to connect to the container. Commented Jul 13, 2023 at 22:20
  • @richyen thanks for the suggestion - I now got this error: sqlalchemy.exc.OperationalError: (psycopg2.OperationalError) connection to server at "localhost" (::1), port 5432 failed: FATAL: role "test" does not exist, which does not make sense, as I created the role test with the POSTGRES_USER env variable in the docker-compose file. Commented Jul 13, 2023 at 22:25
  • It seems you already have a Postgres instance running on your local machine, so the Docker container can't publish the 5432 port. You can shut down Postgres on your machine (or use a different port in your docker-compose.yml, like 5444:5432); then, in your create_engine() call, use postgresql://test:test@localhost:5444/test (see the sketch below). Commented Jul 14, 2023 at 16:07
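
Putting the last comment together, here is a minimal sketch of the remapped-port approach. It assumes you change the datawarehouse port mapping to '5444:5432' and keep test as the user, password, and database name, as described above; the port number is only the one suggested in the comment, not something required:

from sqlalchemy import create_engine, text

# Assumes the compose file publishes the container's 5432 on host port 5444,
# i.e. ports: - '5444:5432', and that POSTGRES_USER/PASSWORD/DB are all "test".
engine = create_engine("postgresql+psycopg2://test:test@localhost:5444/test")

# Quick connectivity check against the running container
with engine.connect() as conn:
    print(conn.execute(text("SELECT version()")).scalar())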

1 Answer


Your error:

sqlalchemy.exc.OperationalError: (psycopg2.OperationalError) could not translate host name "datawarehouse" to address: nodename nor servname provided, or not known

datawarehouse is the service name on your Docker Compose network, so it only resolves from other containers attached to that network. To connect from your host machine, use "localhost" (through the published port) instead.
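
In short, a minimal sketch of the two cases (assuming the user, password, and database name are all test, as in your compose file):

from sqlalchemy import create_engine

# From the host machine: the Compose service name does not resolve,
# so connect through the published port on localhost.
engine_from_host = create_engine("postgresql+psycopg2://test:test@localhost:5432/test")

# From another container on compose_network (e.g. the prefect service),
# the service name "datawarehouse" works as the hostname.
engine_from_container = create_engine("postgresql+psycopg2://test:test@datawarehouse:5432/test")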

Here's a fuller example that sets up a Docker container running PostgreSQL 15 and connects to it:

version: "3.9"

services:
    postgres15:
        image: postgres:15
        container_name: postgres15
        restart: "unless-stopped"
        ports:
            - 5432:5432
        environment:
            - POSTGRES_USER=test_user
            - POSTGRES_PASSWORD=test_password
            - POSTGRES_DB=test
            - POSTGRES_INITDB_ARGS="--encoding=UTF8"
        networks:
            - docker_compose

networks:
    docker_compose:

And the SQLAlchemy code to connect to it:

from sqlalchemy import String, create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, sessionmaker


engine = create_engine(
    "postgresql+psycopg2://test_user:test_password@localhost:5432/test",
    echo=True,
)

Session = sessionmaker(
    bind=engine,
)


class Base(DeclarativeBase):
    pass


class Test(Base):
    __tablename__ = "test"

    id: Mapped[int] = mapped_column(primary_key=True)
    data: Mapped[str] = mapped_column(String(255))


if __name__ == "__main__":
    Base.metadata.create_all(engine)
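
If you want to sanity-check the connection, you could extend the __main__ block along these lines (just an illustrative insert-and-count, reusing the engine, Session, Test, and Base defined above):

    # Insert a row and read it back (the table was created by create_all above).
    with Session() as session:
        session.add(Test(data="hello"))
        session.commit()
        print(session.query(Test).filter_by(data="hello").count())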