Python package available when running the Docker image directly, but not with docker-compose up

Hi,
I'm trying to modify an Airflow docker-compose setup to use an image extended from a Dockerfile, so that dbt is installed in the containers. However, docker-compose seems to ignore the Dockerfile: the various Airflow containers launch and run correctly, but not a single one has dbt (fully) installed. I get the following error, which at least means the dbt command is found (as opposed to a "dbt: command not found" error):

root@42b2358a7792:/opt/airflow# dbt --version
Traceback (most recent call last):
  File "/home/airflow/.local/bin/dbt", line 5, in <module>
    from dbt.main import main
ModuleNotFoundError: No module named 'dbt'
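
For what it's worth, the traceback shows that the dbt launcher script exists at /home/airflow/.local/bin/dbt but that the interpreter it invokes cannot import the dbt package. A few commands to narrow that down inside the container (a sketch; the path comes from the traceback above):

which dbt                                  # where the launcher script lives
head -1 /home/airflow/.local/bin/dbt       # which Python interpreter it uses
python -m pip show dbt                     # is dbt installed for this interpreter?
python -c "import sys; print(sys.path)"    # is /home/airflow/.local/... on sys.path?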

The docker-compose.yml file:

version:                                        "3.7"
#https://github.com/compose-spec/compose-spec/blob/master/spec.md#using-extensions-as-fragments

# Airflow extensions
x-airflow-common:                               &airflow-common
  build:                                        .
  environment:                                  &airflow-common-env
    AIRFLOW__CORE__EXECUTOR:                    CeleryExecutor
    AIRFLOW__CORE__SQL_ALCHEMY_CONN:            postgresql+psycopg2://airflow:airflow@postgres/airflow
    AIRFLOW__CELERY__RESULT_BACKEND:            db+postgresql://airflow:airflow@postgres/airflow
    AIRFLOW__CELERY__BROKER_URL:                redis://:@redis:6379/0
    AIRFLOW__CORE__FERNET_KEY:                  ''
    AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true'
    AIRFLOW__CORE__LOAD_EXAMPLES:               'true'
    AIRFLOW__API__AUTH_BACKEND:                 'airflow.api.auth.backend.basic_auth'
  volumes:
    - ./dags:/opt/airflow/dags
    - ./logs:/opt/airflow/logs
    - ./plugins:/opt/airflow/plugins

  user:                                         "${AIRFLOW_UID:-50000}:${AIRFLOW_GID:-50000}"
  depends_on:
    redis:
      condition:                                service_healthy
    postgres:
      condition:                                service_healthy


services:
 
  # Database
  postgres:
      image:                                    postgres:13
      environment:
        POSTGRES_USER:                          airflow
        POSTGRES_PASSWORD:                      airflow
        POSTGRES_DB:                            airflow
      volumes:
        - postgres-db-volume:/var/lib/postgresql/data
      healthcheck:
        test:                                   ["CMD", "pg_isready", "-U", "airflow"]
        interval:                               5s
        retries:                                5
      restart:                                  always
      ports:
        - 5432:5432

  # Airflow services
  redis:
      image:                                    redis:latest
      container_name:                           airflow-redis
      ports:
        - 6379:6379
      healthcheck:
        test:                                   ["CMD", "redis-cli", "ping"]
        interval:                               5s
        timeout:                                30s
        retries:                                50
      restart:                                  always
  airflow-webserver:
      <<:                                       *airflow-common
      container_name:                           airflow-webserver
      command:                                  webserver
      ports:
        - 8080:8080
      healthcheck:
        test:                                   ["CMD", "curl", "--fail", "http://localhost:8080/health"]
        interval:                               10s
        timeout:                                10s
        retries:                                5
      restart:                                  always
  airflow-scheduler:
      <<:                                       *airflow-common
      container_name:                           airflow-scheduler
      command:                                  scheduler
      healthcheck:
        test:                                   ["CMD-SHELL", 'airflow jobs check --job-type SchedulerJob --hostname "${HOSTNAME}"']
        interval:                               10s
        timeout:                                10s
        retries:                                5
      restart:                                  always
  airflow-worker:
      <<:                                       *airflow-common
      container_name:                           airflow-worker
      command:                                  celery worker
      healthcheck:
        test:
          - "CMD-SHELL"
          - 'celery --app airflow.executors.celery_executor.app inspect ping -d "celery@${HOSTNAME}"'
        interval:                               10s
        timeout:                                10s
        retries:                                5
      restart:                                  always

  airflow-init:
      <<:                                       *airflow-common
      container_name:                           airflow-init
      command:                                  version
      environment:
        <<:                                     *airflow-common-env
        _AIRFLOW_DB_UPGRADE:                    'true'
        _AIRFLOW_WWW_USER_CREATE:               'true'
        _AIRFLOW_WWW_USER_USERNAME:             ${_AIRFLOW_WWW_USER_USERNAME:-airflow}
        _AIRFLOW_WWW_USER_PASSWORD:             ${_AIRFLOW_WWW_USER_PASSWORD:-airflow}

volumes:
  workspace:
     name:                                      ${WORKSPACE_DOCKER_MOUNT}
  data:
     name:                                      ${DATA_DOCKER_MOUNT}
  db:
     name:                                      ${DB_DOCKER_MOUNT}
  postgres-db-volume:
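
Note that x-airflow-common specifies build: . and no image: key, so every Airflow service runs whatever image docker-compose last built for this project. A quick way to confirm which image a running container was actually created from (a sketch; airflow-webserver matches the container_name above):

docker-compose images
docker inspect --format '{{.Image}}' airflow-webserver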

The Dockerfile:

FROM apache/airflow:2.1.0

# Install system packages as root
USER root
RUN apt-get update \
  && apt-get install -y git libpq-dev python3 python3-pip \
  && apt-get autoremove -yqq --purge \
  && apt-get clean \
  && rm -rf /var/lib/apt/lists/*

COPY requirements.txt .

# Install Python packages as the airflow user; they land in /home/airflow/.local,
# consistent with the launcher path shown in the traceback above
USER airflow
RUN pip install -r requirements.txt
RUN pip install --upgrade cffi
RUN pip install cryptography~=3.4 dbt==0.19.0
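
For reference, this is roughly how I verified the image on its own (the airflow-dbt tag is just an example name):

docker build -t airflow-dbt .
docker run --rm airflow-dbt bash -c "dbt --version"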

I have tried:

  • Building the image directly with docker build . (see the sketch after the Dockerfile above); dbt is accessible in the resulting container
  • Manually installing dbt inside a container launched by docker-compose
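
One thing worth ruling out on top of that: docker-compose up reuses an image it has already built for the project and only builds when none exists, so a stale cached image can look exactly like the Dockerfile being ignored. Forcing a rebuild:

docker-compose build --no-cache
docker-compose up -d
# or in one step
docker-compose up -d --build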

The only workaround that seems to work, and which I would like to avoid, is manual installation in the running container:

root@42b2358a7792:/opt/airflow# pip install dbt==0.19.0

Any idea why the containers started by docker-compose lose track of the dbt Python package?

Just remove the : after build in your docker-compose file and replace it with a .:

build .