Creating Directory instead of file

  • Issue Type
    Directory getting created instead of file
    The issue I am facing is that requirements.txt is being created as a directory rather than as a file. I tried providing an absolute path and a few other fixes, but nothing worked. How do I fix this issue?

  • OS Version
    MacBook Air (M2) - macOS Sonoma 14.5

  • App Version
    Docker Desktop version 4.28

  • Steps to reproduce
    Run docker compose up to reproduce the issue; I have attached my docker-compose.yml and entrypoint.sh files below.

version: '3'

services:
  zookeeper:
    image: confluentinc/cp-zookeeper:7.4.0
    hostname: zookeeper
    container_name: zookeeper
    ports:
      - "2181:2181"
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181
      ZOOKEEPER_TICK_TIME: 2000
    healthcheck:
      test: ['CMD','bash','-c',"echo 'ruok' | nc localhost 2181"]
      interval: 10s
      timeout: 5s
    networks:
      - confluent

  broker:
    image: confluentinc/cp-server:7.4.0
    hostname: broker
    container_name: broker
    depends_on:
      zookeeper:
        condition: service_healthy
    ports:
      - "9092:9092"
      - "9101:9101"
    environment:
      KAFKA_BROKER_ID: 1
      KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092
      KAFKA_METRIC_REPORTERS: io.confluent.metrics.reporter.ConfluentMetricsReporter
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
      KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
      KAFKA_CONFLUENT_LICENSE_TOPIC_REPLICATION_FACTOR: 1
      KAFKA_CONFLUENT_BALANCER_TOPIC_REPLICATION_FACTOR: 1
      KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
      KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
      KAFKA_JMX_PORT: 9101
      KAFKA_JMX_HOSTNAME: localhost
      KAFKA_CONFLUENT_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081'
      CONFLUENT_METRICS_REPORTER_BOOTSTRAP_SERVERS: broker:29092
      #CONFLUENT_METRICS_REPORTER_BOOTSTRAP_REPLICAS: 1
      CONFLUENT_METRICS_REPORTER_TOPIC_REPLICAS: 1
      CONFLUENT_METRICS_ENABLE: 'false'
      CONFLUENT_SUPPORT_CUSTOMER_ID: 'anonymous'
    networks:
      - confluent
    healthcheck:
      test: ['CMD','bash','-c','nc -z localhost 9092']
      interval: 10s
      timeout: 5s

  
  schema-registry:
    image: confluentinc/cp-schema-registry:7.4.0
    hostname: schema-registry
    container_name: schema-registry
    depends_on:
      broker:
        condition: service_healthy
    ports:
      - "8081:8081"
    environment:
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'broker:29092'
      SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8081
    healthcheck:
      test: ['CMD','curl','-f','http://localhost:8081/']
      interval: 30s
      timeout: 10s
    networks:
      - confluent

  
  control-center:
    image: confluentinc/cp-enterprise-control-center:7.4.0
    hostname: control-center
    container_name: control-center
    depends_on:
      broker:
        condition: service_healthy
      schema-registry: 
        condition: service_healthy
    ports:
      - "9021:9021"
    environment:
      CONTROL_CENTER_BOOTSTRAP_SERVERS: 'broker:29092'
      CONTROL_CENTER_SCHEMA_REGISTRY_URL: "http://schema-registry:8081"
      CONTROL_CENTER_REPLICATION_FACTOR: 1
      CONTROL_CENTER_INTERNAL_TOPICS_PARTITIONS: 1
      CONTROL_CENTER_MONITORING_INTERCEPTOR_TOPIC_PARTITIONS: 1
      CONFLUENT_METRICS_TOPIC_REPLICATION: 1
      CONFLUENT_METRICS_ENABLE: 'false'
      PORT: 9021
    healthcheck:
      test: [ "CMD", "curl", "-f", "http://localhost:9021/health" ]
      interval: 30s
      timeout: 10s
    networks:
      - confluent

  webserver:
    image: apache/airflow:2.6.0-python3.9
    command: webserver
    entrypoint: ['/opt/airflow/script/entrypoint.sh']
    depends_on:
      - postgres
    environment:
      - LOAD_EX=n
      - EXECUTOR=Sequential
      - AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=postgresql+psycopg2://airflow:airflow@postgres:5432/airflow
      - AIRFLOW_WEBSERVER_SECRET_KEY=this_is_a_very_secured_key
    logging:
      options:
        max-size: 10m
        max-file: "3"
    volumes:
      - ./dags:/opt/airflow/dags
      - ./script/entrypoint.sh:/opt/airflow/script/entrypoint.sh
      - ./test/requirements.txt:/opt/airflow/requirements.txt
    ports:
      - "8080:8080"
    healthcheck:
      test: ['CMD-SHELL',"[ -f /opt/airflow/airflow-webserver.pid ]"]
      interval: 30s
      timeout: 30s
      retries: 3
    networks:
      - confluent

  scheduler:
    image: apache/airflow:2.6.0-python3.9
    depends_on:
      webserver:
        condition: service_healthy
    volumes:
      - ./dags:/opt/airflow/dags
      - ./script/entrypoint.sh:/opt/airflow/script/entrypoint.sh
      - ./test/requirements.txt:/opt/airflow/requirements.txt
    environment:
      - LOAD_EX=n
      - EXECUTOR=Sequential
      - AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=postgresql+psycopg2://airflow:airflow@postgres:5432/airflow
      - AIRFLOW_WEBSERVER_SECRET_KEY=this_is_a_very_secured_key
    command: bash -c "pip install -r ./test/requirements.txt && airflow db upgrade && airflow scheduler"
    networks:
      - confluent
    

  postgres:
    image: postgres:14.0
    environment:
      - POSTGRES_USER=airflow
      - POSTGRES_PASSWORD=airflow
      - POSTGRES_DB=airflow
    logging:
      options:
        max-size: 10m
        max-file: "3"
    networks:
      - confluent


networks:
  confluent:
entrypoint.sh:

#!/bin/bash
set -e

if [ -e "/opt/airflow/requirements.txt" ]; then
    # upgrade pip first, then install the mounted requirements
    python -m pip install --upgrade pip
    pip install --user -r /opt/airflow/requirements.txt
fi

airflow db init

if [ ! -f "/opt/airflow/airflow.db" ]; then
    airflow users create \
        --username admin \
        --firstname admin \
        --lastname admin \
        --role Admin \
        --email admin@example.com \
        --password asd
fi

$(command -v airflow) db upgrade

exec airflow webserver

If the host path of a bind mount does not exist, Docker will create it as a directory.
If you want to mount a requirements.txt file, create the file on the host first, then start the container.
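
For example, something like this on the host before the first start (assuming the ./test/requirements.txt path from your compose file):

# create the host-side file up front, so the bind mount
# finds a file instead of Docker creating a directory
mkdir -p ./test
touch ./test/requirements.txt
docker compose up -d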

Also, what do you expect Docker to install if you do not provide the requirements.txt file yourself? Even if Docker had created it as a file, it would be empty, so there would be nothing to install.

Also, for your application's requirements you should probably install them into the image using a Dockerfile, rather than having every container install them each time it starts.
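
A minimal sketch of that approach, assuming the apache/airflow base image you are already using (the tag and paths here are only illustrative):

# hypothetical Dockerfile: bake the Python dependencies into the image
FROM apache/airflow:2.6.0-python3.9
COPY requirements.txt /requirements.txt
RUN pip install --no-cache-dir -r /requirements.txt

You would then point the compose services at the built image (build: . instead of image:) and drop the pip install from the entrypoint.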

I have the feeling you think the container will create the file and the bind mount will copy it out to the host. But no, it is the other way around: the host path is mounted into the container.
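
If you ever do need to get a file out of a running container onto the host, docker cp is the tool for that, not a bind mount (the container name below is a placeholder for whatever Compose named your webserver container):

# copy a file from the container's filesystem to the host
docker cp <webserver-container>:/opt/airflow/requirements.txt ./requirements.txt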

EDIT:

Forget what I wrote above :slight_smile: There is no requirements.txt in the container, so I’m not sure what you expected.

I was following a tutorial in which the requirements.txt file was being created on the fly. I pulled that file and added it to my project, but then got an error saying ‘Trying to mount a directory onto a file’. I found a way to fix it by updating the volumes section of my docker-compose.yml as below, and it is working fine now.

  webserver:
    image: apache/airflow:2.6.0-python3.9
    command: webserver
    entrypoint: ['/opt/airflow/script/entrypoint.sh']
    depends_on:
      - postgres
    environment:
      - LOAD_EX=n
      - EXECUTOR=Sequential
      - AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=postgresql+psycopg2://airflow:airflow@postgres:5432/airflow
      - AIRFLOW_WEBSERVER_SECRET_KEY=this_is_a_very_secured_key
    logging:
      options:
        max-size: 10m
        max-file: "3"
    volumes:
      - type: bind
        source: ./dags
        target: /opt/airflow/dags
        bind:
          create_host_path: false
      - type: bind
        source: ./script/entrypoint.sh
        target: /opt/airflow/script/entrypoint.sh
        bind:
          create_host_path: false
      - type: bind
        source: ./requirements.txt
        target: /opt/airflow/requirements.txt
        bind:
          create_host_path: false
    ports:
      - "8080:8080"
    healthcheck:
      test: ['CMD-SHELL',"[ -f /opt/airflow/airflow-webserver.pid ]"]
      interval: 30s
      timeout: 30s
      retries: 3
    networks:
      - confluent

  scheduler:
    image: apache/airflow:2.6.0-python3.9
    depends_on:
      webserver:
        condition: service_healthy
    volumes:
      - type: bind
        source: ./dags
        target: /opt/airflow/dags
        bind:
          create_host_path: false
      - type: bind
        source: ./script/entrypoint.sh
        target: /opt/airflow/script/entrypoint.sh
        bind:
          create_host_path: false
      - type: bind
        source: ./requirements.txt
        target: /opt/airflow/requirements.txt
        bind:
          create_host_path: false
    environment:
      - LOAD_EX=n
      - EXECUTOR=Sequential
      - AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=postgresql+psycopg2://airflow:airflow@postgres:5432/airflow
      - AIRFLOW_WEBSERVER_SECRET_KEY=this_is_a_very_secured_key
    command: bash -c "pip install -r ./requirements.txt && airflow db upgrade && airflow scheduler"
    networks:
      - confluent

create_host_path: false, as used in your compose file, should already be the default for the long volume syntax unless there is a bug. The short volume syntax, however, automatically creates a folder when the host file does not exist beforehand.
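
For comparison, the short syntax form of the same mount implies create_host_path: true, which is why the missing host file ended up as a directory:

volumes:
  # short syntax: a missing ./requirements.txt on the host is
  # created as a directory before being mounted
  - ./requirements.txt:/opt/airflow/requirements.txt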