mic3333 committed on
Commit
64b05cb
·
1 Parent(s): 0e91968
Files changed (2) hide show
  1. Dockerfile +31 -15
  2. requirements.txt +3 -2
Dockerfile CHANGED
@@ -1,24 +1,40 @@
1
- # Use an official Python runtime as a parent image
2
- FROM python:3.9-slim
3
 
4
- # Set the working directory in the container
5
  WORKDIR /code
6
 
7
- # Copy the requirements file into the container at /code
8
- COPY requirements.txt .
 
 
 
 
 
 
9
 
10
- # Install any needed packages specified in requirements.txt
11
- # --no-cache-dir: Disables the cache which reduces the image size
12
- # --trusted-host pypi.python.org: Solves issues with SSL/TLS verification in some environments
13
- RUN pip install --no-cache-dir --trusted-host pypi.python.org -r requirements.txt
14
 
15
- # Copy the rest of the application files into the container at /code
16
- COPY . .
17
 
18
- # Make port 7860 available to the world outside this container
 
 
 
 
 
 
 
 
 
 
 
 
 
 
19
  EXPOSE 7860
20
 
21
- # Define the command to run the application
22
- # Use the --server-name 0.0.0.0 flag to make the app accessible from outside the container
23
- # Use the --share flag to create a public link if needed (optional)
24
  CMD ["python3", "app.py"]
 
1
+ FROM python:3.10-slim
 
2
 
 
3
  WORKDIR /code
4
 
5
+ # Install system dependencies required for building some Python packages
6
+ RUN apt-get update && apt-get install -y \
7
+ gcc \
8
+ g++ \
9
+ git \
10
+ wget \
11
+ ffmpeg \
12
+ && rm -rf /var/lib/apt/lists/*
13
 
14
+ # Set environment variables for Hugging Face cache and tokenizers
15
+ ENV HF_HOME=/code/cache
16
+ ENV TRANSFORMERS_CACHE=/code/cache
17
+ ENV TOKENIZERS_PARALLELISM=false
18
 
19
+ # Copy requirements first for better Docker layer caching
20
+ COPY requirements.txt /code/requirements.txt
21
 
22
+ # Upgrade pip and install Python dependencies
23
+ RUN pip install --no-cache-dir --upgrade pip
24
+ RUN pip install --no-cache-dir -r /code/requirements.txt
25
+
26
+ # Pre-download the models at build time to avoid runtime delays.
27
+ # This ensures the application starts up quickly in the Space.
28
+ RUN python -c "from transformers import pipeline; import whisper; pipeline('summarization', model='facebook/bart-large-cnn'); whisper.load_model('base')"
29
+
30
+ # Copy the rest of the application code into the container
31
+ COPY . /code/
32
+
33
+ # Create the cache directory (the ENV variables point here)
34
+ RUN mkdir -p /code/cache
35
+
36
+ # Expose the port Gradio runs on
37
  EXPOSE 7860
38
 
39
+ # Command to run the application
 
 
40
  CMD ["python3", "app.py"]
requirements.txt CHANGED
@@ -1,4 +1,4 @@
1
- gradio==4.44.1
2
  transformers==4.35.2
3
  torch==2.1.1
4
  openai-whisper==20231117
@@ -7,4 +7,5 @@ python-docx==1.1.0
7
  datasets==2.14.6
8
  accelerate==0.24.1
9
  sentencepiece==0.1.99
10
- protobuf==4.25.0
 
 
1
+ gradio==4.44.0
2
  transformers==4.35.2
3
  torch==2.1.1
4
  openai-whisper==20231117
 
7
  datasets==2.14.6
8
  accelerate==0.24.1
9
  sentencepiece==0.1.99
10
+ protobuf==4.25.0
11
+ numpy<2.0