FROM python:3.10-slim

WORKDIR /code

# Install system dependencies: gcc/g++ for building native Python extensions,
# git and wget for fetching sources and models, and ffmpeg for Whisper's audio decoding
RUN apt-get update && apt-get install -y \
    gcc \
    g++ \
    git \
    wget \
    ffmpeg \
    && rm -rf /var/lib/apt/lists/*

# Set environment variables for Hugging Face cache and tokenizers
ENV HF_HOME=/code/cache
ENV TRANSFORMERS_CACHE=/code/cache
ENV TOKENIZERS_PARALLELISM=false
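# (HF_HOME is the canonical cache variable in recent Hugging Face libraries;
# TRANSFORMERS_CACHE is kept for compatibility with older transformers releases
# that still read it.)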

# Copy requirements first for better Docker layer caching
COPY requirements.txt /code/requirements.txt

# Upgrade pip and install Python dependencies
RUN pip install --no-cache-dir --upgrade pip
RUN pip install --no-cache-dir -r /code/requirements.txt
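# Note: requirements.txt is assumed to list at least gradio, transformers, torch
# and openai-whisper, matching the imports exercised in the pre-download step below.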

# Pre-download the models at build time to avoid runtime delays.
# This ensures the application starts up quickly in the Space.
RUN python -c "from transformers import pipeline; import whisper; pipeline('summarization', model='facebook/bart-large-cnn'); whisper.load_model('base', download_root='/code/cache')"
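# An equivalent (hypothetical) layout is to keep the download logic in a small
# script that is copied in and run at build time, which is easier to extend:
#   COPY download_models.py /code/download_models.py
#   RUN python /code/download_models.py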

# Copy the rest of the application code into the container
COPY . /code/

# Ensure the cache directory exists (the ENV variables above point here);
# the pre-download step has normally created it already
RUN mkdir -p /code/cache

# Expose the port Gradio runs on
EXPOSE 7860

# Command to run the application
CMD ["python3", "app.py"]
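
# Note: with this CMD, app.py is expected to bind Gradio to the exposed port on
# all interfaces. A minimal sketch (assuming a Gradio Blocks/Interface object
# named `demo` inside app.py):
#   demo.launch(server_name="0.0.0.0", server_port=7860)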