seanpedrickcase committed · Commit d34af22
1 Parent(s): 1e2bb3e
Updated Gradio version for spaces. Updated Dockerfile to enable Llama.cpp build with CMake.

Files changed:
- Dockerfile +7 -7
- README.md +1 -1
Dockerfile CHANGED
@@ -1,13 +1,14 @@
 # First stage: build dependencies
 FROM public.ecr.aws/docker/library/python:3.11.9-slim-bookworm
 
-# Install Lambda web adapter in case you want to run with with an AWS Lamba function URL
-COPY --from=public.ecr.aws/awsguru/aws-lambda-adapter:0.8.3 /lambda-adapter /opt/extensions/lambda-adapter
+# Install Lambda web adapter in case you want to run with with an AWS Lamba function URL (not essential if not using Lambda)
+#COPY --from=public.ecr.aws/awsguru/aws-lambda-adapter:0.8.3 /lambda-adapter /opt/extensions/lambda-adapter
 
-# Install wget and
+# Install wget, curl, and build-essential
 RUN apt-get update && apt-get install -y \
 wget \
-curl
+curl \
+build-essential
 
 # Create a directory for the model
 RUN mkdir /model
@@ -19,7 +20,7 @@ COPY requirements.txt .
 RUN pip install --no-cache-dir -r requirements.txt
 
 # Gradio needs to be installed after due to conflict with spacy in requirements
-RUN pip install --no-cache-dir gradio==4.
+RUN pip install --no-cache-dir gradio==4.41.0
 
 # Download the quantised phi model directly with curl
 RUN curl -L -o Phi-3-mini-128k-instruct.Q4_K_M.gguf https://huggingface.co/QuantFactory/Phi-3-mini-128k-instruct-GGUF/tree/main/Phi-3-mini-128k-instruct.Q4_K_M.gguf
@@ -53,6 +54,7 @@ ENV HOME=/home/user \
 PATH=/home/user/.local/bin:$PATH \
 PYTHONPATH=$HOME/app \
 PYTHONUNBUFFERED=1 \
+PYTHONDONTWRITEBYTECODE=1 \
 GRADIO_ALLOW_FLAGGING=never \
 GRADIO_NUM_PORTS=1 \
 GRADIO_SERVER_NAME=0.0.0.0 \
@@ -68,7 +70,5 @@ WORKDIR $HOME/app
 
 # Copy the current directory contents into the container at $HOME/app setting the owner to the user
 COPY --chown=user . $HOME/app
-#COPY . $HOME/app
-
 
 CMD ["python", "app.py"]
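Context for the build-essential addition above: the commit message refers to enabling the Llama.cpp build with CMake, which is the compile step that a Python binding such as llama-cpp-python runs during pip install. The sketch below shows roughly what that step looks like in a Dockerfile; the package name and the commented CMAKE_ARGS line are illustrative assumptions, since the actual dependency pins live in this repo's requirements.txt and are not part of this diff.

# Sketch only, not part of this commit: with a C/C++ toolchain in the image,
# a binding such as llama-cpp-python compiles llama.cpp via CMake at install time.
RUN apt-get update && apt-get install -y build-essential cmake \
    && rm -rf /var/lib/apt/lists/*
# Extra llama.cpp CMake options (if any) can be forwarded through CMAKE_ARGS, e.g.:
# RUN CMAKE_ARGS="<llama.cpp cmake options>" pip install --no-cache-dir llama-cpp-python
RUN pip install --no-cache-dir llama-cpp-python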
README.md CHANGED
@@ -4,7 +4,7 @@ emoji: 🚀
 colorFrom: red
 colorTo: yellow
 sdk: gradio
-sdk_version: 4.
+sdk_version: 4.41.0
 app_file: app.py
 pinned: true
 license: apache-2.0
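For reference, after this bump the Space front matter reads roughly as below. Only fields visible in the diff (plus the emoji line from the hunk header) are shown; the usual front-matter delimiters are assumed and the title field, not visible here, is omitted. The commit keeps this sdk_version and the Dockerfile's gradio pin in step at 4.41.0.

---
emoji: 🚀
colorFrom: red
colorTo: yellow
sdk: gradio
sdk_version: 4.41.0
app_file: app.py
pinned: true
license: apache-2.0
---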