Spaces:
Paused
Paused
Commit
·
13c70a1
1
Parent(s):
ddc9885
add gradio dependencies
Browse files
- Dockerfile: +1 -0
- app.py: +1 -1
Dockerfile
CHANGED
@@ -34,6 +34,7 @@ ENV HOME=/home/user \
|
|
34 |
|
35 |
RUN pip3 install torch==2.3.1 torchvision==0.18.1 torchaudio==2.3.1 --index-url https://download.pytorch.org/whl/cu121
|
36 |
RUN pip3 install --no-cache-dir --upgrade -r /code/requirements.txt
|
|
|
37 |
# RUN FLASH_ATTN_VER=$(python3 /code/get_flash_attn.py) && \
|
38 |
# pip3 install "https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.2.post1/${FLASH_ATTN_VER}" --no-cache-dir
|
39 |
RUN pip3 install https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.2.post1/flash_attn-2.7.2.post1+cu11torch2.3cxx11abiFALSE-cp310-cp310-linux_x86_64.whl --no-cache-dir
|
|
|
34 |
|
35 |
RUN pip3 install torch==2.3.1 torchvision==0.18.1 torchaudio==2.3.1 --index-url https://download.pytorch.org/whl/cu121
|
36 |
RUN pip3 install --no-cache-dir --upgrade -r /code/requirements.txt
|
37 |
+
RUN pip3 install gradio
|
38 |
# RUN FLASH_ATTN_VER=$(python3 /code/get_flash_attn.py) && \
|
39 |
# pip3 install "https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.2.post1/${FLASH_ATTN_VER}" --no-cache-dir
|
40 |
RUN pip3 install https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.2.post1/flash_attn-2.7.2.post1+cu11torch2.3cxx11abiFALSE-cp310-cp310-linux_x86_64.whl --no-cache-dir
|
app.py
CHANGED
@@ -458,4 +458,4 @@ def create_demo():
|
|
458 |
|
459 |
if __name__ == "__main__":
|
460 |
demo = create_demo()
|
461 |
-
demo.launch()
|
|
|
458 |
|
459 |
if __name__ == "__main__":
|
460 |
demo = create_demo()
|
461 |
+
demo.launch(server_name='0.0.0.0',server_port=7860)
|