knox hat die Gist bearbeitet. Zur Änderung gehen
1 file changed, 21 insertions
LLaMA-Factory-15.sh(Datei erstellt)
@@ -0,0 +1,21 @@ | |||
#!/usr/bin/env bash
# Build the LLaMA-Factory CUDA image (optional extras disabled), start a
# detached GPU container with the standard cache/data mounts, and open a
# shell inside it. Run from the repository root (paths are relative to $PWD).
set -euo pipefail

# Build without the heavyweight optional extras (bitsandbytes, vLLM,
# DeepSpeed, FlashAttention) and with the default PyPI index.
docker build -f ./docker/docker-cuda/Dockerfile \
  --build-arg INSTALL_BNB=false \
  --build-arg INSTALL_VLLM=false \
  --build-arg INSTALL_DEEPSPEED=false \
  --build-arg INSTALL_FLASHATTN=false \
  --build-arg PIP_INDEX=https://pypi.org/simple \
  -t llamafactory:latest .

# Pre-create mount sources so Docker does not create them root-owned.
mkdir -p -- hf_cache ms_cache om_cache data output

# Absolute mount paths ("$PWD/…"): older Docker CLIs reject relative
# host paths for -v. Port 7860 = Gradio web UI, 8000 = API server.
docker run -dit --gpus=all \
  -v "$PWD/hf_cache:/root/.cache/huggingface" \
  -v "$PWD/ms_cache:/root/.cache/modelscope" \
  -v "$PWD/om_cache:/root/.cache/openmind" \
  -v "$PWD/data:/app/data" \
  -v "$PWD/output:/app/output" \
  -p 7860:7860 \
  -p 8000:8000 \
  --shm-size 16G \
  --name llamafactory \
  llamafactory:latest

# Attach an interactive shell to the running container.
docker exec -it llamafactory bash
Neuer
Älter