Create start_services.sh
start_services.sh +10 -0
start_services.sh
ADDED
@@ -0,0 +1,10 @@
+#!/bin/bash
+# Start the llama.cpp server in the background
+llama-server -m /models/Wizard-Vicuna-13B-Uncensored.Q8_0.gguf --host 0.0.0.0 --port 8080 --n-gpu-layers 32 &
+
+# Start the Diamond ecosystem Python application
+cd /workspace/diamond
+python diamond_api_server.py --host 0.0.0.0 --port 8000 &
+
+# Keep the container running
+wait
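
Note: because both services are launched in the background, the Python app may start before the model has finished loading. A possible refinement is a readiness loop between the two launches, sketched below. It assumes curl is available in the container and that the llama.cpp server exposes its /health endpoint (present in recent builds); the host and port match the flags used above.

# Optional readiness check (sketch): place between the llama-server launch
# and the Python app launch in start_services.sh.
# Assumes curl is installed and llama-server serves /health on port 8080.
until curl -sf http://localhost:8080/health > /dev/null; do
    echo "Waiting for llama-server on :8080 ..."
    sleep 2
done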