#!/bin/bash

# Start the Ollama service in the background
ollama serve &

# Give the Ollama service time to finish starting up
echo "Waiting for Ollama service to start..."
sleep 10

# Verify that the Ollama service is responding
if ! curl -fsS --retry 5 --retry-delay 2 --retry-connrefused http://127.0.0.1:11434/api/version; then
    echo "Error: Ollama service failed to start"
    exit 1
fi

# Create the llama3-zh model if it does not already exist
if ! ollama list | grep -q "llama3-zh"; then
    echo "Creating llama3-zh model..."
    if ! ollama create llama3-zh -f /app/Modelfile; then
        echo "Error: Failed to create llama3-zh model"
        exit 1
    fi
fi

echo "Starting Python application..."
# Start the Python application
python app.py
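
# Note: /app/Modelfile is assumed to be shipped in the image alongside this
# script. As a rough sketch only (the base model tag, system prompt, and
# parameter value below are illustrative assumptions, not taken from this
# project), an Ollama Modelfile for a Chinese-focused Llama 3 variant could
# look like:
#
#   FROM llama3:8b
#   SYSTEM "You are a helpful assistant. Always reply in Chinese."
#   PARAMETER temperature 0.7
#
# See the Ollama Modelfile documentation for the full set of directives.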