# Write the systemd unit for Ollama.
# The heredoc delimiter is quoted ('EOF') so the current shell performs NO
# expansion — the unit file is written byte-for-byte as shown, which keeps it
# safe even if an Environment= line ever contains a literal '$'.
sudo tee /etc/systemd/system/ollama.service > /dev/null <<'EOF'
[Unit]
Description=Ollama Service
# network-online (not just network) so the service starts after the
# interfaces are actually configured; it binds 0.0.0.0:5111 below.
After=network-online.target
Wants=network-online.target

[Service]
User=ollama
# Explicit Group= so files the daemon creates (e.g. under OLLAMA_MODELS)
# get the ollama group, matching the group-writable 775 policy on the
# model directory.
Group=ollama
ExecStart=/usr/local/bin/ollama serve
Restart=always
# Avoid a tight crash loop if the binary fails immediately.
RestartSec=3
Environment="OLLAMA_DEBUG=0"
Environment="OLLAMA_LOG_LEVEL=error"
Environment="OLLAMA_NUM_PARALLEL=8"
Environment="OLLAMA_MAX_LOADED_MODELS=2"
Environment="OMP_NUM_THREADS=16"
Environment="OLLAMA_GPU_LAYERS=40"
Environment="CUDA_VISIBLE_DEVICES=0,1"
Environment="OLLAMA_ENABLE_CUDA=1"
Environment="OLLAMA_HOST=0.0.0.0:5111"
Environment="OLLAMA_MODELS=/AI/OLLAMA/models"

[Install]
WantedBy=multi-user.target
EOF
如果你处于离线环境、无法使用官方 sh 脚本安装 Ollama,只能通过 tar 包手动安装时,需要用以上代码手动创建 systemd 服务。
创建完成后,重载配置并启动服务:
# Reload systemd so the new/changed unit file is picked up
sudo systemctl daemon-reload
# Start the service now
sudo systemctl start ollama
# Enable start-on-boot
sudo systemctl enable ollama
# Verify the service is active and inspect recent log lines
systemctl status ollama
如果出现异常,建议检查 OLLAMA_HOST 环境变量,将其设置为你需要监听的地址和端口。
# Add user 'funcir' to the 'ollama' supplementary group so it can access
# group-owned model directories. -aG appends a supplementary group and keeps
# funcir's primary group intact.
# NOTE: do NOT use `usermod -g ollama funcir` — that would REPLACE the user's
# primary group, changing the default group of every file the user creates.
sudo usermod -aG ollama funcir
将模型文件夹的访问权限授予 funcir。注意 /usr/share/ 属于系统级目录,修改其下内容的属主和权限需谨慎。
# Create the model directory referenced by OLLAMA_MODELS in the unit file.
# Run these steps BEFORE starting the service so it can write models here.
sudo mkdir -p /AI/OLLAMA/models
# The service runs as user 'ollama' — give it ownership of the model tree
sudo chown -R ollama:ollama /AI/OLLAMA
# 775: owner+group rwx, others r-x; members of the ollama group
# (e.g. funcir, added above) can also write models here
sudo chmod 775 /AI/OLLAMA/models
# NOTE(review): /usr/share/ollama is commonly the ollama user's home directory;
# re-owning it to funcir with mode 750 leaves the ollama user group-only r-x
# access (no write) — confirm the service never needs to write under this path
# (models are redirected to /AI/OLLAMA/models, which may be why this is safe).
sudo chown -R funcir:ollama /usr/share/ollama
sudo chmod -R 750 /usr/share/ollama