Ollama Deployment
Download Ollama, upload it to the server, and extract it
# Download
wget https://github.com/ollama/ollama/releases/download/v0.5.12/ollama-linux-amd64.tgz
# Extract
mkdir /opt/ollama && tar -zxvf ollama-linux-amd64.tgz -C /opt/ollama
# Create a symlink in /usr/bin
ln -s /opt/ollama/bin/ollama /usr/bin/ollama
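A quick sanity check at this point (a minimal sketch, assuming the tarball was extracted to /opt/ollama and the symlink above was created):
# Confirm the binary is on PATH and print the client version
ollama --version
# Inspect the extracted layout (typically bin/ and lib/)
ls /opt/ollama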
Create and start the service (see reference link)
# Create a dedicated user
sudo useradd -r -s /bin/false -U -m -d /usr/share/ollama ollama
sudo usermod -a -G ollama $(whoami)
# Grant ownership of the install directory
chown -R ollama /opt/ollama
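Optional check that the user and permissions are in place (just a verification sketch, not required):
# The ollama system user should exist and own the install directory
id ollama
ls -ld /opt/ollama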
# Create the systemd service
vim /etc/systemd/system/ollama.service
[Unit]
Description=Ollama Service
After=network-online.target
[Service]
ExecStart=/usr/bin/ollama serve
User=ollama
Group=ollama
Restart=always
RestartSec=3
Environment="PATH=$PATH"
# Environment variable: IP and port the service listens on
#Environment="OLLAMA_HOST=0.0.0.0:11434"
# Environment variables: if the server has no direct internet access, configure a proxy so models can be pulled
#Environment="HTTP_PROXY=http://your-proxy-server:port"
#Environment="HTTPS_PROXY=http://your-proxy-server:port"
[Install]
WantedBy=default.target
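Before reloading, the unit file can optionally be checked for syntax problems (assumes systemd-analyze is available, as on most systemd-based distributions):
# Report errors or warnings in the unit definition
systemd-analyze verify /etc/systemd/system/ollama.service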
# Reload systemd to pick up the new unit
systemctl daemon-reload
# Start the service and enable it on boot
systemctl start ollama
systemctl enable ollama
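After starting, the service and its HTTP endpoint can be verified (a quick sketch; 11434 is Ollama's default port):
# Confirm the service is active
systemctl status ollama
# The root endpoint should reply with "Ollama is running"
curl http://127.0.0.1:11434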
Deploy the DeepSeek model via Ollama
# Run whichever version you need; the model is downloaded and started automatically
ollama run deepseek-r1:8b
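Once the model is pulled, it can also be called over the local REST API. A minimal sketch using Ollama's /api/generate endpoint (the prompt text is only an example):
# Send a one-off prompt to the deployed model
curl http://127.0.0.1:11434/api/generate -d '{
  "model": "deepseek-r1:8b",
  "prompt": "Hello, introduce yourself briefly.",
  "stream": false
}'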