# 不使用centos7是因为有些依赖已过时,无法直接运行ollama;使用centos8可直接运行、方便快捷.
# 部署环境: centos8 4核8G+40G | ollama+deepseek-r1:1.5b | dify 0.15.3
# Unpack the ollama tarball into /usr/local (provides /usr/local/bin/ollama).
tar -xzf ollama-linux-amd64.tgz -C /usr/local
ollama --version
# NOTE(review): `ollama serve` runs in the foreground and blocks this shell;
# the systemd unit below is the persistent way to run it.
ollama serve
# With the server running, list the models already deployed locally.
ollama list
# Create the model storage directory referenced by OLLAMA_MODELS in the unit.
mkdir -p /deepseek/ollama/models
vi /etc/systemd/system/ollama.service
# Paste the following unit into the file opened above (auto-start service).
# systemd unit for Ollama — contents of /etc/systemd/system/ollama.service.
[Unit]
Description=Ollama Service
# Wants= actually pulls network-online.target in; After= alone only orders
# against it when something else activates it.
Wants=network-online.target
After=network-online.target

[Service]
ExecStart=/usr/local/bin/ollama serve
User=root
Group=root
Restart=always
RestartSec=3
# systemd does NOT do shell variable expansion in Environment=, so the
# original "PATH=$PATH" would set PATH to the literal string "$PATH".
# Spell the search path out explicitly instead.
Environment="PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
# Store pulled models under the directory created earlier.
Environment="OLLAMA_MODELS=/deepseek/ollama/models"
# Listen on all interfaces so other hosts/containers can reach port 11434.
Environment="OLLAMA_HOST=0.0.0.0"

[Install]
WantedBy=default.target
# Reload systemd so it sees the new unit, then manage the service.
systemctl daemon-reload
systemctl start ollama
systemctl status ollama
# NOTE(review): the stop/enable/disable lines below read like a command
# cheat-sheet; executed top-to-bottom they would leave the service stopped
# and disabled — confirm intent.
systemctl stop ollama
systemctl enable ollama
systemctl disable ollama
# Private/offline deployment: permanently disable the firewall.
# SECURITY: acceptable only on an isolated network; otherwise open just the
# required ports (11434 for ollama, 80 for dify) instead of disabling it.
systemctl stop firewalld
systemctl disable firewalld
# Verify ollama is reachable (your own IP, port 11434).
http://192.168.184.137:11434/
# Download models (alternatively `ollama run` pulls on first use).
ollama pull deepseek-r1:1.5b
# The two models below are embedding models — presumably for dify's
# knowledge-base indexing; TODO confirm which one dify is configured to use.
ollama pull nomic-embed-text
ollama pull bge-m3
# Run the model (opens an interactive chat session).
ollama run deepseek-r1:1.5b
# Stop the model / leave the chat:
# Use Ctrl + D or type /bye to exit the chat.
# NOTE: if a pull fails with "network is unreachable" (example below — an
# IPv6 connection attempt to registry.ollama.ai), check connectivity
# (e.g. ping registry.ollama.ai) and simply retry the pull.
[root@localhost ~]# ollama pull deepseek-r1:1.5b
pulling manifest
Error: pull model manifest: Get "https://registry.ollama.ai/v2/library/deepseek-r1/manifests/1.5b": dial tcp [2606:4700:3034::ac43:b6e5]:443: connect: network is unreachable
# Remove any older Docker packages that would conflict with the offline RPMs.
dnf remove docker docker-client docker-client-latest docker-common docker-latest docker-latest-logrotate docker-logrotate docker-engine
# List installed Docker packages.
# Original `rpm -q docker-*` is wrong: the unquoted glob is expanded by the
# shell against files in the CWD, and `-q` treats the arguments as exact
# package names, so it reports "package docker-* is not installed".
# `rpm -qa` accepts a (quoted) glob pattern over all installed packages.
rpm -qa 'docker-*'
# cd /deepseek/docker — offline install of Docker (26.1.3-1).
# dnf install docker-ce-26.1.3-1.el8.x86_64.rpm docker-ce-cli-26.1.3-1.el8.x86_64.rpm containerd.io-1.6.9-3.1.el8.x86_64.rpm docker-buildx-plugin-0.14.0-1.el8.x86_64.rpm docker-compose-plugin-2.6.0-3.el8.x86_64.rpm
# NOTE(review): `--force --nodeps` skips all dependency checking; prefer the
# single commented `dnf install` line above, which resolves install order
# between these RPMs safely.
rpm -Uvh docker-ce-26.1.3-1.el8.x86_64.rpm --force --nodeps
rpm -Uvh docker-ce-cli-26.1.3-1.el8.x86_64.rpm --force --nodeps
rpm -Uvh containerd.io-1.6.9-3.1.el8.x86_64.rpm --force --nodeps
rpm -Uvh docker-buildx-plugin-0.14.0-1.el8.x86_64.rpm --force --nodeps
rpm -Uvh docker-compose-plugin-2.6.0-3.el8.x86_64.rpm --force --nodeps
# Verify the Docker installation.
docker version
docker compose version
systemctl start docker
systemctl status docker
# NOTE(review): stop/enable/disable below appear to be a reference list, not
# steps to execute in order — running them all would disable Docker.
systemctl stop docker
systemctl enable docker
systemctl disable docker
# Image export/import examples (how the offline image tarballs were produced):
# docker save myimage:latest | gzip > myimage_latest.tar.gz
# docker load < busybox.tar.gz
# cd /deepseek/docker — import the pre-saved images (verify with `docker images`).
docker load < dify-api_0.15.3.tar.gz
docker load < dify-sandbox_0.2.10.tar.gz
docker load < dify-web_0.15.3.tar.gz
docker load < nginx_latest.tar.gz
docker load < postgres_15-alpine.tar.gz
docker load < redis_6-alpine.tar.gz
docker load < squid_latest.tar.gz
docker load < weaviate_1.19.0.tar.gz
# cd /deepseek/dify (use `ll -a` to see hidden files such as .env.example)
cd docker
cp .env.example .env
docker compose up -d
docker compose ps
# Access the dify web UI (default port 80, plain IP is enough).
# Initialize the admin account — NOTE(review): credentials are recorded here
# in plain text (email 507668447@qq.com, password identical to the email);
# rotate the password and remove this note before sharing the runbook.
http://192.168.184.137/
# Add the model (ollama must already be running before adding it).
# Avatar -> Settings -> Model Provider -> Ollama -> the model name and base
# URL below must match your own deployment.
模型名称: deepseek-r1:1.5b
基础 URL: http://192.168.184.137:11434/
# Studio: create an app -> Publish -> Embed in website => your own application.