Setup
This mostly follows the official tutorial:
https://github.com/chatchat-space/Langchain-Chatchat/blob/master/docs/install/README_docker.md
Below is my docker-compose.yaml file; the only change I made is the locally mapped directories, and I created all of those local directories in advance.
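As a rough sketch of that preparation (the paths simply mirror the volumes entries in the compose file below; the xxx segment is a placeholder from my own layout), something like:

mkdir -p /root/xxx/langchain-chatchat/model_data/xinference \
         /root/xxx/langchain-chatchat/model_data/cache/huggingface \
         /root/xxx/langchain-chatchat/model_data/cache/modelscope \
         /root/xxx/langchain-chatchat/chatchat_data

The compose file itself: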
version: '3.9'
services:
  xinference:
    image: xprobe/xinference:v0.12.3
    restart: always
    command: xinference-local -H 0.0.0.0
    ports: # Enable these when not using host network.
      - "9997:9997"
    # network_mode: "host"
    # Mount a local path (~/xinference) into the container path (/root/.xinference);
    # for details see: https://inference.readthedocs.io/zh-cn/latest/getting_started/using_docker_image.html
    volumes:
      - /root/xxx/langchain-chatchat/model_data/xinference:/root/.xinference
      - /root/xxx/langchain-chatchat/model_data/cache/huggingface:/root/.cache/huggingface
      - /root/xxx/langchain-chatchat/model_data/cache/modelscope:/root/.cache/modelscope
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]
    runtime: nvidia
    # Change the model source to ModelScope (the default is HuggingFace).
    environment:
      - XINFERENCE_MODEL_SRC=modelscope
  chatchat:
    image: chatimage/chatchat:0.3.1.3-0f4eb00-20240729
    # image: ccr.ccs.tencentyun.com/chatchat/chatchat:0.3.1.2-2024-0720
    restart: always
    ports: # Enable these when not using host network.
      - "7861:7861"
      - "8501:8501"
    # network_mode: "host"
    # Mount a local path (~/chatchat) into the container's default data path ($CHATCHAT_ROOT).
    volumes:
      - /root/xxx/langchain-chatchat/chatchat_data:/root/chatchat_data
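With the file above saved as docker-compose.yaml in the current directory (and the NVIDIA Container Toolkit already installed so the GPU reservation works), a minimal sketch of bringing the stack up and checking it looks like:

# Start both services in the background and follow the startup logs.
docker compose up -d
docker compose logs -f xinference chatchat

# Quick reachability checks against the mapped ports
# (9997: Xinference, 7861: Chatchat API, 8501: Chatchat Web UI);
# this assumes each service answers HTTP on its root path.
curl -sI http://127.0.0.1:9997 | head -n 1
curl -sI http://127.0.0.1:8501 | head -n 1

After that, the Chatchat Web UI is reachable on port 8501 and the API on port 7861, per the port mappings above.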