chatglm.cpp/Dockerfile-chatglm

# Usage:
# 1. Build the image:
#    docker build -f Dockerfile-chatglm -t chatglm_image .
# 2. Run the image:
#    docker run -it --security-opt seccomp=unconfined chatglm_image:latest
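#
# A minimal sketch for running a locally downloaded GGML model instead of the
# one baked into the image: bind-mount a host directory over /model_path and
# override the default command. The host path and model filename below are
# placeholders, not files provided by this repository.
#    docker run -it -v /path/to/models:/model_path chatglm_image:latest \
#        /usr/bin/chatglm_cpp_main -m /model_path/your-model.bin -i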
# Base image
FROM openeuler/openeuler:22.03
# Configure the openEuler 23.09 (EBS dailybuild) repository and install chatglm-cpp
RUN echo '[everything]' > /etc/yum.repos.d/openEuler.repo && \
    echo 'name=everything' >> /etc/yum.repos.d/openEuler.repo && \
    echo 'baseurl=http://121.36.84.172/dailybuild/EBS-openEuler-23.09/EBS-openEuler-23.09/everything/$basearch/' >> /etc/yum.repos.d/openEuler.repo && \
    echo 'enabled=1' >> /etc/yum.repos.d/openEuler.repo && \
    echo 'gpgcheck=0' >> /etc/yum.repos.d/openEuler.repo && \
    yum install -y sentencepiece chatglm-cpp wget
# Download the GGML model into /model_path
WORKDIR /model_path
RUN wget -P /model_path https://huggingface.co/Xorbits/chatglm2-6B-GGML/resolve/main/chatglm2-ggml-q4_1.bin
# Run the GGML model in interactive mode
CMD /usr/bin/chatglm_cpp_main -m /model_path/chatglm2-ggml-q4_1.bin -i
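
# As a usage sketch, the default command can also be overridden to answer a
# single prompt instead of starting an interactive session; the -p/--prompt
# flag is assumed from chatglm.cpp's command-line interface.
#    docker run chatglm_image:latest /usr/bin/chatglm_cpp_main \
#        -m /model_path/chatglm2-ggml-q4_1.bin -p "Hello"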