@Albert337
import os
from huggingface_hub import login, logout, hf_hub_download

repo_id = 'THUDM/chatglm-6b-int4'
local_dir = './chatglm-6b-int4'

# Authenticate with the access token stored in the HUGGINGFACE_TOKEN environment variable
token = os.getenv('HUGGINGFACE_TOKEN')
login(token=token, add_to_git_credential=True)

# Fetch every file the int4 model needs into the local directory (real copies, no symlinks)
filenames = [
    "ice_text.model",
    "pytorch_model.bin",
    "configuration_chatglm.py",
    "config.json",
    "modeling_chatglm.py",
    "quantization.py",
    "quantization_kernels.c",
    "quantization_kernels_parallel.c",
    "tokenization_chatglm.py",
    "tokenizer_config.json",
]
for filename in filenames:
    hf_hub_download(repo_id=repo_id, filename=filename,
                    local_dir=local_dir, local_dir_use_symlinks=False)

logout()
You can download the model with a script; all the files are on Hugging Face: https://huggingface.co/THUDM/chatglm-6b-int4
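Once the files are in ./chatglm-6b-int4, a minimal sketch of loading the model from that local directory (assuming transformers is installed and a CUDA GPU is available; trust_remote_code is needed because the repo ships its own modeling and tokenization code):

from transformers import AutoTokenizer, AutoModel

# Load tokenizer and model from the local download instead of re-fetching from the Hub
tokenizer = AutoTokenizer.from_pretrained('./chatglm-6b-int4', trust_remote_code=True)
model = AutoModel.from_pretrained('./chatglm-6b-int4', trust_remote_code=True).half().cuda()
model = model.eval()

# Simple smoke test using ChatGLM's chat interface
response, history = model.chat(tokenizer, "Hello", history=[])
print(response)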