1. Download the model
import os
import shutil
from huggingface_hub import snapshot_download

model_name = 'openai/clip-vit-base-patch32'
download_path = '/path/to/model/directory'
os.makedirs(download_path, exist_ok=True)

print('*'*10, f'Start downloading {model_name}', '*'*10)
snapshot_download(repo_id=model_name,
                  cache_dir=download_path,
                  local_dir=download_path,
                  local_dir_use_symlinks=False,  # store real files in local_dir instead of symlinks into the cache
                  ignore_patterns=["*.h5", "*safetensors", "*msgpack"],  # skip TF/safetensors/Flax weights, keep the PyTorch .bin files
                  force_download=True,
                  resume_download=False,
                  etag_timeout=60)
print('*'*10, 'Download finished', '*'*10)

# Remove the intermediate "models--..." cache folders that cache_dir leaves next to the downloaded files
for item in os.listdir(download_path):
    if os.path.isdir(os.path.join(download_path, item)) and item.startswith('models'):
        shutil.rmtree(os.path.join(download_path, item))
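After the download, it is worth sanity-checking that the local copy loads without contacting the Hub. A minimal sketch, assuming transformers is installed; local_model_path is the same directory as download_path above:

from transformers import CLIPModel, CLIPProcessor

local_model_path = '/path/to/model/directory'  # same directory as download_path above
model = CLIPModel.from_pretrained(local_model_path)
processor = CLIPProcessor.from_pretrained(local_model_path)
print(model.config.model_type)  # prints 'clip' if the snapshot is complete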
2. Download the dataset
from huggingface_hub import snapshot_download

local_dir = "/path/to/dataset/directory"
snapshot_download(
    repo_id="facebook/natural_reasoning",
    repo_type="dataset",           # required, otherwise the repo id is looked up as a model
    local_dir=local_dir,
    local_dir_use_symlinks=False,  # store real files instead of symlinks into the cache
    resume_download=True           # continue an interrupted download
)
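Once the files are on disk, the dataset can be loaded without touching the Hub again. A minimal sketch, assuming the datasets library is installed and that load_dataset can auto-detect the file format in the snapshot (if not, pass data_files explicitly):

from datasets import load_dataset

local_dir = "/path/to/dataset/directory"
# Point load_dataset at the local folder; the builder is inferred from the files found there
ds = load_dataset(local_dir)
print(ds)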
3. Steps to run
- On a Linux system, first set the mirror endpoint in the shell (see the sketch after this list for setting it from inside Python):
export HF_ENDPOINT=https://hf-mirror.com
- Create a directory to hold the model/dataset, then update the corresponding path in the code above.
- Use the command
nohup python -u download.py > download.log 2>&1 &
to run the script in the background; -u disables output buffering so download.log updates in real time.
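If exporting the variable in the shell is inconvenient, the endpoint can also be set from inside the script itself. A minimal sketch; the variable must be set before huggingface_hub is imported, otherwise the default endpoint has already been read:

import os
# Must come before the huggingface_hub import, which reads HF_ENDPOINT at import time
os.environ['HF_ENDPOINT'] = 'https://hf-mirror.com'

from huggingface_hub import snapshot_download
# ... the download code from sections 1 and 2 goes here unchanged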