shutil 复制,移除,数据的model

#!/usr/bin/env python
"""Back up a directory tree using shutil.copytree."""
import shutil


def backup_tree(src: str, dst: str) -> str:
    """Recursively copy the directory *src* to a new directory *dst*.

    Args:
        src: Path of the existing source directory.
        dst: Path of the destination; must not already exist
             (shutil.copytree's default behavior).

    Returns:
        The destination path, as returned by shutil.copytree.

    Raises:
        FileNotFoundError: if *src* does not exist.
        FileExistsError: if *dst* already exists.
    """
    return shutil.copytree(src, dst)


if __name__ == "__main__":
    # Original script's hard-coded backup; now only runs when executed
    # directly, not on import.
    backup_tree('/home/aircoder/python/api', '/home/aircoder/python/bakshutil')
Downloading Model from https://www.modelscope.cn to directory: /home/administrator/.cache/modelscope/hub/models/Qwen/Qwen3-8B 2025-05-20 09:11:20,638 - modelscope - WARNING - Using branch: master as version is unstable, use with caution Traceback (most recent call last): File "/home/administrator/tools/LLM/LLaMA-Factory/src/llamafactory/model/loader.py", line 82, in load_tokenizer tokenizer = AutoTokenizer.from_pretrained( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/administrator/anaconda3/envs/llm/lib/python3.11/site-packages/transformers/models/auto/tokenization_auto.py", line 946, in from_pretrained tokenizer_config = get_tokenizer_config(pretrained_model_name_or_path, **kwargs) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/administrator/anaconda3/envs/llm/lib/python3.11/site-packages/transformers/models/auto/tokenization_auto.py", line 800, in get_tokenizer_config result = json.load(reader) ^^^^^^^^^^^^^^^^^ File "/home/administrator/anaconda3/envs/llm/lib/python3.11/json/__init__.py", line 293, in load return loads(fp.read(), ^^^^^^^^^^^^^^^^ File "/home/administrator/anaconda3/envs/llm/lib/python3.11/json/__init__.py", line 346, in loads return _default_decoder.decode(s) ^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/administrator/anaconda3/envs/llm/lib/python3.11/json/decoder.py", line 340, in decode raise JSONDecodeError("Extra data", s, end) json.decoder.JSONDecodeError: Extra data: line 240 column 1 (char 9732) During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/home/administrator/anaconda3/envs/llm/lib/python3.11/site-packages/gradio/queueing.py", line 715, in process_events response = await route_utils.call_process_api( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/administrator/anaconda3/envs/llm/lib/python3.11/site-packages/gradio/route_utils.py", line 322, in call_process_api output = await app.get_blocks().process_api( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File 
"/home/administrator/anaconda3/envs/llm/lib/python3.11/site-packages/gradio/blocks.py", line 2137, in process_api result = await self.call_function( ^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/administrator/anaconda3/envs/llm/lib/python3.11/site-packages/gradio/blocks.py", line 1675, in call_function prediction = await utils.async_iteration(iterator) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/administrator/anaconda3/envs/llm/lib/python3.11/site-packages/gradio/utils.py", line 735, in async_iteration return await anext(iterator) ^^^^^^^^^^^^^^^^^^^^^ File "/home/administrator/anaconda3/envs/llm/lib/python3.11/site-packages/gradio/utils.py", line 729, in __anext__ return await anyio.to_thread.run_sync( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/administrator/anaconda3/envs/llm/lib/python3.11/site-packages/anyio/to_thread.py", line 56, in run_sync return await get_async_backend().run_sync_in_worker_thread( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/administrator/anaconda3/envs/llm/lib/python3.11/site-packages/anyio/_backends/_asyncio.py", line 2470, in run_sync_in_worker_thread return await future ^^^^^^^^^^^^ File "/home/administrator/anaconda3/envs/llm/lib/python3.11/site-packages/anyio/_backends/_asyncio.py", line 967, in run result = context.run(func, *args) ^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/administrator/anaconda3/envs/llm/lib/python3.11/site-packages/gradio/utils.py", line 712, in run_sync_iterator_async return next(iterator) ^^^^^^^^^^^^^^ File "/home/administrator/anaconda3/envs/llm/lib/python3.11/site-packages/gradio/utils.py", line 873, in gen_wrapper response = next(iterator) ^^^^^^^^^^^^^^ File "/home/administrator/tools/LLM/LLaMA-Factory/src/llamafactory/webui/chatter.py", line 144, in load_model super().__init__(args) File "/home/administrator/tools/LLM/LLaMA-Factory/src/llamafactory/chat/chat_model.py", line 53, in __init__ self.engine: BaseEngine = HuggingfaceEngine(model_args, data_args, finetuning_args, generating_args) 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/administrator/tools/LLM/LLaMA-Factory/src/llamafactory/chat/hf_engine.py", line 54, in __init__ tokenizer_module = load_tokenizer(model_args) ^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/administrator/tools/LLM/LLaMA-Factory/src/llamafactory/model/loader.py", line 90, in load_tokenizer tokenizer = AutoTokenizer.from_pretrained( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/administrator/anaconda3/envs/llm/lib/python3.11/site-packages/transformers/models/auto/tokenization_auto.py", line 946, in from_pretrained tokenizer_config = get_tokenizer_config(pretrained_model_name_or_path, **kwargs) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/administrator/anaconda3/envs/llm/lib/python3.11/site-packages/transformers/models/auto/tokenization_auto.py", line 800, in get_tokenizer_config result = json.load(reader) ^^^^^^^^^^^^^^^^^ File "/home/administrator/anaconda3/envs/llm/lib/python3.11/json/__init__.py", line 293, in load return loads(fp.read(), ^^^^^^^^^^^^^^^^ File "/home/administrator/anaconda3/envs/llm/lib/python3.11/json/__init__.py", line 346, in loads return _default_decoder.decode(s) ^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/administrator/anaconda3/envs/llm/lib/python3.11/json/decoder.py", line 340, in decode raise JSONDecodeError("Extra data", s, end) json.decoder.JSONDecodeError: Extra data: line 240 column 1 (char 9732)
05-21
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值