以下是 CMD 窗口反馈的报错信息（traceback）：
2023-11-15 22:24:08.356 Uncaught app exception
Traceback (most recent call last):
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\streamlit\runtime\caching\cache_utils.py", line 264, in _get_or_create_cached_value
cached_result = cache.read_result(value_key)
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\streamlit\runtime\caching\cache_resource_api.py", line 500, in read_result
raise CacheKeyNotFoundError()
streamlit.runtime.caching.cache_errors.CacheKeyNotFoundError
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\streamlit\runtime\caching\cache_utils.py", line 312, in _handle_cache_miss
cached_result = cache.read_result(value_key)
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\streamlit\runtime\caching\cache_resource_api.py", line 500, in read_result
raise CacheKeyNotFoundError()
streamlit.runtime.caching.cache_errors.CacheKeyNotFoundError
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\streamlit\runtime\scriptrunner\script_runner.py", line 534, in _run_script
exec(code, module.__dict__)
File "D:\openai.wiki\ChatGLM2-6B\web_demo2.py", line 72, in <module>
st.session_state["state"] = predict(prompt_text, max_length, top_p, temperature, st.session_state["state"])
File "D:\openai.wiki\ChatGLM2-6B\web_demo2.py", line 26, in predict
tokenizer, model = get_model()
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\streamlit\runtime\caching\cache_utils.py", line 212, in wrapper
return cached_func(*args, **kwargs)
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\streamlit\runtime\caching\cache_utils.py", line 241, in __call__
return self._get_or_create_cached_value(args, kwargs)
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\streamlit\runtime\caching\cache_utils.py", line 267, in _get_or_create_cached_value
return self._handle_cache_miss(cache, value_key, func_args, func_kwargs)
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\streamlit\runtime\caching\cache_utils.py", line 321, in _handle_cache_miss
computed_value = self._info.func(*func_args, **func_kwargs)
File "D:\openai.wiki\ChatGLM2-6B\web_demo2.py", line 16, in get_model
model = AutoModel.from_pretrained("THUDM/chatglm2-6b", trust_remote_code=True).quantize(8).cuda()
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\transformers\models\auto\auto_factory.py", line 456, in from_pretrained
config, kwargs = AutoConfig.from_pretrained(
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\transformers\models\auto\configuration_auto.py", line 953, in from_pretrained
config_class = get_class_from_dynamic_module(class_ref, pretrained_model_name_or_path, **kwargs)
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\transformers\dynamic_module_utils.py", line 443, in get_class_from_dynamic_module
return get_class_in_module(class_name, final_module.replace(".py", ""))
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\transformers\dynamic_module_utils.py", line 164, in get_class_in_module
module = importlib.import_module(module_path)
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\importlib\__init__.py", line 127, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 1014, in _gcd_import
File "<frozen importlib._bootstrap>", line 991, in _find_and_load
File "<frozen importlib._bootstrap>", line 961, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "<frozen importlib._bootstrap>", line 1014, in _gcd_import
File "<frozen importlib._bootstrap>", line 991, in _find_and_load
File "<frozen importlib._bootstrap>", line 973, in _find_and_load_unlocked
ModuleNotFoundError: No module named 'transformers_modules.THUDM/chatglm2-6b'
2023-11-15 22:24:08.356 Uncaught app exception
Traceback (most recent call last):
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\streamlit\runtime\caching\cache_utils.py", line 264, in _get_or_create_cached_value
cached_result = cache.read_result(value_key)
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\streamlit\runtime\caching\cache_resource_api.py", line 500, in read_result
raise CacheKeyNotFoundError()
streamlit.runtime.caching.cache_errors.CacheKeyNotFoundError
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\streamlit\runtime\caching\cache_utils.py", line 312, in _handle_cache_miss
cached_result = cache.read_result(value_key)
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\streamlit\runtime\caching\cache_resource_api.py", line 500, in read_result
raise CacheKeyNotFoundError()
streamlit.runtime.caching.cache_errors.CacheKeyNotFoundError
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\streamlit\runtime\scriptrunner\script_runner.py", line 534, in _run_script
exec(code, module.__dict__)
File "D:\openai.wiki\ChatGLM2-6B\web_demo2.py", line 72, in <module>
st.session_state["state"] = predict(prompt_text, max_length, top_p, temperature, st.session_state["state"])
File "D:\openai.wiki\ChatGLM2-6B\web_demo2.py", line 26, in predict
tokenizer, model = get_model()
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\streamlit\runtime\caching\cache_utils.py", line 212, in wrapper
return cached_func(*args, **kwargs)
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\streamlit\runtime\caching\cache_utils.py", line 241, in __call__
return self._get_or_create_cached_value(args, kwargs)
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\streamlit\runtime\caching\cache_utils.py", line 267, in _get_or_create_cached_value
return self._handle_cache_miss(cache, value_key, func_args, func_kwargs)
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\streamlit\runtime\caching\cache_utils.py", line 321, in _handle_cache_miss
computed_value = self._info.func(*func_args, **func_kwargs)
File "D:\openai.wiki\ChatGLM2-6B\web_demo2.py", line 16, in get_model
model = AutoModel.from_pretrained("THUDM/chatglm2-6b", trust_remote_code=True).quantize(8).cuda()
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\transformers\models\auto\auto_factory.py", line 456, in from_pretrained
config, kwargs = AutoConfig.from_pretrained(
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\transformers\models\auto\configuration_auto.py", line 953, in from_pretrained
config_class = get_class_from_dynamic_module(class_ref, pretrained_model_name_or_path, **kwargs)
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\transformers\dynamic_module_utils.py", line 443, in get_class_from_dynamic_module
return get_class_in_module(class_name, final_module.replace(".py", ""))
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\site-packages\transformers\dynamic_module_utils.py", line 164, in get_class_in_module
module = importlib.import_module(module_path)
File "D:\openai.wiki\ChatGLM2-6B\ENV\lib\importlib\__init__.py", line 127, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 1014, in _gcd_import
File "<frozen importlib._bootstrap>", line 991, in _find_and_load
File "<frozen importlib._bootstrap>", line 961, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "<frozen importlib._bootstrap>", line 1014, in _gcd_import
File "<frozen importlib._bootstrap>", line 991, in _find_and_load
File "<frozen importlib._bootstrap>", line 973, in _find_and_load_unlocked
ModuleNotFoundError: No module named 'transformers_modules.THUDM/chatglm2-6b'