ValueError: not enough values to unpack (expected 2, got 1)

#19
by henrywang0314 - opened

This error occurs randomly:

File "c:\Users\vic\Desktop\chatGLM\Vitual_pet_GLM\chatGPTAPI.py", line 77, in role_play_qa
role_response = conversation_with_summary({"input": question, "role": role})["response"]
File "C:\Users\vic\Desktop\chatGLM.conda\lib\site-packages\langchain\chains\base.py", line 310, in call
raise e
File "C:\Users\vic\Desktop\chatGLM.conda\lib\site-packages\langchain\chains\base.py", line 304, in call
self._call(inputs, run_manager=run_manager)
File "C:\Users\vic\Desktop\chatGLM.conda\lib\site-packages\langchain\chains\llm.py", line 108, in _call
response = self.generate([inputs], run_manager=run_manager)
File "C:\Users\vic\Desktop\chatGLM.conda\lib\site-packages\langchain\chains\llm.py", line 120, in generate
return self.llm.generate_prompt(
File "C:\Users\vic\Desktop\chatGLM.conda\lib\site-packages\langchain\llms\base.py", line 507, in generate_prompt
return self.generate(prompt_strings, stop=stop, callbacks=callbacks, **kwargs)
File "C:\Users\vic\Desktop\chatGLM.conda\lib\site-packages\langchain\llms\base.py", line 656, in generate
output = self._generate_helper(
File "C:\Users\vic\Desktop\chatGLM.conda\lib\site-packages\langchain\llms\base.py", line 544, in _generate_helper
raise e
File "C:\Users\vic\Desktop\chatGLM.conda\lib\site-packages\langchain\llms\base.py", line 531, in _generate_helper
self._generate(
File "C:\Users\vic\Desktop\chatGLM.conda\lib\site-packages\langchain\llms\base.py", line 1055, in _generate
else self._call(prompt, stop=stop, **kwargs)
File "c:\Users\vic\Desktop\chatGLM\Vitual_pet_GLM\ChatGLM.py", line 30, in _call
response, _ = self.model.chat(
File "C:\Users\vic\Desktop\chatGLM.conda\lib\site-packages\torch\utils_contextlib.py", line 115, in decorate_context
return func(*args, **kwargs)
File "C:\Users\vic/.cache\huggingface\modules\transformers_modules\THUDM\chatglm3-6b\e46a14881eae613281abbd266ee918e93a56018f\modeling_chatglm.py", line 1039, in chat
response, history = self.process_response(response, history)
File "C:\Users\vic/.cache\huggingface\modules\transformers_modules\THUDM\chatglm3-6b\e46a14881eae613281abbd266ee918e93a56018f\modeling_chatglm.py", line 1003, in process_response
metadata, content = response.split("\n", maxsplit=1)

Knowledge Engineering Group (KEG) & Data Mining at Tsinghua University org

问题已经解决,更新一下模型配置文件 (The issue has been fixed — please update the model configuration files.)

zRzRzRzRzRzRzR changed discussion status to closed

Sign up or log in to comment