chatglm3 官方给出的测试文件存在 bug
运行api_server正常,但是启动openai_api_request之后server端报错:
2024-07-08 14:21:39.196 | DEBUG | __main__:create_chat_completion:242 - ==== request ====
{'messages': [ChatMessage(role='system', content="You are ChatGLM3, a large language model trained by Zhipu.AI. Follow the user's instructions carefully. Respond using markdown.", name=None, function_call=None), ChatMessage(role='user', content='你好,请你用生动的语言给我讲一个小故事吧', name=None, function_call=None)], ...}
Traceback (most recent call last):
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/fastapi/applications.py", line 1054, in __call__
await super().__call__(scope, receive, send)
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/starlette/applications.py", line 123, in __call__
await self.middleware_stack(scope, receive, send)
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/starlette/middleware/errors.py", line 186, in __call__
raise exc
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/starlette/middleware/errors.py", line 164, in __call__
await self.app(scope, receive, _send)
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/starlette/middleware/cors.py", line 85, in __call__
await self.app(scope, receive, send)
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/starlette/middleware/exceptions.py", line 65, in __call__
await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send)
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/starlette/_exception_handler.py", line 64, in wrapped_app
raise exc
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app
await app(scope, receive, sender)
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/starlette/routing.py", line 756, in __call__
await self.middleware_stack(scope, receive, send)
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/starlette/routing.py", line 776, in app
await route.handle(scope, receive, send)
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/starlette/routing.py", line 297, in handle
await self.app(scope, receive, send)
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/starlette/routing.py", line 77, in app
await wrap_app_handling_exceptions(app, request)(scope, receive, send)
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/starlette/_exception_handler.py", line 64, in wrapped_app
raise exc
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app
await app(scope, receive, sender)
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/starlette/routing.py", line 72, in app
response = await func(request)
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/fastapi/routing.py", line 278, in app
raw_response = await run_endpoint_function(
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/fastapi/routing.py", line 191, in run_endpoint_function
return await dependant.call(**values)
File "/root/chatglm3/ChatGLM3/openai_api_demo/api_server.py", line 298, in create_chat_completion
response = generate_chatglm3(model, tokenizer, gen_params)
File "/root/chatglm3/ChatGLM3/openai_api_demo/utils.py", line 165, in generate_chatglm3
for response in generate_stream_chatglm3(model, tokenizer, params):
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/torch/utils/_contextlib.py", line 35, in generator_context
response = gen.send(None)
File "/root/chatglm3/ChatGLM3/openai_api_demo/utils.py", line 81, in generate_stream_chatglm3
for total_ids in model.stream_generate(**inputs, eos_token_id=eos_token_id, **gen_kwargs):
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/torch/utils/_contextlib.py", line 35, in generator_context
response = gen.send(None)
File "/root/.cache/huggingface/modules/transformers_modules/model/modeling_chatglm.py", line 1156, in stream_generate
logits_warper = self._get_logits_warper(generation_config)
TypeError: GenerationMixin._get_logits_warper() missing 1 required positional argument: 'device'
调用 openai_api_request 端报错:
(chatglm3) root@a4ca65dd166b:~/chatglm3/ChatGLM3/openai_api_demo# python3 openai_api_request.py
Traceback (most recent call last):
File "/root/chatglm3/ChatGLM3/openai_api_demo/openai_api_request.py", line 96, in <module>
simple_chat(use_stream=False)
File "/root/chatglm3/ChatGLM3/openai_api_demo/openai_api_request.py", line 67, in simple_chat
response = client.chat.completions.create(
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/openai/_utils/_utils.py", line 277, in wrapper
return func(*args, **kwargs)
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/openai/resources/chat/completions.py", line 643, in create
return self._post(
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/openai/_base_client.py", line 1261, in post
return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/openai/_base_client.py", line 942, in request
return self._request(
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/openai/_base_client.py", line 1026, in _request
return self._retry_request(
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/openai/_base_client.py", line 1074, in _retry_request
return self._request(
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/openai/_base_client.py", line 1026, in _request
return self._retry_request(
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/openai/_base_client.py", line 1074, in _retry_request
return self._request(
File "/root/anaconda3/envs/chatglm3/lib/python3.10/site-packages/openai/_base_client.py", line 1041, in _request
raise self._make_status_error_from_response(err.response) from None
openai.InternalServerError: Internal Server Error