You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
执行examples/parallel_doc_qa.py demo, 将llm换成
# NOTE(review): the native Ollama Client works against this same host (see the
# working example below), so the server is reachable — the 404 most likely comes
# from the missing '/v1' base path: OpenAI-compatible servers are addressed at
# http://IP:11433/v1, exactly as the commented-out example URL shows. TODO confirm.
bot = ParallelDocQA(llm={'model': 'gs_qwen:7b-chat',
'model_server': 'http://IP:11433',
'api_key': 'EMPTY',
# Use a model service compatible with the OpenAI API, such as vLLM or Ollama:
# 'model': 'Qwen2-7B-Chat',
# 'model_server': 'http://localhost:8000/v1', # base_url, also known as api_base
# 'api_key': 'EMPTY',
# 'generate_cfg': {
# 'max_retries': 10}
}
)
执行会报错
Traceback (most recent call last):
File "/home/qqhu/workdir/ml/OpenAI/Qwen-Agent/qwen_agent/llm/oai.py", line 77, in _chat_stream
response = self._chat_complete_create(model=self.model, messages=messages, stream=True, **generate_cfg)
File "/home/qqhu/workdir/ml/OpenAI/Qwen-Agent/qwen_agent/llm/oai.py", line 63, in _chat_complete_create
return client.chat.completions.create(*args, **kwargs)
File "/home/qqhu/anaconda3/envs/py39/lib/python3.9/site-packages/openai/_utils/_utils.py", line 303, in wrapper
return func(*args, **kwargs)
File "/home/qqhu/anaconda3/envs/py39/lib/python3.9/site-packages/openai/resources/chat/completions.py", line 645, in create
return self._post(
File "/home/qqhu/anaconda3/envs/py39/lib/python3.9/site-packages/openai/_base_client.py", line 1088, in post
return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))
File "/home/qqhu/anaconda3/envs/py39/lib/python3.9/site-packages/openai/_base_client.py", line 853, in request
return self._request(
File "/home/qqhu/anaconda3/envs/py39/lib/python3.9/site-packages/openai/_base_client.py", line 930, in _request
raise self._make_status_error_from_response(err.response) from None
openai.NotFoundError:
执行examples/parallel_doc_qa.py demo, 将llm换成
<title>404 Not Found</title>
bot = ParallelDocQA(llm={'model': 'gs_qwen:7b-chat',
'model_server': 'http://IP:11433',
'api_key': 'EMPTY',
# Use a model service compatible with the OpenAI API, such as vLLM or Ollama:
# 'model': 'Qwen2-7B-Chat',
# 'model_server': 'http://localhost:8000/v1', # base_url, also known as api_base
# 'api_key': 'EMPTY',
# 'generate_cfg': {
# 'max_retries': 10}
}
)
执行会报错
Traceback (most recent call last):
File "/home/qqhu/workdir/ml/OpenAI/Qwen-Agent/qwen_agent/llm/oai.py", line 77, in _chat_stream
response = self._chat_complete_create(model=self.model, messages=messages, stream=True, **generate_cfg)
File "/home/qqhu/workdir/ml/OpenAI/Qwen-Agent/qwen_agent/llm/oai.py", line 63, in _chat_complete_create
return client.chat.completions.create(*args, **kwargs)
File "/home/qqhu/anaconda3/envs/py39/lib/python3.9/site-packages/openai/_utils/_utils.py", line 303, in wrapper
return func(*args, **kwargs)
File "/home/qqhu/anaconda3/envs/py39/lib/python3.9/site-packages/openai/resources/chat/completions.py", line 645, in create
return self._post(
File "/home/qqhu/anaconda3/envs/py39/lib/python3.9/site-packages/openai/_base_client.py", line 1088, in post
return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))
File "/home/qqhu/anaconda3/envs/py39/lib/python3.9/site-packages/openai/_base_client.py", line 853, in request
return self._request(
File "/home/qqhu/anaconda3/envs/py39/lib/python3.9/site-packages/openai/_base_client.py", line 930, in _request
raise self._make_status_error_from_response(err.response) from None
openai.NotFoundError:
404 Not Found
nginx/1.18.0 (Ubuntu)
请问这是什么原因呢?
该链接使用 Ollama 的 client 是可以正常访问的,正常使用的代码如下:
# Native Ollama client pointed at the same host the failing demo used.
client = Client(host='http://IP:11433')
load_dotenv()


def llmresponse(messages):
    """Send *messages* to the Ollama chat API and return the reply text.

    Best-effort: on any failure the exception is printed and the literal
    string 'llmresponse error' is returned instead of raising.
    """
    try:
        reply = client.chat(
            model='qwen2:7b',  # alternatives: llama3:latest gs_qwen:7b-chat yi:9b
            messages=messages,
        )
        text = reply['message']['content'].strip()
    except Exception as exc:  # deliberately broad: never propagate to the caller
        print(exc)
        text = 'llmresponse error'
    return text
The text was updated successfully, but these errors were encountered: