[geekan/MetaGPT] Running the example demo with the latest version of MetaGPT fails with an exception: openai.types.completion_usage.CompletionUsage() argument after ** must be a mapping, not NoneType

2024-05-16

C:\Users\glh19\MetaGPT\MetaGPT\Scripts\python.exe E:\code\python\MetaGPT\examples\di\custom_tool.py
2024-05-08 16:55:41.973 | INFO     | metagpt.const:get_metagpt_package_root:29 - Package root set to E:\code\python\MetaGPT


```json
[
    {
        "task_id": "1",
        "dependent_task_ids": [],
        "instruction": "Call the magic function with arguments 'A' and 2, then provide the result.",
        "task_type": "other"
    }
]
```
2024-05-08 16:56:09.817 | WARNING  | metagpt.utils.common:wrapper:649 - There is a exception in role's execution, in order to resume, we delete the newest role communication message in the role's memory.
Traceback (most recent call last):
  File "E:\code\python\MetaGPT\metagpt\utils\common.py", line 640, in wrapper
    return await func(self, *args, **kwargs)
  File "E:\code\python\MetaGPT\metagpt\roles\role.py", line 555, in run
    rsp = await self.react()
  File "E:\code\python\MetaGPT\metagpt\roles\role.py", line 526, in react
    rsp = await self._plan_and_act()
  File "E:\code\python\MetaGPT\metagpt\roles\di\data_interpreter.py", line 95, in _plan_and_act
    raise e
  File "E:\code\python\MetaGPT\metagpt\roles\di\data_interpreter.py", line 90, in _plan_and_act
    rsp = await super()._plan_and_act()
  File "E:\code\python\MetaGPT\metagpt\roles\role.py", line 486, in _plan_and_act
    await self.planner.update_plan(goal=goal)
  File "E:\code\python\MetaGPT\metagpt\strategy\planner.py", line 75, in update_plan
    rsp = await WritePlan().run(context, max_tasks=max_tasks)
  File "E:\code\python\MetaGPT\metagpt\actions\di\write_plan.py", line 49, in run
    rsp = await self._aask(prompt)
  File "E:\code\python\MetaGPT\metagpt\actions\action.py", line 93, in _aask
    return await self.llm.aask(prompt, system_msgs)
  File "E:\code\python\MetaGPT\metagpt\provider\base_llm.py", line 150, in aask
    rsp = await self.acompletion_text(message, stream=stream, timeout=self.get_timeout(timeout))
  File "C:\Users\glh19\MetaGPT\MetaGPT\lib\site-packages\tenacity\_asyncio.py", line 88, in async_wrapped
    return await fn(*args, **kwargs)
  File "C:\Users\glh19\MetaGPT\MetaGPT\lib\site-packages\tenacity\_asyncio.py", line 47, in __call__
    do = self.iter(retry_state=retry_state)
  File "C:\Users\glh19\MetaGPT\MetaGPT\lib\site-packages\tenacity\__init__.py", line 314, in iter
    return fut.result()
  File "D:\Develop\python\lib\concurrent\futures\_base.py", line 451, in result
    return self.__get_result()
  File "D:\Develop\python\lib\concurrent\futures\_base.py", line 403, in __get_result
    raise self._exception
  File "C:\Users\glh19\MetaGPT\MetaGPT\lib\site-packages\tenacity\_asyncio.py", line 50, in __call__
    result = await fn(*args, **kwargs)
  File "E:\code\python\MetaGPT\metagpt\provider\openai_api.py", line 155, in acompletion_text
    return await self._achat_completion_stream(messages, timeout=timeout)
  File "E:\code\python\MetaGPT\metagpt\provider\openai_api.py", line 105, in _achat_completion_stream
    usage = CompletionUsage(**chunk.usage)
TypeError: openai.types.completion_usage.CompletionUsage() argument after ** must be a mapping, not NoneType

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "E:\code\python\MetaGPT\examples\di\custom_tool.py", line 36, in <module>
    asyncio.run(main())
  File "D:\Develop\python\lib\asyncio\runners.py", line 44, in run
    return loop.run_until_complete(main)
  File "D:\Develop\python\lib\asyncio\base_events.py", line 649, in run_until_complete
    return future.result()
  File "E:\code\python\MetaGPT\examples\di\custom_tool.py", line 30, in main
    await di.run("Just call the magic function with arg1 'A' and arg2 2. Tell me the result.")
  File "E:\code\python\MetaGPT\metagpt\utils\common.py", line 662, in wrapper
    raise Exception(format_trackback_info(limit=None))
Exception: Traceback (most recent call last):
  File "E:\code\python\MetaGPT\metagpt\utils\common.py", line 640, in wrapper
    return await func(self, *args, **kwargs)
  File "E:\code\python\MetaGPT\metagpt\roles\role.py", line 555, in run
    rsp = await self.react()
  File "E:\code\python\MetaGPT\metagpt\roles\role.py", line 526, in react
    rsp = await self._plan_and_act()
  File "E:\code\python\MetaGPT\metagpt\roles\di\data_interpreter.py", line 95, in _plan_and_act
    raise e
  File "E:\code\python\MetaGPT\metagpt\roles\di\data_interpreter.py", line 90, in _plan_and_act
    rsp = await super()._plan_and_act()
  File "E:\code\python\MetaGPT\metagpt\roles\role.py", line 486, in _plan_and_act
    await self.planner.update_plan(goal=goal)
  File "E:\code\python\MetaGPT\metagpt\strategy\planner.py", line 75, in update_plan
    rsp = await WritePlan().run(context, max_tasks=max_tasks)
  File "E:\code\python\MetaGPT\metagpt\actions\di\write_plan.py", line 49, in run
    rsp = await self._aask(prompt)
  File "E:\code\python\MetaGPT\metagpt\actions\action.py", line 93, in _aask
    return await self.llm.aask(prompt, system_msgs)
  File "E:\code\python\MetaGPT\metagpt\provider\base_llm.py", line 150, in aask
    rsp = await self.acompletion_text(message, stream=stream, timeout=self.get_timeout(timeout))
  File "C:\Users\glh19\MetaGPT\MetaGPT\lib\site-packages\tenacity\_asyncio.py", line 88, in async_wrapped
    return await fn(*args, **kwargs)
  File "C:\Users\glh19\MetaGPT\MetaGPT\lib\site-packages\tenacity\_asyncio.py", line 47, in __call__
    do = self.iter(retry_state=retry_state)
  File "C:\Users\glh19\MetaGPT\MetaGPT\lib\site-packages\tenacity\__init__.py", line 314, in iter
    return fut.result()
  File "D:\Develop\python\lib\concurrent\futures\_base.py", line 451, in result
    return self.__get_result()
  File "D:\Develop\python\lib\concurrent\futures\_base.py", line 403, in __get_result
    raise self._exception
  File "C:\Users\glh19\MetaGPT\MetaGPT\lib\site-packages\tenacity\_asyncio.py", line 50, in __call__
    result = await fn(*args, **kwargs)
  File "E:\code\python\MetaGPT\metagpt\provider\openai_api.py", line 155, in acompletion_text
    return await self._achat_completion_stream(messages, timeout=timeout)
  File "E:\code\python\MetaGPT\metagpt\provider\openai_api.py", line 105, in _achat_completion_stream
    usage = CompletionUsage(**chunk.usage)
TypeError: openai.types.completion_usage.CompletionUsage() argument after ** must be a mapping, not NoneType

Process finished with exit code 1
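
For context, the final TypeError is ordinary Python behaviour: `**` unpacking requires a mapping, and the streamed chunk's `usage` field is `None` here. A minimal reproduction, assuming only the `openai` package (the `chunk_usage` variable is illustrative):

```python
from openai.types import CompletionUsage

chunk_usage = None  # what a streaming chunk carries when the provider sends no usage data
usage = CompletionUsage(**chunk_usage)
# TypeError: openai.types.completion_usage.CompletionUsage() argument after ** must be a mapping, not NoneType
```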

Answers


The analysis so far points to `_achat_completion_stream` in `metagpt/provider/openai_api.py`:

    async def _achat_completion_stream(self, messages: list[dict], timeout=USE_CONFIG_TIMEOUT) -> str:
        response: AsyncStream[ChatCompletionChunk] = await self.aclient.chat.completions.create(
            **self._cons_kwargs(messages, timeout=self.get_timeout(timeout)), stream=True
        )
        usage = None
        collected_messages = []
        async for chunk in response:
            chunk_message = chunk.choices[0].delta.content or "" if chunk.choices else ""  # extract the message
            finish_reason = (
                chunk.choices[0].finish_reason if chunk.choices and hasattr(chunk.choices[0], "finish_reason") else None
            )
            log_llm_stream(chunk_message)
            collected_messages.append(chunk_message)
            if finish_reason:
                if hasattr(chunk, "usage"):
                    # Some services return usage as an attribute of the chunk, e.g. Fireworks
                    usage = CompletionUsage(**chunk.usage)
                elif hasattr(chunk.choices[0], "usage"):
                    # The usage of some services is an attribute of chunk.choices[0], such as Moonshot
                    usage = CompletionUsage(**chunk.choices[0].usage)
                elif "openrouter.ai" in self.config.base_url:
                    # due to it get token cost from api
                    usage = await get_openrouter_tokens(chunk)

The cause is that `if hasattr(chunk, "usage")` and `elif hasattr(chunk.choices[0], "usage")` only check that the attribute exists, not that its value is non-None, so when the provider streams a chunk with `usage = None` the code attempts `CompletionUsage(**None)` and raises the TypeError above.
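
One way to make this branch tolerant of a missing value is to check the attribute's value rather than merely its presence; a minimal sketch, not the upstream fix (the `extract_usage` helper name and the dict/object fallback are assumptions):

```python
from typing import Any, Optional

from openai.types import CompletionUsage


def extract_usage(chunk: Any) -> Optional[CompletionUsage]:
    """Pull usage out of a streamed chunk, tolerating providers that send usage=None."""
    raw = getattr(chunk, "usage", None)
    if raw is None and getattr(chunk, "choices", None):
        # Some services (e.g. Moonshot) attach usage to chunk.choices[0] instead of the chunk
        raw = getattr(chunk.choices[0], "usage", None)
    if raw is None:
        return None
    # Providers may hand back a plain dict or an already-parsed usage object
    return CompletionUsage(**raw) if isinstance(raw, dict) else raw
```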


I have a question.


Yes, I hit the same problem with metagpt version=0.8.1, and it happened again after I ran pip install metagpt==0.7.1.


Same here. Bug report #1250