Exception when calling LangGraph's aupdate_state() method

Calling LangGraph's aupdate_state() method raises the following exception:

```
assert not mv_writes, "Can't write to SharedValues from update_state"
AssertionError: Can't write to SharedValues from update_state
```

Messages that could previously be modified now fail to update as well, although some messages can still be updated successfully.

No solution has been found so far.
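For context, the call pattern is roughly the one sketched below. This is a minimal, self-contained example rather than the production code: the toy graph here has no SharedValue/managed channels, so it runs cleanly, whereas the real graph built by get_supply_manager_assistant_main_graph is where the assertion fires. The node name, thread id, and message contents are all made up.

```python
import asyncio
from typing import Annotated, TypedDict

from langchain_core.messages import AIMessage, HumanMessage
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import END, START, StateGraph
from langgraph.graph.message import add_messages


class State(TypedDict):
    messages: Annotated[list, add_messages]


def echo(state: State) -> dict:
    return {"messages": [AIMessage(content="ok")]}


async def main() -> None:
    builder = StateGraph(State)
    builder.add_node("echo", echo)
    builder.add_edge(START, "echo")
    builder.add_edge("echo", END)
    graph = builder.compile(checkpointer=MemorySaver())

    config = {"configurable": {"thread_id": "demo-thread"}}
    await graph.ainvoke({"messages": [HumanMessage(content="hi")]}, config)

    # The same kind of call against our production graph raises:
    #   AssertionError: Can't write to SharedValues from update_state
    await graph.aupdate_state(
        config,
        {"messages": [AIMessage(content="edited answer")]},
        as_node="echo",
    )


if __name__ == "__main__":
    asyncio.run(main())
```

The real question and user wiring are in the full interface code below.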

The complete interface code is as follows:

```python
import json
import os
import sys
from time import time

import uvicorn
from aipaas.logger_factory import logger
from fastapi import Request, FastAPI
from fastapi.responses import StreamingResponse
from langchain_core.messages import HumanMessage

from infrastructure.auth_fastapi import SoaAuth
from infrastructure.langfuse_telemetery.trace_langgraph import create_langfuse_callback
from scm_agent.src.application.roles.order.supply_manager_assistant_graph.main_graph.graph import \
    get_supply_manager_assistant_main_graph
from scm_agent.src.common.agent_name import AgentName
from scm_agent.src.common.constants import Status
from scm_agent.src.infrastructures.agent_config_download.config_download import dowload_agent_config_langgraph
from scm_agent.src.infrastructures.agent_config_read.read_yaml_config import read_project_config
from scm_agent.src.infrastructures.agent_state.agent_state_helper import get_redis_key
from scm_agent.src.infrastructures.app_postprocess.output_process import str_to_output_json
from scm_agent.src.infrastructures.app_postprocess.output_process import str_to_stream_output_langgraph
from scm_agent.src.infrastructures.memory.postgre_checkpointer.postgre_checkpointer import FrameworkAdapter
from scm_agent.src.infrastructures.read_config import app_config
from scm_agent.src.interface.input_output_parameters import SupplyManagerAssistantChatInput, ConfigUpdateInput, \
    ConfigUpdateOutput

os.environ['NO_PROXY'] = '127.0.0.1,localhost'

fastapi_app = FastAPI(lifespan=FrameworkAdapter.lifespan_wrapper)
env = os.environ.get("env")
soa = SoaAuth(env_type=env, skip_soa_auth=False, only_check_token=True)
agent_name = AgentName.SupplyManagerAssistantLangGraph
project_config = {}
common_prompt_config = {}


def preload_agent_config(name):
    """Preload the agent configuration.

    Args:
        name: assistant/skill name, configured in app_config
    """
    global project_config
    global common_prompt_config
    dowload_agent_config_langgraph(name)
    # Read the project configuration file
    project_config = read_project_config(agent_name, f"{agent_name}.yaml").get(env)
    # Read the shared prompt configuration file
    common_prompt_config = read_project_config("common", "prompt_config.yaml")


if 'PYCHARM_HOSTED' in os.environ or 'PYCHARM_DEBUG_PROCESS' in os.environ:
    logger.info("Set a breakpoint here in debug mode")
    # raise Exception("Set a breakpoint here in debug mode; comment out this line")

preload_agent_config(agent_name)


async def generator(graph, supply_manager_assistant_chat_input, initial_state, config):
    yield str_to_stream_output_langgraph('<think>')
    yield str_to_stream_output_langgraph('**问题**')
    question = supply_manager_assistant_chat_input.question.strip()
    yield str_to_stream_output_langgraph('\n' + question)
    async for chunk in graph.astream(
            input=initial_state,
            stream_mode="custom",
            config=config,
            subgraphs=True
    ):
        yield str_to_stream_output_langgraph(chunk[1])


@fastapi_app.post('/roles/supply_manager_assistant_chat_langgraph')
@soa.required
async def supply_manager_assistant_chat(request: Request,
                                        supply_manager_assistant_chat_input: SupplyManagerAssistantChatInput):
    strategy = request.app.state.presist_param["checkpointer"]
    thread_id = get_redis_key(supply_manager_assistant_chat_input)
    user_id = supply_manager_assistant_chat_input.user_id
    session_id = supply_manager_assistant_chat_input.session_id
    langfuse_callback = create_langfuse_callback(
        user_id=user_id,
        session_id=session_id,
        trace_name=AgentName.SupplyManagerAssistantLangGraph
    )
    config = {
        "configurable": {"thread_id": thread_id},
        "metadata": {
            "user_id": user_id,
            "project_config": project_config,
            "common_prompt_config": common_prompt_config,
            "ctx_params": supply_manager_assistant_chat_input.ctxParams
        },
        "callbacks": [langfuse_callback],
    }
    try:
        async with strategy as checkpointer:
            graph = get_supply_manager_assistant_main_graph(checkpointer)
            initial_state = {"messages": [HumanMessage(content=supply_manager_assistant_chat_input.question.strip())]}
            return StreamingResponse(
                generator(graph, supply_manager_assistant_chat_input, initial_state, config),
                media_type="text/event-stream",
                headers={"Cache-Control": "no-cache", "Connection": "keep-alive"}
            )
    except Exception as e:
        logger.error(f"[supply_manager_assistant_chat] 执行失败: {e}", exc_info=True)
        return str_to_output_json(f'处理异常异常原因:{e}')


@fastapi_app.post('/config_update')
@soa.required
async def config_update(request: Request, config_update_input: ConfigUpdateInput):
    start_time = time()
    config_update_output = ConfigUpdateOutput()
    try:
        preload_agent_config(config_update_input.agent_name)
        config_update_output.status = Status.SUCCESS
    except Exception as e:
        config_update_output.error_message = "[SCM-Agent] Update config error."
    # Elapsed-time statistics
    config_update_output.elapsed_time = str(time() - start_time)
    return config_update_output.to_dict()


@fastapi_app.get('/health')
@soa.required
async def health(request: Request):
    return json.dumps({"success": True}, ensure_ascii=False)


if __name__ == '__main__':
    uvicorn.run("supply_manager_assistant_app_langgraph:fastapi_app",
                host=app_config.get('host', '0.0.0.0'),
                # port=app_config.get('port', 8080),
                loop="asyncio",
                port=8080)
    # workers=app_config.get('workers', 4))
```

Please adapt the code on this basis and share the complete, final version.