基于langchain框架开发的agent 全量问题主程序运行:python answer.py 单个问题debug和refine:python debug.py 创新点是在langchain中加入了回调部分和一些prompt的提示 class MyCustomHandlerOne(BaseCallbackHandler):
def on_llm_start(self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any) -> Any:
pass handler1 = MyCustomHandlerOne() agent_prompt = """
你是一个法律行业的智能助手,擅长对用户问题进行拆解多个步骤,并一步一步推理,并且根据逻辑顺序一步一步调用工具,最后将推理结果组合成答案。
通过callback保存所有的对话信息
def on_llm_new_token(self, token: str, **kwargs: Any) -> Any:
    """Called on every newly streamed LLM token; intentionally a no-op."""
    pass
def on_chat_model_start(self, serialized: Dict[str, Any], messages: List[List[BaseMessage]], **kwargs: Any) -> Any:
    """Log every chat prompt and harvest tool outputs into ``tool_call_dict``.

    Appends the full message batch to the module-level ``llm_messages`` log,
    then scans the batch for ``ToolMessage`` parts, parses each one's content
    as JSON, and copies the fields of interest into the module-level
    ``tool_call_dict``: the company name ("公司名称") is stored under the
    bare tool name, every other field under "<tool>_<field>".

    Parameters
    ----------
    serialized : metadata about the chat model (unused here).
    messages : the batches of messages about to be sent to the model.
    """
    llm_messages.append(messages)
    for message in messages:
        for part in message:
            if not isinstance(part, ToolMessage):
                continue
            func_name = part.additional_kwargs["name"]
            # Tool output looks like a Python repr; swap single quotes for
            # double quotes so json.loads accepts it. NOTE(review): this
            # breaks if the payload itself contains quotes — confirm the
            # upstream tool output format.
            content = part.content.replace("'", "\"")
            resp = json.loads(content)
            # "公司名称" (company name) keys the entry by the tool name alone.
            if "公司名称" in resp:
                tool_call_dict[func_name] = resp["公司名称"]
            # Remaining fields are namespaced as "<tool>_<field>".
            for field in ("原告律师事务所", "被告律师事务所", "原告", "被告", "企业地址"):
                if field in resp:
                    tool_call_dict[func_name + "_" + field] = resp[field]
def on_llm_error(self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any) -> Any:
    """Called when the LLM raises an error; intentionally a no-op."""
    pass
def on_chain_start(self, serialized: Dict[str, Any], inputs: Dict[str, Any], **kwargs: Any) -> Any:
    """Called when a chain starts running; intentionally a no-op."""
    pass
def on_tool_start(self, serialized: Dict[str, Any], input_str: str, **kwargs: Any) -> Any:
    """Called when a tool starts running; intentionally a no-op."""
    pass
def on_agent_action(self, action: AgentAction, **kwargs: Any) -> Any:
    """Called when the agent decides on an action; intentionally a no-op."""
    pass
def on_agent_finish(self, finish: AgentFinish, **kwargs: Any) -> Any:
    """Called when the agent finishes; intentionally a no-op."""
    pass
def on_tool_end(self, output: str, **kwargs: Any) -> Any:
    """Called when a tool finishes running; intentionally a no-op."""
    pass
点击空白处退出提示
评论