#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2025/4/19 18:27
# @Author : old-tom
# @File : local_test
# @Project : reActLLMDemo
# @Desc : Run the assistant interactively from a local terminal.
from llmagent.assistant_graph import graph

# Fixed conversation thread id so the whole terminal session shares one
# checkpointed history inside the graph. Hoisted out of the function because
# it is loop-invariant.
THREAD_CONFIG = {"configurable": {"thread_id": "1"}}


def stream_graph_updates(user_input: str) -> None:
    """
    Stream the assistant's reply for a single user message, printing tokens
    to stdout as they arrive.

    :param user_input: raw text entered by the user
    :return: None (output goes to stdout)
    """
    for chunk, metadata in graph.stream(
            {"messages": [{"role": "user", "content": user_input}]},
            THREAD_CONFIG,
            stream_mode='messages',
    ):
        # Bookkeeping/tool-call chunks can carry empty content; skip them so
        # only visible assistant text is printed.
        if chunk.content:
            print(chunk.content, end='', flush=True)
    print('\n')


if __name__ == '__main__':
    while True:
        try:
            user_input = input("User: ")
        except (EOFError, KeyboardInterrupt):
            # Ctrl-D / Ctrl-C at the prompt: exit cleanly. Previously
            # EOFError fell into the generic error fallback and tried to
            # send an "error" message to the model.
            print("\nGoodbye!")
            break
        if user_input.lower() in ["quit", "exit", "q"]:
            print("Goodbye!")
            break
        try:
            stream_graph_updates(user_input)
        except Exception as e:
            # Best effort: report the failure back through the assistant
            # once, then stop. The retry is guarded — if the graph itself is
            # broken, a second unguarded call would crash with a traceback.
            user_input = f"出错了,系统提示{e}"
            print("User: " + user_input)
            try:
                stream_graph_updates(user_input)
            except Exception:
                pass
            break