feat: add ReAct agent graph, local CLI test runner, and server entry point

main
old-tom 4 months ago
parent 7a0e526eff
commit 78719ba4eb

@ -5,3 +5,26 @@
# @File : __init__.py
# @Project : reActLLMDemo
# @Desc :
from langchain_openai import ChatOpenAI
from llmagent.llm_config import LLMConfigLoader
from llmagent.llm_config import base_conf
from llmtools.tool_impl import tools
# Load the model configuration entry selected by base_conf.model_form.
llm_conf = LLMConfigLoader.load(item_name=base_conf.model_form)

# OpenAI-compatible chat model built from that configuration.
llm = ChatOpenAI(
    model=llm_conf.model,
    api_key=llm_conf.api_key,
    base_url=llm_conf.base_url,
    max_tokens=llm_conf.max_tokens,
    temperature=llm_conf.temperature,
    streaming=llm_conf.streaming,
)

# Tool-calling variant of the model, used by the agent graph.
llm_with_tools = llm.bind_tools(tools)

# Prompt template registry (template body intentionally empty for now).
PROMPT_TEMPLATE = {
    'SMART_ASSISTANT': {
        'description': '智能助手',
        'template': """""",
    },
}

@ -0,0 +1,62 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2025/4/17 15:28
# @Author : old-tom
# @File : llm_agent
# @Project : reActLLMDemo
# @Desc : 用图构建ReAct
from typing import Annotated
from langgraph.checkpoint.memory import MemorySaver
from llmagent import llm_with_tools
from llmtools.tool_impl import tool_node
from langchain_core.messages import AnyMessage, SystemMessage
from typing_extensions import TypedDict
from langgraph.graph import StateGraph, START, END
from langgraph.graph.message import add_messages
from langgraph.prebuilt import tools_condition
from langchain_core.runnables import RunnableConfig
# In-memory checkpointer: persists conversation state per thread_id.
memory = MemorySaver()


class AgentState(TypedDict):
    """
    Graph state.

    `add_messages` is a reducer that merges every node update into a single
    message list (e.g. HumanMessage / AIMessage entries) instead of
    replacing it.
    """
    # Full chat history; add_messages appends/merges on each update.
    messages: Annotated[list[AnyMessage], add_messages]


graph_builder = StateGraph(AgentState)
def chat(state: AgentState, config: RunnableConfig):
    """
    LLM node: run the tool-enabled model over the conversation so far.

    :param state: graph state; the message history is read from it
    :param config: per-invocation runnable config (thread id, callbacks, ...)
    :return: partial state update with the model's reply appended
    """
    # System prompt prepended on every call (it is not stored in the state).
    preamble = SystemMessage(
        "You are a helpful AI assistant, please respond to the users query to the best of your ability!"
    )
    conversation = [preamble, *state["messages"]]
    reply = llm_with_tools.invoke(conversation, config)
    return {"messages": [reply]}
# The steps below could be replaced with the prebuilt create_react_agent().
# LLM node.
graph_builder.add_node("chat_llm", chat)
# Tool-execution node.
graph_builder.add_node("tools", tool_node)
# Every run enters at the LLM node.
graph_builder.add_edge(START, "chat_llm")
# Conditional routing: tools_condition is the prebuilt router that sends the
# run to "tools" when the last AI message contains tool calls, otherwise to
# END. Fix: the previous static add_edge("chat_llm", END) was removed — it
# duplicated the router's END branch and created an unconditional parallel
# edge to END on every turn.
graph_builder.add_conditional_edges("chat_llm", tools_condition)
# After tools execute, hand their results back to the LLM.
graph_builder.add_edge("tools", "chat_llm")
# checkpointer enables per-thread conversational memory.
graph = graph_builder.compile(name='smart_assistant', checkpointer=memory)

@ -1,91 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2025/4/17 15:28
# @Author : old-tom
# @File : llm_agent
# @Project : reActLLMDemo
# @Desc : 代理
from typing import Annotated
from langgraph.checkpoint.memory import MemorySaver
from langchain_openai import ChatOpenAI
from llmagent.llm_config import LLMConfigLoader
from llmagent.llm_config import base_conf
from llmtools.tool_impl import tools, tool_node
from langchain_core.messages import AnyMessage
from typing_extensions import TypedDict
from langgraph.graph import StateGraph, START, END
from langgraph.graph.message import add_messages
from langgraph.prebuilt import tools_condition
# In-memory checkpointer for per-thread conversation state.
memory = MemorySaver()
# Load the model configuration entry selected by base_conf.model_form.
llm_conf = LLMConfigLoader.load(item_name=base_conf.model_form)
# OpenAI-compatible chat model built from that configuration.
llm = ChatOpenAI(
    model=llm_conf.model, api_key=llm_conf.api_key,
    base_url=llm_conf.base_url, max_tokens=llm_conf.max_tokens,
    temperature=llm_conf.temperature,
    streaming=llm_conf.streaming
)
# Bind the available tools so the model can emit tool calls.
llm_with_tools = llm.bind_tools(tools)
class AgentState(TypedDict):
    """
    Graph state.

    `add_messages` is a reducer that merges every node update into a single
    message list (e.g. HumanMessage / AIMessage entries) instead of
    replacing it.
    """
    # Full chat history; add_messages appends/merges on each update.
    messages: Annotated[list[AnyMessage], add_messages]


graph_builder = StateGraph(AgentState)
def chat(state: AgentState):
    """
    Single-turn LLM call.

    :param state: graph state; the message history is read from it
    :return: state update containing the model's response message
    """
    response = llm_with_tools.invoke(state["messages"])
    return {"messages": [response]}
# LLM node.
graph_builder.add_node("chat_llm", chat)
# Tool-execution node.
graph_builder.add_node("tools", tool_node)
graph_builder.add_edge(START, "chat_llm")
# NOTE(review): this static edge to END is redundant with the
# tools_condition router below (which can itself route to END) and adds an
# unconditional parallel branch to END — confirm it is intended.
graph_builder.add_edge("chat_llm", END)
# Conditional edge: tools_condition is the prebuilt router deciding whether
# the last AI message requires a tool call or the run should finish.
graph_builder.add_conditional_edges("chat_llm", tools_condition)
graph_builder.add_edge("tools", "chat_llm")
# checkpointer enables per-thread conversational memory.
graph = graph_builder.compile(name='语音助手', checkpointer=memory)
def stream_graph_updates(user_input: str):
    """
    Stream the assistant's reply for one user turn, printing tokens as
    they arrive.

    :param user_input: raw text typed by the user
    """
    config = {"configurable": {"thread_id": "1"}}
    payload = {"messages": [{"role": "user", "content": user_input}]}
    for chunk, metadata in graph.stream(payload, config, stream_mode='messages'):
        if chunk.content:
            print(chunk.content, end='', flush=True)
    print('\n')
# Simple REPL: read user turns until the user quits; if reading from the
# console (or streaming) fails, fall back to one canned question and exit.
while True:
    try:
        user_input = input("User: ")
        if user_input.lower() in ["quit", "exit", "q"]:
            print("Goodbye!")
            break
        stream_graph_updates(user_input)
    # Fix: was a bare `except:`, which would also swallow SystemExit and
    # KeyboardInterrupt (e.g. Ctrl-C) instead of letting them terminate.
    except Exception:
        user_input = "What do you know about LangGraph?"
        print("User: " + user_input)
        stream_graph_updates(user_input)
        break

@ -0,0 +1,39 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2025/4/19 18:27
# @Author : old-tom
# @File : local_test
# @Project : reActLLMDemo
# @Desc : 本地终端运行对话
import langgraph.store.memory
from llmagent.assistant_graph import graph
def stream_graph_updates(user_input: str):
    """
    Stream the assistant's reply for one user turn, printing tokens as
    they arrive.

    :param user_input: raw text typed by the user
    :return: None
    """
    config = {"configurable": {"thread_id": "1"}}
    payload = {"messages": [{"role": "user", "content": user_input}]}
    for chunk, metadata in graph.stream(payload, config, stream_mode='messages'):
        if chunk.content:
            print(chunk.content, end='', flush=True)
    print('\n')
if __name__ == '__main__':
    # Simple REPL: read user turns until the user quits; if reading from the
    # console (or streaming) fails, fall back to one canned question and exit.
    while True:
        try:
            user_input = input("User: ")
            if user_input.lower() in ["quit", "exit", "q"]:
                print("Goodbye!")
                break
            stream_graph_updates(user_input)
        # Fix: was a bare `except:`, which would also swallow SystemExit and
        # KeyboardInterrupt (e.g. Ctrl-C) instead of letting them terminate.
        except Exception:
            user_input = "What do you know about LangGraph?"
            print("User: " + user_input)
            stream_graph_updates(user_input)
            break

@ -1,6 +0,0 @@
def main():
    """Print the demo greeting."""
    greeting = "Hello from reactllmdemo!"
    print(greeting)


if __name__ == "__main__":
    main()

@ -0,0 +1,14 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2025/4/19 18:27
# @Author : old-tom
# @File : local_test
# @Project : reActLLMDemo
# @Desc : 作为服务端运行
def main():
    """Entry-point placeholder for running the demo as a server."""
    banner = "Hello from reactllmdemo!"
    print(banner)


if __name__ == "__main__":
    main()

@ -8,13 +8,13 @@
import marqo
# 索引名称
INDEX_NAME = 'test_index'
INDEX_NAME = 'camera_collection'
# 初始化marqo
mq = marqo.Client(url='http://171.92.0.3:8882')
# mq.delete_index(INDEX_NAME)
#
# settings = {
# "treatUrlsAndPointersAsImages": False,
# "model": "hf/bge-large-zh-v1.5",
@ -28,6 +28,7 @@ def create_and_set_index():
:return:
"""
mq.create_index(INDEX_NAME, model='hf/e5-base-v2')
# mq.create_index(INDEX_NAME, settings_dict=settings)
# 添加文档(测试用)
mq.index(INDEX_NAME).add_documents([
{
@ -625,7 +626,7 @@ def query_vector_db(query):
if __name__ == '__main__':
# create_and_set_index()
rt = query_vector_db('利丰高点')
rt = query_vector_db('南卡口')
# TODO 根据 _score字段 取出相似度最高的结果
if rt:
for ele in rt['hits']:

Loading…
Cancel
Save