import ast

import openai

# `node`, `router`, `Message`, and the `llm` client are assumed to be
# imported or defined earlier in the post.


@node(start=True)
async def node_start(input: dict):
    """Entry point for our graph."""
    return {"input": input}

@router
async def node_router(input: dict):
    """Routes the query to either search or memory."""
    query = input["query"]
    messages = [
        Message(
            role="system",
            content="You are a helpful assistant. Select the best route to answer the user query. ONLY choose one function.",
        ),
        Message(role="user", content=query),
    ]
    response = llm(
        messages=messages,
        function_schemas=[
            openai.pydantic_function_tool(Search),
            openai.pydantic_function_tool(Memory),
        ],
    )
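    # `llm` is assumed to return the chosen tool call(s) as a Python-literal
    # string, e.g. "[{'function_name': 'Search', 'arguments': {'query': '...'}}]",
    # hence the `ast.literal_eval` below.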
    choice = ast.literal_eval(response)[0]
    return {
        "choice": choice["function_name"].lower(),
        "input": {**input, **choice["arguments"]},
    }

@node
async def memory(input: dict):
    """Retrieves information from memory."""
    query = input["query"]
    # In a real implementation, this would query a vector database.
    return {"input": {"text": "The user is in Bali right now.", **input}}

@node
async def search(input: dict):
    """Searches for information."""
    query = input["query"]
    # In a real implementation, this would make a web search.
    return {
        "input": {
            "text": "The most famous photo spot in Bali is the Uluwatu Temple.",
            **input,
        }
    }

@node
async def llm_node(input: dict):
    """Generates a response using the retrieved information."""
    chat_history = [
        Message(role=message["role"], content=message["content"])
        for message in input["chat_history"]
    ]
    messages = [
        Message(role="system", content="You are a helpful assistant."),
        *chat_history,
        Message(
            role="user",
            content=(
                f"Respond to the following query from the user: {input['query']}\n"
                "Here is additional context. You can use it to answer the user "
                f"query, but do not reference it directly: {input.get('text', '')}."
            ),
        ),
    ]
    response = llm(messages=messages)
    return {"output": response}

@node(end=True)
async def node_end(input: dict):
    """Exit point for our graph."""
    return {"output": input["output"]}