@router
async def llm_router(input: dict):
    # Imports are kept inside the function so the route definition stays self-contained.
    from semantic_router.llms import OpenAILLM
    from semantic_router.schema import Message
    import openai
    from pydantic import BaseModel, Field

    # Candidate routes, exposed to the LLM as function-calling tools.
    class SearchRoute(BaseModel):
        query: str = Field(description="Route to search when needing external information")

    class MemoryRoute(BaseModel):
        query: str = Field(description="Route to memory when information is likely known")

    llm = OpenAILLM(name="gpt-4")
    messages = [
        Message(role="system", content="Select the best route for the user query."),
        Message(role="user", content=input["query"]),
    ]

    # Ask the LLM to pick a route by calling one of the two tools.
    response = llm(
        messages=messages,
        function_schemas=[
            openai.pydantic_function_tool(SearchRoute),
            openai.pydantic_function_tool(MemoryRoute),
        ],
    )

    # Parse the stringified tool-call list to get the chosen route and its arguments.
    import ast
    choice = ast.literal_eval(response)[0]
    return {
        "choice": choice["function_name"].lower(),
        "input": {**input, **choice["arguments"]},
    }
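
# --- Illustrative sketch (not part of the router above) ---------------------
# When function_schemas is supplied, semantic_router's OpenAILLM returns the
# selected tool call(s) as a stringified list of dicts, which is why the router
# parses it with ast.literal_eval. The literal below is an assumed example of
# that format, used only to show the shape of the router's output.
if __name__ == "__main__":
    import ast

    example_response = (
        "[{'function_name': 'SearchRoute', "
        "'arguments': {'query': 'latest GPU prices'}}]"
    )
    parsed = ast.literal_eval(example_response)[0]
    print({
        "choice": parsed["function_name"].lower(),   # -> "searchroute"
        "input": {"query": "latest GPU prices", **parsed["arguments"]},
    })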