var tools = []openai.Tool{
    {
        Type: openai.ToolTypeFunction,
        Function: &openai.FunctionDefinition{
            Name:        "get_current_weather",
            Description: "Retrieve current weather conditions",
            Parameters: jsonschema.Definition{
                Type: jsonschema.Object,
                Properties: map[string]jsonschema.Definition{
                    "location": {
                        Type:        jsonschema.String,
                        Description: "City and state, e.g. San Francisco, CA",
                    },
                    "unit": {Type: jsonschema.String, Enum: []string{"celsius", "fahrenheit"}},
                },
                Required: []string{"location"},
            },
        },
    },
}

func chatLLM(b brModel.BrainRuntime) error {
    messages := b.GetMemory("messages").([]openai.ChatCompletionMessage)

    client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))
    resp, err := client.CreateChatCompletion(
        context.Background(),
        openai.ChatCompletionRequest{
            Model:    openai.GPT3Dot5Turbo0125,
            Messages: messages,
            Tools:    tools,
        },
    )
    if err != nil {
        return err
    }

    // Append the assistant reply (which may contain tool calls) to memory.
    if len(resp.Choices) > 0 {
        messages = append(messages, resp.Choices[0].Message)
        b.SetMemory("messages", messages)
    }
    return nil
}
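Whether the conversation should be routed to the tool-execution step depends on whether the assistant reply stored at the end of chatLLM contains tool calls. That routing condition is not part of the snippet above; the helper below is a minimal sketch of the check (the function name shouldCallTools is illustrative, only the go-openai types are real):

// shouldCallTools reports whether the most recent message is an assistant
// reply that requested one or more tool invocations.
func shouldCallTools(messages []openai.ChatCompletionMessage) bool {
    if len(messages) == 0 {
        return false
    }
    last := messages[len(messages)-1]
    return last.Role == openai.ChatMessageRoleAssistant && len(last.ToolCalls) > 0
}

When it returns true, control passes to the tool execution module below; otherwise the assistant's text answer is final.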
Tool Execution Module
func callTools(b brModel.BrainRuntime) error {
    messages := b.GetMemory("messages").([]openai.ChatCompletionMessage)
    lastMsg := messages[len(messages)-1]

    for _, call := range lastMsg.ToolCalls {
        switch call.Function.Name {
        case "get_current_weather":
            // Execute weather API call (mock implementation).
            fmt.Printf("Executing %s with params: %s\n",
                call.Function.Name, call.Function.Arguments)

            // Simulate the API response and record it as a tool message.
            messages = append(messages, openai.ChatCompletionMessage{
                Role:       openai.ChatMessageRoleTool,
                Content:    "Sunny, 22°C",
                ToolCallID: call.ID,
                Name:       call.Function.Name,
            })
        }
    }
    b.SetMemory("messages", messages)
    return nil
}
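The switch above only mocks the tool: it prints the raw JSON arguments and appends a canned result. In a real handler the arguments string would be decoded against the schema declared in tools. The sketch below assumes a hypothetical weatherArgs struct and handleWeatherCall helper (neither is part of the framework or the OpenAI client); it relies on encoding/json, fmt, and the go-openai package already used above:

// weatherArgs mirrors the parameters schema of get_current_weather.
type weatherArgs struct {
    Location string `json:"location"`
    Unit     string `json:"unit,omitempty"`
}

// handleWeatherCall decodes the model-supplied arguments and returns the
// string that would be placed in the tool message's Content field.
func handleWeatherCall(call openai.ToolCall) (string, error) {
    var args weatherArgs
    if err := json.Unmarshal([]byte(call.Function.Arguments), &args); err != nil {
        return "", fmt.Errorf("invalid tool arguments: %w", err)
    }
    if args.Unit == "" {
        args.Unit = "celsius" // the schema marks unit as optional
    }
    // A real implementation would call a weather API here.
    return fmt.Sprintf("Sunny, 22° (%s) in %s", args.Unit, args.Location), nil
}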