Commit 8b67ef3

examples: clarify openai-function-call-example (#751)

Clean up the flow of the openai-function-call example and don't use globals.

1 parent d161462

1 file changed (+27, -18):

examples/openai-function-call-example/openai_function_call_example.go

@@ -24,31 +24,30 @@ func main() {
 	}
 
 	fmt.Println("Querying for weather in Boston and Chicago..")
-	resp := queryLLM(ctx, llm, messageHistory)
+	resp := queryLLM(ctx, llm, messageHistory, availableTools)
+	fmt.Println("Initial response:", showResponse(resp))
 	messageHistory = updateMessageHistory(messageHistory, resp)
 
-	if resp.Choices[0].Content != "" {
-		fmt.Println("Response to weather query:", resp.Choices[0].Content)
-	}
-
 	messageHistory = executeToolCalls(ctx, llm, messageHistory, resp)
 
 	messageHistory = append(messageHistory, llms.TextParts(schema.ChatMessageTypeHuman, "Can you compare the two?"))
-	fmt.Println("Querying for comparison..")
-	resp = queryLLM(ctx, llm, messageHistory)
+	fmt.Println("Querying with tool response...")
+	resp = queryLLM(ctx, llm, messageHistory, availableTools)
 	fmt.Println(resp.Choices[0].Content)
 }
 
-// queryLLM queries the LLM with the given message history and returns the response.
-func queryLLM(ctx context.Context, llm llms.Model, messageHistory []llms.MessageContent) *llms.ContentResponse {
+// queryLLM queries the LLM with the given message history and list of available
+// tools, and returns the response.
+func queryLLM(ctx context.Context, llm llms.Model, messageHistory []llms.MessageContent, tools []llms.Tool) *llms.ContentResponse {
 	resp, err := llm.GenerateContent(ctx, messageHistory, llms.WithTools(tools))
 	if err != nil {
 		log.Fatal(err)
 	}
 	return resp
 }
 
-// updateMessageHistory updates the message history with the assistant's response.
+// updateMessageHistory updates the message history with the assistant's
+// response, and translates tool calls.
 func updateMessageHistory(messageHistory []llms.MessageContent, resp *llms.ContentResponse) []llms.MessageContent {
 	assistantResponse := llms.MessageContent{
 		Role: schema.ChatMessageTypeAI,
@@ -66,7 +65,8 @@ func updateMessageHistory(messageHistory []llms.MessageContent, resp *llms.Conte
 	return append(messageHistory, assistantResponse)
 }
 
-// executeToolCalls executes the tool calls in the response and returns the updated message history.
+// executeToolCalls executes the tool calls in the response and returns the
+// updated message history.
 func executeToolCalls(ctx context.Context, llm llms.Model, messageHistory []llms.MessageContent, resp *llms.ContentResponse) []llms.MessageContent {
 	for _, toolCall := range resp.Choices[0].ToolCalls {
 		switch toolCall.FunctionCall.Name {
@@ -95,7 +95,6 @@ func executeToolCalls(ctx context.Context, llm llms.Model, messageHistory []llms
 				},
 			}
 			messageHistory = append(messageHistory, weatherCallResponse)
-			fmt.Println("Response to weather query:", args, response)
 		default:
 			log.Fatalf("Unsupported tool: %s", toolCall.FunctionCall.Name)
 		}
@@ -105,6 +104,11 @@ func executeToolCalls(ctx context.Context, llm llms.Model, messageHistory []llms
 }
 
 func getCurrentWeather(location string, unit string) (string, error) {
+	weatherResponses := map[string]string{
+		"boston":  "72 and sunny",
+		"chicago": "65 and windy",
+	}
+
 	weatherInfo, ok := weatherResponses[strings.ToLower(location)]
 	if !ok {
 		return "", fmt.Errorf("no weather info for %q", location)
@@ -118,12 +122,9 @@ func getCurrentWeather(location string, unit string) (string, error) {
 	return string(b), nil
 }
 
-var weatherResponses = map[string]string{
-	"boston":  "72 and sunny",
-	"chicago": "65 and windy",
-}
-
-var tools = []llms.Tool{
+// availableTools simulates the tools/functions we're making available for
+// the model.
+var availableTools = []llms.Tool{
 	{
 		Type: "function",
 		Function: &llms.FunctionDefinition{
@@ -146,3 +147,11 @@ var tools = []llms.Tool{
 		},
 	},
 }
+
+func showResponse(resp *llms.ContentResponse) string {
+	b, err := json.MarshalIndent(resp, "", " ")
+	if err != nil {
+		log.Fatal(err)
+	}
+	return string(b)
+}
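For readers piecing the hunks together, here is a rough sketch of how the example's main function reads after this change. The model setup and the opening prompt sit before line 24 and are not part of the diff, so the openai.New() call and the initial message below are assumptions; queryLLM, updateMessageHistory, executeToolCalls, showResponse, and availableTools are the helpers defined in the file as shown above.

// Sketch (not part of the commit) of the example's flow after this change:
// the tools are passed explicitly to queryLLM instead of being read from a
// package-level variable. Helpers come from the rest of the file.
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/llms/openai"
	"github.com/tmc/langchaingo/schema"
)

func main() {
	ctx := context.Background()

	// Assumed setup (precedes the diff): create the OpenAI-backed model and
	// seed the conversation with the weather question.
	llm, err := openai.New()
	if err != nil {
		log.Fatal(err)
	}
	messageHistory := []llms.MessageContent{
		llms.TextParts(schema.ChatMessageTypeHuman, "What is the weather like in Boston and in Chicago?"),
	}

	fmt.Println("Querying for weather in Boston and Chicago..")
	resp := queryLLM(ctx, llm, messageHistory, availableTools)
	fmt.Println("Initial response:", showResponse(resp))

	// Record the assistant's tool calls, then execute them and append their
	// results to the history.
	messageHistory = updateMessageHistory(messageHistory, resp)
	messageHistory = executeToolCalls(ctx, llm, messageHistory, resp)

	// Ask a follow-up question now that the tool results are in the history.
	messageHistory = append(messageHistory, llms.TextParts(schema.ChatMessageTypeHuman, "Can you compare the two?"))
	fmt.Println("Querying with tool response...")
	resp = queryLLM(ctx, llm, messageHistory, availableTools)
	fmt.Println(resp.Choices[0].Content)
}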
