Skip to content

Commit

Permalink
examples: clarify openai-function-call-example (#751)
Browse files Browse the repository at this point in the history
examples: clarify openai-function-call-example

clean up the flow and don't use globals
  • Loading branch information
eliben authored Apr 7, 2024
1 parent d161462 commit 8b67ef3
Showing 1 changed file with 27 additions and 18 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -24,31 +24,30 @@ func main() {
}

fmt.Println("Querying for weather in Boston and Chicago..")
resp := queryLLM(ctx, llm, messageHistory)
resp := queryLLM(ctx, llm, messageHistory, availableTools)
fmt.Println("Initial response:", showResponse(resp))
messageHistory = updateMessageHistory(messageHistory, resp)

if resp.Choices[0].Content != "" {
fmt.Println("Response to weather query:", resp.Choices[0].Content)
}

messageHistory = executeToolCalls(ctx, llm, messageHistory, resp)

messageHistory = append(messageHistory, llms.TextParts(schema.ChatMessageTypeHuman, "Can you compare the two?"))
fmt.Println("Querying for comparison..")
resp = queryLLM(ctx, llm, messageHistory)
fmt.Println("Querying with tool response...")
resp = queryLLM(ctx, llm, messageHistory, availableTools)
fmt.Println(resp.Choices[0].Content)
}

// queryLLM queries the LLM with the given message history and returns the response.
func queryLLM(ctx context.Context, llm llms.Model, messageHistory []llms.MessageContent) *llms.ContentResponse {
// queryLLM asks the model to generate content for the accumulated message
// history, making the given tools available to it. Any generation error is
// fatal to the example.
func queryLLM(ctx context.Context, llm llms.Model, messageHistory []llms.MessageContent, tools []llms.Tool) *llms.ContentResponse {
	response, err := llm.GenerateContent(ctx, messageHistory, llms.WithTools(tools))
	if err != nil {
		log.Fatal(err)
	}
	return response
}

// updateMessageHistory updates the message history with the assistant's response.
// updateMessageHistory updates the message history with the assistant's
// response, and translates tool calls.
func updateMessageHistory(messageHistory []llms.MessageContent, resp *llms.ContentResponse) []llms.MessageContent {
assistantResponse := llms.MessageContent{
Role: schema.ChatMessageTypeAI,
Expand All @@ -66,7 +65,8 @@ func updateMessageHistory(messageHistory []llms.MessageContent, resp *llms.Conte
return append(messageHistory, assistantResponse)
}

// executeToolCalls executes the tool calls in the response and returns the updated message history.
// executeToolCalls executes the tool calls in the response and returns the
// updated message history.
func executeToolCalls(ctx context.Context, llm llms.Model, messageHistory []llms.MessageContent, resp *llms.ContentResponse) []llms.MessageContent {
for _, toolCall := range resp.Choices[0].ToolCalls {
switch toolCall.FunctionCall.Name {
Expand Down Expand Up @@ -95,7 +95,6 @@ func executeToolCalls(ctx context.Context, llm llms.Model, messageHistory []llms
},
}
messageHistory = append(messageHistory, weatherCallResponse)
fmt.Println("Response to weather query:", args, response)
default:
log.Fatalf("Unsupported tool: %s", toolCall.FunctionCall.Name)
}
Expand All @@ -105,6 +104,11 @@ func executeToolCalls(ctx context.Context, llm llms.Model, messageHistory []llms
}

func getCurrentWeather(location string, unit string) (string, error) {
weatherResponses := map[string]string{
"boston": "72 and sunny",
"chicago": "65 and windy",
}

weatherInfo, ok := weatherResponses[strings.ToLower(location)]
if !ok {
return "", fmt.Errorf("no weather info for %q", location)
Expand All @@ -118,12 +122,9 @@ func getCurrentWeather(location string, unit string) (string, error) {
return string(b), nil
}

var weatherResponses = map[string]string{
"boston": "72 and sunny",
"chicago": "65 and windy",
}

var tools = []llms.Tool{
// availableTools simulates the tools/functions we're making available for
// the model.
var availableTools = []llms.Tool{
{
Type: "function",
Function: &llms.FunctionDefinition{
Expand All @@ -146,3 +147,11 @@ var tools = []llms.Tool{
},
},
}

// showResponse renders resp as human-readable, indented JSON; a marshaling
// failure aborts the program.
func showResponse(resp *llms.ContentResponse) string {
	encoded, err := json.MarshalIndent(resp, "", " ")
	if err != nil {
		log.Fatal(err)
	}
	return string(encoded)
}

0 comments on commit 8b67ef3

Please sign in to comment.