@@ -402,26 +402,29 @@ You can describe and pass in functions and the model will intelligently choose t
 ```ruby

 def get_current_weather(location:, unit: "fahrenheit")
-  # use a weather api to fetch weather
+  # Here you could use a weather API to fetch the weather.
+  "The weather in #{location} is nice 🌞 #{unit}"
 end

+messages = [
+  {
+    "role": "user",
+    "content": "What is the weather like in San Francisco?",
+  },
+]
+
 response =
   client.chat(
     parameters: {
       model: "gpt-4o",
-      messages: [
-        {
-          "role": "user",
-          "content": "What is the weather like in San Francisco?",
-        },
-      ],
+      messages: messages, # Defined above because we'll use it again
       tools: [
         {
           type: "function",
           function: {
             name: "get_current_weather",
             description: "Get the current weather in a given location",
-            parameters: {
+            parameters: { # Format: https://json-schema.org/understanding-json-schema
               type: :object,
               properties: {
                 location: {
@@ -438,31 +441,51 @@ response =
           },
         }
       ],
-      tool_choice: {
-        type: "function",
-        function: {
-          name: "get_current_weather"
-        }
-      }
+      tool_choice: "required" # Optional, defaults to "auto"
+      # Can also be "none" or a specific function; see the docs
     },
   )

 message = response.dig("choices", 0, "message")

 if message["role"] == "assistant" && message["tool_calls"]
-  function_name = message.dig("tool_calls", 0, "function", "name")
-  args =
-    JSON.parse(
-      message.dig("tool_calls", 0, "function", "arguments"),
+  # For a subsequent message with the role "tool", OpenAI requires the preceding message to have a tool_calls argument.
+  messages << message # Append the assistant reply once, before responding to its tool calls
+
+  message["tool_calls"].each do |tool_call|
+    tool_call_id = tool_call.dig("id")
+    function_name = tool_call.dig("function", "name")
+    function_args = JSON.parse(
+      tool_call.dig("function", "arguments"),
       { symbolize_names: true },
     )
+    function_response = case function_name
+                        when "get_current_weather"
+                          get_current_weather(**function_args) # => "The weather in San Francisco is nice 🌞 fahrenheit"
+                        else
+                          # decide how to handle
+                        end

-  case function_name
-  when "get_current_weather"
-    get_current_weather(**args)
+    messages << {
+      tool_call_id: tool_call_id,
+      role: "tool",
+      name: function_name,
+      content: function_response
+    } # Extend the conversation with the results of the functions
   end
+
+  second_response = client.chat(
+    parameters: {
+      model: "gpt-4o",
+      messages: messages
+    })
+
+  puts second_response.dig("choices", 0, "message", "content")
+
+  # At this point, the model has decided to call functions, you've called the functions
+  # and provided the response back, and the model has considered this and responded.
 end
-# => "The weather is nice 🌞 "
+# => "It looks like the weather is nice and sunny in San Francisco! If you're planning to go out, it should be a pleasant day."
 ```

 ### Completions
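As the new comment notes, `tool_choice` also accepts `"none"` or a hash naming one specific function to force. A minimal sketch of the forced-function form, reusing the hash format from the removed lines above (the `tools` variable is assumed to hold the same array that the example defines inline):

```ruby
# Sketch: force the model to call get_current_weather rather than letting it choose.
# Assumes `client`, `messages`, and a `tools` array like the one defined above.
response = client.chat(
  parameters: {
    model: "gpt-4o",
    messages: messages,
    tools: tools,
    tool_choice: {
      type: "function",
      function: {
        name: "get_current_weather"
      }
    }
  }
)
```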