Skip to content

Commit 065955f

Browse files
committed
feat: enhance llm.Config with ThinkingConfig and add corresponding test
1 parent 73857a0 commit 065955f

4 files changed

Lines changed: 64 additions & 7 deletions

File tree

llm/llm_configs.go

Lines changed: 13 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,20 @@
11
package llm
22

33
// Config holds the generation parameters sent to an LLM provider.
// All pointer fields are optional: nil means "use the provider's default".
// Every field carries omitempty so unset values are dropped from the
// serialized request instead of being sent as JSON nulls/zeros.
type Config struct {
	Temperature     *float32        `json:"temperature,omitempty"`
	TopP            *float32        `json:"top_p,omitempty"`
	TopK            *int            `json:"top_k,omitempty"`
	MaxOutputTokens *int            `json:"max_output_tokens,omitempty"`
	StopSequences   []string        `json:"stop_sequences,omitempty"`
	ThinkingConfig  *ThinkingConfig `json:"thinking_config,omitempty"`

	SystemInstruction     string         `json:"system_instruction,omitempty"`
	SafetyFilterThreshold BlockThreshold `json:"filter_threshold,omitempty"`
}

// ThinkingConfig controls the model's "thinking" (reasoning) behavior.
// Nil fields leave the provider default in place; see the aistudio
// provider, which maps these onto genai.ThinkingConfig field by field.
type ThinkingConfig struct {
	// IncludeThoughts requests that thought segments be included in output.
	IncludeThoughts *bool `json:"include_thoughts,omitempty"`
	// ThinkingBudget caps thinking tokens; 0 disables thinking.
	// NOTE(review): the zero-disables semantics come from the provider —
	// confirm against the backing API's documentation.
	ThinkingBudget *int `json:"thinking_budget,omitempty"`
}

// BlockThreshold selects the safety-filter blocking level.
// NOTE(review): its valid values appear to be declared elsewhere in this
// package — not visible from here.
type BlockThreshold uint16

pconf/utils.go

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
package pconf
2+
3+
// Ptrify returns a pointer to a copy of v. It exists to make populating
// optional pointer fields (e.g. *int, *bool config knobs) from literal
// values a one-liner.
func Ptrify[T any](v T) *T {
	p := v
	return &p
}

provider/aistudio/aistudio_model.go

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -273,6 +273,16 @@ func (g *generativeLanguageModel) GenerateStream(ctx context.Context, chat *llm.
273273
config.SystemInstruction = &genai.Content{Parts: []*genai.Part{{Text: g.config.SystemInstruction + chat.SystemInstruction}}}
274274
}
275275

276+
if g.config.ThinkingConfig != nil {
277+
config.ThinkingConfig = &genai.ThinkingConfig{}
278+
if g.config.ThinkingConfig.IncludeThoughts != nil {
279+
config.ThinkingConfig.IncludeThoughts = *g.config.ThinkingConfig.IncludeThoughts
280+
}
281+
if g.config.ThinkingConfig.ThinkingBudget != nil {
282+
config.ThinkingConfig.ThinkingBudget = ptrify(int32(*g.config.ThinkingConfig.ThinkingBudget))
283+
}
284+
}
285+
276286
stream := make(chan llm.Segment, 128)
277287
v := &llm.StreamContent{
278288
Content: &llm.Content{},

provider/aistudio/aistudio_model_test.go

Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -207,3 +207,39 @@ func TestAIStudioToolCall(t *testing.T) {
207207
return
208208
}
209209
}
210+
211+
func TestAIStudioGenerateWithThinking(t *testing.T) {
212+
client := getClient()
213+
defer client.Close()
214+
215+
config := &llm.Config{
216+
ThinkingConfig: &llm.ThinkingConfig{
217+
ThinkingBudget: pconf.Ptrify(0),
218+
},
219+
}
220+
221+
model, err := client.NewLLM("gemini-2.5-flash-preview-04-17", config)
222+
if err != nil {
223+
panic(err)
224+
}
225+
defer model.Close()
226+
227+
output := model.GenerateStream(
228+
context.Background(),
229+
nil,
230+
&llm.Content{
231+
Role: llm.RoleUser,
232+
Parts: []llm.Segment{llm.Text("Do hamsters eat cats!")},
233+
},
234+
)
235+
236+
for segment := range output.Stream {
237+
fmt.Print(segment)
238+
}
239+
fmt.Println()
240+
241+
if output.Err != nil {
242+
t.Error(output.Err)
243+
return
244+
}
245+
}

0 commit comments

Comments
 (0)