Add temperature support to conversational api (#3566)
Signed-off-by: yaron2 <[email protected]>
yaron2 authored Oct 14, 2024
1 parent 28d46f6 commit 1cbedb3
Showing 5 changed files with 40 additions and 4 deletions.
8 changes: 7 additions & 1 deletion conversation/anthropic/anthropic.go
@@ -92,7 +92,13 @@ func (a *Anthropic) Converse(ctx context.Context, r *conversation.ConversationRe
 		})
 	}
 
-	resp, err := a.llm.GenerateContent(ctx, messages)
+	opts := []llms.CallOption{}
+
+	if r.Temperature > 0 {
+		opts = append(opts, conversation.LangchainTemperature(r.Temperature))
+	}
+
+	resp, err := a.llm.GenerateContent(ctx, messages, opts...)
 	if err != nil {
 		return nil, err
 	}
8 changes: 7 additions & 1 deletion conversation/aws/bedrock/bedrock.go
@@ -104,7 +104,13 @@ func (b *AWSBedrock) Converse(ctx context.Context, r *conversation.ConversationR
 		})
 	}
 
-	resp, err := b.llm.GenerateContent(ctx, messages)
+	opts := []llms.CallOption{}
+
+	if r.Temperature > 0 {
+		opts = append(opts, conversation.LangchainTemperature(r.Temperature))
+	}
+
+	resp, err := b.llm.GenerateContent(ctx, messages, opts...)
 	if err != nil {
 		return nil, err
 	}
1 change: 1 addition & 0 deletions conversation/converse.go
@@ -42,6 +42,7 @@ type ConversationRequest struct {
 	Inputs              []ConversationInput   `json:"inputs"`
 	Parameters          map[string]*anypb.Any `json:"parameters"`
 	ConversationContext string                `json:"conversationContext"`
+	Temperature         float64               `json:"temperature"`
 
 	// from metadata
 	Key string `json:"key"`
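Not part of the commit, but for orientation, a minimal caller-side sketch of where the new Temperature field fits into a request. The Converse parameter types match the hunks above; the ConversationInput Message field, the Conversation interface name, and the dapr/components-contrib import path are assumptions for illustration.

package example

import (
	"context"

	"github.com/dapr/components-contrib/conversation"
)

// converseWithTemperature builds a request that carries the new Temperature field.
func converseWithTemperature(ctx context.Context, c conversation.Conversation) error {
	req := conversation.ConversationRequest{
		Inputs: []conversation.ConversationInput{
			{Message: "Summarize the release notes."}, // Message field assumed for illustration
		},
		// New in this commit: Anthropic and Bedrock apply it only when > 0,
		// while OpenAI forwards it directly as float32.
		Temperature: 0.2,
	}

	_, err := c.Converse(ctx, &req)
	return err
}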
5 changes: 3 additions & 2 deletions conversation/openai/openai.go
@@ -111,8 +111,9 @@ func (o *OpenAI) Converse(ctx context.Context, r *conversation.ConversationReque
 	}
 
 	req := openai.ChatCompletionRequest{
-		Model:    o.model,
-		Messages: messages,
+		Model:       o.model,
+		Messages:    messages,
+		Temperature: float32(r.Temperature),
 	}
 
 	// TODO: support ConversationContext
22 changes: 22 additions & 0 deletions conversation/temperature.go
@@ -0,0 +1,22 @@
+/*
+Copyright 2024 The Dapr Authors
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+package conversation
+
+import "github.com/tmc/langchaingo/llms"
+
+// LangchainTemperature returns a langchain compliant LLM temperature
+func LangchainTemperature(temperature float64) llms.CallOption {
+	return llms.WithTemperature(temperature)
+}
