llms: Add Seed option to all supporting backends (#732)
* feat: add Seed in mistral

* feat: add Seed in mistral, openai (issue #723)
devalexandre authored Mar 31, 2024
1 parent 05ab264 · commit 0b63daa
Showing 4 changed files with 5 additions and 0 deletions.
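
Before the diffs, a minimal usage sketch of what this commit enables. It assumes llms.WithSeed is the call option that populates the CallOptions.Seed field read below, and that openai.New picks up OPENAI_API_KEY from the environment; both are assumptions for illustration, not code from the commit.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/llms/openai"
)

func main() {
	ctx := context.Background()

	// Assumption: openai.New reads OPENAI_API_KEY from the environment.
	llm, err := openai.New()
	if err != nil {
		log.Fatal(err)
	}

	// With a fixed seed, repeated calls with the same model, prompt, and
	// parameters should produce repeatable output (best effort on the
	// provider side, not a guarantee).
	out, err := llms.GenerateFromSinglePrompt(ctx, llm,
		"Give me a two-word name for a coffee shop.",
		llms.WithSeed(42),
		llms.WithTemperature(0.7),
	)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(out)
}
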
1 change: 1 addition & 0 deletions llms/mistral/mistralmodel.go
@@ -116,6 +116,7 @@ func mistralChatParamsFromCallOptions(callOpts *llms.CallOptions) sdk.ChatRequestParams
 	chatOpts := sdk.DefaultChatRequestParams
 	chatOpts.MaxTokens = callOpts.MaxTokens
 	chatOpts.Temperature = callOpts.Temperature
+	chatOpts.RandomSeed = callOpts.Seed
 	chatOpts.Tools = make([]sdk.Tool, 0)
 	for _, function := range callOpts.Functions {
 		chatOpts.Tools = append(chatOpts.Tools, sdk.Tool{
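
The Mistral SDK exposes this knob as RandomSeed, so the backend change is the one-line mapping above. A hedged sketch of an in-package test for that mapping (mistralChatParamsFromCallOptions is unexported, so this would live alongside mistralmodel.go; it is not part of the commit):

package mistral

import (
	"testing"

	"github.com/tmc/langchaingo/llms"
)

func TestSeedIsMappedToRandomSeed(t *testing.T) {
	// The seed arrives through the generic call options...
	callOpts := llms.CallOptions{MaxTokens: 256, Temperature: 0.2, Seed: 42}
	// ...and should come out as the Mistral SDK's RandomSeed.
	params := mistralChatParamsFromCallOptions(&callOpts)
	if params.RandomSeed != 42 {
		t.Fatalf("RandomSeed = %d, want 42", params.RandomSeed)
	}
}
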
1 change: 1 addition & 0 deletions llms/openai/internal/openaiclient/chat.go
@@ -32,6 +32,7 @@ type ChatRequest struct {
 	Stream           bool    `json:"stream,omitempty"`
 	FrequencyPenalty float64 `json:"frequency_penalty,omitempty"`
 	PresencePenalty  float64 `json:"presence_penalty,omitempty"`
+	Seed             int     `json:"seed,omitempty"`
 
 	// ResponseFormat is the format of the response.
 	ResponseFormat *ResponseFormat `json:"response_format,omitempty"`
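
One side effect of the omitempty tag: a zero seed is dropped from the request body entirely, so a caller cannot explicitly send seed 0, and the provider default (non-deterministic sampling) applies instead. A self-contained demonstration using a stripped-down mirror of the struct:

package main

import (
	"encoding/json"
	"fmt"
)

// A stripped-down mirror of ChatRequest, just to show the omitempty behavior.
type chatRequest struct {
	Model string `json:"model"`
	Seed  int    `json:"seed,omitempty"`
}

func main() {
	unseeded, _ := json.Marshal(chatRequest{Model: "gpt-4"})
	seeded, _ := json.Marshal(chatRequest{Model: "gpt-4", Seed: 42})
	fmt.Println(string(unseeded)) // {"model":"gpt-4"}
	fmt.Println(string(seeded))   // {"model":"gpt-4","seed":42}
}
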
2 changes: 2 additions & 0 deletions llms/openai/internal/openaiclient/completions.go
@@ -15,6 +15,7 @@ type CompletionRequest struct {
 	PresencePenalty float64  `json:"presence_penalty,omitempty"`
 	TopP            float64  `json:"top_p,omitempty"`
 	StopWords       []string `json:"stop,omitempty"`
+	Seed            int      `json:"seed,omitempty"`
 
 	// StreamingFunc is a function to be called for each chunk of a streaming response.
 	// Return an error to stop streaming early.
@@ -85,5 +86,6 @@ func (c *Client) createCompletion(ctx context.Context, payload *CompletionRequest
 		FrequencyPenalty: payload.FrequencyPenalty,
 		PresencePenalty:  payload.PresencePenalty,
 		StreamingFunc:    payload.StreamingFunc,
+		Seed:             payload.Seed,
 	})
 }
1 change: 1 addition & 0 deletions llms/openai/openaillm.go
@@ -108,6 +108,7 @@ func (o *LLM) GenerateContent(ctx context.Context, messages []llms.MessageContent
 		PresencePenalty: opts.PresencePenalty,
 
 		FunctionCallBehavior: openaiclient.FunctionCallBehavior(opts.FunctionCallBehavior),
+		Seed:                 opts.Seed,
 	}
 	if opts.JSONMode {
 		req.ResponseFormat = ResponseFormatJSON
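
GenerateContent is the entry point most callers use, so the same option flows through message-based requests too. A hedged continuation of the earlier sketch (llm and ctx as defined there; llms.TextParts building the message is an assumption about the helper, not part of this diff). Note that OpenAI documents seeded sampling as best-effort determinism, not a guarantee:

	resp, err := llm.GenerateContent(ctx,
		[]llms.MessageContent{
			llms.TextParts(llms.ChatMessageTypeHuman, "Summarize Go's context package in one sentence."),
		},
		llms.WithSeed(42),
	)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(resp.Choices[0].Content)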
