Skip to content

Commit

Permalink
openai: Rename MaxTokens to MaxCompletionTokens
Browse files Browse the repository at this point in the history
  • Loading branch information
tmc committed Sep 12, 2024
1 parent b5d8bd0 commit 75c2ca4
Show file tree
Hide file tree
Showing 3 changed files with 36 additions and 31 deletions.
24 changes: 13 additions & 11 deletions llms/openai/internal/openaiclient/chat.go
Original file line number Diff line number Diff line change
Expand Up @@ -29,17 +29,19 @@ type StreamOptions struct {

// ChatRequest is a request to complete a chat completion.
type ChatRequest struct {
Model string `json:"model"`
Messages []*ChatMessage `json:"messages"`
Temperature float64 `json:"temperature"`
TopP float64 `json:"top_p,omitempty"`
MaxTokens int `json:"max_tokens,omitempty"`
N int `json:"n,omitempty"`
StopWords []string `json:"stop,omitempty"`
Stream bool `json:"stream,omitempty"`
FrequencyPenalty float64 `json:"frequency_penalty,omitempty"`
PresencePenalty float64 `json:"presence_penalty,omitempty"`
Seed int `json:"seed,omitempty"`
Model string `json:"model"`
Messages []*ChatMessage `json:"messages"`
Temperature float64 `json:"temperature"`
TopP float64 `json:"top_p,omitempty"`
// Deprecated: use MaxCompletionTokens instead.
MaxTokens int `json:"-"`
MaxCompletionTokens int `json:"max_completion_tokens,omitempty"`
N int `json:"n,omitempty"`
StopWords []string `json:"stop,omitempty"`
Stream bool `json:"stream,omitempty"`
FrequencyPenalty float64 `json:"frequency_penalty,omitempty"`
PresencePenalty float64 `json:"presence_penalty,omitempty"`
Seed int `json:"seed,omitempty"`

// ResponseFormat is the format of the response.
ResponseFormat *ResponseFormat `json:"response_format,omitempty"`
Expand Down
40 changes: 21 additions & 19 deletions llms/openai/internal/openaiclient/completions.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,16 +6,18 @@ import (

// CompletionRequest is a request to complete a completion.
type CompletionRequest struct {
Model string `json:"model"`
Prompt string `json:"prompt"`
Temperature float64 `json:"temperature"`
MaxTokens int `json:"max_tokens,omitempty"`
N int `json:"n,omitempty"`
FrequencyPenalty float64 `json:"frequency_penalty,omitempty"`
PresencePenalty float64 `json:"presence_penalty,omitempty"`
TopP float64 `json:"top_p,omitempty"`
StopWords []string `json:"stop,omitempty"`
Seed int `json:"seed,omitempty"`
Model string `json:"model"`
Prompt string `json:"prompt"`
Temperature float64 `json:"temperature"`
// Deprecated: use MaxCompletionTokens instead.
MaxTokens int `json:"-"`
MaxCompletionTokens int `json:"max_completion_tokens,omitempty"`
N int `json:"n,omitempty"`
FrequencyPenalty float64 `json:"frequency_penalty,omitempty"`
PresencePenalty float64 `json:"presence_penalty,omitempty"`
TopP float64 `json:"top_p,omitempty"`
StopWords []string `json:"stop,omitempty"`
Seed int `json:"seed,omitempty"`

// StreamingFunc is a function to be called for each chunk of a streaming response.
// Return an error to stop streaming early.
Expand Down Expand Up @@ -78,14 +80,14 @@ func (c *Client) createCompletion(ctx context.Context, payload *CompletionReques
Messages: []*ChatMessage{
{Role: "user", Content: payload.Prompt},
},
Temperature: payload.Temperature,
TopP: payload.TopP,
MaxTokens: payload.MaxTokens,
N: payload.N,
StopWords: payload.StopWords,
FrequencyPenalty: payload.FrequencyPenalty,
PresencePenalty: payload.PresencePenalty,
StreamingFunc: payload.StreamingFunc,
Seed: payload.Seed,
Temperature: payload.Temperature,
TopP: payload.TopP,
MaxCompletionTokens: payload.MaxTokens,
N: payload.N,
StopWords: payload.StopWords,
FrequencyPenalty: payload.FrequencyPenalty,
PresencePenalty: payload.PresencePenalty,
StreamingFunc: payload.StreamingFunc,
Seed: payload.Seed,
})
}
3 changes: 2 additions & 1 deletion llms/openai/openaillm.go
Original file line number Diff line number Diff line change
Expand Up @@ -101,11 +101,12 @@ func (o *LLM) GenerateContent(ctx context.Context, messages []llms.MessageConten
Messages: chatMsgs,
StreamingFunc: opts.StreamingFunc,
Temperature: opts.Temperature,
MaxTokens: opts.MaxTokens,
N: opts.N,
FrequencyPenalty: opts.FrequencyPenalty,
PresencePenalty: opts.PresencePenalty,

MaxCompletionTokens: opts.MaxTokens,

ToolChoice: opts.ToolChoice,
FunctionCallBehavior: openaiclient.FunctionCallBehavior(opts.FunctionCallBehavior),
Seed: opts.Seed,
Expand Down

0 comments on commit 75c2ca4

Please sign in to comment.