Skip to content

Commit 1ad88a3

Browse files
committed
Deprecate and refactor
- Deprecate `ErrO1BetaLimitationsLogprobs` and `ErrO1BetaLimitationsOther` - Implement `validateOSeriesRequest`, which works on both o1 & o3 models and applies per-model-type restrictions on functionality (e.g., the o3 class allows function calls and system messages, while o1 does not)
1 parent 24e00aa commit 1ad88a3

File tree

4 files changed

+52
-65
lines changed

4 files changed

+52
-65
lines changed

chat.go

+1-5
Original file line numberDiff line numberDiff line change
@@ -392,11 +392,7 @@ func (c *Client) CreateChatCompletion(
392392
return
393393
}
394394

395-
if err = validateRequestForO1Models(request); err != nil {
396-
return
397-
}
398-
399-
if err = validateRequestForO3Models(request); err != nil {
395+
if err = validateOSeriesRequest(request); err != nil {
400396
return
401397
}
402398

chat_stream.go

+1-5
Original file line numberDiff line numberDiff line change
@@ -80,11 +80,7 @@ func (c *Client) CreateChatCompletionStream(
8080
}
8181

8282
request.Stream = true
83-
if err = validateRequestForO1Models(request); err != nil {
84-
return
85-
}
86-
87-
if err = validateRequestForO3Models(request); err != nil {
83+
if err = validateOSeriesRequest(request); err != nil {
8884
return
8985
}
9086

chat_test.go

+8-8
Original file line numberDiff line numberDiff line change
@@ -64,15 +64,15 @@ func TestO1ModelsChatCompletionsDeprecatedFields(t *testing.T) {
6464
MaxTokens: 5,
6565
Model: openai.O1Preview,
6666
},
67-
expectedError: openai.ErrO1MaxTokensDeprecated,
67+
expectedError: openai.ErrO3MaxTokensDeprecated,
6868
},
6969
{
7070
name: "o1-mini_MaxTokens_deprecated",
7171
in: openai.ChatCompletionRequest{
7272
MaxTokens: 5,
7373
Model: openai.O1Mini,
7474
},
75-
expectedError: openai.ErrO1MaxTokensDeprecated,
75+
expectedError: openai.ErrO3MaxTokensDeprecated,
7676
},
7777
}
7878

@@ -104,7 +104,7 @@ func TestO1ModelsChatCompletionsBetaLimitations(t *testing.T) {
104104
LogProbs: true,
105105
Model: openai.O1Preview,
106106
},
107-
expectedError: openai.ErrO1BetaLimitationsLogprobs,
107+
expectedError: openai.ErrO3BetaLimitationsLogprobs,
108108
},
109109
{
110110
name: "message_type_unsupported",
@@ -155,7 +155,7 @@ func TestO1ModelsChatCompletionsBetaLimitations(t *testing.T) {
155155
},
156156
Temperature: float32(2),
157157
},
158-
expectedError: openai.ErrO1BetaLimitationsOther,
158+
expectedError: openai.ErrO3BetaLimitationsOther,
159159
},
160160
{
161161
name: "set_top_unsupported",
@@ -173,7 +173,7 @@ func TestO1ModelsChatCompletionsBetaLimitations(t *testing.T) {
173173
Temperature: float32(1),
174174
TopP: float32(0.1),
175175
},
176-
expectedError: openai.ErrO1BetaLimitationsOther,
176+
expectedError: openai.ErrO3BetaLimitationsOther,
177177
},
178178
{
179179
name: "set_n_unsupported",
@@ -192,7 +192,7 @@ func TestO1ModelsChatCompletionsBetaLimitations(t *testing.T) {
192192
TopP: float32(1),
193193
N: 2,
194194
},
195-
expectedError: openai.ErrO1BetaLimitationsOther,
195+
expectedError: openai.ErrO3BetaLimitationsOther,
196196
},
197197
{
198198
name: "set_presence_penalty_unsupported",
@@ -209,7 +209,7 @@ func TestO1ModelsChatCompletionsBetaLimitations(t *testing.T) {
209209
},
210210
PresencePenalty: float32(1),
211211
},
212-
expectedError: openai.ErrO1BetaLimitationsOther,
212+
expectedError: openai.ErrO3BetaLimitationsOther,
213213
},
214214
{
215215
name: "set_frequency_penalty_unsupported",
@@ -226,7 +226,7 @@ func TestO1ModelsChatCompletionsBetaLimitations(t *testing.T) {
226226
},
227227
FrequencyPenalty: float32(0.1),
228228
},
229-
expectedError: openai.ErrO1BetaLimitationsOther,
229+
expectedError: openai.ErrO3BetaLimitationsOther,
230230
},
231231
}
232232

completion.go

+42-47
Original file line numberDiff line numberDiff line change
@@ -7,20 +7,23 @@ import (
77
)
88

99
var (
10+
// Deprecated: use ErrO3MaxTokensDeprecated instead.
1011
ErrO1MaxTokensDeprecated = errors.New("this model is not supported MaxTokens, please use MaxCompletionTokens") //nolint:lll
1112
ErrCompletionUnsupportedModel = errors.New("this model is not supported with this method, please use CreateChatCompletion client method instead") //nolint:lll
1213
ErrCompletionStreamNotSupported = errors.New("streaming is not supported with this method, please use CreateCompletionStream") //nolint:lll
1314
ErrCompletionRequestPromptTypeNotSupported = errors.New("the type of CompletionRequest.Prompt only supports string and []string") //nolint:lll
1415
)
1516

1617
var (
17-
ErrO1BetaLimitationsMessageTypes = errors.New("this model has beta-limitations, user and assistant messages only, system messages are not supported") //nolint:lll
18-
ErrO1BetaLimitationsTools = errors.New("this model has beta-limitations, tools, function calling, and response format parameters are not supported") //nolint:lll
19-
ErrO1BetaLimitationsLogprobs = errors.New("this model has beta-limitations, logprobs not supported") //nolint:lll
20-
ErrO1BetaLimitationsOther = errors.New("this model has beta-limitations, temperature, top_p and n are fixed at 1, while presence_penalty and frequency_penalty are fixed at 0") //nolint:lll
18+
ErrO1BetaLimitationsMessageTypes = errors.New("this model has beta-limitations, user and assistant messages only, system messages are not supported") //nolint:lll
19+
ErrO1BetaLimitationsTools = errors.New("this model has beta-limitations, tools, function calling, and response format parameters are not supported") //nolint:lll
20+
// Deprecated: use ErrO3BetaLimitations* instead.
21+
ErrO1BetaLimitationsLogprobs = errors.New("this model has beta-limitations, logprobs not supported") //nolint:lll
22+
ErrO1BetaLimitationsOther = errors.New("this model has beta-limitations, temperature, top_p and n are fixed at 1, while presence_penalty and frequency_penalty are fixed at 0") //nolint:lll
2123
)
2224

2325
var (
26+
ErrO3MaxTokensDeprecated = errors.New("this model is not supported MaxTokens, please use MaxCompletionTokens")
2427
ErrO3BetaLimitationsLogprobs = errors.New("this model has beta-limitations, logprobs not supported") //nolint:lll
2528
ErrO3BetaLimitationsOther = errors.New("this model has beta-limitations, temperature, top_p and n are fixed at 1, while presence_penalty and frequency_penalty are fixed at 0") //nolint:lll
2629
)
@@ -207,67 +210,44 @@ var availableMessageRoleForO1Models = map[string]struct{}{
207210
ChatMessageRoleAssistant: {},
208211
}
209212

210-
// validateRequestForO1Models checks for deprecated fields of OpenAI models.
211-
func validateRequestForO1Models(request ChatCompletionRequest) error {
212-
if _, found := O1SeriesModels[request.Model]; !found {
213+
// validateOSeriesRequest checks for fields in requests which are not allowed for o-series models
214+
// this includes: o1 does not support function calling, or system messages.
215+
func validateOSeriesRequest(request ChatCompletionRequest) error {
216+
_, o1Series := O1SeriesModels[request.Model]
217+
_, o3Series := O3SeriesModels[request.Model]
218+
219+
if !o1Series && !o3Series {
213220
return nil
214221
}
215222

216-
if request.MaxTokens > 0 {
217-
return ErrO1MaxTokensDeprecated
223+
if err := validateBasicParams(request); err != nil {
224+
return err
218225
}
219226

220-
// Logprobs: not supported.
221-
if request.LogProbs {
222-
return ErrO1BetaLimitationsLogprobs
227+
if err := validateModelParams(request); err != nil {
228+
return err
223229
}
224230

225-
// Message types: user and assistant messages only, system messages are not supported.
226-
for _, m := range request.Messages {
227-
if _, found := availableMessageRoleForO1Models[m.Role]; !found {
228-
return ErrO1BetaLimitationsMessageTypes
231+
if o1Series {
232+
if err := validateO1Specific(request); err != nil {
233+
return err
229234
}
230235
}
231236

232-
// Tools: tools, function calling, and response format parameters are not supported
233-
for _, t := range request.Tools {
234-
if _, found := unsupportedToolsForO1Models[t.Type]; found {
235-
return ErrO1BetaLimitationsTools
236-
}
237-
}
238-
239-
// Other: temperature, top_p and n are fixed at 1, while presence_penalty and frequency_penalty are fixed at 0.
240-
if request.Temperature > 0 && request.Temperature != 1 {
241-
return ErrO1BetaLimitationsOther
242-
}
243-
if request.TopP > 0 && request.TopP != 1 {
244-
return ErrO1BetaLimitationsOther
245-
}
246-
if request.N > 0 && request.N != 1 {
247-
return ErrO1BetaLimitationsOther
248-
}
249-
if request.PresencePenalty > 0 {
250-
return ErrO1BetaLimitationsOther
251-
}
252-
if request.FrequencyPenalty > 0 {
253-
return ErrO1BetaLimitationsOther
254-
}
255-
256237
return nil
257238
}
258239

259-
// validateRequestForO3Models checks for deprecated fields of OpenAI models.
260-
func validateRequestForO3Models(request ChatCompletionRequest) error {
261-
if _, found := O3SeriesModels[request.Model]; !found {
262-
return nil
240+
func validateBasicParams(request ChatCompletionRequest) error {
241+
if request.MaxTokens > 0 {
242+
return ErrO3MaxTokensDeprecated
263243
}
264-
265-
// Logprobs: not supported.
266244
if request.LogProbs {
267245
return ErrO3BetaLimitationsLogprobs
268246
}
247+
return nil
248+
}
269249

270-
// Other: temperature, top_p and n are fixed at 1, while presence_penalty and frequency_penalty are fixed at 0.
250+
func validateModelParams(request ChatCompletionRequest) error {
271251
if request.Temperature > 0 && request.Temperature != 1 {
272252
return ErrO3BetaLimitationsOther
273253
}
@@ -287,6 +267,21 @@ func validateRequestForO3Models(request ChatCompletionRequest) error {
287267
return nil
288268
}
289269

270+
func validateO1Specific(request ChatCompletionRequest) error {
271+
for _, m := range request.Messages {
272+
if _, found := availableMessageRoleForO1Models[m.Role]; !found {
273+
return ErrO1BetaLimitationsMessageTypes
274+
}
275+
}
276+
277+
for _, t := range request.Tools {
278+
if _, found := unsupportedToolsForO1Models[t.Type]; found {
279+
return ErrO1BetaLimitationsTools
280+
}
281+
}
282+
return nil
283+
}
284+
290285
// CompletionRequest represents a request structure for completion API.
291286
type CompletionRequest struct {
292287
Model string `json:"model"`

0 commit comments

Comments
 (0)