src/engine/openAi.ts (8 changes: 7 additions & 1 deletion)
@@ -6,6 +6,12 @@ import { removeContentTags } from '../utils/removeContentTags';
import { tokenCount } from '../utils/tokenCount';
import { AiEngine, AiEngineConfig } from './Engine';

+const gpt5_models = [
+  "gpt-5",
+  "gpt-5-mini",
+  "gpt-5-nano"
+]

export interface OpenAiConfig extends AiEngineConfig {}

export class OpenAiEngine implements AiEngine {
@@ -39,7 +45,7 @@ export class OpenAiEngine implements AiEngine {
const params = {
model: this.config.model,
messages,
-temperature: 0,
+temperature: gpt5_models.includes(this.config.model) ? 1 : 0,
top_p: 0.1,
max_tokens: this.config.maxTokensOutput
@404pilo commented on Oct 4, 2025:

For the gpt-5 model family, the API expects this property to be max_completion_tokens instead of max_tokens; with max_tokens the request fails with the error below (a rough sketch of a conditional fix follows after the diff):

  error: {
    message: "Unsupported parameter: 'max_tokens' is not supported with this model. Use 'max_completion_tokens' instead.",
    type: 'invalid_request_error',
    param: 'max_tokens',
    code: 'unsupported_parameter'
  },
  code: 'unsupported_parameter',
  param: 'max_tokens',
  type: 'invalid_request_error'
}
│
└  ✖ 400 Unsupported parameter: 'max_tokens' is not supported with this model. Use 'max_completion_tokens' instead.

};
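A minimal sketch, not part of the PR, of how both gpt-5 adjustments could be handled in one place: the temperature override from the diff above plus the max_completion_tokens switch described in the comment. buildChatParams, ChatMessage, and gpt5Models are illustrative names only (the PR's own array is gpt5_models), and whether the typed SDK accepts max_completion_tokens depends on the installed openai package version.

// Sketch only, under the assumptions stated above; not the repository's implementation.
type ChatMessage = { role: 'system' | 'user' | 'assistant'; content: string };

const gpt5Models = ['gpt-5', 'gpt-5-mini', 'gpt-5-nano'];

function buildChatParams(
  model: string,
  messages: ChatMessage[],
  maxTokensOutput: number
) {
  const isGpt5 = gpt5Models.includes(model);
  return {
    model,
    messages,
    // gpt-5 models only accept the default temperature of 1
    temperature: isGpt5 ? 1 : 0,
    top_p: 0.1,
    // gpt-5 models reject max_tokens and expect max_completion_tokens instead
    ...(isGpt5
      ? { max_completion_tokens: maxTokensOutput }
      : { max_tokens: maxTokensOutput })
  };
}

The conditional spread keeps only one of the two token-limit keys in the request object, so older models keep the existing max_tokens behavior unchanged.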