1010 required : true
1111 type : number
1212 llm_provider :
13- description : " LLM provider"
13+ description : "LLM provider (optional: openai, gemini, anthropic)"
1414 required : false
1515 default : " "
16- type : choice
17- options :
18- - " "
19- - openai
20- - gemini
21- - anthropic
16+ type : string
2217 llm_model :
23- description : " Model name (provider-specific)"
18+ description : "Model name (provider-specific, e.g. gpt-5.4, gpt-5.4-pro, gpt-5.3-codex)"
2419 required : false
2520 default : " "
2621 type : string
@@ -223,14 +218,88 @@ jobs:
223218
224219 const userPrompt = promptParts.join("\n");
225220
221+ function getOpenAIReasoningEffort(model) {
222+ const normalized = String(model || "").trim().toLowerCase();
223+ if (!normalized) return null;
224+ if (normalized === "gpt-5.4" || normalized === "gpt-5.4-pro" || normalized === "gpt-5.3-codex") {
225+ return "xhigh";
226+ }
227+ if (normalized === "gpt-5-pro") {
228+ return "high";
229+ }
230+ if (/^gpt-5(?:[.-]|$)/.test(normalized)) {
231+ return "high";
232+ }
233+ return null;
234+ }
235+
// Pull the assistant text out of an OpenAI Responses API payload.
// Prefers the convenience `output_text` field when it holds non-blank text;
// otherwise gathers every textual content piece from `message` output items
// and joins them with newlines. Returns "" when nothing textual is found.
function extractResponsesText(data) {
  const direct = data?.output_text;
  if (typeof direct === "string" && direct.trim()) {
    return direct.trim();
  }

  const isTextual = (piece) =>
    (piece?.type === "output_text" || piece?.type === "text") &&
    typeof piece.text === "string";

  const pieces = (data?.output || [])
    .filter((item) => item?.type === "message")
    .flatMap((item) => item.content || [])
    .filter(isTextual)
    .map((piece) => piece.text);

  return pieces.join("\n").trim();
}
253+
226254 async function callOpenAI({ apiKey, baseUrl, model, messages }) {
227255 if (!apiKey) throw new Error("OPENAI_API_KEY is not set.");
256+ const normalizedModel = String(model || "").trim().toLowerCase();
257+ const reasoningEffort = getOpenAIReasoningEffort(normalizedModel);
258+
259+ if (normalizedModel === "gpt-5.3-codex") {
260+ const url = `${baseUrl.replace(/\/$/, "")}/responses`;
261+ const payload = {
262+ model,
263+ input: messages.map(message => ({
264+ role: message.role,
265+ content: message.content,
266+ })),
267+ max_output_tokens: 2048,
268+ };
269+
270+ if (reasoningEffort) {
271+ payload.reasoning = { effort: reasoningEffort };
272+ }
273+
274+ const resp = await fetch(url, {
275+ method: "POST",
276+ headers: {
277+ "Authorization": `Bearer ${apiKey}`,
278+ "Content-Type": "application/json",
279+ },
280+ body: JSON.stringify(payload),
281+ });
282+
283+ if (!resp.ok) {
284+ const text = await resp.text();
285+ throw new Error(`OpenAI Responses API error (${resp.status}): ${text}`);
286+ }
287+
288+ const data = await resp.json();
289+ const content = extractResponsesText(data);
290+ if (!content) throw new Error("OpenAI Responses API returned no content.");
291+ return { content, reasoningEffort };
292+ }
293+
228294 const url = `${baseUrl.replace(/\/$/, "")}/chat/completions`;
229295 const payload = { model, messages };
230296
231297 const isGpt5ish = /gpt-?5/i.test(model) || /^o\d/i.test(model) || /^o1/i.test(model);
232298 if (isGpt5ish) {
233299 payload.max_completion_tokens = 2048;
300+ if (reasoningEffort) {
301+ payload.reasoning_effort = reasoningEffort;
302+ }
234303 } else {
235304 payload.max_tokens = 2048;
236305 payload.temperature = 0.2;
@@ -253,7 +322,7 @@ jobs:
253322 const data = await resp.json();
254323 const content = data?.choices?.[0]?.message?.content;
255324 if (!content) throw new Error("OpenAI API returned no content.");
256- return content;
325+ return { content, reasoningEffort } ;
257326 }
258327
259328 async function callGemini({ apiKey, model, prompt }) {
@@ -317,9 +386,10 @@ jobs:
317386 }
318387
319388 let reviewText = "";
389+ let reasoningEffort = "";
320390 try {
321391 if (provider === "openai") {
322- reviewText = await callOpenAI({
392+ const openAIResult = await callOpenAI({
323393 apiKey: process.env.OPENAI_API_KEY,
324394 baseUrl: process.env.OPENAI_BASE_URL,
325395 model,
@@ -328,6 +398,8 @@ jobs:
328398 { role: "user", content: userPrompt },
329399 ],
330400 });
401+ reviewText = openAIResult.content;
402+ reasoningEffort = openAIResult.reasoningEffort || "";
331403 } else if (provider === "gemini") {
332404 reviewText = await callGemini({
333405 apiKey: process.env.GEMINI_API_KEY,
@@ -353,6 +425,7 @@ jobs:
353425 marker,
354426 `Provider: ${provider}`,
355427 `Model: ${model}`,
428+ ...(reasoningEffort ? [`Reasoning effort: ${reasoningEffort}`] : []),
356429 "",
357430 reviewText,
358431 ].join("\n");
0 commit comments