diff --git a/index.ts b/index.ts index 58372b4..0eb6bd6 100644 --- a/index.ts +++ b/index.ts @@ -25,7 +25,7 @@ const openai = new OpenAI({ }); // Define supported models -const SUPPORTED_MODELS = ["gpt-4o", "gpt-4o-mini", "o1-preview", "o1-mini"] as const; +const SUPPORTED_MODELS = ["gpt-4o", "gpt-4o-mini", "o1-preview", "o1-mini", "o1", "o3-mini", "o3-mini-low", "o3-mini-high"] as const; const DEFAULT_MODEL = "gpt-4o" as const; type SupportedModel = typeof SUPPORTED_MODELS[number]; @@ -99,22 +99,34 @@ server.setRequestHandler(CallToolRequestSchema, async (request): Promise<{ messages: Array<{ role: string; content: string }>; model?: SupportedModel; }; - + // Validate model if (!SUPPORTED_MODELS.includes(model!)) { throw new Error(`Unsupported model: ${model}. Must be one of: ${SUPPORTED_MODELS.join(", ")}`); } - + + // Map o3-mini variants: always use "o3-mini" as the model value, with reasoning_effort based on variant. + let targetModel = model!; + let extraParams: Record<string, string> = {}; + if (model === "o3-mini-high") { + targetModel = "o3-mini"; + extraParams = { reasoning_effort: "high" }; + } else if (model === "o3-mini-low") { + targetModel = "o3-mini"; + extraParams = { reasoning_effort: "low" }; + } + // Convert messages to OpenAI's expected format const messages: ChatCompletionMessageParam[] = rawMessages.map(msg => ({ role: msg.role as "system" | "user" | "assistant", content: msg.content })); - - // Call OpenAI API with fixed temperature + + // Call OpenAI API with fixed temperature and reasoning model parameters const completion = await openai.chat.completions.create({ messages, - model: model! 
+ model: targetModel, + ...extraParams }); // Return the response @@ -147,4 +159,5 @@ const transport = new StdioServerTransport(); server.connect(transport).catch((error) => { console.error("Failed to start server:", error); process.exit(1); -}); \ No newline at end of file +}); + diff --git a/package.json b/package.json index e2c2199..b7a0d2a 100644 --- a/package.json +++ b/package.json @@ -12,7 +12,7 @@ "bugs": "https://github.com/mzxrai/mcp-openai/issues", "type": "module", "bin": { - "mcp-server-webresearch": "dist/index.js" + "mcp-openai": "dist/index.js" }, "files": [ "dist" @@ -42,4 +42,5 @@ "tsx": "^4.19.2", "typescript": "^5.6.2" } -} \ No newline at end of file +} +