Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
142 changes: 105 additions & 37 deletions src/adapter/openai-to-cli.ts
Original file line number Diff line number Diff line change
@@ -1,78 +1,150 @@
/**
* Convert OpenAI Chat Completion requests into a prompt string
* suitable for the Cursor CLI `agent -p` command.
*
* Model IDs are aligned with `agent --list-models` (update when Cursor ships new models).
*/

import type { OpenAIChatMessage, OpenAIChatRequest, OpenAIContentPart } from "../types/openai.js";

/**
 * Real `agent --model` IDs accepted verbatim by the Cursor CLI.
 * Keep in sync with `agent --list-models` periodically.
 *
 * NOTE: a few friendly names in here ("opus-4.6", "gemini-3-pro", "grok", …)
 * also appear as keys of MODEL_ALIASES; since resolveOne consults the alias
 * table first, the alias mapping wins for those ids.
 */
const KNOWN_CURSOR_MODELS = new Set([
  "auto",
  // Composer family
  "composer-2",
  "composer-2-fast",
  "composer-1.5",
  "composer-1",
  // Claude 4.6
  "claude-4.6-sonnet-medium",
  "claude-4.6-sonnet-medium-thinking",
  "claude-4.6-opus-high",
  "claude-4.6-opus-high-thinking",
  "claude-4.6-opus-max",
  "claude-4.6-opus-max-thinking",
  // Claude 4.5
  "claude-4.5-sonnet",
  "claude-4.5-sonnet-thinking",
  "claude-4.5-opus-high",
  "claude-4.5-opus-high-thinking",
  // Claude 4
  "claude-4-sonnet",
  "claude-4-sonnet-1m",
  "claude-4-sonnet-thinking",
  "claude-4-sonnet-1m-thinking",
  // GPT-5.4
  "gpt-5.4-low",
  "gpt-5.4-medium",
  "gpt-5.4-medium-fast",
  "gpt-5.4-high",
  "gpt-5.4-high-fast",
  "gpt-5.4-xhigh",
  "gpt-5.4-xhigh-fast",
  "gpt-5.4-mini-none",
  "gpt-5.4-mini-low",
  "gpt-5.4-mini-medium",
  "gpt-5.4-mini-high",
  "gpt-5.4-mini-xhigh",
  "gpt-5.4-nano-none",
  "gpt-5.4-nano-low",
  "gpt-5.4-nano-medium",
  "gpt-5.4-nano-high",
  "gpt-5.4-nano-xhigh",
  // GPT-5.3 codex
  "gpt-5.3-codex",
  "gpt-5.3-codex-low",
  "gpt-5.3-codex-high",
  "gpt-5.3-codex-xhigh",
  "gpt-5.3-codex-fast",
  "gpt-5.3-codex-low-fast",
  "gpt-5.3-codex-high-fast",
  "gpt-5.3-codex-xhigh-fast",
  "gpt-5.3-codex-spark-preview",
  "gpt-5.3-codex-spark-preview-low",
  "gpt-5.3-codex-spark-preview-high",
  "gpt-5.3-codex-spark-preview-xhigh",
  // GPT-5.2
  "gpt-5.2",
  "gpt-5.2-low",
  "gpt-5.2-low-fast",
  "gpt-5.2-fast",
  "gpt-5.2-high",
  "gpt-5.2-high-fast",
  "gpt-5.2-xhigh",
  "gpt-5.2-xhigh-fast",
  "gpt-5.2-codex",
  "gpt-5.2-codex-high",
  "gpt-5.2-codex-low",
  "gpt-5.2-codex-xhigh",
  "gpt-5.2-codex-low-fast",
  "gpt-5.2-codex-fast",
  "gpt-5.2-codex-high-fast",
  "gpt-5.2-codex-xhigh-fast",
  // GPT-5.1 codex-max / codex-mini
  "gpt-5.1-codex-max",
  "gpt-5.1-codex-max-high",
  "gpt-5.1-codex-max-low",
  "gpt-5.1-codex-max-low-fast",
  "gpt-5.1-codex-max-medium",
  "gpt-5.1-codex-max-medium-fast",
  "gpt-5.1-codex-max-high-fast",
  "gpt-5.1-codex-max-xhigh",
  "gpt-5.1-codex-max-xhigh-fast",
  "gpt-5.1-codex-mini-low",
  "gpt-5.1-codex-mini",
  "gpt-5.1-codex-mini-high",
  // GPT-5.1 / GPT-5
  "gpt-5.1",
  "gpt-5.1-low",
  "gpt-5.1-high",
  "gpt-5-mini",
  // Claude friendly ids (also alias keys — see note above)
  "opus-4.6-thinking",
  "sonnet-4.5-thinking",
  "opus-4.6",
  "opus-4.5",
  "opus-4.5-thinking",
  "sonnet-4.5",
  // Gemini
  "gemini-3-pro",
  "gemini-3.1-pro",
  "gemini-3-flash",
  // Grok / Kimi
  "grok",
  "grok-4-20",
  "grok-4-20-thinking",
  "kimi-k2.5",
]);

/**
 * Friendly / legacy model names mapped to the real CLI ids.
 * Checked before KNOWN_CURSOR_MODELS, so an alias always wins.
 * (Backward compatibility for clients using the short names.)
 */
const MODEL_ALIASES: Record<string, string> = {
  // Claude 4.6 short names
  "sonnet-4.6": "claude-4.6-sonnet-medium",
  "sonnet-4.6-thinking": "claude-4.6-sonnet-medium-thinking",
  "opus-4.6": "claude-4.6-opus-high",
  "opus-4.6-thinking": "claude-4.6-opus-high-thinking",
  "opus-4.6-max": "claude-4.6-opus-max",
  "opus-4.6-max-thinking": "claude-4.6-opus-max-thinking",
  // Claude 4.5 / 4 short names
  "sonnet-4.5": "claude-4.5-sonnet",
  "sonnet-4.5-thinking": "claude-4.5-sonnet-thinking",
  "opus-4.5": "claude-4.5-opus-high",
  "opus-4.5-thinking": "claude-4.5-opus-high-thinking",
  "sonnet-4": "claude-4-sonnet",
  "sonnet-4-thinking": "claude-4-sonnet-thinking",
  // GPT families default to the medium effort tier
  "gpt-5.4": "gpt-5.4-medium",
  "gpt-5.4-mini": "gpt-5.4-mini-medium",
  "gpt-5.4-nano": "gpt-5.4-nano-medium",
  "gpt-5.1-codex-max": "gpt-5.1-codex-max-medium",
  // Misc legacy names
  "gemini-3-pro": "gemini-3.1-pro",
  "grok": "grok-4-20",
};

/** Fully resolved input for one Cursor CLI invocation (`agent -p <prompt> --model <model>`). */
export interface CliInput {
/** Flattened prompt text handed to `agent -p`. */
prompt: string;
/** Resolved Cursor CLI model id (output of extractModel / resolveOne). */
model: string;
}

/**
 * Resolve one bare model id to a real CLI id: the alias table takes
 * precedence, then known CLI ids pass through verbatim, and anything
 * unrecognized falls back to "auto".
 */
function resolveOne(id: string): string {
  const aliased = MODEL_ALIASES[id];
  if (aliased) return aliased;
  return KNOWN_CURSOR_MODELS.has(id) ? id : "auto";
}

/**
* Resolve the Cursor CLI model name from an OpenAI-style model string.
*
* Supported formats:
* "cursor/opus-4.6" -> "opus-4.6"
* "cursor-opus-4.6" -> "opus-4.6"
* "auto" -> "auto"
* "opus-4.6-thinking" -> "opus-4.6-thinking"
*/
/**
 * Resolve the Cursor CLI model name from an OpenAI-style model string.
 *
 * Supported formats:
 *   "cursor/opus-4.6"   -> alias-resolved id (e.g. "claude-4.6-opus-high")
 *   "cursor-opus-4.6"   -> alias-resolved id
 *   "auto"              -> "auto"
 *   "opus-4.6-thinking" -> alias-resolved id
 *
 * Fix: the previous body contained merged-diff residue — an early `return`
 * made the alias-resolution lines unreachable for the "cursor/" prefix, and
 * the "cursor-" branch duplicated its condition. All three paths now funnel
 * through resolveOne so aliases and unknown ids behave consistently.
 */
export function extractModel(model: string): string {
  if (model.startsWith("cursor/")) {
    // Empty remainder ("cursor/") degrades to "auto".
    const id = model.slice("cursor/".length) || "auto";
    return resolveOne(id);
  }

  if (model.startsWith("cursor-")) {
    const remainder = model.slice("cursor-".length);
    if (remainder) return resolveOne(remainder);
    // Bare "cursor-" falls through to the default resolution below.
  }

  return resolveOne(model);
}

function messageContentToText(content: string | OpenAIContentPart[]): string {
Expand All @@ -86,10 +158,6 @@ function messageContentToText(content: string | OpenAIContentPart[]): string {

/**
* Flatten an array of OpenAI messages into a single prompt string.
*
* When there's only one user message (the common case), pass the text
* directly without role markers to keep the prompt clean.
* Multi-turn conversations get [System]/[User]/[Assistant] prefixes.
*/
export function messagesToPrompt(messages: OpenAIChatMessage[]): string {
const nonEmpty = messages.filter((m) => {
Expand Down
41 changes: 35 additions & 6 deletions src/server/routes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,16 +14,39 @@ import {
} from "../adapter/cli-to-openai.js";
import type { OpenAIChatRequest } from "../types/openai.js";

/**
* IDs advertised by GET /v1/models — real CLI IDs plus friendly aliases
* (aliases must match MODEL_ALIASES in openai-to-cli.ts).
*/
const KNOWN_MODELS = [
"auto",
"composer-2",
"composer-2-fast",
"composer-1.5",
"composer-1",
"opus-4.6-thinking",
"claude-4.6-sonnet-medium",
"claude-4.6-sonnet-medium-thinking",
"claude-4.6-opus-high",
"claude-4.6-opus-high-thinking",
"claude-4.6-opus-max",
"claude-4.6-opus-max-thinking",
"claude-4.5-sonnet",
"claude-4.5-sonnet-thinking",
"claude-4.5-opus-high",
"claude-4.5-opus-high-thinking",
"claude-4-sonnet",
"claude-4-sonnet-thinking",
"sonnet-4.6",
"opus-4.6",
"opus-4.5-thinking",
"opus-4.5",
"sonnet-4.5-thinking",
"opus-4.6-thinking",
"sonnet-4.5",
"sonnet-4.5-thinking",
"opus-4.5",
"opus-4.5-thinking",
"gpt-5.4-low",
"gpt-5.4-medium",
"gpt-5.4-high",
"gpt-5.4-xhigh",
"gpt-5.3-codex",
"gpt-5.3-codex-fast",
"gpt-5.3-codex-low",
Expand All @@ -37,10 +60,16 @@ const KNOWN_MODELS = [
"gpt-5.2-codex",
"gpt-5.2-codex-low",
"gpt-5.2-codex-low-fast",
"gpt-5.1-codex-max",
"gemini-3-pro",
"gpt-5.1-codex-max-low",
"gpt-5.1-codex-max-medium",
"gpt-5.1-codex-max-high",
"gpt-5.1-codex-mini",
"gemini-3.1-pro",
"gemini-3-flash",
"gemini-3-pro",
"grok-4-20",
"grok",
"kimi-k2.5",
];

function extractApiKey(req: Request): string | undefined {
Expand Down
4 changes: 4 additions & 0 deletions src/subprocess/manager.ts
Original file line number Diff line number Diff line change
Expand Up @@ -124,6 +124,10 @@ export class CursorSubprocess extends EventEmitter {
"stream-json",
"--stream-partial-output",
"--yolo",
// Non-interactive HTTP proxy: no TTY and no user to answer prompts.
// --trust lets the CLI skip trust/workspace confirmation that would
// otherwise block or exit without emitting stream-json results.
"--trust",
];

if (options.model && options.model !== "auto") {
Expand Down