diff --git a/README.md b/README.md
index 124abaa..5e6be27 100644
--- a/README.md
+++ b/README.md
@@ -29,12 +29,13 @@
## ⚡ Quick Start (Community Edition)
-Get PROMPTC running in your local environment in under 60 seconds. Our *Plug & Play* installer automatically configures the engine and connects it with **Claude Desktop**.
+Get PROMPTC running in your local environment in under 60 seconds. Our *Plug & Play* installer compiles the engine and registers it as a local MCP server for **Codex**.
**Prerequisites:**
* macOS (M1/M2/M3) or Linux.
-* Claude Desktop installed.
-* A [Google AI Studio API Key](https://aistudio.google.com/app/apikey).
+* Codex CLI or Codex app installed.
+* An [OpenAI API key](https://platform.openai.com/api-keys) for cloud fallback.
+* Optional: a [Google AI Studio API Key](https://aistudio.google.com/app/apikey) as secondary fallback.
Run in your terminal:
@@ -42,6 +43,13 @@ Run in your terminal:
curl -sSL https://raw.githubusercontent.com/andesdevroot/promptc/master/install.sh | bash
```
+The installer now:
+* compiles `promptc` into `~/.promptc/promptc`
+* registers `PROMPTC` in Codex through `codex mcp add`
+* injects `PROMPTC_MACMINI_IP`, `PROMPTC_MCP_CLIENT=codex-desktop`, `OPENAI_API_KEY`, optional `OPENAI_MODEL`, and optional `GEMINI_API_KEY`
+
+If Codex is not available yet, the installer leaves a ready-to-run helper script at `~/.promptc/codex-mcp-setup.sh`.
+
---
## 🏗️ Architecture: Private-First AI (Dual-Tier)
@@ -50,7 +58,8 @@ In regulated sectors like **Mining, Banking, and Legal**, business logic is a cr
### 1. Community Mode (Agile Development)
* **Orchestration:** Ultra-lightweight local binary execution.
-* **Inference:** Routes optimization to **Gemini 1.5 Pro** transparently.
+* **Client:** Designed for **Codex** as the MCP-native operator surface.
+* **Inference:** Routes optimization to **OpenAI** transparently when local sovereignty is unavailable, with optional **Gemini** secondary fallback.
### 2. Enterprise Mode (Air-Gapped / Full Sovereignty)
* **Orchestration:** Local interceptor for all outgoing prompts.
@@ -73,6 +82,8 @@ In regulated sectors like **Mining, Banking, and Legal**, business logic is a cr
* **Static Go Binary**: Zero dependencies, runtime-free, and high performance (RAM < 15MB).
* **Prompt-as-Code (PaC)**: Manage templates through versioned, pre-certified components.
* **Deterministic Compilation**: Transforms ambiguous language into structured Markdown (Role, Context, Task, Constraints).
+* **Codex-Ready Local Ops**: Works as a stdio MCP server that Codex can register and call locally.
+* **OpenAI Responses API Fallback**: Uses the official `/v1/responses` API for cloud optimization when the local node is unavailable.
---
diff --git a/cmd/promptc/config.go b/cmd/promptc/config.go
index ecbef5b..a53fa2b 100644
--- a/cmd/promptc/config.go
+++ b/cmd/promptc/config.go
@@ -21,7 +21,8 @@ var configCmd = &cobra.Command{
reader := bufio.NewReader(os.Stdin)
fmt.Println(cli.ColorCyan + "¿Qué proveedor de IA deseas usar?" + cli.ColorReset)
- fmt.Println("1) Google Gemini")
+ fmt.Println("1) OpenAI")
+ fmt.Println("2) Google Gemini")
fmt.Print(cli.ColorYellow + "> " + cli.ColorReset)
providerOption, _ := reader.ReadString('\n')
@@ -30,9 +31,11 @@ var configCmd = &cobra.Command{
var provider string
switch providerOption {
case "1":
+ provider = "openai"
+ case "2":
provider = "gemini"
default:
- provider = "gemini"
+ provider = "openai"
}
fmt.Printf("\n🔑 Ingresa tu API Key:\n")
@@ -41,9 +44,23 @@ var configCmd = &cobra.Command{
apiKey, _ := reader.ReadString('\n')
apiKey = strings.TrimSpace(apiKey)
+ openAIModel := ""
+ if provider == "openai" {
+ fmt.Printf("\n🧠 Modelo OpenAI (Enter para usar gpt-5.4-mini):\n")
+ fmt.Print(cli.ColorYellow + "> " + cli.ColorReset)
+ openAIModel, _ = reader.ReadString('\n')
+ openAIModel = strings.TrimSpace(openAIModel)
+ }
+
cfg := config.AppConfig{
- Provider: provider,
- APIKey: apiKey,
+ Provider: provider,
+ APIKey: apiKey,
+ OpenAIModel: openAIModel,
+ }
+ if provider == "openai" {
+ cfg.OpenAIAPIKey = apiKey
+ } else {
+ cfg.GeminiAPIKey = apiKey
}
config.Save(cfg)
diff --git a/cmd/promptc/fix.go b/cmd/promptc/fix.go
index ba67036..5e7e3c7 100644
--- a/cmd/promptc/fix.go
+++ b/cmd/promptc/fix.go
@@ -4,6 +4,7 @@ import (
"context"
"fmt"
"os"
+ "strings"
"github.com/andesdevroot/promptc/internal/cli"
"github.com/andesdevroot/promptc/internal/config"
@@ -18,7 +19,12 @@ var fixCmd = &cobra.Command{
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
cli.PrintBanner()
- cfg, _ := config.Load()
+ cfg, err := config.Load()
+ if err != nil {
+ fmt.Println("Error cargando configuración:", err)
+ os.Exit(1)
+ }
+
p, err := parser.ParseFile(args[0])
if err != nil {
fmt.Println("Error:", err)
@@ -26,43 +32,49 @@ var fixCmd = &cobra.Command{
}
ctx := context.Background()
- promptcSDK, _ := sdk.NewSDK(ctx, cfg.APIKey, "")
+ openAIKey := strings.TrimSpace(cfg.OpenAIAPIKey)
+ geminiKey := strings.TrimSpace(cfg.GeminiAPIKey)
- analysis := promptcSDK.Analyze(p)
-
- analysisResult, ok := analysis.(map[string]interface{})
- if !ok {
- fmt.Println("Error: invalid analysis result type")
- os.Exit(1)
+ switch cfg.Provider {
+ case "openai":
+ if openAIKey == "" {
+ openAIKey = strings.TrimSpace(cfg.APIKey)
+ }
+ case "gemini":
+ if geminiKey == "" {
+ geminiKey = strings.TrimSpace(cfg.APIKey)
+ }
}
- score, ok := analysisResult["Score"].(float64)
- if !ok {
- fmt.Println("Error: Score field not found or invalid type")
- os.Exit(1)
+ if envKey := strings.TrimSpace(os.Getenv("OPENAI_API_KEY")); envKey != "" {
+ openAIKey = envKey
+ }
+ if envKey := strings.TrimSpace(os.Getenv("GEMINI_API_KEY")); envKey != "" {
+ geminiKey = envKey
}
- fmt.Printf("Score: %d/100\n", int(score))
- analysisResult, ok = analysis.(map[string]interface{})
- if !ok {
- fmt.Println("Error: invalid analysis result type")
- os.Exit(1)
+ openAIModel := strings.TrimSpace(cfg.OpenAIModel)
+ if envModel := strings.TrimSpace(os.Getenv("OPENAI_MODEL")); envModel != "" {
+ openAIModel = envModel
}
- isReliable, ok := analysisResult["IsReliable"].(bool)
- if !ok {
- fmt.Println("Error: IsReliable field not found or invalid type")
+ promptcSDK, err := sdk.NewSDK(ctx, openAIKey, openAIModel, geminiKey, os.Getenv("PROMPTC_MACMINI_IP"))
+ if err != nil {
+ fmt.Println("Error inicializando SDK:", err)
os.Exit(1)
}
- if !isReliable {
+ analysis := promptcSDK.Analyze(p)
+ fmt.Printf("Score: %d/100\n", analysis.Score)
+
+ if !analysis.IsReliable {
optimized, err := promptcSDK.Optimize(ctx, p)
if err != nil {
fmt.Printf("\n❌ Error Crítico: %v\n", err)
os.Exit(1)
}
cli.PrintSuccess("\n✨ Prompt Optimizado:")
- fmt.Println("\n" + fmt.Sprint(optimized))
+ fmt.Println("\n" + optimized)
} else {
output, _ := promptcSDK.Engine.Compile(p)
fmt.Println("\n" + output)
diff --git a/cmd/promptc/main.go b/cmd/promptc/main.go
index 4f51004..df2c454 100644
--- a/cmd/promptc/main.go
+++ b/cmd/promptc/main.go
@@ -9,6 +9,7 @@ import (
"os"
"os/signal"
"runtime"
+ "strings"
"sync"
"sync/atomic"
"syscall"
@@ -44,6 +45,8 @@ type Template struct {
Content string `json:"content"`
}
+const appVersion = "0.3.1"
+
// --- SISTEMA DE AUDITORÍA ---
// AuditEvent representa un evento estructurado de auditoría.
// Cada evento tiene tipo semántico, actor, recurso y resultado.
@@ -51,7 +54,7 @@ type AuditEvent struct {
Timestamp string `json:"ts"`
Type string `json:"type"` // KERNEL | MCP | TEMPLATE | INFERENCE | POLICY | SYSTEM
Action string `json:"action"`
- Actor string `json:"actor"` // claude-desktop | promptc-engine | mac-mini | gemini
+ Actor string `json:"actor"` // codex-desktop | promptc-engine | mac-mini | gemini
Resource string `json:"resource,omitempty"`
Result string `json:"result"` // OK | FAIL | WARN
LatencyMs int64 `json:"latency_ms,omitempty"`
@@ -89,7 +92,7 @@ func auditLog(evt AuditEvent) {
}
hub.Unlock()
- // 2. A stderr (visible en mcp.log de Claude Desktop)
+ // 2. A stderr (visible en el cliente MCP, por ejemplo Codex Desktop)
fmt.Fprintf(os.Stderr, "%s\n", line)
// 3. Al archivo de auditoría append-only (registro regulatorio)
@@ -228,7 +231,7 @@ func startMetricsPersistence() {
}()
}
-func recordInference(success bool, latencyMs int64, tokens int64, usedGemini bool) {
+func recordInference(success bool, latencyMs int64, tokens int64, usedCloudFallback bool) {
atomic.AddInt64(&metrics.InferenceCount, 1)
atomic.AddInt64(&metrics.TotalLatencyMs, latencyMs)
atomic.AddInt64(&metrics.TotalTokens, tokens)
@@ -237,7 +240,7 @@ func recordInference(success bool, latencyMs int64, tokens int64, usedGemini boo
} else {
atomic.AddInt64(&metrics.InferenceFail, 1)
}
- if usedGemini {
+ if usedCloudFallback {
atomic.AddInt64(&metrics.GeminiCallCount, 1)
}
if atomic.LoadInt64(&metrics.InferenceCount)%10 == 0 {
@@ -354,7 +357,7 @@ func startHeartbeat(remoteIP string) {
Actor: "mac-mini",
Resource: remoteIP + ":11434",
Result: "WARN",
- Detail: "Nodo no responde — activando fallback Gemini",
+ Detail: "Nodo no responde — activando fallback cloud",
})
}
}
@@ -566,7 +569,7 @@ const dashboardHTML = `
@@ -597,7 +600,7 @@ const dashboardHTML = `
ok:0 / err:0
- Gemini Quota
+ Cloud Fallback
0
calls hoy
@@ -808,7 +811,7 @@ func startDashboard() {
metrics.Unlock()
json.NewEncoder(w).Encode(map[string]interface{}{
"status": "ok",
- "version": "0.3.0",
+ "version": appVersion,
"node_online": nodeOnline,
"last_heartbeat": lastHeartbeat,
"templates_count": tmplCount,
@@ -850,7 +853,7 @@ func startDashboard() {
var startTime = time.Now()
// --- TOOL HANDLERS ---
-func handleToolCall(req JSONRPCMessage, app *sdk.PromptC) {
+func handleToolCall(req JSONRPCMessage, app *sdk.PromptC, mcpClient string) {
var call struct {
Name string `json:"name"`
Arguments json.RawMessage `json:"arguments"`
@@ -859,7 +862,7 @@ func handleToolCall(req JSONRPCMessage, app *sdk.PromptC) {
auditLog(AuditEvent{
Type: "MCP",
Action: "TOOL_PARSE_ERROR",
- Actor: "claude-desktop",
+ Actor: mcpClient,
Result: "FAIL",
Detail: err.Error(),
})
@@ -871,11 +874,11 @@ func handleToolCall(req JSONRPCMessage, app *sdk.PromptC) {
return
}
- // Evento MCP: Claude Desktop invocó una herramienta
+ // Evento MCP: el cliente activo invocó una herramienta
auditLog(AuditEvent{
Type: "MCP",
Action: "TOOL_INVOKED",
- Actor: "claude-desktop",
+ Actor: mcpClient,
Resource: call.Name,
Result: "OK",
Detail: "Solicitud recibida vía MCP stdio",
@@ -1001,7 +1004,12 @@ func handleToolCall(req JSONRPCMessage, app *sdk.PromptC) {
metrics.Unlock()
inferenceActor := "mac-mini"
if !nodeOnline {
- inferenceActor = "gemini-cloud"
+ inferenceActor = "cloud-fallback"
+ if strings.TrimSpace(os.Getenv("OPENAI_API_KEY")) != "" {
+ inferenceActor = "openai-cloud"
+ } else if strings.TrimSpace(os.Getenv("GEMINI_API_KEY")) != "" {
+ inferenceActor = "gemini-cloud"
+ }
}
auditLog(AuditEvent{
@@ -1072,7 +1080,7 @@ func handleToolCall(req JSONRPCMessage, app *sdk.PromptC) {
auditLog(AuditEvent{
Type: "MCP",
Action: "TOOL_NOT_FOUND",
- Actor: "claude-desktop",
+ Actor: mcpClient,
Resource: call.Name,
Result: "FAIL",
Detail: "Herramienta no registrada en el servidor MCP",
@@ -1115,8 +1123,18 @@ func main() {
// 5. Persistencia periódica
startMetricsPersistence()
+ // 6. Cliente MCP
+ mcpClient := os.Getenv("PROMPTC_MCP_CLIENT")
+ if mcpClient == "" {
+ mcpClient = "codex-desktop"
+ }
+
+ openAIKey := strings.TrimSpace(os.Getenv("OPENAI_API_KEY"))
+ openAIModel := strings.TrimSpace(os.Getenv("OPENAI_MODEL"))
+ geminiKey := strings.TrimSpace(os.Getenv("GEMINI_API_KEY"))
+
-	// 6. SDK
+	// 7. SDK
- app, err := sdk.NewSDK(context.Background(), os.Getenv("GEMINI_API_KEY"), remoteIP)
+ app, err := sdk.NewSDK(context.Background(), openAIKey, openAIModel, geminiKey, remoteIP)
if err != nil {
fmt.Fprintf(os.Stderr, "[SDK_ERROR] %v — continuando sin optimizadores\n", err)
}
@@ -1143,7 +1161,9 @@ func main() {
Action: "BOOT",
Actor: "promptc-engine",
Result: "OK",
- Detail: fmt.Sprintf("PROMPTC v0.3.0 iniciado — nodo=%s templates=%d inferencias_previas=%d",
+ Detail: fmt.Sprintf("PROMPTC v%s iniciado — cliente=%s nodo=%s templates=%d inferencias_previas=%d",
+ appVersion,
+ mcpClient,
remoteIP,
len(hub.Templates),
atomic.LoadInt64(&metrics.InferenceCount),
@@ -1166,13 +1186,13 @@ func main() {
auditLog(AuditEvent{
Type: "MCP",
Action: "HANDSHAKE_INIT",
- Actor: "claude-desktop",
+ Actor: mcpClient,
Result: "OK",
Detail: "Protocolo MCP 2024-11-05 — negociación iniciada",
})
sendResponse(req.ID, map[string]interface{}{
"protocolVersion": "2024-11-05",
- "serverInfo": map[string]string{"name": "PROMPTC", "version": "0.3.0"},
+ "serverInfo": map[string]string{"name": "PROMPTC", "version": appVersion},
"capabilities": map[string]interface{}{"tools": map[string]interface{}{}},
})
@@ -1180,7 +1200,7 @@ func main() {
auditLog(AuditEvent{
Type: "MCP",
Action: "HANDSHAKE_CONFIRMED",
- Actor: "claude-desktop",
+ Actor: mcpClient,
Result: "OK",
Detail: "Canal MCP establecido — herramientas disponibles",
})
@@ -1189,7 +1209,7 @@ func main() {
auditLog(AuditEvent{
Type: "MCP",
Action: "TOOLS_LIST_REQUESTED",
- Actor: "claude-desktop",
+ Actor: mcpClient,
Result: "OK",
Detail: "Enviando schema de 2 herramientas: get_template, optimize_prompt",
})
@@ -1211,7 +1231,7 @@ func main() {
},
{
"name": "optimize_prompt",
- "description": "Compila y optimiza un prompt usando el Mac Mini vía Tailscale con fallback a Gemini. Acepta template_name para usar una plantilla como base con resolución automática de variables.",
+ "description": "Compila y optimiza un prompt usando el Mac Mini vía Tailscale con fallback a OpenAI y Gemini. Acepta template_name para usar una plantilla como base con resolución automática de variables.",
"inputSchema": map[string]interface{}{
"type": "object",
"required": []string{"role", "context", "task"},
@@ -1251,7 +1271,7 @@ func main() {
})
case "tools/call":
- handleToolCall(req, app)
+ handleToolCall(req, app, mcpClient)
}
}
diff --git a/cmd/promptc/root.go b/cmd/promptc/root.go
new file mode 100644
index 0000000..096450d
--- /dev/null
+++ b/cmd/promptc/root.go
@@ -0,0 +1,27 @@
+package main
+
+import (
+ "fmt"
+ "os"
+
+ "github.com/spf13/cobra"
+)
+
+var rootCmd = &cobra.Command{
+ Use: "promptc",
+ Short: "PROMPTC CLI para compilacion de prompts industriales",
+ Long: "PROMPTC expone un servidor MCP y utilidades CLI para compilacion, analisis y operacion auditable de prompts industriales.",
+ Run: func(cmd *cobra.Command, args []string) {
+ if err := cmd.Help(); err != nil {
+ fmt.Println("Error:", err)
+ os.Exit(1)
+ }
+ },
+}
+
+func execute() {
+ if err := rootCmd.Execute(); err != nil {
+ fmt.Println("Error:", err)
+ os.Exit(1)
+ }
+}
diff --git a/cmd/promptc/version.go b/cmd/promptc/version.go
index 2bc1bd8..82b0ecb 100644
--- a/cmd/promptc/version.go
+++ b/cmd/promptc/version.go
@@ -8,9 +8,9 @@ import (
var versionCmd = &cobra.Command{
Use: "version",
- Short: "Muestra la versión",
+ Short: "Muestra la versión de PROMPTC",
Run: func(cmd *cobra.Command, args []string) {
- fmt.Println("PromptC v0.1.0-alpha")
+	fmt.Println("PROMPTC v" + appVersion + " (Codex-Ready Industrial MCP)")
},
}
diff --git a/install.sh b/install.sh
index 90665e8..409014d 100755
--- a/install.sh
+++ b/install.sh
@@ -1,12 +1,11 @@
#!/bin/bash
set -e
-# --- UI & Branding ---
CYAN='\033[1;36m'
GREEN='\033[1;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
-NC='\033[0m' # No Color
+NC='\033[0m'
echo -e "${CYAN}"
echo " ____ ____ ____ __ ______ __________ "
@@ -15,31 +14,38 @@ echo " / /_/ / /_/ / / / / /|_/ / /_/ // / / / "
echo " / ____/ _, _/ /_/ / / / / ____// / / /___ "
echo "/_/ /_/ |_|\____/_/ /_/_/ /_/ \____/ "
echo -e "${NC}"
-echo "=> Iniciando instalación de PROMPTC v0.3.1 (Community Edition)..."
+echo "=> Iniciando instalación de PROMPTC v0.3.1 (Codex Edition)..."
echo ""
-# --- 1. Verificación de Dependencias ---
-if ! command -v go &> /dev/null; then
+if ! command -v go >/dev/null 2>&1; then
echo -e "${RED}[FATAL] Go no está instalado.${NC} Descárgalo desde https://go.dev/dl/ e inténtalo de nuevo."
exit 1
fi
-if ! command -v python3 &> /dev/null; then
- echo -e "${RED}[FATAL] Python3 no está instalado.${NC} Es necesario para configurar Claude."
+if ! command -v git >/dev/null 2>&1; then
+ echo -e "${RED}[FATAL] Git no está instalado.${NC} Es necesario para clonar PROMPTC."
exit 1
fi
-# --- 2. Preparación de Entorno ---
PROMPTC_DIR="$HOME/.promptc"
echo "=> Creando directorio base en $PROMPTC_DIR..."
mkdir -p "$PROMPTC_DIR"
-# --- 3. Obtención de la API Key (Lectura directa del TTY) ---
-echo -n "=> Pega tu GEMINI_API_KEY (Presiona Enter si prefieres configurarla luego): "
+echo -n "=> Pega tu OPENAI_API_KEY (Enter para omitir fallback cloud): "
+read -r USER_OPENAI_KEY < /dev/tty || true
+
+echo -n "=> Modelo OpenAI [gpt-5.4-mini]: "
+read -r USER_OPENAI_MODEL < /dev/tty || true
+USER_OPENAI_MODEL="${USER_OPENAI_MODEL:-gpt-5.4-mini}"
+
+echo -n "=> Pega tu GEMINI_API_KEY (Enter para fallback secundario opcional): "
read -r USER_GEMINI_KEY < /dev/tty || true
-export USER_GEMINI_KEY
-# --- 4. Descarga y Compilación ---
+DEFAULT_REMOTE_IP="100.90.6.101"
+echo -n "=> PROMPTC_MACMINI_IP [$DEFAULT_REMOTE_IP]: "
+read -r USER_REMOTE_IP < /dev/tty || true
+USER_REMOTE_IP="${USER_REMOTE_IP:-$DEFAULT_REMOTE_IP}"
+
echo "=> Descargando código fuente desde GitHub (rama master)..."
TEMP_DIR=$(mktemp -d)
git clone -q -b master https://github.com/andesdevroot/promptc.git "$TEMP_DIR"
@@ -48,50 +54,50 @@ echo "=> Compilando binario estático optimizado..."
cd "$TEMP_DIR"
-go build -ldflags="-s -w" -o "$PROMPTC_DIR/promptc" ./cmd/promptc/main.go
+go build -ldflags="-s -w" -o "$PROMPTC_DIR/promptc" ./cmd/promptc
-# --- 5. Inyección de Configuración en Claude Desktop ---
-echo "=> Inyectando servidor MCP en Claude Desktop..."
-
-# Usamos comillas simples para proteger el script de Python de la expansión de Bash
-python3 -c '
-import json, os
-
-path = os.path.expanduser("~/Library/Application Support/Claude/claude_desktop_config.json")
-data = {"mcpServers": {}}
-
-if os.path.exists(path):
- try:
- with open(path, "r") as f:
- data = json.load(f)
- except Exception as e:
- print(f" [WARN] No se pudo leer config previa: {e}")
-
-if "mcpServers" not in data:
- data["mcpServers"] = {}
-
-api_key = os.environ.get("USER_GEMINI_KEY", "")
-env_vars = {}
-if api_key:
- env_vars["GEMINI_API_KEY"] = api_key
+ENV_ARGS=(--env "PROMPTC_MCP_CLIENT=codex-desktop" --env "PROMPTC_MACMINI_IP=$USER_REMOTE_IP")
+if [ -n "$USER_OPENAI_KEY" ]; then
+ ENV_ARGS+=(--env "OPENAI_API_KEY=$USER_OPENAI_KEY" --env "OPENAI_MODEL=$USER_OPENAI_MODEL")
+fi
+if [ -n "$USER_GEMINI_KEY" ]; then
+ ENV_ARGS+=(--env "GEMINI_API_KEY=$USER_GEMINI_KEY")
+fi
-data["mcpServers"]["PROMPTC"] = {
- "command": os.path.expanduser("~/.promptc/promptc"),
- "args": ["-mode=community"],
- "env": env_vars
-}
+MANUAL_CMD="codex mcp add PROMPTC $(printf '%q ' "${ENV_ARGS[@]}")-- $PROMPTC_DIR/promptc"
-os.makedirs(os.path.dirname(path), exist_ok=True)
+if command -v codex >/dev/null 2>&1; then
+ echo "=> Registrando PROMPTC como servidor MCP en Codex..."
+ codex mcp remove PROMPTC >/dev/null 2>&1 || true
+ codex mcp add PROMPTC "${ENV_ARGS[@]}" -- "$PROMPTC_DIR/promptc"
+ CONFIG_STATUS="Codex configurado automáticamente"
+else
+ CONFIG_STATUS="Codex no detectado; configuración manual requerida"
+fi
-with open(path, "w") as f:
- json.dump(data, f, indent=2)
-'
+cat > "$PROMPTC_DIR/codex-mcp-setup.sh" <<SETUP
+#!/bin/bash
+set -e
+$MANUAL_CMD
+SETUP
+chmod +x "$PROMPTC_DIR/codex-mcp-setup.sh"
+
-echo -e "${YELLOW}>> PASO FINAL:${NC} Reinicia Claude Desktop (Cmd + Q) para aplicar los cambios."
\ No newline at end of file
+
+if ! command -v codex >/dev/null 2>&1; then
+ echo -e "${YELLOW}>> PASO FINAL:${NC} Instala Codex y luego ejecuta:"
+ echo " $MANUAL_CMD"
+else
+ echo -e "${YELLOW}>> PASO FINAL:${NC} Reinicia Codex para que tome el nuevo servidor MCP."
+fi
diff --git a/internal/config/config.go b/internal/config/config.go
index 801aae9..1951f94 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -9,8 +9,11 @@ import (
// AppConfig representa la estructura del archivo ~/.promptc/config.yaml
type AppConfig struct {
- Provider string `yaml:"provider"`
- APIKey string `yaml:"api_key"`
+ Provider string `yaml:"provider"`
+ APIKey string `yaml:"api_key"`
+ OpenAIAPIKey string `yaml:"openai_api_key,omitempty"`
+ OpenAIModel string `yaml:"openai_model,omitempty"`
+ GeminiAPIKey string `yaml:"gemini_api_key,omitempty"`
}
// getConfigPath resuelve la ruta absoluta al archivo de configuración del usuario
diff --git a/pkg/provider/openai.go b/pkg/provider/openai.go
new file mode 100644
index 0000000..ea03d97
--- /dev/null
+++ b/pkg/provider/openai.go
@@ -0,0 +1,146 @@
+package provider
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "strings"
+ "time"
+
+ "github.com/andesdevroot/promptc/pkg/core"
+)
+
+const defaultOpenAIModel = "gpt-5.4-mini"
+
+type OpenAIProvider struct {
+ APIKey string
+ Model string
+ BaseURL string
+ Client *http.Client
+}
+
+func NewOpenAIProvider(apiKey string, model string) *OpenAIProvider {
+ if strings.TrimSpace(model) == "" {
+ model = defaultOpenAIModel
+ }
+
+ return &OpenAIProvider{
+ APIKey: apiKey,
+ Model: model,
+ BaseURL: "https://api.openai.com/v1/responses",
+ Client: &http.Client{
+ Timeout: 60 * time.Second,
+ },
+ }
+}
+
+func (o *OpenAIProvider) Name() string {
+ return fmt.Sprintf("OpenAI (%s)", o.Model)
+}
+
+func (o *OpenAIProvider) Optimize(ctx context.Context, p core.Prompt, issues []string) (string, error) {
+ if strings.TrimSpace(o.APIKey) == "" {
+ return "", fmt.Errorf("OPENAI_API_KEY no configurada")
+ }
+
+ systemMsg := `Eres el motor de compilación PROMPTC.
+Devuelve exclusivamente el prompt final optimizado.
+
+REGLAS:
+1. IDIOMA: responde en español técnico.
+2. FORMATO: entrega un prompt final estructurado y accionable.
+3. SEGURIDAD: no inventes datos regulatorios ni hechos no presentes en el contexto.
+4. ESTILO: prioriza claridad, determinismo y cumplimiento.`
+
+ userMsg := fmt.Sprintf(
+ "Optimiza este prompt industrial corrigiendo los siguientes hallazgos: %s\n\nROL: %s\nCONTEXTO: %s\nTAREA: %s\nRESTRICCIONES: %s",
+ strings.Join(issues, ", "),
+ p.Role,
+ p.Context,
+ p.Task,
+ strings.Join(p.Constraints, " | "),
+ )
+
+ payload := map[string]interface{}{
+ "model": o.Model,
+ "instructions": systemMsg,
+ "input": []map[string]interface{}{
+ {
+ "role": "user",
+ "content": []map[string]string{
+ {
+ "type": "input_text",
+ "text": userMsg,
+ },
+ },
+ },
+ },
+ }
+
+ body, err := json.Marshal(payload)
+ if err != nil {
+ return "", err
+ }
+
+ req, err := http.NewRequestWithContext(ctx, http.MethodPost, o.BaseURL, bytes.NewBuffer(body))
+ if err != nil {
+ return "", err
+ }
+ req.Header.Set("Authorization", "Bearer "+o.APIKey)
+ req.Header.Set("Content-Type", "application/json")
+
+ resp, err := o.Client.Do(req)
+ if err != nil {
+ return "", err
+ }
+ defer resp.Body.Close()
+
+ var response struct {
+ Error *struct {
+ Message string `json:"message"`
+ } `json:"error,omitempty"`
+ Output []struct {
+ Type string `json:"type"`
+ Role string `json:"role"`
+ Content []struct {
+ Type string `json:"type"`
+ Text string `json:"text,omitempty"`
+ } `json:"content,omitempty"`
+ } `json:"output,omitempty"`
+ }
+
+	if err := json.NewDecoder(resp.Body).Decode(&response); err != nil {
+		// Cuerpo no-JSON (proxy, HTML, truncado): reporta el status HTTP en vez
+		// de un error de decodificación poco informativo.
+		if resp.StatusCode >= 400 {
+			return "", fmt.Errorf("openai responses api devolvió status %d", resp.StatusCode)
+		}
+		return "", err
+	}
+
+	if resp.StatusCode >= 400 {
+		if response.Error != nil && response.Error.Message != "" {
+			return "", fmt.Errorf("openai responses api: %s", response.Error.Message)
+		}
+		return "", fmt.Errorf("openai responses api devolvió status %d", resp.StatusCode)
+	}
+
+ var out strings.Builder
+ for _, item := range response.Output {
+ if item.Type != "message" || item.Role != "assistant" {
+ continue
+ }
+ for _, content := range item.Content {
+ if content.Type == "output_text" && strings.TrimSpace(content.Text) != "" {
+ if out.Len() > 0 {
+ out.WriteString("\n")
+ }
+ out.WriteString(content.Text)
+ }
+ }
+ }
+
+ text := strings.TrimSpace(out.String())
+ if text == "" {
+ return "", fmt.Errorf("openai no devolvió texto utilizable")
+ }
+
+ return text, nil
+}
diff --git a/pkg/sdk/sdk.go b/pkg/sdk/sdk.go
index cfd8885..3bee8db 100644
--- a/pkg/sdk/sdk.go
+++ b/pkg/sdk/sdk.go
@@ -2,6 +2,7 @@ package sdk
import (
"context"
+ "fmt"
"log"
"github.com/andesdevroot/promptc/pkg/core"
@@ -14,16 +15,41 @@ type PromptC struct {
Optimizers []core.Optimizer
}
-func (s *PromptC) Optimize(ctx context.Context, p core.Prompt) (any, any) {
- panic("unimplemented")
+func (s *PromptC) Optimize(ctx context.Context, p core.Prompt) (string, error) {
+ if s == nil || s.Engine == nil {
+ return "", fmt.Errorf("sdk no inicializado")
+ }
+
+ analysis := s.Engine.Analyze(p)
+
+ for _, opt := range s.Optimizers {
+ log.Printf("[SDK] Intentando con: %s", opt.Name())
+ optimized, err := opt.Optimize(ctx, p, analysis.Issues)
+ if err == nil {
+ return optimized, nil
+ }
+ log.Printf("[SDK] Error con %s: %v", opt.Name(), err)
+ }
+
+ return s.Engine.Compile(p)
}
-func (s *PromptC) Analyze(p core.Prompt) any {
- panic("unimplemented")
+func (s *PromptC) Analyze(p core.Prompt) core.Result {
+ if s == nil || s.Engine == nil {
+ return core.Result{
+ Score: 0,
+ IsReliable: false,
+ Issues: []string{"sdk no inicializado"},
+ Suggestions: []string{"Inicializa PROMPTC antes de ejecutar Analyze."},
+ }
+ }
+
+ return s.Engine.Analyze(p)
}
-// NewSDK ahora acepta 3 argumentos para incluir tu nodo de Tailscale
-func NewSDK(ctx context.Context, geminiKey string, remoteIP string) (*PromptC, error) {
+// NewSDK inicializa la cadena de optimizadores:
+// nodo local -> OpenAI -> Gemini.
+func NewSDK(ctx context.Context, openAIKey string, openAIModel string, geminiKey string, remoteIP string) (*PromptC, error) {
eng := engine.New()
var optimizers []core.Optimizer
@@ -32,6 +58,11 @@ func NewSDK(ctx context.Context, geminiKey string, remoteIP string) (*PromptC, e
optimizers = append(optimizers, provider.NewOllamaProvider(remoteIP))
}
+ // Respaldo cloud primario: OpenAI
+ if openAIKey != "" {
+ optimizers = append(optimizers, provider.NewOpenAIProvider(openAIKey, openAIModel))
+ }
+
// Respaldo: Gemini Cloud
if geminiKey != "" {
g, err := provider.NewGeminiProvider(ctx, geminiKey)
@@ -48,6 +79,10 @@ func NewSDK(ctx context.Context, geminiKey string, remoteIP string) (*PromptC, e
// CompileAndOptimize es el método que main.go intentaba llamar
func (s *PromptC) CompileAndOptimize(ctx context.Context, p core.Prompt) (string, error) {
+ if s == nil || s.Engine == nil {
+ return "", fmt.Errorf("sdk no inicializado")
+ }
+
analysis := s.Engine.Analyze(p)
// Si el prompt es perfecto, no gastamos ciclos de GPU