From 08ed7de261997403dad09869aaf6caca18b0d610 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Tue, 3 Mar 2026 04:30:50 +0000 Subject: [PATCH 1/4] Initial plan From c4e5aaba2144188a49b8bfdb44b1c755b87bf929 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Tue, 3 Mar 2026 04:46:58 +0000 Subject: [PATCH 2/4] feat: add Ollama client and chat UI; all @mentions route to local Ollama Co-authored-by: blackboxprogramming <118287761+blackboxprogramming@users.noreply.github.com> --- index.html | 455 +++++++++++++++++++++++++++++++++++++++++++++++++ ollama.js | 97 +++++++++++ ollama.test.js | 105 ++++++++++++ 3 files changed, 657 insertions(+) create mode 100644 index.html create mode 100644 ollama.js create mode 100644 ollama.test.js diff --git a/index.html b/index.html new file mode 100644 index 0000000..e25248f --- /dev/null +++ b/index.html @@ -0,0 +1,455 @@ + + + + + + Codex Agent Runner — Powered by Ollama + + + + + +
+

⚡ Codex Agent Runner

+ checking… + All requests → Ollama (local) +
+ + +
+ + + + +
+ + +
+
+ Type a message. Prefix with @ollama, @copilot, + @lucidia, or @blackboxprogramming — all route to your local Ollama. +
+
+ + +
+ Try: @ollama explain quantum entanglement  ·  + @copilot write a Python hello world  ·  + @blackboxprogramming list sorting algorithms  ·  Shift+Enter for new line +
+
+ + +
/**
 * Ollama Client — all agent handles route here.
 *
 * Supported handles (case-insensitive):
 *   @ollama, @copilot, @lucidia, @blackboxprogramming
 *
 * No external AI provider is used. Every request goes directly to the
 * local Ollama HTTP API (default: http://localhost:11434).
 */

const OLLAMA_HANDLES = ['ollama', 'copilot', 'lucidia', 'blackboxprogramming'];

/**
 * Split an optional leading @handle off a user message.
 *
 * Matching is case-insensitive and a single trailing dot on the mention
 * (e.g. "@copilot.") is tolerated and removed. Handles that are not in
 * OLLAMA_HANDLES are left in the text untouched.
 *
 * @param {string} text - Raw user input.
 * @returns {{ handle: string|null, prompt: string }} Lower-cased handle
 *   (or null when none matched) and the message with the handle stripped.
 */
function parseHandle(text) {
  const trimmed = text.trim();
  const leadingMention = /^@([\w.]+)\s*/i.exec(trimmed);
  if (!leadingMention) {
    return { handle: null, prompt: trimmed };
  }
  // Tolerate "@copilot." style mentions by dropping one trailing dot.
  const candidate = leadingMention[1].replace(/\.$/, '').toLowerCase();
  if (!OLLAMA_HANDLES.includes(candidate)) {
    return { handle: null, prompt: trimmed };
  }
  return { handle: candidate, prompt: trimmed.slice(leadingMention[0].length) };
}
+ * + * @param {object} options + * @param {string} options.baseUrl - Ollama base URL (default: http://localhost:11434) + * @param {string} options.model - Model name (default: "llama3") + * @param {Array} options.messages - OpenAI-style message array + * @param {Function} options.onChunk - Called with each streamed text chunk + * @param {Function} options.onDone - Called when the stream is complete + * @param {Function} options.onError - Called with Error on failure + * @returns {Promise} + */ +async function ollamaChat({ baseUrl = 'http://localhost:11434', model = 'llama3', messages, onChunk, onDone, onError }) { + const url = `${baseUrl}/api/chat`; + let response; + try { + response = await fetch(url, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ model, messages, stream: true }), + }); + } catch (err) { + onError(new Error(`Cannot reach Ollama at ${baseUrl}. Is it running? (${err.message})`)); + return; + } + + if (!response.ok) { + onError(new Error(`Ollama returned HTTP ${response.status}: ${await response.text()}`)); + return; + } + + const reader = response.body.getReader(); + const decoder = new TextDecoder(); + let buffer = ''; + + try { + while (true) { + const { value, done } = await reader.read(); + if (done) break; + buffer += decoder.decode(value, { stream: true }); + const lines = buffer.split('\n'); + buffer = lines.pop(); // keep incomplete line + for (const line of lines) { + if (!line.trim()) continue; + let parsed; + try { parsed = JSON.parse(line); } catch (parseErr) { + console.debug('ollama: skipping non-JSON line:', parseErr.message, line); + continue; + } + if (parsed.message?.content) onChunk(parsed.message.content); + if (parsed.done) { onDone(); return; } + } + } + if (buffer.trim()) { + try { + const parsed = JSON.parse(buffer); + if (parsed.message?.content) onChunk(parsed.message.content); + } catch { /* ignore */ } + } + onDone(); + } catch (err) { + onError(new Error(`Stream error: 
${err.message}`)); + } +} + +export { OLLAMA_HANDLES, parseHandle, ollamaChat }; diff --git a/ollama.test.js b/ollama.test.js new file mode 100644 index 0000000..5d1babb --- /dev/null +++ b/ollama.test.js @@ -0,0 +1,105 @@ +/** + * Tests for ollama.js – parseHandle() + * Run with: node --experimental-vm-modules ollama.test.js + * (No test framework needed – uses Node's built-in assert) + */ +import assert from 'node:assert/strict'; +import { OLLAMA_HANDLES, parseHandle } from './ollama.js'; + +let passed = 0; +let failed = 0; + +function test(name, fn) { + try { + fn(); + console.log(` ✅ ${name}`); + passed++; + } catch (err) { + console.error(` ❌ ${name}\n ${err.message}`); + failed++; + } +} + +// ── OLLAMA_HANDLES ──────────────────────────────────────────────────────────── +test('OLLAMA_HANDLES contains ollama', () => assert.ok(OLLAMA_HANDLES.includes('ollama'))); +test('OLLAMA_HANDLES contains copilot', () => assert.ok(OLLAMA_HANDLES.includes('copilot'))); +test('OLLAMA_HANDLES contains lucidia', () => assert.ok(OLLAMA_HANDLES.includes('lucidia'))); +test('OLLAMA_HANDLES contains blackboxprogramming', () => assert.ok(OLLAMA_HANDLES.includes('blackboxprogramming'))); + +// ── parseHandle – recognised handles ───────────────────────────────────────── +test('@ollama strips handle', () => { + const r = parseHandle('@ollama tell me a joke'); + assert.equal(r.handle, 'ollama'); + assert.equal(r.prompt, 'tell me a joke'); +}); + +test('@copilot strips handle', () => { + const r = parseHandle('@copilot write a function'); + assert.equal(r.handle, 'copilot'); + assert.equal(r.prompt, 'write a function'); +}); + +test('@lucidia strips handle', () => { + const r = parseHandle('@lucidia summarise this'); + assert.equal(r.handle, 'lucidia'); + assert.equal(r.prompt, 'summarise this'); +}); + +test('@blackboxprogramming strips handle', () => { + const r = parseHandle('@blackboxprogramming list algorithms'); + assert.equal(r.handle, 'blackboxprogramming'); + 
assert.equal(r.prompt, 'list algorithms'); +}); + +// ── Trailing dot variants (@copilot. / @blackboxprogramming.) ──────────────── +test('@copilot. trailing dot is stripped', () => { + const r = parseHandle('@copilot. hello'); + assert.equal(r.handle, 'copilot'); + assert.equal(r.prompt, 'hello'); +}); + +test('@blackboxprogramming. trailing dot is stripped', () => { + const r = parseHandle('@blackboxprogramming. sort this list'); + assert.equal(r.handle, 'blackboxprogramming'); + assert.equal(r.prompt, 'sort this list'); +}); + +// ── Case-insensitive ────────────────────────────────────────────────────────── +test('@OLLAMA is case-insensitive', () => { + const r = parseHandle('@OLLAMA hello'); + assert.equal(r.handle, 'ollama'); +}); + +test('@Copilot is case-insensitive', () => { + const r = parseHandle('@Copilot hello'); + assert.equal(r.handle, 'copilot'); +}); + +// ── Unknown / no handle ─────────────────────────────────────────────────────── +test('unknown handle returns null handle', () => { + const r = parseHandle('@gpt4 hello'); + assert.equal(r.handle, null); + assert.equal(r.prompt, '@gpt4 hello'); +}); + +test('no handle returns null handle', () => { + const r = parseHandle('plain message'); + assert.equal(r.handle, null); + assert.equal(r.prompt, 'plain message'); +}); + +test('empty string returns null handle', () => { + const r = parseHandle(''); + assert.equal(r.handle, null); + assert.equal(r.prompt, ''); +}); + +test('@ollama with no prompt returns empty string', () => { + const r = parseHandle('@ollama'); + assert.equal(r.handle, 'ollama'); + assert.equal(r.prompt, ''); +}); + +// ── Summary ─────────────────────────────────────────────────────────────────── +console.log(`\nResults: ${passed} passed, ${failed} failed`); +if (failed > 0) process.exit(1); From 6ccd5fee8a8eca32547fa06a10bbc7ebd3fd785c Mon Sep 17 00:00:00 2001 From: Alexa Amundson <118287761+blackboxprogramming@users.noreply.github.com> Date: Thu, 5 Mar 2026 20:14:41 -0600 
Subject: [PATCH 3/4] Update index.html Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- index.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/index.html b/index.html index e25248f..f1c720c 100644 --- a/index.html +++ b/index.html @@ -396,7 +396,7 @@

⚡ Codex Agent Runner

// Display user bubble (show handle tag if one was detected) addMessage('user', raw, null); - history.push({ role: 'user', content: prompt || raw }); + history.push({ role: 'user', content: prompt }); promptEl.value = ''; promptEl.style.height = ''; From 2ab5c8c17977bfbf94169a6b5460d9521fb7b8ab Mon Sep 17 00:00:00 2001 From: Alexa Amundson <118287761+blackboxprogramming@users.noreply.github.com> Date: Thu, 5 Mar 2026 20:14:46 -0600 Subject: [PATCH 4/4] Update index.html Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- index.html | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/index.html b/index.html index f1c720c..43a0d32 100644 --- a/index.html +++ b/index.html @@ -372,9 +372,21 @@

⚡ Codex Agent Runner

statusBadge.textContent = 'Ollama online'; const data = await r.json(); const models = (data.models || []).map(m => m.name); - modelList.innerHTML = models.length - ? models.map(m => ``).join('') - : ''; + // Safely populate modelList without using innerHTML to avoid injection + modelList.innerHTML = ''; + if (models.length) { + models.forEach(m => { + const opt = document.createElement('option'); + opt.value = m; + opt.textContent = m; + modelList.appendChild(opt); + }); + } else { + const opt = document.createElement('option'); + opt.value = ''; + opt.textContent = 'no models found'; + modelList.appendChild(opt); + } modelList.onchange = () => { modelInput.value = modelList.value; }; return true; }