diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
deleted file mode 100644
index b2261c6..0000000
--- a/.devcontainer/devcontainer.json
+++ /dev/null
@@ -1,49 +0,0 @@
-{
- "name": "locus workbench",
- "image": "mcr.microsoft.com/devcontainers/python:3.12-bookworm",
- "features": {
- "ghcr.io/devcontainers/features/node:1": {
- "version": "20"
- },
- "ghcr.io/devcontainers/features/sshd:1": {
- "version": "latest"
- }
- },
- "updateContentCommand": "bash .devcontainer/postCreate.sh",
- "postAttachCommand": {
- "workbench": "bash .devcontainer/workbench-attach.sh"
- },
- "forwardPorts": [5173, 3101, 8100],
- "portsAttributes": {
- "5173": {
- "label": "Workbench",
- "onAutoForward": "openPreview",
- "visibility": "public"
- },
- "3101": {
- "label": "BFF (internal)",
- "onAutoForward": "silent"
- },
- "8100": {
- "label": "Backend (internal)",
- "onAutoForward": "silent"
- }
- },
- "customizations": {
- "codespaces": {
- "openFiles": [".devcontainer/welcome.md"]
- },
- "vscode": {
- "extensions": [
- "ms-python.python",
- "ms-python.vscode-pylance",
- "esbenp.prettier-vscode",
- "dbaeumer.vscode-eslint"
- ],
- "settings": {
- "workbench.startupEditor": "none",
- "workbench.tips.enabled": false
- }
- }
- }
-}
diff --git a/.devcontainer/postCreate.sh b/.devcontainer/postCreate.sh
deleted file mode 100755
index f09664b..0000000
--- a/.devcontainer/postCreate.sh
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env bash
-# One-shot setup, runs once per codespace creation.
-# Installs locus (editable) + the workbench's two npm projects.
-
-set -euo pipefail
-
-cd "$(dirname "$0")/.."
-
-# Install locus with:
-# - [dev] β ruff/mypy/pytest/etc.
-# - [llm] β openai + anthropic + ollama + oci. The workbench backend
-# imports these eagerly to build providers, so any one of
-# them missing crashes runner.py before serving a request.
-# Plus fastapi + python-multipart β runner.py uses FastAPI directly, but
-# fastapi isn't a transitive dep of locus-sdk's [dev] extra (fastmcp pulls
-# starlette, not fastapi).
-echo "[postCreate] installing locus + dev/llm deps + workbench backend deps"
-pip install --upgrade pip
-pip install -e ".[dev,llm]" fastapi python-multipart
-
-# Always fetch from the public npm registry. Some contributors generate
-# `package-lock.json` on Oracle's corp network, which rewrites every
-# package's `resolved` URL to an internal mirror that codespaces (and
-# external users) can't reach. The explicit `--registry` flag plus the
-# committed lockfiles using public URLs (this PR) defends against both.
-echo "[postCreate] installing workbench/bff deps"
-(cd workbench/bff && npm install --no-audit --no-fund --registry=https://registry.npmjs.org/)
-
-echo "[postCreate] installing workbench/web deps"
-(cd workbench/web && npm install --no-audit --no-fund --registry=https://registry.npmjs.org/)
-
-echo "[postCreate] done"
diff --git a/.devcontainer/welcome.md b/.devcontainer/welcome.md
deleted file mode 100644
index 71760fa..0000000
--- a/.devcontainer/welcome.md
+++ /dev/null
@@ -1,57 +0,0 @@
-# π locus workbench β welcome
-
-You've just clicked the **Launch workbench** button. While this VS Code
-window finishes booting, a **separate browser tab** is opening with the
-workbench UI itself.
-
-## Where's the workbench?
-
-Look for a new tab in your browser at:
-
-> `https://-5173.app.github.dev`
-
-The actual URL is printed in the terminal panel below (look for the
-π banner). **β-click** that URL to open it.
-
-If your browser blocks the auto-popup, just click the URL from the
-terminal. Same destination.
-
-## Two-click flow
-
-1. Open the workbench tab (above).
-2. **Provider settings** β paste an OpenAI or Anthropic API key β
- pick a tutorial in the sidebar β **Run**.
-
-OCI options in **Provider settings** won't work here β they need a
-local `~/.oci/config` that doesn't exist in this Codespace. Use OpenAI
-or Anthropic for the in-browser path. (For OCI, clone the repo and run
-the workbench locally; see [`docs/workbench.md`](../docs/workbench.md).)
-
-## What's running
-
-Three tiers are auto-started by `postStart.sh`:
-
-| Tier | Port | Process |
-|---|---|---|
-| FastAPI runner (Python) | 8100 | `uvicorn workbench.backend.runner:app` |
-| BFF (Node Express) | 3101 | `tsx watch workbench/bff/src/server.ts` |
-| Vite dev server (web UI) | 5173 | `vite` in `workbench/web/` |
-
-Logs at `/tmp/wb-backend.log`, `/tmp/wb-bff.log`, `/tmp/wb-web.log` β
-the banner terminal panel tails all three.
-
-## Restarting tiers (if they die)
-
-```bash
-# Manually re-fire the lifecycle scripts:
-bash .devcontainer/postStart.sh
-```
-
-## Further reading
-
-- [`docs/workbench.md`](../docs/workbench.md) β the public workbench page
-- [`docs/index.md`](../docs/index.md) β locus SDK landing page
-- [`workbench/README.md`](../workbench/README.md) β three-tier
- architecture explained
-- [`examples/`](../examples/) β 55 progressive tutorials, all runnable
- from the workbench
diff --git a/.devcontainer/workbench-attach.sh b/.devcontainer/workbench-attach.sh
deleted file mode 100755
index a965c11..0000000
--- a/.devcontainer/workbench-attach.sh
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/usr/bin/env bash
-# postAttachCommand entry point.
-#
-# Runs every time a VS Code client attaches to the codespace. Does three
-# things, in this order:
-#
-# 1. **Idempotently** start the three tiers (backend / BFF / Vite).
-# "Idempotently" because postAttach fires on every attach β second
-# attach must NOT double-launch the tiers. Each tier is launched
-# only if its port isn't already listening.
-# 2. Wait for Vite to bind on :5173, then actively open VS Code's
-# Simple Browser pointed at the forwarded workbench URL. Uses the
-# correct Command-URI format `vscode://command/simpleBrowser.show?
-# ` which VS Code Web honours.
-# 3. Print a banner with the URL and tail the three tier logs so the
-# attached terminal panel stays useful.
-#
-# Architectural rationale: this runs in postAttachCommand, not
-# postStartCommand, because `portsAttributes.onAutoForward: openPreview`
-# only fires when a client is attached at the moment the port binds.
-# If tiers boot in postStart (before the client attaches), the
-# port-forward event is already past by the time the user shows up,
-# and openPreview silently no-ops. Streamlit-example uses exactly this
-# pattern β server in postAttachCommand β and the auto-open works
-# reliably for their users.
-
-set -euo pipefail
-
-cd "$(dirname "$0")/.."
-
-# Codespace β forwarded URL; anything else (local devcontainer / Docker)
-# β localhost.
-if [ -n "${CODESPACE_NAME:-}" ]; then
- URL="https://${CODESPACE_NAME}-5173.app.github.dev"
-else
- URL="http://localhost:5173"
-fi
-
-# Helper: boot one tier as a fully detached process (survives this
-# script's exit). Same pattern as PR #150's postStart.sh.
-boot_tier() {
- local name="$1" port="$2" log="$3"; shift 3
- if ss -tlnp 2>/dev/null | grep -qE ":${port}\b"; then
- echo "[attach] $name already listening on :$port β skipping"
- return
- fi
- : > "$log"
- setsid bash -c "exec \"\$@\" >>\"$log\" 2>&1" _ "$@" < /dev/null &
- echo "[attach] $name pid=$! β $log"
-}
-
-# 1. Boot the three tiers, only if not already up.
-echo "[attach] checking tier state"
-boot_tier backend 8100 /tmp/wb-backend.log \
- python -m uvicorn --app-dir workbench/backend runner:app \
- --host 127.0.0.1 --port 8100
-(cd workbench/bff && boot_tier bff 3101 /tmp/wb-bff.log npm run dev)
-(cd workbench/web && boot_tier web 5173 /tmp/wb-web.log npm run dev)
-
-# 2. Wait for Vite to bind, then ask VS Code to open Simple Browser.
-echo "[attach] waiting for Vite on :5173..."
-VITE_READY=0
-for _ in $(seq 1 60); do
- if curl -sf http://127.0.0.1:5173/ > /dev/null 2>&1; then
- VITE_READY=1
- break
- fi
- sleep 3
-done
-
-# Build the proper Command-URI for `simpleBrowser.show`. VS Code wants
-# JSON-encoded args, then URL-encoded as the query string of a
-# `vscode://command/?` URI. Python is in the base
-# image so this is portable.
-if [ "$VITE_READY" = "1" ] && command -v code >/dev/null 2>&1; then
- ENCODED_ARGS=$(python3 -c "
-import urllib.parse, json, sys
-print(urllib.parse.quote(json.dumps([sys.argv[1]])))
-" "$URL")
- code --open-url "vscode://command/simpleBrowser.show?${ENCODED_ARGS}" \
- >/dev/null 2>&1 || true
-fi
-
-# 3. Banner + log tail.
-cat </dev/null
diff --git a/README.md b/README.md
index 13f9804..e8b3bb2 100644
--- a/README.md
+++ b/README.md
@@ -24,12 +24,6 @@
Workbench
-
---
## Your first agent β 5 lines
@@ -227,28 +221,25 @@ metadata), and **Patterns** (the nine first-class
runtimes β including [Cognitive routing](docs/workbench.md#cognitive-routing-pattern)
with a Rule-based β¬ LLM-picker toggle).
-Pick the launch path that fits.
+Two ways to run it. Pick whichever fits.
-### Path A β GitHub Codespaces (zero install, free tier)
+### Run locally (from source)
-[](https://codespaces.new/oracle-samples/locus?devcontainer_path=.devcontainer%2Fdevcontainer.json)
+```bash
+git clone https://github.com/oracle-samples/locus.git && cd locus
+pip install -e ".[server,oci,openai,anthropic]"
-Click the badge above (or [this link](https://codespaces.new/oracle-samples/locus?devcontainer_path=.devcontainer%2Fdevcontainer.json)).
-GitHub provisions a Linux container in your account, runs
-`.devcontainer/postCreate.sh` (Python 3.12 + Node 20 + `pip install
--e ".[dev,llm]"` + `npm install` for the workbench projects), then
-backgrounds the three tiers (FastAPI runner :8100, Express BFF
-:3101, Vite :5173). After ~2 minutes the workbench UI opens in a
-second tab β VS Code Web on tab 1, the live app on tab 2. Click
-**Provider settings** in the header, paste an OpenAI or Anthropic
-key, pick a tutorial, hit **Run**.
+# Three terminals, one per tier:
+cd workbench/bff && npm install && npm run dev # BFF on :3101
+cd workbench/web && npm install && npm run dev # Vite on :5173
+cd workbench/backend && python -m uvicorn --app-dir . runner:app --port 8100
+```
-You burn your own free Codespaces minutes (60 hrs/month on a personal
-account). Oracle pays nothing. The OCI options in *Provider settings*
-require a local `~/.oci/config` so they don't apply in
-Codespaces β use OpenAI or Anthropic for the cloud demo path.
+Open <http://localhost:5173>, click **Provider settings**, pick a
+provider, fill in the credentials, save. OCI options work out of the
+box because the backend reads your local `~/.oci/config`.
-### Path B β Docker (local, BYO key)
+### Run in Docker
```bash
git clone https://github.com/oracle-samples/locus.git && cd locus
@@ -257,32 +248,16 @@ docker run --rm -p 5173:5173 -p 3101:3101 -p 8100:8100 locus-workbench
# open http://localhost:5173
```
-Image is ~1.3 GB on first build (Oracle Linux 9-slim base + Python
-3.12 + Node 20 + locus + the workbench source). Subsequent builds
-hit the layer cache. If ports 5173 / 3101 / 8100 are in use locally,
-remap them:
+OpenAI and Anthropic work as-is — paste the key into *Provider settings*.
+For the OCI providers (api-key or session token), bind-mount your `~/.oci`
+into the container at the same host path and pass `HOME` so the OCI SDK
+finds both the config and the `key_file` paths it references:
```bash
-docker run --rm \
- -p 5273:5173 -p 3201:3101 -p 8200:8100 \
+docker run --rm -p 5173:5173 -p 3101:3101 -p 8100:8100 \
+ -v "$HOME/.oci:$HOME/.oci:ro" \
+ -e "HOME=$HOME" \
locus-workbench
-# then http://localhost:5273
-```
-
-Stop with `Ctrl-C`; the `--rm` flag removes the container on exit.
-
-### Path C β From source (development)
-
-For iterating on the workbench itself:
-
-```bash
-git clone https://github.com/oracle-samples/locus.git && cd locus
-pip install -e ".[server,oci,openai,anthropic]"
-
-# Three terminals, one per tier:
-cd workbench/bff && npm install && npm run dev # :3101
-cd workbench/web && npm install && npm run dev # :5173
-cd workbench/backend && python -m uvicorn --app-dir . runner:app --port 8100
```
β Full walkthrough: [Workbench guide](docs/workbench.md) Β· [Provider settings](docs/workbench.md#provider-settings) Β· [Cognitive routing pattern](docs/workbench.md#cognitive-routing-pattern) Β· [Troubleshooting](docs/workbench.md#troubleshooting)
@@ -338,7 +313,7 @@ src/locus/
βββ integrations/ MCP (client + server)
workbench/ Browser playground β Tutorials / Skills / Protocols tabs,
- three model slots, SSE event stream, Codespaces-ready.
+ three model slots, SSE event stream, Docker-ready.
examples/ 56 progressive tutorials, each a single runnable file.
tests/unit/ Deterministic, no external deps. Runs in CI on every PR.
tests/integration/ Live OCI / OpenAI / Oracle Database 26ai. Gated on credentials.
diff --git a/docs/index.md b/docs/index.md
index e8ca77f..ce6fffc 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -17,7 +17,7 @@ Describe the task. locus selects the protocol and coordinates the agents.
- **Self-critiquing agents with grounded outputs.** Every turn is scored; every factual claim is verified against the source that produced it. Hallucinations caught at the source, not in production.
- **Full causal traceability.** Every decision, tool call, and reasoning step is a typed event you can replay, audit, and debug β at 2 a.m. or in your compliance report.
-[Launch workbench](https://codespaces.new/oracle-samples/locus?devcontainer_path=.devcontainer%2Fdevcontainer.json){ .md-button .md-button--primary }
+[Workbench guide](workbench.md){ .md-button .md-button--primary }
[GitHub](https://github.com/oracle-samples/locus){ .md-button }
```bash
diff --git a/docs/workbench.md b/docs/workbench.md
index 38d3bc5..3402c08 100644
--- a/docs/workbench.md
+++ b/docs/workbench.md
@@ -1,18 +1,15 @@
# Locus workbench
-A browser-based playground for every locus pattern. Two clicks to a
-running agent β no CLI, no `pip install`, no editor setup.
+A browser-based playground for every locus pattern. Two ways to run
+it — straight from source on your laptop, or inside a Docker
+container — both end at the same UI at <http://localhost:5173>.
-[Launch in Codespaces](https://codespaces.new/oracle-samples/locus?devcontainer_path=.devcontainer%2Fdevcontainer.json){ .md-button .md-button--primary }
-[View on GitHub](https://github.com/oracle-samples/locus){ .md-button }
+[View on GitHub](https://github.com/oracle-samples/locus){ .md-button .md-button--primary }
+[Workbench README](https://github.com/oracle-samples/locus/tree/main/workbench){ .md-button }
-**Click 1 β Launch.** GitHub provisions a Codespace, installs Python +
-Node deps, and boots all three tiers (FastAPI runner, Node BFF, Vite
-front-end). After ~2 minutes the workbench UI opens in a Simple Browser tab.
-
-**Click 2 β Run.** Open *Provider settings*, paste an OpenAI or
-Anthropic key, pick a tutorial in the sidebar, hit **Run**. A real
-agent streams events back into the browser.
+Once it's up: open *Provider settings*, paste an OpenAI / Anthropic
+key or wire up an OCI profile, pick a tutorial in the sidebar, hit
+**Run**. A real agent streams events back into the browser.

@@ -27,9 +24,8 @@ sequential pipeline, a map-reduce fan-out, a critic loop with
that imports locus, builds the agent, and streams events through to
your browser.
-It's also the canonical demo for Codespaces and Docker: visitors
-arrive at this app, pick a workflow, and learn the SDK by running
-real ones.
+It's also the canonical demo: visitors arrive at this app, pick a
+workflow, and learn the SDK by running real ones.
```
βββββββββββββββββββββββββββββββββββββββββ
@@ -54,104 +50,144 @@ You paste your provider key once per tab β **the workbench never
persists API keys to localStorage**, so closing the tab discards
everything.
-## Three paths to spin it up
+## Run it locally (from source)
+
+The dev-loop path. Best for iterating on the workbench code itself,
+debugging a pattern, or extending the runner.
-Pick whichever fits β **Codespaces** for zero install, **Docker** for a
-local container with BYO key, or **From source** for iterating on the
-workbench itself.
+### Prerequisites
-### Path A β GitHub Codespaces (zero install, free)
+- **Python 3.11+** with `pip` (3.12 is what CI uses).
+- **Node 20+** with `npm`.
+- A model provider — one of: an `OPENAI_API_KEY`, an
+ `ANTHROPIC_API_KEY`, or a populated `~/.oci/config` for OCI GenAI.
-[](https://codespaces.new/oracle-samples/locus?devcontainer_path=.devcontainer%2Fdevcontainer.json)
+### Step-by-step
-Click the badge on the [repo home page](https://github.com/oracle-samples/locus).
-GitHub provisions a Linux container in your account, runs
-`.devcontainer/postCreate.sh` to install Python + Node deps, then
-forwards port 5173 publicly. ~2-min cold start. You burn your own
-free Codespaces minutes (60 hrs/month), nothing on the locus side.
+```bash
+git clone https://github.com/oracle-samples/locus.git
+cd locus
+pip install -e ".[server,oci,openai,anthropic]" # core + provider extras
+```
-### Path B β Docker (local, BYO key)
+Three tiers, three terminals (or three tmux panes). They don't depend
+on each other at startup, but every tier expects the one downstream
+of it to come up within ~30 s:
```bash
-git clone https://github.com/oracle-samples/locus.git && cd locus
-docker build -t locus-workbench -f workbench/Dockerfile .
-docker run --rm -p 5173:5173 -p 3101:3101 -p 8100:8100 locus-workbench
-# open http://localhost:5173
-# β paste OpenAI / Anthropic key in Provider settings β Run a tutorial
+# Terminal 1 — FastAPI runner (the actual workbench backend)
+cd workbench/backend
+python -m uvicorn --app-dir . runner:app --port 8100
+
+# Terminal 2 — Express BFF (proxies /api/* from the web tier to the runner)
+cd workbench/bff
+npm install
+npm run dev # binds :3101
+
+# Terminal 3 — Vite dev server (the UI)
+cd workbench/web
+npm install
+npm run dev # binds :5173
```
-Image is ~1.3 GB on first build (Oracle Linux 9-slim base + Python
-3.12 + Node 20 + locus + the workbench source). Subsequent builds
-hit the layer cache.
-
-## Codespaces β what happens after you click
-
-1. **Cold start** β GitHub builds the container from
- `.devcontainer/devcontainer.json` (Python 3.12 + Node 20). First
- boot runs `postCreate.sh` to `pip install -e ".[dev,llm]"` plus
- `fastapi` + `python-multipart`, and `npm install` both workbench
- projects against the public npm registry. ~2 minutes.
-2. **Two tabs open** β GitHub Codespaces opens a **VS Code Web** tab
- first (the editor session that owns the container). When Vite
- binds on `:5173`, a **second tab** opens with the
- workbench UI itself (`https://-5173.app.github.dev`)
- per `5173.onAutoForward: openBrowserOnce`. **The workbench is the
- second tab, not VS Code.** If your browser blocks the popup, the
- VS Code terminal panel shows a clearly-labelled `π locus
- workbench is ready` banner with a β-clickable URL β same
- destination.
-3. **Auto-boot** β `postStart.sh` backgrounds the three tiers in
- detached `setsid` sessions so they survive after the lifecycle hook
- exits: `uvicorn runner:app` on `:8100`, `npm run dev` (Express) on
- `:3101`, `npm run dev` (Vite) on `:5173`.
-4. **Run a pattern** β *Provider settings* β paste an OpenAI or
- Anthropic key β pick a tutorial β **Run**.
-
-The OCI options in the Provider settings modal will not work in
-Codespaces β they need a local `~/.oci/config` that doesn't exist
-in the container. Use OpenAI or Anthropic for the cloud demo path.
-
-## Docker β port-remap if 5173 is taken
+Or use the convenience `Makefile`:
```bash
-docker run --rm \
- -p 5273:5173 -p 3201:3101 -p 8200:8100 \
- locus-workbench
-# then http://localhost:5273
+cd workbench
+make install # npm install in bff + web
+make backend # pane 1 — :8100
+make bff # pane 2 — :3101
+make web # pane 3 — :5173
+```
+
+`make install` also runs `npx playwright install chromium` for the
+end-to-end test suite in `workbench/e2e/`. The `make backend` target
+is the workbench runner — distinct from `make backend-research` and
+`make backend-finance`, which spin up the A2A mesh demo peers for
+[tutorial 34](tutorials/tutorial_34_a2a_protocol.md), not the
+workbench.
+
+### Verify it's up
+
+```bash
+curl -s http://127.0.0.1:8100/api/health | jq # runner
+curl -s http://127.0.0.1:3101/api/health | jq # bff
+curl -sI http://127.0.0.1:5173/ | head -1 # web — HTTP/1.1 200 OK
```
-Stop with `Ctrl-C`; the `--rm` flag removes the container on exit.
+Then open <http://localhost:5173>. Click **Provider settings** (top
+right), pick your provider, fill the credentials, hit Save. Pick a
+tutorial from the sidebar, hit **Run**.
-### Path C β From source (development)
+## Run it in Docker
-For iterating on the workbench itself:
+The packaged path. Best for handing the workbench to a teammate, a
+new laptop, or a demo machine where you don't want to install the
+Python and Node toolchains directly.
+
+### Build
```bash
git clone https://github.com/oracle-samples/locus.git
cd locus
-pip install -e ".[server,oci,openai,anthropic]" # core + extras
+docker build -t locus-workbench -f workbench/Dockerfile .
+```
+
+Image is ~1.3 GB on first build (Oracle Linux 9-slim base + Python
+3.12 + Node 20 + locus + workbench source). Subsequent builds hit
+the BuildKit layer cache.
-# Three terminals, one per tier:
-cd workbench/bff && npm install && npm run dev # :3101
-cd workbench/web && npm install && npm run dev # :5173
-cd workbench/backend && python -m uvicorn --app-dir . runner:app --port 8100
+### Run
+
+For **OpenAI / Anthropic** providers — paste the key into *Provider
+settings* once the UI is up. Nothing extra to pass to the container:
+
+```bash
+docker run --rm -p 5173:5173 -p 3101:3101 -p 8100:8100 locus-workbench
+# open http://localhost:5173
```
-Or use the `Makefile` in `workbench/`:
+For **OCI** providers (api-key or session token), the OCI SDK reads
+`~/.oci/config` at runtime — and that config file contains an
+**absolute** `key_file` path on your host. The container has no such
+path by default, so the SDK reads the config but fails to load the
+key. The fix is to bind-mount your host's `~/.oci` at the same path
+inside the container *and* set `HOME` so the SDK looks for the
+config in the mirrored location:
```bash
-cd workbench && make install
-make backend # pane 1 β FastAPI runner on :8100
-make bff # pane 2 β Express BFF on :3101
-make web # pane 3 β Vite dev server on :5173
+docker run --rm -p 5173:5173 -p 3101:3101 -p 8100:8100 \
+ -v "$HOME/.oci:$HOME/.oci:ro" \
+ -e "HOME=$HOME" \
+ locus-workbench
```
-`make install` runs `npm install` for both `bff/` and `web/` plus
-`npx playwright install chromium` for the e2e suite. The `backend`
-target is the workbench's FastAPI runner β distinct from
-`backend-research` and `backend-finance`, which spin up the A2A
-mesh demo peers for [tutorial 34](tutorials/tutorial_34_a2a_protocol.md),
-not the workbench itself.
+Both pieces matter — the mount alone gets the config file readable
+but the `key_file` line points at a path that still doesn't resolve;
+the `HOME` env alone redirects the SDK to a path nothing is mounted
+at. Together they mirror your host layout into the container so every
+absolute reference inside `config` lines up.
+
+The mount is read-only (`:ro`) — the workbench never writes to your
+OCI directory.
+
+### Port collisions
+
+If 5173 / 3101 / 8100 are taken on the host (you have the local
+workbench running, for instance), remap them:
+
+```bash
+docker run --rm \
+ -p 5273:5173 -p 3201:3101 -p 8200:8100 \
+ locus-workbench
+# then http://localhost:5273
+```
+
+The container ports stay 5173/3101/8100 — only the host-side port
+changes. The Vite dev server inside the container always listens on
+5173; remapping doesn't break the BFF↔backend or web↔BFF wiring.
+
+Stop with `Ctrl-C`; `--rm` removes the container on exit.
## Provider settings
@@ -162,9 +198,11 @@ The header's **Provider settings** modal accepts four shapes:
(defaults to `claude-sonnet-4-6`).
- **OCI session token** β `profile` (e.g. `MY_PROFILE`) +
`compartment_id` + `region`. Reads `~/.oci/config` at runtime;
- needs a valid session token. Local-machine only.
-- **OCI api-key** β same shape, different OCI auth type. Local-machine
- only.
+ needs a valid session token. Works on localhost out of the box;
+ works in Docker when you bind-mount `~/.oci` (see
+ [Run it in Docker](#run-it-in-docker)).
+- **OCI api-key** — same shape, different OCI auth type. Same
+ hosting requirements as OCI session token.
Settings live in the page's memory. Closing the tab discards them.
Reopening the page = paste again. This is intentional: an API key
@@ -272,22 +310,31 @@ for the architectural details.
## Cost
-**You pay $0** when someone uses the workbench. Each visitor's
-compute hits their own free GitHub / their own Docker, and their
-model calls hit their own provider key. Oracle pays $0 unless an
-oracle-employee opens it AND `oracle-samples` org Codespaces billing
-is enabled.
+**You pay $0 to run the workbench itself.** All three tiers run
+locally — your laptop or your Docker daemon. The only thing you pay
+for is the model calls your tutorials make, and those go directly
+to *your* provider key (OpenAI / Anthropic) or *your* OCI tenancy.
+Oracle pays nothing.
## Troubleshooting
-- **Sidebar is empty** β BFF couldn't reach the backend. Check
- `docker logs ` or the runner pane: usually means the
- backend hasn't finished starting yet (10-20s on cold boot).
+- **Sidebar is empty** — the BFF couldn't reach the backend. The
+ runner takes 10–20 s to start; reload the page once you see
+ `Uvicorn running on http://0.0.0.0:8100` in the backend logs
+ (or `docker logs <container>` for the Docker path).
- **"Provider settings: setup required" never goes away** β you
closed the modal without hitting Save. Reopen and click Save.
-- **OCI session-token auth says "no profile"** β you're running in
- Codespaces / Docker; OCI auth needs `~/.oci/config` mounted in.
- Switch to OpenAI or Anthropic.
+- **OCI auth says "no profile" or `KeyError: 'tenancy'`** — the OCI
+ SDK can't find `~/.oci/config`. On localhost: verify
+ `~/.oci/config` exists and the `[<profile>]` section has
+ `tenancy`, `user`, `fingerprint`, `key_file`. In Docker: you
+ forgot the bind-mount and `HOME` env — see [Run it in Docker](#run-it-in-docker)
+ for the exact command.
+- **OCI auth says the key file is missing** — your `key_file` line
+ in `~/.oci/config` is an absolute path. In Docker, that path has
+ to resolve inside the container. The `-v "$HOME/.oci:$HOME/.oci:ro"
+ -e "HOME=$HOME"` pair mirrors the host layout so absolute paths
+ line up.
- **Tutorial fails with "no parsed Pydantic" / empty output** β your
model is too small for structured output. Use `gpt-5.5-2026-04-23`,
`gpt-4o`, or `claude-sonnet-4-6` for the demos that use
diff --git a/workbench/README.md b/workbench/README.md
index 48035e8..dc792fe 100644
--- a/workbench/README.md
+++ b/workbench/README.md
@@ -83,6 +83,38 @@ make web # vite dev server
make e2e
```
+## Run in Docker
+
+Single-image build, all three tiers in one container:
+
+```bash
+# from the repo root
+docker build -t locus-workbench -f workbench/Dockerfile .
+docker run --rm -p 5173:5173 -p 3101:3101 -p 8100:8100 locus-workbench
+# open http://localhost:5173
+```
+
+For OpenAI / Anthropic, paste the key into *Provider settings* once
+the UI is up — no extra container args needed.
+
+For OCI (api-key or session-token), the SDK reads `~/.oci/config`
+which references the private key at an **absolute** host path. The
+container has no such path by default, so bind-mount `~/.oci` at the
+same host path inside the container and set `HOME` so the SDK looks
+there:
+
+```bash
+docker run --rm -p 5173:5173 -p 3101:3101 -p 8100:8100 \
+ -v "$HOME/.oci:$HOME/.oci:ro" \
+ -e "HOME=$HOME" \
+ locus-workbench
+```
+
+The mount is read-only (`:ro`); the workbench never writes to your
+OCI directory. The host's `$HOME/.oci` is mirrored inside the
+container at the same path, so absolute `key_file` lines in
+`~/.oci/config` resolve.
+
## Tests
`workbench/e2e/` β Playwright + chromium.
diff --git a/workbench/web/src/main.ts b/workbench/web/src/main.ts
index 5f4155f..78656ea 100644
--- a/workbench/web/src/main.ts
+++ b/workbench/web/src/main.ts
@@ -199,12 +199,12 @@ function syncSettingsRows() {
function openSettings() {
// First-open default depends on where we're running. On localhost the user
// typically has a populated `~/.oci/config`, so OCI session is the natural
- // pick. On a Codespace (or any non-localhost) the OCI session option is
- // filtered out of the dropdown β handing it as the default makes the
- //